Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix dummy objects for quantization #14478

Merged
merged 2 commits into from
Nov 21, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions src/transformers/utils/dummy_flax_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -477,6 +477,13 @@ class FlaxBertForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Backend-guard dummy: construction defers to ``requires_backends``,
    which presumably raises an informative error about the missing "flax"
    backend (confirm against the helper's definition)."""
    requires_backends(self, ["flax"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Mirror the real alternate constructor so a missing backend surfaces
    # here rather than as an AttributeError at the call site.
    requires_backends(cls, ["flax"])

def __call__(self, *inputs, **named_inputs):
    # Guard the call path of the Flax module as well.
    requires_backends(self, ["flax"])


class FlaxBertForPreTraining:
def __init__(self, *args, **kwargs):
Expand Down
35 changes: 35 additions & 0 deletions src/transformers/utils/dummy_pt_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,13 @@ class TextDatasetForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Backend-guard dummy: every entry point defers to ``requires_backends``
    — presumably raising an informative error for the missing "torch"
    backend (TODO: confirm against the helper)."""
    requires_backends(self, ["torch"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Alternate constructor mirrors the real API surface.
    requires_backends(cls, ["torch"])

def forward(self, *inputs, **named_inputs):
    # Guard the forward path too.
    requires_backends(self, ["torch"])


class BeamScorer:
def __init__(self, *args, **kwargs):
Expand Down Expand Up @@ -783,6 +790,13 @@ class BertForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Dummy stand-in used when the "torch" backend is unavailable; all
    entry points delegate to ``requires_backends``."""
    requires_backends(self, ["torch"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Keep the real class's alternate-constructor surface.
    requires_backends(cls, ["torch"])

def forward(self, *inputs, **named_inputs):
    # Same guard on the forward path.
    requires_backends(self, ["torch"])


class BertForPreTraining:
def __init__(self, *args, **kwargs):
Expand Down Expand Up @@ -2106,6 +2120,13 @@ class FNetForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Dummy stand-in for the torch-backed model; construction only reports
    the missing backend via ``requires_backends``."""
    requires_backends(self, ["torch"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Mirror the real from_pretrained entry point.
    requires_backends(cls, ["torch"])

def forward(self, *inputs, **named_inputs):
    # Mirror the real forward entry point.
    requires_backends(self, ["torch"])


class FNetForPreTraining:
def __init__(self, *args, **kwargs):
Expand Down Expand Up @@ -3254,6 +3275,13 @@ class MegatronBertForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Backend-guard dummy: defers to ``requires_backends`` for the missing
    "torch" backend on every entry point."""
    requires_backends(self, ["torch"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Alternate constructor kept for API parity with the real class.
    requires_backends(cls, ["torch"])

def forward(self, *inputs, **named_inputs):
    # Forward path guarded the same way.
    requires_backends(self, ["torch"])


class MegatronBertForPreTraining:
def __init__(self, *args, **kwargs):
Expand Down Expand Up @@ -3373,6 +3401,13 @@ class MobileBertForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Dummy placeholder: instantiation reports the missing "torch" backend
    through ``requires_backends``."""
    requires_backends(self, ["torch"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Keep from_pretrained on the dummy so the failure mode is consistent.
    requires_backends(cls, ["torch"])

def forward(self, *inputs, **named_inputs):
    # And likewise for forward().
    requires_backends(self, ["torch"])


class MobileBertForPreTraining:
def __init__(self, *args, **kwargs):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ def __init__(self, *args, **kwargs):
# NOTE(review): in sibling dummy classes this `from_pretrained` carries a
# `@classmethod` decorator; the decorator line is cut off by the diff view
# here — confirm it is present in the real file.
def from_pretrained(cls, *args, **kwargs):
# Dummy entry point: delegates to requires_backends for the missing backends.
requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *args, **kwargs):
# Same guard on the forward path.
requires_backends(self, ["pytorch_quantization", "torch"])


class QDQBertForMultipleChoice:
def __init__(self, *args, **kwargs):
Expand All @@ -22,11 +25,21 @@ def __init__(self, *args, **kwargs):
@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    """Dummy entry point: reports the missing "pytorch_quantization"/"torch"
    backends via ``requires_backends``."""
    requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *inputs, **named_inputs):
    # Forward path guarded identically.
    requires_backends(self, ["pytorch_quantization", "torch"])


class QDQBertForNextSentencePrediction:
    """Backend-guard dummy for QDQBertForNextSentencePrediction.

    Stands in when the "pytorch_quantization"/"torch" backends are missing;
    every entry point defers to ``requires_backends`` (presumably raising an
    informative import error — confirm against the helper).
    """

    # Backends the real implementation needs; shared by all entry points.
    _required_backends = ["pytorch_quantization", "torch"]

    def __init__(self, *ctor_args, **ctor_kwargs):
        requires_backends(self, self._required_backends)

    @classmethod
    def from_pretrained(cls, *model_args, **model_kwargs):
        requires_backends(cls, cls._required_backends)

    def forward(self, *inputs, **named_inputs):
        requires_backends(self, self._required_backends)


class QDQBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
Expand All @@ -36,6 +49,9 @@ def __init__(self, *args, **kwargs):
@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    """Dummy alternate constructor: delegates to ``requires_backends``."""
    requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *inputs, **named_inputs):
    # Forward path carries the same backend guard.
    requires_backends(self, ["pytorch_quantization", "torch"])


class QDQBertForSequenceClassification:
def __init__(self, *args, **kwargs):
Expand All @@ -45,6 +61,9 @@ def __init__(self, *args, **kwargs):
@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    """Dummy alternate constructor guarding the missing backends."""
    requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *inputs, **named_inputs):
    # Same guard on the call path.
    requires_backends(self, ["pytorch_quantization", "torch"])


class QDQBertForTokenClassification:
def __init__(self, *args, **kwargs):
Expand All @@ -54,6 +73,9 @@ def __init__(self, *args, **kwargs):
@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    """Dummy alternate constructor: reports the missing backends."""
    requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *inputs, **named_inputs):
    # Forward path guarded identically.
    requires_backends(self, ["pytorch_quantization", "torch"])


class QDQBertLayer:
def __init__(self, *args, **kwargs):
Expand All @@ -68,6 +90,9 @@ def __init__(self, *args, **kwargs):
@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    """Dummy alternate constructor: defers to ``requires_backends``."""
    requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *inputs, **named_inputs):
    # Same backend guard on forward().
    requires_backends(self, ["pytorch_quantization", "torch"])


class QDQBertModel:
def __init__(self, *args, **kwargs):
Expand All @@ -77,6 +102,9 @@ def __init__(self, *args, **kwargs):
@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    """Dummy alternate constructor guarding the missing backends."""
    requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *inputs, **named_inputs):
    # Forward path carries the identical guard.
    requires_backends(self, ["pytorch_quantization", "torch"])


class QDQBertPreTrainedModel:
def __init__(self, *args, **kwargs):
Expand All @@ -86,6 +114,9 @@ def __init__(self, *args, **kwargs):
@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    """Dummy alternate constructor: reports the missing backends."""
    requires_backends(cls, ["pytorch_quantization", "torch"])

def forward(self, *inputs, **named_inputs):
    # Forward path guarded the same way.
    requires_backends(self, ["pytorch_quantization", "torch"])


def load_tf_weights_in_qdqbert(*loader_args, **loader_kwargs):
    """Dummy weight loader: only reports that the "pytorch_quantization"
    and "torch" backends are required."""
    requires_backends(load_tf_weights_in_qdqbert, ["pytorch_quantization", "torch"])
14 changes: 14 additions & 0 deletions src/transformers/utils/dummy_tf_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -452,6 +452,13 @@ class TFBertForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Backend-guard dummy for the TensorFlow model: every entry point
    defers to ``requires_backends`` for the missing "tf" backend."""
    requires_backends(self, ["tf"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Mirror the real alternate constructor.
    requires_backends(cls, ["tf"])

def call(self, *inputs, **named_inputs):
    # Keras-style call() entry point guarded the same way.
    requires_backends(self, ["tf"])


class TFBertForPreTraining:
def __init__(self, *args, **kwargs):
Expand Down Expand Up @@ -1774,6 +1781,13 @@ class TFMobileBertForNextSentencePrediction:
def __init__(self, *ctor_args, **ctor_kwargs):
    """Dummy placeholder: instantiation reports the missing "tf" backend."""
    requires_backends(self, ["tf"])

@classmethod
def from_pretrained(cls, *model_args, **model_kwargs):
    # Alternate constructor kept for API parity with the real class.
    requires_backends(cls, ["tf"])

def call(self, *inputs, **named_inputs):
    # Keras-style call() entry point, same guard.
    requires_backends(self, ["tf"])


class TFMobileBertForPreTraining:
def __init__(self, *args, **kwargs):
Expand Down
3 changes: 2 additions & 1 deletion utils/check_dummies.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
PATH_TO_TRANSFORMERS = "src/transformers"

# Matches is_xxx_available()
_re_backend = re.compile(r"is\_([a-z]*)_available()")
_re_backend = re.compile(r"is\_([a-z_]*)_available()")
# Matches from xxx import bla
_re_single_line_import = re.compile(r"\s+from\s+\S*\s+import\s+([^\(\s].*)\n")
_re_test_backend = re.compile(r"^\s+if\s+is\_[a-z]*\_available\(\)")
Expand Down Expand Up @@ -131,6 +131,7 @@ def create_dummy_object(name, backend_name):
"ForConditionalGeneration",
"ForMaskedLM",
"ForMultipleChoice",
"ForNextSentencePrediction",
"ForObjectDetection",
"ForQuestionAnswering",
"ForSegmentation",
Expand Down