Mirror of https://github.com/saymrwulf/transformers.git, synced 2026-05-14 20:58:08 +00:00
Scatter dummies + skip pipeline tests (#13996)
* Scatter dummies + skip pipeline tests
* Add torch scatter to build docs
parent b65c389769, commit 5b317f7ea4
5 changed files with 94 additions and 73 deletions
@@ -753,6 +753,7 @@ jobs:
       - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev
       - run: pip install --upgrade pip
       - run: pip install ."[docs]"
+      - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.9.0+cpu.html
       - save_cache:
           key: v0.4-build_doc-{{ checksum "setup.py" }}
           paths:
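The torch-scatter wheels on that index are built against one specific torch build (1.9.0+cpu here), so this pin has to move together with the torch version used by the docs job. Not part of the commit, but a quick probe to confirm the pairing in an environment installed this way could look like the following (assuming torch_scatter exposes __version__, as the PyTorch Geometric companion packages normally do):

# Quick environment probe; not part of this commit. Assumes both packages were
# installed as in the CircleCI step above.
import torch
import torch_scatter

print("torch:", torch.__version__)            # the wheel index above targets 1.9.0+cpu
print("torch-scatter:", torch_scatter.__version__)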
@@ -44,6 +44,7 @@ from . import dependency_versions_check
 from .file_utils import (
     _LazyModule,
     is_flax_available,
+    is_scatter_available,
     is_sentencepiece_available,
     is_speech_available,
     is_tf_available,
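is_scatter_available itself lives in file_utils and is not shown in this diff. A minimal sketch of such a helper, assuming it follows the same importlib-probe pattern as the other is_*_available checks, would be:

# Sketch only; the real implementation in file_utils is not part of this diff.
import importlib.util

_scatter_available = importlib.util.find_spec("torch_scatter") is not None


def is_scatter_available():
    # True when the torch_scatter package can be imported in this environment.
    return _scatter_available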
@@ -488,6 +489,25 @@ else:
         name for name in dir(dummy_timm_objects) if not name.startswith("_")
     ]

+if is_scatter_available():
+    _import_structure["models.tapas"].extend(
+        [
+            "TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST",
+            "TapasForMaskedLM",
+            "TapasForQuestionAnswering",
+            "TapasForSequenceClassification",
+            "TapasModel",
+            "TapasPreTrainedModel",
+            "load_tf_weights_in_tapas",
+        ]
+    )
+else:
+    from .utils import dummy_scatter_objects
+
+    _import_structure["utils.dummy_scatter_objects"] = [
+        name for name in dir(dummy_scatter_objects) if not name.startswith("_")
+    ]
+
 # PyTorch-backed objects
 if is_torch_available():
     _import_structure["benchmark.benchmark"] = ["PyTorchBenchmark"]
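The effect for consumers: with torch-scatter installed, the TAPAS names registered in _import_structure resolve to the real classes in models.tapas; without it, the same names are re-exported from utils.dummy_scatter_objects, so the import still succeeds and only actual use fails with an actionable message. A rough illustration (the checkpoint name is just an example, not part of the diff):

# Illustration of the consumer-side effect; not part of the diff.
from transformers import TapasModel
from transformers.file_utils import is_scatter_available

if is_scatter_available():
    model = TapasModel.from_pretrained("google/tapas-base")  # example checkpoint
else:
    # TapasModel is the dummy added later in this commit (dummy_scatter_objects.py);
    # constructing it calls requires_backends(self, ["scatter"]) and raises with an
    # install hint instead of an opaque ModuleNotFoundError.
    try:
        TapasModel()
    except ImportError as err:
        print(err)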
@@ -1157,17 +1177,6 @@ if is_torch_available():
             "load_tf_weights_in_t5",
         ]
     )
-    _import_structure["models.tapas"].extend(
-        [
-            "TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST",
-            "TapasForMaskedLM",
-            "TapasForQuestionAnswering",
-            "TapasForSequenceClassification",
-            "TapasModel",
-            "TapasPreTrainedModel",
-            "load_tf_weights_in_tapas",
-        ]
-    )
     _import_structure["models.transfo_xl"].extend(
         [
             "TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST",
@@ -2282,6 +2291,19 @@ if TYPE_CHECKING:
     else:
         from .utils.dummy_timm_objects import *

+    if is_scatter_available():
+        from .models.tapas import (
+            TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST,
+            TapasForMaskedLM,
+            TapasForQuestionAnswering,
+            TapasForSequenceClassification,
+            TapasModel,
+            TapasPreTrainedModel,
+            load_tf_weights_in_tapas,
+        )
+    else:
+        from .utils.dummy_scatter_objects import *
+
     if is_torch_available():
         # Benchmarks
         from .benchmark.benchmark import PyTorchBenchmark
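The TYPE_CHECKING branch mirrors _import_structure so that static analyzers and IDEs see concrete symbols while runtime imports stay lazy. A toy sketch of that general pattern, with stand-in names that are not from transformers:

# Toy sketch of the lazy-import pattern; names below are invented for the example.
import importlib
from typing import TYPE_CHECKING

_import_structure = {"json": ["dumps"]}  # stand-in for the real mapping

if TYPE_CHECKING:
    from json import dumps  # what static analysis tools resolve; never run at runtime
else:
    class _LazyDemo:
        # stand-in for transformers.file_utils._LazyModule
        def __getattr__(self, name):
            for module_name, names in _import_structure.items():
                if name in names:
                    return getattr(importlib.import_module(module_name), name)
            raise AttributeError(name)

    lazy = _LazyDemo()
    print(lazy.dumps({"ok": True}))  # the json module is imported only on this access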
@@ -2847,15 +2869,6 @@ if TYPE_CHECKING:
             T5PreTrainedModel,
             load_tf_weights_in_t5,
         )
-        from .models.tapas import (
-            TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST,
-            TapasForMaskedLM,
-            TapasForQuestionAnswering,
-            TapasForSequenceClassification,
-            TapasModel,
-            TapasPreTrainedModel,
-            load_tf_weights_in_tapas,
-        )
         from .models.transfo_xl import (
             TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST,
             AdaptiveEmbedding,
@@ -3487,58 +3487,6 @@ def load_tf_weights_in_t5(*args, **kwargs):
     requires_backends(load_tf_weights_in_t5, ["torch"])


-TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST = None
-
-
-class TapasForMaskedLM:
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class TapasForQuestionAnswering:
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class TapasForSequenceClassification:
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class TapasModel:
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class TapasPreTrainedModel:
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-def load_tf_weights_in_tapas(*args, **kwargs):
-    requires_backends(load_tf_weights_in_tapas, ["torch"])
-
-
 TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST = None

src/transformers/utils/dummy_scatter_objects.py (new file, 54 lines)
@@ -0,0 +1,54 @@
+# This file is autogenerated by the command `make fix-copies`, do not edit.
+from ..file_utils import requires_backends
+
+
+TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST = None
+
+
+class TapasForMaskedLM:
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["scatter"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["scatter"])
+
+
+class TapasForQuestionAnswering:
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["scatter"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["scatter"])
+
+
+class TapasForSequenceClassification:
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["scatter"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["scatter"])
+
+
+class TapasModel:
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["scatter"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["scatter"])
+
+
+class TapasPreTrainedModel:
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["scatter"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["scatter"])
+
+
+def load_tf_weights_in_tapas(*args, **kwargs):
+    requires_backends(load_tf_weights_in_tapas, ["scatter"])
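All of these dummies delegate to requires_backends from file_utils, which is not part of this diff. A simplified sketch of what it presumably does for the "scatter" backend (message text and structure are assumptions, not the real code):

# Simplified sketch; the real helper consults a table of (availability check, message)
# pairs for every backend name it is handed.
import importlib.util

SCATTER_IMPORT_ERROR = (
    "{0} requires the torch-scatter library, which was not found in your environment. "
    "You can install it with `pip install torch-scatter`."
)


def requires_backends(obj, backends):
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    if "scatter" in backends and importlib.util.find_spec("torch_scatter") is None:
        raise ImportError(SCATTER_IMPORT_ERROR.format(name))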
@@ -111,7 +111,12 @@ class PipelineTestCaseMeta(type):
                     tiny_config.is_encoder_decoder = False
                 if ModelClass.__name__.endswith("WithLMHead"):
                     tiny_config.is_decoder = True
-                model = ModelClass(tiny_config)
+                try:
+                    model = ModelClass(tiny_config)
+                except ImportError as e:
+                    self.skipTest(
+                        f"Cannot run with {tiny_config} as the model requires a library that isn't installed: {e}"
+                    )
                 if hasattr(model, "eval"):
                     model = model.eval()
                 if tokenizer_class is not None:
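Outside the pipeline metaclass, the same skip-on-ImportError pattern can be reproduced in a plain unittest: skipTest raises unittest.SkipTest, so the rest of the test body never runs and the case is reported as skipped rather than failed. A standalone sketch with an invented stand-in model class:

# Standalone illustration; FakeScatterModel is invented for the example and is not
# a transformers class.
import unittest


class FakeScatterModel:
    def __init__(self, config):
        raise ImportError("torch_scatter is required but not installed")


class ExampleTest(unittest.TestCase):
    def test_model_builds(self):
        try:
            model = FakeScatterModel(config=None)
        except ImportError as e:
            self.skipTest(f"Cannot run: the model requires a library that isn't installed: {e}")
        self.assertTrue(hasattr(model, "eval"))


if __name__ == "__main__":
    unittest.main()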