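# `__init__.py` for the Kinoe model, following the standard transformers lazy-import
# pattern: `_import_structure` describes what the package exports, and `_LazyModule`
# defers the actual (torch-dependent) imports until the attributes are first accessed.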
from typing import TYPE_CHECKING

from transformers.utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_torch_available,
)

# The configuration has no heavy dependencies and is always exposed.
_import_structure = {
    "configuration_kinoe": ["KinoeConfig"],
}

# The modeling classes require torch; register them only when it is available.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_kinoe"] = [
        "KinoeForCausalLM",
        "KinoeModel",
        "KinoePreTrainedModel",
        "KinoeForSequenceClassification",
        "KinoeForTokenClassification",
    ]

if TYPE_CHECKING:
    # Real imports for static type checkers; mirrors `_import_structure` above.
    from .configuration_kinoe import KinoeConfig

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_kinoe import (
            KinoeForCausalLM,
            KinoeForSequenceClassification,
            KinoeForTokenClassification,
            KinoeModel,
            KinoePreTrainedModel,
        )

else:
    import sys

    # At runtime, replace this module with a lazy proxy that imports submodules on demand.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
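# Example usage sketch (assumes this package is importable as `kinoe`; adjust the
# package name to your layout). Accessing a modeling class is what triggers the
# lazy, torch-dependent import:
#
#     from kinoe import KinoeConfig, KinoeForCausalLM
#
#     config = KinoeConfig()
#     model = KinoeForCausalLM(config)  # randomly initialized weights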