|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import TYPE_CHECKING |
|
|
|
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available |
|
|
|
|
|
# Lazy-import table consumed by `_LazyModule`: each key names a submodule of
# this package, each value lists the public names that submodule exports.
_import_structure = {}
_import_structure["configuration_biogpt"] = ["BIOGPT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BioGptConfig"]
_import_structure["tokenization_biogpt"] = ["BioGptTokenizer"]

# The modeling code requires PyTorch; advertise its symbols only when the
# torch backend is actually installed.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_biogpt"] = [
        "BIOGPT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "BioGptForCausalLM",
        "BioGptForTokenClassification",
        "BioGptForSequenceClassification",
        "BioGptModel",
        "BioGptPreTrainedModel",
    ]
|
|
|
|
|
# Static-analysis path: under a type checker / IDE, import every symbol
# eagerly so the real definitions can be resolved. This branch never runs
# at runtime (TYPE_CHECKING is False outside type-checking).
if TYPE_CHECKING:
    from .configuration_biogpt import BIOGPT_PRETRAINED_CONFIG_ARCHIVE_MAP, BioGptConfig
    from .tokenization_biogpt import BioGptTokenizer

    # Mirrors the optional-dependency gate used when building
    # `_import_structure`: the modeling symbols exist only with torch installed.
    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_biogpt import (
            BIOGPT_PRETRAINED_MODEL_ARCHIVE_LIST,
            BioGptForCausalLM,
            BioGptForSequenceClassification,
            BioGptForTokenClassification,
            BioGptModel,
            BioGptPreTrainedModel,
        )

else:
    import sys

    # Runtime path: replace this module object in sys.modules with a
    # _LazyModule proxy. Submodules listed in `_import_structure` are then
    # imported only when one of their names is first accessed, which keeps
    # `import` of this package cheap (the torch-backed modeling module in
    # particular is not loaded up front).
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
|
|