|
import os

from huggingface_hub import hf_hub_download, snapshot_download
from transformers import AutoModelForPreTraining, AutoTokenizer
|
|
|
|
|
# Download the InLegalBERT checkpoint and load it with transformers.
#
# NOTE: downloading only pytorch_model.bin + tokenizer_config.json (as the
# original code did) is not enough — from_pretrained() also needs config.json
# and the vocabulary file, so loading would fail. snapshot_download() fetches
# the complete repository in one call. It also writes real files into
# `local_dir` instead of requiring os.rename() on paths inside the shared HF
# cache, which would corrupt the cache (the cached paths are symlinks into
# its blob store) and raises OSError when /tmp is on a different filesystem.

model_directory = "/tmp/InLegalBERT"

# Fetch every file of the repo (config.json, vocab.txt, weights, tokenizer
# files) directly into the target directory; the directory is created for us.
snapshot_download("law-ai/InLegalBERT", local_dir=model_directory)

# Load tokenizer and model from the now-complete local checkpoint.
tokenizer = AutoTokenizer.from_pretrained(model_directory)
model = AutoModelForPreTraining.from_pretrained(model_directory)

print("Model and tokenizer loaded successfully!")
|
|