india / app.py
from huggingface_hub import snapshot_download
from transformers import AutoModelForPreTraining, AutoTokenizer
import os

# Define the local directory the model and tokenizer will be loaded from
model_directory = "/tmp/InLegalBERT"

# Create the directory if it doesn't exist
os.makedirs(model_directory, exist_ok=True)

# Download the full model repository (config, weights, and tokenizer files)
# into the local directory; fetching pytorch_model.bin and
# tokenizer_config.json alone is not enough for from_pretrained() to load.
snapshot_download(repo_id="law-ai/InLegalBERT", local_dir=model_directory)

# Load the model and tokenizer from the local directory
tokenizer = AutoTokenizer.from_pretrained(model_directory)
model = AutoModelForPreTraining.from_pretrained(model_directory)
print("Model and tokenizer loaded successfully!")
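
# Illustrative sanity check (not part of the original app): tokenize a made-up
# sample sentence and run one forward pass. Assuming the model resolves to a
# BERT-style pre-training head, the output exposes masked-LM logits as
# `prediction_logits` with shape (batch, seq_len, vocab_size).
import torch

sample_text = "The appellant filed a petition before the High Court."
inputs = tokenizer(sample_text, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
print("Prediction logits shape:", outputs.prediction_logits.shape)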