from transformers import AutoModelForCausalLM, AutoTokenizer


def load_model():
    # Hugging Face Hub identifier of the checkpoint to download.
    model_name = "Erfan11/Neuracraft"
    # Fetch the tokenizer and the causal-LM weights from the Hub.
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return model, tokenizer
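

# Minimal usage sketch (not part of the original file): it assumes the
# Erfan11/Neuracraft checkpoint loads as a standard causal language model
# with weights usable by the PyTorch path above; the prompt and generation
# settings below are illustrative only.
if __name__ == "__main__":
    model, tokenizer = load_model()
    prompt = "Hello, world!"
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(**inputs, max_new_tokens=50)
    print(tokenizer.decode(output_ids[0], skip_special_tokens=True))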