# Use a pipeline as a high-level helper
from transformers import pipeline

# Chat-style prompt; the pipeline applies the model's chat template automatically
messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="Undi95/Meta-Llama-3.1-8B-Claude")
# Returns a list of generation dicts, each containing a "generated_text" field
print(pipe(messages))

# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("Undi95/Meta-Llama-3.1-8B-Claude")
model = AutoModelForCausalLM.from_pretrained("Undi95/Meta-Llama-3.1-8B-Claude")
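
# A minimal sketch (an addition, not part of the original snippet) showing one
# way the directly loaded model could answer the same chat messages: build the
# prompt with the tokenizer's chat template, generate, and decode only the
# newly generated tokens.
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
outputs = model.generate(inputs, max_new_tokens=256)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))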