from transformers import LlamaForCausalLM, LlamaTokenizer

model_name = "mlabonne/llama-2-7b-guanaco"

# Load the model
model = LlamaForCausalLM.from_pretrained(model_name)

# Load the tokenizer
tokenizer = LlamaTokenizer.from_pretrained(model_name)

# Now you can use the model and tokenizer as needed, e.g. for generation as sketched below
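
# A minimal generation sketch. The prompt text and generation settings below are
# illustrative assumptions, not part of the original snippet; they also assume the
# full-precision 7B model fits in available memory.
prompt = "What is a large language model?"
inputs = tokenizer(prompt, return_tensors="pt")

# Generate a short completion (max_new_tokens is an arbitrary example value)
outputs = model.generate(**inputs, max_new_tokens=64)

# Decode the generated token IDs back to text
print(tokenizer.decode(outputs[0], skip_special_tokens=True))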