Model selection
app.py
CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 # Load LLaMA 3.2 model and tokenizer
-model_name = "meta-llama/LLaMA-3.2"
+model_name = "meta-llama/LLaMA-3.2"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name)
 
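Since the commit is titled "Model selection", a minimal sketch of how the Gradio app could expose that choice follows. This is an illustration only: the dropdown wiring, the AVAILABLE_MODELS list, and the Llama 3.2 model IDs in it are assumptions, not part of this diff (the actual Llama 3.2 checkpoints on the Hub are gated and use IDs such as meta-llama/Llama-3.2-1B-Instruct, not "meta-llama/LLaMA-3.2").

# Sketch only: one possible way to let users pick the model in a Gradio app.
# The model IDs below are assumptions and require approved access to the
# gated meta-llama repositories.
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

AVAILABLE_MODELS = [
    "meta-llama/Llama-3.2-1B-Instruct",
    "meta-llama/Llama-3.2-3B-Instruct",
]

_cache = {}

def load(model_name):
    # Cache loaded models so switching back to a previous choice is cheap.
    if model_name not in _cache:
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        model = AutoModelForCausalLM.from_pretrained(model_name)
        _cache[model_name] = (tokenizer, model)
    return _cache[model_name]

def generate(prompt, model_name):
    tokenizer, model = load(model_name)
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=100)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

demo = gr.Interface(
    fn=generate,
    inputs=[gr.Textbox(label="Prompt"), gr.Dropdown(AVAILABLE_MODELS, label="Model")],
    outputs=gr.Textbox(label="Response"),
)

if __name__ == "__main__":
    demo.launch()

Caching loaded checkpoints keeps switching between dropdown entries fast, at the cost of holding every previously selected model in memory.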