# Install necessary libraries (accelerate is required for device_map="auto")
# !pip install transformers accelerate
from transformers import AutoTokenizer, pipeline
import torch

# Model and prompt details
model_name = "mlabonne/llama-2-7b-guanaco"
prompt = "What is a large language model?"

# Load the tokenizer and build a text-generation pipeline
tokenizer = AutoTokenizer.from_pretrained(model_name)
text_generation_pipeline = pipeline(
    "text-generation",
    model=model_name,
    torch_dtype=torch.float16,
    device_map="auto",
)

# Generate text using the provided prompt (Llama-2 instruction format)
sequences = text_generation_pipeline(
    f'<s>[INST] {prompt} [/INST]',
    do_sample=True,
    top_k=10,
    num_return_sequences=1,
    eos_token_id=tokenizer.eos_token_id,
    max_length=200,
)

# Print the generated text
for seq in sequences:
    print(f"Generated Text: {seq['generated_text']}")