from transformers import LlamaForCausalLM, LlamaTokenizer

model_name = "mlabonne/llama-2-7b-guanaco"

# Load the model
model = LlamaForCausalLM.from_pretrained(model_name)

# Load the tokenizer
tokenizer = LlamaTokenizer.from_pretrained(model_name)

# Now, you can use the model and tokenizer as needed
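To quickly check that the model and tokenizer work together, a minimal generation sketch could look like the following. The prompt, max_new_tokens value, and decoding settings are purely illustrative, not part of the original snippet.

# Minimal sketch: tokenize a prompt, generate a completion, and decode it.
prompt = "What is a large language model?"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=100)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Note that loading a 7B-parameter model in full precision requires a significant amount of memory; in practice you may want to pass a lower-precision dtype or a quantization config to from_pretrained, depending on your hardware.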