# Install the required libraries
pip install transformers huggingface_hub gradio torch

# Log in to the Hugging Face Hub (paste your access token into login())
from huggingface_hub import login
login(" ")

from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-multi")
model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-350M-multi")

# Input text for code generation
text = "def bubble_sort(list_elements):"

# Tokenize the input text
input_ids = tokenizer(text, return_tensors="pt").input_ids

# Generate code based on the input text
generated_ids = model.generate(
    input_ids,
    max_length=200,                       # Adjust as needed
    num_return_sequences=1,               # Number of generated sequences to return
    pad_token_id=tokenizer.eos_token_id   # Handle padding tokens
)

# Decode the generated tokens back to text
generated_code = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
print(generated_code)
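Since gradio is installed above but never used in the snippet, here is a minimal sketch of how the same generation code could be wrapped in a Gradio interface. The `generate_code` helper is a hypothetical name introduced for illustration, and it reuses the `tokenizer` and `model` objects loaded earlier.

import gradio as gr

# Hypothetical helper that reuses the tokenizer and model loaded above
def generate_code(prompt):
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    generated_ids = model.generate(
        input_ids,
        max_length=200,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id
    )
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)

# Simple text-in / text-out interface around the generator
demo = gr.Interface(fn=generate_code, inputs="text", outputs="text")
demo.launch()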