# Install required libraries (run this in your shell or a notebook cell):
# pip install transformers huggingface_hub gradio torch

# Import necessary modules
from huggingface_hub import login
from transformers import AutoTokenizer, AutoModelForCausalLM

# Log in to Hugging Face (replace 'your_huggingface_token' with your actual token)
login("your_huggingface_token")

# Load the tokenizer and model from Hugging Face
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-multi")
model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-350M-multi")

# Input text for code generation
text = "def bubble_sort(list_elements):"

# Tokenize the input text
input_ids = tokenizer(text, return_tensors="pt").input_ids

# Generate code based on the input text
generated_ids = model.generate(
    input_ids,
    max_length=200,                       # Adjust as needed
    num_return_sequences=1,               # Number of generated sequences to return
    pad_token_id=tokenizer.eos_token_id   # Handle padding tokens
)

# Decode the generated tokens back to text
generated_code = tokenizer.decode(generated_ids[0], skip_special_tokens=True)

# Output the generated code
print(generated_code)
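
# Optional: gradio is installed above but not used in this snippet. Below is a
# minimal sketch of wrapping the same generation step in a Gradio text-to-text
# interface, assuming the tokenizer and model loaded earlier. The function name
# generate_code is an illustrative choice, not part of the original example.
import gradio as gr

def generate_code(prompt):
    # Tokenize the user-supplied prompt and generate a completion with the same
    # settings used in the snippet above
    prompt_ids = tokenizer(prompt, return_tensors="pt").input_ids
    output_ids = model.generate(
        prompt_ids,
        max_length=200,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

# Launch a simple web UI: one text box for the prompt, one for the generated code
demo = gr.Interface(fn=generate_code, inputs="text", outputs="text")
demo.launch()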