{"base_model_name_or_path": "codellama/CodeLlama-34b-hf", "bias": "none", "fan_in_fan_out": false, "inference_mode": true, "lora_alpha": 8.0, "lora_dropout": 0.0, "modules_to_save": null, "peft_type": "LORA", "r": 8, "target_modules": null, "task_type": "CAUSAL_LM"} |