{"base_model_name_or_path": "meta-llama/Llama-2-13b-chat-hf", "bias": "none", "fan_in_fan_out": false, "inference_mode": true, "lora_alpha": 8.0, "lora_dropout": 0.0, "modules_to_save": null, "peft_type": "LORA", "r": 8, "target_modules": null, "task_type": "CAUSAL_LM"}