{ "peft_type": "LORA", "auto_mapping": null, "base_model_name_or_path": "Vijayendra/llama-3b-lora-cyclic-attention", // or your base model, if different "revision": null, "task_type": "CAUSAL_LM", "inference_mode": false, "r": 16, "target_modules": ["up_proj", "gate_proj", "down_proj", "o_proj", "k_proj", "v_proj", "q_proj"], "lora_alpha": 16, "lora_dropout": 0.05, "fan_in_fan_out": false, "bias": "none", "use_rslora": false, "modules_to_save": null, "init_lora_weights": true, "layers_to_transform": null, "layers_pattern": null, "rank_pattern": {}, "alpha_pattern": {}, "megatron_config": null, "megatron_core": "megatron.core", "loftq_config": {}, "use_dora": false, "layer_replication": null }