|
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "chargoddard/Yi-34B-Llama",
  "bias": "none",
  "fan_in_fan_out": null,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 256,
  "lora_dropout": 0.05,
  "modules_to_save": [
    "embed_tokens",
    "lm_head"
  ],
  "peft_type": "LORA",
  "r": 128,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "gate_proj",
    "k_proj",
    "q_proj",
    "up_proj",
    "v_proj",
    "o_proj",
    "down_proj"
  ],
  "task_type": "CAUSAL_LM"
}
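A minimal sketch of loading an adapter described by this config with the `peft` library. The adapter repo id `your-org/yi-34b-lora` is a hypothetical placeholder (any local directory or Hub repo containing this `adapter_config.json` plus the adapter weights would work); the base model id comes from `base_model_name_or_path` above.

```python
# Sketch: load the base model, then attach the LoRA adapter.
# "your-org/yi-34b-lora" is a hypothetical adapter location, not from the config.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "chargoddard/Yi-34B-Llama",   # base_model_name_or_path
    torch_dtype=torch.float16,
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained("chargoddard/Yi-34B-Llama")

# PeftModel reads adapter_config.json from the adapter path: it wraps the
# attention (q/k/v/o_proj) and MLP (gate/up/down_proj) linears with
# rank-128 LoRA layers scaled by lora_alpha / r = 256 / 128 = 2, and loads
# full fine-tuned weights for embed_tokens and lm_head (modules_to_save).
model = PeftModel.from_pretrained(base, "your-org/yi-34b-lora")
model.eval()
```

Because `modules_to_save` replaces `embed_tokens` and `lm_head` outright rather than low-rank-adapting them, `model.merge_and_unload()` can fold everything back into a plain `transformers` model for adapter-free inference.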