Tatvajsh committed on
Commit
03b4b29
1 Parent(s): 218f611

Upload model

Browse files
Files changed (2) hide show
  1. adapter_config.json +7 -7
  2. adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -8,21 +8,21 @@
8
  "init_lora_weights": true,
9
  "layers_pattern": null,
10
  "layers_to_transform": null,
11
- "lora_alpha": 32,
12
- "lora_dropout": 0.1,
13
  "modules_to_save": null,
14
  "peft_type": "LORA",
15
  "r": 8,
16
  "rank_pattern": {},
17
  "revision": null,
18
  "target_modules": [
19
- "self_attn.k_proj",
20
- "self_attn.o_proj",
21
- "mlp.up_proj",
22
  "mlp.gate_proj",
 
 
23
  "self_attn.q_proj",
24
- "mlp.down_proj",
25
- "self_attn.v_proj"
26
  ],
27
  "task_type": "CAUSAL_LM"
28
  }
 
8
  "init_lora_weights": true,
9
  "layers_pattern": null,
10
  "layers_to_transform": null,
11
+ "lora_alpha": 16,
12
+ "lora_dropout": 0.05,
13
  "modules_to_save": null,
14
  "peft_type": "LORA",
15
  "r": 8,
16
  "rank_pattern": {},
17
  "revision": null,
18
  "target_modules": [
19
+ "mlp.down_proj",
 
 
20
  "mlp.gate_proj",
21
+ "self_attn.o_proj",
22
+ "self_attn.v_proj",
23
  "self_attn.q_proj",
24
+ "mlp.up_proj",
25
+ "self_attn.k_proj"
26
  ],
27
  "task_type": "CAUSAL_LM"
28
  }
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:08b7a90de5a89b84f7120275b6501cef5df8360c155b0e282dd70a3583a3faaf
3
  size 50899792
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2eb75e408612a863afc66affa3908505ed02d36b95b703ec16c360397403499f
3
  size 50899792