PEFT
Safetensors
Xinpeng Wang committed on
Commit abf0bda
1 Parent(s): f345785

update models

Files changed (2)
  1. adapter_config.json +9 -1
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,17 +1,23 @@
 {
+  "alpha_pattern": {},
   "auto_mapping": null,
   "base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
+  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
+  "loftq_config": {},
   "lora_alpha": 16,
   "lora_dropout": 0.1,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 64,
+  "rank_pattern": {},
   "revision": null,
   "target_modules": [
     "k_proj",
@@ -23,5 +29,7 @@
     "v_proj",
     "gate_proj"
   ],
-  "task_type": "CAUSAL_LM"
+  "task_type": "CAUSAL_LM",
+  "use_dora": false,
+  "use_rslora": false
 }
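For context, the added keys (alpha_pattern, rank_pattern, loftq_config, layer_replication, megatron_*, use_dora, use_rslora) are configuration fields serialized by more recent PEFT releases. A minimal sketch of loading this adapter with PEFT, assuming a PEFT version that recognizes these keys; the adapter repository id below is a placeholder, not taken from this commit:

import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

# Base model taken from base_model_name_or_path in adapter_config.json.
base = AutoModelForCausalLM.from_pretrained(
    "mistralai/Mistral-7B-v0.1",
    torch_dtype=torch.float16,
    device_map="auto",
)

# Reads adapter_config.json and adapter_model.safetensors from the given
# repo id or local directory ("<this-adapter-repo>" is a placeholder).
model = PeftModel.from_pretrained(base, "<this-adapter-repo>")
model.eval()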
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c03c78955a8fbee4e5df3313741c16fa2446013befbcb32a6fa2879e6e0b3760
-size 671146480
+oid sha256:53ac7d1b165ccd5c3db45f4cbb39b043b2ae0bbd610c8ea5db5b6a16d698834d
+size 602370064
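The file above is a Git LFS pointer; only the referenced blob changed. For reference, a minimal sketch of inspecting the updated adapter weights with the safetensors library, assuming the actual file has been pulled locally via Git LFS:

from safetensors import safe_open

# Open the checkpoint lazily and list tensor names, shapes, and dtypes
# without loading everything into memory at once.
with safe_open("adapter_model.safetensors", framework="pt", device="cpu") as f:
    for name in f.keys():
        t = f.get_tensor(name)
        print(name, tuple(t.shape), t.dtype)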