noeloco committed on
Commit
588d798
1 Parent(s): fa33dfd

Training in progress, step 17

Browse files
adapter_config.json CHANGED
@@ -19,13 +19,14 @@
19
  "rank_pattern": {},
20
  "revision": null,
21
  "target_modules": [
 
 
 
22
  "o_proj",
23
- "k_proj",
24
- "v_proj",
25
  "q_proj",
26
- "gate_proj",
27
- "up_proj",
28
- "down_proj"
29
  ],
30
- "task_type": "CAUSAL_LM"
 
31
  }
 
19
  "rank_pattern": {},
20
  "revision": null,
21
  "target_modules": [
22
+ "up_proj",
23
+ "down_proj",
24
+ "gate_proj",
25
  "o_proj",
 
 
26
  "q_proj",
27
+ "k_proj",
28
+ "v_proj"
 
29
  ],
30
+ "task_type": "CAUSAL_LM",
31
+ "use_rslora": false
32
  }
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8db256ab486e8cbade675780483f44d046aef2ac6de10f9e1c440760346d6d66
3
  size 80013120
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8c343298d77f8c8cda22054c0e32ae441670ae776ea08d1191e83ab4a31a31f
3
  size 80013120
config.json CHANGED
@@ -34,7 +34,7 @@
34
  "rope_theta": 1000000,
35
  "tie_word_embeddings": false,
36
  "torch_dtype": "bfloat16",
37
- "transformers_version": "4.37.0.dev0",
38
  "use_cache": false,
39
  "vocab_size": 32016
40
  }
 
34
  "rope_theta": 1000000,
35
  "tie_word_embeddings": false,
36
  "torch_dtype": "bfloat16",
37
+ "transformers_version": "4.37.0",
38
  "use_cache": false,
39
  "vocab_size": 32016
40
  }
tokenizer_config.json CHANGED
@@ -66,6 +66,7 @@
66
  "▁<EOT>"
67
  ],
68
  "bos_token": "<s>",
 
69
  "clean_up_tokenization_spaces": false,
70
  "eos_token": "</s>",
71
  "eot_token": "▁<EOT>",
 
66
  "▁<EOT>"
67
  ],
68
  "bos_token": "<s>",
69
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
70
  "clean_up_tokenization_spaces": false,
71
  "eos_token": "</s>",
72
  "eot_token": "▁<EOT>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fd620ba2f3177e11344a2cf06fb00be4e570b95bc1ed40c63065dedc07ec2d9a
3
  size 4795
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9cf34011f3480c36bc63792fde0cc4fd8f04ef7d4627f8f79baf19897b632d17
3
  size 4795