Training in progress, epoch 0
adapter_config.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "
+  "base_model_name_or_path": "codellama/CodeLlama-7b-hf",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -19,14 +19,15 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "k_proj",
-    "o_proj",
     "v_proj",
+    "up_proj",
     "gate_proj",
+    "o_proj",
+    "k_proj",
     "down_proj",
     "q_proj"
   ],
   "task_type": "CAUSAL_LM",
+  "use_dora": false,
   "use_rslora": false
 }
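The updated config points the adapter at codellama/CodeLlama-7b-hf and targets all seven of the model's projection matrices. As a minimal sketch, this is roughly how such a file is produced with peft's LoraConfig; the r/lora_alpha/lora_dropout values below are placeholders, since the diff hunks skip the config lines that carry the real ones, and the use_dora field assumes peft >= 0.9:

    # Hypothetical reconstruction of the adapter config above (placeholder values marked).
    from peft import LoraConfig

    config = LoraConfig(
        base_model_name_or_path="codellama/CodeLlama-7b-hf",
        r=16,               # placeholder: real rank not visible in the diff
        lora_alpha=32,      # placeholder: real alpha not visible in the diff
        lora_dropout=0.05,  # placeholder: real dropout not visible in the diff
        bias="none",
        task_type="CAUSAL_LM",
        target_modules=[
            "v_proj", "up_proj", "gate_proj", "o_proj",
            "k_proj", "down_proj", "q_proj",
        ],
        use_rslora=False,   # use_dora defaults to False in peft >= 0.9
    )
    config.save_pretrained("adapter-out")  # writes adapter-out/adapter_config.json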
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:082197c836547caaee7f8b0acd23b88aac55c953c58d2f8446293f5626b2fd1a
+size 1803907984
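This Git LFS pointer tracks the adapter weights themselves, about 1.8 GB. A minimal sketch of loading them onto the base model with peft, where "user/repo" stands in for this repository's id:

    # Sketch: attach the trained LoRA adapter to the CodeLlama base model.
    from transformers import AutoModelForCausalLM
    from peft import PeftModel

    base = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")
    model = PeftModel.from_pretrained(base, "user/repo")  # placeholder repo id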
runs/Mar01_22-45-59_00282ec317e5/events.out.tfevents.1709333168.00282ec317e5.1271.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40fa310692b62f039dcab425d6646a61c62f9a9de906f3543cac8067a4ad03db
+size 5688
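This is the TensorBoard event log for the epoch-0 run; it can be browsed locally with, for example, tensorboard --logdir runs.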
tokenizer.json
CHANGED
@@ -31,7 +31,43 @@
       "special": true
     },
     {
-      "id":
+      "id": 32007,
+      "content": "▁<PRE>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 32008,
+      "content": "▁<SUF>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 32009,
+      "content": "▁<MID>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 32010,
+      "content": "▁<EOT>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 32016,
       "content": "<|im_end|>",
       "single_word": false,
       "lstrip": false,
@@ -40,7 +76,7 @@
       "special": true
     },
     {
-      "id":
+      "id": 32017,
       "content": "<|im_start|>",
       "single_word": false,
       "lstrip": false,
@@ -32152,7 +32188,23 @@
       "μ": 31996,
       "ζΆ": 31997,
       "εΌ": 31998,
-      "η»": 31999
+      "η»": 31999,
+      "▁<SU": 32000,
+      "▁<SUF": 32001,
+      "▁<PRE": 32002,
+      "▁<M": 32003,
+      "▁<MID": 32004,
+      "▁<E": 32005,
+      "▁<EOT": 32006,
+      "▁<PRE>": 32007,
+      "▁<SUF>": 32008,
+      "▁<MID>": 32009,
+      "▁<EOT>": 32010,
+      "▁<EOT><EOT>": 32011,
+      "▁<EOT><EOT><EOT>": 32012,
+      "▁<EOT><EOT><EOT><EOT>": 32013,
+      "▁<EOT><EOT><EOT><EOT><EOT>": 32014,
+      "▁<EOT><EOT><EOT><EOT><EOT><EOT>": 32015
     },
     "merges": [
       "▁ t",
@@ -93403,7 +93455,18 @@
       "▁▁▁▁▁▁▁▁▁ ▁▁▁▁▁▁",
       "▁▁▁▁▁▁▁ ▁▁▁▁▁▁▁▁",
       "▁▁▁▁▁▁▁▁▁▁▁ ▁▁▁▁",
-      "▁ ▁▁▁▁▁▁▁▁▁▁▁▁▁▁"
+      "▁ ▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
+      "▁< SU",
+      "▁<SU F",
+      "▁< PRE",
+      "▁< M",
+      "▁<M ID",
+      "▁< E",
+      "▁<E OT",
+      "▁<PRE >",
+      "▁<SUF >",
+      "▁<MID >",
+      "▁<EOT >"
     ]
   }
 }
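These tokenizer changes restore CodeLlama's fill-in-the-middle vocabulary (the ▁<PRE>, ▁<SUF>, ▁<MID> and ▁<EOT> entries at ids 32000-32015, with the special-token forms at 32007-32010) while keeping the ChatML tokens, now at ids 32016 and 32017. A quick sanity-check sketch, again with "user/repo" as a placeholder for this repository:

    # Sketch: confirm the restored infilling and ChatML tokens by id.
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("user/repo")  # placeholder repo id
    print(tok.convert_ids_to_tokens([32007, 32008, 32009, 32010]))
    # expected: ['▁<PRE>', '▁<SUF>', '▁<MID>', '▁<EOT>']
    print(tok.convert_ids_to_tokens([32016, 32017]))
    # expected: ['<|im_end|>', '<|im_start|>']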
tokenizer_config.json
CHANGED
@@ -26,7 +26,39 @@
       "single_word": false,
       "special": true
     },
-    "
+    "32007": {
+      "content": "▁<PRE>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32008": {
+      "content": "▁<SUF>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32009": {
+      "content": "▁<MID>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32010": {
+      "content": "▁<EOT>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32016": {
       "content": "<|im_end|>",
       "lstrip": false,
       "normalized": false,
@@ -34,7 +66,7 @@
       "single_word": false,
       "special": true
     },
-    "
+    "32017": {
       "content": "<|im_start|>",
       "lstrip": false,
       "normalized": false,
@@ -51,12 +83,16 @@
   "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
-  "
+  "eot_token": "▁<EOT>",
+  "fill_token": "<FILL_ME>",
+  "legacy": null,
+  "middle_token": "▁<MID>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<|im_end|>",
-  "
+  "prefix_token": "▁<PRE>",
   "sp_model_kwargs": {},
-  "
+  "suffix_token": "▁<SUF>",
+  "tokenizer_class": "CodeLlamaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
 }
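The chat_template here is plain ChatML, and the added prefix_token/middle_token/suffix_token/fill_token fields wire the tokenizer back up for CodeLlama-style infilling via CodeLlamaTokenizer. A sketch of rendering a prompt through the template ("user/repo" is again a placeholder):

    # Sketch: render a ChatML prompt with the template from tokenizer_config.json.
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("user/repo")  # placeholder repo id
    messages = [{"role": "user", "content": "Write a hello world in Python."}]
    print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
    # <|im_start|>user
    # Write a hello world in Python.<|im_end|>
    # <|im_start|>assistant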
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5b97ba3e0eb8fec02715c13b56f6c885a80d87983ccfc753e0bd8535de2c9c12
 size 4728
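training_args.bin is the pickled TrainingArguments object that the transformers Trainer saves alongside checkpoints; only its hash changed here. A sketch for inspecting it locally, assuming a recent PyTorch (unpickling executes code, so do this only for files you trust):

    # Sketch: inspect the pickled TrainingArguments (trusted files only).
    # transformers must be installed so the pickled class resolves on load.
    import torch

    args = torch.load("training_args.bin", weights_only=False)
    print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)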