Cem13 committed on
Commit
711db3a
1 Parent(s): 73e417e

cem13/complaint_to_sythoms_mix_8x7b

README.md ADDED
@@ -0,0 +1,163 @@
+ ---
+ base_model: mistralai/Mixtral-8x7B-v0.1
+ datasets:
+ - generator
+ library_name: peft
+ license: apache-2.0
+ tags:
+ - trl
+ - sft
+ - generated_from_trainer
+ model-index:
+ - name: Mixtral_Alpace_v2
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # Mixtral_Alpace_v2
+
+ This model is a fine-tuned version of [mistralai/Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) on the generator dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.5617
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 2.5e-05
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 15
+ - training_steps: 1000
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss |
+ |:-------------:|:------:|:----:|:---------------:|
+ | 1.5577 | 0.0813 | 10 | 1.5534 |
+ | 1.4512 | 0.1626 | 20 | 1.4827 |
+ | 1.4106 | 0.2439 | 30 | 1.4104 |
+ | 1.3419 | 0.3252 | 40 | 1.3460 |
+ | 1.2361 | 0.4065 | 50 | 1.2827 |
+ | 1.2298 | 0.4878 | 60 | 1.2097 |
+ | 1.1468 | 0.5691 | 70 | 1.1400 |
+ | 1.0874 | 0.6504 | 80 | 1.0724 |
+ | 1.0372 | 0.7317 | 90 | 1.0088 |
+ | 0.9185 | 0.8130 | 100 | 0.9566 |
+ | 0.8927 | 0.8943 | 110 | 0.9139 |
+ | 0.8264 | 0.9756 | 120 | 0.8724 |
+ | 0.8799 | 1.0569 | 130 | 0.8329 |
+ | 0.8233 | 1.1382 | 140 | 0.7947 |
+ | 0.7761 | 1.2195 | 150 | 0.7633 |
+ | 0.7568 | 1.3008 | 160 | 0.7407 |
+ | 0.6957 | 1.3821 | 170 | 0.7224 |
+ | 0.6712 | 1.4634 | 180 | 0.7048 |
+ | 0.6738 | 1.5447 | 190 | 0.6908 |
+ | 0.7165 | 1.6260 | 200 | 0.6781 |
+ | 0.5913 | 1.7073 | 210 | 0.6673 |
+ | 0.6992 | 1.7886 | 220 | 0.6584 |
+ | 0.6438 | 1.8699 | 230 | 0.6497 |
+ | 0.6649 | 1.9512 | 240 | 0.6425 |
+ | 0.5907 | 2.0325 | 250 | 0.6358 |
+ | 0.6014 | 2.1138 | 260 | 0.6302 |
+ | 0.5605 | 2.1951 | 270 | 0.6250 |
+ | 0.5893 | 2.2764 | 280 | 0.6209 |
+ | 0.5761 | 2.3577 | 290 | 0.6166 |
+ | 0.6083 | 2.4390 | 300 | 0.6132 |
+ | 0.6404 | 2.5203 | 310 | 0.6100 |
+ | 0.5949 | 2.6016 | 320 | 0.6076 |
+ | 0.6208 | 2.6829 | 330 | 0.6047 |
+ | 0.6083 | 2.7642 | 340 | 0.6025 |
+ | 0.5922 | 2.8455 | 350 | 0.5998 |
+ | 0.6377 | 2.9268 | 360 | 0.5980 |
+ | 0.6059 | 3.0081 | 370 | 0.5960 |
+ | 0.6697 | 3.0894 | 380 | 0.5940 |
+ | 0.5813 | 3.1707 | 390 | 0.5925 |
+ | 0.5442 | 3.2520 | 400 | 0.5911 |
+ | 0.506 | 3.3333 | 410 | 0.5889 |
+ | 0.5806 | 3.4146 | 420 | 0.5878 |
+ | 0.5504 | 3.4959 | 430 | 0.5868 |
+ | 0.6051 | 3.5772 | 440 | 0.5849 |
+ | 0.5952 | 3.6585 | 450 | 0.5838 |
+ | 0.5128 | 3.7398 | 460 | 0.5825 |
+ | 0.5779 | 3.8211 | 470 | 0.5813 |
+ | 0.5448 | 3.9024 | 480 | 0.5802 |
+ | 0.5559 | 3.9837 | 490 | 0.5796 |
+ | 0.6136 | 4.0650 | 500 | 0.5787 |
+ | 0.5329 | 4.1463 | 510 | 0.5776 |
+ | 0.5267 | 4.2276 | 520 | 0.5767 |
+ | 0.5492 | 4.3089 | 530 | 0.5763 |
+ | 0.5206 | 4.3902 | 540 | 0.5758 |
+ | 0.5088 | 4.4715 | 550 | 0.5747 |
+ | 0.5811 | 4.5528 | 560 | 0.5739 |
+ | 0.5865 | 4.6341 | 570 | 0.5728 |
+ | 0.5563 | 4.7154 | 580 | 0.5729 |
+ | 0.5692 | 4.7967 | 590 | 0.5719 |
+ | 0.5827 | 4.8780 | 600 | 0.5713 |
+ | 0.5551 | 4.9593 | 610 | 0.5715 |
+ | 0.5059 | 5.0407 | 620 | 0.5708 |
+ | 0.5132 | 5.1220 | 630 | 0.5700 |
+ | 0.5314 | 5.2033 | 640 | 0.5698 |
+ | 0.5614 | 5.2846 | 650 | 0.5696 |
+ | 0.5489 | 5.3659 | 660 | 0.5688 |
+ | 0.5404 | 5.4472 | 670 | 0.5680 |
+ | 0.5745 | 5.5285 | 680 | 0.5672 |
+ | 0.5083 | 5.6098 | 690 | 0.5673 |
+ | 0.5565 | 5.6911 | 700 | 0.5670 |
+ | 0.5515 | 5.7724 | 710 | 0.5664 |
+ | 0.5448 | 5.8537 | 720 | 0.5664 |
+ | 0.5276 | 5.9350 | 730 | 0.5657 |
+ | 0.5436 | 6.0163 | 740 | 0.5656 |
+ | 0.5988 | 6.0976 | 750 | 0.5650 |
+ | 0.4929 | 6.1789 | 760 | 0.5652 |
+ | 0.5957 | 6.2602 | 770 | 0.5645 |
+ | 0.4968 | 6.3415 | 780 | 0.5645 |
+ | 0.4822 | 6.4228 | 790 | 0.5645 |
+ | 0.5527 | 6.5041 | 800 | 0.5642 |
+ | 0.5663 | 6.5854 | 810 | 0.5640 |
+ | 0.493 | 6.6667 | 820 | 0.5634 |
+ | 0.4992 | 6.7480 | 830 | 0.5630 |
+ | 0.5618 | 6.8293 | 840 | 0.5630 |
+ | 0.568 | 6.9106 | 850 | 0.5626 |
+ | 0.4869 | 6.9919 | 860 | 0.5626 |
+ | 0.5418 | 7.0732 | 870 | 0.5625 |
+ | 0.5364 | 7.1545 | 880 | 0.5621 |
+ | 0.5675 | 7.2358 | 890 | 0.5621 |
+ | 0.491 | 7.3171 | 900 | 0.5620 |
+ | 0.5555 | 7.3984 | 910 | 0.5621 |
+ | 0.6093 | 7.4797 | 920 | 0.5621 |
+ | 0.5529 | 7.5610 | 930 | 0.5620 |
+ | 0.5252 | 7.6423 | 940 | 0.5620 |
+ | 0.5024 | 7.7236 | 950 | 0.5620 |
+ | 0.5639 | 7.8049 | 960 | 0.5616 |
+ | 0.4676 | 7.8862 | 970 | 0.5618 |
+ | 0.5236 | 7.9675 | 980 | 0.5617 |
+ | 0.4902 | 8.0488 | 990 | 0.5616 |
+ | 0.486 | 8.1301 | 1000 | 0.5617 |
+
+
+ ### Framework versions
+
+ - PEFT 0.12.0
+ - Transformers 4.44.0
+ - Pytorch 2.4.0+cu121
+ - Datasets 2.20.0
+ - Tokenizers 0.19.1
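
Since the card above documents a PEFT/LoRA adapter (`library_name: peft`) trained on top of `mistralai/Mixtral-8x7B-v0.1`, a minimal loading sketch may be helpful. It is not part of the committed files: the adapter repository id is taken from this commit's page header, the dtype/device settings and the prompt are illustrative assumptions.

```python
# Minimal sketch: attach this LoRA adapter to the Mixtral-8x7B base model with
# PEFT + Transformers (versions listed in the card above). Assumptions are noted inline.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "mistralai/Mixtral-8x7B-v0.1"
adapter_id = "cem13/complaint_to_sythoms_mix_8x7b"  # repository shown in this commit

tokenizer = AutoTokenizer.from_pretrained(adapter_id)
base_model = AutoModelForCausalLM.from_pretrained(
    base_id,
    torch_dtype=torch.bfloat16,  # assumption; Mixtral-8x7B needs substantial GPU memory
    device_map="auto",
)
model = PeftModel.from_pretrained(base_model, adapter_id)

prompt = "Patient complaint: persistent cough and mild fever for three days."  # illustrative only
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```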
adapter_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "mistralai/Mixtral-8x7B-v0.1",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 16,
+   "lora_dropout": 0.1,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 64,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "up_proj",
+     "lm_head",
+     "q_proj",
+     "down_proj",
+     "o_proj",
+     "v_proj",
+     "gate_proj",
+     "k_proj"
+   ],
+   "task_type": "CAUSAL_LM",
+   "use_dora": false,
+   "use_rslora": false
+ }
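
For reference, a `peft.LoraConfig` that mirrors the fields above (rank 64, alpha 16, dropout 0.1, all attention and MLP projections plus `lm_head` as target modules) could be written as the sketch below. This reconstructs the recorded configuration; it is not the author's original training script.

```python
# Sketch of a LoraConfig equivalent to adapter_config.json above (PEFT 0.12.0).
from peft import LoraConfig

lora_config = LoraConfig(
    r=64,              # LoRA rank
    lora_alpha=16,
    lora_dropout=0.1,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[
        "q_proj", "k_proj", "v_proj", "o_proj",   # attention projections
        "gate_proj", "up_proj", "down_proj",      # expert MLP projections
        "lm_head",
    ],
)
# Passing this to get_peft_model(base_model, lora_config), or to TRL's SFTTrainer,
# reproduces the adapter shape recorded in this commit.
```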
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:294b655ba9e3e8a676af934aa4488d247c197c80f183657d8f1bcbb6b92ede02
+ size 751667752
runs/Aug12_02-58-48_f09f93dab3bd/events.out.tfevents.1723431533.f09f93dab3bd.1488.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1bdbe2363ef467f4ccba1877e38f253dabd7d2026931d500550a219be4312536
+ size 11558
runs/Aug12_03-00-24_f09f93dab3bd/events.out.tfevents.1723431631.f09f93dab3bd.1488.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95818856a35d8386f50a933836aeff6b8980321c486f16a3bd685f232da761f2
+ size 5829
runs/Aug12_03-04-50_f09f93dab3bd/events.out.tfevents.1723431895.f09f93dab3bd.1488.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6160edd8e5af794ed91c7ede02376f8f0189e924f8d3c0fdc9b15ada172043f9
+ size 5829
runs/Aug12_03-05-07_f09f93dab3bd/events.out.tfevents.1723431912.f09f93dab3bd.1488.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7aac5221c44bee57c39cd24dbe9bfadc1dcbd690366528c029de903129ace1d2
+ size 60014
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "</s>",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,43 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": null,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false
+ }
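
The committed tokenizer config reuses the Mixtral/Llama SentencePiece tokenizer, prepends a BOS token, and maps the padding token onto `</s>`. A small sketch, assuming this repository id, of checking those settings after loading:

```python
# Confirm the special-token setup defined in tokenizer_config.json above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("cem13/complaint_to_sythoms_mix_8x7b")  # repo shown in this commit

print(tok.bos_token, tok.eos_token, tok.pad_token)  # expected: <s> </s> </s>
ids = tok("hello world").input_ids
print(ids[0] == tok.bos_token_id)  # True: add_bos_token is true, add_eos_token is false
```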
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a3229bde7db39dbc160ee41aefd9f326e811733e2f230d9d1c22afbbcb83462
+ size 5432