PasinB commited on
Commit
0fc4f39
1 Parent(s): c02baca

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .config/.last_opt_in_prompt.yaml +1 -0
  2. .config/.last_survey_prompt.yaml +1 -0
  3. .config/.last_update_check.json +1 -0
  4. .config/active_config +1 -0
  5. .config/config_sentinel +0 -0
  6. .config/configurations/config_default +6 -0
  7. .config/default_configs.db +0 -0
  8. .config/gce +1 -0
  9. .config/logs/2024.03.07/14.31.06.507923.log +596 -0
  10. .config/logs/2024.03.07/14.31.32.270753.log +5 -0
  11. .config/logs/2024.03.07/14.31.42.911124.log +169 -0
  12. .config/logs/2024.03.07/14.31.51.601291.log +5 -0
  13. .config/logs/2024.03.07/14.32.02.191519.log +8 -0
  14. .config/logs/2024.03.07/14.32.02.895480.log +8 -0
  15. .gitattributes +5 -0
  16. README.md +3 -9
  17. generate.py +222 -0
  18. khumpun-200-18++.csv +208 -0
  19. openthaigpt_Finetuning/.gitattributes +35 -0
  20. openthaigpt_Finetuning/openthaigpt-finetune/.dockerignore +4 -0
  21. openthaigpt_Finetuning/openthaigpt-finetune/.gitignore +15 -0
  22. openthaigpt_Finetuning/openthaigpt-finetune/DATA_LICENSE +183 -0
  23. openthaigpt_Finetuning/openthaigpt-finetune/Dockerfile +18 -0
  24. openthaigpt_Finetuning/openthaigpt-finetune/LICENSE +201 -0
  25. openthaigpt_Finetuning/openthaigpt-finetune/README.md +171 -0
  26. openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data.json +3 -0
  27. openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data_cleaned_archive.json +3 -0
  28. openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data_gpt4.json +3 -0
  29. openthaigpt_Finetuning/openthaigpt-finetune/data/kumpun.jsonl +199 -0
  30. openthaigpt_Finetuning/openthaigpt-finetune/docker-compose.yml +28 -0
  31. openthaigpt_Finetuning/openthaigpt-finetune/export_hf_checkpoint.py +55 -0
  32. openthaigpt_Finetuning/openthaigpt-finetune/export_state_dict_checkpoint.py +125 -0
  33. openthaigpt_Finetuning/openthaigpt-finetune/finetune.py +283 -0
  34. openthaigpt_Finetuning/openthaigpt-finetune/generate.py +222 -0
  35. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/README.md +204 -0
  36. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/adapter_config.json +28 -0
  37. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/adapter_model.safetensors +3 -0
  38. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/README.md +204 -0
  39. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/adapter_config.json +28 -0
  40. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/adapter_model.safetensors +3 -0
  41. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/optimizer.pt +3 -0
  42. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/rng_state.pth +3 -0
  43. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/scheduler.pt +3 -0
  44. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/trainer_state.json +169 -0
  45. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/training_args.bin +3 -0
  46. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/README.md +204 -0
  47. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/adapter_config.json +28 -0
  48. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/adapter_model.safetensors +3 -0
  49. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/optimizer.pt +3 -0
  50. openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/rng_state.pth +3 -0
.config/.last_opt_in_prompt.yaml ADDED
@@ -0,0 +1 @@
 
 
1
+ {}
.config/.last_survey_prompt.yaml ADDED
@@ -0,0 +1 @@
 
 
1
+ last_prompt_time: 1709821902.2188199
.config/.last_update_check.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"last_update_check_time": 1709821910.9966443, "last_update_check_revision": 20240229170130, "notifications": [], "last_nag_times": {}}
.config/active_config ADDED
@@ -0,0 +1 @@
 
 
1
+ default
.config/config_sentinel ADDED
File without changes
.config/configurations/config_default ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ [component_manager]
2
+ disable_update_check = true
3
+
4
+ [compute]
5
+ gce_metadata_read_timeout_sec = 0
6
+
.config/default_configs.db ADDED
Binary file (12.3 kB). View file
 
.config/gce ADDED
@@ -0,0 +1 @@
 
 
1
+ False
.config/logs/2024.03.07/14.31.06.507923.log ADDED
@@ -0,0 +1,596 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-03-07 14:31:18,534 DEBUG root Loaded Command Group: ['gcloud', 'components']
2
+ 2024-03-07 14:31:18,538 DEBUG root Loaded Command Group: ['gcloud', 'components', 'update']
3
+ 2024-03-07 14:31:18,540 DEBUG root Running [gcloud.components.update] with arguments: [--allow-no-backup: "True", --compile-python: "True", --quiet: "True", COMPONENT-IDS:7: "['core', 'gcloud-deps', 'bq', 'gcloud', 'gcloud-crc32c', 'gsutil', 'anthoscli']"]
4
+ 2024-03-07 14:31:18,541 INFO ___FILE_ONLY___ Beginning update. This process may take several minutes.
5
+
6
+ 2024-03-07 14:31:18,558 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
7
+ 2024-03-07 14:31:18,613 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components-2.json HTTP/1.1" 200 214450
8
+ 2024-03-07 14:31:18,629 INFO ___FILE_ONLY___
9
+
10
+ 2024-03-07 14:31:18,629 INFO ___FILE_ONLY___
11
+ Your current Google Cloud CLI version is: 467.0.0
12
+
13
+ 2024-03-07 14:31:18,629 INFO ___FILE_ONLY___ Installing components from version: 467.0.0
14
+
15
+ 2024-03-07 14:31:18,629 INFO ___FILE_ONLY___
16
+
17
+ 2024-03-07 14:31:18,630 DEBUG root Chosen display Format:table[box,title="These components will be removed."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
18
+ 2024-03-07 14:31:18,630 DEBUG root Chosen display Format:table[box,title="These components will be updated."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
19
+ 2024-03-07 14:31:18,631 DEBUG root Chosen display Format:table[box,title="These components will be installed."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
20
+ 2024-03-07 14:31:18,638 INFO ___FILE_ONLY___ ┌─────────────────────────────────────────────────────────────────────────────┐
21
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___
22
+
23
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___ │ These components will be installed. │
24
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___
25
+
26
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___ ├─────────────────────────────────────────────────────┬────────────┬──────────┤
27
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___
28
+
29
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___ │ Name │ Version │ Size │
30
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___
31
+
32
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___ ├─────────────────────────────────────────────────────┼────────────┼──────────┤
33
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___
34
+
35
+ 2024-03-07 14:31:18,639 INFO ___FILE_ONLY___ │
36
+ 2024-03-07 14:31:18,640 INFO ___FILE_ONLY___ BigQuery Command Line Tool
37
+ 2024-03-07 14:31:18,640 INFO ___FILE_ONLY___
38
+ 2024-03-07 14:31:18,640 INFO ___FILE_ONLY___ │
39
+ 2024-03-07 14:31:18,640 INFO ___FILE_ONLY___ 2.0.101
40
+ 2024-03-07 14:31:18,640 INFO ___FILE_ONLY___
41
+ 2024-03-07 14:31:18,640 INFO ___FILE_ONLY___ │
42
+ 2024-03-07 14:31:18,640 INFO ___FILE_ONLY___ 1.6 MiB
43
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___
44
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___ │
45
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___
46
+
47
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___ │
48
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___ BigQuery Command Line Tool (Platform Specific)
49
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___
50
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___ │
51
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___ 2.0.101
52
+ 2024-03-07 14:31:18,641 INFO ___FILE_ONLY___
53
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___ │
54
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___ < 1 MiB
55
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___
56
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___ │
57
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___
58
+
59
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___ │
60
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___ Bundled Python 3.11
61
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___
62
+ 2024-03-07 14:31:18,642 INFO ___FILE_ONLY___ │
63
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___ 3.11.8
64
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___
65
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___ │
66
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___ 74.9 MiB
67
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___
68
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___ │
69
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___
70
+
71
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___ │
72
+ 2024-03-07 14:31:18,643 INFO ___FILE_ONLY___ Cloud Storage Command Line Tool
73
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___
74
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___ │
75
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___ 5.27
76
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___
77
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___ │
78
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___ 11.3 MiB
79
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___
80
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___ │
81
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___
82
+
83
+ 2024-03-07 14:31:18,644 INFO ___FILE_ONLY___ │
84
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___ Cloud Storage Command Line Tool (Platform Specific)
85
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___
86
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___ │
87
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___ 5.27
88
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___
89
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___ │
90
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___ < 1 MiB
91
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___
92
+ 2024-03-07 14:31:18,645 INFO ___FILE_ONLY___ │
93
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___
94
+
95
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___ │
96
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___ Google Cloud CLI Core Libraries (Platform Specific)
97
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___
98
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___ │
99
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___ 2024.01.06
100
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___
101
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___ │
102
+ 2024-03-07 14:31:18,646 INFO ___FILE_ONLY___ < 1 MiB
103
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___
104
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___ │
105
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___
106
+
107
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___ │
108
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___ Google Cloud CRC32C Hash Tool
109
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___
110
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___ │
111
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___ 1.0.0
112
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___
113
+ 2024-03-07 14:31:18,647 INFO ___FILE_ONLY___ │
114
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___ 1.2 MiB
115
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___
116
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___ │
117
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___
118
+
119
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___ │
120
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___ anthoscli
121
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___
122
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___ │
123
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___ 0.2.48
124
+ 2024-03-07 14:31:18,648 INFO ___FILE_ONLY___
125
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___ │
126
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___ 68.9 MiB
127
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___
128
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___ │
129
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___
130
+
131
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___ │
132
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___ gcloud cli dependencies
133
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___
134
+ 2024-03-07 14:31:18,649 INFO ___FILE_ONLY___ │
135
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___ 2021.04.16
136
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___
137
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___ │
138
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___ < 1 MiB
139
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___
140
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___ │
141
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___
142
+
143
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___ └─────────────────────────────────────────────────────┴────────────┴──────────┘
144
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___
145
+
146
+ 2024-03-07 14:31:18,650 INFO ___FILE_ONLY___
147
+
148
+ 2024-03-07 14:31:18,654 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
149
+ 2024-03-07 14:31:18,711 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/RELEASE_NOTES HTTP/1.1" 200 1163855
150
+ 2024-03-07 14:31:18,777 INFO ___FILE_ONLY___ For the latest full release notes, please visit:
151
+ https://cloud.google.com/sdk/release_notes
152
+
153
+
154
+ 2024-03-07 14:31:18,779 INFO ___FILE_ONLY___ ╔═════════════════════════════════════════���══════════════════╗
155
+
156
+ 2024-03-07 14:31:18,779 INFO ___FILE_ONLY___ ╠═ Creating update staging area ═╣
157
+
158
+ 2024-03-07 14:31:18,779 INFO ___FILE_ONLY___ ╚
159
+ 2024-03-07 14:31:18,780 INFO ___FILE_ONLY___ ══════
160
+ 2024-03-07 14:31:18,780 INFO ___FILE_ONLY___ ══════
161
+ 2024-03-07 14:31:18,780 INFO ___FILE_ONLY___ ══════
162
+ 2024-03-07 14:31:19,185 INFO ___FILE_ONLY___ ═
163
+ 2024-03-07 14:31:19,219 INFO ___FILE_ONLY___ ═
164
+ 2024-03-07 14:31:19,256 INFO ___FILE_ONLY___ ═
165
+ 2024-03-07 14:31:19,300 INFO ___FILE_ONLY___ ═
166
+ 2024-03-07 14:31:19,334 INFO ___FILE_ONLY___ ═
167
+ 2024-03-07 14:31:19,370 INFO ___FILE_ONLY___ ═
168
+ 2024-03-07 14:31:19,403 INFO ___FILE_ONLY___ ═
169
+ 2024-03-07 14:31:19,439 INFO ___FILE_ONLY___ ═
170
+ 2024-03-07 14:31:19,475 INFO ___FILE_ONLY___ ═
171
+ 2024-03-07 14:31:19,609 INFO ___FILE_ONLY___ ═
172
+ 2024-03-07 14:31:19,694 INFO ___FILE_ONLY___ ═
173
+ 2024-03-07 14:31:19,802 INFO ___FILE_ONLY___ ═
174
+ 2024-03-07 14:31:19,856 INFO ___FILE_ONLY___ ═
175
+ 2024-03-07 14:31:19,931 INFO ___FILE_ONLY___ ═
176
+ 2024-03-07 14:31:19,992 INFO ___FILE_ONLY___ ═
177
+ 2024-03-07 14:31:20,038 INFO ___FILE_ONLY___ ═
178
+ 2024-03-07 14:31:20,090 INFO ___FILE_ONLY___ ═
179
+ 2024-03-07 14:31:20,139 INFO ___FILE_ONLY___ ═
180
+ 2024-03-07 14:31:20,193 INFO ___FILE_ONLY___ ═
181
+ 2024-03-07 14:31:20,267 INFO ___FILE_ONLY___ ═
182
+ 2024-03-07 14:31:20,322 INFO ___FILE_ONLY___ ═
183
+ 2024-03-07 14:31:20,374 INFO ___FILE_ONLY___ ═
184
+ 2024-03-07 14:31:20,518 INFO ___FILE_ONLY___ ═
185
+ 2024-03-07 14:31:20,568 INFO ___FILE_ONLY___ ═
186
+ 2024-03-07 14:31:20,633 INFO ___FILE_ONLY___ ═
187
+ 2024-03-07 14:31:20,687 INFO ___FILE_ONLY___ ═
188
+ 2024-03-07 14:31:20,739 INFO ___FILE_ONLY___ ═
189
+ 2024-03-07 14:31:20,788 INFO ___FILE_ONLY___ ═
190
+ 2024-03-07 14:31:20,837 INFO ___FILE_ONLY___ ═
191
+ 2024-03-07 14:31:20,893 INFO ___FILE_ONLY___ ═
192
+ 2024-03-07 14:31:20,962 INFO ___FILE_ONLY___ ═
193
+ 2024-03-07 14:31:21,025 INFO ___FILE_ONLY___ ═
194
+ 2024-03-07 14:31:21,083 INFO ___FILE_ONLY___ ═
195
+ 2024-03-07 14:31:21,154 INFO ___FILE_ONLY___ ═
196
+ 2024-03-07 14:31:21,227 INFO ___FILE_ONLY___ ═
197
+ 2024-03-07 14:31:21,285 INFO ___FILE_ONLY___ ═
198
+ 2024-03-07 14:31:21,342 INFO ___FILE_ONLY___ ═
199
+ 2024-03-07 14:31:21,392 INFO ___FILE_ONLY___ ═
200
+ 2024-03-07 14:31:21,450 INFO ___FILE_ONLY___ ═
201
+ 2024-03-07 14:31:21,504 INFO ___FILE_ONLY___ ═
202
+ 2024-03-07 14:31:21,561 INFO ___FILE_ONLY___ ═
203
+ 2024-03-07 14:31:21,624 INFO ___FILE_ONLY___ ═
204
+ 2024-03-07 14:31:21,624 INFO ___FILE_ONLY___ ╝
205
+
206
+ 2024-03-07 14:31:21,711 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
207
+
208
+ 2024-03-07 14:31:21,711 INFO ___FILE_ONLY___ ╠═ Installing: BigQuery Command Line Tool ═╣
209
+
210
+ 2024-03-07 14:31:21,711 INFO ___FILE_ONLY___ ╚
211
+ 2024-03-07 14:31:21,715 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
212
+ 2024-03-07 14:31:21,771 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bq-20240112150613.tar.gz HTTP/1.1" 200 1679148
213
+ 2024-03-07 14:31:21,781 INFO ___FILE_ONLY___ ═
214
+ 2024-03-07 14:31:21,781 INFO ___FILE_ONLY___ ═
215
+ 2024-03-07 14:31:21,781 INFO ___FILE_ONLY___ ═
216
+ 2024-03-07 14:31:21,782 INFO ___FILE_ONLY___ ═
217
+ 2024-03-07 14:31:21,782 INFO ___FILE_ONLY___ ═
218
+ 2024-03-07 14:31:21,782 INFO ___FILE_ONLY___ ═
219
+ 2024-03-07 14:31:21,782 INFO ___FILE_ONLY___ ═
220
+ 2024-03-07 14:31:21,782 INFO ___FILE_ONLY___ ═
221
+ 2024-03-07 14:31:21,782 INFO ___FILE_ONLY___ ═
222
+ 2024-03-07 14:31:21,783 INFO ___FILE_ONLY___ ═
223
+ 2024-03-07 14:31:21,783 INFO ___FILE_ONLY___ ═
224
+ 2024-03-07 14:31:21,783 INFO ___FILE_ONLY___ ═
225
+ 2024-03-07 14:31:21,783 INFO ___FILE_ONLY___ ═
226
+ 2024-03-07 14:31:21,783 INFO ___FILE_ONLY___ ═
227
+ 2024-03-07 14:31:21,783 INFO ___FILE_ONLY___ ═
228
+ 2024-03-07 14:31:21,783 INFO ___FILE_ONLY___ ═
229
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
230
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
231
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
232
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
233
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
234
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
235
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
236
+ 2024-03-07 14:31:21,784 INFO ___FILE_ONLY___ ═
237
+ 2024-03-07 14:31:21,785 INFO ___FILE_ONLY___ ═
238
+ 2024-03-07 14:31:21,785 INFO ___FILE_ONLY___ ═
239
+ 2024-03-07 14:31:21,785 INFO ___FILE_ONLY___ ═
240
+ 2024-03-07 14:31:21,785 INFO ___FILE_ONLY___ ═
241
+ 2024-03-07 14:31:21,785 INFO ___FILE_ONLY___ ═
242
+ 2024-03-07 14:31:21,785 INFO ___FILE_ONLY___ ═
243
+ 2024-03-07 14:31:21,899 INFO ___FILE_ONLY___ ═
244
+ 2024-03-07 14:31:21,902 INFO ___FILE_ONLY___ ═
245
+ 2024-03-07 14:31:21,906 INFO ___FILE_ONLY___ ═
246
+ 2024-03-07 14:31:21,910 INFO ___FILE_ONLY___ ═
247
+ 2024-03-07 14:31:21,914 INFO ___FILE_ONLY___ ═
248
+ 2024-03-07 14:31:21,917 INFO ___FILE_ONLY___ ═
249
+ 2024-03-07 14:31:21,922 INFO ___FILE_ONLY___ ═
250
+ 2024-03-07 14:31:21,926 INFO ___FILE_ONLY___ ═
251
+ 2024-03-07 14:31:21,930 INFO ___FILE_ONLY___ ═
252
+ 2024-03-07 14:31:21,933 INFO ___FILE_ONLY___ ═
253
+ 2024-03-07 14:31:21,937 INFO ___FILE_ONLY___ ═
254
+ 2024-03-07 14:31:21,940 INFO ___FILE_ONLY___ ═
255
+ 2024-03-07 14:31:21,945 INFO ___FILE_ONLY___ ═
256
+ 2024-03-07 14:31:21,949 INFO ___FILE_ONLY___ ═
257
+ 2024-03-07 14:31:21,952 INFO ___FILE_ONLY___ ═
258
+ 2024-03-07 14:31:21,956 INFO ___FILE_ONLY___ ═
259
+ 2024-03-07 14:31:21,960 INFO ___FILE_ONLY___ ═
260
+ 2024-03-07 14:31:21,964 INFO ___FILE_ONLY___ ═
261
+ 2024-03-07 14:31:21,969 INFO ___FILE_ONLY___ ═
262
+ 2024-03-07 14:31:21,973 INFO ___FILE_ONLY___ ═
263
+ 2024-03-07 14:31:21,977 INFO ___FILE_ONLY___ ═
264
+ 2024-03-07 14:31:21,983 INFO ___FILE_ONLY___ ═
265
+ 2024-03-07 14:31:21,989 INFO ___FILE_ONLY___ ═
266
+ 2024-03-07 14:31:21,993 INFO ___FILE_ONLY___ ═
267
+ 2024-03-07 14:31:21,996 INFO ___FILE_ONLY___ ═
268
+ 2024-03-07 14:31:22,000 INFO ___FILE_ONLY___ ═
269
+ 2024-03-07 14:31:22,004 INFO ___FILE_ONLY___ ═
270
+ 2024-03-07 14:31:22,008 INFO ___FILE_ONLY___ ═
271
+ 2024-03-07 14:31:22,011 INFO ___FILE_ONLY___ ═
272
+ 2024-03-07 14:31:22,014 INFO ___FILE_ONLY___ ═
273
+ 2024-03-07 14:31:22,014 INFO ___FILE_ONLY___ ╝
274
+
275
+ 2024-03-07 14:31:22,032 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
276
+
277
+ 2024-03-07 14:31:22,032 INFO ___FILE_ONLY___ ╠═ Installing: BigQuery Command Line Tool (Platform Spec... ═╣
278
+
279
+ 2024-03-07 14:31:22,032 INFO ___FILE_ONLY___ ╚
280
+ 2024-03-07 14:31:22,036 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
281
+ 2024-03-07 14:31:22,092 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bq-nix-20240106004423.tar.gz HTTP/1.1" 200 2026
282
+ 2024-03-07 14:31:22,092 INFO ___FILE_ONLY___ ══════════════════════════════
283
+ 2024-03-07 14:31:22,093 INFO ___FILE_ONLY___ ══════════════════════════════
284
+ 2024-03-07 14:31:22,093 INFO ___FILE_ONLY___ ╝
285
+
286
+ 2024-03-07 14:31:22,101 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
287
+
288
+ 2024-03-07 14:31:22,102 INFO ___FILE_ONLY___ ╠═ Installing: Bundled Python 3.11 ═╣
289
+
290
+ 2024-03-07 14:31:22,102 INFO ___FILE_ONLY___ ╚
291
+ 2024-03-07 14:31:22,107 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
292
+ 2024-03-07 14:31:22,107 INFO ___FILE_ONLY___ ╝
293
+
294
+ 2024-03-07 14:31:22,108 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
295
+
296
+ 2024-03-07 14:31:22,109 INFO ___FILE_ONLY___ ╠═ Installing: Bundled Python 3.11 ═╣
297
+
298
+ 2024-03-07 14:31:22,109 INFO ___FILE_ONLY___ ╚
299
+ 2024-03-07 14:31:22,112 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
300
+ 2024-03-07 14:31:22,168 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-bundled-python3-unix-linux-x86_64-20240229170130.tar.gz HTTP/1.1" 200 78486918
301
+ 2024-03-07 14:31:22,448 INFO ___FILE_ONLY___ ═
302
+ 2024-03-07 14:31:22,451 INFO ___FILE_ONLY___ ═
303
+ 2024-03-07 14:31:22,454 INFO ___FILE_ONLY___ ═
304
+ 2024-03-07 14:31:22,457 INFO ___FILE_ONLY___ ═
305
+ 2024-03-07 14:31:22,461 INFO ___FILE_ONLY___ ═
306
+ 2024-03-07 14:31:22,464 INFO ___FILE_ONLY___ ═
307
+ 2024-03-07 14:31:22,467 INFO ___FILE_ONLY___ ═
308
+ 2024-03-07 14:31:22,470 INFO ___FILE_ONLY___ ═
309
+ 2024-03-07 14:31:22,473 INFO ___FILE_ONLY___ ═
310
+ 2024-03-07 14:31:22,476 INFO ___FILE_ONLY___ ═
311
+ 2024-03-07 14:31:22,479 INFO ___FILE_ONLY___ ═
312
+ 2024-03-07 14:31:22,482 INFO ___FILE_ONLY___ ═
313
+ 2024-03-07 14:31:22,486 INFO ___FILE_ONLY___ ═
314
+ 2024-03-07 14:31:22,489 INFO ___FILE_ONLY___ ═
315
+ 2024-03-07 14:31:22,492 INFO ___FILE_ONLY___ ═
316
+ 2024-03-07 14:31:22,495 INFO ___FILE_ONLY___ ═
317
+ 2024-03-07 14:31:22,498 INFO ___FILE_ONLY___ ═
318
+ 2024-03-07 14:31:22,501 INFO ___FILE_ONLY___ ═
319
+ 2024-03-07 14:31:22,504 INFO ___FILE_ONLY___ ═
320
+ 2024-03-07 14:31:22,507 INFO ___FILE_ONLY___ ═
321
+ 2024-03-07 14:31:22,510 INFO ___FILE_ONLY___ ═
322
+ 2024-03-07 14:31:22,513 INFO ___FILE_ONLY___ ═
323
+ 2024-03-07 14:31:22,516 INFO ___FILE_ONLY___ ═
324
+ 2024-03-07 14:31:22,519 INFO ___FILE_ONLY___ ═
325
+ 2024-03-07 14:31:22,522 INFO ___FILE_ONLY___ ═
326
+ 2024-03-07 14:31:22,525 INFO ___FILE_ONLY___ ═
327
+ 2024-03-07 14:31:22,528 INFO ___FILE_ONLY___ ═
328
+ 2024-03-07 14:31:22,531 INFO ___FILE_ONLY___ ═
329
+ 2024-03-07 14:31:22,535 INFO ___FILE_ONLY___ ═
330
+ 2024-03-07 14:31:22,538 INFO ___FILE_ONLY___ ═
331
+ 2024-03-07 14:31:24,659 INFO ___FILE_ONLY___ ═
332
+ 2024-03-07 14:31:24,685 INFO ___FILE_ONLY___ ═
333
+ 2024-03-07 14:31:24,711 INFO ___FILE_ONLY___ ═
334
+ 2024-03-07 14:31:24,737 INFO ___FILE_ONLY___ ═
335
+ 2024-03-07 14:31:24,763 INFO ___FILE_ONLY___ ═
336
+ 2024-03-07 14:31:24,788 INFO ___FILE_ONLY___ ═
337
+ 2024-03-07 14:31:24,813 INFO ___FILE_ONLY___ ═
338
+ 2024-03-07 14:31:24,838 INFO ___FILE_ONLY___ ═
339
+ 2024-03-07 14:31:24,863 INFO ___FILE_ONLY___ ═
340
+ 2024-03-07 14:31:24,888 INFO ___FILE_ONLY___ ═
341
+ 2024-03-07 14:31:24,913 INFO ___FILE_ONLY___ ═
342
+ 2024-03-07 14:31:24,938 INFO ___FILE_ONLY___ ═
343
+ 2024-03-07 14:31:24,963 INFO ___FILE_ONLY___ ═
344
+ 2024-03-07 14:31:24,987 INFO ___FILE_ONLY___ ═
345
+ 2024-03-07 14:31:25,012 INFO ___FILE_ONLY___ ═
346
+ 2024-03-07 14:31:25,038 INFO ___FILE_ONLY___ ═
347
+ 2024-03-07 14:31:25,065 INFO ___FILE_ONLY___ ═
348
+ 2024-03-07 14:31:25,427 INFO ___FILE_ONLY___ ═
349
+ 2024-03-07 14:31:25,462 INFO ___FILE_ONLY___ ═
350
+ 2024-03-07 14:31:25,509 INFO ___FILE_ONLY___ ═
351
+ 2024-03-07 14:31:25,547 INFO ___FILE_ONLY___ ═
352
+ 2024-03-07 14:31:25,725 INFO ___FILE_ONLY___ ═
353
+ 2024-03-07 14:31:25,851 INFO ___FILE_ONLY___ ═
354
+ 2024-03-07 14:31:25,888 INFO ___FILE_ONLY___ ═
355
+ 2024-03-07 14:31:25,927 INFO ___FILE_ONLY___ ═
356
+ 2024-03-07 14:31:25,991 INFO ___FILE_ONLY___ ═
357
+ 2024-03-07 14:31:26,027 INFO ___FILE_ONLY___ ═
358
+ 2024-03-07 14:31:26,070 INFO ___FILE_ONLY___ ═
359
+ 2024-03-07 14:31:27,100 INFO ___FILE_ONLY___ ═
360
+ 2024-03-07 14:31:27,130 INFO ___FILE_ONLY___ ═
361
+ 2024-03-07 14:31:27,130 INFO ___FILE_ONLY___ ╝
362
+
363
+ 2024-03-07 14:31:27,232 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
364
+
365
+ 2024-03-07 14:31:27,232 INFO ___FILE_ONLY___ ╠═ Installing: Cloud Storage Command Line Tool ═╣
366
+
367
+ 2024-03-07 14:31:27,232 INFO ___FILE_ONLY___ ╚
368
+ 2024-03-07 14:31:27,236 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
369
+ 2024-03-07 14:31:27,294 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gsutil-20231025210228.tar.gz HTTP/1.1" 200 11833901
370
+ 2024-03-07 14:31:27,340 INFO ___FILE_ONLY___ ═
371
+ 2024-03-07 14:31:27,341 INFO ___FILE_ONLY___ ═
372
+ 2024-03-07 14:31:27,341 INFO ___FILE_ONLY___ ═
373
+ 2024-03-07 14:31:27,342 INFO ___FILE_ONLY___ ═
374
+ 2024-03-07 14:31:27,342 INFO ___FILE_ONLY___ ═
375
+ 2024-03-07 14:31:27,343 INFO ___FILE_ONLY___ ═
376
+ 2024-03-07 14:31:27,343 INFO ___FILE_ONLY___ ═
377
+ 2024-03-07 14:31:27,344 INFO ___FILE_ONLY___ ═
378
+ 2024-03-07 14:31:27,344 INFO ___FILE_ONLY___ ═
379
+ 2024-03-07 14:31:27,345 INFO ___FILE_ONLY___ ═
380
+ 2024-03-07 14:31:27,346 INFO ___FILE_ONLY___ ═
381
+ 2024-03-07 14:31:27,346 INFO ___FILE_ONLY___ ═
382
+ 2024-03-07 14:31:27,347 INFO ___FILE_ONLY___ ═
383
+ 2024-03-07 14:31:27,347 INFO ___FILE_ONLY___ ═
384
+ 2024-03-07 14:31:27,348 INFO ___FILE_ONLY___ ═
385
+ 2024-03-07 14:31:27,349 INFO ___FILE_ONLY___ ═
386
+ 2024-03-07 14:31:27,349 INFO ___FILE_ONLY___ ═
387
+ 2024-03-07 14:31:27,350 INFO ___FILE_ONLY___ ═
388
+ 2024-03-07 14:31:27,350 INFO ___FILE_ONLY___ ═
389
+ 2024-03-07 14:31:27,351 INFO ___FILE_ONLY___ ═
390
+ 2024-03-07 14:31:27,351 INFO ___FILE_ONLY___ ═
391
+ 2024-03-07 14:31:27,352 INFO ___FILE_ONLY___ ═
392
+ 2024-03-07 14:31:27,352 INFO ___FILE_ONLY___ ═
393
+ 2024-03-07 14:31:27,353 INFO ___FILE_ONLY___ ═
394
+ 2024-03-07 14:31:27,353 INFO ___FILE_ONLY___ ═
395
+ 2024-03-07 14:31:27,354 INFO ___FILE_ONLY___ ═
396
+ 2024-03-07 14:31:27,355 INFO ___FILE_ONLY___ ═
397
+ 2024-03-07 14:31:27,355 INFO ___FILE_ONLY___ ═
398
+ 2024-03-07 14:31:27,356 INFO ___FILE_ONLY___ ═
399
+ 2024-03-07 14:31:27,356 INFO ___FILE_ONLY___ ═
400
+ 2024-03-07 14:31:28,048 INFO ___FILE_ONLY___ ═
401
+ 2024-03-07 14:31:28,082 INFO ___FILE_ONLY___ ═
402
+ 2024-03-07 14:31:28,110 INFO ___FILE_ONLY___ ═
403
+ 2024-03-07 14:31:28,137 INFO ___FILE_ONLY___ ═
404
+ 2024-03-07 14:31:28,162 INFO ___FILE_ONLY___ ═
405
+ 2024-03-07 14:31:28,188 INFO ___FILE_ONLY___ ═
406
+ 2024-03-07 14:31:28,207 INFO ___FILE_ONLY___ ═
407
+ 2024-03-07 14:31:28,225 INFO ___FILE_ONLY___ ═
408
+ 2024-03-07 14:31:28,247 INFO ___FILE_ONLY___ ═
409
+ 2024-03-07 14:31:28,266 INFO ___FILE_ONLY___ ═
410
+ 2024-03-07 14:31:28,287 INFO ___FILE_ONLY___ ═
411
+ 2024-03-07 14:31:28,306 INFO ___FILE_ONLY___ ═
412
+ 2024-03-07 14:31:28,335 INFO ___FILE_ONLY___ ═
413
+ 2024-03-07 14:31:28,357 INFO ___FILE_ONLY___ ═
414
+ 2024-03-07 14:31:28,388 INFO ___FILE_ONLY___ ═
415
+ 2024-03-07 14:31:28,415 INFO ___FILE_ONLY___ ═
416
+ 2024-03-07 14:31:28,443 INFO ___FILE_ONLY___ ═
417
+ 2024-03-07 14:31:28,471 INFO ___FILE_ONLY___ ═
418
+ 2024-03-07 14:31:28,491 INFO ___FILE_ONLY___ ═
419
+ 2024-03-07 14:31:28,513 INFO ___FILE_ONLY___ ═
420
+ 2024-03-07 14:31:28,534 INFO ___FILE_ONLY___ ═
421
+ 2024-03-07 14:31:28,555 INFO ___FILE_ONLY___ ═
422
+ 2024-03-07 14:31:28,576 INFO ___FILE_ONLY___ ═
423
+ 2024-03-07 14:31:28,600 INFO ___FILE_ONLY___ ═
424
+ 2024-03-07 14:31:28,619 INFO ___FILE_ONLY___ ═
425
+ 2024-03-07 14:31:28,666 INFO ___FILE_ONLY___ ═
426
+ 2024-03-07 14:31:28,691 INFO ___FILE_ONLY___ ═
427
+ 2024-03-07 14:31:28,716 INFO ___FILE_ONLY___ ═
428
+ 2024-03-07 14:31:28,744 INFO ___FILE_ONLY___ ═
429
+ 2024-03-07 14:31:28,764 INFO ___FILE_ONLY___ ═
430
+ 2024-03-07 14:31:28,764 INFO ___FILE_ONLY___ ╝
431
+
432
+ 2024-03-07 14:31:28,834 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
433
+
434
+ 2024-03-07 14:31:28,834 INFO ___FILE_ONLY___ ╠═ Installing: Cloud Storage Command Line Tool (Platform... ═╣
435
+
436
+ 2024-03-07 14:31:28,834 INFO ___FILE_ONLY___ ╚
437
+ 2024-03-07 14:31:28,838 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
438
+ 2024-03-07 14:31:28,894 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gsutil-nix-20240106004423.tar.gz HTTP/1.1" 200 2042
439
+ 2024-03-07 14:31:28,894 INFO ___FILE_ONLY___ ══════════════════════════════
440
+ 2024-03-07 14:31:28,895 INFO ___FILE_ONLY___ ══════════════════════════════
441
+ 2024-03-07 14:31:28,895 INFO ___FILE_ONLY___ ╝
442
+
443
+ 2024-03-07 14:31:28,903 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
444
+
445
+ 2024-03-07 14:31:28,903 INFO ___FILE_ONLY___ ╠═ Installing: Default set of gcloud commands ═╣
446
+
447
+ 2024-03-07 14:31:28,903 INFO ___FILE_ONLY___ ╚
448
+ 2024-03-07 14:31:28,908 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
449
+ 2024-03-07 14:31:28,908 INFO ___FILE_ONLY___ ╝
450
+
451
+ 2024-03-07 14:31:28,910 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
452
+
453
+ 2024-03-07 14:31:28,910 INFO ___FILE_ONLY___ ╠═ Installing: Google Cloud CLI Core Libraries (Platform... ═╣
454
+
455
+ 2024-03-07 14:31:28,910 INFO ___FILE_ONLY___ ╚
456
+ 2024-03-07 14:31:28,914 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
457
+ 2024-03-07 14:31:28,967 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-core-nix-20240106004423.tar.gz HTTP/1.1" 200 2410
458
+ 2024-03-07 14:31:28,968 INFO ___FILE_ONLY___ ══════════════════════════════
459
+ 2024-03-07 14:31:28,969 INFO ___FILE_ONLY___ ═══════════════
460
+ 2024-03-07 14:31:28,969 INFO ___FILE_ONLY___ ═══════════════
461
+ 2024-03-07 14:31:28,969 INFO ___FILE_ONLY___ ╝
462
+
463
+ 2024-03-07 14:31:28,977 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
464
+
465
+ 2024-03-07 14:31:28,977 INFO ___FILE_ONLY___ ╠═ Installing: Google Cloud CRC32C Hash Tool ═╣
466
+
467
+ 2024-03-07 14:31:28,977 INFO ___FILE_ONLY___ ╚
468
+ 2024-03-07 14:31:28,980 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
469
+ 2024-03-07 14:31:29,037 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gcloud-crc32c-linux-x86_64-20231215195722.tar.gz HTTP/1.1" 200 1287877
470
+ 2024-03-07 14:31:29,046 INFO ___FILE_ONLY___ ═
471
+ 2024-03-07 14:31:29,046 INFO ___FILE_ONLY___ ═
472
+ 2024-03-07 14:31:29,046 INFO ___FILE_ONLY___ ═
473
+ 2024-03-07 14:31:29,046 INFO ___FILE_ONLY___ ═
474
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
475
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
476
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
477
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
478
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
479
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
480
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
481
+ 2024-03-07 14:31:29,047 INFO ___FILE_ONLY___ ═
482
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
483
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
484
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
485
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
486
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
487
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
488
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
489
+ 2024-03-07 14:31:29,048 INFO ___FILE_ONLY___ ═
490
+ 2024-03-07 14:31:29,049 INFO ___FILE_ONLY___ ═
491
+ 2024-03-07 14:31:29,049 INFO ___FILE_ONLY___ ═
492
+ 2024-03-07 14:31:29,049 INFO ___FILE_ONLY___ ═
493
+ 2024-03-07 14:31:29,049 INFO ___FILE_ONLY___ ═
494
+ 2024-03-07 14:31:29,049 INFO ___FILE_ONLY___ ═
495
+ 2024-03-07 14:31:29,049 INFO ___FILE_ONLY___ ═
496
+ 2024-03-07 14:31:29,049 INFO ___FILE_ONLY___ ═
497
+ 2024-03-07 14:31:29,050 INFO ___FILE_ONLY___ ═
498
+ 2024-03-07 14:31:29,050 INFO ___FILE_ONLY___ ═
499
+ 2024-03-07 14:31:29,050 INFO ___FILE_ONLY___ ═
500
+ 2024-03-07 14:31:29,081 INFO ___FILE_ONLY___ ═══════════════
501
+ 2024-03-07 14:31:29,082 INFO ___FILE_ONLY___ ═══════════════
502
+ 2024-03-07 14:31:29,082 INFO ___FILE_ONLY___ ╝
503
+
504
+ 2024-03-07 14:31:29,090 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
505
+
506
+ 2024-03-07 14:31:29,090 INFO ___FILE_ONLY___ ╠═ Installing: Google Cloud CRC32C Hash Tool ═╣
507
+
508
+ 2024-03-07 14:31:29,090 INFO ___FILE_ONLY___ ╚
509
+ 2024-03-07 14:31:29,095 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
510
+ 2024-03-07 14:31:29,095 INFO ___FILE_ONLY___ ╝
511
+
512
+ 2024-03-07 14:31:29,097 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
513
+
514
+ 2024-03-07 14:31:29,097 INFO ___FILE_ONLY___ ╠═ Installing: anthoscli ═╣
515
+
516
+ 2024-03-07 14:31:29,097 INFO ___FILE_ONLY___ ╚
517
+ 2024-03-07 14:31:29,102 INFO ___FILE_ONLY___ ════════════════════════════════════════════════════════════
518
+ 2024-03-07 14:31:29,102 INFO ___FILE_ONLY___ ╝
519
+
520
+ 2024-03-07 14:31:29,104 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
521
+
522
+ 2024-03-07 14:31:29,104 INFO ___FILE_ONLY___ ╠═ Installing: anthoscli ═╣
523
+
524
+ 2024-03-07 14:31:29,104 INFO ___FILE_ONLY___ ╚
525
+ 2024-03-07 14:31:29,108 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
526
+ 2024-03-07 14:31:29,164 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-anthoscli-linux-x86_64-20240209195330.tar.gz HTTP/1.1" 200 72231225
527
+ 2024-03-07 14:31:29,408 INFO ___FILE_ONLY___ ═
528
+ 2024-03-07 14:31:29,411 INFO ___FILE_ONLY___ ═
529
+ 2024-03-07 14:31:29,414 INFO ___FILE_ONLY___ ═
530
+ 2024-03-07 14:31:29,417 INFO ___FILE_ONLY___ ═
531
+ 2024-03-07 14:31:29,420 INFO ___FILE_ONLY___ ═
532
+ 2024-03-07 14:31:29,423 INFO ___FILE_ONLY___ ═
533
+ 2024-03-07 14:31:29,425 INFO ___FILE_ONLY___ ═
534
+ 2024-03-07 14:31:29,428 INFO ___FILE_ONLY___ ═
535
+ 2024-03-07 14:31:29,431 INFO ___FILE_ONLY___ ═
536
+ 2024-03-07 14:31:29,434 INFO ___FILE_ONLY___ ═
537
+ 2024-03-07 14:31:29,437 INFO ___FILE_ONLY___ ═
538
+ 2024-03-07 14:31:29,440 INFO ___FILE_ONLY___ ═
539
+ 2024-03-07 14:31:29,443 INFO ___FILE_ONLY___ ═
540
+ 2024-03-07 14:31:29,446 INFO ___FILE_ONLY___ ═
541
+ 2024-03-07 14:31:29,448 INFO ___FILE_ONLY___ ═
542
+ 2024-03-07 14:31:29,451 INFO ___FILE_ONLY___ ═
543
+ 2024-03-07 14:31:29,454 INFO ___FILE_ONLY___ ═
544
+ 2024-03-07 14:31:29,457 INFO ___FILE_ONLY___ ═
545
+ 2024-03-07 14:31:29,460 INFO ___FILE_ONLY___ ═
546
+ 2024-03-07 14:31:29,462 INFO ___FILE_ONLY___ ═
547
+ 2024-03-07 14:31:29,465 INFO ___FILE_ONLY___ ═
548
+ 2024-03-07 14:31:29,468 INFO ___FILE_ONLY___ ═
549
+ 2024-03-07 14:31:29,471 INFO ___FILE_ONLY___ ═
550
+ 2024-03-07 14:31:29,473 INFO ___FILE_ONLY___ ═
551
+ 2024-03-07 14:31:29,476 INFO ___FILE_ONLY___ ═
552
+ 2024-03-07 14:31:29,479 INFO ___FILE_ONLY___ ═
553
+ 2024-03-07 14:31:29,482 INFO ___FILE_ONLY___ ═
554
+ 2024-03-07 14:31:29,484 INFO ___FILE_ONLY___ ═
555
+ 2024-03-07 14:31:29,487 INFO ___FILE_ONLY___ ═
556
+ 2024-03-07 14:31:29,490 INFO ___FILE_ONLY___ ═
557
+ 2024-03-07 14:31:31,560 INFO ___FILE_ONLY___ ══════════
558
+ 2024-03-07 14:31:31,566 INFO ___FILE_ONLY___ ═════════
559
+ 2024-03-07 14:31:31,594 INFO ___FILE_ONLY___ ═══════════
560
+ 2024-03-07 14:31:31,594 INFO ___FILE_ONLY___ ╝
561
+
562
+ 2024-03-07 14:31:31,617 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
563
+
564
+ 2024-03-07 14:31:31,617 INFO ___FILE_ONLY___ ╠═ Installing: gcloud cli dependencies ═╣
565
+
566
+ 2024-03-07 14:31:31,617 INFO ___FILE_ONLY___ ╚
567
+ 2024-03-07 14:31:31,621 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
568
+ 2024-03-07 14:31:31,680 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-gcloud-deps-linux-x86_64-20210416153011.tar.gz HTTP/1.1" 200 104
569
+ 2024-03-07 14:31:31,680 INFO ___FILE_ONLY___ ══════════════════════════════
570
+ 2024-03-07 14:31:31,681 INFO ___FILE_ONLY___ ══════════════════════════════
571
+ 2024-03-07 14:31:31,681 INFO ___FILE_ONLY___ ╝
572
+
573
+ 2024-03-07 14:31:31,689 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
574
+
575
+ 2024-03-07 14:31:31,689 INFO ___FILE_ONLY___ ╠═ Creating backup and activating new installation ═╣
576
+
577
+ 2024-03-07 14:31:31,689 INFO ___FILE_ONLY___ ╚
578
+ 2024-03-07 14:31:31,690 DEBUG root Attempting to move directory [/tools/google-cloud-sdk] to [/tools/google-cloud-sdk.staging/.install/.backup]
579
+ 2024-03-07 14:31:31,690 INFO ___FILE_ONLY___ ══════════════════════════════
580
+ 2024-03-07 14:31:31,690 DEBUG root Attempting to move directory [/tools/google-cloud-sdk.staging] to [/tools/google-cloud-sdk]
581
+ 2024-03-07 14:31:31,690 INFO ___FILE_ONLY___ ══════════════════════════════
582
+ 2024-03-07 14:31:31,690 INFO ___FILE_ONLY___ ╝
583
+
584
+ 2024-03-07 14:31:31,694 DEBUG root Updating notification cache...
585
+ 2024-03-07 14:31:31,694 INFO ___FILE_ONLY___
586
+
587
+ 2024-03-07 14:31:31,696 INFO ___FILE_ONLY___ Performing post processing steps...
588
+ 2024-03-07 14:31:31,697 DEBUG root Executing command: ['/tools/google-cloud-sdk/bin/gcloud', 'components', 'post-process']
589
+ 2024-03-07 14:31:42,153 DEBUG ___FILE_ONLY___
590
+ 2024-03-07 14:31:42,154 DEBUG ___FILE_ONLY___
591
+ 2024-03-07 14:31:42,213 INFO ___FILE_ONLY___
592
+ Update done!
593
+
594
+
595
+ 2024-03-07 14:31:42,217 DEBUG root Chosen display Format:none
596
+ 2024-03-07 14:31:42,218 INFO root Display format: "none"
.config/logs/2024.03.07/14.31.32.270753.log ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ 2024-03-07 14:31:32,271 DEBUG root Loaded Command Group: ['gcloud', 'components']
2
+ 2024-03-07 14:31:32,273 DEBUG root Loaded Command Group: ['gcloud', 'components', 'post_process']
3
+ 2024-03-07 14:31:32,276 DEBUG root Running [gcloud.components.post-process] with arguments: []
4
+ 2024-03-07 14:31:42,047 DEBUG root Chosen display Format:none
5
+ 2024-03-07 14:31:42,047 INFO root Display format: "none"
.config/logs/2024.03.07/14.31.42.911124.log ADDED
@@ -0,0 +1,169 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-03-07 14:31:42,912 DEBUG root Loaded Command Group: ['gcloud', 'components']
2
+ 2024-03-07 14:31:42,914 DEBUG root Loaded Command Group: ['gcloud', 'components', 'update']
3
+ 2024-03-07 14:31:42,916 DEBUG root Running [gcloud.components.update] with arguments: [--quiet: "True", COMPONENT-IDS:8: "['gcloud', 'core', 'bq', 'gsutil', 'compute', 'preview', 'alpha', 'beta']"]
4
+ 2024-03-07 14:31:42,918 INFO ___FILE_ONLY___ Beginning update. This process may take several minutes.
5
+
6
+ 2024-03-07 14:31:42,923 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
7
+ 2024-03-07 14:31:42,981 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components-2.json HTTP/1.1" 200 214450
8
+ 2024-03-07 14:31:43,001 WARNING root Component [preview] no longer exists.
9
+ 2024-03-07 14:31:43,001 WARNING root Component [compute] no longer exists.
10
+ 2024-03-07 14:31:43,002 INFO ___FILE_ONLY___
11
+
12
+ 2024-03-07 14:31:43,003 INFO ___FILE_ONLY___
13
+ Your current Google Cloud CLI version is: 467.0.0
14
+
15
+ 2024-03-07 14:31:43,003 INFO ___FILE_ONLY___ Installing components from version: 467.0.0
16
+
17
+ 2024-03-07 14:31:43,003 INFO ___FILE_ONLY___
18
+
19
+ 2024-03-07 14:31:43,003 DEBUG root Chosen display Format:table[box,title="These components will be removed."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
20
+ 2024-03-07 14:31:43,004 DEBUG root Chosen display Format:table[box,title="These components will be updated."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
21
+ 2024-03-07 14:31:43,005 DEBUG root Chosen display Format:table[box,title="These components will be installed."](details.display_name:label=Name:align=left,version.version_string:label=Version:align=right,data.size.size(zero="",min=1048576):label=Size:align=right)
22
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___ ┌──────────────────────────────────────────────┐
23
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___
24
+
25
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___ │ These components will be installed. │
26
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___
27
+
28
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___ ├───────────────────────┬────────────┬─────────┤
29
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___
30
+
31
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___ │ Name │ Version │ Size │
32
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___
33
+
34
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___ ├───────────────────────┼────────────┼─────────┤
35
+ 2024-03-07 14:31:43,007 INFO ___FILE_ONLY___
36
+
37
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___ │
38
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___ gcloud Alpha Commands
39
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___
40
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___ │
41
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___ 2024.02.29
42
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___
43
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___ │
44
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___ < 1 MiB
45
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___
46
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___ │
47
+ 2024-03-07 14:31:43,008 INFO ___FILE_ONLY___
48
+
49
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___ │
50
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___ gcloud Beta Commands
51
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___
52
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___ │
53
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___ 2024.02.29
54
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___
55
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___ │
56
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___ < 1 MiB
57
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___
58
+ 2024-03-07 14:31:43,009 INFO ___FILE_ONLY___ │
59
+ 2024-03-07 14:31:43,010 INFO ___FILE_ONLY___
60
+
61
+ 2024-03-07 14:31:43,010 INFO ___FILE_ONLY___ └───────────────────────┴────────────┴─────────┘
62
+ 2024-03-07 14:31:43,010 INFO ___FILE_ONLY___
63
+
64
+ 2024-03-07 14:31:43,010 INFO ___FILE_ONLY___
65
+
66
+ 2024-03-07 14:31:43,013 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
67
+ 2024-03-07 14:31:43,069 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/RELEASE_NOTES HTTP/1.1" 200 1163855
68
+ 2024-03-07 14:31:43,136 INFO ___FILE_ONLY___ For the latest full release notes, please visit:
69
+ https://cloud.google.com/sdk/release_notes
70
+
71
+
72
+ 2024-03-07 14:31:43,138 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
73
+
74
+ 2024-03-07 14:31:43,139 INFO ___FILE_ONLY___ ╠═ Creating update staging area ═╣
75
+
76
+ 2024-03-07 14:31:43,139 INFO ___FILE_ONLY___ ╚
77
+ 2024-03-07 14:31:43,139 INFO ___FILE_ONLY___ ══════
78
+ 2024-03-07 14:31:43,848 INFO ___FILE_ONLY___ ══════
79
+ 2024-03-07 14:31:43,848 INFO ___FILE_ONLY___ ══════
80
+ 2024-03-07 14:31:44,612 INFO ___FILE_ONLY___ ═
81
+ 2024-03-07 14:31:44,651 INFO ___FILE_ONLY___ ═
82
+ 2024-03-07 14:31:44,705 INFO ___FILE_ONLY___ ═
83
+ 2024-03-07 14:31:44,744 INFO ___FILE_ONLY___ ═
84
+ 2024-03-07 14:31:44,786 INFO ___FILE_ONLY___ ═
85
+ 2024-03-07 14:31:44,826 INFO ___FILE_ONLY___ ═
86
+ 2024-03-07 14:31:44,869 INFO ___FILE_ONLY___ ═
87
+ 2024-03-07 14:31:44,918 INFO ___FILE_ONLY___ ═
88
+ 2024-03-07 14:31:45,095 INFO ___FILE_ONLY___ ═
89
+ 2024-03-07 14:31:45,182 INFO ___FILE_ONLY___ ═
90
+ 2024-03-07 14:31:45,352 INFO ___FILE_ONLY___ ═
91
+ 2024-03-07 14:31:45,420 INFO ___FILE_ONLY___ ═
92
+ 2024-03-07 14:31:45,562 INFO ___FILE_ONLY___ ═
93
+ 2024-03-07 14:31:45,646 INFO ___FILE_ONLY___ ═
94
+ 2024-03-07 14:31:45,729 INFO ___FILE_ONLY___ ═
95
+ 2024-03-07 14:31:45,794 INFO ___FILE_ONLY___ ═
96
+ 2024-03-07 14:31:45,840 INFO ___FILE_ONLY___ ═
97
+ 2024-03-07 14:31:45,902 INFO ___FILE_ONLY___ ═
98
+ 2024-03-07 14:31:45,972 INFO ___FILE_ONLY___ ═
99
+ 2024-03-07 14:31:46,016 INFO ___FILE_ONLY___ ═
100
+ 2024-03-07 14:31:46,074 INFO ___FILE_ONLY___ ═
101
+ 2024-03-07 14:31:46,167 INFO ___FILE_ONLY___ ═
102
+ 2024-03-07 14:31:46,237 INFO ___FILE_ONLY___ ═
103
+ 2024-03-07 14:31:46,294 INFO ___FILE_ONLY___ ═
104
+ 2024-03-07 14:31:46,359 INFO ___FILE_ONLY___ ═
105
+ 2024-03-07 14:31:46,435 INFO ___FILE_ONLY___ ═
106
+ 2024-03-07 14:31:46,584 INFO ___FILE_ONLY___ ═
107
+ 2024-03-07 14:31:46,642 INFO ___FILE_ONLY___ ═
108
+ 2024-03-07 14:31:46,703 INFO ___FILE_ONLY___ ═
109
+ 2024-03-07 14:31:46,784 INFO ___FILE_ONLY___ ═
110
+ 2024-03-07 14:31:46,844 INFO ___FILE_ONLY___ ═
111
+ 2024-03-07 14:31:46,902 INFO ___FILE_ONLY___ ═
112
+ 2024-03-07 14:31:46,965 INFO ___FILE_ONLY___ ═
113
+ 2024-03-07 14:31:47,025 INFO ___FILE_ONLY___ ═
114
+ 2024-03-07 14:31:47,138 INFO ___FILE_ONLY___ ═
115
+ 2024-03-07 14:31:47,196 INFO ___FILE_ONLY___ ═
116
+ 2024-03-07 14:31:47,261 INFO ___FILE_ONLY___ ═
117
+ 2024-03-07 14:31:47,315 INFO ___FILE_ONLY___ ═
118
+ 2024-03-07 14:31:47,393 INFO ___FILE_ONLY___ ═
119
+ 2024-03-07 14:31:47,458 INFO ___FILE_ONLY___ ═
120
+ 2024-03-07 14:31:47,529 INFO ___FILE_ONLY___ ═
121
+ 2024-03-07 14:31:47,608 INFO ___FILE_ONLY___ ═
122
+ 2024-03-07 14:31:47,608 INFO ___FILE_ONLY___ ╝
123
+
124
+ 2024-03-07 14:31:50,832 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
125
+
126
+ 2024-03-07 14:31:50,832 INFO ___FILE_ONLY___ ╠═ Installing: gcloud Alpha Commands ═╣
127
+
128
+ 2024-03-07 14:31:50,832 INFO ___FILE_ONLY___ ╚
129
+ 2024-03-07 14:31:50,836 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
130
+ 2024-03-07 14:31:50,898 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-alpha-20240229170130.tar.gz HTTP/1.1" 200 800
131
+ 2024-03-07 14:31:50,899 INFO ___FILE_ONLY___ ══════════════════════════════
132
+ 2024-03-07 14:31:50,901 INFO ___FILE_ONLY___ ══════════════════════════════
133
+ 2024-03-07 14:31:50,901 INFO ___FILE_ONLY___ ╝
134
+
135
+ 2024-03-07 14:31:50,910 INFO ___FILE_ONLY___ ╔════════════════════════════════════════════════════════════╗
136
+
137
+ 2024-03-07 14:31:50,910 INFO ___FILE_ONLY___ ╠═ Installing: gcloud Beta Commands ═╣
138
+
139
+ 2024-03-07 14:31:50,910 INFO ___FILE_ONLY___ ╚
140
+ 2024-03-07 14:31:50,914 DEBUG urllib3.connectionpool Starting new HTTPS connection (1): dl.google.com:443
141
+ 2024-03-07 14:31:50,980 DEBUG urllib3.connectionpool https://dl.google.com:443 "GET /dl/cloudsdk/channels/rapid/components/google-cloud-sdk-beta-20240229170130.tar.gz HTTP/1.1" 200 797
142
+ 2024-03-07 14:31:50,981 INFO ___FILE_ONLY___ ══════════════════════════════
143
+ 2024-03-07 14:31:50,982 INFO ___FILE_ONLY___ ══════════════════════════════
144
+ 2024-03-07 14:31:50,982 INFO ___FILE_ONLY___ ╝
145
+
146
+ 2024-03-07 14:31:50,990 INFO ___FILE_ONLY___ ��════════════════════════════════════════════════════════════╗
147
+
148
+ 2024-03-07 14:31:50,991 INFO ___FILE_ONLY___ ╠═ Creating backup and activating new installation ═╣
149
+
150
+ 2024-03-07 14:31:50,991 INFO ___FILE_ONLY___ ╚
151
+ 2024-03-07 14:31:50,991 DEBUG root Attempting to move directory [/tools/google-cloud-sdk] to [/tools/google-cloud-sdk.staging/.install/.backup]
152
+ 2024-03-07 14:31:50,991 INFO ___FILE_ONLY___ ══════════════════════════════
153
+ 2024-03-07 14:31:50,991 DEBUG root Attempting to move directory [/tools/google-cloud-sdk.staging] to [/tools/google-cloud-sdk]
154
+ 2024-03-07 14:31:50,991 INFO ___FILE_ONLY___ ══════════════════════════════
155
+ 2024-03-07 14:31:50,991 INFO ___FILE_ONLY___ ╝
156
+
157
+ 2024-03-07 14:31:50,996 DEBUG root Updating notification cache...
158
+ 2024-03-07 14:31:50,997 INFO ___FILE_ONLY___
159
+
160
+ 2024-03-07 14:31:50,999 INFO ___FILE_ONLY___ Performing post processing steps...
161
+ 2024-03-07 14:31:50,999 DEBUG root Executing command: ['/tools/google-cloud-sdk/bin/gcloud', 'components', 'post-process']
162
+ 2024-03-07 14:32:01,443 DEBUG ___FILE_ONLY___
163
+ 2024-03-07 14:32:01,444 DEBUG ___FILE_ONLY___
164
+ 2024-03-07 14:32:01,515 INFO ___FILE_ONLY___
165
+ Update done!
166
+
167
+
168
+ 2024-03-07 14:32:01,519 DEBUG root Chosen display Format:none
169
+ 2024-03-07 14:32:01,519 INFO root Display format: "none"
.config/logs/2024.03.07/14.31.51.601291.log ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ 2024-03-07 14:31:51,602 DEBUG root Loaded Command Group: ['gcloud', 'components']
2
+ 2024-03-07 14:31:51,603 DEBUG root Loaded Command Group: ['gcloud', 'components', 'post_process']
3
+ 2024-03-07 14:31:51,606 DEBUG root Running [gcloud.components.post-process] with arguments: []
4
+ 2024-03-07 14:32:01,336 DEBUG root Chosen display Format:none
5
+ 2024-03-07 14:32:01,336 INFO root Display format: "none"
.config/logs/2024.03.07/14.32.02.191519.log ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ 2024-03-07 14:32:02,193 DEBUG root Loaded Command Group: ['gcloud', 'config']
2
+ 2024-03-07 14:32:02,220 DEBUG root Loaded Command Group: ['gcloud', 'config', 'set']
3
+ 2024-03-07 14:32:02,222 DEBUG root Running [gcloud.config.set] with arguments: [SECTION/PROPERTY: "component_manager/disable_update_check", VALUE: "true"]
4
+ 2024-03-07 14:32:02,223 INFO ___FILE_ONLY___ Updated property [component_manager/disable_update_check].
5
+
6
+ 2024-03-07 14:32:02,224 DEBUG root Chosen display Format:default
7
+ 2024-03-07 14:32:02,225 INFO root Display format: "default"
8
+ 2024-03-07 14:32:02,225 DEBUG root SDK update checks are disabled.
.config/logs/2024.03.07/14.32.02.895480.log ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ 2024-03-07 14:32:02,897 DEBUG root Loaded Command Group: ['gcloud', 'config']
2
+ 2024-03-07 14:32:02,923 DEBUG root Loaded Command Group: ['gcloud', 'config', 'set']
3
+ 2024-03-07 14:32:02,926 DEBUG root Running [gcloud.config.set] with arguments: [SECTION/PROPERTY: "compute/gce_metadata_read_timeout_sec", VALUE: "0"]
4
+ 2024-03-07 14:32:02,926 INFO ___FILE_ONLY___ Updated property [compute/gce_metadata_read_timeout_sec].
5
+
6
+ 2024-03-07 14:32:02,927 DEBUG root Chosen display Format:default
7
+ 2024-03-07 14:32:02,928 INFO root Display format: "default"
8
+ 2024-03-07 14:32:02,928 DEBUG root SDK update checks are disabled.
.gitattributes CHANGED
@@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data.json filter=lfs diff=lfs merge=lfs -text
37
+ openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data_cleaned_archive.json filter=lfs diff=lfs merge=lfs -text
38
+ openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data_gpt4.json filter=lfs diff=lfs merge=lfs -text
39
+ sample_data/mnist_test.csv filter=lfs diff=lfs merge=lfs -text
40
+ sample_data/mnist_train_small.csv filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,12 +1,6 @@
1
  ---
2
- title: Content
3
- emoji: 🏢
4
- colorFrom: blue
5
- colorTo: red
6
  sdk: gradio
7
- sdk_version: 4.21.0
8
- app_file: app.py
9
- pinned: false
10
  ---
11
-
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
+ title: content
3
+ app_file: generate.py
 
 
4
  sdk: gradio
5
+ sdk_version: 3.44.4
 
 
6
  ---
 
 
generate.py ADDED
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+
4
+ import fire
5
+ import gradio as gr
6
+ import torch
7
+ import transformers
8
+ from peft import PeftModel
9
+ from transformers import GenerationConfig, LlamaForCausalLM, LlamaTokenizer
10
+
11
+ from utils.callbacks import Iteratorize, Stream
12
+ from utils.prompter import Prompter
13
+
14
+ if torch.cuda.is_available():
15
+ device = "cuda"
16
+ else:
17
+ device = "cpu"
18
+
19
+ try:
20
+ if torch.backends.mps.is_available():
21
+ device = "mps"
22
+ except: # noqa: E722
23
+ pass
24
+
25
+
26
+ def main(
27
+ load_8bit: bool = False,
28
+ base_model: str = "",
29
+ lora_weights: str = "",
30
+ prompt_template: str = "llama_v2", # The prompt template to use, will default to alpaca.
31
+ server_name: str = "0.0.0.0", # Allows to listen on all interfaces by providing '0.
32
+ share_gradio: bool = False,
33
+ ):
34
+ lora_weights = lora_weights or os.environ.get("LORA_WEIGHTS", "")
35
+ base_model = base_model or os.environ.get("BASE_MODEL", "")
36
+ assert (
37
+ base_model
38
+ ), "Please specify a --base_model, e.g. --base_model='decapoda-research/llama-7b-hf'"
39
+
40
+ print("base_model:", base_model)
41
+ print("lora_weights:", lora_weights)
42
+
43
+ prompter = Prompter(prompt_template)
44
+ tokenizer = LlamaTokenizer.from_pretrained(base_model)
45
+ if device == "cuda":
46
+ model = LlamaForCausalLM.from_pretrained(
47
+ base_model,
48
+ load_in_8bit=load_8bit,
49
+ torch_dtype=torch.float16,
50
+ device_map="auto",
51
+ )
52
+ model = PeftModel.from_pretrained(
53
+ model,
54
+ lora_weights,
55
+ torch_dtype=torch.float16,
56
+ )
57
+ elif device == "mps":
58
+ model = LlamaForCausalLM.from_pretrained(
59
+ base_model,
60
+ device_map={"": device},
61
+ torch_dtype=torch.float16,
62
+ )
63
+ model = PeftModel.from_pretrained(
64
+ model,
65
+ lora_weights,
66
+ device_map={"": device},
67
+ torch_dtype=torch.float16,
68
+ )
69
+ else:
70
+ model = LlamaForCausalLM.from_pretrained(
71
+ base_model, device_map={"": device}, low_cpu_mem_usage=True
72
+ )
73
+ model = PeftModel.from_pretrained(
74
+ model,
75
+ lora_weights,
76
+ device_map={"": device},
77
+ )
78
+
79
+ # unwind broken decapoda-research config
80
+ model.config.pad_token_id = tokenizer.pad_token_id = 0 # unk
81
+ model.config.bos_token_id = 1
82
+ model.config.eos_token_id = 2
83
+
84
+ if not load_8bit:
85
+ model.half() # seems to fix bugs for some users.
86
+
87
+ model.eval()
88
+ if torch.__version__ >= "2" and sys.platform != "win32":
89
+ model = torch.compile(model)
90
+
91
+ def evaluate(
92
+ instruction,
93
+ input=None,
94
+ temperature=0.1,
95
+ top_p=0.75,
96
+ top_k=40,
97
+ num_beams=4,
98
+ max_new_tokens=128,
99
+ stream_output=False,
100
+ **kwargs,
101
+ ):
102
+ prompt = prompter.generate_prompt(instruction, input)
103
+ inputs = tokenizer(prompt, return_tensors="pt")
104
+ input_ids = inputs["input_ids"].to(device)
105
+ generation_config = GenerationConfig(
106
+ temperature=temperature,
107
+ top_p=top_p,
108
+ top_k=top_k,
109
+ num_beams=num_beams,
110
+ **kwargs,
111
+ )
112
+
113
+ generate_params = {
114
+ "input_ids": input_ids,
115
+ "generation_config": generation_config,
116
+ "return_dict_in_generate": True,
117
+ "output_scores": True,
118
+ "max_new_tokens": max_new_tokens,
119
+ }
120
+
121
+ if stream_output:
122
+ # Stream the reply 1 token at a time.
123
+ # This is based on the trick of using 'stopping_criteria' to create an iterator,
124
+ # from https://github.com/oobabooga/text-generation-webui/blob/ad37f396fc8bcbab90e11ecf17c56c97bfbd4a9c/modules/text_generation.py#L216-L243.
125
+
126
+ def generate_with_callback(callback=None, **kwargs):
127
+ kwargs.setdefault(
128
+ "stopping_criteria", transformers.StoppingCriteriaList()
129
+ )
130
+ kwargs["stopping_criteria"].append(
131
+ Stream(callback_func=callback)
132
+ )
133
+ with torch.no_grad():
134
+ model.generate(**kwargs)
135
+
136
+ def generate_with_streaming(**kwargs):
137
+ return Iteratorize(
138
+ generate_with_callback, kwargs, callback=None
139
+ )
140
+
141
+ with generate_with_streaming(**generate_params) as generator:
142
+ for output in generator:
143
+ # new_tokens = len(output) - len(input_ids[0])
144
+ decoded_output = tokenizer.decode(output)
145
+
146
+ if output[-1] in [tokenizer.eos_token_id]:
147
+ break
148
+
149
+ yield prompter.get_response(decoded_output)
150
+ return # early return for stream_output
151
+
152
+ # Without streaming
153
+ with torch.no_grad():
154
+ generation_output = model.generate(
155
+ input_ids=input_ids,
156
+ generation_config=generation_config,
157
+ return_dict_in_generate=True,
158
+ output_scores=True,
159
+ max_new_tokens=max_new_tokens,
160
+ )
161
+ s = generation_output.sequences[0]
162
+ output = tokenizer.decode(s)
163
+ yield prompter.get_response(output)
164
+
165
+ gr.Interface(
166
+ fn=evaluate,
167
+ inputs=[
168
+ gr.components.Textbox(
169
+ lines=2,
170
+ label="Instruction",
171
+ placeholder="Tell me about llama.",
172
+ ),
173
+ gr.components.Textbox(lines=2, label="Input", placeholder="none"),
174
+ gr.components.Slider(
175
+ minimum=0, maximum=1, value=0.1, label="Temperature"
176
+ ),
177
+ gr.components.Slider(
178
+ minimum=0, maximum=1, value=0.75, label="Top p"
179
+ ),
180
+ gr.components.Slider(
181
+ minimum=0, maximum=100, step=1, value=40, label="Top k"
182
+ ),
183
+ gr.components.Slider(
184
+ minimum=1, maximum=4, step=1, value=4, label="Beams"
185
+ ),
186
+ gr.components.Slider(
187
+ minimum=1, maximum=2000, step=1, value=128, label="Max tokens"
188
+ ),
189
+ gr.components.Checkbox(label="Stream output"),
190
+ ],
191
+ outputs=[
192
+ gr.inputs.Textbox(
193
+ lines=5,
194
+ label="Output",
195
+ )
196
+ ],
197
+ title="🇹🇭 OpenThaiGPT 1.0.0-beta",
198
+ description="🇹🇭 OpenThaiGPT 1.0.0-beta is a 7B-parameter LLaMA model finetuned to follow Thai instructions. It is trained on various dataset and makes use of the Huggingface LLaMA implementation. For more information, please visit [the project's website](https://openthaigpt.aieat.or.th).", # noqa: E501
199
+ ).queue().launch(server_name="0.0.0.0", share=share_gradio)
200
+ # Old testing code follows.
201
+
202
+ """
203
+ # testing code for readme
204
+ for instruction in [
205
+ "Tell me about alpacas.",
206
+ "Tell me about the president of Mexico in 2019.",
207
+ "Tell me about the king of France in 2019.",
208
+ "List all Canadian provinces in alphabetical order.",
209
+ "Write a Python program that prints the first 10 Fibonacci numbers.",
210
+ "Write a program that prints the numbers from 1 to 100. But for multiples of three print 'Fizz' instead of the number and for the multiples of five print 'Buzz'. For numbers which are multiples of both three and five print 'FizzBuzz'.", # noqa: E501
211
+ "Tell me five words that rhyme with 'shock'.",
212
+ "Translate the sentence 'I have no mouth but I must scream' into Spanish.",
213
+ "Count up from 1 to 500.",
214
+ ]:
215
+ print("Instruction:", instruction)
216
+ print("Response:", evaluate(instruction))
217
+ print()
218
+ """
219
+
220
+
221
+ if __name__ == "__main__":
222
+ fire.Fire(main)
khumpun-200-18++.csv ADDED
@@ -0,0 +1,208 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ w1,w2
2
+ คำผวน,ควนผำ
3
+ นอนแล้ว,แนวล้อน
4
+ ตะปู,ตูปะ
5
+ นักเรียน,เนียนรัก
6
+ ขนม,ขมหนะ
7
+ เรอทัก,รักเทอ
8
+ ลองดู,ลูดอง
9
+ เจอพี่,จีเพ่อ
10
+ เรอมัก,รักเมอ
11
+ อาไบ้,ไอบ้า
12
+ หิวข้าว,หาวขิ้ว
13
+ กะหล่ำ,ก่ำหละ
14
+ เจอหมึก,จึกเมอ
15
+ มะนาวต่างดุ๊ด,มะนุดต่างดาว
16
+ กาเป็นหมู,กูเป็นหมา
17
+ ก้างใหญ่,ใก้หญ่าง
18
+ อะหรี่ดอย,อะหร่อยดี
19
+ นอนแล้ว,แนวล้อน
20
+ ตะปู,ตูปะ
21
+ นักเรียน,เนียนรัก
22
+ ขนม,ขมหนะ
23
+ เรอทัก,รักเทอ
24
+ สวัสดี,สะวีดัส
25
+ เจออยู่ที่ไทย,ใจอยู่ที่เธอ
26
+ เชี่ยเธอนะนอบ,"ชอบเธอนะเนี่ย
27
+
28
+ "
29
+ แสงดี,สีแดง
30
+ แช่งมั่ง,ชั่งแม่ง
31
+ นายห,นกยา
32
+ คั่นกู,คู่กัน
33
+ คั่นกลาง,ข้างกัน
34
+ ดิสกว่าที่คีย์,ดีกว่าที่คิด
35
+ ล้าเธอนัก,รักเธอน้า
36
+ เรอแต่ทัก,รักแต่เธอ
37
+ เบลอว่าชอบแถบ,"แบบว่าชอบเธอ
38
+
39
+ "
40
+ เมาเป็นแฟนรา,มาเป็นแฟนเรา
41
+ ไหล่โดนจอ,หล่อโดนใจ
42
+ เจอละไม,ใจละเมอ
43
+ ฟันแกน,แฟนกัน
44
+ ชู้แต่เธอไม่รอบ,ชอบแต่เธอไม่รู้
45
+ แขนเป็นฟอ,ขอเป็นแฟน
46
+ ไข้ที่ชล,คนที่ใช่
47
+ "ดอยหมอ
48
+ ",ดอหมอย
49
+ ดอยติดหมอ,ดอติดหมอย
50
+ ดอยนี้มีแต่หมอ,ดอนี้มีแต่หมอย
51
+ ดอยหมู,ดูหมอย
52
+ ดอยหมึง,ดึงหมอย
53
+ ดอยหึง,ดึงหอย
54
+ ดอยไหม,ไดร์หมอย
55
+ ดอยหม,ดมหมอย
56
+ คนล่องหวย,ควยล่องหน
57
+ หมีจับหวย,หมวยจับหี
58
+ หมีเล่นหวย,หมวยเล่นหี
59
+ หมีคลำหวย,หมวยคลำหี
60
+ หมีขายหวย,หมวยขายหี
61
+ หวยลำเค็ญ,เหม็นลำควย
62
+ หวยเคน,เห็นควย
63
+ "หวยอาหมี
64
+
65
+ ",หีอาหมวย
66
+ หวยเมาคลี,หีเมาควย
67
+ หวยสี,หีสวย
68
+ สีเหียว,เสียวหี
69
+ "หวยคลุกคลี
70
+ ",หีคลุกควย
71
+ คีย์แทงหวย,ควยแทงหี
72
+ หวยโค,โหควย
73
+ หวยคาร์,หาควย
74
+ หวยซี,หีซวย
75
+ โครมหักหวย,ควยหักโหม
76
+ ผีกาลกิณัว,ผัวกาลกิณี
77
+ ผีจับหัว,ผัวจับหี
78
+ ผีไร้หัว,ผัวไร้หี
79
+ ผีไม่มัว,ผัวไม่มี
80
+ ผีเหา,เผาหี
81
+ ผีเอาฮา,พาเอาหี
82
+ ผีห่าน,ผ่านหี
83
+ ผีไร้ห้อง,พ่องไร้หี
84
+ ผีบังห้า,ผ้าบังหี
85
+ ผีเห็ด,เผ็ดหี
86
+ ผีเล่นฮอน,พรเล่นหี
87
+ ผีกับเฮีย,เพลียกับหี
88
+ ผีไม่สนหัว,ผัวไม่สนหี
89
+ ใช้หัวจำ,ช้ำหัวใจ
90
+ เอดส์ยศ,อดเย็ด
91
+ เห็นหมี,หีเหม็น
92
+ เค้กไม่รวย,ควยไม่เล็ก
93
+ ไข่พาดรวย,ควยพาดไหล่
94
+ เค้กรวย,ควยเล็ก
95
+ ครกซกมวย,ควยซกมก
96
+ โค้กแข็งปวย,ควยแข็งโป๊ก
97
+ หูไม่น่าดี,หีไม่น่าดู
98
+ เพชรยักษ์,พักเย็ด
99
+ ยักษ์ไม่มีเพชร,เย็ดไม่มีพัก
100
+ แบกไหขึ้นรถฟรี,แบกหีขึ้นรถไฟ
101
+ อุปสมบท,อดผสมบุตร
102
+ สูดรูเตียว,เสียวรูตู
103
+ ชางเหมียง,เชียงใหม่
104
+ ชายเรียง,เชียงราย
105
+ ,
106
+ เพรชยุ่ง,พุ่งเย็ด
107
+ เพชรกระไดยิง,พิงกระไดเย็ด
108
+ เพชรมายา,พามาเย็ด
109
+ เพชรกันยา,พากันเย็ด
110
+ เพชรกันหยัด,ผลัดกันเย็ด
111
+ เพชรกำแพงยิง,พิงกำแพงเย็ด
112
+ ยักไม่มีเพชร,เย็ดไม่มีพัก
113
+ ยักไม่ให้เพชร,เย็ดไม่ให้พัก
114
+ เพชรย้อม,พร้อมเย็ด
115
+ พาเพชรมายะ,พาพระมาเย็ด
116
+ พาเพชรมาแยะ,พาแพะมาเย็ด
117
+ เพชรจับยัว,ผัวจับเย็บ
118
+ สีแหบ,แสบหี
119
+ สีเหียว,เสียวหี
120
+ สีดูหุ้ม,ซุ้มดูหี
121
+ สีเฮี่ยน,เสี้ยหี
122
+ สีเหียบ,เสียบหี
123
+ สี่หาย,สายหี
124
+ สีหัก,สักหี
125
+ หมอยู,หมูยอ
126
+ ถาวฝักยัว,ถั่วฝักยาว
127
+ ไกปู,กูไป
128
+ คิงรวย,ควยลิง
129
+ มวยโค๊ก,โม๊กควย
130
+ หมูกะโจ๊ก,โม๊กกะจู๋
131
+ ขวดตำรวย,ควยตำรวย
132
+ ลีเหีย,เลียหี
133
+ ฟูยัก,ฟักยู
134
+ ผีดูหัว,ผัวดูหี
135
+ พรมลัด,พัดลม
136
+ เชี่ยนะนอบ,ชอบนะเนี้ย
137
+ ขาดน้วม,ขวดน้ำ
138
+ ถอยหมอน,ถอนหมอย
139
+ หมออ้อย,หมอย
140
+ หมากระทุ,หมุกระทะ
141
+ แต้ชู้,ตู้แช่
142
+ ลิ้นจี่หน้าหอ,ลิ้นจอหน้าหี
143
+ ถอกระดอก,ถอกกระดอ
144
+ กบโดนต้ม,ก้มโดนตบ
145
+ หมออยู่บนดอย,หมอยอยู่บนดอ
146
+ ,
147
+ เจ็ดยับ,จับเย็ด
148
+ ยูประดุกฟัม,ยำปลาดุกฟู
149
+ ยำปลาจาราเม็ด,เย็ดปลาจารามัม
150
+ ,
151
+ พายมึงต้อ,พ่อมึงตาย
152
+ เปิดหูละหน้าดี,เปิดหีละน่าดู
153
+ ,
154
+ ม้านางฝอย,หมอยนางฟ้า
155
+ จานพยาบิม,จิ๋มพายาบาล
156
+ คอหมวย,ควยหมอ
157
+ ไหมเส้นหย่อยๆ,หมอยเส้นใหญ่
158
+ จากระแทกขิม,จิ๋มกระแทกขา
159
+ บีแห่,แบหี
160
+ คลังพะลุงพะลวย,ควยพะลุงพะลัง
161
+ อย่ามาออตรงกระได,อย่ามาไอตรงกระดอ
162
+ เหี้ยนะห่วง,ห่วงนะเนี้ย
163
+ รอซื้อกระดุม,รุมซื้อกระดอ
164
+ เหี้นเท่าเกาะสะหมี,หีเท่าเกาะเสม็ด
165
+ จอดับ,จับดอ
166
+ จี๋เห็บ,เจ็บหี
167
+ หอโดนดี,หีโดนดอ
168
+ ผีทะลวงหัว,ผัวทะลวงหี
169
+ น้ำมันหมีเข้าหู,น้ำมันหมูทอดหี
170
+ หาดโดนฉีขี,หีโดนฉีกขาด
171
+ สหมีเหอ,สเมอหนี
172
+ น้ำตาลในรูทะแวก,น้ำแตกในรูทะวาน
173
+ ก้างใหญ่,ไก่ย่าง
174
+ มดทรยิด,มิตรทรยศ
175
+ แมวกัน,มันแกว
176
+ ปาสายเต้า,เป้าสายตา
177
+ เศษซะกี๋,ศรีสะเกด
178
+ ขวดตำรุน,คุณตำรวย
179
+ หมอกจิ้งจา,หมาจิ้งจอก
180
+ สีกระแทกเหา,เสากระแทกหี
181
+ กระเป๋าไดโนโสก,กระโปกไดโนเสา
182
+ ไตหาหัวจาม,ตามหาหัวใจ
183
+ มันเป็นแฟกา,มาเป็นแฟนกัน
184
+ เว้นวรรคให้เทอไลค์,เว้นไว้ให้เธอรัก
185
+ ลาหรือปั๊ก,รักรึป่าว
186
+ ผักไหมถ้าจะริบ,ผิดไหมที่จะรัก
187
+ ลบเธอเทอไม่รุ้จัก,รักเธอไม่รุ้จบ
188
+ ดวงจันทร์ไม่เคยห่างไกล,ดวงใจไม่เคยห่างกัน
189
+ เจอมีแต่ไท,ใจมีแต่เธอ
190
+ โล่นะเด็กงัก,รักนะเด็กโง่
191
+ หยอดให้เธอกาก,อยากให้เธอกอด
192
+ ยักให้บอหหลาก,อยากให้บอกรัก
193
+ ออบไม่ต่าน,อ่านไม่ตอบ
194
+ เขียดผู้มีแกรก,แขกผู้มีเกียจ
195
+ เหี่ยวเธอคนดวง,หวงเธอคนเดียว
196
+ ไขให้เธอเห็นจอ,ขอให้เธอเห็นใจ
197
+ ตีไม่มัง,ตังไม่มี
198
+ ไขตังหน่อ,ขอตังหน่อย
199
+ ไล่คือการฮัก,รักคือการให้
200
+ จันอยู่ใกล้ไกล,ใจอยู่ใกล้กัน
201
+ คะกันนบ,คบกันนะ
202
+ แพ้รักทบ,พบรักแท้
203
+ สบีดาย,สบายดี
204
+ ไรเต็มหัวจั๊ก,รักเต็มหัวใจ
205
+ ไทยมีเจอ,เทอไม่มีใจ
206
+ คันถึงทุกวิ,คิดถึงทุกวัน
207
+ รอบมั้ยว่าชู้,รู้มั้ยว่าชอบ
208
+ เขอมีเพียงทอ,ขอมีเพียงเธอ
openthaigpt_Finetuning/.gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
openthaigpt_Finetuning/openthaigpt-finetune/.dockerignore ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ .venv
2
+ .github
3
+ .vscode
4
+ .docker-compose.yml
openthaigpt_Finetuning/openthaigpt-finetune/.gitignore ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ out/
2
+ 7B/
3
+ 13B/
4
+ __pycache__/
5
+ checkpoint**
6
+ minimal-llama**
7
+ upload.py
8
+ lora-**
9
+ *ckpt
10
+ wandb
11
+ evaluate.py
12
+ test_data.json
13
+ todo.txt
14
+ .venv
15
+ .vscode
openthaigpt_Finetuning/openthaigpt-finetune/DATA_LICENSE ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Attribution License (ODC-By)
2
+ PREAMBLE
3
+ The Open Data Commons Attribution License is a license agreement intended to allow users to freely share, modify, and use this Database subject only to the attribution requirements set out in Section 4.
4
+
5
+ Databases can contain a wide variety of types of content (images, audiovisual material, and sounds all in the same database, for example), and so this license only governs the rights over the Database, and not the contents of the Database individually. Licensors may therefore wish to use this license together with another license for the contents.
6
+
7
+ Sometimes the contents of a database, or the database itself, can be covered by other rights not addressed here (such as private contracts, trademark over the name, or privacy rights / data protection rights over information in the contents), and so you are advised that you may have to consult other documents or clear other rights before doing activities not covered by this License.
8
+
9
+ The Licensor (as defined below)
10
+
11
+ and
12
+
13
+ You (as defined below)
14
+
15
+ agree as follows:
16
+
17
+ 1.0 DEFINITIONS OF CAPITALISED WORDS
18
+ “Collective Database” – Means this Database in unmodified form as part of a collection of independent databases in themselves that together are assembled into a collective whole. A work that constitutes a Collective Database will not be considered a Derivative Database.
19
+
20
+ “Convey” – As a verb, means Using the Database, a Derivative Database, or the Database as part of a Collective Database in any way that enables a Person to make or receive copies of the Database or a Derivative Database. Conveying does not include interaction with a user through a computer network, or creating and Using a Produced Work, where no transfer of a copy of the Database or a Derivative Database occurs.
21
+
22
+ “Contents” – The contents of this Database, which includes the information, independent works, or other material collected into the Database. For example, the contents of the Database could be factual data or works such as images, audiovisual material, text, or sounds.
23
+
24
+ “Database” – A collection of material (the Contents) arranged in a systematic or methodical way and individually accessible by electronic or other means offered under the terms of this License.
25
+
26
+ “Database Directive” – Means Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended or succeeded.
27
+
28
+ “Database Right” – Means rights resulting from the Chapter III (“sui generis”) rights in the Database Directive (as amended and as transposed by member states), which includes the Extraction and Re-utilisation of the whole or a Substantial part of the Contents, as well as any similar rights available in the relevant jurisdiction under Section 10.4.
29
+
30
+ “Derivative Database” – Means a database based upon the Database, and includes any translation, adaptation, arrangement, modification, or any other alteration of the Database or of a Substantial part of the Contents. This includes, but is not limited to, Extracting or Re-utilising the whole or a Substantial part of the Contents in a new Database.
31
+
32
+ “Extraction” – Means the permanent or temporary transfer of all or a Substantial part of the Contents to another medium by any means or in any form.
33
+
34
+ “License” – Means this license agreement and is both a license of rights such as copyright and Database Rights and an agreement in contract.
35
+
36
+ “Licensor” – Means the Person that offers the Database under the terms of this License.
37
+
38
+ “Person” – Means a natural or legal person or a body of persons corporate or incorporate.
39
+
40
+ “Produced Work” – a work (such as an image, audiovisual material, text, or sounds) resulting from using the whole or a Substantial part of the Contents (via a search or other query) from this Database, a Derivative Database, or this Database as part of a Collective Database.
41
+
42
+ “Publicly” – means to Persons other than You or under Your control by either more than 50% ownership or by the power to direct their activities (such as contracting with an independent consultant).
43
+
44
+ “Re-utilisation” – means any form of making available to the public all or a Substantial part of the Contents by the distribution of copies, by renting, by online or other forms of transmission.
45
+
46
+ “Substantial” – Means substantial in terms of quantity or quality or a combination of both. The repeated and systematic Extraction or Re-utilisation of insubstantial parts of the Contents may amount to the Extraction or Re-utilisation of a Substantial part of the Contents.
47
+
48
+ “Use” – As a verb, means doing any act that is restricted by copyright or Database Rights whether in the original medium or any other; and includes without limitation distributing, copying, publicly performing, publicly displaying, and preparing derivative works of the Database, as well as modifying the Database as may be technically necessary to use it in a different mode or format.
49
+
50
+ “You” – Means a Person exercising rights under this License who has not previously violated the terms of this License with respect to the Database, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation.
51
+
52
+ Words in the singular include the plural and vice versa.
53
+
54
+ 2.0 WHAT THIS LICENSE COVERS
55
+ 2.1. Legal effect of this document. This License is:
56
+
57
+ a. A license of applicable copyright and neighbouring rights;
58
+
59
+ b. A license of the Database Right; and
60
+
61
+ c. An agreement in contract between You and the Licensor.
62
+
63
+ 2.2 Legal rights covered. This License covers the legal rights in the Database, including:
64
+
65
+ a. Copyright. Any copyright or neighbouring rights in the Database. The copyright licensed includes any individual elements of the Database, but does not cover the copyright over the Contents independent of this Database. See Section 2.4 for details. Copyright law varies between jurisdictions, but is likely to cover: the Database model or schema, which is the structure, arrangement, and organisation of the Database, and can also include the Database tables and table indexes; the data entry and output sheets; and the Field names of Contents stored in the Database;
66
+
67
+ b. Database Rights. Database Rights only extend to the Extraction and Re-utilisation of the whole or a Substantial part of the Contents. Database Rights can apply even when there is no copyright over the Database. Database Rights can also apply when the Contents are removed from the Database and are selected and arranged in a way that would not infringe any applicable copyright; and
68
+
69
+ c. Contract. This is an agreement between You and the Licensor for access to the Database. In return you agree to certain conditions of use on this access as outlined in this License.
70
+
71
+ 2.3 Rights not covered.
72
+
73
+ a. This License does not apply to computer programs used in the making or operation of the Database;
74
+
75
+ b. This License does not cover any patents over the Contents or the Database; and
76
+
77
+ c. This License does not cover any trademarks associated with the Database.
78
+
79
+ 2.4 Relationship to Contents in the Database. The individual items of the Contents contained in this Database may be covered by other rights, including copyright, patent, data protection, privacy, or personality rights, and this License does not cover any rights (other than Database Rights or in contract) in individual Contents contained in the Database.
80
+
81
+ For example, if used on a Database of images (the Contents), this License would not apply to copyright over individual images, which could have their own separate licenses, or one single license covering all of the rights over the images.
82
+
83
+ 3.0 RIGHTS GRANTED
84
+ 3.1 Subject to the terms and conditions of this License, the Licensor grants to You a worldwide, royalty-free, non-exclusive, terminable (but only under Section 9) license to Use the Database for the duration of any applicable copyright and Database Rights. These rights explicitly include commercial use, and do not exclude any field of endeavour. To the extent possible in the relevant jurisdiction, these rights may be exercised in all media and formats whether now known or created in the future.
85
+
86
+ The rights granted cover, for example:
87
+
88
+ a. Extraction and Re-utilisation of the whole or a Substantial part of the Contents;
89
+
90
+ b. Creation of Derivative Databases;
91
+
92
+ c. Creation of Collective Databases;
93
+
94
+ d. Creation of temporary or permanent reproductions by any means and in any form, in whole or in part, including of any Derivative Databases or as a part of Collective Databases; and
95
+
96
+ e. Distribution, communication, display, lending, making available, or performance to the public by any means and in any form, in whole or in part, including of any Derivative Database or as a part of Collective Databases.
97
+
98
+ 3.2 Compulsory license schemes. For the avoidance of doubt:
99
+
100
+ a. Non-waivable compulsory license schemes. In those jurisdictions in which the right to collect royalties through any statutory or compulsory licensing scheme cannot be waived, the Licensor reserves the exclusive right to collect such royalties for any exercise by You of the rights granted under this License;
101
+
102
+ b. Waivable compulsory license schemes. In those jurisdictions in which the right to collect royalties through any statutory or compulsory licensing scheme can be waived, the Licensor waives the exclusive right to collect such royalties for any exercise by You of the rights granted under this License; and,
103
+
104
+ c. Voluntary license schemes. The Licensor waives the right to collect royalties, whether individually or, in the event that the Licensor is a member of a collecting society that administers voluntary licensing schemes, via that society, from any exercise by You of the rights granted under this License.
105
+
106
+ 3.3 The right to release the Database under different terms, or to stop distributing or making available the Database, is reserved. Note that this Database may be multiple-licensed, and so You may have the choice of using alternative licenses for this Database. Subject to Section 10.4, all other rights not expressly granted by Licensor are reserved.
107
+
108
+ 4.0 CONDITIONS OF USE
109
+ 4.1 The rights granted in Section 3 above are expressly made subject to Your complying with the following conditions of use. These are important conditions of this License, and if You fail to follow them, You will be in material breach of its terms.
110
+
111
+ 4.2 Notices. If You Publicly Convey this Database, any Derivative Database, or the Database as part of a Collective Database, then You must:
112
+
113
+ a. Do so only under the terms of this License;
114
+
115
+ b. Include a copy of this License or its Uniform Resource Identifier (URI) with the Database or Derivative Database, including both in the Database or Derivative Database and in any relevant documentation;
116
+
117
+ c. Keep intact any copyright or Database Right notices and notices that refer to this License; and
118
+
119
+ d. If it is not possible to put the required notices in a particular file due to its structure, then You must include the notices in a location (such as a relevant directory) where users would be likely to look for it.
120
+
121
+ 4.3 Notice for using output (Contents). Creating and Using a Produced Work does not require the notice in Section 4.2. However, if you Publicly Use a Produced Work, You must include a notice associated with the Produced Work reasonably calculated to make any Person that uses, views, accesses, interacts with, or is otherwise exposed to the Produced Work aware that Content was obtained from the Database, Derivative Database, or the Database as part of a Collective Database, and that it is available under this License.
122
+
123
+ a. Example notice. The following text will satisfy notice under Section 4.3:
124
+
125
+ Contains information from DATABASE NAME which is made available
126
+ under the ODC Attribution License.
127
+ DATABASE NAME should be replaced with the name of the Database and a hyperlink to the location of the Database. “ODC Attribution License” should contain a hyperlink to the URI of the text of this License. If hyperlinks are not possible, You should include the plain text of the required URI’s with the above notice.
128
+
129
+ 4.4 Licensing of others. You may not sublicense the Database. Each time You communicate the Database, the whole or Substantial part of the Contents, or any Derivative Database to anyone else in any way, the Licensor offers to the recipient a license to the Database on the same terms and conditions as this License. You are not responsible for enforcing compliance by third parties with this License, but You may enforce any rights that You have over a Derivative Database. You are solely responsible for any modifications of a Derivative Database made by You or another Person at Your direction. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License.
130
+
131
+ 5.0 MORAL RIGHTS
132
+ 5.1 Moral rights. This section covers moral rights, including any rights to be identified as the author of the Database or to object to treatment that would otherwise prejudice the author’s honour and reputation, or any other derogatory treatment:
133
+
134
+ a. For jurisdictions allowing waiver of moral rights, Licensor waives all moral rights that Licensor may have in the Database to the fullest extent possible by the law of the relevant jurisdiction under Section 10.4;
135
+
136
+ b. If waiver of moral rights under Section 5.1 a in the relevant jurisdiction is not possible, Licensor agrees not to assert any moral rights over the Database and waives all claims in moral rights to the fullest extent possible by the law of the relevant jurisdiction under Section 10.4; and
137
+
138
+ c. For jurisdictions not allowing waiver or an agreement not to assert moral rights under Section 5.1 a and b, the author may retain their moral rights over certain aspects of the Database.
139
+
140
+ Please note that some jurisdictions do not allow for the waiver of moral rights, and so moral rights may still subsist over the Database in some jurisdictions.
141
+
142
+ 6.0 FAIR DEALING, DATABASE EXCEPTIONS, AND OTHER RIGHTS NOT AFFECTED
143
+ 6.1 This License does not affect any rights that You or anyone else may independently have under any applicable law to make any use of this Database, including without limitation:
144
+
145
+ a. Exceptions to the Database Right including: Extraction of Contents from non-electronic Databases for private purposes, Extraction for purposes of illustration for teaching or scientific research, and Extraction or Re-utilisation for public security or an administrative or judicial procedure.
146
+
147
+ b. Fair dealing, fair use, or any other legally recognised limitation or exception to infringement of copyright or other applicable laws.
148
+
149
+ 6.2 This License does not affect any rights of lawful users to Extract and Re-utilise insubstantial parts of the Contents, evaluated quantitatively or qualitatively, for any purposes whatsoever, including creating a Derivative Database (subject to other rights over the Contents, see Section 2.4). The repeated and systematic Extraction or Re-utilisation of insubstantial parts of the Contents may however amount to the Extraction or Re-utilisation of a Substantial part of the Contents.
150
+
151
+ 7.0 WARRANTIES AND DISCLAIMER
152
+ 7.1 The Database is licensed by the Licensor “as is” and without any warranty of any kind, either express, implied, or arising by statute, custom, course of dealing, or trade usage. Licensor specifically disclaims any and all implied warranties or conditions of title, non-infringement, accuracy or completeness, the presence or absence of errors, fitness for a particular purpose, merchantability, or otherwise. Some jurisdictions do not allow the exclusion of implied warranties, so this exclusion may not apply to You.
153
+
154
+ 8.0 LIMITATION OF LIABILITY
155
+ 8.1 Subject to any liability that may not be excluded or limited by law, the Licensor is not liable for, and expressly excludes, all liability for loss or damage however and whenever caused to anyone by any use under this License, whether by You or by anyone else, and whether caused by any fault on the part of the Licensor or not. This exclusion of liability includes, but is not limited to, any special, incidental, consequential, punitive, or exemplary damages such as loss of revenue, data, anticipated profits, and lost business. This exclusion applies even if the Licensor has been advised of the possibility of such damages.
156
+
157
+ 8.2 If liability may not be excluded by law, it is limited to actual and direct financial loss to the extent it is caused by proved negligence on the part of the Licensor.
158
+
159
+ 9.0 TERMINATION OF YOUR RIGHTS UNDER THIS LICENSE
160
+ 9.1 Any breach by You of the terms and conditions of this License automatically terminates this License with immediate effect and without notice to You. For the avoidance of doubt, Persons who have received the Database, the whole or a Substantial part of the Contents, Derivative Databases, or the Database as part of a Collective Database from You under this License will not have their licenses terminated provided their use is in full compliance with this License or a license granted under Section 4.8 of this License. Sections 1, 2, 7, 8, 9 and 10 will survive any termination of this License.
161
+
162
+ 9.2 If You are not in breach of the terms of this License, the Licensor will not terminate Your rights under it.
163
+
164
+ 9.3 Unless terminated under Section 9.1, this License is granted to You for the duration of applicable rights in the Database.
165
+
166
+ 9.4 Reinstatement of rights. If you cease any breach of the terms and conditions of this License, then your full rights under this License will be reinstated:
167
+
168
+ a. Provisionally and subject to permanent termination until the 60th day after cessation of breach;
169
+
170
+ b. Permanently on the 60th day after cessation of breach unless otherwise reasonably notified by the Licensor; or
171
+
172
+ c. Permanently if reasonably notified by the Licensor of the violation, this is the first time You have received notice of violation of this License from the Licensor, and You cure the violation prior to 30 days after your receipt of the notice.
173
+
174
+ 9.5 Notwithstanding the above, Licensor reserves the right to release the Database under different license terms or to stop distributing or making available the Database. Releasing the Database under different license terms or stopping the distribution of the Database will not withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above.
175
+
176
+ 10.0 GENERAL
177
+ 10.1 If any provision of this License is held to be invalid or unenforceable, that must not affect the validity or enforceability of the remainder of the terms and conditions of this License and each remaining provision of this License shall be valid and enforced to the fullest extent permitted by law.
178
+
179
+ 10.2 This License is the entire agreement between the parties with respect to the rights granted here over the Database. It replaces any earlier understandings, agreements or representations with respect to the Database.
180
+
181
+ 10.3 If You are in breach of the terms of this License, You will not be entitled to rely on the terms of this License or to complain of any breach by the Licensor.
182
+
183
+ 10.4 Choice of law. This License takes effect in and will be governed by the laws of the relevant jurisdiction in which the License terms are sought to be enforced. If the standard suite of rights granted under applicable copyright law and Database Rights in the relevant jurisdiction includes additional rights not granted under this License, these additional rights are granted in this License in order to meet the terms of this License.
openthaigpt_Finetuning/openthaigpt-finetune/Dockerfile ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM nvidia/cuda:11.8.0-devel-ubuntu22.04
2
+
3
+ ARG DEBIAN_FRONTEND=noninteractive
4
+
5
+ RUN apt-get update && apt-get install -y \
6
+ git \
7
+ curl \
8
+ software-properties-common \
9
+ && add-apt-repository ppa:deadsnakes/ppa \
10
+ && apt install -y python3.10 \
11
+ && rm -rf /var/lib/apt/lists/*
12
+ WORKDIR /workspace
13
+ COPY requirements.txt requirements.txt
14
+ RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10 \
15
+ && python3.10 -m pip install -r requirements.txt \
16
+ && python3.10 -m pip install numpy --pre torch --force-reinstall --index-url https://download.pytorch.org/whl/nightly/cu118
17
+ COPY . .
18
+ ENTRYPOINT [ "python3.10"]
openthaigpt_Finetuning/openthaigpt-finetune/LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
openthaigpt_Finetuning/openthaigpt-finetune/README.md ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # 🇹🇭 OpenThaiGPT 1.0.0-beta
2
+ <img src="https://1173516064-files.gitbook.io/~/files/v0/b/gitbook-x-prod.appspot.com/o/spaces%2FvvbWvIIe82Iv1yHaDBC5%2Fuploads%2Fb8eiMDaqiEQL6ahbAY0h%2Fimage.png?alt=media&token=6fce78fd-2cca-4c0a-9648-bd5518e644ce" width="200px">
3
+
4
+ OpenThaiGPT Version 1.0.0-beta is a 7B-parameter LLaMA model finetuned to follow Thai translated instructions below and makes use of the Huggingface LLaMA implementation.
5
+
6
+ ## Support
7
+ - Official website: https://openthaigpt.aieat.or.th
8
+ - Facebook page: https://web.facebook.com/groups/openthaigpt
9
+ - A Discord server for discussion and support [here](https://discord.gg/rUTp6dfVUF)
10
+ - E-mail: [email protected]
11
+
12
+ ## License
13
+ - **Source Code**: Apache Software License 2.0.<br>
14
+ - **Weight**: For research use only (due to Facebook's LLaMA weight license).<br>
15
+ - <i>Note that: A commercial use license for OpenThaiGPT 0.1.0 weight will be released soon!</i>
16
+
17
+ ## Code and Weight
18
+
19
+ - **Library Code**: https://github.com/OpenThaiGPT/openthaigpt<br>
20
+ - **Finetune Code**: https://github.com/OpenThaiGPT/openthaigpt-finetune-010beta<br>
21
+ - **Weight**: https://huggingface.co/kobkrit/openthaigpt-0.1.0-beta
22
+
23
+ ## Sponsors
24
+ Pantip.com, ThaiSC<br>
25
+ <table>
26
+ <tr><td>
27
+ <img src="https://1173516064-files.gitbook.io/~/files/v0/b/gitbook-x-prod.appspot.com/o/spaces%2FvvbWvIIe82Iv1yHaDBC5%2Fuploads%2FiWjRxBQgo0HUDcpZKf6A%2Fimage.png?alt=media&token=4fef4517-0b4d-46d6-a5e3-25c30c8137a6" width="100px"></td><td>
28
+ <img src="https://1173516064-files.gitbook.io/~/files/v0/b/gitbook-x-prod.appspot.com/o/spaces%2FvvbWvIIe82Iv1yHaDBC5%2Fuploads%2Ft96uNUI71mAFwkXUtxQt%2Fimage.png?alt=media&token=f8057c0c-5c5f-41ac-bb4b-ad02ee3d4dc2" width="100px"></td>
29
+ </tr></table>
30
+
31
+ ### Powered by
32
+ OpenThaiGPT Volunteers, Artificial Intelligence Entrepreneur Association of Thailand (AIEAT), and Artificial Intelligence Association of Thailand (AIAT)
33
+
34
+ <table>
35
+ <tr>
36
+ <td>
37
+ <img src="https://1173516064-files.gitbook.io/~/files/v0/b/gitbook-x-prod.appspot.com/o/spaces%2FvvbWvIIe82Iv1yHaDBC5%2Fuploads%2F6yWPXxdoW76a4UBsM8lw%2Fimage.png?alt=media&token=1006ee8e-5327-4bc0-b9a9-a02e93b0c032" width="100px"></td><td><img src="https://1173516064-files.gitbook.io/~/files/v0/b/gitbook-x-prod.appspot.com/o/spaces%2FvvbWvIIe82Iv1yHaDBC5%2Fuploads%2FBwsmSovEIhW9AEOlHTFU%2Fimage.png?alt=media&token=5b550289-e9e2-44b3-bb8f-d3057d74f247" width="100px"></td></tr></table>
38
+
39
+ ### Authors
40
+ Kobkrit Viriyayudhakorn ([email protected]), Sumeth Yuenyong ([email protected]) and Thaweewat Ruksujarit ([email protected]).
41
+
42
+ <i>Disclaimer: Provided responses are not guaranteed.</i>
43
+
44
+ ### Local Setup
45
+
46
+ 1. Install dependencies
47
+
48
+ ```bash
49
+ pip install -r requirements.txt
50
+ ```
51
+
52
+ 1. If bitsandbytes doesn't work, [install it from source.](https://github.com/TimDettmers/bitsandbytes/blob/main/compile_from_source.md) Windows users can follow [these instructions](https://github.com/tloen/alpaca-lora/issues/17).
53
+
54
+ ### Training (`finetune.py`)
55
+
56
+ This file contains a straightforward application of PEFT to the LLaMA model,
57
+ as well as some code related to prompt construction and tokenization.
58
+ PRs adapting this code to support larger models are always welcome.
59
+
60
+ Example usage:
61
+
62
+ ```bash
63
+ python finetune.py \
64
+ --base_model 'decapoda-research/llama-7b-hf' \
65
+ --data_path 'Thaweewat/alpaca-cleaned-52k-th' \
66
+ --output_dir './openthaigpt-010-beta'
67
+ ```
68
+
69
+ We can also tweak our hyperparameters:
70
+
71
+ ```bash
72
+ python finetune.py \
73
+ --base_model 'decapoda-research/llama-7b-hf' \
74
+ --data_path 'Thaweewat/alpaca-cleaned-52k-th' \
75
+ --output_dir './openthaigpt-010-beta' \
76
+ --batch_size 128 \
77
+ --micro_batch_size 4 \
78
+ --num_epochs 3 \
79
+ --learning_rate 1e-4 \
80
+ --cutoff_len 512 \
81
+ --val_set_size 2000 \
82
+ --lora_r 8 \
83
+ --lora_alpha 16 \
84
+ --lora_dropout 0.05 \
85
+ --lora_target_modules '[q_proj,v_proj]' \
86
+ --train_on_inputs \
87
+ --group_by_length
88
+ ```
89
+
90
+ ### Inference (`generate.py`)
91
+
92
+ This file reads the foundation model from the Hugging Face model hub and the LoRA weights from `kobkrit/openthaigpt-0.1.0-beta`, and runs a Gradio interface for inference on a specified input. Users should treat this as example code for the use of the model, and modify it as needed.
93
+
94
+ Example usage:
95
+
96
+ ```bash
97
+ python generate.py \
98
+ --load_8bit \
99
+ --base_model 'decapoda-research/llama-7b-hf' \
100
+ --lora_weights 'kobkrit/openthaigpt-0.1.0-beta'
101
+ ```
102
+
103
+ ### Official weights
104
+
105
+ The most recent "official" OpenThaiGPT 0.1.0-beta adapter available at [`kobkrit/openthaigpt-0.1.0-beta`](https://huggingface.co/kobkrit/openthaigpt-0.1.0-beta) was trained on May 13 with the following command:
106
+
107
+ ```bash
108
+ python finetune.py \
109
+ --base_model='decapoda-research/llama-7b-hf' \
110
+ --data_path '../datasets/cleaned' \
111
+ --num_epochs=3 \
112
+ --cutoff_len=2048 \
113
+ --group_by_length \
114
+ --output_dir='./openthaigpt-010-beta' \
115
+ --lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' \
116
+ --lora_r=64 \
117
+ --batch_size=64 \
118
+ --micro_batch_size=4
119
+ ```
120
+
121
+ ### Checkpoint export (`export_*_checkpoint.py`)
122
+
123
+ These files contain scripts that merge the LoRA weights back into the base model
124
+ for export to Hugging Face format and to PyTorch `state_dicts`.
125
+ They should help users
126
+ who want to run inference in projects like [llama.cpp](https://github.com/ggerganov/llama.cpp)
127
+ or [alpaca.cpp](https://github.com/antimatter15/alpaca.cpp).
128
+
129
+ ### Docker Setup & Inference
130
+
131
+ 1. Build the container image:
132
+
133
+ ```bash
134
+ docker build -t openthaigpt-finetune-010beta .
135
+ ```
136
+
137
+ 2. Run the container (you can also use `finetune.py` and all of its parameters as shown above for training):
138
+
139
+ ```bash
140
+ docker run --gpus=all --shm-size 64g -p 7860:7860 -v ${HOME}/.cache:/root/.cache --rm openthaigpt-finetune-010beta generate.py \
141
+ --load_8bit \
142
+ --base_model 'decapoda-research/llama-7b-hf' \
143
+ --lora_weights 'kobkrit/openthaigpt-0.1.0-beta'
144
+ ```
145
+
146
+ 3. Open `http://localhost:7860` in the browser
147
+
148
+ ### Docker Compose Setup & Inference
149
+
150
+ 1. (optional) Change desired model and weights under `environment` in the `docker-compose.yml`
151
+
152
+ 2. Build and run the container
153
+
154
+ ```bash
155
+ docker-compose up -d --build
156
+ ```
157
+
158
+ 3. Open `http://localhost:7860` in the browser
159
+
160
+ 4. See logs:
161
+
162
+ ```bash
163
+ docker-compose logs -f
164
+ ```
165
+
166
+ 5. Clean everything up:
167
+
168
+ ```bash
169
+ docker-compose down --volumes --rmi all
170
+ ```
171
+
openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2eddafc6b977608d778aaab8dfc7e50e547b3af9826dfb9e909d9fc362e4a419
3
+ size 22773992
openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data_cleaned_archive.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cdbc6fff26e130b521edd96995d9f3b391622d532b2b6822e601fcb7aa42ed7a
3
+ size 22680910
openthaigpt_Finetuning/openthaigpt-finetune/alpaca_data_gpt4.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0bd4f62585d494b794deb043ce0baddfec02f27696857c57c9c238d6eff35a18
3
+ size 43379276
openthaigpt_Finetuning/openthaigpt-finetune/data/kumpun.jsonl ADDED
@@ -0,0 +1,199 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"input":"\u0e04\u0e33\u0e1c\u0e27\u0e19","output":"\u0e04\u0e27\u0e19\u0e1c\u0e33","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
2
+ {"input":"\u0e19\u0e2d\u0e19\u0e41\u0e25\u0e49\u0e27","output":"\u0e41\u0e19\u0e27\u0e25\u0e49\u0e2d\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
3
+ {"input":"\u0e15\u0e30\u0e1b\u0e39","output":"\u0e15\u0e39\u0e1b\u0e30","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
4
+ {"input":"\u0e19\u0e31\u0e01\u0e40\u0e23\u0e35\u0e22\u0e19","output":"\u0e40\u0e19\u0e35\u0e22\u0e19\u0e23\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
5
+ {"input":"\u0e02\u0e19\u0e21","output":"\u0e02\u0e21\u0e2b\u0e19\u0e30","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
6
+ {"input":"\u0e40\u0e23\u0e2d\u0e17\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e40\u0e17\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
7
+ {"input":"\u0e25\u0e2d\u0e07\u0e14\u0e39","output":"\u0e25\u0e39\u0e14\u0e2d\u0e07","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
8
+ {"input":"\u0e40\u0e08\u0e2d\u0e1e\u0e35\u0e48","output":"\u0e08\u0e35\u0e40\u0e1e\u0e48\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
9
+ {"input":"\u0e40\u0e23\u0e2d\u0e21\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e40\u0e21\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
10
+ {"input":"\u0e2d\u0e32\u0e44\u0e1a\u0e49","output":"\u0e44\u0e2d\u0e1a\u0e49\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
11
+ {"input":"\u0e2b\u0e34\u0e27\u0e02\u0e49\u0e32\u0e27","output":"\u0e2b\u0e32\u0e27\u0e02\u0e34\u0e49\u0e27","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
12
+ {"input":"\u0e01\u0e30\u0e2b\u0e25\u0e48\u0e33","output":"\u0e01\u0e48\u0e33\u0e2b\u0e25\u0e30","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
13
+ {"input":"\u0e40\u0e08\u0e2d\u0e2b\u0e21\u0e36\u0e01","output":"\u0e08\u0e36\u0e01\u0e40\u0e21\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
14
+ {"input":"\u0e21\u0e30\u0e19\u0e32\u0e27\u0e15\u0e48\u0e32\u0e07\u0e14\u0e38\u0e4a\u0e14","output":"\u0e21\u0e30\u0e19\u0e38\u0e14\u0e15\u0e48\u0e32\u0e07\u0e14\u0e32\u0e27","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
15
+ {"input":"\u0e01\u0e32\u0e40\u0e1b\u0e47\u0e19\u0e2b\u0e21\u0e39","output":"\u0e01\u0e39\u0e40\u0e1b\u0e47\u0e19\u0e2b\u0e21\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
16
+ {"input":"\u0e01\u0e49\u0e32\u0e07\u0e43\u0e2b\u0e0d\u0e48","output":"\u0e43\u0e01\u0e49\u0e2b\u0e0d\u0e48\u0e32\u0e07","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
17
+ {"input":"\u0e2d\u0e30\u0e2b\u0e23\u0e35\u0e48\u0e14\u0e2d\u0e22","output":"\u0e2d\u0e30\u0e2b\u0e23\u0e48\u0e2d\u0e22\u0e14\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
18
+ {"input":"\u0e19\u0e2d\u0e19\u0e41\u0e25\u0e49\u0e27","output":"\u0e41\u0e19\u0e27\u0e25\u0e49\u0e2d\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
19
+ {"input":"\u0e15\u0e30\u0e1b\u0e39","output":"\u0e15\u0e39\u0e1b\u0e30","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
20
+ {"input":"\u0e19\u0e31\u0e01\u0e40\u0e23\u0e35\u0e22\u0e19","output":"\u0e40\u0e19\u0e35\u0e22\u0e19\u0e23\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
21
+ {"input":"\u0e02\u0e19\u0e21","output":"\u0e02\u0e21\u0e2b\u0e19\u0e30","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
22
+ {"input":"\u0e40\u0e23\u0e2d\u0e17\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e40\u0e17\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
23
+ {"input":"\u0e2a\u0e27\u0e31\u0e2a\u0e14\u0e35","output":"\u0e2a\u0e30\u0e27\u0e35\u0e14\u0e31\u0e2a","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
24
+ {"input":"\u0e40\u0e08\u0e2d\u0e2d\u0e22\u0e39\u0e48\u0e17\u0e35\u0e48\u0e44\u0e17\u0e22","output":"\u0e43\u0e08\u0e2d\u0e22\u0e39\u0e48\u0e17\u0e35\u0e48\u0e40\u0e18\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
25
+ {"input":"\u0e40\u0e0a\u0e35\u0e48\u0e22\u0e40\u0e18\u0e2d\u0e19\u0e30\u0e19\u0e2d\u0e1a","output":"\u0e0a\u0e2d\u0e1a\u0e40\u0e18\u0e2d\u0e19\u0e30\u0e40\u0e19\u0e35\u0e48\u0e22\n\n","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
26
+ {"input":"\u0e41\u0e2a\u0e07\u0e14\u0e35","output":"\u0e2a\u0e35\u0e41\u0e14\u0e07","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
27
+ {"input":"\u0e41\u0e0a\u0e48\u0e07\u0e21\u0e31\u0e48\u0e07","output":"\u0e0a\u0e31\u0e48\u0e07\u0e41\u0e21\u0e48\u0e07","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
28
+ {"input":"\u0e19\u0e32\u0e22\u0e2b","output":"\u0e19\u0e01\u0e22\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
29
+ {"input":"\u0e04\u0e31\u0e48\u0e19\u0e01\u0e39","output":"\u0e04\u0e39\u0e48\u0e01\u0e31\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
30
+ {"input":"\u0e04\u0e31\u0e48\u0e19\u0e01\u0e25\u0e32\u0e07","output":"\u0e02\u0e49\u0e32\u0e07\u0e01\u0e31\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
31
+ {"input":"\u0e14\u0e34\u0e2a\u0e01\u0e27\u0e48\u0e32\u0e17\u0e35\u0e48\u0e04\u0e35\u0e22\u0e4c","output":"\u0e14\u0e35\u0e01\u0e27\u0e48\u0e32\u0e17\u0e35\u0e48\u0e04\u0e34\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
32
+ {"input":"\u0e25\u0e49\u0e32\u0e40\u0e18\u0e2d\u0e19\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e40\u0e18\u0e2d\u0e19\u0e49\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
33
+ {"input":"\u0e40\u0e23\u0e2d\u0e41\u0e15\u0e48\u0e17\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e41\u0e15\u0e48\u0e40\u0e18\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
34
+ {"input":"\u0e40\u0e1a\u0e25\u0e2d\u0e27\u0e48\u0e32\u0e0a\u0e2d\u0e1a\u0e41\u0e16\u0e1a","output":"\u0e41\u0e1a\u0e1a\u0e27\u0e48\u0e32\u0e0a\u0e2d\u0e1a\u0e40\u0e18\u0e2d\n\n","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
35
+ {"input":"\u0e40\u0e21\u0e32\u0e40\u0e1b\u0e47\u0e19\u0e41\u0e1f\u0e19\u0e23\u0e32","output":"\u0e21\u0e32\u0e40\u0e1b\u0e47\u0e19\u0e41\u0e1f\u0e19\u0e40\u0e23\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
36
+ {"input":"\u0e44\u0e2b\u0e25\u0e48\u0e42\u0e14\u0e19\u0e08\u0e2d","output":"\u0e2b\u0e25\u0e48\u0e2d\u0e42\u0e14\u0e19\u0e43\u0e08","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
37
+ {"input":"\u0e40\u0e08\u0e2d\u0e25\u0e30\u0e44\u0e21","output":"\u0e43\u0e08\u0e25\u0e30\u0e40\u0e21\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
38
+ {"input":"\u0e1f\u0e31\u0e19\u0e41\u0e01\u0e19","output":"\u0e41\u0e1f\u0e19\u0e01\u0e31\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
39
+ {"input":"\u0e0a\u0e39\u0e49\u0e41\u0e15\u0e48\u0e40\u0e18\u0e2d\u0e44\u0e21\u0e48\u0e23\u0e2d\u0e1a","output":"\u0e0a\u0e2d\u0e1a\u0e41\u0e15\u0e48\u0e40\u0e18\u0e2d\u0e44\u0e21\u0e48\u0e23\u0e39\u0e49","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
40
+ {"input":"\u0e41\u0e02\u0e19\u0e40\u0e1b\u0e47\u0e19\u0e1f\u0e2d","output":"\u0e02\u0e2d\u0e40\u0e1b\u0e47\u0e19\u0e41\u0e1f\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
41
+ {"input":"\u0e44\u0e02\u0e49\u0e17\u0e35\u0e48\u0e0a\u0e25","output":"\u0e04\u0e19\u0e17\u0e35\u0e48\u0e43\u0e0a\u0e48","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
42
+ {"input":"\u0e14\u0e2d\u0e22\u0e2b\u0e21\u0e2d\n","output":"\u0e14\u0e2d\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
43
+ {"input":"\u0e14\u0e2d\u0e22\u0e15\u0e34\u0e14\u0e2b\u0e21\u0e2d","output":"\u0e14\u0e2d\u0e15\u0e34\u0e14\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
44
+ {"input":"\u0e14\u0e2d\u0e22\u0e19\u0e35\u0e49\u0e21\u0e35\u0e41\u0e15\u0e48\u0e2b\u0e21\u0e2d","output":"\u0e14\u0e2d\u0e19\u0e35\u0e49\u0e21\u0e35\u0e41\u0e15\u0e48\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
45
+ {"input":"\u0e14\u0e2d\u0e22\u0e2b\u0e21\u0e39","output":"\u0e14\u0e39\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
46
+ {"input":"\u0e14\u0e2d\u0e22\u0e2b\u0e21\u0e36\u0e07","output":"\u0e14\u0e36\u0e07\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
47
+ {"input":"\u0e14\u0e2d\u0e22\u0e2b\u0e36\u0e07","output":"\u0e14\u0e36\u0e07\u0e2b\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
48
+ {"input":"\u0e14\u0e2d\u0e22\u0e44\u0e2b\u0e21","output":"\u0e44\u0e14\u0e23\u0e4c\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
49
+ {"input":"\u0e14\u0e2d\u0e22\u0e2b\u0e21","output":"\u0e14\u0e21\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
50
+ {"input":"\u0e04\u0e19\u0e25\u0e48\u0e2d\u0e07\u0e2b\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e25\u0e48\u0e2d\u0e07\u0e2b\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
51
+ {"input":"\u0e2b\u0e21\u0e35\u0e08\u0e31\u0e1a\u0e2b\u0e27\u0e22","output":"\u0e2b\u0e21\u0e27\u0e22\u0e08\u0e31\u0e1a\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
52
+ {"input":"\u0e2b\u0e21\u0e35\u0e40\u0e25\u0e48\u0e19\u0e2b\u0e27\u0e22","output":"\u0e2b\u0e21\u0e27\u0e22\u0e40\u0e25\u0e48\u0e19\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
53
+ {"input":"\u0e2b\u0e21\u0e35\u0e04\u0e25\u0e33\u0e2b\u0e27\u0e22","output":"\u0e2b\u0e21\u0e27\u0e22\u0e04\u0e25\u0e33\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
54
+ {"input":"\u0e2b\u0e21\u0e35\u0e02\u0e32\u0e22\u0e2b\u0e27\u0e22","output":"\u0e2b\u0e21\u0e27\u0e22\u0e02\u0e32\u0e22\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
55
+ {"input":"\u0e2b\u0e27\u0e22\u0e25\u0e33\u0e40\u0e04\u0e47\u0e0d","output":"\u0e40\u0e2b\u0e21\u0e47\u0e19\u0e25\u0e33\u0e04\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
56
+ {"input":"\u0e2b\u0e27\u0e22\u0e40\u0e04\u0e19","output":"\u0e40\u0e2b\u0e47\u0e19\u0e04\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
57
+ {"input":"\u0e2b\u0e27\u0e22\u0e2d\u0e32\u0e2b\u0e21\u0e35\n\n","output":"\u0e2b\u0e35\u0e2d\u0e32\u0e2b\u0e21\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
58
+ {"input":"\u0e2b\u0e27\u0e22\u0e40\u0e21\u0e32\u0e04\u0e25\u0e35","output":"\u0e2b\u0e35\u0e40\u0e21\u0e32\u0e04\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
59
+ {"input":"\u0e2b\u0e27\u0e22\u0e2a\u0e35","output":"\u0e2b\u0e35\u0e2a\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
60
+ {"input":"\u0e2a\u0e35\u0e40\u0e2b\u0e35\u0e22\u0e27","output":"\u0e40\u0e2a\u0e35\u0e22\u0e27\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
61
+ {"input":"\u0e2b\u0e27\u0e22\u0e04\u0e25\u0e38\u0e01\u0e04\u0e25\u0e35\n","output":"\u0e2b\u0e35\u0e04\u0e25\u0e38\u0e01\u0e04\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
62
+ {"input":"\u0e04\u0e35\u0e22\u0e4c\u0e41\u0e17\u0e07\u0e2b\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e41\u0e17\u0e07\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
63
+ {"input":"\u0e2b\u0e27\u0e22\u0e42\u0e04","output":"\u0e42\u0e2b\u0e04\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
64
+ {"input":"\u0e2b\u0e27\u0e22\u0e04\u0e32\u0e23\u0e4c","output":"\u0e2b\u0e32\u0e04\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
65
+ {"input":"\u0e2b\u0e27\u0e22\u0e0b\u0e35","output":"\u0e2b\u0e35\u0e0b\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
66
+ {"input":"\u0e42\u0e04\u0e23\u0e21\u0e2b\u0e31\u0e01\u0e2b\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e2b\u0e31\u0e01\u0e42\u0e2b\u0e21","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
67
+ {"input":"\u0e1c\u0e35\u0e01\u0e32\u0e25\u0e01\u0e34\u0e13\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e01\u0e32\u0e25\u0e01\u0e34\u0e13\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
68
+ {"input":"\u0e1c\u0e35\u0e08\u0e31\u0e1a\u0e2b\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e08\u0e31\u0e1a\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
69
+ {"input":"\u0e1c\u0e35\u0e44\u0e23\u0e49\u0e2b\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e44\u0e23\u0e49\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
70
+ {"input":"\u0e1c\u0e35\u0e44\u0e21\u0e48\u0e21\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e44\u0e21\u0e48\u0e21\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
71
+ {"input":"\u0e1c\u0e35\u0e40\u0e2b\u0e32","output":"\u0e40\u0e1c\u0e32\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
72
+ {"input":"\u0e1c\u0e35\u0e40\u0e2d\u0e32\u0e2e\u0e32","output":"\u0e1e\u0e32\u0e40\u0e2d\u0e32\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
73
+ {"input":"\u0e1c\u0e35\u0e2b\u0e48\u0e32\u0e19","output":"\u0e1c\u0e48\u0e32\u0e19\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
74
+ {"input":"\u0e1c\u0e35\u0e44\u0e23\u0e49\u0e2b\u0e49\u0e2d\u0e07","output":"\u0e1e\u0e48\u0e2d\u0e07\u0e44\u0e23\u0e49\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
75
+ {"input":"\u0e1c\u0e35\u0e1a\u0e31\u0e07\u0e2b\u0e49\u0e32","output":"\u0e1c\u0e49\u0e32\u0e1a\u0e31\u0e07\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
76
+ {"input":"\u0e1c\u0e35\u0e40\u0e2b\u0e47\u0e14","output":"\u0e40\u0e1c\u0e47\u0e14\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
77
+ {"input":"\u0e1c\u0e35\u0e40\u0e25\u0e48\u0e19\u0e2e\u0e2d\u0e19","output":"\u0e1e\u0e23\u0e40\u0e25\u0e48\u0e19\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
78
+ {"input":"\u0e1c\u0e35\u0e01\u0e31\u0e1a\u0e40\u0e2e\u0e35\u0e22","output":"\u0e40\u0e1e\u0e25\u0e35\u0e22\u0e01\u0e31\u0e1a\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
79
+ {"input":"\u0e1c\u0e35\u0e44\u0e21\u0e48\u0e2a\u0e19\u0e2b\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e44\u0e21\u0e48\u0e2a\u0e19\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
80
+ {"input":"\u0e43\u0e0a\u0e49\u0e2b\u0e31\u0e27\u0e08\u0e33","output":"\u0e0a\u0e49\u0e33\u0e2b\u0e31\u0e27\u0e43\u0e08","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
81
+ {"input":"\u0e40\u0e2d\u0e14\u0e2a\u0e4c\u0e22\u0e28","output":"\u0e2d\u0e14\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
82
+ {"input":"\u0e40\u0e2b\u0e47\u0e19\u0e2b\u0e21\u0e35","output":"\u0e2b\u0e35\u0e40\u0e2b\u0e21\u0e47\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
83
+ {"input":"\u0e40\u0e04\u0e49\u0e01\u0e44\u0e21\u0e48\u0e23\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e44\u0e21\u0e48\u0e40\u0e25\u0e47\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
84
+ {"input":"\u0e44\u0e02\u0e48\u0e1e\u0e32\u0e14\u0e23\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e1e\u0e32\u0e14\u0e44\u0e2b\u0e25\u0e48","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
85
+ {"input":"\u0e40\u0e04\u0e49\u0e01\u0e23\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e40\u0e25\u0e47\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
86
+ {"input":"\u0e04\u0e23\u0e01\u0e0b\u0e01\u0e21\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e0b\u0e01\u0e21\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
87
+ {"input":"\u0e42\u0e04\u0e49\u0e01\u0e41\u0e02\u0e47\u0e07\u0e1b\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e41\u0e02\u0e47\u0e07\u0e42\u0e1b\u0e4a\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
88
+ {"input":"\u0e2b\u0e39\u0e44\u0e21\u0e48\u0e19\u0e48\u0e32\u0e14\u0e35","output":"\u0e2b\u0e35\u0e44\u0e21\u0e48\u0e19\u0e48\u0e32\u0e14\u0e39","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
89
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e22\u0e31\u0e01\u0e29\u0e4c","output":"\u0e1e\u0e31\u0e01\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
90
+ {"input":"\u0e22\u0e31\u0e01\u0e29\u0e4c\u0e44\u0e21\u0e48\u0e21\u0e35\u0e40\u0e1e\u0e0a\u0e23","output":"\u0e40\u0e22\u0e47\u0e14\u0e44\u0e21\u0e48\u0e21\u0e35\u0e1e\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
91
+ {"input":"\u0e41\u0e1a\u0e01\u0e44\u0e2b\u0e02\u0e36\u0e49\u0e19\u0e23\u0e16\u0e1f\u0e23\u0e35","output":"\u0e41\u0e1a\u0e01\u0e2b\u0e35\u0e02\u0e36\u0e49\u0e19\u0e23\u0e16\u0e44\u0e1f","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
92
+ {"input":"\u0e2d\u0e38\u0e1b\u0e2a\u0e21\u0e1a\u0e17","output":"\u0e2d\u0e14\u0e1c\u0e2a\u0e21\u0e1a\u0e38\u0e15\u0e23","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
93
+ {"input":"\u0e2a\u0e39\u0e14\u0e23\u0e39\u0e40\u0e15\u0e35\u0e22\u0e27","output":"\u0e40\u0e2a\u0e35\u0e22\u0e27\u0e23\u0e39\u0e15\u0e39","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
94
+ {"input":"\u0e0a\u0e32\u0e07\u0e40\u0e2b\u0e21\u0e35\u0e22\u0e07","output":"\u0e40\u0e0a\u0e35\u0e22\u0e07\u0e43\u0e2b\u0e21\u0e48","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
95
+ {"input":"\u0e0a\u0e32\u0e22\u0e40\u0e23\u0e35\u0e22\u0e07","output":"\u0e40\u0e0a\u0e35\u0e22\u0e07\u0e23\u0e32\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
96
+ {"input":null,"output":null,"instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
97
+ {"input":"\u0e40\u0e1e\u0e23\u0e0a\u0e22\u0e38\u0e48\u0e07","output":"\u0e1e\u0e38\u0e48\u0e07\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
98
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e01\u0e23\u0e30\u0e44\u0e14\u0e22\u0e34\u0e07","output":"\u0e1e\u0e34\u0e07\u0e01\u0e23\u0e30\u0e44\u0e14\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
99
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e21\u0e32\u0e22\u0e32","output":"\u0e1e\u0e32\u0e21\u0e32\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
100
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e01\u0e31\u0e19\u0e22\u0e32","output":"\u0e1e\u0e32\u0e01\u0e31\u0e19\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
101
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e01\u0e31\u0e19\u0e2b\u0e22\u0e31\u0e14","output":"\u0e1c\u0e25\u0e31\u0e14\u0e01\u0e31\u0e19\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
102
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e01\u0e33\u0e41\u0e1e\u0e07\u0e22\u0e34\u0e07","output":"\u0e1e\u0e34\u0e07\u0e01\u0e33\u0e41\u0e1e\u0e07\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
103
+ {"input":"\u0e22\u0e31\u0e01\u0e44\u0e21\u0e48\u0e21\u0e35\u0e40\u0e1e\u0e0a\u0e23","output":"\u0e40\u0e22\u0e47\u0e14\u0e44\u0e21\u0e48\u0e21\u0e35\u0e1e\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
104
+ {"input":"\u0e22\u0e31\u0e01\u0e44\u0e21\u0e48\u0e43\u0e2b\u0e49\u0e40\u0e1e\u0e0a\u0e23","output":"\u0e40\u0e22\u0e47\u0e14\u0e44\u0e21\u0e48\u0e43\u0e2b\u0e49\u0e1e\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
105
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e22\u0e49\u0e2d\u0e21","output":"\u0e1e\u0e23\u0e49\u0e2d\u0e21\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
106
+ {"input":"\u0e1e\u0e32\u0e40\u0e1e\u0e0a\u0e23\u0e21\u0e32\u0e22\u0e30","output":"\u0e1e\u0e32\u0e1e\u0e23\u0e30\u0e21\u0e32\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
107
+ {"input":"\u0e1e\u0e32\u0e40\u0e1e\u0e0a\u0e23\u0e21\u0e32\u0e41\u0e22\u0e30","output":"\u0e1e\u0e32\u0e41\u0e1e\u0e30\u0e21\u0e32\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
108
+ {"input":"\u0e40\u0e1e\u0e0a\u0e23\u0e08\u0e31\u0e1a\u0e22\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e08\u0e31\u0e1a\u0e40\u0e22\u0e47\u0e1a","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
109
+ {"input":"\u0e2a\u0e35\u0e41\u0e2b\u0e1a","output":"\u0e41\u0e2a\u0e1a\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
110
+ {"input":"\u0e2a\u0e35\u0e40\u0e2b\u0e35\u0e22\u0e27","output":"\u0e40\u0e2a\u0e35\u0e22\u0e27\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
111
+ {"input":"\u0e2a\u0e35\u0e14\u0e39\u0e2b\u0e38\u0e49\u0e21","output":"\u0e0b\u0e38\u0e49\u0e21\u0e14\u0e39\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
112
+ {"input":"\u0e2a\u0e35\u0e40\u0e2e\u0e35\u0e48\u0e22\u0e19","output":"\u0e40\u0e2a\u0e35\u0e49\u0e22\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
113
+ {"input":"\u0e2a\u0e35\u0e40\u0e2b\u0e35\u0e22\u0e1a","output":"\u0e40\u0e2a\u0e35\u0e22\u0e1a\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
114
+ {"input":"\u0e2a\u0e35\u0e48\u0e2b\u0e32\u0e22","output":"\u0e2a\u0e32\u0e22\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
115
+ {"input":"\u0e2a\u0e35\u0e2b\u0e31\u0e01","output":"\u0e2a\u0e31\u0e01\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
116
+ {"input":"\u0e2b\u0e21\u0e2d\u0e22\u0e39","output":"\u0e2b\u0e21\u0e39\u0e22\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
117
+ {"input":"\u0e16\u0e32\u0e27\u0e1d\u0e31\u0e01\u0e22\u0e31\u0e27","output":"\u0e16\u0e31\u0e48\u0e27\u0e1d\u0e31\u0e01\u0e22\u0e32\u0e27","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
118
+ {"input":"\u0e44\u0e01\u0e1b\u0e39","output":"\u0e01\u0e39\u0e44\u0e1b","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
119
+ {"input":"\u0e04\u0e34\u0e07\u0e23\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e25\u0e34\u0e07","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
120
+ {"input":"\u0e21\u0e27\u0e22\u0e42\u0e04\u0e4a\u0e01","output":"\u0e42\u0e21\u0e4a\u0e01\u0e04\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
121
+ {"input":"\u0e2b\u0e21\u0e39\u0e01\u0e30\u0e42\u0e08\u0e4a\u0e01","output":"\u0e42\u0e21\u0e4a\u0e01\u0e01\u0e30\u0e08\u0e39\u0e4b","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
122
+ {"input":"\u0e02\u0e27\u0e14\u0e15\u0e33\u0e23\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e15\u0e33\u0e23\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
123
+ {"input":"\u0e25\u0e35\u0e40\u0e2b\u0e35\u0e22","output":"\u0e40\u0e25\u0e35\u0e22\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
124
+ {"input":"\u0e1f\u0e39\u0e22\u0e31\u0e01","output":"\u0e1f\u0e31\u0e01\u0e22\u0e39","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
125
+ {"input":"\u0e1c\u0e35\u0e14\u0e39\u0e2b\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e14\u0e39\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
126
+ {"input":"\u0e1e\u0e23\u0e21\u0e25\u0e31\u0e14","output":"\u0e1e\u0e31\u0e14\u0e25\u0e21","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
127
+ {"input":"\u0e40\u0e0a\u0e35\u0e48\u0e22\u0e19\u0e30\u0e19\u0e2d\u0e1a","output":"\u0e0a\u0e2d\u0e1a\u0e19\u0e30\u0e40\u0e19\u0e35\u0e49\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
128
+ {"input":"\u0e02\u0e32\u0e14\u0e19\u0e49\u0e27\u0e21","output":"\u0e02\u0e27\u0e14\u0e19\u0e49\u0e33","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
129
+ {"input":"\u0e16\u0e2d\u0e22\u0e2b\u0e21\u0e2d\u0e19","output":"\u0e16\u0e2d\u0e19\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
130
+ {"input":"\u0e2b\u0e21\u0e2d\u0e2d\u0e49\u0e2d\u0e22","output":"\u0e2b\u0e21\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
131
+ {"input":"\u0e2b\u0e21\u0e32\u0e01\u0e23\u0e30\u0e17\u0e38","output":"\u0e2b\u0e21\u0e38\u0e01\u0e23\u0e30\u0e17\u0e30","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
132
+ {"input":"\u0e41\u0e15\u0e49\u0e0a\u0e39\u0e49","output":"\u0e15\u0e39\u0e49\u0e41\u0e0a\u0e48","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
133
+ {"input":"\u0e25\u0e34\u0e49\u0e19\u0e08\u0e35\u0e48\u0e2b\u0e19\u0e49\u0e32\u0e2b\u0e2d","output":"\u0e25\u0e34\u0e49\u0e19\u0e08\u0e2d\u0e2b\u0e19\u0e49\u0e32\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
134
+ {"input":"\u0e16\u0e2d\u0e01\u0e23\u0e30\u0e14\u0e2d\u0e01","output":"\u0e16\u0e2d\u0e01\u0e01\u0e23\u0e30\u0e14\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
135
+ {"input":"\u0e01\u0e1a\u0e42\u0e14\u0e19\u0e15\u0e49\u0e21","output":"\u0e01\u0e49\u0e21\u0e42\u0e14\u0e19\u0e15\u0e1a","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
136
+ {"input":"\u0e2b\u0e21\u0e2d\u0e2d\u0e22\u0e39\u0e48\u0e1a\u0e19\u0e14\u0e2d\u0e22","output":"\u0e2b\u0e21\u0e2d\u0e22\u0e2d\u0e22\u0e39\u0e48\u0e1a\u0e19\u0e14\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
137
+ {"input":null,"output":null,"instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
138
+ {"input":"\u0e40\u0e08\u0e47\u0e14\u0e22\u0e31\u0e1a","output":"\u0e08\u0e31\u0e1a\u0e40\u0e22\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
139
+ {"input":"\u0e22\u0e39\u0e1b\u0e23\u0e30\u0e14\u0e38\u0e01\u0e1f\u0e31\u0e21","output":"\u0e22\u0e33\u0e1b\u0e25\u0e32\u0e14\u0e38\u0e01\u0e1f\u0e39","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
140
+ {"input":"\u0e22\u0e33\u0e1b\u0e25\u0e32\u0e08\u0e32\u0e23\u0e32\u0e40\u0e21\u0e47\u0e14","output":"\u0e40\u0e22\u0e47\u0e14\u0e1b\u0e25\u0e32\u0e08\u0e32\u0e23\u0e32\u0e21\u0e31\u0e21","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
141
+ {"input":null,"output":null,"instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
142
+ {"input":"\u0e1e\u0e32\u0e22\u0e21\u0e36\u0e07\u0e15\u0e49\u0e2d","output":"\u0e1e\u0e48\u0e2d\u0e21\u0e36\u0e07\u0e15\u0e32\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
143
+ {"input":"\u0e40\u0e1b\u0e34\u0e14\u0e2b\u0e39\u0e25\u0e30\u0e2b\u0e19\u0e49\u0e32\u0e14\u0e35","output":"\u0e40\u0e1b\u0e34\u0e14\u0e2b\u0e35\u0e25\u0e30\u0e19\u0e48\u0e32\u0e14\u0e39","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
144
+ {"input":null,"output":null,"instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
145
+ {"input":"\u0e21\u0e49\u0e32\u0e19\u0e32\u0e07\u0e1d\u0e2d\u0e22","output":"\u0e2b\u0e21\u0e2d\u0e22\u0e19\u0e32\u0e07\u0e1f\u0e49\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
146
+ {"input":"\u0e08\u0e32\u0e19\u0e1e\u0e22\u0e32\u0e1a\u0e34\u0e21","output":"\u0e08\u0e34\u0e4b\u0e21\u0e1e\u0e32\u0e22\u0e32\u0e1a\u0e32\u0e25","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
147
+ {"input":"\u0e04\u0e2d\u0e2b\u0e21\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e2b\u0e21\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
148
+ {"input":"\u0e44\u0e2b\u0e21\u0e40\u0e2a\u0e49\u0e19\u0e2b\u0e22\u0e48\u0e2d\u0e22\u0e46","output":"\u0e2b\u0e21\u0e2d\u0e22\u0e40\u0e2a\u0e49\u0e19\u0e43\u0e2b\u0e0d\u0e48","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
149
+ {"input":"\u0e08\u0e32\u0e01\u0e23\u0e30\u0e41\u0e17\u0e01\u0e02\u0e34\u0e21","output":"\u0e08\u0e34\u0e4b\u0e21\u0e01\u0e23\u0e30\u0e41\u0e17\u0e01\u0e02\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
150
+ {"input":"\u0e1a\u0e35\u0e41\u0e2b\u0e48","output":"\u0e41\u0e1a\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
151
+ {"input":"\u0e04\u0e25\u0e31\u0e07\u0e1e\u0e30\u0e25\u0e38\u0e07\u0e1e\u0e30\u0e25\u0e27\u0e22","output":"\u0e04\u0e27\u0e22\u0e1e\u0e30\u0e25\u0e38\u0e07\u0e1e\u0e30\u0e25\u0e31\u0e07","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
152
+ {"input":"\u0e2d\u0e22\u0e48\u0e32\u0e21\u0e32\u0e2d\u0e2d\u0e15\u0e23\u0e07\u0e01\u0e23\u0e30\u0e44\u0e14","output":"\u0e2d\u0e22\u0e48\u0e32\u0e21\u0e32\u0e44\u0e2d\u0e15\u0e23\u0e07\u0e01\u0e23\u0e30\u0e14\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
153
+ {"input":"\u0e40\u0e2b\u0e35\u0e49\u0e22\u0e19\u0e30\u0e2b\u0e48\u0e27\u0e07","output":"\u0e2b\u0e48\u0e27\u0e07\u0e19\u0e30\u0e40\u0e19\u0e35\u0e49\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
154
+ {"input":"\u0e23\u0e2d\u0e0b\u0e37\u0e49\u0e2d\u0e01\u0e23\u0e30\u0e14\u0e38\u0e21","output":"\u0e23\u0e38\u0e21\u0e0b\u0e37\u0e49\u0e2d\u0e01\u0e23\u0e30\u0e14\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
155
+ {"input":"\u0e40\u0e2b\u0e35\u0e49\u0e19\u0e40\u0e17\u0e48\u0e32\u0e40\u0e01\u0e32\u0e30\u0e2a\u0e30\u0e2b\u0e21\u0e35","output":"\u0e2b\u0e35\u0e40\u0e17\u0e48\u0e32\u0e40\u0e01\u0e32\u0e30\u0e40\u0e2a\u0e21\u0e47\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
156
+ {"input":"\u0e08\u0e2d\u0e14\u0e31\u0e1a","output":"\u0e08\u0e31\u0e1a\u0e14\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
157
+ {"input":"\u0e08\u0e35\u0e4b\u0e40\u0e2b\u0e47\u0e1a","output":"\u0e40\u0e08\u0e47\u0e1a\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
158
+ {"input":"\u0e2b\u0e2d\u0e42\u0e14\u0e19\u0e14\u0e35","output":"\u0e2b\u0e35\u0e42\u0e14\u0e19\u0e14\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
159
+ {"input":"\u0e1c\u0e35\u0e17\u0e30\u0e25\u0e27\u0e07\u0e2b\u0e31\u0e27","output":"\u0e1c\u0e31\u0e27\u0e17\u0e30\u0e25\u0e27\u0e07\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
160
+ {"input":"\u0e19\u0e49\u0e33\u0e21\u0e31\u0e19\u0e2b\u0e21\u0e35\u0e40\u0e02\u0e49\u0e32\u0e2b\u0e39","output":"\u0e19\u0e49\u0e33\u0e21\u0e31\u0e19\u0e2b\u0e21\u0e39\u0e17\u0e2d\u0e14\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
161
+ {"input":"\u0e2b\u0e32\u0e14\u0e42\u0e14\u0e19\u0e09\u0e35\u0e02\u0e35","output":"\u0e2b\u0e35\u0e42\u0e14\u0e19\u0e09\u0e35\u0e01\u0e02\u0e32\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
162
+ {"input":"\u0e2a\u0e2b\u0e21\u0e35\u0e40\u0e2b\u0e2d","output":"\u0e2a\u0e40\u0e21\u0e2d\u0e2b\u0e19\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
163
+ {"input":"\u0e19\u0e49\u0e33\u0e15\u0e32\u0e25\u0e43\u0e19\u0e23\u0e39\u0e17\u0e30\u0e41\u0e27\u0e01","output":"\u0e19\u0e49\u0e33\u0e41\u0e15\u0e01\u0e43\u0e19\u0e23\u0e39\u0e17\u0e30\u0e27\u0e32\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
164
+ {"input":"\u0e01\u0e49\u0e32\u0e07\u0e43\u0e2b\u0e0d\u0e48","output":"\u0e44\u0e01\u0e48\u0e22\u0e48\u0e32\u0e07","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
165
+ {"input":"\u0e21\u0e14\u0e17\u0e23\u0e22\u0e34\u0e14","output":"\u0e21\u0e34\u0e15\u0e23\u0e17\u0e23\u0e22\u0e28","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
166
+ {"input":"\u0e41\u0e21\u0e27\u0e01\u0e31\u0e19","output":"\u0e21\u0e31\u0e19\u0e41\u0e01\u0e27","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
167
+ {"input":"\u0e1b\u0e32\u0e2a\u0e32\u0e22\u0e40\u0e15\u0e49\u0e32","output":"\u0e40\u0e1b\u0e49\u0e32\u0e2a\u0e32\u0e22\u0e15\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
168
+ {"input":"\u0e40\u0e28\u0e29\u0e0b\u0e30\u0e01\u0e35\u0e4b","output":"\u0e28\u0e23\u0e35\u0e2a\u0e30\u0e40\u0e01\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
169
+ {"input":"\u0e02\u0e27\u0e14\u0e15\u0e33\u0e23\u0e38\u0e19","output":"\u0e04\u0e38\u0e13\u0e15\u0e33\u0e23\u0e27\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
170
+ {"input":"\u0e2b\u0e21\u0e2d\u0e01\u0e08\u0e34\u0e49\u0e07\u0e08\u0e32","output":"\u0e2b\u0e21\u0e32\u0e08\u0e34\u0e49\u0e07\u0e08\u0e2d\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
171
+ {"input":"\u0e2a\u0e35\u0e01\u0e23\u0e30\u0e41\u0e17\u0e01\u0e40\u0e2b\u0e32","output":"\u0e40\u0e2a\u0e32\u0e01\u0e23\u0e30\u0e41\u0e17\u0e01\u0e2b\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
172
+ {"input":"\u0e01\u0e23\u0e30\u0e40\u0e1b\u0e4b\u0e32\u0e44\u0e14\u0e42\u0e19\u0e42\u0e2a\u0e01","output":"\u0e01\u0e23\u0e30\u0e42\u0e1b\u0e01\u0e44\u0e14\u0e42\u0e19\u0e40\u0e2a\u0e32","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
173
+ {"input":"\u0e44\u0e15\u0e2b\u0e32\u0e2b\u0e31\u0e27\u0e08\u0e32\u0e21","output":"\u0e15\u0e32\u0e21\u0e2b\u0e32\u0e2b\u0e31\u0e27\u0e43\u0e08","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
174
+ {"input":"\u0e21\u0e31\u0e19\u0e40\u0e1b\u0e47\u0e19\u0e41\u0e1f\u0e01\u0e32","output":"\u0e21\u0e32\u0e40\u0e1b\u0e47\u0e19\u0e41\u0e1f\u0e19\u0e01\u0e31\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
175
+ {"input":"\u0e40\u0e27\u0e49\u0e19\u0e27\u0e23\u0e23\u0e04\u0e43\u0e2b\u0e49\u0e40\u0e17\u0e2d\u0e44\u0e25\u0e04\u0e4c","output":"\u0e40\u0e27\u0e49\u0e19\u0e44\u0e27\u0e49\u0e43\u0e2b\u0e49\u0e40\u0e18\u0e2d\u0e23\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
176
+ {"input":"\u0e25\u0e32\u0e2b\u0e23\u0e37\u0e2d\u0e1b\u0e31\u0e4a\u0e01","output":"\u0e23\u0e31\u0e01\u0e23\u0e36\u0e1b\u0e48\u0e32\u0e27","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
177
+ {"input":"\u0e1c\u0e31\u0e01\u0e44\u0e2b\u0e21\u0e16\u0e49\u0e32\u0e08\u0e30\u0e23\u0e34\u0e1a","output":"\u0e1c\u0e34\u0e14\u0e44\u0e2b\u0e21\u0e17\u0e35\u0e48\u0e08\u0e30\u0e23\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
178
+ {"input":"\u0e25\u0e1a\u0e40\u0e18\u0e2d\u0e40\u0e17\u0e2d\u0e44\u0e21\u0e48\u0e23\u0e38\u0e49\u0e08\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e40\u0e18\u0e2d\u0e44\u0e21\u0e48\u0e23\u0e38\u0e49\u0e08\u0e1a","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
179
+ {"input":"\u0e14\u0e27\u0e07\u0e08\u0e31\u0e19\u0e17\u0e23\u0e4c\u0e44\u0e21\u0e48\u0e40\u0e04\u0e22\u0e2b\u0e48\u0e32\u0e07\u0e44\u0e01\u0e25","output":"\u0e14\u0e27\u0e07\u0e43\u0e08\u0e44\u0e21\u0e48\u0e40\u0e04\u0e22\u0e2b\u0e48\u0e32\u0e07\u0e01\u0e31\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
180
+ {"input":"\u0e40\u0e08\u0e2d\u0e21\u0e35\u0e41\u0e15\u0e48\u0e44\u0e17","output":"\u0e43\u0e08\u0e21\u0e35\u0e41\u0e15\u0e48\u0e40\u0e18\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
181
+ {"input":"\u0e42\u0e25\u0e48\u0e19\u0e30\u0e40\u0e14\u0e47\u0e01\u0e07\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e19\u0e30\u0e40\u0e14\u0e47\u0e01\u0e42\u0e07\u0e48","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
182
+ {"input":"\u0e2b\u0e22\u0e2d\u0e14\u0e43\u0e2b\u0e49\u0e40\u0e18\u0e2d\u0e01\u0e32\u0e01","output":"\u0e2d\u0e22\u0e32\u0e01\u0e43\u0e2b\u0e49\u0e40\u0e18\u0e2d\u0e01\u0e2d\u0e14","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
183
+ {"input":"\u0e22\u0e31\u0e01\u0e43\u0e2b\u0e49\u0e1a\u0e2d\u0e2b\u0e2b\u0e25\u0e32\u0e01","output":"\u0e2d\u0e22\u0e32\u0e01\u0e43\u0e2b\u0e49\u0e1a\u0e2d\u0e01\u0e23\u0e31\u0e01","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
184
+ {"input":"\u0e2d\u0e2d\u0e1a\u0e44\u0e21\u0e48\u0e15\u0e48\u0e32\u0e19","output":"\u0e2d\u0e48\u0e32\u0e19\u0e44\u0e21\u0e48\u0e15\u0e2d\u0e1a","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
185
+ {"input":"\u0e40\u0e02\u0e35\u0e22\u0e14\u0e1c\u0e39\u0e49\u0e21\u0e35\u0e41\u0e01\u0e23\u0e01","output":"\u0e41\u0e02\u0e01\u0e1c\u0e39\u0e49\u0e21\u0e35\u0e40\u0e01\u0e35\u0e22\u0e08","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
186
+ {"input":"\u0e40\u0e2b\u0e35\u0e48\u0e22\u0e27\u0e40\u0e18\u0e2d\u0e04\u0e19\u0e14\u0e27\u0e07","output":"\u0e2b\u0e27\u0e07\u0e40\u0e18\u0e2d\u0e04\u0e19\u0e40\u0e14\u0e35\u0e22\u0e27","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
187
+ {"input":"\u0e44\u0e02\u0e43\u0e2b\u0e49\u0e40\u0e18\u0e2d\u0e40\u0e2b\u0e47\u0e19\u0e08\u0e2d","output":"\u0e02\u0e2d\u0e43\u0e2b\u0e49\u0e40\u0e18\u0e2d\u0e40\u0e2b\u0e47\u0e19\u0e43\u0e08","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
188
+ {"input":"\u0e15\u0e35\u0e44\u0e21\u0e48\u0e21\u0e31\u0e07","output":"\u0e15\u0e31\u0e07\u0e44\u0e21\u0e48\u0e21\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
189
+ {"input":"\u0e44\u0e02\u0e15\u0e31\u0e07\u0e2b\u0e19\u0e48\u0e2d","output":"\u0e02\u0e2d\u0e15\u0e31\u0e07\u0e2b\u0e19\u0e48\u0e2d\u0e22","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
190
+ {"input":"\u0e44\u0e25\u0e48\u0e04\u0e37\u0e2d\u0e01\u0e32\u0e23\u0e2e\u0e31\u0e01","output":"\u0e23\u0e31\u0e01\u0e04\u0e37\u0e2d\u0e01\u0e32\u0e23\u0e43\u0e2b\u0e49","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
191
+ {"input":"\u0e08\u0e31\u0e19\u0e2d\u0e22\u0e39\u0e48\u0e43\u0e01\u0e25\u0e49\u0e44\u0e01\u0e25","output":"\u0e43\u0e08\u0e2d\u0e22\u0e39\u0e48\u0e43\u0e01\u0e25\u0e49\u0e01\u0e31\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
192
+ {"input":"\u0e04\u0e30\u0e01\u0e31\u0e19\u0e19\u0e1a","output":"\u0e04\u0e1a\u0e01\u0e31\u0e19\u0e19\u0e30","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
193
+ {"input":"\u0e41\u0e1e\u0e49\u0e23\u0e31\u0e01\u0e17\u0e1a","output":"\u0e1e\u0e1a\u0e23\u0e31\u0e01\u0e41\u0e17\u0e49","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
194
+ {"input":"\u0e2a\u0e1a\u0e35\u0e14\u0e32\u0e22","output":"\u0e2a\u0e1a\u0e32\u0e22\u0e14\u0e35","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
195
+ {"input":"\u0e44\u0e23\u0e40\u0e15\u0e47\u0e21\u0e2b\u0e31\u0e27\u0e08\u0e31\u0e4a\u0e01","output":"\u0e23\u0e31\u0e01\u0e40\u0e15\u0e47\u0e21\u0e2b\u0e31\u0e27\u0e43\u0e08","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
196
+ {"input":"\u0e44\u0e17\u0e22\u0e21\u0e35\u0e40\u0e08\u0e2d","output":"\u0e40\u0e17\u0e2d\u0e44\u0e21\u0e48\u0e21\u0e35\u0e43\u0e08","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
197
+ {"input":"\u0e04\u0e31\u0e19\u0e16\u0e36\u0e07\u0e17\u0e38\u0e01\u0e27\u0e34","output":"\u0e04\u0e34\u0e14\u0e16\u0e36\u0e07\u0e17\u0e38\u0e01\u0e27\u0e31\u0e19","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
198
+ {"input":"\u0e23\u0e2d\u0e1a\u0e21\u0e31\u0e49\u0e22\u0e27\u0e48\u0e32\u0e0a\u0e39\u0e49","output":"\u0e23\u0e39\u0e49\u0e21\u0e31\u0e49\u0e22\u0e27\u0e48\u0e32\u0e0a\u0e2d\u0e1a","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
199
+ {"input":"\u0e40\u0e02\u0e2d\u0e21\u0e35\u0e40\u0e1e\u0e35\u0e22\u0e07\u0e17\u0e2d","output":"\u0e02\u0e2d\u0e21\u0e35\u0e40\u0e1e\u0e35\u0e22\u0e07\u0e40\u0e18\u0e2d","instruction":"\u0e1c\u0e27\u0e19\u0e04\u0e33\u0e43\u0e2b\u0e49\u0e2b\u0e19\u0e48\u0e2d\u0e22"}
openthaigpt_Finetuning/openthaigpt-finetune/docker-compose.yml ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
version: '3'

services:
  # Gradio inference service for the OpenThaiGPT 0.1.0-beta LoRA weights.
  openthaigpt-0.1.0-beta:
    build:
      context: ./
      dockerfile: Dockerfile
      args:
        BUILDKIT_INLINE_CACHE: "0"
    image: openthaigpt-0.1.0-beta
    # Large shared-memory segment for PyTorch dataloader/IPC usage.
    shm_size: '64gb'
    # BASE_MODEL must be provided in the environment when bringing the stack up.
    command: generate.py --load_8bit --base_model $BASE_MODEL --lora_weights 'kobkrit/openthaigpt-0.1.0-beta'
    restart: unless-stopped
    volumes:
      - openthaigpt-0.1.0-beta:/root/.cache # Location downloaded weights will be stored
    ports:
      - 7860:7860 # Gradio default port
    deploy:
      resources:
        reservations:
          devices:
            # Expose every NVIDIA GPU on the host to the container.
            - driver: nvidia
              count: all
              capabilities: [ gpu ]

volumes:
  # Named volume so downloaded model weights survive container rebuilds.
  openthaigpt-0.1.0-beta:
    name: openthaigpt-0.1.0-beta
openthaigpt_Finetuning/openthaigpt-finetune/export_hf_checkpoint.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

import torch
import transformers
from peft import PeftModel
from transformers import LlamaForCausalLM, LlamaTokenizer  # noqa: F402

# Merge the LoRA adapter in ./openthaigpt-010-beta into the base Llama
# weights and write a plain Hugging Face checkpoint under ./hf_ckpt.

BASE_MODEL = os.environ.get("BASE_MODEL", None)
assert (
    BASE_MODEL
), "Please specify a value for BASE_MODEL environment variable, e.g. `export BASE_MODEL=decapoda-research/llama-7b-hf`"  # noqa: E501

tokenizer = LlamaTokenizer.from_pretrained(BASE_MODEL)

base_model = LlamaForCausalLM.from_pretrained(
    BASE_MODEL,
    load_in_8bit=False,
    torch_dtype=torch.float16,
    device_map={"": "cpu"},
)

# Keep a live reference to one attention weight plus a frozen copy so the
# merge below can be verified to have actually modified the base model.
probe_weight = base_model.model.layers[0].self_attn.q_proj.weight
probe_before = probe_weight.clone()

lora_model = PeftModel.from_pretrained(
    base_model,
    "./openthaigpt-010-beta",
    device_map={"": "cpu"},
    torch_dtype=torch.float16,
)

lora_weight = lora_model.base_model.model.model.layers[
    0
].self_attn.q_proj.weight

# Merely attaching the adapter must leave the base weights untouched.
assert torch.allclose(probe_before, probe_weight)

# merge weights - new merging method from peft
lora_model = lora_model.merge_and_unload()

lora_model.train(False)

# did we do anything?  The probed weight should now differ from its copy.
assert not torch.allclose(probe_before, probe_weight)

# Strip the PEFT wrapper prefix from every key and drop LoRA-only tensors,
# leaving a state dict in vanilla LlamaForCausalLM naming.
lora_model_sd = lora_model.state_dict()
deloreanized_sd = {
    k.replace("base_model.model.", ""): v
    for k, v in lora_model_sd.items()
    if "lora" not in k
}

LlamaForCausalLM.save_pretrained(
    base_model, "./hf_ckpt", state_dict=deloreanized_sd, max_shard_size="400MB"
)
openthaigpt_Finetuning/openthaigpt-finetune/export_state_dict_checkpoint.py ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
import os

import torch
import transformers
from peft import PeftModel
from transformers import LlamaForCausalLM, LlamaTokenizer  # noqa: E402

# Merge the published OpenThaiGPT LoRA adapter into the base Llama model
# and (further below) re-export it as an original-format consolidated
# checkpoint (consolidated.00.pth + params.json).

BASE_MODEL = os.environ.get("BASE_MODEL", None)
assert (
    BASE_MODEL
), "Please specify a value for BASE_MODEL environment variable, e.g. `export BASE_MODEL=decapoda-research/llama-7b-hf`"  # noqa: E501

tokenizer = LlamaTokenizer.from_pretrained(BASE_MODEL)

# Load everything on CPU in fp16; no GPU is required for the export.
base_model = LlamaForCausalLM.from_pretrained(
    BASE_MODEL,
    load_in_8bit=False,
    torch_dtype=torch.float16,
    device_map={"": "cpu"},
)

lora_model = PeftModel.from_pretrained(
    base_model,
    "kobkrit/openthaigpt-0.1.0-beta",
    device_map={"": "cpu"},
    torch_dtype=torch.float16,
)

# merge weights
# Flag the LoRA-adapted projections so eval mode folds the adapter deltas
# into the base q/v projection weights.
for layer in lora_model.base_model.model.model.layers:
    layer.self_attn.q_proj.merge_weights = True
    layer.self_attn.v_proj.merge_weights = True

lora_model.train(False)

lora_model_sd = lora_model.state_dict()

# Architecture hyper-parameters (7B-class Llama); written out as params.json.
params = {
    "dim": 4096,
    "multiple_of": 256,
    "n_heads": 32,
    "n_layers": 32,
    "norm_eps": 1e-06,
    "vocab_size": -1,
}
n_layers = params["n_layers"]
n_heads = params["n_heads"]
dim = params["dim"]
dims_per_head = dim // n_heads
base = 10000.0
# Rotary-embedding inverse frequencies; NOTE(review): computed but not used
# later in this script.
inv_freq = 1.0 / (
    base ** (torch.arange(0, dims_per_head, 2).float() / dims_per_head)
)
57
def permute(w):
    # Regroup the rotary head dimensions of a (dim, dim) projection matrix;
    # inverse of unpermute below. NOTE(review): mirrors the HF<->original
    # Llama weight-conversion scripts — confirm direction before reuse.
    half = dim // n_heads // 2
    grouped = w.view(n_heads, half, 2, dim)
    return grouped.transpose(1, 2).reshape(dim, dim)
63
+
64
+
65
def unpermute(w):
    # Undo permute(): restore the interleaved rotary head-dimension layout
    # of a (dim, dim) projection to original-Llama ordering.
    half = dim // n_heads // 2
    grouped = w.view(n_heads, 2, half, dim)
    return grouped.transpose(1, 2).reshape(dim, dim)
71
+
72
+
73
# HF per-layer weight-name suffix -> original-Llama per-layer name.
_LAYER_KEY_SUFFIXES = {
    ".self_attn.q_proj.weight": "attention.wq.weight",
    ".self_attn.k_proj.weight": "attention.wk.weight",
    ".self_attn.v_proj.weight": "attention.wv.weight",
    ".self_attn.o_proj.weight": "attention.wo.weight",
    ".mlp.gate_proj.weight": "feed_forward.w1.weight",
    ".mlp.down_proj.weight": "feed_forward.w2.weight",
    ".mlp.up_proj.weight": "feed_forward.w3.weight",
    ".input_layernorm.weight": "attention_norm.weight",
    ".post_attention_layernorm.weight": "ffn_norm.weight",
}


def translate_state_dict_key(k):
    """Translate a Hugging Face Llama state-dict key to original
    Meta/Llama checkpoint naming.

    Any PEFT ``base_model.model.`` prefix is stripped first.

    Returns:
        The translated key, or ``None`` for keys that should be dropped
        (rotary ``inv_freq`` buffers and LoRA-specific tensors).

    Raises:
        NotImplementedError: for keys with no known translation; the
            offending key is included in the exception message (previously
            it was only printed and a bare exception raised).
    """
    k = k.replace("base_model.model.", "")
    if k == "model.embed_tokens.weight":
        return "tok_embeddings.weight"
    if k == "model.norm.weight":
        return "norm.weight"
    if k == "lm_head.weight":
        return "output.weight"
    if k.startswith("model.layers."):
        layer = k.split(".")[2]  # layer index, e.g. "model.layers.12.x" -> "12"
        for suffix, target in _LAYER_KEY_SUFFIXES.items():
            if k.endswith(suffix):
                return f"layers.{layer}.{target}"
        if k.endswith("rotary_emb.inv_freq") or "lora" in k:
            return None
        raise NotImplementedError(f"unrecognized per-layer key: {k!r}")
    raise NotImplementedError(f"unrecognized key: {k!r}")
109
+
110
+
111
# Convert every merged tensor to the original checkpoint layout, dropping
# keys that translate_state_dict_key maps to None (LoRA/inv_freq tensors).
new_state_dict = {}
for k, v in lora_model_sd.items():
    new_k = translate_state_dict_key(k)
    if new_k is not None:
        if "wq" in new_k or "wk" in new_k:
            # Query/key projections need their rotary head interleaving
            # converted back to original-Llama ordering.
            new_state_dict[new_k] = unpermute(v)
        else:
            new_state_dict[new_k] = v

os.makedirs("./ckpt", exist_ok=True)

# Single-shard original-format checkpoint plus its architecture metadata.
torch.save(new_state_dict, "./ckpt/consolidated.00.pth")

with open("./ckpt/params.json", "w") as f:
    json.dump(params, f)
openthaigpt_Finetuning/openthaigpt-finetune/finetune.py ADDED
@@ -0,0 +1,283 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ from typing import List
4
+
5
+ import fire
6
+ import torch
7
+ import transformers
8
+ from datasets import load_dataset
9
+
10
+ """
11
+ Unused imports:
12
+ import torch.nn as nn
13
+ import bitsandbytes as bnb
14
+ """
15
+
16
+ from peft import (
17
+ LoraConfig,
18
+ get_peft_model,
19
+ get_peft_model_state_dict,
20
+ prepare_model_for_int8_training,
21
+ set_peft_model_state_dict,
22
+ )
23
+ from transformers import LlamaForCausalLM, LlamaTokenizer
24
+
25
+ from utils.prompter import Prompter
26
+
27
+
28
def train(
    # model/data params
    base_model: str = "",  # the only required argument
    data_path: str = "Thaweewat/alpaca-cleaned-52k-th",
    output_dir: str = "./openthaigpt-100-beta",
    # training hyperparams
    batch_size: int = 128,  # effective batch size, realized via gradient accumulation
    micro_batch_size: int = 4,  # per-device batch size
    num_epochs: int = 3,
    learning_rate: float = 3e-4,
    cutoff_len: int = 256,  # tokenizer truncation length for prompts
    val_set_size: int = 2000,  # 0 disables the validation split
    # lora hyperparams
    lora_r: int = 8,
    lora_alpha: int = 16,
    lora_dropout: float = 0.05,
    lora_target_modules: List[str] = [
        "q_proj",
        "v_proj",
    ],
    # llm hyperparams
    train_on_inputs: bool = True,  # if False, masks out inputs in loss
    add_eos_token: bool = False,
    group_by_length: bool = False,  # faster, but produces an odd training loss curve
    # wandb params
    wandb_project: str = "",
    wandb_run_name: str = "",
    wandb_watch: str = "",  # options: false | gradients | all
    wandb_log_model: str = "",  # options: false | true
    resume_from_checkpoint: str = None,  # either training checkpoint or final adapter
    prompt_template_name: str = "llama_v2",  # The prompt template to use, will default to alpaca.
):
    """LoRA-finetune a LLaMA-family causal LM on an instruction dataset.

    Loads `base_model` in 8-bit, attaches a PEFT LoRA adapter, builds prompts
    with `Prompter(prompt_template_name)`, tokenizes them up to `cutoff_len`,
    and trains via `transformers.Trainer`. The resulting adapter weights are
    saved to `output_dir`.
    """
    # Print the full run configuration once (only on rank 0 when launched
    # under torchrun / DDP, where LOCAL_RANK is set per process).
    if int(os.environ.get("LOCAL_RANK", 0)) == 0:
        print(
            f"Training Llama v2 model with params:\n"
            f"base_model: {base_model}\n"
            f"data_path: {data_path}\n"
            f"output_dir: {output_dir}\n"
            f"batch_size: {batch_size}\n"
            f"micro_batch_size: {micro_batch_size}\n"
            f"num_epochs: {num_epochs}\n"
            f"learning_rate: {learning_rate}\n"
            f"cutoff_len: {cutoff_len}\n"
            f"val_set_size: {val_set_size}\n"
            f"lora_r: {lora_r}\n"
            f"lora_alpha: {lora_alpha}\n"
            f"lora_dropout: {lora_dropout}\n"
            f"lora_target_modules: {lora_target_modules}\n"
            f"train_on_inputs: {train_on_inputs}\n"
            f"add_eos_token: {add_eos_token}\n"
            f"group_by_length: {group_by_length}\n"
            f"wandb_project: {wandb_project}\n"
            f"wandb_run_name: {wandb_run_name}\n"
            f"wandb_watch: {wandb_watch}\n"
            f"wandb_log_model: {wandb_log_model}\n"
            f"resume_from_checkpoint: {resume_from_checkpoint or False}\n"
            f"prompt template: {prompt_template_name}\n"
        )
    assert (
        base_model
    ), "Please specify a --base_model, e.g. --base_model='decapoda-research/llama-7b-hf'"
    # Number of micro-batches accumulated per optimizer step.
    gradient_accumulation_steps = batch_size // micro_batch_size

    prompter = Prompter(prompt_template_name)

    device_map = "auto"
    # WORLD_SIZE > 1 means we are running under distributed data parallel.
    world_size = int(os.environ.get("WORLD_SIZE", 1))
    ddp = world_size != 1
    if ddp:
        # Pin each process to its own local device; split the accumulation
        # steps across ranks so the effective batch size stays `batch_size`.
        device_map = {"": int(os.environ.get("LOCAL_RANK") or 0)}
        gradient_accumulation_steps = gradient_accumulation_steps // world_size

    # Check if parameter passed or if set within environ
    use_wandb = len(wandb_project) > 0 or (
        "WANDB_PROJECT" in os.environ and len(os.environ["WANDB_PROJECT"]) > 0
    )
    # Only overwrite environ if wandb param passed
    if len(wandb_project) > 0:
        os.environ["WANDB_PROJECT"] = wandb_project
    if len(wandb_watch) > 0:
        os.environ["WANDB_WATCH"] = wandb_watch
    if len(wandb_log_model) > 0:
        os.environ["WANDB_LOG_MODEL"] = wandb_log_model

    # Load the base model quantized to 8-bit to fit LoRA finetuning in memory.
    model = LlamaForCausalLM.from_pretrained(
        base_model,
        load_in_8bit=True,
        torch_dtype=torch.float16,
        device_map=device_map,
    )

    tokenizer = LlamaTokenizer.from_pretrained(base_model)

    tokenizer.pad_token_id = (
        0  # unk. we want this to be different from the eos token
    )
    tokenizer.padding_side = "left"  # Allow batched inference

    def tokenize(prompt, add_eos_token=True):
        # Tokenize one prompt, truncated to cutoff_len; optionally append EOS
        # when there is room, and mirror input_ids into labels for causal LM.
        # there's probably a way to do this with the tokenizer settings
        # but again, gotta move fast
        result = tokenizer(
            prompt,
            truncation=True,
            max_length=cutoff_len,
            padding=False,
            return_tensors=None,
        )
        if (
            result["input_ids"][-1] != tokenizer.eos_token_id
            and len(result["input_ids"]) < cutoff_len
            and add_eos_token
        ):
            result["input_ids"].append(tokenizer.eos_token_id)
            result["attention_mask"].append(1)

        result["labels"] = result["input_ids"].copy()

        return result

    def generate_and_tokenize_prompt(data_point):
        # Build the full prompt (instruction + input + output) and tokenize.
        # When train_on_inputs is False, the prompt portion of the labels is
        # masked with -100 so loss is computed only on the response tokens.
        full_prompt = prompter.generate_prompt(
            data_point["instruction"],
            data_point["input"],
            data_point["output"],
        )
        tokenized_full_prompt = tokenize(full_prompt)
        if not train_on_inputs:
            user_prompt = prompter.generate_prompt(
                data_point["instruction"], data_point["input"]
            )
            tokenized_user_prompt = tokenize(
                user_prompt, add_eos_token=add_eos_token
            )
            user_prompt_len = len(tokenized_user_prompt["input_ids"])

            if add_eos_token:
                # The user prompt's trailing EOS is not part of the prompt
                # proper, so keep its label unmasked.
                user_prompt_len -= 1

            tokenized_full_prompt["labels"] = [
                -100
            ] * user_prompt_len + tokenized_full_prompt["labels"][
                user_prompt_len:
            ]  # could be sped up, probably
        return tokenized_full_prompt

    # Cast/freeze layers as required for stable int8 + LoRA training.
    model = prepare_model_for_int8_training(model)

    config = LoraConfig(
        r=lora_r,
        lora_alpha=lora_alpha,
        target_modules=lora_target_modules,
        lora_dropout=lora_dropout,
        bias="none",
        task_type="CAUSAL_LM",
    )
    model = get_peft_model(model, config)

    # Local JSON/JSONL files are loaded with the "json" builder; anything
    # else is treated as a Hugging Face Hub dataset name.
    if data_path.endswith(".json") or data_path.endswith(".jsonl"):
        data = load_dataset("json", data_files=data_path)
    else:
        data = load_dataset(data_path)

    if resume_from_checkpoint:
        # Check the available weights and load them
        checkpoint_name = os.path.join(
            resume_from_checkpoint, "pytorch_model.bin"
        )  # Full checkpoint
        if not os.path.exists(checkpoint_name):
            checkpoint_name = os.path.join(
                resume_from_checkpoint, "adapter_model.bin"
            )  # only LoRA model - LoRA config above has to fit
            resume_from_checkpoint = (
                False  # So the trainer won't try loading its state
            )
        # The two files above have a different name depending on how they were saved, but are actually the same.
        if os.path.exists(checkpoint_name):
            print(f"Restarting from {checkpoint_name}")
            adapters_weights = torch.load(checkpoint_name)
            set_peft_model_state_dict(model, adapters_weights)
        else:
            print(f"Checkpoint {checkpoint_name} not found")

    model.print_trainable_parameters()  # Be more transparent about the % of trainable params.

    # Deterministic (seed=42) train/val split when a validation set is wanted.
    if val_set_size > 0:
        train_val = data["train"].train_test_split(
            test_size=val_set_size, shuffle=True, seed=42
        )
        train_data = (
            train_val["train"].shuffle().map(generate_and_tokenize_prompt)
        )
        val_data = (
            train_val["test"].shuffle().map(generate_and_tokenize_prompt)
        )
    else:
        train_data = data["train"].shuffle().map(generate_and_tokenize_prompt)
        val_data = None

    if not ddp and torch.cuda.device_count() > 1:
        # keeps Trainer from trying its own DataParallelism when more than 1 gpu is available
        model.is_parallelizable = True
        model.model_parallel = True

    trainer = transformers.Trainer(
        model=model,
        train_dataset=train_data,
        eval_dataset=val_data,
        args=transformers.TrainingArguments(
            per_device_train_batch_size=micro_batch_size,
            gradient_accumulation_steps=gradient_accumulation_steps,
            warmup_steps=100,
            num_train_epochs=num_epochs,
            learning_rate=learning_rate,
            fp16=True,
            logging_steps=10,
            optim="adamw_torch",
            evaluation_strategy="steps" if val_set_size > 0 else "no",
            save_strategy="steps",
            eval_steps=200 if val_set_size > 0 else None,
            save_steps=200,
            output_dir=output_dir,
            save_total_limit=3,
            load_best_model_at_end=True if val_set_size > 0 else False,
            ddp_find_unused_parameters=False if ddp else None,
            group_by_length=group_by_length,
            report_to="wandb" if use_wandb else None,
            run_name=wandb_run_name if use_wandb else None,
        ),
        data_collator=transformers.DataCollatorForSeq2Seq(
            tokenizer, pad_to_multiple_of=8, return_tensors="pt", padding=True
        ),
    )
    # KV-cache is useless during training and wastes memory.
    model.config.use_cache = False

    # old_state_dict = model.state_dict
    # model.state_dict = (
    #     lambda self, *_, **__: get_peft_model_state_dict(
    #         self, old_state_dict()
    #     )
    # ).__get__(model, type(model))

    # torch.compile speeds up training on PyTorch 2+ (unsupported on Windows).
    if torch.__version__ >= "2" and sys.platform != "win32":
        model = torch.compile(model)

    trainer.train(resume_from_checkpoint=resume_from_checkpoint)

    # Saves only the LoRA adapter weights/config, not the full base model.
    model.save_pretrained(output_dir)

    print(
        "\n If there's a warning about missing keys above, please disregard :)"
    )
280
+
281
+
282
if __name__ == "__main__":
    # CLI entry point: expose train()'s keyword arguments as flags via Fire.
    fire.Fire(train)
openthaigpt_Finetuning/openthaigpt-finetune/generate.py ADDED
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+
4
+ import fire
5
+ import gradio as gr
6
+ import torch
7
+ import transformers
8
+ from peft import PeftModel
9
+ from transformers import GenerationConfig, LlamaForCausalLM, LlamaTokenizer
10
+
11
+ from utils.callbacks import Iteratorize, Stream
12
+ from utils.prompter import Prompter
13
+
14
# Pick the best available accelerator: prefer CUDA, then Apple MPS, else CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

try:
    # torch.backends.mps may not exist on older torch builds, so this probe
    # is best-effort and any failure simply keeps the choice made above.
    if torch.backends.mps.is_available():
        device = "mps"
except:  # noqa: E722
    pass
24
+
25
+
26
def main(
    load_8bit: bool = False,
    base_model: str = "",
    lora_weights: str = "",
    prompt_template: str = "llama_v2",  # The prompt template to use, will default to alpaca.
    server_name: str = "0.0.0.0",  # Listen on all interfaces by default.
    share_gradio: bool = False,
):
    """Serve a (LoRA-adapted) LLaMA model behind a Gradio chat interface.

    Loads `base_model` (8-bit optional on CUDA), layers the `lora_weights`
    PEFT adapter on top, and launches a Gradio app whose `evaluate` callback
    builds prompts with `Prompter(prompt_template)` and streams or batches
    generations.

    Args:
        load_8bit: load the base model in 8-bit (CUDA only path honors this).
        base_model: HF model id or path; falls back to $BASE_MODEL. Required.
        lora_weights: adapter id or path; falls back to $LORA_WEIGHTS.
        prompt_template: template name consumed by Prompter.
        server_name: address Gradio binds to (e.g. "0.0.0.0" or "127.0.0.1").
        share_gradio: request a public Gradio share link.
    """
    # CLI arguments win; fall back to environment variables.
    lora_weights = lora_weights or os.environ.get("LORA_WEIGHTS", "")
    base_model = base_model or os.environ.get("BASE_MODEL", "")
    assert (
        base_model
    ), "Please specify a --base_model, e.g. --base_model='decapoda-research/llama-7b-hf'"

    print("base_model:", base_model)
    print("lora_weights:", lora_weights)

    prompter = Prompter(prompt_template)
    tokenizer = LlamaTokenizer.from_pretrained(base_model)
    # Device-specific loading: fp16 on CUDA/MPS, low-memory fp32 on CPU.
    if device == "cuda":
        model = LlamaForCausalLM.from_pretrained(
            base_model,
            load_in_8bit=load_8bit,
            torch_dtype=torch.float16,
            device_map="auto",
        )
        model = PeftModel.from_pretrained(
            model,
            lora_weights,
            torch_dtype=torch.float16,
        )
    elif device == "mps":
        model = LlamaForCausalLM.from_pretrained(
            base_model,
            device_map={"": device},
            torch_dtype=torch.float16,
        )
        model = PeftModel.from_pretrained(
            model,
            lora_weights,
            device_map={"": device},
            torch_dtype=torch.float16,
        )
    else:
        model = LlamaForCausalLM.from_pretrained(
            base_model, device_map={"": device}, low_cpu_mem_usage=True
        )
        model = PeftModel.from_pretrained(
            model,
            lora_weights,
            device_map={"": device},
        )

    # unwind broken decapoda-research config
    model.config.pad_token_id = tokenizer.pad_token_id = 0  # unk
    model.config.bos_token_id = 1
    model.config.eos_token_id = 2

    if not load_8bit:
        model.half()  # seems to fix bugs for some users.

    model.eval()
    # torch.compile speeds up inference on PyTorch 2+ (unsupported on Windows).
    if torch.__version__ >= "2" and sys.platform != "win32":
        model = torch.compile(model)

    def evaluate(
        instruction,
        input=None,
        temperature=0.1,
        top_p=0.75,
        top_k=40,
        num_beams=4,
        max_new_tokens=128,
        stream_output=False,
        **kwargs,
    ):
        # Generator backing the Gradio UI: yields the (possibly partial)
        # model response extracted from the decoded output by the prompter.
        prompt = prompter.generate_prompt(instruction, input)
        inputs = tokenizer(prompt, return_tensors="pt")
        input_ids = inputs["input_ids"].to(device)
        generation_config = GenerationConfig(
            temperature=temperature,
            top_p=top_p,
            top_k=top_k,
            num_beams=num_beams,
            **kwargs,
        )

        generate_params = {
            "input_ids": input_ids,
            "generation_config": generation_config,
            "return_dict_in_generate": True,
            "output_scores": True,
            "max_new_tokens": max_new_tokens,
        }

        if stream_output:
            # Stream the reply 1 token at a time.
            # This is based on the trick of using 'stopping_criteria' to create an iterator,
            # from https://github.com/oobabooga/text-generation-webui/blob/ad37f396fc8bcbab90e11ecf17c56c97bfbd4a9c/modules/text_generation.py#L216-L243.

            def generate_with_callback(callback=None, **kwargs):
                # Invoke callback for every generated token via a Stream
                # stopping-criterion hook.
                kwargs.setdefault(
                    "stopping_criteria", transformers.StoppingCriteriaList()
                )
                kwargs["stopping_criteria"].append(
                    Stream(callback_func=callback)
                )
                with torch.no_grad():
                    model.generate(**kwargs)

            def generate_with_streaming(**kwargs):
                # Adapt the callback-style generator into an iterator.
                return Iteratorize(
                    generate_with_callback, kwargs, callback=None
                )

            with generate_with_streaming(**generate_params) as generator:
                for output in generator:
                    # new_tokens = len(output) - len(input_ids[0])
                    decoded_output = tokenizer.decode(output)

                    if output[-1] in [tokenizer.eos_token_id]:
                        break

                    yield prompter.get_response(decoded_output)
            return  # early return for stream_output

        # Without streaming
        with torch.no_grad():
            generation_output = model.generate(
                input_ids=input_ids,
                generation_config=generation_config,
                return_dict_in_generate=True,
                output_scores=True,
                max_new_tokens=max_new_tokens,
            )
        s = generation_output.sequences[0]
        output = tokenizer.decode(s)
        yield prompter.get_response(output)

    gr.Interface(
        fn=evaluate,
        inputs=[
            gr.components.Textbox(
                lines=2,
                label="Instruction",
                placeholder="Tell me about llama.",
            ),
            gr.components.Textbox(lines=2, label="Input", placeholder="none"),
            gr.components.Slider(
                minimum=0, maximum=1, value=0.1, label="Temperature"
            ),
            gr.components.Slider(
                minimum=0, maximum=1, value=0.75, label="Top p"
            ),
            gr.components.Slider(
                minimum=0, maximum=100, step=1, value=40, label="Top k"
            ),
            gr.components.Slider(
                minimum=1, maximum=4, step=1, value=4, label="Beams"
            ),
            gr.components.Slider(
                minimum=1, maximum=2000, step=1, value=128, label="Max tokens"
            ),
            gr.components.Checkbox(label="Stream output"),
        ],
        outputs=[
            # Fixed: use gr.components (consistent with the inputs above)
            # rather than the deprecated gr.inputs namespace for an output.
            gr.components.Textbox(
                lines=5,
                label="Output",
            )
        ],
        title="🇹🇭 OpenThaiGPT 1.0.0-beta",
        description="🇹🇭 OpenThaiGPT 1.0.0-beta is a 7B-parameter LLaMA model finetuned to follow Thai instructions. It is trained on various dataset and makes use of the Huggingface LLaMA implementation. For more information, please visit [the project's website](https://openthaigpt.aieat.or.th).",  # noqa: E501
        # Fixed: honor the server_name parameter instead of a hardcoded
        # "0.0.0.0", so callers can actually choose the bind address.
    ).queue().launch(server_name=server_name, share=share_gradio)
200
# Old testing code follows.

"""
# testing code for readme
for instruction in [
    "Tell me about alpacas.",
    "Tell me about the president of Mexico in 2019.",
    "Tell me about the king of France in 2019.",
    "List all Canadian provinces in alphabetical order.",
    "Write a Python program that prints the first 10 Fibonacci numbers.",
    "Write a program that prints the numbers from 1 to 100. But for multiples of three print 'Fizz' instead of the number and for the multiples of five print 'Buzz'. For numbers which are multiples of both three and five print 'FizzBuzz'.",  # noqa: E501
    "Tell me five words that rhyme with 'shock'.",
    "Translate the sentence 'I have no mouth but I must scream' into Spanish.",
    "Count up from 1 to 500.",
]:
    print("Instruction:", instruction)
    print("Response:", evaluate(instruction))
    print()
"""


if __name__ == "__main__":
    # CLI entry point: expose main()'s keyword arguments as flags via Fire.
    fire.Fire(main)
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/README.md ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: peft
3
+ base_model: openthaigpt/openthaigpt-1.0.0-beta-7b-chat-ckpt-hf
4
+ ---
5
+
6
+ # Model Card for Model ID
7
+
8
+ <!-- Provide a quick summary of what the model is/does. -->
9
+
10
+
11
+
12
+ ## Model Details
13
+
14
+ ### Model Description
15
+
16
+ <!-- Provide a longer summary of what this model is. -->
17
+
18
+
19
+
20
+ - **Developed by:** [More Information Needed]
21
+ - **Funded by [optional]:** [More Information Needed]
22
+ - **Shared by [optional]:** [More Information Needed]
23
+ - **Model type:** [More Information Needed]
24
+ - **Language(s) (NLP):** [More Information Needed]
25
+ - **License:** [More Information Needed]
26
+ - **Finetuned from model [optional]:** [More Information Needed]
27
+
28
+ ### Model Sources [optional]
29
+
30
+ <!-- Provide the basic links for the model. -->
31
+
32
+ - **Repository:** [More Information Needed]
33
+ - **Paper [optional]:** [More Information Needed]
34
+ - **Demo [optional]:** [More Information Needed]
35
+
36
+ ## Uses
37
+
38
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
39
+
40
+ ### Direct Use
41
+
42
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
43
+
44
+ [More Information Needed]
45
+
46
+ ### Downstream Use [optional]
47
+
48
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
49
+
50
+ [More Information Needed]
51
+
52
+ ### Out-of-Scope Use
53
+
54
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
55
+
56
+ [More Information Needed]
57
+
58
+ ## Bias, Risks, and Limitations
59
+
60
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
61
+
62
+ [More Information Needed]
63
+
64
+ ### Recommendations
65
+
66
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
67
+
68
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
69
+
70
+ ## How to Get Started with the Model
71
+
72
+ Use the code below to get started with the model.
73
+
74
+ [More Information Needed]
75
+
76
+ ## Training Details
77
+
78
+ ### Training Data
79
+
80
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
81
+
82
+ [More Information Needed]
83
+
84
+ ### Training Procedure
85
+
86
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
87
+
88
+ #### Preprocessing [optional]
89
+
90
+ [More Information Needed]
91
+
92
+
93
+ #### Training Hyperparameters
94
+
95
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
96
+
97
+ #### Speeds, Sizes, Times [optional]
98
+
99
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
100
+
101
+ [More Information Needed]
102
+
103
+ ## Evaluation
104
+
105
+ <!-- This section describes the evaluation protocols and provides the results. -->
106
+
107
+ ### Testing Data, Factors & Metrics
108
+
109
+ #### Testing Data
110
+
111
+ <!-- This should link to a Dataset Card if possible. -->
112
+
113
+ [More Information Needed]
114
+
115
+ #### Factors
116
+
117
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
118
+
119
+ [More Information Needed]
120
+
121
+ #### Metrics
122
+
123
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
124
+
125
+ [More Information Needed]
126
+
127
+ ### Results
128
+
129
+ [More Information Needed]
130
+
131
+ #### Summary
132
+
133
+
134
+
135
+ ## Model Examination [optional]
136
+
137
+ <!-- Relevant interpretability work for the model goes here -->
138
+
139
+ [More Information Needed]
140
+
141
+ ## Environmental Impact
142
+
143
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
144
+
145
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
146
+
147
+ - **Hardware Type:** [More Information Needed]
148
+ - **Hours used:** [More Information Needed]
149
+ - **Cloud Provider:** [More Information Needed]
150
+ - **Compute Region:** [More Information Needed]
151
+ - **Carbon Emitted:** [More Information Needed]
152
+
153
+ ## Technical Specifications [optional]
154
+
155
+ ### Model Architecture and Objective
156
+
157
+ [More Information Needed]
158
+
159
+ ### Compute Infrastructure
160
+
161
+ [More Information Needed]
162
+
163
+ #### Hardware
164
+
165
+ [More Information Needed]
166
+
167
+ #### Software
168
+
169
+ [More Information Needed]
170
+
171
+ ## Citation [optional]
172
+
173
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
174
+
175
+ **BibTeX:**
176
+
177
+ [More Information Needed]
178
+
179
+ **APA:**
180
+
181
+ [More Information Needed]
182
+
183
+ ## Glossary [optional]
184
+
185
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
186
+
187
+ [More Information Needed]
188
+
189
+ ## More Information [optional]
190
+
191
+ [More Information Needed]
192
+
193
+ ## Model Card Authors [optional]
194
+
195
+ [More Information Needed]
196
+
197
+ ## Model Card Contact
198
+
199
+ [More Information Needed]
200
+
201
+
202
+ ### Framework versions
203
+
204
+ - PEFT 0.9.1.dev0
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/adapter_config.json ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "openthaigpt/openthaigpt-1.0.0-beta-7b-chat-ckpt-hf",
5
+ "bias": "none",
6
+ "fan_in_fan_out": false,
7
+ "inference_mode": true,
8
+ "init_lora_weights": true,
9
+ "layers_pattern": null,
10
+ "layers_to_transform": null,
11
+ "loftq_config": {},
12
+ "lora_alpha": 16,
13
+ "lora_dropout": 0.05,
14
+ "megatron_config": null,
15
+ "megatron_core": "megatron.core",
16
+ "modules_to_save": null,
17
+ "peft_type": "LORA",
18
+ "r": 4,
19
+ "rank_pattern": {},
20
+ "revision": null,
21
+ "target_modules": [
22
+ "v_proj",
23
+ "q_proj"
24
+ ],
25
+ "task_type": "CAUSAL_LM",
26
+ "use_dora": false,
27
+ "use_rslora": false
28
+ }
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e12dc1b704f1337d4c698c639b719225dd5cee62fa4ffa29109e9f0574251490
3
+ size 8405472
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/README.md ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: peft
3
+ base_model: openthaigpt/openthaigpt-1.0.0-beta-7b-chat-ckpt-hf
4
+ ---
5
+
6
+ # Model Card for Model ID
7
+
8
+ <!-- Provide a quick summary of what the model is/does. -->
9
+
10
+
11
+
12
+ ## Model Details
13
+
14
+ ### Model Description
15
+
16
+ <!-- Provide a longer summary of what this model is. -->
17
+
18
+
19
+
20
+ - **Developed by:** [More Information Needed]
21
+ - **Funded by [optional]:** [More Information Needed]
22
+ - **Shared by [optional]:** [More Information Needed]
23
+ - **Model type:** [More Information Needed]
24
+ - **Language(s) (NLP):** [More Information Needed]
25
+ - **License:** [More Information Needed]
26
+ - **Finetuned from model [optional]:** [More Information Needed]
27
+
28
+ ### Model Sources [optional]
29
+
30
+ <!-- Provide the basic links for the model. -->
31
+
32
+ - **Repository:** [More Information Needed]
33
+ - **Paper [optional]:** [More Information Needed]
34
+ - **Demo [optional]:** [More Information Needed]
35
+
36
+ ## Uses
37
+
38
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
39
+
40
+ ### Direct Use
41
+
42
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
43
+
44
+ [More Information Needed]
45
+
46
+ ### Downstream Use [optional]
47
+
48
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
49
+
50
+ [More Information Needed]
51
+
52
+ ### Out-of-Scope Use
53
+
54
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
55
+
56
+ [More Information Needed]
57
+
58
+ ## Bias, Risks, and Limitations
59
+
60
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
61
+
62
+ [More Information Needed]
63
+
64
+ ### Recommendations
65
+
66
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
67
+
68
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
69
+
70
+ ## How to Get Started with the Model
71
+
72
+ Use the code below to get started with the model.
73
+
74
+ [More Information Needed]
75
+
76
+ ## Training Details
77
+
78
+ ### Training Data
79
+
80
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
81
+
82
+ [More Information Needed]
83
+
84
+ ### Training Procedure
85
+
86
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
87
+
88
+ #### Preprocessing [optional]
89
+
90
+ [More Information Needed]
91
+
92
+
93
+ #### Training Hyperparameters
94
+
95
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
96
+
97
+ #### Speeds, Sizes, Times [optional]
98
+
99
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
100
+
101
+ [More Information Needed]
102
+
103
+ ## Evaluation
104
+
105
+ <!-- This section describes the evaluation protocols and provides the results. -->
106
+
107
+ ### Testing Data, Factors & Metrics
108
+
109
+ #### Testing Data
110
+
111
+ <!-- This should link to a Dataset Card if possible. -->
112
+
113
+ [More Information Needed]
114
+
115
+ #### Factors
116
+
117
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
118
+
119
+ [More Information Needed]
120
+
121
+ #### Metrics
122
+
123
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
124
+
125
+ [More Information Needed]
126
+
127
+ ### Results
128
+
129
+ [More Information Needed]
130
+
131
+ #### Summary
132
+
133
+
134
+
135
+ ## Model Examination [optional]
136
+
137
+ <!-- Relevant interpretability work for the model goes here -->
138
+
139
+ [More Information Needed]
140
+
141
+ ## Environmental Impact
142
+
143
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
144
+
145
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
146
+
147
+ - **Hardware Type:** [More Information Needed]
148
+ - **Hours used:** [More Information Needed]
149
+ - **Cloud Provider:** [More Information Needed]
150
+ - **Compute Region:** [More Information Needed]
151
+ - **Carbon Emitted:** [More Information Needed]
152
+
153
+ ## Technical Specifications [optional]
154
+
155
+ ### Model Architecture and Objective
156
+
157
+ [More Information Needed]
158
+
159
+ ### Compute Infrastructure
160
+
161
+ [More Information Needed]
162
+
163
+ #### Hardware
164
+
165
+ [More Information Needed]
166
+
167
+ #### Software
168
+
169
+ [More Information Needed]
170
+
171
+ ## Citation [optional]
172
+
173
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
174
+
175
+ **BibTeX:**
176
+
177
+ [More Information Needed]
178
+
179
+ **APA:**
180
+
181
+ [More Information Needed]
182
+
183
+ ## Glossary [optional]
184
+
185
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
186
+
187
+ [More Information Needed]
188
+
189
+ ## More Information [optional]
190
+
191
+ [More Information Needed]
192
+
193
+ ## Model Card Authors [optional]
194
+
195
+ [More Information Needed]
196
+
197
+ ## Model Card Contact
198
+
199
+ [More Information Needed]
200
+
201
+
202
+ ### Framework versions
203
+
204
+ - PEFT 0.9.1.dev0
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/adapter_config.json ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "openthaigpt/openthaigpt-1.0.0-beta-7b-chat-ckpt-hf",
5
+ "bias": "none",
6
+ "fan_in_fan_out": false,
7
+ "inference_mode": true,
8
+ "init_lora_weights": true,
9
+ "layers_pattern": null,
10
+ "layers_to_transform": null,
11
+ "loftq_config": {},
12
+ "lora_alpha": 16,
13
+ "lora_dropout": 0.05,
14
+ "megatron_config": null,
15
+ "megatron_core": "megatron.core",
16
+ "modules_to_save": null,
17
+ "peft_type": "LORA",
18
+ "r": 4,
19
+ "rank_pattern": {},
20
+ "revision": null,
21
+ "target_modules": [
22
+ "v_proj",
23
+ "q_proj"
24
+ ],
25
+ "task_type": "CAUSAL_LM",
26
+ "use_dora": false,
27
+ "use_rslora": false
28
+ }
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:36a1d70c6f2727b8c46946c44f67e602d8d00f2e764d43c8e6186f9dd251753c
3
+ size 8405472
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bb0c7fadc4081a273a3afe1f017fb88ad7180cca51806ee84a44f71af319519e
3
+ size 16884858
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c94bf09734487eb39bb5c71485a66e3ec0dba7d2bee69647cbca3c76b5779951
3
+ size 14244
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b57c6f46e35e4ea774f0bc1eb319efae302cd9a9eaa31bfacbfcf3e5ae721291
3
+ size 1064
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/trainer_state.json ADDED
@@ -0,0 +1,169 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": 0.3133811950683594,
3
+ "best_model_checkpoint": "./kumpun-output/checkpoint-200",
4
+ "epoch": 1.0582010582010581,
5
+ "eval_steps": 200,
6
+ "global_step": 200,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.05,
13
+ "grad_norm": 5.246432304382324,
14
+ "learning_rate": 7.000000000000001e-05,
15
+ "loss": 4.0922,
16
+ "step": 10
17
+ },
18
+ {
19
+ "epoch": 0.11,
20
+ "grad_norm": 3.076571226119995,
21
+ "learning_rate": 0.00017,
22
+ "loss": 3.5074,
23
+ "step": 20
24
+ },
25
+ {
26
+ "epoch": 0.16,
27
+ "grad_norm": 11.657795906066895,
28
+ "learning_rate": 0.00026000000000000003,
29
+ "loss": 2.2509,
30
+ "step": 30
31
+ },
32
+ {
33
+ "epoch": 0.21,
34
+ "grad_norm": 3.2525696754455566,
35
+ "learning_rate": 0.00034,
36
+ "loss": 0.8157,
37
+ "step": 40
38
+ },
39
+ {
40
+ "epoch": 0.26,
41
+ "grad_norm": 2.755035400390625,
42
+ "learning_rate": 0.00044,
43
+ "loss": 0.5126,
44
+ "step": 50
45
+ },
46
+ {
47
+ "epoch": 0.32,
48
+ "grad_norm": 2.1165263652801514,
49
+ "learning_rate": 0.00054,
50
+ "loss": 0.4988,
51
+ "step": 60
52
+ },
53
+ {
54
+ "epoch": 0.37,
55
+ "grad_norm": 0.805057168006897,
56
+ "learning_rate": 0.00064,
57
+ "loss": 0.2959,
58
+ "step": 70
59
+ },
60
+ {
61
+ "epoch": 0.42,
62
+ "grad_norm": 0.8083710074424744,
63
+ "learning_rate": 0.00074,
64
+ "loss": 0.2779,
65
+ "step": 80
66
+ },
67
+ {
68
+ "epoch": 0.48,
69
+ "grad_norm": 0.7004362344741821,
70
+ "learning_rate": 0.00084,
71
+ "loss": 0.244,
72
+ "step": 90
73
+ },
74
+ {
75
+ "epoch": 0.53,
76
+ "grad_norm": 1.5553646087646484,
77
+ "learning_rate": 0.00094,
78
+ "loss": 0.3729,
79
+ "step": 100
80
+ },
81
+ {
82
+ "epoch": 0.58,
83
+ "grad_norm": 0.8424310684204102,
84
+ "learning_rate": 0.0009914346895074947,
85
+ "loss": 0.3408,
86
+ "step": 110
87
+ },
88
+ {
89
+ "epoch": 0.63,
90
+ "grad_norm": 6.2793192863464355,
91
+ "learning_rate": 0.0009700214132762313,
92
+ "loss": 0.3011,
93
+ "step": 120
94
+ },
95
+ {
96
+ "epoch": 0.69,
97
+ "grad_norm": 0.8749362826347351,
98
+ "learning_rate": 0.0009486081370449678,
99
+ "loss": 0.249,
100
+ "step": 130
101
+ },
102
+ {
103
+ "epoch": 0.74,
104
+ "grad_norm": 0.6772909760475159,
105
+ "learning_rate": 0.0009271948608137045,
106
+ "loss": 0.2258,
107
+ "step": 140
108
+ },
109
+ {
110
+ "epoch": 0.79,
111
+ "grad_norm": 1.4329707622528076,
112
+ "learning_rate": 0.0009057815845824411,
113
+ "loss": 0.2977,
114
+ "step": 150
115
+ },
116
+ {
117
+ "epoch": 0.85,
118
+ "grad_norm": 1.501881718635559,
119
+ "learning_rate": 0.0008843683083511778,
120
+ "loss": 0.2276,
121
+ "step": 160
122
+ },
123
+ {
124
+ "epoch": 0.9,
125
+ "grad_norm": 1.2264193296432495,
126
+ "learning_rate": 0.0008629550321199144,
127
+ "loss": 0.2724,
128
+ "step": 170
129
+ },
130
+ {
131
+ "epoch": 0.95,
132
+ "grad_norm": 0.8799680471420288,
133
+ "learning_rate": 0.0008415417558886511,
134
+ "loss": 0.2358,
135
+ "step": 180
136
+ },
137
+ {
138
+ "epoch": 1.01,
139
+ "grad_norm": 23.913034439086914,
140
+ "learning_rate": 0.0008201284796573875,
141
+ "loss": 0.4151,
142
+ "step": 190
143
+ },
144
+ {
145
+ "epoch": 1.06,
146
+ "grad_norm": 2.3118550777435303,
147
+ "learning_rate": 0.0007987152034261242,
148
+ "loss": 0.4287,
149
+ "step": 200
150
+ },
151
+ {
152
+ "epoch": 1.06,
153
+ "eval_loss": 0.3133811950683594,
154
+ "eval_runtime": 0.6704,
155
+ "eval_samples_per_second": 14.916,
156
+ "eval_steps_per_second": 2.983,
157
+ "step": 200
158
+ }
159
+ ],
160
+ "logging_steps": 10,
161
+ "max_steps": 567,
162
+ "num_input_tokens_seen": 0,
163
+ "num_train_epochs": 3,
164
+ "save_steps": 200,
165
+ "total_flos": 1229703996899328.0,
166
+ "train_batch_size": 1,
167
+ "trial_name": null,
168
+ "trial_params": null
169
+ }
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-200/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e3c8675ea67901e6232ccf2daea1905c7ec23beb0824721b1f43b026d12c244
3
+ size 4856
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/README.md ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: peft
3
+ base_model: openthaigpt/openthaigpt-1.0.0-beta-7b-chat-ckpt-hf
4
+ ---
5
+
6
+ # Model Card for Model ID
7
+
8
+ <!-- Provide a quick summary of what the model is/does. -->
9
+
10
+
11
+
12
+ ## Model Details
13
+
14
+ ### Model Description
15
+
16
+ <!-- Provide a longer summary of what this model is. -->
17
+
18
+
19
+
20
+ - **Developed by:** [More Information Needed]
21
+ - **Funded by [optional]:** [More Information Needed]
22
+ - **Shared by [optional]:** [More Information Needed]
23
+ - **Model type:** [More Information Needed]
24
+ - **Language(s) (NLP):** [More Information Needed]
25
+ - **License:** [More Information Needed]
26
+ - **Finetuned from model [optional]:** [More Information Needed]
27
+
28
+ ### Model Sources [optional]
29
+
30
+ <!-- Provide the basic links for the model. -->
31
+
32
+ - **Repository:** [More Information Needed]
33
+ - **Paper [optional]:** [More Information Needed]
34
+ - **Demo [optional]:** [More Information Needed]
35
+
36
+ ## Uses
37
+
38
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
39
+
40
+ ### Direct Use
41
+
42
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
43
+
44
+ [More Information Needed]
45
+
46
+ ### Downstream Use [optional]
47
+
48
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
49
+
50
+ [More Information Needed]
51
+
52
+ ### Out-of-Scope Use
53
+
54
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
55
+
56
+ [More Information Needed]
57
+
58
+ ## Bias, Risks, and Limitations
59
+
60
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
61
+
62
+ [More Information Needed]
63
+
64
+ ### Recommendations
65
+
66
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
67
+
68
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
69
+
70
+ ## How to Get Started with the Model
71
+
72
+ Use the code below to get started with the model.
73
+
74
+ [More Information Needed]
75
+
76
+ ## Training Details
77
+
78
+ ### Training Data
79
+
80
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
81
+
82
+ [More Information Needed]
83
+
84
+ ### Training Procedure
85
+
86
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
87
+
88
+ #### Preprocessing [optional]
89
+
90
+ [More Information Needed]
91
+
92
+
93
+ #### Training Hyperparameters
94
+
95
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
96
+
97
+ #### Speeds, Sizes, Times [optional]
98
+
99
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
100
+
101
+ [More Information Needed]
102
+
103
+ ## Evaluation
104
+
105
+ <!-- This section describes the evaluation protocols and provides the results. -->
106
+
107
+ ### Testing Data, Factors & Metrics
108
+
109
+ #### Testing Data
110
+
111
+ <!-- This should link to a Dataset Card if possible. -->
112
+
113
+ [More Information Needed]
114
+
115
+ #### Factors
116
+
117
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
118
+
119
+ [More Information Needed]
120
+
121
+ #### Metrics
122
+
123
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
124
+
125
+ [More Information Needed]
126
+
127
+ ### Results
128
+
129
+ [More Information Needed]
130
+
131
+ #### Summary
132
+
133
+
134
+
135
+ ## Model Examination [optional]
136
+
137
+ <!-- Relevant interpretability work for the model goes here -->
138
+
139
+ [More Information Needed]
140
+
141
+ ## Environmental Impact
142
+
143
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
144
+
145
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
146
+
147
+ - **Hardware Type:** [More Information Needed]
148
+ - **Hours used:** [More Information Needed]
149
+ - **Cloud Provider:** [More Information Needed]
150
+ - **Compute Region:** [More Information Needed]
151
+ - **Carbon Emitted:** [More Information Needed]
152
+
153
+ ## Technical Specifications [optional]
154
+
155
+ ### Model Architecture and Objective
156
+
157
+ [More Information Needed]
158
+
159
+ ### Compute Infrastructure
160
+
161
+ [More Information Needed]
162
+
163
+ #### Hardware
164
+
165
+ [More Information Needed]
166
+
167
+ #### Software
168
+
169
+ [More Information Needed]
170
+
171
+ ## Citation [optional]
172
+
173
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
174
+
175
+ **BibTeX:**
176
+
177
+ [More Information Needed]
178
+
179
+ **APA:**
180
+
181
+ [More Information Needed]
182
+
183
+ ## Glossary [optional]
184
+
185
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
186
+
187
+ [More Information Needed]
188
+
189
+ ## More Information [optional]
190
+
191
+ [More Information Needed]
192
+
193
+ ## Model Card Authors [optional]
194
+
195
+ [More Information Needed]
196
+
197
+ ## Model Card Contact
198
+
199
+ [More Information Needed]
200
+
201
+
202
+ ### Framework versions
203
+
204
+ - PEFT 0.9.1.dev0
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/adapter_config.json ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "openthaigpt/openthaigpt-1.0.0-beta-7b-chat-ckpt-hf",
5
+ "bias": "none",
6
+ "fan_in_fan_out": false,
7
+ "inference_mode": true,
8
+ "init_lora_weights": true,
9
+ "layers_pattern": null,
10
+ "layers_to_transform": null,
11
+ "loftq_config": {},
12
+ "lora_alpha": 16,
13
+ "lora_dropout": 0.05,
14
+ "megatron_config": null,
15
+ "megatron_core": "megatron.core",
16
+ "modules_to_save": null,
17
+ "peft_type": "LORA",
18
+ "r": 4,
19
+ "rank_pattern": {},
20
+ "revision": null,
21
+ "target_modules": [
22
+ "v_proj",
23
+ "q_proj"
24
+ ],
25
+ "task_type": "CAUSAL_LM",
26
+ "use_dora": false,
27
+ "use_rslora": false
28
+ }
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e12dc1b704f1337d4c698c639b719225dd5cee62fa4ffa29109e9f0574251490
3
+ size 8405472
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9302d354a9073f83e4d541f25be4e84a485ac75f58f01b555304aecccfad76b0
3
+ size 16884858
openthaigpt_Finetuning/openthaigpt-finetune/kumpun-output/checkpoint-400/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:73a970b0899d02a14b2f88feaa728311b7cffeb814828528697b95d03b5a8e52
3
+ size 14244