huseinzol05 committed
Commit 447bc02 (parent: 8adeee0)
Upload base-7b-vs-malaysian-llama2-7b.ipynb

base-7b-vs-malaysian-llama2-7b.ipynb: ADDED
In [1]:
    from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
    import torch

    tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-13b-hf')

stderr:
    Loading the tokenizer from the `special_tokens_map.json` and the `added_tokens.json` will be removed in `transformers 5`, it is kept for forward compatibility, but it is recommended to update your `tokenizer_config.json` by uploading it again. You will see the new `added_tokens_decoder` attribute that will store the relevant information.

In [2]:
    nf4_config = BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_quant_type='nf4',
        bnb_4bit_use_double_quant=True,
        bnb_4bit_compute_dtype=torch.bfloat16
    )

In [5]:
    base_model = AutoModelForCausalLM.from_pretrained('meta-llama/Llama-2-7b-hf', quantization_config=nf4_config)

output:
    Downloading (…)lve/main/config.json: 0%| | 0.00/609 [00:00<?, ?B/s]
stdout:
    [2023-09-28 13:24:24,730] [INFO] [real_accelerator.py:158:get_accelerator] Setting ds_accelerator to cuda (auto detect)
stderr:
    2023-09-28 13:24:28.218982: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
    To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
    2023-09-28 13:24:28.911407: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
output:
    Downloading (…)fetensors.index.json: 0%| | 0.00/26.8k [00:00<?, ?B/s]
    Downloading shards: 0%| | 0/2 [00:00<?, ?it/s]
    Downloading (…)of-00002.safetensors: 0%| | 0.00/9.98G [00:00<?, ?B/s]
    Downloading (…)of-00002.safetensors: 0%| | 0.00/3.50G [00:00<?, ?B/s]
    Loading checkpoint shards: 0%| | 0/2 [00:00<?, ?it/s]
    Downloading (…)neration_config.json: 0%| | 0.00/188 [00:00<?, ?B/s]

In [6]:
    fpf_model = AutoModelForCausalLM.from_pretrained('mesolitica/llama-7b-hf-32768-fpf', quantization_config=nf4_config)

output:
    Downloading (…)lve/main/config.json: 0%| | 0.00/628 [00:00<?, ?B/s]
    Downloading (…)fetensors.index.json: 0%| | 0.00/26.8k [00:00<?, ?B/s]
    Downloading shards: 0%| | 0/2 [00:00<?, ?it/s]
    Downloading (…)of-00002.safetensors: 0%| | 0.00/9.98G [00:00<?, ?B/s]
    Downloading (…)of-00002.safetensors: 0%| | 0.00/3.50G [00:00<?, ?B/s]
    Loading checkpoint shards: 0%| | 0/2 [00:00<?, ?it/s]
    Downloading (…)neration_config.json: 0%| | 0.00/183 [00:00<?, ?B/s]

In [7]:
    import time
    from tqdm import tqdm

    kwargs = {
        'temperature': 0.9,
        'max_new_tokens': 256,
        'top_p': 0.95,
        'repetition_penalty': 1.0,
        'do_sample': True,
        'num_beams': 1,
    }

In [8]:
    inputs = tokenizer(['ketiak ak masham'], return_tensors='pt').to('cuda')

In [9]:
    generate_kwargs = dict(inputs)
    generate_kwargs = {**generate_kwargs, **kwargs}

## Base 7B

In [10]:
    o = base_model.generate(**generate_kwargs)

In [11]:
    print(tokenizer.decode(o[0], skip_special_tokens = True).split('[/INST]')[-1].strip())

stdout:
    ketiak ak mashamat 17. броја (1282) - Pismo LXXVII
    Dodatkowe usytuowanie elementów elementów stropowych 6.d. - wykonanie 6.d.
    The aim of this work was to design a new and simple solution for the steel trusses' elements, which could be used in the construction of light-industrial buildings, as well as in the construction of the buildings in general. The goal of the research was to develop a new method of positioning the steel trusses, which would be better adapted to the construction of light-industrial buildings. The design of the new truss, which can be easily placed during the construction process, was prepared in a three-dimensional model, thanks to which the parameters of the truss were determined. As a result, it was found that the trusses have the following parameters: width of the arch: 64 cm and 111 cm, the length of the straight side: 60 cm, 150 cm and 175 cm, the angle of inclination of the arch side: 29, 53 and 68.46 °, the

## Malaysian Llama2 7B 32k

In [12]:
    o = fpf_model.generate(**generate_kwargs)

In [13]:
    print(tokenizer.decode(o[0], skip_special_tokens = True).split('[/INST]')[-1].strip())

stdout:
    ketiak ak masham. itu ak yg masham..hahahahhahahha..tapi sebenarnya kita takut nak duduk sebelah penyangkut baju..hahahahha! ak penakut dik..hehe..xnk duduk dkt2..ak jenis kena dptkan sesuatu tu dulu baru aku join dgn group..hahahaha! Aku kena cepat. Sbb ak jenis nak melompat..haha..kalau jenis yg suka duduk diam2, boleh la duduk kat penyangkut baju tuu. Tp ak jenis dlm drama, ak yg masham. Tp sebenarnya ak yg penakut..haha.. ak boleh plak kta ak penakut..haha..ak ni jenis kuat melompat..haha..so aku jenis takut lah..haha..pastu klu dh join group, ak suka yg ak kena first..hahahahaha..ak x

In [14]:
    inputs = tokenizer(['harga barang kat malaysia ni semakin naik, apa kita nak buat'], return_tensors='pt').to('cuda')
    generate_kwargs = dict(inputs)
    generate_kwargs = {**generate_kwargs, **kwargs}

## Base 7B

In [15]:
    o = base_model.generate(**generate_kwargs)

In [16]:
    print(tokenizer.decode(o[0], skip_special_tokens = True).split('[/INST]')[-1].strip())

stdout:
    harga barang kat malaysia ni semakin naik, apa kita nak buat..
     насељать, сравнить и привести в пример.
    He was appointed a judge in the Supreme Court and later, the Chief Justice of the Supreme Court.
    They are all from outside of the country.
    He was appointed a judge in the Supreme Court and later, the Chief Justice of the Supreme Court.
    It is not the first time that he had resigned in the last 10 years.
    He was appointed a judge in the Supreme Court and later, the Chief Justice of the Supreme Court. It is not the first time that he had resigned in the last 10 years.
    You have to learn to say that this is not the first time that he had resigned in the last 10 years.
    I was appointed a judge in the Supreme Court and later, the Chief Justice of the Supreme Court.
    It is not the first time that I had resigned in the last 10 years.

## Malaysian Llama2 7B 32k

In [21]:
    o = fpf_model.generate(**generate_kwargs)

In [22]:
    print(tokenizer.decode(o[0], skip_special_tokens = True).split('[/INST]')[-1].strip())

stdout:
    harga barang kat malaysia ni semakin naik, apa kita nak buat? mula bisnes dari rumah., jual barang tak banyak pun, cukup buat kita makan. cuma kita kena keluar modal sikit untuk dapatkan bekalan. tapi sekali keluar modal, selepas tu, tak perlu keluar modal lagi. jual kepada rakan2, keluarga, atau jual di rumah sebagai contoh. tak rugi pun, dapat jual dapat untung. syarat kena ada modal la. sbb kita tak boleh nak untung banyak. kita nak jual sikit pun dah cukup, yang penting kita tak perlu bayar utk ambil barang. lagi satu sekali, kita nak keluarkan modal, kita kena tau mana nak beli barang yang kita nak jual tu. jangan beli kat pasaraya, jangan beli kat pembekal yang mahal, beli je kat mana-mana y

Kernel: Python 3 (ipykernel), Python 3.10.12 (nbformat 4.5)
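
The cells above can also be run outside Jupyter. Below is a minimal sketch of the same comparison as a plain script, assuming the notebook's environment (transformers with bitsandbytes and accelerate on a CUDA GPU, plus access to the gated meta-llama checkpoints); the `compare` helper and its final call are illustrative additions, not part of the original notebook.

    # Minimal sketch of the notebook's 4-bit NF4 comparison as a plain script.
    # Assumes transformers + bitsandbytes + accelerate and a CUDA GPU;
    # `compare` is an illustrative helper, not from the notebook.
    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

    # 4-bit NF4 quantization with double quantization, as configured in the notebook.
    nf4_config = BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_quant_type='nf4',
        bnb_4bit_use_double_quant=True,
        bnb_4bit_compute_dtype=torch.bfloat16,
    )

    tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-13b-hf')
    base_model = AutoModelForCausalLM.from_pretrained(
        'meta-llama/Llama-2-7b-hf', quantization_config=nf4_config)
    fpf_model = AutoModelForCausalLM.from_pretrained(
        'mesolitica/llama-7b-hf-32768-fpf', quantization_config=nf4_config)

    # Sampling settings shared by both models, as in the notebook.
    gen_kwargs = dict(temperature=0.9, max_new_tokens=256, top_p=0.95,
                      repetition_penalty=1.0, do_sample=True, num_beams=1)

    def compare(prompt):
        """Sample a continuation of `prompt` from both models and print them."""
        inputs = tokenizer([prompt], return_tensors='pt').to('cuda')
        for name, model in [('Base 7B', base_model),
                            ('Malaysian Llama2 7B 32k', fpf_model)]:
            out = model.generate(**inputs, **gen_kwargs)
            print(f'## {name}')
            print(tokenizer.decode(out[0], skip_special_tokens=True))
            print()

    compare('harga barang kat malaysia ni semakin naik, apa kita nak buat')

Because do_sample=True with temperature 0.9 is used, repeated runs will produce different continuations, so any single pair of outputs is a qualitative comparison rather than a benchmark.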