Burn Oil committed
Commit b6348ae • 1 parent: 3c72ee4
Upload 45 files
- README.md +39 -1
- config.json +28 -0
- mergekit_config.yml +9 -0
- model-00001-of-00037.safetensors +3 -0
- model-00002-of-00037.safetensors +3 -0
- model-00003-of-00037.safetensors +3 -0
- model-00004-of-00037.safetensors +3 -0
- model-00005-of-00037.safetensors +3 -0
- model-00006-of-00037.safetensors +3 -0
- model-00007-of-00037.safetensors +3 -0
- model-00008-of-00037.safetensors +3 -0
- model-00009-of-00037.safetensors +3 -0
- model-00010-of-00037.safetensors +3 -0
- model-00011-of-00037.safetensors +3 -0
- model-00012-of-00037.safetensors +3 -0
- model-00013-of-00037.safetensors +3 -0
- model-00014-of-00037.safetensors +3 -0
- model-00015-of-00037.safetensors +3 -0
- model-00016-of-00037.safetensors +3 -0
- model-00017-of-00037.safetensors +3 -0
- model-00018-of-00037.safetensors +3 -0
- model-00019-of-00037.safetensors +3 -0
- model-00020-of-00037.safetensors +3 -0
- model-00021-of-00037.safetensors +3 -0
- model-00022-of-00037.safetensors +3 -0
- model-00023-of-00037.safetensors +3 -0
- model-00024-of-00037.safetensors +3 -0
- model-00025-of-00037.safetensors +3 -0
- model-00026-of-00037.safetensors +3 -0
- model-00027-of-00037.safetensors +3 -0
- model-00028-of-00037.safetensors +3 -0
- model-00029-of-00037.safetensors +3 -0
- model-00030-of-00037.safetensors +3 -0
- model-00031-of-00037.safetensors +3 -0
- model-00032-of-00037.safetensors +3 -0
- model-00033-of-00037.safetensors +3 -0
- model-00034-of-00037.safetensors +3 -0
- model-00035-of-00037.safetensors +3 -0
- model-00036-of-00037.safetensors +3 -0
- model-00037-of-00037.safetensors +3 -0
- model.safetensors.index.json +1 -0
- special_tokens_map.json +23 -0
- tokenizer.json +0 -0
- tokenizer.model +3 -0
- tokenizer_config.json +42 -0
README.md
CHANGED
@@ -1,3 +1,41 @@
 ---
-
+base_model:
+- Sao10K/Fimbulvetr-11B-v2
+- Undi95/Mistral-11B-CC-Air-RP
+library_name: transformers
+tags:
+- mergekit
+- merge
+
 ---
+# merge
+
+This is a merge of pre-trained language models created using [mergekit](https://github.com/cg123/mergekit).
+
+## Merge Details
+### Merge Method
+
+This model was merged using the passthrough merge method.
+
+### Models Merged
+
+The following models were included in the merge:
+* [Sao10K/Fimbulvetr-11B-v2](https://huggingface.co/Sao10K/Fimbulvetr-11B-v2)
+* [Undi95/Mistral-11B-CC-Air-RP](https://huggingface.co/Undi95/Mistral-11B-CC-Air-RP)
+
+### Configuration
+
+The following YAML configuration was used to produce this model:
+
+```yaml
+slices:
+- sources:
+  - model: Sao10K/Fimbulvetr-11B-v2
+    layer_range: [0, 40]
+- sources:
+  - model: Undi95/Mistral-11B-CC-Air-RP
+    layer_range: [8, 48]
+merge_method: passthrough
+dtype: bfloat16
+
+```
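The YAML above is self-contained, so the merge can be re-run from this commit alone. A minimal sketch, not the author's exact invocation: it assumes mergekit is installed (`pip install mergekit`) and that its usual `mergekit-yaml` command-line entry point is on PATH; the output directory name is hypothetical.

```python
# Re-run the merge from the committed config (sketch; assumes the
# `mergekit-yaml` CLI provided by `pip install mergekit`).
import subprocess

subprocess.run(
    [
        "mergekit-yaml",        # mergekit's config-driven merge command
        "mergekit_config.yml",  # the YAML committed in this upload
        "./merged-model",       # hypothetical output directory
    ],
    check=True,
)
```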
config.json
ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "Sao10K/Fimbulvetr-11B-v2",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 80,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.40.2",
+  "use_cache": false,
+  "vocab_size": 32000
+}
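config.json declares the merged checkpoint as a plain `LlamaForCausalLM` with 80 hidden layers, so it loads through the standard transformers API like any Llama-family model. A minimal sketch, assuming the uploaded files live in a hypothetical local directory `./merged-model`:

```python
# Load and sample from the merged checkpoint (sketch; the path is hypothetical).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./merged-model")
model = AutoModelForCausalLM.from_pretrained(
    "./merged-model",
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
    device_map="auto",           # spread the ~35 GB of bf16 weights across devices
)

inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```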
mergekit_config.yml
ADDED
@@ -0,0 +1,9 @@
+slices:
+- sources:
+  - model: Sao10K/Fimbulvetr-11B-v2
+    layer_range: [0, 40]
+- sources:
+  - model: Undi95/Mistral-11B-CC-Air-RP
+    layer_range: [8, 48]
+merge_method: passthrough
+dtype: bfloat16
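The passthrough method concatenates the listed slices without averaging any weights, which is where config.json's `num_hidden_layers: 80` comes from: each slice contributes 40 layers, and layer positions 8-39 are represented twice, once from each donor. A quick check of that arithmetic (treating `layer_range` as a half-open span, the only reading consistent with the 80-layer config):

```python
# Confirm the slice math behind num_hidden_layers = 80.
# layer_range is read here as a half-open [start, end) span (assumption).
slices = [
    ("Sao10K/Fimbulvetr-11B-v2", 0, 40),
    ("Undi95/Mistral-11B-CC-Air-RP", 8, 48),
]

for name, start, end in slices:
    print(f"{name}: {end - start} layers ({start}..{end - 1})")

print("total:", sum(end - start for _, start, end in slices))  # -> 80
```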
model-00001-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6137adc53b263e9e25ce1f3525bad88474276b7bb0ec4ca94bbec557cf614b87
+size 960521552

model-00002-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b06b4443c5c168dcd1013509da9f815e1fcb21dc2ecd6b805d3930478b4f3b6
+size 905995864

model-00003-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:009ddbb675e4f51e0befdd97bad247e547c745492745895e2541911522148213
+size 989890728

model-00004-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:051ded11d8a7c6ab36d50a3a358a1123aaebe61e03cabbd0663c02b2e7e00a72
+size 998296072

model-00005-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c54e248705b4bfb857a1073e3870cb33407daed83129d273955010400089e945
+size 981502456

model-00006-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9446d97dd5f0cdbbfaf1e6c43209f3c79fb8cf11fd217b622c75c540155eaca3
+size 922798696

model-00007-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4581c73af7566384a005bde2070c0bf617b3a2597fa84ef6914cc38eac261aab
+size 989890728

model-00008-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f57a50485c4b113f384ab25982c5bd0233e0636f2b8dbc042773e9da1c6a263e
+size 989890728

model-00009-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8bfa1e946e6efedf9c7e7dcccec1832c533c3baae6db343ea7c789919c233afc
+size 989890728

model-00010-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b956e635066b0f5e9c60f3535accbb214e1fc573f6d318b4d50754247fa1f5d
+size 989890728

model-00011-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:122f6d04775c13f16a8ec1197987e377f85404c2d817c3ec134f63402a700993
+size 956353248

model-00012-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66f7820884916329648d63a5ffdc507ef6ff54ac334301a99f7fe7590b16e83c
+size 989890728

model-00013-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:055c44bd135872426121be9133aa3b3cf764927a0cb6bad0e07f2d35e8e2412f
+size 989890728

model-00014-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48cd9524f6b8a039ada6523578f60751db0633d1544bc27b871118b9ca954035
+size 989890728

model-00015-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17f5a7cf1fed8f57bc84fbc73589c410260b2cf83b328b8d7c91b2caa32069d7
+size 989890728

model-00016-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5d61d29aa0d615e5a4242377c712e24d8fd26484cb8cbea34e17cc3f278cd450
+size 998296072

model-00017-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d16b852805efcdab8296413714ca932e67403087bd65a60ea4c5d37d96b4f76c
+size 981502456

model-00018-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0972143120c809adf6c80a0f03d8fe72016c38291a9ed1734083d93b78fb6213
+size 922798696

model-00019-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:031ac15376d86e429851c76abdbfd5b5fa865be10259154dbb6770cb2a806681
+size 989890728

model-00020-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6ca7dea6447a37e2aaa0536b3f0e7d5d83afdb858c329e26493c2453a9bab61
+size 989882424

model-00021-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:20774fe3f54a47deb047a0428aa6067ceaedea31a2e012844a72315fd8b288ee
+size 989899032

model-00022-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7d2a53d79deecb0ed605182d061aae56d0e87d556d4d5b98e4d2d04500234f28
+size 956353240

model-00023-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8c2364a8ab1cbb866ebb02ac78e0f157110e5ce8a68e1a0be32a3595ad344c24
+size 989890728

model-00024-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:608a5261f5dcf7858c9e2fb67bc214dad21d0003f5440f6a2940c8b0929665b9
+size 989890728

model-00025-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e19eea0ccd4943ebe4aad29b2d3110f77877edf7303ff880f07852beaada29b4
+size 989890728

model-00026-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc621e9f1186ab53e1de54ed19e6b3b9817994f612fc458ac9ec9541cc3cad11
+size 989890728

model-00027-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f863523754d367b8df1501d6b02398cb2f46e9c43bf78d9cd2927055eae6b4fa
+size 989890728

model-00028-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:35ccd21fa029065273a68de14aa3006c846634e64ff1fa5b9e738f123ed0bbe6
+size 998296072

model-00029-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ca7f80722a7b96bef8e2e9c7fbd72dce6b4a93fd442540849e3692cb79860834
+size 981502456

model-00030-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8dbc5cfeb436bbc2c6fa1f7c4c559955aeeef0ed889af8f6d69f080e85733523
+size 922790384

model-00031-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:070ad600bec8ad780e9c045002e969016eec9dbcfea1fcc6f4a0d2189b5d3664
+size 989890728

model-00032-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea2430603e5a13e27d04ba84130d63c9b25f09f1c49f8d4a82510547dffb837a
+size 989890728

model-00033-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8426ffc0a26e7d15921b25174708b3a2ab2f7f4bf69fb35fffdfb8131d0f25a4
+size 998287760

model-00034-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:584212d848efa531de74af121ba720a75a399d37b1b903282aa4e0add9c986aa
+size 947956192

model-00035-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1e44aacf4e6c91c47d9dec1554c5a31d826e888cef921542542062bf005a719
+size 905995856

model-00036-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:deeb1efd14bc344d258e0c98e287d8a6c9db5203d229b65e2b3b23e59ee1d21b
+size 989890720

model-00037-of-00037.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e12404373abf67c29a0906369d08cc14514920286f6454ba01d98d76b2b30126
+size 285238624
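Each `.safetensors` entry above is a Git LFS pointer, not the tensor data itself: three text lines giving the LFS spec version, the sha256 of the actual blob, and its size in bytes. A minimal sketch of reading those fields back from a checkout where the blobs have not yet been pulled:

```python
# Parse the 3-line Git LFS pointer stubs shown in this commit.
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict[str, str]:
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value  # keys: "version", "oid", "size"
    return fields

info = parse_lfs_pointer("model-00001-of-00037.safetensors")
print(info["oid"])        # sha256:6137adc5... (first shard)
print(int(info["size"]))  # 960521552 bytes
```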
model.safetensors.index.json
ADDED
@@ -0,0 +1 @@
{"metadata": {"mergekit_version": "0.0.4.2", "total_size": 35422216192}, "weight_map": {"lm_head.weight": "model-00001-of-00037.safetensors", "model.embed_tokens.weight": "model-00001-of-00037.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00037.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00037.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00037.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00001-of-00037.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00037.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00037.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00037.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00037.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00037.safetensors", "model.layers.1.input_layernorm.weight": "model-00001-of-00037.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00002-of-00037.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00037.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00002-of-00037.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00037.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00037.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00037.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00037.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00037.safetensors", "model.layers.42.input_layernorm.weight": "model-00002-of-00037.safetensors", "model.layers.10.input_layernorm.weight": "model-00002-of-00037.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00002-of-00037.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00002-of-00037.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00002-of-00037.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00037.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00003-of-00037.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00003-of-00037.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00003-of-00037.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00037.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00003-of-00037.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00037.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00003-of-00037.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00037.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00003-of-00037.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00037.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00003-of-00037.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00037.safetensors", "model.layers.43.input_layernorm.weight": "model-00003-of-00037.safetensors", "model.layers.11.input_layernorm.weight": "model-00003-of-00037.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00003-of-00037.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00003-of-00037.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00003-of-00037.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00037.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00003-of-00037.safetensors", 
"model.layers.11.mlp.up_proj.weight": "model-00004-of-00037.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00004-of-00037.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00004-of-00037.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00004-of-00037.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00004-of-00037.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00004-of-00037.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00004-of-00037.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00004-of-00037.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00004-of-00037.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00004-of-00037.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00004-of-00037.safetensors", "model.layers.44.input_layernorm.weight": "model-00004-of-00037.safetensors", "model.layers.12.input_layernorm.weight": "model-00004-of-00037.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00004-of-00037.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00004-of-00037.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00004-of-00037.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00004-of-00037.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00004-of-00037.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00004-of-00037.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00004-of-00037.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00004-of-00037.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00004-of-00037.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00005-of-00037.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00005-of-00037.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00005-of-00037.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00005-of-00037.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00005-of-00037.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00005-of-00037.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00037.safetensors", "model.layers.45.input_layernorm.weight": "model-00005-of-00037.safetensors", "model.layers.13.input_layernorm.weight": "model-00005-of-00037.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00005-of-00037.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00005-of-00037.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00005-of-00037.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00005-of-00037.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00005-of-00037.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00005-of-00037.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00005-of-00037.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00005-of-00037.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00005-of-00037.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00005-of-00037.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00005-of-00037.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00005-of-00037.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00005-of-00037.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00006-of-00037.safetensors", 
"model.layers.45.self_attn.v_proj.weight": "model-00006-of-00037.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00006-of-00037.safetensors", "model.layers.46.input_layernorm.weight": "model-00006-of-00037.safetensors", "model.layers.14.input_layernorm.weight": "model-00006-of-00037.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00006-of-00037.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00006-of-00037.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00006-of-00037.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00006-of-00037.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00006-of-00037.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00006-of-00037.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00006-of-00037.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00006-of-00037.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00006-of-00037.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00006-of-00037.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00006-of-00037.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00006-of-00037.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00006-of-00037.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00006-of-00037.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00006-of-00037.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00006-of-00037.safetensors", "model.layers.47.input_layernorm.weight": "model-00006-of-00037.safetensors", "model.layers.15.input_layernorm.weight": "model-00006-of-00037.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00007-of-00037.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00007-of-00037.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00007-of-00037.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00007-of-00037.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00007-of-00037.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00007-of-00037.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00007-of-00037.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00007-of-00037.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00007-of-00037.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00007-of-00037.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00007-of-00037.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00007-of-00037.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00007-of-00037.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00007-of-00037.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00007-of-00037.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00007-of-00037.safetensors", "model.layers.48.input_layernorm.weight": "model-00007-of-00037.safetensors", "model.layers.16.input_layernorm.weight": "model-00007-of-00037.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00007-of-00037.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00008-of-00037.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00008-of-00037.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00008-of-00037.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00008-of-00037.safetensors", 
"model.layers.16.mlp.up_proj.weight": "model-00008-of-00037.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00008-of-00037.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00008-of-00037.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00008-of-00037.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00008-of-00037.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00008-of-00037.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00008-of-00037.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00008-of-00037.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00008-of-00037.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00008-of-00037.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00008-of-00037.safetensors", "model.layers.49.input_layernorm.weight": "model-00008-of-00037.safetensors", "model.layers.17.input_layernorm.weight": "model-00008-of-00037.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00008-of-00037.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00008-of-00037.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00009-of-00037.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00009-of-00037.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00009-of-00037.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00009-of-00037.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00009-of-00037.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00009-of-00037.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00009-of-00037.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00009-of-00037.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00009-of-00037.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00009-of-00037.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00009-of-00037.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00009-of-00037.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00009-of-00037.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00009-of-00037.safetensors", "model.layers.50.input_layernorm.weight": "model-00009-of-00037.safetensors", "model.layers.18.input_layernorm.weight": "model-00009-of-00037.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00009-of-00037.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00009-of-00037.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00009-of-00037.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00010-of-00037.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00010-of-00037.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00010-of-00037.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00010-of-00037.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00010-of-00037.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00010-of-00037.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00010-of-00037.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00010-of-00037.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00010-of-00037.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00010-of-00037.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00010-of-00037.safetensors", 
"model.layers.50.self_attn.v_proj.weight": "model-00010-of-00037.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00010-of-00037.safetensors", "model.layers.51.input_layernorm.weight": "model-00010-of-00037.safetensors", "model.layers.19.input_layernorm.weight": "model-00010-of-00037.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00010-of-00037.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00010-of-00037.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00010-of-00037.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00010-of-00037.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00011-of-00037.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00011-of-00037.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00011-of-00037.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00011-of-00037.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00011-of-00037.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00011-of-00037.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00011-of-00037.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00011-of-00037.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00011-of-00037.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00011-of-00037.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00011-of-00037.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00011-of-00037.safetensors", "model.layers.2.input_layernorm.weight": "model-00011-of-00037.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00011-of-00037.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00011-of-00037.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00011-of-00037.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00011-of-00037.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00011-of-00037.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00011-of-00037.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00011-of-00037.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00011-of-00037.safetensors", "model.layers.52.input_layernorm.weight": "model-00011-of-00037.safetensors", "model.layers.20.input_layernorm.weight": "model-00011-of-00037.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00011-of-00037.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00012-of-00037.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00012-of-00037.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00012-of-00037.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00012-of-00037.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00012-of-00037.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00012-of-00037.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00012-of-00037.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00012-of-00037.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00012-of-00037.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00012-of-00037.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00012-of-00037.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00012-of-00037.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00012-of-00037.safetensors", 
"model.layers.52.self_attn.v_proj.weight": "model-00012-of-00037.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00012-of-00037.safetensors", "model.layers.53.input_layernorm.weight": "model-00012-of-00037.safetensors", "model.layers.21.input_layernorm.weight": "model-00012-of-00037.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00012-of-00037.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00012-of-00037.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00013-of-00037.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00013-of-00037.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00013-of-00037.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00013-of-00037.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00013-of-00037.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00013-of-00037.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00013-of-00037.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00013-of-00037.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00013-of-00037.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00013-of-00037.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00013-of-00037.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00013-of-00037.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00013-of-00037.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00013-of-00037.safetensors", "model.layers.54.input_layernorm.weight": "model-00013-of-00037.safetensors", "model.layers.22.input_layernorm.weight": "model-00013-of-00037.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00013-of-00037.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00013-of-00037.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00013-of-00037.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00014-of-00037.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00014-of-00037.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00014-of-00037.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00014-of-00037.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00014-of-00037.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00014-of-00037.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00014-of-00037.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00014-of-00037.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00014-of-00037.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00014-of-00037.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00014-of-00037.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00014-of-00037.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00014-of-00037.safetensors", "model.layers.55.input_layernorm.weight": "model-00014-of-00037.safetensors", "model.layers.23.input_layernorm.weight": "model-00014-of-00037.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00014-of-00037.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00014-of-00037.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00014-of-00037.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00014-of-00037.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00015-of-00037.safetensors", 
"model.layers.23.mlp.up_proj.weight": "model-00015-of-00037.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00015-of-00037.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00015-of-00037.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00015-of-00037.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00015-of-00037.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00015-of-00037.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00015-of-00037.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00015-of-00037.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00015-of-00037.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00015-of-00037.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00015-of-00037.safetensors", "model.layers.56.input_layernorm.weight": "model-00015-of-00037.safetensors", "model.layers.24.input_layernorm.weight": "model-00015-of-00037.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00015-of-00037.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00015-of-00037.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00015-of-00037.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00015-of-00037.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00015-of-00037.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00016-of-00037.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00016-of-00037.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00016-of-00037.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00016-of-00037.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00016-of-00037.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00016-of-00037.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00016-of-00037.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00016-of-00037.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00016-of-00037.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00016-of-00037.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00016-of-00037.safetensors", "model.layers.57.input_layernorm.weight": "model-00016-of-00037.safetensors", "model.layers.25.input_layernorm.weight": "model-00016-of-00037.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00016-of-00037.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00016-of-00037.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00016-of-00037.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00016-of-00037.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00016-of-00037.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00016-of-00037.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00016-of-00037.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00016-of-00037.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00016-of-00037.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00017-of-00037.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00017-of-00037.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00017-of-00037.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00017-of-00037.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00017-of-00037.safetensors", 
"model.layers.57.self_attn.v_proj.weight": "model-00017-of-00037.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00017-of-00037.safetensors", "model.layers.58.input_layernorm.weight": "model-00017-of-00037.safetensors", "model.layers.26.input_layernorm.weight": "model-00017-of-00037.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00017-of-00037.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00017-of-00037.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00017-of-00037.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00017-of-00037.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00017-of-00037.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00017-of-00037.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00017-of-00037.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00017-of-00037.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00017-of-00037.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00017-of-00037.safetensors", "model.layers.58.self_attn.o_proj.weight": "model-00017-of-00037.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00017-of-00037.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00017-of-00037.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00018-of-00037.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00018-of-00037.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00018-of-00037.safetensors", "model.layers.59.input_layernorm.weight": "model-00018-of-00037.safetensors", "model.layers.27.input_layernorm.weight": "model-00018-of-00037.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00018-of-00037.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00018-of-00037.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00018-of-00037.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00018-of-00037.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00018-of-00037.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00018-of-00037.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00018-of-00037.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00018-of-00037.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00018-of-00037.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00018-of-00037.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00018-of-00037.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00018-of-00037.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00018-of-00037.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00018-of-00037.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00018-of-00037.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00018-of-00037.safetensors", "model.layers.60.input_layernorm.weight": "model-00018-of-00037.safetensors", "model.layers.28.input_layernorm.weight": "model-00018-of-00037.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00019-of-00037.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00019-of-00037.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00019-of-00037.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00019-of-00037.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00019-of-00037.safetensors", 
"model.layers.28.mlp.up_proj.weight": "model-00019-of-00037.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00019-of-00037.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00019-of-00037.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00019-of-00037.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00019-of-00037.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00019-of-00037.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00019-of-00037.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00019-of-00037.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00019-of-00037.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00019-of-00037.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00019-of-00037.safetensors", "model.layers.61.input_layernorm.weight": "model-00019-of-00037.safetensors", "model.layers.29.input_layernorm.weight": "model-00019-of-00037.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00019-of-00037.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00020-of-00037.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00020-of-00037.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00020-of-00037.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00020-of-00037.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00020-of-00037.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00020-of-00037.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00020-of-00037.safetensors", "model.layers.61.self_attn.k_proj.weight": "model-00020-of-00037.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00020-of-00037.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00020-of-00037.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00020-of-00037.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00020-of-00037.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00020-of-00037.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00020-of-00037.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00020-of-00037.safetensors", "model.layers.3.input_layernorm.weight": "model-00020-of-00037.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00020-of-00037.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00020-of-00037.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00021-of-00037.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00021-of-00037.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00021-of-00037.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00021-of-00037.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00021-of-00037.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00021-of-00037.safetensors", "model.layers.62.input_layernorm.weight": "model-00021-of-00037.safetensors", "model.layers.30.input_layernorm.weight": "model-00021-of-00037.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00021-of-00037.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00021-of-00037.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00021-of-00037.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00021-of-00037.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00021-of-00037.safetensors", 
"model.layers.30.mlp.up_proj.weight": "model-00021-of-00037.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00021-of-00037.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00021-of-00037.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00021-of-00037.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00021-of-00037.safetensors", "model.layers.62.self_attn.o_proj.weight": "model-00021-of-00037.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00021-of-00037.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00022-of-00037.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00022-of-00037.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00022-of-00037.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00022-of-00037.safetensors", "model.layers.63.input_layernorm.weight": "model-00022-of-00037.safetensors", "model.layers.31.input_layernorm.weight": "model-00022-of-00037.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00022-of-00037.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00022-of-00037.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00022-of-00037.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00022-of-00037.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00022-of-00037.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00022-of-00037.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00022-of-00037.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00022-of-00037.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00022-of-00037.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00022-of-00037.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00022-of-00037.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00022-of-00037.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00022-of-00037.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00022-of-00037.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00022-of-00037.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00022-of-00037.safetensors", "model.layers.64.input_layernorm.weight": "model-00022-of-00037.safetensors", "model.layers.32.input_layernorm.weight": "model-00022-of-00037.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00023-of-00037.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00023-of-00037.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00023-of-00037.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00023-of-00037.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00023-of-00037.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00023-of-00037.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00023-of-00037.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00023-of-00037.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00023-of-00037.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00023-of-00037.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00023-of-00037.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00023-of-00037.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00023-of-00037.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00023-of-00037.safetensors", 
"model.layers.64.self_attn.v_proj.weight": "model-00023-of-00037.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00023-of-00037.safetensors", "model.layers.65.input_layernorm.weight": "model-00023-of-00037.safetensors", "model.layers.33.input_layernorm.weight": "model-00023-of-00037.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00023-of-00037.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00024-of-00037.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00024-of-00037.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00024-of-00037.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00024-of-00037.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00024-of-00037.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00024-of-00037.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00024-of-00037.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00024-of-00037.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00024-of-00037.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00024-of-00037.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00024-of-00037.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00024-of-00037.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00024-of-00037.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00024-of-00037.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00024-of-00037.safetensors", "model.layers.66.input_layernorm.weight": "model-00024-of-00037.safetensors", "model.layers.34.input_layernorm.weight": "model-00024-of-00037.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00024-of-00037.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00024-of-00037.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00025-of-00037.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00025-of-00037.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00025-of-00037.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00025-of-00037.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00025-of-00037.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00025-of-00037.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00025-of-00037.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00025-of-00037.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00025-of-00037.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00025-of-00037.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00025-of-00037.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00025-of-00037.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00025-of-00037.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00025-of-00037.safetensors", "model.layers.67.input_layernorm.weight": "model-00025-of-00037.safetensors", "model.layers.35.input_layernorm.weight": "model-00025-of-00037.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00025-of-00037.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00025-of-00037.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00025-of-00037.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00026-of-00037.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00026-of-00037.safetensors", 
"model.layers.35.mlp.up_proj.weight": "model-00026-of-00037.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00026-of-00037.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00026-of-00037.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00026-of-00037.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00026-of-00037.safetensors", "model.layers.67.self_attn.o_proj.weight": "model-00026-of-00037.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00026-of-00037.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00026-of-00037.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00026-of-00037.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00026-of-00037.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00026-of-00037.safetensors", "model.layers.68.input_layernorm.weight": "model-00026-of-00037.safetensors", "model.layers.36.input_layernorm.weight": "model-00026-of-00037.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00026-of-00037.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00026-of-00037.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00026-of-00037.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00026-of-00037.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00027-of-00037.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00027-of-00037.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00027-of-00037.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00027-of-00037.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00027-of-00037.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00027-of-00037.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00027-of-00037.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00027-of-00037.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00027-of-00037.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00027-of-00037.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00027-of-00037.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00027-of-00037.safetensors", "model.layers.69.input_layernorm.weight": "model-00027-of-00037.safetensors", "model.layers.37.input_layernorm.weight": "model-00027-of-00037.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00027-of-00037.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00027-of-00037.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00027-of-00037.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00027-of-00037.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00027-of-00037.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00028-of-00037.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00028-of-00037.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00028-of-00037.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00028-of-00037.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00028-of-00037.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00028-of-00037.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00028-of-00037.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00028-of-00037.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00028-of-00037.safetensors", 
"model.layers.69.self_attn.v_proj.weight": "model-00028-of-00037.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00028-of-00037.safetensors", "model.layers.70.input_layernorm.weight": "model-00028-of-00037.safetensors", "model.layers.38.input_layernorm.weight": "model-00028-of-00037.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00028-of-00037.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00028-of-00037.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00028-of-00037.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00028-of-00037.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00028-of-00037.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00028-of-00037.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00028-of-00037.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00028-of-00037.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00028-of-00037.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00029-of-00037.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00029-of-00037.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00029-of-00037.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00029-of-00037.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00029-of-00037.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00029-of-00037.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00029-of-00037.safetensors", "model.layers.71.input_layernorm.weight": "model-00029-of-00037.safetensors", "model.layers.39.input_layernorm.weight": "model-00029-of-00037.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00029-of-00037.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00029-of-00037.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00029-of-00037.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00029-of-00037.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00029-of-00037.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00029-of-00037.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00029-of-00037.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00029-of-00037.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00029-of-00037.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00029-of-00037.safetensors", "model.layers.71.self_attn.o_proj.weight": "model-00029-of-00037.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00029-of-00037.safetensors", "model.layers.71.self_attn.q_proj.weight": "model-00029-of-00037.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00030-of-00037.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00030-of-00037.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00030-of-00037.safetensors", "model.layers.4.input_layernorm.weight": "model-00030-of-00037.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00030-of-00037.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00030-of-00037.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00030-of-00037.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00030-of-00037.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00030-of-00037.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00030-of-00037.safetensors", 
"model.layers.4.self_attn.q_proj.weight": "model-00030-of-00037.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00030-of-00037.safetensors", "model.layers.72.input_layernorm.weight": "model-00030-of-00037.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00030-of-00037.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00030-of-00037.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00030-of-00037.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00030-of-00037.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00030-of-00037.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00030-of-00037.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00030-of-00037.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00030-of-00037.safetensors", "model.layers.73.input_layernorm.weight": "model-00030-of-00037.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00031-of-00037.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00031-of-00037.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00031-of-00037.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00031-of-00037.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00031-of-00037.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00031-of-00037.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00031-of-00037.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00031-of-00037.safetensors", "model.layers.74.input_layernorm.weight": "model-00031-of-00037.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00031-of-00037.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00031-of-00037.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00031-of-00037.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00031-of-00037.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00031-of-00037.safetensors", "model.layers.74.self_attn.o_proj.weight": "model-00031-of-00037.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00031-of-00037.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00031-of-00037.safetensors", "model.layers.75.input_layernorm.weight": "model-00031-of-00037.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00031-of-00037.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00032-of-00037.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00032-of-00037.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00032-of-00037.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00032-of-00037.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00032-of-00037.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00032-of-00037.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00032-of-00037.safetensors", "model.layers.76.input_layernorm.weight": "model-00032-of-00037.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00032-of-00037.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00032-of-00037.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00032-of-00037.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00032-of-00037.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00032-of-00037.safetensors", "model.layers.76.self_attn.o_proj.weight": "model-00032-of-00037.safetensors", 
"model.layers.76.self_attn.q_proj.weight": "model-00032-of-00037.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00032-of-00037.safetensors", "model.layers.77.input_layernorm.weight": "model-00032-of-00037.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00032-of-00037.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00032-of-00037.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00033-of-00037.safetensors", "model.layers.77.post_attention_layernorm.weight": "model-00033-of-00037.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00033-of-00037.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00033-of-00037.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00033-of-00037.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00033-of-00037.safetensors", "model.layers.78.input_layernorm.weight": "model-00033-of-00037.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00033-of-00037.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00033-of-00037.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00033-of-00037.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00033-of-00037.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00033-of-00037.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00033-of-00037.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00033-of-00037.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00033-of-00037.safetensors", "model.layers.79.input_layernorm.weight": "model-00033-of-00037.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00033-of-00037.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00033-of-00037.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00033-of-00037.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00033-of-00037.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00033-of-00037.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00034-of-00037.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00034-of-00037.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00034-of-00037.safetensors", "model.layers.5.input_layernorm.weight": "model-00034-of-00037.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00034-of-00037.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00034-of-00037.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00034-of-00037.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00034-of-00037.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00034-of-00037.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00034-of-00037.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00034-of-00037.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00034-of-00037.safetensors", "model.layers.6.input_layernorm.weight": "model-00034-of-00037.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00034-of-00037.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00034-of-00037.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00034-of-00037.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00034-of-00037.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00034-of-00037.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00034-of-00037.safetensors", 
"model.layers.6.self_attn.q_proj.weight": "model-00034-of-00037.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00034-of-00037.safetensors", "model.layers.7.input_layernorm.weight": "model-00034-of-00037.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00035-of-00037.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00035-of-00037.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00035-of-00037.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00035-of-00037.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00035-of-00037.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00035-of-00037.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00035-of-00037.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00035-of-00037.safetensors", "model.layers.40.input_layernorm.weight": "model-00035-of-00037.safetensors", "model.layers.8.input_layernorm.weight": "model-00035-of-00037.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00035-of-00037.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00035-of-00037.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00035-of-00037.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00035-of-00037.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00036-of-00037.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00036-of-00037.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00036-of-00037.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00036-of-00037.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00036-of-00037.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00036-of-00037.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00036-of-00037.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00036-of-00037.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00036-of-00037.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00036-of-00037.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00036-of-00037.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00036-of-00037.safetensors", "model.layers.41.input_layernorm.weight": "model-00036-of-00037.safetensors", "model.layers.9.input_layernorm.weight": "model-00036-of-00037.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00036-of-00037.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00036-of-00037.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00036-of-00037.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00036-of-00037.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00036-of-00037.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00037-of-00037.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00037-of-00037.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00037-of-00037.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00037-of-00037.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00037-of-00037.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00037-of-00037.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00037-of-00037.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00037-of-00037.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00037-of-00037.safetensors", "model.layers.41.self_attn.v_proj.weight": 
"model-00037-of-00037.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00037-of-00037.safetensors", "model.norm.weight": "model-00037-of-00037.safetensors"}}
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
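The three definitions above surface directly as attributes of the loaded tokenizer; a quick sanity check (sketch, same illustrative path as above):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./merged-model")  # illustrative path

# The map above should round-trip through the tokenizer object.
assert tok.bos_token == "<s>"
assert tok.eos_token == "</s>"
assert tok.unk_token == "<unk>"
```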
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
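This is a Git LFS pointer rather than the file itself: the actual SentencePiece model is stored out-of-band and identified by the SHA-256 in the `oid` line. A sketch of verifying a downloaded copy against the pointer (path illustrative):

```python
import hashlib
import os

path = "./merged-model/tokenizer.model"  # illustrative local path

EXPECTED_OID = "dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055"
EXPECTED_SIZE = 493443

# Hash the file in chunks so large LFS objects don't need to fit in memory.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE
assert digest.hexdigest() == EXPECTED_OID
```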
tokenizer_config.json
ADDED
@@ -0,0 +1,42 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [],
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": true,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": null,
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": true
+}
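Two settings here shape every encoded sequence: `add_bos_token: true` prepends `<s>` (id 1), and `add_eos_token: false` leaves `</s>` (id 2) off the end, which matters when assembling training or generation inputs. A minimal illustration (path illustrative):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./merged-model")  # illustrative path

ids = tok("Hello world").input_ids
assert ids[0] == tok.bos_token_id    # <s> prepended: add_bos_token is true
assert ids[-1] != tok.eos_token_id   # </s> not appended: add_eos_token is false
```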