qeternity committed on
Commit
d4c00b1
1 Parent(s): 21dd23e

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,69 @@
+ {
+   "_name_or_path": "/root/model",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "compression_config": {
+     "config_groups": {
+       "group_0": {
+         "input_activations": {
+           "actorder": null,
+           "block_structure": null,
+           "dynamic": true,
+           "group_size": null,
+           "num_bits": 8,
+           "observer": "memoryless",
+           "observer_kwargs": {},
+           "strategy": "token",
+           "symmetric": true,
+           "type": "int"
+         },
+         "output_activations": null,
+         "targets": [
+           "Linear"
+         ],
+         "weights": {
+           "actorder": null,
+           "block_structure": null,
+           "dynamic": false,
+           "group_size": null,
+           "num_bits": 8,
+           "observer": "minmax",
+           "observer_kwargs": {},
+           "strategy": "channel",
+           "symmetric": true,
+           "type": "int"
+         }
+       }
+     },
+     "format": "int-quantized",
+     "global_compression_ratio": 1.240802814962739,
+     "ignore": [
+       "lm_head"
+     ],
+     "kv_cache_scheme": null,
+     "quant_method": "compressed-tensors",
+     "quantization_status": "compressed"
+   },
+   "eos_token_id": 2,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 12288,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "model_type": "mistral",
+   "num_attention_heads": 96,
+   "num_hidden_layers": 88,
+   "num_key_value_heads": 8,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.44.2",
+   "use_cache": true,
+   "vocab_size": 32768
+ }
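For orientation, a minimal sketch of how a checkpoint with this compression_config (per-channel int8 weights, dynamic per-token int8 activations, compressed-tensors format) might be served with vLLM, which reads the quantization settings from config.json. The local path and tensor-parallel degree below are assumptions, not part of this commit.

# Hedged sketch, not part of the upload: serve the compressed-tensors W8A8
# checkpoint with vLLM. Path and tensor_parallel_size are placeholders.
from vllm import LLM, SamplingParams

llm = LLM(model="/root/model", tensor_parallel_size=4)  # assumed local path / GPU count
params = SamplingParams(temperature=0.0, max_tokens=64)
out = llm.generate(["Summarize the quantization scheme:"], params)
print(out[0].outputs[0].text)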
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.44.2"
+ }
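These generation defaults can be read back as a transformers GenerationConfig; a small sketch, with a placeholder path standing in for wherever this folder lives locally.

# Sketch only; "<repo_or_local_path>" is a placeholder, not part of this commit.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("<repo_or_local_path>")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 1 and 2, per the file above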
model-00001-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d5285ce84564f010b95883acdd793d91163777923553c36d508fb0512c88416
+ size 4958398024
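Each shard is stored as a Git LFS pointer carrying the blob's sha256 and byte size. A short sketch of verifying a locally downloaded shard against the pointer above; the filename, hash, and size are taken from this commit, and the script assumes the shard has already been materialized locally (e.g. via huggingface_hub).

# Sketch: check a downloaded shard against the LFS pointer's oid/size.
import hashlib
import os

def sha256_of(path, chunk=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

path = "model-00001-of-00026.safetensors"  # assumes the shard is already local
assert os.path.getsize(path) == 4958398024
assert sha256_of(path) == "8d5285ce84564f010b95883acdd793d91163777923553c36d508fb0512c88416"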
model-00002-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e5e07e837f5d1b194df85f0a88506847256df1b90b068cc7437882210f772b4
+ size 4832680360
model-00003-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fc446253fad189cc740057ca8480d58409e8f45627f272dddbc960053e9944a
+ size 4857866184
model-00004-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:385c5c06908464b2d5ef19bad9987926a9881b770a84de69bb82dc2f897b8078
+ size 4832680416
model-00005-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af79eb5fbccf9763d035fc3cf329e58cf193c8b2ec23d78d54bea8e138f7c924
+ size 4857866240
model-00006-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6627471af10fe7d95bb3c72e28bde9b2b7cb0d417b94ef4f753669aef05dd79b
+ size 4832680416
model-00007-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8cc1c46550427c846c74e53c5bc0d3ca551bbb95e157debe805c7fb6e0b3265
+ size 4857866240
model-00008-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e60a2916bdccedd8751ba621d38a38fec2bddc990f28b349ff6f5bdc9f298c57
+ size 4832680416
model-00009-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ce0ef8ca250f0e1cea1bdb0ce8d8097c1c3ff72d157138a3c5730839b910266
+ size 4857866240
model-00010-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e89a0cb2eed1d05348420bfbe28cfc019ea392b8bdc26949cab1682ba85736e7
+ size 4832680416
model-00011-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:71ea1901075ff2ffbeb39bf6e7599a7a257fd5815cfa8b5d711e4d3c1a6d32a6
+ size 4857866240
model-00012-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b41915149f1737ba20c63346eb4dc4006dbec886245e743182555bede5c1c31c
+ size 4832680416
model-00013-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a968e541602a0796fb9f5fd9dd6178fc47107647d6c2e4150caa676e361948b
+ size 4857866240
model-00014-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5a01a7281a993defcb3c6ef19c0e2de2758a3deadf3a4f9db3bd30d5ed98c27d
+ size 4832680416
model-00015-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1345aaa5dd6090129f2a5b4952fdb003416f8b381a1f846af5c48ee85dc0caba
+ size 4857866240
model-00016-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:82d0c7b8c32ed812118df0b97c721a2fed626ba3da8e310680ec7d0387be088b
+ size 4832680416
model-00017-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2da4d9c8263db6f5eceeef734802c2b23ed831eb31b0a80506af9157b13a1ce7
+ size 4857866240
model-00018-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdfc4dcfe55ad041d3c3e4de520ce6fb044bd8015f4bb0f723205dbed014fedc
+ size 4832680416
model-00019-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a9268770516b97aa9fafc70df65f68fa1776e200816f732f7ffeba3751bbc72
+ size 4857866240
model-00020-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a572825f90f1d9156c9275aaf78ac401cd0610562bf3a68ce2204191ce9a6d2
+ size 4832680416
model-00021-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b0b0f4a9fff440397a67ead0e74c01e8fab841a1d736f1c81190afd5d3ac2d2c
+ size 4857866240
model-00022-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccb1f07c8fe4fbbfdd7726ccdfda58590f5de8127f3b9a83e148e47eba7a4304
+ size 4832680416
model-00023-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9cc7eaa48cd5e2f91cf7f417aa17c945c1330a903c9e3cf0b4f6ebc792de62fb
+ size 4857866240
model-00024-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5dbd93c8c9947867e9f9ef9c9ca9b81f1408cbf519d4930889a14b836684d06
+ size 4832680416
model-00025-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4453c4266574536b716f515e6d04b1a945adebedb049bc5eb0ea2fbf18b6386
+ size 4857866240
model-00026-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:268bd49a48c9bb092d05e3c2f61395cad3a780c83dcbf9b6770e6fdfc92e531c
+ size 2189695016
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
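The index file maps each tensor name to one of the 26 shards above. Since its diff is not rendered here, the following is a hedged sketch of inspecting it locally; the "metadata" and "weight_map" keys are the standard sharded-safetensors layout and are assumed rather than shown in this commit.

# Sketch: inspect the sharded-checkpoint index after downloading it.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])             # total checkpoint bytes
print(len(Counter(index["weight_map"].values())))  # number of shards (26 here)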
 
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
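The three tokenizer files follow the standard transformers layout. A brief sketch of loading them and checking the special tokens defined in special_tokens_map.json; the path is a placeholder, not part of this commit.

# Sketch only; "<repo_or_local_path>" is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("<repo_or_local_path>")
print(tok.bos_token, tok.eos_token, tok.unk_token)  # <s> </s> <unk>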