BahamutRU committed
Commit 074917e
Parent: e71d5c8

Upload 4 files

config.json CHANGED
@@ -23,13 +23,13 @@
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.38.2",
-  "use_cache": true,
+  "use_cache": false,
   "vocab_size": 128256,
   "quantization_config": {
     "quant_method": "exl2",
-    "version": "0.0.20",
+    "version": "0.1.7",
     "bits": 8.0,
-    "head_bits": 6,
+    "head_bits": 8,
     "calibration": {
       "rows": 100,
       "length": 2048,
output.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:58bb7e678a76b433b0f2f9a1f3c8af7fdac3d46eef10e1264d25d1a479be20b9
-size 8391372780
+oid sha256:047b4572c0ae5438b606f68e3db5a06ac1c6db9efb3350ce2211299945838b6d
+size 8519963032
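
The Git LFS pointer identifies the new weight file only by its SHA-256 and byte size. A downloaded output.safetensors can be checked against those two values; a minimal sketch, assuming the file sits in the current directory:

# Minimal sketch: verify a downloaded output.safetensors against the
# oid/size in the LFS pointer above. The local path is an assumption.
import hashlib
import os

EXPECTED_OID = "047b4572c0ae5438b606f68e3db5a06ac1c6db9efb3350ce2211299945838b6d"
EXPECTED_SIZE = 8519963032
PATH = "output.safetensors"

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("output.safetensors matches the LFS pointer")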
special_tokens_map.json CHANGED
@@ -7,7 +7,7 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|end_of_text|>",
+    "content": "<|eot_id|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -2052,11 +2052,12 @@
   "bos_token": "<|begin_of_text|>",
   "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% else %}{{ eos_token }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|end_of_text|>",
+  "eos_token": "<|eot_id|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|end_of_text|>",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
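
Switching eos_token from <|end_of_text|> to <|eot_id|> (in both special_tokens_map.json and tokenizer_config.json) means generation now stops at the end of an assistant turn rendered by the chat template above, and the added pad_token gives batching code a dedicated padding id. A minimal sketch of checking the reloaded tokenizer, where "path/to/this-repo" stands in for a local clone or Hub id of this repository (an assumption, not a value from the diff):

# Minimal sketch: confirm the tokenizer picks up the new eos/pad tokens.
# "path/to/this-repo" is a placeholder, not a value from this commit.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this-repo")

print(tok.eos_token)  # "<|eot_id|>" after this commit
print(tok.pad_token)  # "<|end_of_text|>" (newly added)

# With add_generation_prompt left False, the chat template appends
# eos_token, so a rendered conversation now ends with <|eot_id|>.
messages = [{"role": "user", "content": "Hello!"}]
print(tok.apply_chat_template(messages, tokenize=False))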