TinyLlama-1.1B-Chat-v0.4 / special_tokens_map.json
{
"additional_special_tokens": [
"<|im_start|>",
"<|im_end|>"
],
"bos_token": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"eos_token": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": "[PAD]",
"unk_token": {
"content": "<unk>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}
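
For context, a minimal sketch of how this file is consumed: when the tokenizer is loaded with the transformers library, the entries above surface as attributes on the tokenizer object. The repo id TinyLlama/TinyLlama-1.1B-Chat-v0.4 is an assumption inferred from the file path above; substitute a local clone path if working offline. The <|im_start|> and <|im_end|> entries are ChatML-style chat markers registered as additional special tokens so the tokenizer never splits them.

# Minimal sketch: transformers reads special_tokens_map.json (alongside the
# other tokenizer files in the repo) and exposes its entries as attributes.
# The repo id below is an assumption; adjust to a local path if needed.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("TinyLlama/TinyLlama-1.1B-Chat-v0.4")

print(tokenizer.bos_token)                  # <s>
print(tokenizer.eos_token)                  # </s>
print(tokenizer.pad_token)                  # [PAD]
print(tokenizer.unk_token)                  # <unk>
print(tokenizer.additional_special_tokens)  # ['<|im_start|>', '<|im_end|>']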