finegptproject committed on
Commit ad383ef
1 Parent(s): 5bbb786

Delete config.json

Files changed (1)
  1. config.json +0 -16
config.json DELETED
@@ -1,16 +0,0 @@
- {
-   "model_type": "llama",
-   "architectures": ["LlamaForCausalLM"],
-   "hidden_size": 2048,
-   "num_attention_heads": 32,
-   "num_hidden_layers": 22,
-   "intermediate_size": 5632,
-   "max_position_embeddings": 2048,
-   "hidden_act": "silu",
-   "initializer_range": 0.02,
-   "bos_token_id": 1,
-   "eos_token_id": 2,
-   "pad_token_id": 0,
-   "vocab_size": 32000,
-   "torch_dtype": "bfloat16"
- }
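
For reference, the deleted file described a small Llama-architecture model configuration. If it ever needed to be restored, a minimal sketch of how an equivalent config.json could be regenerated with the transformers library is shown below (the output directory "restored_model" is a hypothetical placeholder, not a path from this repository):

# Sketch: rebuilding the deleted config.json with transformers.
# Assumes the transformers package is installed; "restored_model" is a placeholder path.
from transformers import LlamaConfig

config = LlamaConfig(
    vocab_size=32000,
    hidden_size=2048,
    intermediate_size=5632,
    num_hidden_layers=22,
    num_attention_heads=32,
    max_position_embeddings=2048,
    hidden_act="silu",
    initializer_range=0.02,
    bos_token_id=1,
    eos_token_id=2,
    pad_token_id=0,
    architectures=["LlamaForCausalLM"],
    torch_dtype="bfloat16",
)

# save_pretrained writes config.json into the given directory;
# "model_type": "llama" is implied by LlamaConfig itself.
config.save_pretrained("restored_model")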