gpt-pretrain-lm-gp / config.json
{
  "architectures": [
    "GPTModel"
  ],
  "dropout": 0.1,
  "head_type": "lm",
  "is_causal": true,
  "model_type": "gptmodel",
  "n_embd": 384,
  "n_head": 6,
  "n_labels": 18,
  "n_layer": 6,
  "n_positions": 1024,
  "n_static": 2,
  "position_embedding": "relative_key",
  "torch_dtype": "float32",
  "transformers_version": "4.39.3",
  "vocab_size": 3501
}
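
A minimal sketch of loading this config with the transformers library. The repo id "adammoss/gpt-pretrain-lm-gp" is an assumption inferred from the page path, and since "gptmodel" / "GPTModel" is a custom architecture rather than a built-in transformers model, trust_remote_code=True is assumed to be needed so the custom config class can be resolved.

from transformers import AutoConfig

# Assumed repo id, inferred from the page path; adjust if the repo lives elsewhere.
# trust_remote_code=True lets transformers load the custom "gptmodel" config class.
config = AutoConfig.from_pretrained(
    "adammoss/gpt-pretrain-lm-gp",
    trust_remote_code=True,
)

# Fields from this config.json: 384-dim embeddings, 6 layers, 6 heads.
print(config.n_embd, config.n_layer, config.n_head)  # 384 6 6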