{ "architectures": [ "GPTModel" ], "dropout": 0.1, "head_type": "lm", "is_causal": true, "model_type": "gptmodel", "n_embd": 384, "n_head": 6, "n_labels": 18, "n_layer": 6, "n_positions": 1024, "n_static": 2, "position_embedding": "relative_key", "torch_dtype": "float32", "transformers_version": "4.39.2", "vocab_size": 3501 }