{
  "_name": "Shorsey-T2000",
  "_name_or_path": "Wonder-Griffin/Shorsey-T2000",
  "activation_function": "gelu_new",
  "architectures": [
    "GPTForCausalLM"
  ],
  "model_type": "gpt2",
  "vocab_size": 30522,
  "max_len": 512,
  "hidden_size": 512,
  "dropout": 0.1,
  "n_layer": 4,
  "n_head": 4,
  "ff_expansion_factor": 4,
  "n_embd": 128,
  "rnn_units": 512,
  "num_labels": 5,
  "task_specific_params": {
    "qa": {
      "num_labels": 5
    },
    "causal_lm": {
      "vocab_size": 30522
    },
    "general": {
      "vocab_size": 30522
    }
  },
  "library_name": "transformers",
  "tags": [
    "text-generation-inference",
    "causal-lm",
    "question-answering"
  ],
  "model-index": [
    {
      "name": "Shorsey-T2000",
      "results": []
    }
  ],
  "datasets": [
    "stanfordnlp/imdb"
  ],
  "language": [
    "en"
  ],
  "pipeline_tag": "text-generation"
}