Wonder-Griffin committed
Commit c7d880f
1 Parent(s): e1ee4e0

Update config.json

config.json CHANGED (+3 -3)
@@ -6,7 +6,7 @@
     "GPTForCausalLM"
   ],
   "model_type": "gpt2",
-  "vocab_size":
+  "vocab_size": 30522,
   "max_len": 512,
   "hidden_size": 512,
   "dropout": 0.1,
@@ -21,10 +21,10 @@
       "num_labels": 5
     },
     "causal_lm": {
-      "vocab_size":
+      "vocab_size": 30522
     },
     "general": {
-      "vocab_size":
+      "vocab_size": 30522
     }
   },
   "library_name": "transformers",
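For reference, a minimal sketch of how the updated config could be sanity-checked with the transformers library. The repo id below is a hypothetical placeholder (the commit page does not name the full repository path), and it assumes the repo also ships a tokenizer whose vocabulary the new vocab_size (30522, the standard BERT WordPiece size) is meant to match; neither assumption is confirmed by the commit itself.

# Minimal sketch: load the updated config and compare its vocab_size
# with the repo's tokenizer. Placeholder repo id, not from the commit.
from transformers import AutoConfig, AutoTokenizer

repo_id = "Wonder-Griffin/<model-repo>"  # hypothetical placeholder

config = AutoConfig.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# The commit sets vocab_size to 30522 in all three sections; the top-level
# value is the one AutoConfig exposes, and it should agree with the
# tokenizer so the embedding matrix covers every token id.
print(config.vocab_size)      # expected: 30522
print(tokenizer.vocab_size)   # should match the config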