Wonder-Griffin
committed on
Commit
•
e1ee4e0
1
Parent(s):
2d6d577
Update config.json
Browse files
- config.json +7 -7
config.json
CHANGED
@@ -1,10 +1,10 @@
|
|
1 |
{
|
2 |
-
"
|
3 |
"_name_or_path": "Wonder-Griffin/Shorsey-T2000",
|
4 |
"activation_function": "gelu_new",
|
5 |
"architectures": [
|
6 |
-
"
|
7 |
-
|
8 |
"model_type": "gpt2",
|
9 |
"vocab_size": 60000,
|
10 |
"max_len": 512,
|
@@ -20,7 +20,7 @@
|
|
20 |
"qa": {
|
21 |
"num_labels": 5
|
22 |
},
|
23 |
-
"
|
24 |
"vocab_size": 60000
|
25 |
},
|
26 |
"general": {
|
@@ -30,7 +30,7 @@
|
|
30 |
"library_name": "transformers",
|
31 |
"tags": [
|
32 |
"text-generation-inference",
|
33 |
-
"
|
34 |
"question-answering"
|
35 |
],
|
36 |
"model-index": [
|
@@ -45,5 +45,5 @@
|
|
45 |
"language": [
|
46 |
"en"
|
47 |
],
|
48 |
-
"pipeline_tag": "text-generation"
|
49 |
-
|
|
|
1 |
{
|
2 |
+
"_name": "Shorsey-T2000",
|
3 |
"_name_or_path": "Wonder-Griffin/Shorsey-T2000",
|
4 |
"activation_function": "gelu_new",
|
5 |
"architectures": [
|
6 |
+
"GPTForCausalLM"
|
7 |
+
],
|
8 |
"model_type": "gpt2",
|
9 |
"vocab_size": 60000,
|
10 |
"max_len": 512,
|
|
|
20 |
"qa": {
|
21 |
"num_labels": 5
|
22 |
},
|
23 |
+
"causal_lm": {
|
24 |
"vocab_size": 60000
|
25 |
},
|
26 |
"general": {
|
|
|
30 |
"library_name": "transformers",
|
31 |
"tags": [
|
32 |
"text-generation-inference",
|
33 |
+
"causal-lm",
|
34 |
"question-answering"
|
35 |
],
|
36 |
"model-index": [
|
|
|
45 |
"language": [
|
46 |
"en"
|
47 |
],
|
48 |
+
"pipeline_tag": "text-generation"
|
49 |
+
}
|