GinnM committed
Commit 99e756c
1 Parent(s): d8f1e36

Upload config

Files changed (1)
  1. config.json +2 -7
config.json CHANGED
@@ -1,11 +1,7 @@
 {
-  "architectures": [
-    "ProSSTForMaskedLM"
-  ],
   "attention_probs_dropout_prob": 0.1,
   "auto_map": {
-    "AutoConfig": "configuration_prosst.ProSSTConfig",
-    "AutoModelForMaskedLM": "modeling_prosst.ProSSTForMaskedLM"
+    "AutoConfig": "configuration_prosst.ProSSTConfig"
   },
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
@@ -28,7 +24,7 @@
   "pos_att_type": [
     "aa2pos",
     "pos2aa",
-    "aa2ss"
+    "ss2aa"
   ],
   "position_biased_input": false,
   "position_embedding_type": "relative",
@@ -36,7 +32,6 @@
   "scale_hidden": 1,
   "ss_vocab_size": 2051,
   "token_dropout": true,
-  "torch_dtype": "float32",
   "transformers_version": "4.38.2",
   "type_vocab_size": 0,
   "vocab_size": 25