GinnM committed on
Commit
b16e466
1 Parent(s): d7964a1

Upload ProSSTForMaskedLM

Browse files
Files changed (2) hide show
  1. config.json +6 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,7 +1,11 @@
1
  {
 
 
 
2
  "attention_probs_dropout_prob": 0.1,
3
  "auto_map": {
4
- "AutoConfig": "configuration_prosst.ProSSTConfig"
 
5
  },
6
  "hidden_act": "gelu",
7
  "hidden_dropout_prob": 0.1,
@@ -32,6 +36,7 @@
32
  "scale_hidden": 1,
33
  "ss_vocab_size": 2051,
34
  "token_dropout": true,
 
35
  "transformers_version": "4.38.2",
36
  "type_vocab_size": 0,
37
  "vocab_size": 25
 
1
  {
2
+ "architectures": [
3
+ "ProSSTForMaskedLM"
4
+ ],
5
  "attention_probs_dropout_prob": 0.1,
6
  "auto_map": {
7
+ "AutoConfig": "configuration_prosst.ProSSTConfig",
8
+ "AutoModelForMaskedLM": "modeling_prosst.ProSSTForMaskedLM"
9
  },
10
  "hidden_act": "gelu",
11
  "hidden_dropout_prob": 0.1,
 
36
  "scale_hidden": 1,
37
  "ss_vocab_size": 2051,
38
  "token_dropout": true,
39
+ "torch_dtype": "float32",
40
  "transformers_version": "4.38.2",
41
  "type_vocab_size": 0,
42
  "vocab_size": 25
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:92f1600fdb44d50829810e86dab1e53088cb1d40e1d31c2630710edd9db5d96c
3
- size 440269240
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:61e0529b3381c9f2d376a7aad60edd985e9de01a122fa733d755cc3dfa133f81
3
+ size 440307592