Zongxia committed
Commit 556944b
1 parent: a266660
Files changed (2)
  1. config.json +4 -4
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/srv/www/active-topic-modeling/ae_tune/models--google--bert_uncased_L-2_H-128_A-2/snapshots/30b0a37ccaaa32f332884b96992754e246e48c5f",
+  "_name_or_path": "/srv/www/active-topic-modeling/ae_tune/models--google--bert_uncased_L-2_H-256_A-4/snapshots/4e937a8675e5afd9a4836735c186ec01695bc3ea",
   "architectures": [
     "BertForSequenceClassification"
   ],
@@ -7,13 +7,13 @@
   "classifier_dropout": null,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
-  "hidden_size": 128,
+  "hidden_size": 256,
   "initializer_range": 0.02,
-  "intermediate_size": 512,
+  "intermediate_size": 1024,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",
-  "num_attention_heads": 2,
+  "num_attention_heads": 4,
   "num_hidden_layers": 2,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8e737403eedecbc205d922eff07f07a3eaf4b324d697fb890cce3f1ca409eb57
-size 17549312
+oid sha256:e96d9ffc4de70c727d505b649465c5fe1349dd9d455c351d8ed8bb3a9751b3c5
+size 38370824
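The roughly doubled file size is consistent with the wider model. A back-of-the-envelope estimate, assuming float32 weights and the standard BERT parameter layout (the helper approx_bert_params below is illustrative only; the exact file size also includes the classifier head and the safetensors header):

def approx_bert_params(hidden, layers, intermediate,
                       vocab=30522, max_pos=512, type_vocab=2):
    # Embeddings (word + position + token-type) plus their LayerNorm.
    # The number of attention heads does not change the parameter count;
    # heads only split hidden_size.
    emb = (vocab + max_pos + type_vocab) * hidden + 2 * hidden
    # Per encoder layer: Q/K/V/output projections, FFN, and two LayerNorms.
    attn = 4 * (hidden * hidden + hidden)
    ffn = 2 * (hidden * intermediate) + intermediate + hidden
    norms = 2 * 2 * hidden
    # Pooler used by BertForSequenceClassification.
    pooler = hidden * hidden + hidden
    return emb + layers * (attn + ffn + norms) + pooler

old = approx_bert_params(hidden=128, layers=2, intermediate=512)   # ~4.39M params
new = approx_bert_params(hidden=256, layers=2, intermediate=1024)  # ~9.59M params
print(old * 4, new * 4)  # ~17.5 MB and ~38.4 MB at 4 bytes/param,
                         # close to the 17549312 and 38370824 bytes above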