ogbi committed on
Commit
332b8d3
1 Parent(s): 1181c0e

Training in progress, step 200

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "facebook/mms-1b",
3
  "activation_dropout": 0.0,
4
  "adapter_attn_dim": null,
5
  "adapter_kernel_size": 3,
@@ -12,7 +12,7 @@
12
  "attention_dropout": 0.1,
13
  "bos_token_id": 1,
14
  "classifier_proj_size": 256,
15
- "codevector_dim": 1024,
16
  "contrastive_logits_temperature": 0.1,
17
  "conv_bias": true,
18
  "conv_dim": [
@@ -56,9 +56,9 @@
56
  "gradient_checkpointing": false,
57
  "hidden_act": "gelu",
58
  "hidden_dropout": 0.1,
59
- "hidden_size": 1280,
60
  "initializer_range": 0.02,
61
- "intermediate_size": 5120,
62
  "layer_norm_eps": 1e-05,
63
  "layerdrop": 0.0,
64
  "mask_feature_length": 10,
@@ -75,11 +75,11 @@
75
  "num_conv_pos_embedding_groups": 16,
76
  "num_conv_pos_embeddings": 128,
77
  "num_feat_extract_layers": 7,
78
- "num_hidden_layers": 48,
79
  "num_negatives": 100,
80
- "output_hidden_size": 1280,
81
  "pad_token_id": 33,
82
- "proj_codevector_dim": 1024,
83
  "tdnn_dilation": [
84
  1,
85
  2,
 
1
  {
2
+ "_name_or_path": "facebook/mms-300m",
3
  "activation_dropout": 0.0,
4
  "adapter_attn_dim": null,
5
  "adapter_kernel_size": 3,
 
12
  "attention_dropout": 0.1,
13
  "bos_token_id": 1,
14
  "classifier_proj_size": 256,
15
+ "codevector_dim": 768,
16
  "contrastive_logits_temperature": 0.1,
17
  "conv_bias": true,
18
  "conv_dim": [
 
56
  "gradient_checkpointing": false,
57
  "hidden_act": "gelu",
58
  "hidden_dropout": 0.1,
59
+ "hidden_size": 1024,
60
  "initializer_range": 0.02,
61
+ "intermediate_size": 4096,
62
  "layer_norm_eps": 1e-05,
63
  "layerdrop": 0.0,
64
  "mask_feature_length": 10,
 
75
  "num_conv_pos_embedding_groups": 16,
76
  "num_conv_pos_embeddings": 128,
77
  "num_feat_extract_layers": 7,
78
+ "num_hidden_layers": 24,
79
  "num_negatives": 100,
80
+ "output_hidden_size": 1024,
81
  "pad_token_id": 33,
82
+ "proj_codevector_dim": 768,
83
  "tdnn_dilation": [
84
  1,
85
  2,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:94663caf9113a0f82c6ee79d873972394d7830508c5c7c7de090daf512b98b1e
3
- size 3850275464
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:300204f0afd9443aa6fcb8c15162630bb17e8d5ce648328037bd2129bdd7e4d0
3
+ size 1261955080
runs/Jun12_21-37-33_103f35af7d2e/events.out.tfevents.1718228358.103f35af7d2e.2293.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b7dc091665a254cacdbc437ad8169a166b3121d7fa64948543cd25bcecbac2f
3
+ size 14004
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:aa23c7ce281022431199386fff78b4eab4ab67a2dfe22b4e065e631d7974fd2f
3
  size 5048
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:479edb3bdc92f031a3b9810004e4a6898e211066e75ffcdfcff328760ac2dfac
3
  size 5048