robin committed
Commit 6d4c74b
1 Parent(s): 54376f8

Clip_OutEffHop_OPT_125m

all_results.json ADDED
@@ -0,0 +1 @@
+ {"perplexity": 15.894995701938292, "model.decoder.final_layer_norm": 94.85217790442836, "model.decoder.layers.0": 61.72161496072118, "model.decoder.layers.1": 63.45426087435118, "model.decoder.layers.2": 65.16417294995794, "model.decoder.layers.3": 66.45850705420152, "model.decoder.layers.4": 67.45579717811752, "model.decoder.layers.5": 67.49456974999295, "model.decoder.layers.6": 67.57569958418415, "model.decoder.layers.7": 67.60390376735445, "model.decoder.layers.8": 67.55277501315304, "model.decoder.layers.9": 67.4413675717652, "model.decoder.layers.10": 67.15642369922188, "model.decoder.layers.11": 8.40556866378945, "model.decoder.layers.0.fc2": 0.4700959697075855, "model.decoder.layers.1.fc2": 0.0667475177344761, "model.decoder.layers.2.fc2": 0.05530324209621599, "model.decoder.layers.3.fc2": 0.07785968031207566, "model.decoder.layers.4.fc2": 0.09141296370078153, "model.decoder.layers.5.fc2": 0.10546309152562486, "model.decoder.layers.6.fc2": 0.117569733937736, "model.decoder.layers.7.fc2": 0.17874600119821138, "model.decoder.layers.8.fc2": 0.3013071445446225, "model.decoder.layers.9.fc2": 0.45822949808008784, "model.decoder.layers.10.fc2": 0.5499530389769687, "model.decoder.layers.11.fc2": 0.42582166247098824, "model.decoder.layers.0.final_layer_norm": 0.5029111054876739, "model.decoder.layers.1.final_layer_norm": 0.8855151668901263, "model.decoder.layers.2.final_layer_norm": 1.4255385544460906, "model.decoder.layers.3.final_layer_norm": 0.6276186083395013, "model.decoder.layers.4.final_layer_norm": 0.6252138745432942, "model.decoder.layers.5.final_layer_norm": 0.6402498175409863, "model.decoder.layers.6.final_layer_norm": 0.7048307306592474, "model.decoder.layers.7.final_layer_norm": 0.8000356420296124, "model.decoder.layers.8.final_layer_norm": 1.2570207932869295, "model.decoder.layers.9.final_layer_norm": 1.5736880635334742, "model.decoder.layers.10.final_layer_norm": 1.9472224616314464, "model.decoder.layers.11.final_layer_norm": 1.6762300501915024, "model.decoder.layers.0.self_attn.out_proj": 0.16683884755492057, "model.decoder.layers.1.self_attn.out_proj": 0.24019762328626582, "model.decoder.layers.2.self_attn.out_proj": 0.124162913914939, "model.decoder.layers.3.self_attn.out_proj": 0.1778325774790248, "model.decoder.layers.4.self_attn.out_proj": 0.22608052215378263, "model.decoder.layers.5.self_attn.out_proj": 0.2442916889908094, "model.decoder.layers.6.self_attn.out_proj": 0.24172581772859922, "model.decoder.layers.7.self_attn.out_proj": 0.46892321001540205, "model.decoder.layers.8.self_attn.out_proj": 0.6346672246119083, "model.decoder.layers.9.self_attn.out_proj": 1.0770352335136997, "model.decoder.layers.10.self_attn.out_proj": 1.9275887930903453, "model.decoder.layers.11.self_attn.out_proj": 63.86637179798962, "model.decoder.layers.0.self_attn_layer_norm": 3.349244225929683, "model.decoder.layers.1.self_attn_layer_norm": 3.3546658255324133, "model.decoder.layers.2.self_attn_layer_norm": 7.636088764621126, "model.decoder.layers.3.self_attn_layer_norm": 8.149288036402716, "model.decoder.layers.4.self_attn_layer_norm": 7.130700929155609, "model.decoder.layers.5.self_attn_layer_norm": 7.788260178575256, "model.decoder.layers.6.self_attn_layer_norm": 7.217336144892622, "model.decoder.layers.7.self_attn_layer_norm": 5.3960551579329445, "model.decoder.layers.8.self_attn_layer_norm": 6.789148417196385, "model.decoder.layers.9.self_attn_layer_norm": 6.424844545844928, "model.decoder.layers.10.self_attn_layer_norm": 7.448416051001864, "model.decoder.layers.11.self_attn_layer_norm": 8.919588321996262, "max_inf_norm": 94.85217790442836, "max_ffn_inf_norm": 0.5499530389769687, "max_layer_inf_norm": 67.60390376735445, "avg_kurtosis": Infinity, "max_kurtosis": Infinity, "max_kurtosis_layers": 260056.0572941999}
checkpoints/checkpoint_120000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33e9f6231103f1050143006f60ee6bf931fbb8d142d1d1cc5c483f3d74e219f5
+ size 496261008
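The checkpoint binaries are tracked with Git LFS, so the diff shows only the three-line pointer file (spec version, SHA-256 object id, byte size) rather than the payload. A minimal sketch of verifying a downloaded file against the pointer, assuming the file has been fetched locally:

```python
import hashlib
import os

path = "checkpoints/checkpoint_120000/model.safetensors"  # assumed local download
expected_oid = "33e9f6231103f1050143006f60ee6bf931fbb8d142d1d1cc5c483f3d74e219f5"
expected_size = 496261008

assert os.path.getsize(path) == expected_size, "size mismatch"

# Hash in 1 MiB chunks to avoid loading ~500 MB into memory at once.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == expected_oid, "hash mismatch"
print("pointer verified")
```

The remaining checkpoint files below (optimizer, RNG states, grad scaler, LR scheduler) follow the same pointer format.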
checkpoints/checkpoint_120000/optimizer.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4037bd12980a62e75a03b1d2f36a18f4ffc8d4a68dd870cbcca11229945378e
+ size 992641146
checkpoints/checkpoint_120000/random_states_0.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:724a079cb13a9f67324d9820a142b4d730c424dc325b21027ada9966b885996a
+ size 14540
checkpoints/checkpoint_120000/scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a65cad7a1a289006c1fc5cca7a10a7bb83f500f49ff5420c0939f99b83542cb
+ size 988
checkpoints/checkpoint_120000/scheduler.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92ba14df51b5d79bdbfe06e141b8ca3fdeae5eb33766f6f59b17a73a279e367b
+ size 1064
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "_remove_final_layer_norm": false,
+   "activation_function": "relu",
+   "architectures": [
+     "OPTForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 2,
+   "do_layer_norm_before": true,
+   "dropout": 0.1,
+   "enable_bias": true,
+   "eos_token_id": 2,
+   "ffn_dim": 3072,
+   "hidden_size": 768,
+   "init_std": 0.006,
+   "layer_norm_elementwise_affine": true,
+   "layerdrop": 0.0,
+   "max_position_embeddings": 512,
+   "model_type": "opt",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 1,
+   "torch_dtype": "float32",
+   "transformers_version": "4.31.0",
+   "use_cache": true,
+   "vocab_size": 50272,
+   "word_embed_proj_dim": 768
+ }
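This is a standard OPT-125m architecture (12 layers, 12 heads, hidden size 768, FFN dim 3072) with a 512-token context and ReLU activations. A minimal sketch of instantiating the architecture from this config with transformers to sanity-check the parameter count (local path assumed, model weights are randomly initialized here):

```python
from transformers import OPTConfig, OPTForCausalLM

# Build an untrained model with the committed architecture.
config = OPTConfig.from_json_file("config.json")
model = OPTForCausalLM(config)

n_params = sum(p.numel() for p in model.parameters())
print(f"{n_params / 1e6:.1f}M parameters")  # ~125M for this config
```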
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 2,
+   "eos_token_id": 2,
+   "pad_token_id": 1,
+   "transformers_version": "4.31.0"
+ }
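The generation config just pins the special-token ids inherited from the model config (bos/eos = 2, pad = 1). A minimal sketch of greedy decoding with the committed weights, assuming the repo has been downloaded to the current directory (paths are assumptions):

```python
from transformers import AutoTokenizer, OPTForCausalLM

model = OPTForCausalLM.from_pretrained(".")     # reads pytorch_model.bin + config.json
tokenizer = AutoTokenizer.from_pretrained(".")  # reads the tokenizer files below

inputs = tokenizer("The meaning of life is", return_tensors="pt")
# bos/eos/pad ids are picked up from generation_config.json automatically.
output = model.generate(**inputs, max_new_tokens=20, do_sample=False)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```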
merges.txt ADDED
The diff for this file is too large to render. See raw diff
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27944c71ca01d860a7760da861d5190fcf1a4c667a0d8c8202d781943ad0a59b
+ size 496305242
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
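Note that bos, eos, and unk all map to the same token "</s>", matching the OPT convention where bos_token_id and eos_token_id are both 2 in config.json; only pad differs ("<pad>", id 1). A quick check (local path assumed):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # assumed local checkout
# bos, eos, and unk all resolve to "</s>"; pad is "<pad>".
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token, tokenizer.pad_token)
print(tokenizer.bos_token_id, tokenizer.eos_token_id, tokenizer.pad_token_id)  # 2 2 1
```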
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "add_bos_token": true,
+   "add_prefix_space": false,
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "clean_up_tokenization_spaces": true,
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "errors": "replace",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": {
+     "__type": "AddedToken",
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
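The tokenizer is a GPT-2 style BPE tokenizer with "add_bos_token": true, so every encoded sequence is prefixed with "</s>" (id 2). The huge "model_max_length" is a no-limit sentinel; the effective 512-token cap comes from "max_position_embeddings" in config.json, not from the tokenizer. A small round-trip sketch (local path assumed):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # assumed local checkout
ids = tokenizer("hello world").input_ids
print(ids[0] == tokenizer.bos_token_id)  # True: "</s>" is prepended
print(tokenizer.decode(ids))             # "</s>hello world"
```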
vocab.json ADDED
The diff for this file is too large to render. See raw diff