Column summary (dtype and observed range/values across the full table):

column | dtype | values |
---|---|---|
layer_id | int64 | 0 to 223 |
name | string | lengths 26 to 32 |
D | float64 | 0.03 to 0.12 |
M | int64 | 1.02k to 4.1k |
N | int64 | 4.1k to 14.3k |
Q | float64 | 1 to 4 |
alpha | float64 | 2.98 to 23.9 |
alpha_weighted | float64 | -65.71 to -6.41 |
entropy | float64 | 1.11 to 1.57 |
has_esd | bool | 1 class |
lambda_max | float32 | 0 to 0.02 |
layer_type | string | 1 class |
log_alpha_norm | float64 | -64.96 to -5.95 |
log_norm | float32 | -1.43 to -0.48 |
log_spectral_norm | float32 | -2.81 to -1.77 |
matrix_rank | int64 | 64 to 64 |
norm | float32 | 0.04 to 0.33 |
num_evals | int64 | 1.02k to 4.1k |
num_pl_spikes | int64 | 10 to 64 |
rank_loss | int64 | 960 to 4.03k |
rf | int64 | 1 to 1 |
sigma | float64 | 0.25 to 5.88 |
spectral_norm | float32 | 0 to 0.02 |
stable_rank | float32 | 7.52 to 56.2 |
status | string | 1 class |
sv_max | float64 | 0.04 to 0.13 |
sv_min | float64 | 0 to 0 |
warning | string | 2 classes |
weak_rank_loss | int64 | 960 to 4.03k |
xmax | float64 | 0 to 0.02 |
xmin | float64 | 0 to 0 |

Per-layer rows (layer_id 200 to 223 shown):

layer_id | name | D | M | N | Q | alpha | alpha_weighted | entropy | has_esd | lambda_max | layer_type | log_alpha_norm | log_norm | log_spectral_norm | matrix_rank | norm | num_evals | num_pl_spikes | rank_loss | rf | sigma | spectral_norm | stable_rank | status | sv_max | sv_min | warning | weak_rank_loss | xmax | xmin |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
200 | model.layers.28.self_attn.o_proj | 0.041197 | 4,096 | 4,096 | 1 | 11.01609 | -28.828882 | 1.566721 | true | 0.002416 | dense | -28.544096 | -1.007876 | -2.61698 | 64 | 0.098203 | 4,096 | 64 | 4,032 | 1 | 1.252011 | 0.002416 | 40.654037 | success | 0.049148 | 0 | under-trained | 4,032 | 0.002416 | 0.00138 |
201 | model.layers.28.self_attn.q_proj | 0.040291 | 4,096 | 4,096 | 1 | 6.343314 | -15.971821 | 1.563312 | true | 0.003035 | dense | -15.770653 | -1.058412 | -2.517899 | 64 | 0.087415 | 4,096 | 47 | 4,032 | 1 | 0.779402 | 0.003035 | 28.806244 | success | 0.055087 | 0 | under-trained | 4,032 | 0.003035 | 0.001181 |
202 | model.layers.28.self_attn.v_proj | 0.066078 | 1,024 | 4,096 | 4 | 16.502318 | -45.673627 | 1.136797 | true | 0.001707 | dense | -45.314251 | -1.080851 | -2.76771 | 64 | 0.083014 | 1,024 | 27 | 960 | 1 | 2.983423 | 0.001707 | 48.624928 | success | 0.041319 | 0.000001 | under-trained | 960 | 0.001707 | 0.001304 |
203 | model.layers.29.mlp.down_proj | 0.058877 | 4,096 | 14,336 | 3.5 | 10.575127 | -24.526027 | 1.566756 | true | 0.004795 | dense | -24.259765 | -0.718782 | -2.319218 | 64 | 0.191081 | 4,096 | 31 | 4,032 | 1 | 1.719744 | 0.004795 | 39.850769 | success | 0.069245 | 0.000001 | under-trained | 4,032 | 0.004795 | 0.00286 |
204 | model.layers.29.mlp.gate_proj | 0.064726 | 4,096 | 14,336 | 3.5 | 7.03554 | -13.710543 | 1.558954 | true | 0.011252 | dense | -13.680309 | -0.640899 | -1.948755 | 64 | 0.228613 | 4,096 | 64 | 4,032 | 1 | 0.754443 | 0.011252 | 20.316813 | success | 0.106077 | 0.000001 | under-trained | 4,032 | 0.011252 | 0.002935 |
205 | model.layers.29.mlp.up_proj | 0.071536 | 4,096 | 14,336 | 3.5 | 5.994073 | -11.971793 | 1.560465 | true | 0.010063 | dense | -11.896176 | -0.659252 | -1.997272 | 64 | 0.219153 | 4,096 | 21 | 4,032 | 1 | 1.089796 | 0.010063 | 21.778074 | success | 0.100315 | 0.000001 | | 4,032 | 0.010063 | 0.003302 |
206 | model.layers.29.self_attn.k_proj | 0.027305 | 1,024 | 4,096 | 4 | 5.673421 | -14.569579 | 1.130258 | true | 0.002704 | dense | -14.257931 | -1.125259 | -2.568041 | 64 | 0.074945 | 1,024 | 64 | 960 | 1 | 0.584178 | 0.002704 | 27.719282 | success | 0.051997 | 0.000001 | | 960 | 0.002704 | 0.00092 |
207 | model.layers.29.self_attn.o_proj | 0.078943 | 4,096 | 4,096 | 1 | 10.376519 | -26.202336 | 1.565592 | true | 0.002984 | dense | -25.921757 | -0.965514 | -2.525157 | 64 | 0.108265 | 4,096 | 64 | 4,032 | 1 | 1.172065 | 0.002984 | 36.277966 | success | 0.054629 | 0 | under-trained | 4,032 | 0.002984 | 0.001505 |
208 | model.layers.29.self_attn.q_proj | 0.042463 | 4,096 | 4,096 | 1 | 6.201798 | -15.974763 | 1.563786 | true | 0.002656 | dense | -15.666368 | -1.083036 | -2.575828 | 64 | 0.082597 | 4,096 | 64 | 4,032 | 1 | 0.650225 | 0.002656 | 31.1022 | success | 0.051533 | 0 | under-trained | 4,032 | 0.002656 | 0.001046 |
209 | model.layers.29.self_attn.v_proj | 0.044242 | 1,024 | 4,096 | 4 | 15.119148 | -40.901417 | 1.136675 | true | 0.001971 | dense | -40.713053 | -1.051864 | -2.705273 | 64 | 0.088743 | 1,024 | 31 | 960 | 1 | 2.535874 | 0.001971 | 45.020294 | success | 0.044398 | 0.000001 | under-trained | 960 | 0.001971 | 0.001369 |
210 | model.layers.30.mlp.down_proj | 0.055847 | 4,096 | 14,336 | 3.5 | 7.012627 | -15.242136 | 1.564144 | true | 0.006706 | dense | -15.056465 | -0.694559 | -2.173527 | 64 | 0.202042 | 4,096 | 33 | 4,032 | 1 | 1.046664 | 0.006706 | 30.127838 | success | 0.081891 | 0.000001 | under-trained | 4,032 | 0.006706 | 0.002908 |
211 | model.layers.30.mlp.gate_proj | 0.072323 | 4,096 | 14,336 | 3.5 | 6.243777 | -11.749667 | 1.557202 | true | 0.013127 | dense | -11.714567 | -0.608049 | -1.88182 | 64 | 0.246576 | 4,096 | 43 | 4,032 | 1 | 0.799669 | 0.013127 | 18.783279 | success | 0.114575 | 0.000001 | under-trained | 4,032 | 0.013127 | 0.0033 |
212 | model.layers.30.mlp.up_proj | 0.061377 | 4,096 | 14,336 | 3.5 | 4.781096 | -9.162759 | 1.55794 | true | 0.012121 | dense | -9.05048 | -0.629157 | -1.916456 | 64 | 0.234878 | 4,096 | 14 | 4,032 | 1 | 1.01054 | 0.012121 | 19.377544 | success | 0.110096 | 0.000001 | | 4,032 | 0.012121 | 0.003747 |
213 | model.layers.30.self_attn.k_proj | 0.044896 | 1,024 | 4,096 | 4 | 5.673842 | -14.993671 | 1.13231 | true | 0.002277 | dense | -14.262159 | -1.079453 | -2.642596 | 64 | 0.083281 | 1,024 | 51 | 960 | 1 | 0.654469 | 0.002277 | 36.571552 | success | 0.04772 | 0.000001 | | 960 | 0.002277 | 0.001083 |
214 | model.layers.30.self_attn.o_proj | 0.085833 | 4,096 | 4,096 | 1 | 4.185827 | -9.4888 | 1.555713 | true | 0.005409 | dense | -9.141102 | -0.93027 | -2.266888 | 64 | 0.117417 | 4,096 | 21 | 4,032 | 1 | 0.695204 | 0.005409 | 21.707933 | success | 0.073545 | 0 | | 4,032 | 0.005409 | 0.001675 |
215 | model.layers.30.self_attn.q_proj | 0.030675 | 4,096 | 4,096 | 1 | 5.626068 | -13.164741 | 1.560031 | true | 0.004571 | dense | -13.014311 | -0.965638 | -2.339954 | 64 | 0.108233 | 4,096 | 43 | 4,032 | 1 | 0.705469 | 0.004571 | 23.676403 | success | 0.067612 | 0 | | 4,032 | 0.004571 | 0.001446 |
216 | model.layers.30.self_attn.v_proj | 0.051708 | 1,024 | 4,096 | 4 | 15.165945 | -41.261232 | 1.136643 | true | 0.001903 | dense | -40.82963 | -1.04063 | -2.72065 | 64 | 0.091069 | 1,024 | 22 | 960 | 1 | 3.02019 | 0.001903 | 47.865246 | success | 0.043619 | 0.000001 | under-trained | 960 | 0.001903 | 0.00145 |
217 | model.layers.31.mlp.down_proj | 0.061909 | 4,096 | 14,336 | 3.5 | 3.910137 | -7.230149 | 1.548955 | true | 0.014155 | dense | -6.915653 | -0.592589 | -1.849078 | 64 | 0.255512 | 4,096 | 22 | 4,032 | 1 | 0.620443 | 0.014155 | 18.050489 | success | 0.118976 | 0.000001 | | 4,032 | 0.014155 | 0.003618 |
218 | model.layers.31.mlp.gate_proj | 0.067566 | 4,096 | 14,336 | 3.5 | 4.579302 | -8.127726 | 1.554599 | true | 0.016793 | dense | -8.0711 | -0.571228 | -1.774883 | 64 | 0.268394 | 4,096 | 14 | 4,032 | 1 | 0.956609 | 0.016793 | 15.982895 | success | 0.129586 | 0.000001 | | 4,032 | 0.016793 | 0.004214 |
219 | model.layers.31.mlp.up_proj | 0.062518 | 4,096 | 14,336 | 3.5 | 4.467994 | -8.32529 | 1.558079 | true | 0.013699 | dense | -8.182721 | -0.578391 | -1.863317 | 64 | 0.264003 | 4,096 | 13 | 4,032 | 1 | 0.961848 | 0.013699 | 19.271957 | success | 0.117042 | 0.000001 | | 4,032 | 0.013699 | 0.004143 |
220 | model.layers.31.self_attn.k_proj | 0.048769 | 1,024 | 4,096 | 4 | 5.477654 | -14.433448 | 1.131628 | true | 0.002318 | dense | -13.74517 | -1.093285 | -2.634969 | 64 | 0.080671 | 1,024 | 55 | 960 | 1 | 0.603767 | 0.002318 | 34.808407 | success | 0.048141 | 0.000001 | | 960 | 0.002318 | 0.001021 |
221 | model.layers.31.self_attn.o_proj | 0.120417 | 4,096 | 4,096 | 1 | 3.675778 | -6.591092 | 1.522689 | true | 0.016102 | dense | -6.582583 | -0.916658 | -1.793115 | 64 | 0.121155 | 4,096 | 12 | 4,032 | 1 | 0.772431 | 0.016102 | 7.52414 | success | 0.126894 | 0 | | 4,032 | 0.016102 | 0.001798 |
222 | model.layers.31.self_attn.q_proj | 0.025553 | 4,096 | 4,096 | 1 | 5.538945 | -12.81395 | 1.558859 | true | 0.004859 | dense | -12.727901 | -0.988689 | -2.313428 | 64 | 0.102639 | 4,096 | 61 | 4,032 | 1 | 0.581152 | 0.004859 | 21.122208 | success | 0.069709 | 0 | | 4,032 | 0.004859 | 0.001259 |
223 | model.layers.31.self_attn.v_proj | 0.065206 | 1,024 | 4,096 | 4 | 10.682958 | -28.048469 | 1.135977 | true | 0.002368 | dense | -27.835504 | -1.03148 | -2.625534 | 64 | 0.093008 | 1,024 | 56 | 960 | 1 | 1.29394 | 0.002368 | 39.269352 | success | 0.048667 | 0.000001 | under-trained | 960 | 0.002368 | 0.001322 |
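
These columns match the per-layer details that spectral-analysis tools such as WeightWatcher emit (power-law exponent `alpha`, `stable_rank`, `num_pl_spikes`, an `under-trained` warning, and so on). Below is a minimal sketch of how such a table might be consumed, assuming it has been exported to CSV; the filename `layer_metrics.csv` and the alpha cutoff are illustrative assumptions, not part of the dataset.

```python
import pandas as pd

# Load the per-layer metrics; thousands="," handles values such as "4,096".
# "layer_metrics.csv" is a placeholder for however this table is exported.
df = pd.read_csv("layer_metrics.csv", thousands=",")

# Layers already flagged in the `warning` column.
flagged = df[df["warning"] == "under-trained"]
print(f"{len(flagged)} of {len(df)} layers carry an 'under-trained' warning")

# A commonly cited heuristic from heavy-tailed self-regularization theory:
# fitted alpha in roughly [2, 6] indicates a well-trained layer, while much
# larger alpha suggests under-training. The cutoff of 6 is illustrative.
suspect = df[df["alpha"] > 6].sort_values("alpha", ascending=False)
print(suspect[["layer_id", "name", "alpha", "stable_rank", "num_pl_spikes"]]
      .to_string(index=False))
```

In the rows shown here, that filter would surface mainly the `v_proj` and `o_proj` projections, whose fitted alpha values exceed 10.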