Detected Pickle imports (122)
- "__torch__.torch.nn.modules.linear.___torch_mangle_30.Linear",
- "__torch__.zipformer.Zipformer2Encoder",
- "__torch__.scaling.BiasNorm",
- "__torch__.torch.nn.modules.conv.Conv2d",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.zipformer.Zipformer2EncoderLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_7.Linear",
- "__torch__.zipformer.ConvolutionModule",
- "__torch__.torch.nn.modules.conv.___torch_mangle_73.Conv1d",
- "__torch__.zipformer.___torch_mangle_54.Zipformer2EncoderLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_41.Linear",
- "__torch__.zipformer.RelPositionMultiheadAttentionWeights",
- "__torch__.zipformer.___torch_mangle_25.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_67.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_60.RelPositionMultiheadAttentionWeights",
- "__torch__.torch.nn.modules.linear.___torch_mangle_47.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_49.Linear",
- "__torch__.zipformer.SelfAttention",
- "__torch__.zipformer.___torch_mangle_71.NonlinAttention",
- "__torch__.zipformer.___torch_mangle_46.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_20.RelPositionMultiheadAttentionWeights",
- "__torch__.zipformer.Zipformer2",
- "__torch__.zipformer.___torch_mangle_13.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_44.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_31.NonlinAttention",
- "__torch__.torch.nn.modules.container.___torch_mangle_79.ModuleList",
- "__torch__.zipformer.___torch_mangle_65.FeedforwardModule",
- "__torch__.scaling.Dropout2",
- "__torch__.zipformer.___torch_mangle_42.SelfAttention",
- "__torch__.zipformer.___torch_mangle_53.ConvolutionModule",
- "__torch__.zipformer.___torch_mangle_63.SelfAttention",
- "__torch__.zipformer.___torch_mangle_6.BypassModule",
- "__torch__.torch.nn.modules.linear.___torch_mangle_40.Linear",
- "__torch__.torch.nn.modules.conv.___torch_mangle_4.Conv2d",
- "__torch__.torch.nn.modules.linear.___torch_mangle_14.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_81.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_66.Linear",
- "__torch__.decoder.Decoder",
- "__torch__.zipformer.___torch_mangle_75.Zipformer2EncoderLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_10.Linear",
- "__torch__.scaling.SwooshL",
- "__torch__.scaling.Identity",
- "__torch__.zipformer.___torch_mangle_74.ConvolutionModule",
- "__torch__.torch.nn.modules.activation.Tanh",
- "__torch__.zipformer.___torch_mangle_15.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_34.ConvolutionModule",
- "__torch__.scaling.___torch_mangle_18.ActivationDropoutAndLinear",
- "__torch__.torch.nn.modules.conv.___torch_mangle_2.Conv2d",
- "__torch__.joiner.Joiner",
- "__torch__.torch.nn.modules.container.___torch_mangle_76.ModuleList",
- "__torch__.zipformer.___torch_mangle_56.Zipformer2Encoder",
- "__torch__.zipformer.NonlinAttention",
- "collections.OrderedDict",
- "__torch__.subsampling.Conv2dSubsampling",
- "__torch__.torch.nn.modules.linear.___torch_mangle_59.Linear",
- "__torch__.zipformer.SimpleDownsample",
- "__torch__.torch.nn.modules.linear.___torch_mangle_21.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_8.Linear",
- "__torch__.torch.nn.modules.conv.___torch_mangle_3.Conv2d",
- "__torch__.torch.nn.modules.conv.___torch_mangle_80.Conv1d",
- "__torch__.torch.nn.modules.conv.___torch_mangle_52.Conv1d",
- "__torch__.torch.nn.modules.linear.___torch_mangle_12.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_32.Linear",
- "__torch__.torch.nn.modules.conv.___torch_mangle_0.Conv2d",
- "__torch__.zipformer.___torch_mangle_37.Zipformer2Encoder",
- "__torch__.torch.nn.modules.container.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_70.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_61.Linear",
- "__torch__.scaling.ScheduledFloat",
- "__torch__.zipformer.BypassModule",
- "__torch__.zipformer.___torch_mangle_77.Zipformer2Encoder",
- "__torch__.zipformer.___torch_mangle_69.FeedforwardModule",
- "torch.FloatStorage",
- "__torch__.zipformer.___torch_mangle_29.FeedforwardModule",
- "__torch__.subsampling.ConvNeXt",
- "__torch__.zipformer.DownsampledZipformer2Encoder",
- "__torch__.torch.nn.modules.linear.___torch_mangle_50.Linear",
- "__torch__.zipformer.___torch_mangle_35.Zipformer2EncoderLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_11.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_22.Linear",
- "__torch__.scaling.___torch_mangle_5.ScheduledFloat",
- "__torch__.torch.nn.modules.linear.___torch_mangle_17.Linear",
- "__torch__.scaling.SwooshR",
- "__torch__.torch.nn.modules.linear.___torch_mangle_16.Linear",
- "__torch__.torch.nn.modules.container.ModuleList",
- "__torch__.torch.nn.modules.linear.___torch_mangle_68.Linear",
- "__torch__.EncoderModel",
- "__torch__.zipformer.CompactRelPositionalEncoding",
- "__torch__.zipformer.___torch_mangle_23.SelfAttention",
- "__torch__.scaling.ActivationDropoutAndLinear",
- "__torch__.zipformer.___torch_mangle_78.DownsampledZipformer2Encoder",
- "__torch__.torch.nn.modules.linear.Identity",
- "__torch__.torch.nn.modules.linear.___torch_mangle_28.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_62.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_36.ModuleList",
- "__torch__.zipformer.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_48.FeedforwardModule",
- "__torch__.zipformer.___torch_mangle_39.RelPositionMultiheadAttentionWeights",
- "__torch__.torch.nn.modules.linear.___torch_mangle_58.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_82.Linear",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.model.Transducer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_64.Linear",
- "torch.jit._pickle.build_intlist",
- "__torch__.torch.nn.modules.conv.___torch_mangle_1.Conv2d",
- "__torch__.torch.nn.modules.linear.Linear",
- "__torch__.zipformer.___torch_mangle_57.DownsampledZipformer2Encoder",
- "__torch__.zipformer.SimpleUpsample",
- "__torch__.torch.nn.modules.conv.___torch_mangle_33.Conv1d",
- "__torch__.torch.nn.modules.conv.Conv1d",
- "__torch__.zipformer.___torch_mangle_51.NonlinAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_72.Linear",
- "__torch__.torch.nn.modules.activation.Sigmoid",
- "__torch__.torch.nn.modules.linear.___torch_mangle_26.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_38.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_19.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_45.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_9.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_55.ModuleList",
- "__torch__.torch.nn.modules.linear.___torch_mangle_24.Linear",
- "__torch__.zipformer.___torch_mangle_27.FeedforwardModule",
- "__torch__.torch.nn.modules.linear.___torch_mangle_43.Linear"
Git LFS Details
- SHA256: d5b64964bad8c24fe48f5d9c0ffe98c4787495ea991a186f00b059fc3fa549c9
- Pointer size: 134 Bytes
- Size of remote file: 265 MB
Git Large File Storage (LFS) replaces large files with text pointers inside Git, while storing the file contents on a remote server.
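The 134-byte pointer is the small text stub that actually lives in the Git history (a version line, the sha256 oid above, and the file size in bytes); the 265 MB payload is fetched from the LFS server on checkout or download. Once downloaded, the file can be checked against the SHA256 listed above. A minimal verification sketch, assuming a hypothetical local file name:

    import hashlib

    # Hypothetical local path; point this at wherever the 265 MB file was downloaded.
    path = "model.pt"
    expected = "d5b64964bad8c24fe48f5d9c0ffe98c4787495ea991a186f00b059fc3fa549c9"

    h = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks so the whole file is never held in memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    assert h.hexdigest() == expected, "downloaded file does not match the listed LFS sha256"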