{
  "_label_trainable_num_channels": 1024,
  "_name_or_path": "deepmind/language-perceiver",
  "architectures": [
    "PerceiverForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "audio_samples_per_frame": 1920,
  "cross_attention_shape_for_attention": "kv",
  "cross_attention_widening_factor": 1,
  "d_latents": 1280,
  "d_model": 768,
  "finetuning_task": "text-classification",
  "hidden_act": "gelu",
  "id2label": {
    "0": "History & Politics",
    "1": "Health & Medicine",
    "2": "Mystery & Thriller",
    "3": "Arts & Design",
    "4": "Self-Help & Wellness",
    "5": "Sports & Recreation",
    "6": "Non-Fiction",
    "7": "Science Fiction & Fantasy",
    "8": "Countries & Geography",
    "9": "Other",
    "10": "Nature & Environment",
    "11": "Business & Finance",
    "12": "Romance",
    "13": "Philosophy & Religion",
    "14": "Literature & Fiction",
    "15": "Science & Technology",
    "16": "Children & Young Adult",
    "17": "Food & Cooking"
  },
  "image_size": 56,
  "initializer_range": 0.02,
  "label2id": {
    "Arts & Design": 3,
    "Business & Finance": 11,
    "Children & Young Adult": 16,
    "Countries & Geography": 8,
    "Food & Cooking": 17,
    "Health & Medicine": 1,
    "History & Politics": 0,
    "Literature & Fiction": 14,
    "Mystery & Thriller": 2,
    "Nature & Environment": 10,
    "Non-Fiction": 6,
    "Other": 9,
    "Philosophy & Religion": 13,
    "Romance": 12,
    "Science & Technology": 15,
    "Science Fiction & Fantasy": 7,
    "Self-Help & Wellness": 4,
    "Sports & Recreation": 5
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 2048,
  "model_type": "perceiver",
  "num_blocks": 1,
  "num_cross_attention_heads": 8,
  "num_frames": 16,
  "num_latents": 256,
  "num_self_attends_per_block": 26,
  "num_self_attention_heads": 8,
  "output_num_channels": 512,
  "output_shape": [
    1,
    16,
    224,
    224
  ],
  "problem_type": "multi_label_classification",
  "qk_channels": 256,
  "samples_per_patch": 16,
  "self_attention_widening_factor": 1,
  "torch_dtype": "float32",
  "train_size": [
    368,
    496
  ],
  "transformers_version": "4.33.3",
  "use_query_residual": true,
  "v_channels": 1280,
  "vocab_size": 262
}
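
A minimal sketch of how a checkpoint carrying this config could be loaded and queried with transformers, assuming the standard PerceiverTokenizer and PerceiverForSequenceClassification classes. The repo_id below is a placeholder (the actual repository name is not given here), and the 0.5 decision threshold is an illustrative assumption for the multi_label_classification problem type.

import torch
from transformers import PerceiverTokenizer, PerceiverForSequenceClassification

# Placeholder repository id; substitute the real checkpoint name.
repo_id = "pszemraj/<this-checkpoint>"

tokenizer = PerceiverTokenizer.from_pretrained(repo_id)
model = PerceiverForSequenceClassification.from_pretrained(repo_id)
model.eval()

text = "A sweeping tale of dragons, prophecy, and a reluctant hero."
# max_position_embeddings is 2048, so truncate inputs to that length.
inputs = tokenizer(text, truncation=True, max_length=2048, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# problem_type is multi_label_classification, so score each label with an
# independent sigmoid rather than a softmax over all 18 labels.
probs = torch.sigmoid(logits)[0]
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p > 0.5]
print(predicted)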