evlinzxxx committed on
Commit
fab9c86
1 Parent(s): cd16a75

Delete config.json

Files changed (1)
config.json +0 -184
config.json DELETED
@@ -1,184 +0,0 @@
- {
-   "architectures": [
-     "VisionEncoderDecoderModel"
-   ],
-   "decoder": {
-     "_name_or_path": "gpt2",
-     "activation_function": "gelu_new",
-     "add_cross_attention": true,
-     "architectures": [
-       "GPT2LMHeadModel"
-     ],
-     "attn_pdrop": 0.1,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": 50256,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "early_stopping": false,
-     "embd_pdrop": 0.1,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": 50256,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "initializer_range": 0.02,
-     "is_decoder": true,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "layer_norm_epsilon": 1e-05,
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "min_length": 0,
-     "model_type": "gpt2",
-     "n_ctx": 1024,
-     "n_embd": 768,
-     "n_head": 12,
-     "n_inner": null,
-     "n_layer": 12,
-     "n_positions": 1024,
-     "no_repeat_ngram_size": 0,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_return_sequences": 1,
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": null,
-     "prefix": null,
-     "problem_type": null,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "reorder_and_upcast_attn": false,
-     "repetition_penalty": 1.0,
-     "resid_pdrop": 0.1,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "scale_attn_by_inverse_layer_idx": false,
-     "scale_attn_weights": true,
-     "sep_token_id": null,
-     "summary_activation": null,
-     "summary_first_dropout": 0.1,
-     "summary_proj_to_labels": true,
-     "summary_type": "cls_index",
-     "summary_use_proj": true,
-     "suppress_tokens": null,
-     "task_specific_params": {
-       "text-generation": {
-         "do_sample": true,
-         "max_length": 50
-       }
-     },
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": null,
-     "torchscript": false,
-     "typical_p": 1.0,
-     "use_bfloat16": false,
-     "use_cache": true,
-     "vocab_size": 50257
-   },
-   "decoder_start_token_id": 50256,
-   "encoder": {
-     "_name_or_path": "google/vit-base-patch16-224-in21k",
-     "add_cross_attention": false,
-     "architectures": [
-       "ViTModel"
-     ],
-     "attention_probs_dropout_prob": 0.0,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": null,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "early_stopping": false,
-     "encoder_no_repeat_ngram_size": 0,
-     "encoder_stride": 16,
-     "eos_token_id": null,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "hidden_act": "gelu",
-     "hidden_dropout_prob": 0.0,
-     "hidden_size": 768,
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "image_size": 224,
-     "initializer_range": 0.02,
-     "intermediate_size": 3072,
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "layer_norm_eps": 1e-12,
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "min_length": 0,
-     "model_type": "vit",
-     "no_repeat_ngram_size": 0,
-     "num_attention_heads": 12,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_channels": 3,
-     "num_hidden_layers": 12,
-     "num_return_sequences": 1,
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": null,
-     "patch_size": 16,
-     "prefix": null,
-     "problem_type": null,
-     "pruned_heads": {},
-     "qkv_bias": true,
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "sep_token_id": null,
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": null,
-     "torchscript": false,
-     "typical_p": 1.0,
-     "use_bfloat16": false
-   },
-   "eos_token_id": 50256,
-   "is_encoder_decoder": true,
-   "model_type": "vision-encoder-decoder",
-   "pad_token_id": 50256,
-   "tie_word_embeddings": false,
-   "torch_dtype": "float32",
-   "transformers_version": "4.40.1"
- }
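For context, the deleted file described a vision-encoder-decoder checkpoint: a ViT encoder ("google/vit-base-patch16-224-in21k") paired with a GPT-2 decoder that has cross-attention enabled, the usual transformers setup for image captioning. Below is a minimal sketch, not the uploader's actual code, of how an equivalent config.json could be rebuilt with the transformers library; the model identifiers come from the "_name_or_path" fields above, the token id 50256 is GPT-2's end-of-text token as pinned in the deleted file, and the output directory name is hypothetical.

# Sketch: rebuild a config equivalent to the deleted config.json.
from transformers import GPT2Config, ViTConfig, VisionEncoderDecoderConfig

# Sub-configs matching the "encoder" and "decoder" blocks of the deleted file.
encoder_config = ViTConfig.from_pretrained("google/vit-base-patch16-224-in21k")
decoder_config = GPT2Config.from_pretrained("gpt2", add_cross_attention=True)

# Combine them and pin the top-level token ids the deleted file set
# (50256 is GPT-2's <|endoftext|> token, used here as BOS/EOS/PAD).
config = VisionEncoderDecoderConfig.from_encoder_decoder_configs(
    encoder_config, decoder_config
)
config.decoder_start_token_id = 50256
config.eos_token_id = 50256
config.pad_token_id = 50256
config.tie_word_embeddings = False

# save_pretrained writes a config.json with the same structure as the
# file removed in this commit; the directory name is hypothetical.
config.save_pretrained("vit-gpt2-image-captioning")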