{
  "MSA_end": null,
  "MSA_filename": null,
  "MSA_start": null,
  "MSA_weight_file_name": null,
  "_name_or_path": "Tranception_Large",
  "activation_function": "squared_relu",
  "architectures": [
    "TranceptionLMHeadModel"
  ],
  "attention_mode": "tranception",
  "attn_pdrop": 0.1,
  "bos_token_id": 1,
  "clustal_omega_location": null,
  "embd_pdrop": 0.1,
  "eos_token_id": 2,
  "full_protein_length": null,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "local_batch_size": 1,
  "model_type": "tranception",
  "n_ctx": 1024,
  "n_embd": 1280,
  "n_head": 20,
  "n_inner": 5120,
  "n_layer": 36,
  "n_positions": 1024,
  "position_embedding": "grouped_alibi",
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "retrieval_aggregation_mode": null,
  "retrieval_inference_weight": 0.6,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "scoring_window": "optimal",
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "tokenizer": null,
  "torch_dtype": "float32",
  "transformers_version": "4.17.0",
  "use_cache": true,
  "vocab_size": 25
}