{
    "lr_encoder": "1e-5",
    "lr_others": "5e-5",
    "num_steps": 1,
    "warmup_ratio": 0.1,
    "train_batch_size": 8,
    "eval_every": 900000,
    "max_width": 10,
    "model_name": "FacebookAI/xlm-roberta-large",
    "fine_tune": true,
    "subtoken_pooling": "first",
    "hidden_size": 768,
    "span_mode": "markerV0",
    "dropout": 0.4,
    "root_dir": "ablation_backbone",
    "train_data": "dataset/train.json",
    "prev_path": "none",
    "size_sup": -1,
    "max_types": 50,
    "shuffle_types": true,
    "random_drop": true,
    "max_neg_type_ratio": 1,
    "max_len": 384,
    "name": "large",
    "log_dir": "logs"
}