ASR_config: Utils/ASR/config.yml
ASR_path: Utils/ASR/epoch_00080.pth
F0_path: Utils/JDC/bst.t7
PLBERT_dir: Utils/PLBERT/
batch_size: 6
data_params:
  OOD_data: Data/OOD_texts.txt
  min_length: 50
  root_path: Multi0_data
  train_data: Data/train_list_omni.txt
  val_data: Data/val_list_omni.txt
device: cuda
epochs_1st: 200
epochs_2nd: 60
first_stage_path: first_stage.pth
log_dir: Models/Multi0
log_interval: 10
loss_params:
  TMA_epoch: 50
  diff_epoch: 14
  joint_epoch: 19
  lambda_F0: 1.0
  lambda_ce: 20.0
  lambda_diff: 1.0
  lambda_dur: 1.0
  lambda_gen: 1.0
  lambda_mel: 5.0
  lambda_mono: 1.0
  lambda_norm: 1.0
  lambda_s2s: 1.0
  lambda_slm: 1.0
  lambda_sty: 1.0
max_len: 400
model_params:
  decoder:
    gen_istft_hop_size: 5
    gen_istft_n_fft: 20
    resblock_dilation_sizes: [[1, 3, 5], [1, 3, 5], [1, 3, 5]]
    resblock_kernel_sizes: [3, 7, 11]
    type: istftnet
    upsample_initial_channel: 512
    upsample_kernel_sizes: [20, 12]
    upsample_rates: [10, 6]
  diffusion:
    dist:
      estimate_sigma_data: true
      mean: -3.0
      sigma_data: 0.2969374090377316
      std: 1.0
    embedding_mask_proba: 0.1
    transformer:
      head_features: 64
      multiplier: 2
      num_heads: 8
      num_layers: 3
  dim_in: 64
  dropout: 0.2
  hidden_dim: 512
  max_conv_dim: 512
  max_dur: 50
  multispeaker: true
  n_layer: 3
  n_mels: 80
  n_token: 178
  slm:
    hidden: 768
    initial_channel: 64
    model: microsoft/wavlm-base-plus
    nlayers: 13
    sr: 16000
  style_dim: 128
optimizer_params:
  bert_lr: 1.0e-05
  ft_lr: 1.0e-05
  lr: 0.0001
preprocess_params:
  spect_params:
    hop_length: 300
    n_fft: 2048
    win_length: 1200
  sr: 24000
pretrained_model: ''
resume: true
save_freq: 1
saver_freq_steps: 150
saver_max_ckpts: 5
saver_mode: ITER
second_stage_load_pretrained: true
slmadv_params:
  batch_percentage: 0.5
  iter: 10
  max_len: 400
  min_len: 160
  scale: 0.01
  sig: 1.5
  thresh: 5
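
# Loading sketch (kept as comments so this file stays valid YAML). Assumptions:
# PyYAML is installed and the file is saved as Models/Multi0/config.yml; the
# path and variable names below are illustrative, not taken from this repo's
# training code.
#
#   import yaml
#
#   with open("Models/Multi0/config.yml") as f:
#       config = yaml.safe_load(f)  # parses the mappings above into nested dicts
#
#   sr = config["preprocess_params"]["sr"]                     # 24000 (training sample rate)
#   decoder_type = config["model_params"]["decoder"]["type"]   # "istftnet"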