KenWu committed
Commit 8982a5b
1 Parent(s): 3401549

Upload folder using huggingface_hub

config_ft.yml CHANGED
@@ -8,12 +8,13 @@
  model_params: {decoder: {resblock_dilation_sizes: [[1, 3, 5], [1, 3, 5], [1, 3,
  5]], resblock_kernel_sizes: [3, 7, 11], type: hifigan, upsample_initial_channel: 512,
  upsample_kernel_sizes: [20, 10, 6, 4], upsample_rates: [10, 5, 3, 2]}, diffusion: {
- dist: {estimate_sigma_data: true, mean: -3.0, sigma_data: 0.2, std: 1.0}, embedding_mask_proba: 0.1,
- transformer: {head_features: 64, multiplier: 2, num_heads: 8, num_layers: 3}},
- dim_in: 64, dropout: 0.2, hidden_dim: 512, max_conv_dim: 512, max_dur: 50, multispeaker: true,
- n_layer: 3, n_mels: 80, n_token: 178, slm: {hidden: 768, initial_channel: 64,
- model: microsoft/wavlm-base-plus, nlayers: 13, sr: 16000}, style_dim: 128},
- optimizer_params: {bert_lr: 1.0e-05, ft_lr: 0.0001, lr: 0.0001}, preprocess_params: {
- spect_params: {hop_length: 300, n_fft: 2048, win_length: 1200}, sr: 24000}, pretrained_model: Models/LibriTTS/epochs_2nd_00020.pth,
+ dist: {estimate_sigma_data: true, mean: -3.0, sigma_data: 0.21043557314670575,
+ std: 1.0}, embedding_mask_proba: 0.1, transformer: {head_features: 64, multiplier: 2,
+ num_heads: 8, num_layers: 3}}, dim_in: 64, dropout: 0.2, hidden_dim: 512,
+ max_conv_dim: 512, max_dur: 50, multispeaker: true, n_layer: 3, n_mels: 80, n_token: 178,
+ slm: {hidden: 768, initial_channel: 64, model: microsoft/wavlm-base-plus, nlayers: 13,
+ sr: 16000}, style_dim: 128}, optimizer_params: {bert_lr: 1.0e-05, ft_lr: 0.0001,
+ lr: 0.0001}, preprocess_params: {spect_params: {hop_length: 300, n_fft: 2048,
+ win_length: 1200}, sr: 24000}, pretrained_model: Models/LibriTTS/epochs_2nd_00020.pth,
  save_freq: 5, second_stage_load_pretrained: true, slmadv_params: {batch_percentage: 0.5,
  iter: 10, max_len: 500, min_len: 400, scale: 0.01, sig: 1.5, thresh: 5}}
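The only value that actually changes in this hunk is sigma_data, which moves from the rounded 0.2 to a value presumably estimated from the data (in line with estimate_sigma_data: true); the remaining lines are merely re-wrapped by the YAML dumper. A minimal sketch for inspecting the updated file, assuming PyYAML is installed and config_ft.yml sits in the working directory (the key names are taken from the diff above):

# Minimal sketch: load and inspect the finetuning config (assumes PyYAML).
import yaml

with open("config_ft.yml") as f:
    cfg = yaml.safe_load(f)

# sigma_data is the value this commit updates from the rounded 0.2.
print(cfg["model_params"]["diffusion"]["dist"]["sigma_data"])  # 0.21043557314670575
print(cfg["preprocess_params"]["sr"])                          # 24000
print(cfg["pretrained_model"])  # Models/LibriTTS/epochs_2nd_00020.pth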
epoch_2nd_00004.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fec01ea68b83ee0f9606126dfa656a442c8efb508005a9a63c44c17944caf748
+ size 2040174838
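The checkpoints are stored through Git LFS, so the repository itself only holds pointer files: oid is the SHA-256 of the real payload and size is its length in bytes. A hedged sketch for verifying a downloaded copy against the pointer above; the local filename is an assumption about where the file was saved.

# Verify a downloaded checkpoint against the Git LFS pointer shown above.
import hashlib, os

EXPECTED_OID = "fec01ea68b83ee0f9606126dfa656a442c8efb508005a9a63c44c17944caf748"
EXPECTED_SIZE = 2040174838
path = "epoch_2nd_00004.pth"  # assumed local path of the materialized file

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("checkpoint matches the LFS pointer")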
epoch_2nd_00009.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5b8a0ac31a4c9cfa25aa1ebeee28e5032554f310bbe9fb990be63db4cc0e0c4
+ size 2040174838
epoch_2nd_00014.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d13ea9bcdffc3048fa853bc17e076987d09706681378152dba86d1714ffeae9b
+ size 2242832422
epoch_2nd_00019.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3597babc5e5d73a7c32ad2263d8123180cbdedcdcdc561625c15a1f3b7655c59
+ size 2242832422
epoch_2nd_00024.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be1e5f52b2a54e56bf023768b9f8285a294fcb13f2924d46fc362caa188bc220
+ size 2242832422
epoch_2nd_00029.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27fe843f0d67aeefa09a4c62da0ad03e49328e7e7dc11b6cca54625f111c7a39
+ size 2242832422
epoch_2nd_00034.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4e92483868d8417da9e9e8bc33805ac8ecaa9f59ec2817b374f8427ef7b3d16
+ size 2242832422
epoch_2nd_00039.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c94ac2a3838887f32f7b4872c9d981388535cd55343c8a0d85c8e7667b94877d
+ size 2242832422
epoch_2nd_00044.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cab4b5a0150fbdc36337adb35cdbe19334934836ad2a54bd9fb729293ab8892c
+ size 2242832422
epoch_2nd_00049.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d79d2dae12c27270c8096b88f231efd7d2616abedf4b39ba9b65f779f1e1f834
+ size 2242832422
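Checkpoints are uploaded every five epochs (epochs 4 through 49), matching save_freq: 5 in config_ft.yml. The internal layout of these .pth files is not documented in this commit, so the sketch below, which assumes PyTorch is installed and the file has been materialized locally (the repo stores only LFS pointers), just lists the top-level keys rather than presuming a structure.

# Hedged sketch: peek inside one of the uploaded checkpoints.
import torch

# weights_only=False is needed on newer PyTorch to unpickle a full training
# checkpoint (model plus auxiliary state) rather than bare tensors.
ckpt = torch.load("epoch_2nd_00049.pth", map_location="cpu", weights_only=False)
print(type(ckpt))
if isinstance(ckpt, dict):
    print(list(ckpt.keys()))  # whatever top-level entries the training script saved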
tensorboard/events.out.tfevents.1721013723.ip-172-31-45-72.551439.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c300e41f42d4776d10c0b2799f1cae21c7f39162a89071b81ef789a2cd70574d
+ size 551544
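The TensorBoard events file can be read programmatically once downloaded. A sketch assuming the tensorboard package is installed and the events file sits in a local tensorboard/ directory; the scalar tag names depend on the training script, so they are simply enumerated rather than assumed.

# Hedged sketch: list the scalar series logged during finetuning.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("tensorboard")  # directory containing the events file
ea.Reload()
for tag in ea.Tags()["scalars"]:
    events = ea.Scalars(tag)
    print(tag, "->", len(events), "points; last value:", events[-1].value)

Running tensorboard --logdir tensorboard serves the same data interactively.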
train.log CHANGED
The diff for this file is too large to render.