{
    "dataset_name": "Mtot_Nbody_SIMBA",
    "dataset_config_name": null,
    "dataset_conditional_name": null,
    "model_config_name_or_path": null,
    "vae_model": "stabilityai/sd-vae-ft-ema",
    "train_data_dir": null,
    "output_dir": "output/ddpm-ema-256-Mtot-Nbody-SIMBA-1683055770",
    "overwrite_output_dir": false,
    "cache_dir": "data",
    "resolution": 256,
    "data_size": 13500,
    "super_resolution": null,
    "conditional": false,
    "center_crop": false,
    "random_flip": false,
    "train_batch_size": 32,
    "eval_batch_size": 16,
    "dataloader_num_workers": 0,
    "num_epochs": 100,
    "save_images_epochs": 10,
    "save_model_epochs": 10,
    "gradient_accumulation_steps": 1,
    "learning_rate": 0.0001,
    "lr_scheduler": "cosine",
    "lr_warmup_steps": 500,
    "adam_beta1": 0.95,
    "adam_beta2": 0.999,
    "adam_weight_decay": 1e-06,
    "adam_epsilon": 1e-08,
    "use_ema": true,
    "ema_inv_gamma": 1.0,
    "ema_power": 0.75,
    "ema_max_decay": 0.9999,
    "push_to_hub": true,
    "hub_token": "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",
    "hub_model_id": null,
    "hub_private_repo": false,
    "logger": "tensorboard",
    "logging_dir": "logs",
    "local_rank": -1,
    "mixed_precision": "no",
    "prediction_type": "epsilon",
    "loss": "mse",
    "ddpm_num_steps": 4000,
    "ddpm_num_inference_steps": 4000,
    "ddpm_beta_schedule": "linear",
    "checkpointing_steps": 10000,
    "checkpoints_total_limit": null,
    "resume_from_checkpoint": null,
    "enable_xformers_memory_efficient_attention": false
}