{
"dataset_name": "Mtot_Nbody_SIMBA",
"dataset_config_name": null,
"dataset_conditional_name": null,
"model_config_name_or_path": null,
"vae_model": "stabilityai/sd-vae-ft-ema",
"train_data_dir": null,
"output_dir": "output/ddpm-ema-256-Mtot-Nbody-SIMBA-1683055770",
"overwrite_output_dir": false,
"cache_dir": "data",
"resolution": 256,
"data_size": 13500,
"super_resolution": null,
"conditional": false,
"center_crop": false,
"random_flip": false,
"train_batch_size": 32,
"eval_batch_size": 16,
"dataloader_num_workers": 0,
"num_epochs": 100,
"save_images_epochs": 10,
"save_model_epochs": 10,
"gradient_accumulation_steps": 1,
"learning_rate": 0.0001,
"lr_scheduler": "cosine",
"lr_warmup_steps": 500,
"adam_beta1": 0.95,
"adam_beta2": 0.999,
"adam_weight_decay": 1e-06,
"adam_epsilon": 1e-08,
"use_ema": true,
"ema_inv_gamma": 1.0,
"ema_power": 0.75,
"ema_max_decay": 0.9999,
"push_to_hub": true,
"hub_token": null,
"hub_model_id": null,
"hub_private_repo": false,
"logger": "tensorboard",
"logging_dir": "logs",
"local_rank": -1,
"mixed_precision": "no",
"prediction_type": "epsilon",
"loss": "mse",
"ddpm_num_steps": 4000,
"ddpm_num_inference_steps": 4000,
"ddpm_beta_schedule": "linear",
"checkpointing_steps": 10000,
"checkpoints_total_limit": null,
"resume_from_checkpoint": null,
"enable_xformers_memory_efficient_attention": false
}