{ "dataset_name": [ "Mtot_Nbody_SIMBA", "Mgas_SIMBA" ], "dataset_config_name": null, "dataset_conditional_name": null, "model_config_name_or_path": null, "base_channels": 64, "cross_attention_dim": null, "vae": null, "vae_from_pretrained": null, "vae_scaling_factor": null, "train_data_dir": null, "output_dir": "output/ddpm-ema-256-Mtot-Nbody-SIMBA-Mgas-SIMBA-cond-1686233229", "overwrite_output_dir": false, "cache_dir": "/content/drive/MyDrive/CAMELS", "resolution": 256, "data_size": 13500, "super_resolution": null, "local_resize": false, "conditional": true, "center_crop": false, "random_flip": false, "train_batch_size": 16, "eval_batch_size": 16, "dataloader_num_workers": 0, "num_epochs": 200, "save_images_epochs": 10, "save_model_epochs": 10, "gradient_accumulation_steps": 4, "learning_rate": 0.0001, "lr_scheduler": "cosine", "lr_warmup_steps": 500, "adam_beta1": 0.95, "adam_beta2": 0.999, "adam_weight_decay": 1e-06, "adam_epsilon": 1e-08, "use_ema": true, "ema_inv_gamma": 1.0, "ema_power": 0.75, "ema_max_decay": 0.9999, "push_to_hub": true, "hub_token": null, "hub_model_id": null, "hub_private_repo": false, "logger": "wandb", "logging_dir": "logs", "local_rank": -1, "mixed_precision": "no", "prediction_type": "v_prediction", "loss": "mse", "ddpm_num_steps": 1000, "ddpm_num_inference_steps": 1000, "ddpm_beta_schedule": "squaredcos_cap_v2", "checkpointing_steps": 10000, "checkpoints_total_limit": null, "resume_from_checkpoint": "latest", "enable_xformers_memory_efficient_attention": false }