Veldrovive committed
Commit bab7ed6
1 Parent(s): 87de3b1

Upload decoder/3B_fp16_highlr/decoder_config.json with huggingface_hub

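The commit message notes the file was uploaded with huggingface_hub. Below is a minimal sketch of such an upload, assuming the library's standard HfApi.upload_file call and an illustrative local file name; the actual upload script is not part of this commit.

from huggingface_hub import HfApi

api = HfApi()

# Hypothetical local copy of the config; the real source path is not shown in this commit.
api.upload_file(
    path_or_fileobj="decoder_config.json",
    path_in_repo="decoder/3B_fp16_highlr/decoder_config.json",
    repo_id="laion/DALLE2-PyTorch",
    commit_message="Upload decoder/3B_fp16_highlr/decoder_config.json with huggingface_hub",
)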
decoder/3B_fp16_highlr/decoder_config.json ADDED
@@ -0,0 +1,125 @@
+ {
+     "seed": 1,
+
+     "decoder": {
+         "unets": [
+             {
+                 "dim": 576,
+                 "image_embed_dim": 768,
+                 "text_embed_dim": 768,
+                 "cond_on_text_encodings": false,
+                 "channels": 3,
+                 "dim_mults": [1, 2, 3, 4],
+                 "self_attn": [false, true, true, true],
+                 "num_resnet_blocks": 4,
+                 "attn_heads": 16,
+                 "attn_dim_head": 64,
+                 "sparse_attn": false,
+                 "memory_efficient": true
+             }
+         ],
+         "clip": {
+             "make": "openai",
+             "model": "ViT-L/14"
+         },
+         "image_sizes": [64],
+         "channels": 3,
+         "timesteps": 1000,
+         "loss_type": "l2",
+         "beta_schedule": ["cosine"],
+         "learned_variance": false
+     },
+     "data": {
+         "webdataset_base_url": "pipe:aws s3 cp --quiet s3://s-datasets/laion5b/laion2B-data/{}.tar -",
+         "num_workers": 6,
+         "batch_size": 40,
+         "start_shard": 0,
+         "end_shard": 231349,
+         "shard_width": 6,
+         "index_width": 4,
+         "splits": {
+             "train": 0.75,
+             "val": 0.15,
+             "test": 0.1
+         },
+         "shuffle_train": false,
+         "resample_train": true,
+         "preprocessing": {
+             "RandomResizedCrop": {
+                 "size": [224, 224],
+                 "scale": [0.75, 1.0],
+                 "ratio": [1.0, 1.0]
+             },
+             "ToTensor": true
+         }
+     },
+     "train": {
+         "epochs": 1000,
+         "lr": 9.6e-4,
+         "wd": 0.0,
+         "warmup_steps": 1000,
+         "max_grad_norm": 0.5,
+         "save_every_n_samples": 1500000,
+         "n_sample_images": 10,
+         "device": "cuda:0",
+         "epoch_samples": 6000000,
+         "validation_samples": 100000,
+         "use_ema": true,
+         "ema_beta": 0.9999,
+         "unet_training_mask": [true],
+         "find_unused_parameters": false
+     },
+     "evaluate": {
+         "n_evaluation_samples": 10,
+         "FID": {
+             "feature": 64
+         },
+         "LPIPS": {
+             "net_type": "vgg",
+             "reduction": "mean"
+         }
+     },
+     "tracker": {
+         "data_path": "/fsx/aidan/new/multinode/experiments/deepspeed_fp16_2b/.tracker-data-highlr",
+         "overwrite_data_path": true,
+
+         "log": {
+             "log_type": "wandb",
+
+             "wandb_entity": "Veldrovive",
+             "wandb_project": "dalle2_train_decoder",
+             "wandb_run_name": "High LR - 3B deepspeed fp16 2B",
+
+             "auto_resume": true,
+             "verbose": true
+         },
+
+         "load": {
+             "load_from": "local",
+             "only_auto_resume": true,
+             "file_path": "/fsx/aidan/new/multinode/experiments/deepspeed_fp16_2b/models_highlr/checkpoints/latest.pth"
+         },
+
+         "save": [{
+             "save_to": "huggingface",
+             "huggingface_repo": "laion/DALLE2-PyTorch",
+
+             "save_meta_to": "decoder/3B_fp16_highlr/",
+             "save_latest_to": "decoder/3B_fp16_highlr/latest.pth",
+
+             "save_type": "model"
+         },{
+             "save_to": "huggingface",
+             "huggingface_repo": "laion/DALLE2-PyTorch",
+
+             "save_latest_to": "decoder/3B_fp16_highlr/checkpoints/latest.pth",
+
+             "save_type": "checkpoint"
+         },{
+             "save_to": "local",
+             "save_latest_to": "/fsx/aidan/new/multinode/experiments/deepspeed_fp16_2b/models_highlr/checkpoints/latest.pth",
+
+             "save_type": "checkpoint"
+         }]
+     }
+ }
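The uploaded file is an ordinary JSON document, so it can be fetched from the laion/DALLE2-PyTorch repo and inspected without any DALLE2-pytorch code. Below is a minimal sketch using huggingface_hub's hf_hub_download and the standard library; how the parsed dict is then handed to the decoder training script is left to that project's own tooling.

import json
from huggingface_hub import hf_hub_download

# Download the decoder config from the model repo and parse it.
config_path = hf_hub_download(
    repo_id="laion/DALLE2-PyTorch",
    filename="decoder/3B_fp16_highlr/decoder_config.json",
)
with open(config_path) as f:
    config = json.load(f)

# For example, the single 576-dim U-Net and the high learning rate this run is named after:
print(config["decoder"]["unets"][0]["dim"])  # 576
print(config["train"]["lr"])                 # 0.00096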