jacktheporsche committed
Commit a8e358d
Parent: 61544a1

Update app.py

Files changed (1): app.py (+108 -62)
app.py CHANGED
```diff
@@ -1,4 +1,12 @@
+import os
+import gc
+import gradio as gr
+import numpy as np
 import torch
+import json
+import spaces
+import config
+import utils
 import logging
 from PIL import Image, PngImagePlugin
 from datetime import datetime
@@ -12,6 +20,11 @@ DESCRIPTION = "RealVis XL"
 if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU. </p>"
 IS_COLAB = utils.is_google_colab() or os.getenv("IS_COLAB") == "1"
+HF_TOKEN = os.getenv("HF_TOKEN")
+CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
+MIN_IMAGE_SIZE = int(os.getenv("MIN_IMAGE_SIZE", "512"))
+MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "2048"))
+USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE") == "1"
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
 OUTPUT_DIR = os.getenv("OUTPUT_DIR", "./outputs")
 
@@ -52,6 +65,10 @@ def load_pipeline(model_name):
 
 @spaces.GPU
 def generate(
+    prompt: str,
+    negative_prompt: str = "",
+    seed: int = 0,
+    custom_width: int = 1024,
     custom_height: int = 1024,
     guidance_scale: float = 7.0,
     num_inference_steps: int = 30,
@@ -131,77 +148,105 @@ with gr.Blocks(css="style.css") as demo:
         elem_id="subtitle",
     )
     gr.DuplicateButton(
-        placeholder="Enter a negative prompt",
-        value=""
+        value="Duplicate Space for private use",
+        elem_id="duplicate-button",
+        visible=os.getenv("SHOW_DUPLICATE_BUTTON") == "1",
     )
-    aspect_ratio_selector = gr.Radio(
-        label="Aspect Ratio",
-        choices=config.aspect_ratios,
-        value="1024 x 1024",
-        container=True,
-    )
-    with gr.Group(visible=False) as custom_resolution:
+    with gr.Group():
         with gr.Row():
-            custom_width = gr.Slider(
-                label="Width",
-                minimum=MIN_IMAGE_SIZE,
-                maximum=MAX_IMAGE_SIZE,
-                step=8,
-                value=1024,
+            prompt = gr.Text(
+                label="Prompt",
+                show_label=False,
+                max_lines=5,
+                placeholder="Enter your prompt",
+                container=False,
             )
-            custom_height = gr.Slider(
-                label="Height",
-                minimum=MIN_IMAGE_SIZE,
-                maximum=MAX_IMAGE_SIZE,
-                step=8,
-                value=1024,
+            run_button = gr.Button(
+                "Generate",
+                variant="primary",
+                scale=0
            )
-    use_upscaler = gr.Checkbox(label="Use Upscaler", value=False)
-    with gr.Row() as upscaler_row:
-        upscaler_strength = gr.Slider(
-            label="Strength",
-            minimum=0,
-            maximum=1,
-            step=0.05,
-            value=0.55,
-            visible=False,
+        result = gr.Gallery(
+            label="Result",
+            columns=1,
+            preview=True,
+            show_label=False
        )
-        upscale_by = gr.Slider(
-            label="Upscale by",
-            minimum=1,
-            maximum=1.5,
-            step=0.1,
-            value=1.5,
-            visible=False,
+    with gr.Accordion(label="Advanced Settings", open=False):
+        negative_prompt = gr.Text(
+            label="Negative Prompt",
+            max_lines=5,
+            placeholder="Enter a negative prompt",
+            value=""
        )
-
-    sampler = gr.Dropdown(
-        label="Sampler",
-        choices=config.sampler_list,
-        interactive=True,
-        value="DPM++ 2M SDE Karras",
-    )
-    with gr.Row():
-        seed = gr.Slider(
-            label="Seed", minimum=0, maximum=utils.MAX_SEED, step=1, value=0
+        aspect_ratio_selector = gr.Radio(
+            label="Aspect Ratio",
+            choices=config.aspect_ratios,
+            value="1024 x 1024",
+            container=True,
        )
-        randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
-    with gr.Group():
-        with gr.Row():
-            guidance_scale = gr.Slider(
-                label="Guidance scale",
+        with gr.Group(visible=False) as custom_resolution:
+            with gr.Row():
+                custom_width = gr.Slider(
+                    label="Width",
+                    minimum=MIN_IMAGE_SIZE,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=8,
+                    value=1024,
+                )
+                custom_height = gr.Slider(
+                    label="Height",
+                    minimum=MIN_IMAGE_SIZE,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=8,
+                    value=1024,
+                )
+        use_upscaler = gr.Checkbox(label="Use Upscaler", value=False)
+        with gr.Row() as upscaler_row:
+            upscaler_strength = gr.Slider(
+                label="Strength",
+                minimum=0,
+                maximum=1,
+                step=0.05,
+                value=0.55,
+                visible=False,
+            )
+            upscale_by = gr.Slider(
+                label="Upscale by",
                 minimum=1,
-                maximum=12,
+                maximum=1.5,
                 step=0.1,
-                value=7.0,
+                value=1.5,
+                visible=False,
             )
-            num_inference_steps = gr.Slider(
-                label="Number of inference steps",
-                minimum=1,
-                maximum=100,
-                step=1,
-                value=28,
+
+        sampler = gr.Dropdown(
+            label="Sampler",
+            choices=config.sampler_list,
+            interactive=True,
+            value="DPM++ 2M SDE Karras",
+        )
+        with gr.Row():
+            seed = gr.Slider(
+                label="Seed", minimum=0, maximum=utils.MAX_SEED, step=1, value=0
            )
+            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+        with gr.Group():
+            with gr.Row():
+                guidance_scale = gr.Slider(
+                    label="Guidance scale",
+                    minimum=1,
+                    maximum=12,
+                    step=0.1,
+                    value=7.0,
+                )
+                num_inference_steps = gr.Slider(
+                    label="Number of inference steps",
+                    minimum=1,
+                    maximum=100,
+                    step=1,
+                    value=28,
+                )
     with gr.Accordion(label="Generation Parameters", open=False):
         gr_metadata = gr.JSON(label="Metadata", show_label=False)
     gr.Examples(
@@ -229,6 +274,8 @@ with gr.Blocks(css="style.css") as demo:
     inputs = [
         prompt,
         negative_prompt,
+        seed,
+        custom_width,
         custom_height,
         guidance_scale,
         num_inference_steps,
@@ -275,5 +322,4 @@ with gr.Blocks(css="style.css") as demo:
         outputs=[result, gr_metadata],
         api_name=False,
     )
-
-demo.queue(max_size=20).launch(debug=IS_COLAB, share=IS_COLAB, show_error=True)
+demo.queue(max_size=20).launch(debug=IS_COLAB, share=IS_COLAB, show_error=True)
```
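Gradio passes the components in an event's `inputs` list to the callback positionally, so the four new `generate()` parameters (`prompt`, `negative_prompt`, `seed`, `custom_width`) only line up because the `inputs` list gains `seed` and `custom_width` at the matching positions. The actual `run_button.click()` / `prompt.submit()` wiring sits outside the hunks above, so the snippet below is only a minimal, self-contained sketch of that pattern with a stub `generate()`; it is not code from this commit.

```python
import gradio as gr

def generate(prompt: str, negative_prompt: str = "", seed: int = 0,
             custom_width: int = 1024, custom_height: int = 1024) -> str:
    # Stand-in for app.py's generate(): it just echoes its arguments so the
    # positional mapping from the `inputs` list below is easy to see.
    return (f"prompt={prompt!r} negative={negative_prompt!r} "
            f"seed={seed} size={custom_width}x{custom_height}")

with gr.Blocks() as demo:
    prompt = gr.Text(label="Prompt")
    negative_prompt = gr.Text(label="Negative Prompt")
    seed = gr.Slider(label="Seed", minimum=0, maximum=2**32 - 1, step=1, value=0)
    custom_width = gr.Slider(label="Width", minimum=512, maximum=2048, step=8, value=1024)
    custom_height = gr.Slider(label="Height", minimum=512, maximum=2048, step=8, value=1024)
    run_button = gr.Button("Generate", variant="primary")
    result = gr.Textbox(label="Result")

    # The components listed in `inputs` are passed to fn in order, so this
    # list has to mirror generate()'s parameter order -- the reason the diff
    # adds seed and custom_width right before custom_height.
    run_button.click(
        fn=generate,
        inputs=[prompt, negative_prompt, seed, custom_width, custom_height],
        outputs=result,
    )

if __name__ == "__main__":
    demo.launch()
```

Keeping the list and the signature in the same order matters: adding a parameter to only one of them would silently shift every later argument rather than raising an error.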
 
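The rebuilt Advanced Settings accordion pairs the aspect-ratio radio (choices from `config.aspect_ratios`, which this diff does not show) with a `custom_resolution` group that starts hidden. Neither the contents of `config.aspect_ratios` nor the handler that toggles the group appears in these hunks, so the sketch below is an assumption about how a "W x H" choice such as the default "1024 x 1024" would typically be resolved into integers; `parse_aspect_ratio` and the "Custom" option are hypothetical names, not part of app.py.

```python
# Hypothetical helper: turn an aspect-ratio choice like "1024 x 1024" into
# (width, height), falling back to the custom sliders for a "Custom" option.
def parse_aspect_ratio(choice: str, custom_width: int, custom_height: int) -> tuple[int, int]:
    if choice.strip().lower() == "custom":
        return custom_width, custom_height
    width, height = (int(part) for part in choice.split("x"))
    return width, height

print(parse_aspect_ratio("1024 x 1024", 1280, 1280))  # (1024, 1024)
print(parse_aspect_ratio("Custom", 1280, 1280))       # (1280, 1280)

# Presumed UI wiring (assumption -- the real .change() handler is not in this
# commit): reveal the custom_resolution group only for the "Custom" choice.
# aspect_ratio_selector.change(
#     fn=lambda choice: gr.update(visible=choice.strip().lower() == "custom"),
#     inputs=aspect_ratio_selector,
#     outputs=custom_resolution,
# )
```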