soiz committed on
Commit
7ab2c9b
1 Parent(s): 967362a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -13
app.py CHANGED
@@ -40,21 +40,30 @@ def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None,
40
  # ネガティブプロンプトとその他のパラメータをログに出力
41
  print(f"Prompt: {full_prompt}, Negative Prompt: {negative_prompt}, CFG Scale: {cfg_scale}, Steps: {num_inference_steps}, Sampler: {sampler}")
42
 
43
- # Construct the function call parameters dynamically
44
- inputs = [full_prompt]
45
- if negative_prompt:
46
- inputs.append(negative_prompt)
47
- if cfg_scale is not None:
48
- inputs.append(cfg_scale)
49
- if num_inference_steps is not None:
50
- inputs.append(num_inference_steps)
51
- if sampler:
52
- inputs.append(sampler)
53
-
54
  try:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
55
  # モデル呼び出し
56
- result = models_load[model_str](*inputs)
57
-
58
  # Debugging result type
59
  print(f"Result type: {type(result)}, Result: {result}")
60
 
 
40
  # ネガティブプロンプトとその他のパラメータをログに出力
41
  print(f"Prompt: {full_prompt}, Negative Prompt: {negative_prompt}, CFG Scale: {cfg_scale}, Steps: {num_inference_steps}, Sampler: {sampler}")
42
 
 
 
 
 
 
 
 
 
 
 
 
43
  try:
44
+ # Adjust the inputs depending on model capabilities
45
+ if negative_prompt:
46
+ inputs = {
47
+ 'prompt': full_prompt,
48
+ 'negative_prompt': negative_prompt,
49
+ 'cfg_scale': cfg_scale,
50
+ 'num_inference_steps': num_inference_steps,
51
+ 'sampler': sampler
52
+ }
53
+ else:
54
+ inputs = {
55
+ 'prompt': full_prompt,
56
+ 'cfg_scale': cfg_scale,
57
+ 'num_inference_steps': num_inference_steps,
58
+ 'sampler': sampler
59
+ }
60
+
61
+ # Filter out None values
62
+ inputs = {k: v for k, v in inputs.items() if v is not None}
63
+
64
  # モデル呼び出し
65
+ result = models_load[model_str](**inputs)
66
+
67
  # Debugging result type
68
  print(f"Result type: {type(result)}, Result: {result}")
69