import os

# Install Flask if it is not already installed
return_code = os.system('pip install flask')
if return_code != 0:
    raise RuntimeError("Failed to install Flask")

import gradio as gr
from random import randint
from all_models import models
from flask import Flask, request, send_file
from io import BytesIO
from PIL import Image, ImageChops

app = Flask(__name__)

# Global dictionary of loaded models
models_load = {}


def load_model(model_name):
    global models_load
    if model_name not in models_load:
        try:
            m = gr.load(f'models/{model_name}')
            print(f"Model {model_name} loaded successfully.")
            models_load[model_name] = m
        except Exception as error:
            print(f"Error loading model {model_name}: {error}")
            # Fall back to a dummy interface so later lookups do not crash
            models_load[model_name] = gr.Interface(lambda txt: None, ['text'], ['image'])


def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None, num_inference_steps=None, sampler=None):
    if model_str not in models_load:
        load_model(model_str)
    if model_str in models_load:
        if noise == "random":
            noise = str(randint(0, 99999999999))
        full_prompt = f'{prompt} {noise}' if noise else prompt

        # Log the negative prompt and the other parameters
        print(f"Prompt: {full_prompt}, Negative Prompt: {negative_prompt}, CFG Scale: {cfg_scale}, Steps: {num_inference_steps}, Sampler: {sampler}")

        try:
            # Adjust the inputs depending on model capabilities
            if negative_prompt:
                inputs = {
                    'prompt': full_prompt,
                    'negative_prompt': negative_prompt,
                    'cfg_scale': cfg_scale,
                    'num_inference_steps': num_inference_steps,
                    'sampler': sampler
                }
            else:
                inputs = {
                    'prompt': full_prompt,
                    'cfg_scale': cfg_scale,
                    'num_inference_steps': num_inference_steps,
                    'sampler': sampler
                }

            # Filter out None values
            inputs = {k: v for k, v in inputs.items() if v is not None}

            # Call the model
            result = models_load[model_str](**inputs)

            # Debugging result type
            print(f"Result type: {type(result)}, Result: {result}")

            # Check if result is an image or a file path
            if isinstance(result, str):
                # Assuming result might be a file path
                if os.path.exists(result):
                    image = Image.open(result)
                else:
                    print(f"File path not found: {result}")
                    return None, 'File path not found'
            elif isinstance(result, Image.Image):
                image = result
            else:
                print("Result is not an image:", type(result))
                return None, f"Unexpected result type: {type(result)}"

            # Check if the image is completely black
            # (convert to RGB so the comparison works regardless of the model's output mode)
            black = Image.new('RGB', image.size, (0, 0, 0))
            if ImageChops.difference(image.convert('RGB'), black).getbbox() is None:
                return None, 'The image is completely black. There may be a parameter that cannot be specified, or an error may have occurred internally.'

            return image, None
        except Exception as e:
            print("Error generating image:", e)
            return None, f"Error generating image: {e}"
    else:
        print(f"Model {model_str} not found")
        return None, f"Model {model_str} not found"


@app.route('/', methods=['GET'])
def home():
    prompt = request.args.get('prompt', '')
    model = request.args.get('model', '')
    negative_prompt = request.args.get('Nprompt', None)
    noise = request.args.get('noise', None)
    cfg_scale = request.args.get('cfg_scale', None)
    num_inference_steps = request.args.get('steps', None)
    sampler = request.args.get('sampler', None)

    try:
        if cfg_scale is not None:
            cfg_scale = float(cfg_scale)
    except ValueError:
        return 'Invalid "cfg_scale" parameter. It should be a number.', 400

    try:
        if num_inference_steps is not None:
            num_inference_steps = int(num_inference_steps)
    except ValueError:
        return 'Invalid "steps" parameter. It should be an integer.', 400

    if not model:
        return 'Please provide a "model" query parameter in the URL.', 400
    if not prompt:
        return 'Please provide a "prompt" query parameter in the URL.', 400

    # Generate the image
    image, error_message = gen_fn(model, prompt, negative_prompt, noise, cfg_scale, num_inference_steps, sampler)
    if error_message:
        return error_message, 400

    if isinstance(image, Image.Image):  # Ensure the result is a PIL image
        # Save the image to a BytesIO object and return it as a PNG response
        img_io = BytesIO()
        image.save(img_io, format='PNG')
        img_io.seek(0)
        return send_file(img_io, mimetype='image/png', as_attachment=False)

    return 'Failed to generate image.', 500


if __name__ == '__main__':
    # Launch the Flask app
    app.run(host='0.0.0.0', port=7860)
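# A minimal usage sketch (assumptions: the server is reachable on localhost at the port
# configured above, and <model_name> is one of the entries provided by all_models.models):
#
#   GET http://localhost:7860/?model=<model_name>&prompt=a+watercolor+fox&steps=25&cfg_scale=7&noise=random
#
# Optional query parameters handled by home(): Nprompt (negative prompt), noise, cfg_scale,
# steps, and sampler. The response body is the generated PNG on success, or a plain-text
# error message with a 4xx/5xx status code.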