Load model with pipeline rather than use inference API
app.py CHANGED
@@ -1,4 +1,14 @@
 import gradio as gr
-
+from transformers import pipeline
 
-
+pipe = pipeline("image-classification", "umm-maybe/AI-image-detector")
+
+def image_classifier(image):
+    outputs = pipe(image)
+    results = {}
+    for result in outputs:
+        results[result['label']] = result['score']
+    return results
+
+demo = gr.Interface(fn=image_classifier, inputs=gr.Image(type="pil"), outputs="label")
+demo.launch()
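For context, pipe(image) returns a list of dicts with 'label' and 'score' keys, which image_classifier folds into the single {label: score} mapping that the "label" output component expects. A minimal sketch of the same call outside Gradio, assuming a local image at the placeholder path example.jpg (not part of the Space):

from PIL import Image
from transformers import pipeline

# Same checkpoint the Space loads; weights are downloaded on first use.
pipe = pipeline("image-classification", "umm-maybe/AI-image-detector")

# Placeholder path for illustration: any local image works here.
image = Image.open("example.jpg")

# outputs is a list of {'label': ..., 'score': ...} dicts, one per class.
outputs = pipe(image)

# Fold into the {label: score} dict expected by Gradio's label output.
results = {result["label"]: result["score"] for result in outputs}
print(results)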