Spaces:
Runtime error
Adonai Vera committed
Commit • 6c880c2
1 Parent(s): 1671151
Improvements to user feedback and labeling
app.py CHANGED
@@ -1,10 +1,20 @@
-from turtle import title
 import gradio as gr
 from transformers import pipeline
 from PIL import Image
+import os
+from huggingface_hub import HfApi, upload_file
+import io
+import numpy as np
+import uuid

 # Initialize the pipeline with your model
 pipe = pipeline("image-classification", model="SubterraAI/ofwat_material_classification")
+HF_TOKEN = os.getenv('HF_TOKEN')
+DATASET_NAME = "SubterraAI/ofwat_material_loop"
+hf_api = HfApi()
+
+# Directory where the flagged images will be saved
+flagged_data_dir = "./flagged_data"

 material_codes = {
     "AC": "Asphalt Concrete",
@@ -61,6 +71,33 @@ material_full_names_list = [
     'Vitrified Clay Lined'
 ]

+def simple_flag(image, label):
+    # Convert the input image to PIL format and save to a BytesIO object
+    pil_image = Image.fromarray(image.astype(np.uint8))
+    img_byte_arr = io.BytesIO()
+    pil_image.save(img_byte_arr, format='PNG')
+
+    # Generate a unique ID for the image
+    unique_id = str(uuid.uuid4())
+    img_filename = f"{unique_id}.png"
+
+    # Save the image to a BytesIO object
+    image_bytes = img_byte_arr.getvalue()
+
+    # Upload the image to the correct label directory in the Hugging Face dataset
+    label_dir = f"{label}/{img_filename}"
+    upload_file(
+        path_or_fileobj=io.BytesIO(image_bytes),
+        path_in_repo=label_dir,
+        repo_id=DATASET_NAME,
+        repo_type="dataset",
+        token=HF_TOKEN,
+        commit_message=f"Add image with label {label}"
+    )
+
+    return "Thank you for your contribution to the open-source world! Your feedback helps us all move towards a clearer future"
+
+
 def replace_label_with_full_name(res, defect_dict_key_code):
     new_res = {}
     for dic in res:
@@ -88,23 +125,23 @@ def classify_image(image):
     # Extract labels and scores
     return replace_label_with_full_name(res, material_codes)

-
 # Create the Gradio interface
-
-
-"image
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+with gr.Blocks() as demo:
+    gr.Markdown("# Material Classification with AI by Subterra")
+    gr.Markdown("Upload an image to view a classification demonstration leveraging the dataset/library of images collected by WRc & United Utilities during The Water Services Regulation Authority (OFWAT) Innovation Challenge – Artificial Intelligence and Sewers. Not only can you see the initial classification, but you as the user can also inform us if the classification is correct. Your response will be used to retrain this model. The team at Subterra would like to thank all of those involved in collecting this dataset as we hope that other groups will use it to further advance technology solutions for the water industry.")
+
+    with gr.Row():
+        with gr.Column():
+            img_input = gr.Image()
+            submit_button = gr.Button("Classify")
+            examples = gr.Examples(["examples/CS.jpg", "examples/GI.jpg", "examples/PP.jpg", "examples/RC.jpg"], label="Explore Examples", inputs=img_input)
+        with gr.Column():
+            output_label = gr.Label()
+            flagging_options = gr.Radio(material_full_names_list, label="Does this classification look off to you? Your sharp eyes can help correct it. Flag any inaccuracies and suggest the right label!")
+            flag_button = gr.Button("Flag")
+            flag_status = gr.Textbox(label="Every flag you submit polishes our dataset. Thanks for being an active participant in our open-source journey.", visible=True)
+
+    submit_button.click(classify_image, inputs=img_input, outputs=output_label)
+    flag_button.click(simple_flag, inputs=[img_input, flagging_options], outputs=flag_status)
+
+demo.launch()
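
Since simple_flag stores every flagged image in the SubterraAI/ofwat_material_loop dataset repo under a "<label>/<uuid>.png" path, the collected feedback can later be pulled back down and loaded as a labelled dataset for retraining, as the app description promises. Below is a minimal sketch, not part of the commit: it assumes read access to that dataset repo, that images have been flagged under at least two different labels (otherwise the imagefolder loader drops the label column), and uses a placeholder local directory.

# Sketch (not in the commit): download the flagged images and load them as a
# labelled dataset for retraining. Assumes read access to the
# SubterraAI/ofwat_material_loop repo; "./ofwat_material_loop" is a placeholder.
from huggingface_hub import snapshot_download
from datasets import load_dataset

local_dir = snapshot_download(
    repo_id="SubterraAI/ofwat_material_loop",
    repo_type="dataset",
    local_dir="./ofwat_material_loop",
)

# The imagefolder builder infers the class label from each image's parent
# directory, which matches the "<label>/<uuid>.png" layout written by simple_flag.
flagged = load_dataset("imagefolder", data_dir=local_dir, split="train")
print(flagged)
print(flagged.features["label"].names)

From here the images and their inferred labels can feed a standard fine-tune of the SubterraAI/ofwat_material_classification model.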