import gradio as gr
from transformers import pipeline
from PIL import Image
import os
from huggingface_hub import HfApi, upload_file
import io
import numpy as np
import uuid

# Initialize the pipeline with your model
pipe = pipeline("image-classification", model="SubterraAI/ofwat_material_classification")

HF_TOKEN = os.getenv('HF_TOKEN')
DATASET_NAME = "SubterraAI/ofwat_material_loop"
hf_api = HfApi()

# Directory where the flagged images will be saved
flagged_data_dir = "./flagged_data"

material_codes = {
    "AC": "Asphalt Concrete",
    "BL": "Block",
    "BR": "Brick",
    "CI": "Cast Iron",
    "CO": "Concrete",
    "CS": "Corrugated Steel",
    "DI": "Ductile Iron",
    "EP": "Epoxy",
    "GI": "Galvanized Iron",
    "MAR": "Masonry",
    "N": "Not specified, possibly a custom abbreviation",
    "OTH": "Other",
    "PE": "Polyethylene",
    "PF": "Plywood-Faced",
    "PP": "Polypropylene",
    "PVC": "Polyvinyl Chloride",
    "RC": "Reinforced Concrete",
    "ST": "Steel",
    "U": "Unspecified, possibly a custom abbreviation",
    "UPVC": "Unplasticized Polyvinyl Chloride",
    "VC": "Vinyl Coated",
    "XI": "Extra Impact",
    "XP": "Extruded Polystyrene",
    "Z": "Not specified, possibly a custom abbreviation"
}

material_full_names_list = [
    'Asphalt Concrete', 'Block', 'Brick', 'Cast Iron', 'Concrete',
    'Corrugated Steel', 'Ductile Iron', 'Epoxy', 'Galvanized Iron', 'Masonry',
    'Not specified, possibly a custom abbreviation', 'Other', 'Polyethylene',
    'Plywood-Faced', 'Polypropylene', 'Polyvinyl Chloride', 'Reinforced Concrete',
    'Steel', 'Unspecified, possibly a custom abbreviation',
    'Unplasticized Polyvinyl Chloride', 'Vinyl Coated', 'Extra Impact',
    'Extruded Polystyrene', 'Not specified, possibly a custom abbreviation',
    'Vitrified Clay Lined'
]


def simple_flag(image, label):
    # Convert the input image to PIL format and save it to an in-memory PNG buffer
    pil_image = Image.fromarray(image.astype(np.uint8))
    img_byte_arr = io.BytesIO()
    pil_image.save(img_byte_arr, format='PNG')

    # Generate a unique filename for the image
    unique_id = str(uuid.uuid4())
    img_filename = f"{unique_id}.png"

    # Get the raw PNG bytes from the buffer
    image_bytes = img_byte_arr.getvalue()

    # Upload the image to the directory for the selected label in the Hugging Face dataset
    label_dir = f"{label}/{img_filename}"
    upload_file(
        path_or_fileobj=io.BytesIO(image_bytes),
        path_in_repo=label_dir,
        repo_id=DATASET_NAME,
        repo_type="dataset",
        token=HF_TOKEN,
        commit_message=f"Add image with label {label}"
    )

    return ("Thank you for your contribution to the open-source world! "
            "Your feedback helps us all move towards a clearer future.")


def replace_label_with_full_name(res, defect_dict_key_code):
    new_res = {}
    for dic in res:
        # Split the label to handle a possible suffix
        parts = dic["label"].split('_', 1)
        code = parts[0]
        suffix = '_' + parts[1] if len(parts) > 1 else ''
        # Replace the code with its full name, if it exists in the dictionary
        full_name = defect_dict_key_code.get(code, code)
        # Construct the new label, reattaching the suffix if there is one
        new_label = full_name + suffix
        new_res[new_label] = dic["score"]
    return new_res


def classify_image(image):
    # Convert the input image to PIL format
    PIL_image = Image.fromarray(image).convert('RGB')
    # Classify the image using the pipeline
    res = pipe(PIL_image)
    # Map predicted codes to their full material names for display
    return replace_label_with_full_name(res, material_codes)


# Create the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Material Classification with AI by Subterra")
    gr.Markdown(
        "Upload an image to view a classification demonstration leveraging the library of images "
        "collected by WRc & United Utilities during the Water Services Regulation Authority (Ofwat) "
        "Innovation Challenge: Artificial Intelligence and Sewers. Not only can you see the initial "
        "classification, you can also tell us whether it is correct; your response will be used to "
        "retrain this model. The team at Subterra would like to thank everyone involved in collecting "
        "this dataset, and we hope other groups will use it to further advance technology solutions "
        "for the water industry."
    )

    with gr.Row():
        with gr.Column():
            img_input = gr.Image()
            submit_button = gr.Button("Classify")
            examples = gr.Examples(
                ["examples/CS.jpg", "examples/GI.jpg", "examples/PP.jpg", "examples/RC.jpg"],
                label="Explore Examples",
                inputs=img_input
            )
        with gr.Column():
            output_label = gr.Label()
            flagging_options = gr.Radio(
                material_full_names_list,
                label="Does this classification look off to you? Your sharp eyes can help correct it. "
                      "Flag any inaccuracies and suggest the right label!"
            )
            flag_button = gr.Button("Flag")
            flag_status = gr.Textbox(
                label="Every flag you submit polishes our dataset. Thanks for being an active "
                      "participant in our open-source journey.",
                visible=True
            )

    submit_button.click(classify_image, inputs=img_input, outputs=output_label)
    flag_button.click(simple_flag, inputs=[img_input, flagging_options], outputs=flag_status)

demo.launch()