# chips-detection / app.py

import os
import glob
import shutil

import gradio as gr
from ultralytics import YOLO

# Initialize YOLO model
model = YOLO('./best.pt')
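# NOTE: './best.pt' is assumed to be the custom-trained weights file shipped
# alongside this script; adjust the path if the weights live elsewhere.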

# Class to color mapping
ClassToColorMapping = {
    0: "black",
    1: "gray",
    2: "green",
    3: "purple",
    4: "red",
}

def check_ffmpeg():
    """Check if ffmpeg is installed."""
    if shutil.which("ffmpeg") is None:
        raise EnvironmentError(
            "ffmpeg is not installed or not found in the system PATH. "
            "Please install ffmpeg to proceed."
        )
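
# ffmpeg is needed because the tracker saves its annotated output as .avi,
# which browsers cannot play; process_video() converts it to .mp4 below.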

def process_video(video_path, output_option):
    # Check if ffmpeg is installed
    check_ffmpeg()

    # Run the YOLO model on the video, specifying the tracker configuration
    results = model.track(video_path, save=True, tracker="bytetrack.yaml", persist=True)
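    # `results` is expected to be a list of per-frame Ultralytics Results
    # objects, each exposing its detections through a `boxes` attribute.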

    # Get the video file name without extension and directory
    video_name = os.path.splitext(os.path.basename(video_path))[0]

    # Find the latest directory created in 'runs/detect/'
    output_dir = max(glob.glob('./runs/detect/*'), key=os.path.getmtime)

    # Find the saved video file in the latest directory with the specific video name
    video_files = glob.glob(os.path.join(output_dir, f'{video_name}*.avi'))
    if not video_files:
        raise Exception(f"No .avi video files found in directory {output_dir} for {video_name}")

    # Convert the video to mp4
    mp4_files = []
    for file in video_files:
        mp4_file = f"{file[:-4]}.mp4"
        # Quote the file paths to handle spaces and overwrite without prompt
        os.system(f'ffmpeg -y -i "{file}" -vcodec libx264 "{mp4_file}"')
        os.remove(file)  # Remove the original .avi file after conversion
        mp4_files.append(mp4_file)
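
    # Sketch of a more defensive alternative (not wired in): running ffmpeg via
    # subprocess.run surfaces conversion failures as exceptions instead of
    # silently ignored exit codes.
    #
    #   import subprocess
    #   subprocess.run(
    #       ["ffmpeg", "-y", "-i", file, "-vcodec", "libx264", mp4_file],
    #       check=True,
    #   )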

    # Check again for the converted .mp4 video files specifically for the processed video
    matched_mp4_files = [file for file in mp4_files if video_name in file]
    if not matched_mp4_files:
        raise Exception(f"No .mp4 video files found in directory {output_dir} after conversion for {video_name}.")

    # Initialize the per-color count dictionary
    color_counts = {color: 0 for color in ClassToColorMapping.values()}

    # Count detected boxes across the tracked frames, mapping each class index to its color
    for result in results:
        if hasattr(result, 'boxes'):
            for box in result.boxes:
                class_id = int(box.cls)  # `cls` is the predicted class index
                color = ClassToColorMapping.get(class_id, "unknown")
                if color in color_counts:
                    color_counts[color] += 1

    # Default video output: the first converted .mp4 for this video
    video_output = matched_mp4_files[0] if matched_mp4_files else None

    # Determine the output based on the user's choice
    if output_option == "Count":
        return color_counts, None
    elif output_option == "Video":
        return None, video_output
    elif output_option == "Both":
        return color_counts, video_output

# Define Gradio inputs
video_input = gr.Video()
output_option_input = gr.Radio(choices=["Count", "Video", "Both"], label="Select output type")

# Define a single example for the interface
examples = [
    [os.path.abspath("example_video1.mp4"), "Both"]
]
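# example_video1.mp4 is assumed to sit next to app.py; the example will not load if it is missing.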

video_interface = gr.Interface(
    fn=process_video,
    inputs=[video_input, output_option_input],
    # Two outputs: JSON for the color counts and a video player for the processed clip
    outputs=[gr.JSON(label="Color Counts"), "video"],
    title="YOLO Video Tracking Application with Color Counting",
    description=(
        "A simple application that tracks objects in a video with a YOLO model and "
        "counts objects by color. Upload your own video, or click the example to load it."
    ),
    article="""<div>
        <p style="text-align: center">Upload a video file and select the type of output you want: object color counts, the processed video, or both. Then hit Submit to process the video.</p>
    </div>""",
    examples=examples,
    cache_examples=False  # Disable example caching to speed up launch
)

# Deploy the interface with sharing enabled
gr.TabbedInterface([video_interface], ["Track Video"]).launch(share=True)