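"""Gradio app for YOLO-based video tracking with per-color object counting.

The app lets a user upload a video, runs a custom-trained YOLO model with the
ByteTrack tracker, converts the annotated output to .mp4 with ffmpeg, and
returns the per-color object counts, the processed video, or both.
"""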
import glob
import os
import shutil
import subprocess

import gradio as gr
from ultralytics import YOLO

# Initialize YOLO model
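# NOTE: './best.pt' is assumed to be a custom-trained checkpoint located next to this script.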
model = YOLO('./best.pt')

# Class to color mapping
ClassToColorMapping = {
    0: "black",
    1: "gray",
    2: "green",
    3: "purple",
    4: "red"
}

def check_ffmpeg():
    """Check if ffmpeg is installed."""
    if shutil.which("ffmpeg") is None:
        raise EnvironmentError("ffmpeg is not installed or not found in the system PATH. Please install ffmpeg to proceed.")

def process_video(video_path, output_option):
    # Check if ffmpeg is installed
    check_ffmpeg()

    # Track objects with the ByteTrack tracker. save=True writes an annotated copy
    # of the video under ./runs/detect/, and persist=True keeps the tracker state
    # between calls so track IDs stay consistent.
    results = model.track(video_path, save=True, tracker="bytetrack.yaml", persist=True)

    # Get the video file name without extension and directory
    video_name = os.path.splitext(os.path.basename(video_path))[0]
    
    # Find the latest directory created in 'runs/detect/'
    output_dir = max(glob.glob('./runs/detect/*'), key=os.path.getmtime)
    
    # Find the saved video file in the latest directory with the specific video name
    video_files = glob.glob(os.path.join(output_dir, f'{video_name}*.avi'))
    
    if not video_files:
        raise FileNotFoundError(f"No .avi video files found in directory {output_dir} for {video_name}")

    # Convert each .avi to .mp4 (H.264) so it can be played back in the Gradio video component
    mp4_files = []
    for file in video_files:
        mp4_file = f"{file[:-4]}.mp4"
        # Passing the arguments as a list handles spaces in paths; -y overwrites
        # without prompting, and check=True raises if the conversion fails.
        subprocess.run(["ffmpeg", "-y", "-i", file, "-vcodec", "libx264", mp4_file], check=True)
        os.remove(file)  # Remove the original .avi only after a successful conversion
        mp4_files.append(mp4_file)

    # Check again for the converted .mp4 video files specifically for the processed video
    matched_mp4_files = [file for file in mp4_files if video_name in file]
    
    if not matched_mp4_files:
        raise RuntimeError(f"No .mp4 video files found in directory {output_dir} after conversion for {video_name}.")

    # Initialize object count dictionary based on colors
    color_counts = {color: 0 for color in ClassToColorMapping.values()}

    # Count objects per color. Tracking is enabled, so each track ID is counted
    # only once; boxes without a track ID fall back to per-detection counting.
    seen_track_ids = set()
    for result in results:
        if result.boxes is None:
            continue
        for box in result.boxes:
            class_id = int(box.cls)  # class index of this detection
            color = ClassToColorMapping.get(class_id, "unknown")
            if color not in color_counts:
                continue
            track_id = int(box.id) if box.id is not None else None
            if track_id is None or (class_id, track_id) not in seen_track_ids:
                color_counts[color] += 1
                if track_id is not None:
                    seen_track_ids.add((class_id, track_id))

    # The first matching .mp4 is the processed video to return
    video_output = matched_mp4_files[0]

    # Determine the output based on user's choice
    if output_option == "Count":
        return color_counts, None
    elif output_option == "Video":
        return None, video_output
    elif output_option == "Both":
        return color_counts, video_output

# Define Gradio inputs
video_input = gr.Video()
output_option_input = gr.Radio(choices=["Count", "Video", "Both"], label="Select output type")

# Define a single example for the interface
examples = [
    [os.path.abspath("example_video1.mp4"), "Both"]
]
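# NOTE: 'example_video1.mp4' is assumed to ship alongside this script; remove or
# replace this entry if that file is not present.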

video_interface = gr.Interface(
    fn=process_video,
    inputs=[video_input, output_option_input],
    outputs=[gr.JSON(label="Color Counts"), "video"],  # Ensure two outputs are defined, using JSON for color counts
    title="YOLO Video Tracking Application with Color Counting",
    description="A simple application to track objects in a video using YOLO model and count objects by color. Upload your own video, or click one of the examples to load them.",
    article="""<div>
                <p style="text-align: center">Upload a video file and select the type of output you want: object color counts, processed video, or both. Then, hit submit to process the video.</p>
               </div>""",
    examples=examples,
    cache_examples=False  # Disable caching to speed up launch
)

# Deploy the interface with share enabled
gr.TabbedInterface([video_interface], ["Track Video"]).launch(share=True)
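# Running this script (e.g. `python app.py`, assuming that is the file name) prints a local URL
# and, because share=True, a temporary public *.gradio.live link.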