PKaushik committed on
Commit
75c605c
•
1 Parent(s): 14f12be
Files changed (1)
  1. app.py +128 -0
app.py ADDED
@@ -0,0 +1,128 @@
import subprocess
import tempfile
import time
from pathlib import Path

import cv2
import gradio as gr

from inferer import Inferer

pipeline = Inferer("human", device='cuda')
print(f"GPU on? {'🟢' if pipeline.device.type != 'cpu' else '🔴'}")


def fn_image(image, conf_thres, iou_thres):
    return pipeline(image, conf_thres, iou_thres)


def fn_video(video_file, conf_thres, iou_thres, start_sec, duration):
    start_timestamp = time.strftime("%H:%M:%S", time.gmtime(start_sec))
    end_timestamp = time.strftime("%H:%M:%S", time.gmtime(start_sec + duration))

    suffix = Path(video_file).suffix

    # Cut the requested window out of the uploaded video with ffmpeg (stream copy, no re-encode)
    clip_temp_file = tempfile.NamedTemporaryFile(suffix=suffix)
    subprocess.call(
        f"ffmpeg -y -ss {start_timestamp} -i {video_file} -to {end_timestamp} -c copy {clip_temp_file.name}".split()
    )

    # Reader for the clipped file
    cap = cv2.VideoCapture(clip_temp_file.name)

    # This is an intermediary temp file where we'll write the video to.
    # Unfortunately, gradio doesn't play too nicely with videos right now, so we have to do some hackiness
    # with ffmpeg at the end of the function here.
    with tempfile.NamedTemporaryFile(suffix=".mp4") as temp_file:
        # The writer expects 1280x720 BGR frames at 100 fps; frames of a different size may not be written correctly.
        out = cv2.VideoWriter(temp_file.name, cv2.VideoWriter_fourcc(*"MP4V"), 100, (1280, 720))

        num_frames = 0
        max_frames = duration * 100  # matches the 100 fps of the writer above
        while cap.isOpened():
            try:
                ret, frame = cap.read()
                if not ret:
                    break
            except Exception as e:
                print(e)
                continue

            # Run detection on the frame and append the annotated result to the output video
            out.write(pipeline(frame, conf_thres, iou_thres))
            num_frames += 1
            print("Processed {} frames".format(num_frames))
            if num_frames == max_frames:
                break

        cap.release()
        out.release()

        # Aforementioned hackiness: re-encode to H.264 so browsers can play the result
        out_file = tempfile.NamedTemporaryFile(suffix="out.mp4", delete=False)
        subprocess.run(f"ffmpeg -y -loglevel quiet -stats -i {temp_file.name} -c:v libx264 {out_file.name}".split())

    return out_file.name


image_interface = gr.Interface(
    fn=fn_image,
    inputs=[
        "image",
        gr.Slider(0, 1, value=0.5, label="Confidence Threshold"),
        gr.Slider(0, 1, value=0.5, label="IOU Threshold"),
    ],
    outputs=gr.Image(type="file"),
    examples=[["example_1.jpg", 0.5, 0.5], ["example_2.jpg", 0.25, 0.45], ["example_3.jpg", 0.25, 0.45]],
    title="Human Detection",
    description=(
        "Gradio demo for Human detection on images. To use it, simply upload your image or click one of the"
        " examples to load it. Read more at the links below."
    ),
    allow_flagging=False,
    allow_screenshot=False,
)

video_interface = gr.Interface(
    fn=fn_video,
    inputs=[
        gr.Video(type="file"),
        gr.Slider(0, 1, value=0.25, label="Confidence Threshold"),
        gr.Slider(0, 1, value=0.45, label="IOU Threshold"),
        gr.Slider(0, 100, value=0, label="Start Second", step=1),
        gr.Slider(0, 100 if pipeline.device.type != 'cpu' else 3, value=4, label="Duration", step=1),
    ],
    outputs=gr.Video(type="file", format="mp4"),
    examples=[
        ["example_1.mp4", 0.25, 0.45, 0, 2],
        ["example_2.mp4", 0.25, 0.45, 5, 3],
        ["example_3.mp4", 0.25, 0.45, 6, 3],
    ],
    title="Human Detection",
    description=(
        "Gradio demo for Human detection on videos. To use it, simply upload your video or click one of the"
        " examples to load it. Read more at the links below."
    ),
    allow_flagging=False,
    allow_screenshot=False,
)

webcam_interface = gr.Interface(
    fn_image,
    inputs=[
        gr.Image(source='webcam', streaming=True),
        gr.Slider(0, 1, value=0.5, label="Confidence Threshold"),
        gr.Slider(0, 1, value=0.5, label="IOU Threshold"),
    ],
    outputs=gr.Image(type="file"),
    live=True,
    title="Human Detection",
    description=(
        "Gradio demo for Human detection on a real-time webcam feed. To use it, simply allow the browser to access"
        " your webcam. Read more at the links below."
    ),
    allow_flagging=False,
    allow_screenshot=False,
)

if __name__ == "__main__":
    gr.TabbedInterface(
        [video_interface, image_interface, webcam_interface],
        ["Run on Videos!", "Run on Images!", "Run on Webcam!"],
    ).launch()
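
Note: app.py imports Inferer from inferer, which is not part of this commit. To smoke-test the Gradio UI locally without the real detection model, a minimal stand-in could look like the sketch below; the constructor arguments and call signature are inferred from how app.py uses the class, and everything else (including the torch dependency) is an assumption.

# inferer.py -- hypothetical stand-in for the real Inferer, for UI testing only
import torch


class Inferer:
    def __init__(self, task, device="cuda"):
        # Fall back to CPU when CUDA is unavailable; app.py reads pipeline.device.type
        self.task = task
        self.device = torch.device(device if torch.cuda.is_available() else "cpu")

    def __call__(self, image, conf_thres, iou_thres):
        # The real class would run detection and draw boxes on the frame.
        # This stub just returns the input unchanged so the interfaces render.
        return image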