davron04 committed
Commit c9f445b · verified · 1 parent: 6180dbd

Update camera_view.py

Files changed (1)
  1. camera_view.py +58 -50
camera_view.py CHANGED
@@ -1,73 +1,81 @@
  import streamlit as st
  import cv2
  import time

- # Initialize session state
- if "show_button" not in st.session_state:
-     st.session_state.show_button = True

- if "camera_active" not in st.session_state:
-     st.session_state.camera_active = False
-
- class Camera_View:
      def __init__(self, app, model):
          self.app = app
          self.model = model
-
-     def toggle_camera(self):
-         st.session_state.camera_active = True
-         st.session_state.show_button = False  # Hide st.camera_input when camera is started
-
-     def stop_camera(self):
-         st.session_state.camera_active = False
-
      def show(self):
          # Top navigation
          col1_back, col2_back = st.columns([0.2, 0.8])
          with col1_back:
-             if st.button("Back", key='upload_back', icon=':material/arrow_back:', type='primary'):
                  self.app.change_page("Main")

-         st.markdown("<h1 style='text-align: center;'>🧠 Real-time Detection</h1>", unsafe_allow_html=True)
          st.divider()

-         # Buttons to control camera
-         col1_button, col2_button = st.columns(2)
-         with col1_button:
-             st.button("Stop Camera",
-                       icon=':material/videocam_off:',
-                       type='secondary',
-                       on_click=self.stop_camera)  # Button to stop video
-         with col2_button:
-             st.button("Start Camera",
-                       icon=':material/videocam:',
-                       type='primary',
-                       on_click=self.toggle_camera)

-         # Display st.camera_input if camera is not started yet
-         if st.session_state.show_button:
-             st.camera_input("")

-         frame_placeholder = st.empty()

-         # Run real-time detection if camera is active
-         if st.session_state.camera_active:
-             cap = cv2.VideoCapture(0)
-             while cap.isOpened():
-                 ret, frame = cap.read()
-                 if not ret or not st.session_state.camera_active:
-                     break

-                 frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
-                 results = self.model(frame)[0]

-                 for result in results.boxes.data.tolist():
-                     x1, y1, x2, y2, score, class_id = result
-                     color = (0, 0, 255) if score > 0.5 else (0, 255, 0)
-                     cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), color, 2)

-                 frame_placeholder.image(frame, channels="RGB", use_container_width=True)
-                 time.sleep(0.1)

-             cap.release()
-             st.warning("Camera stopped.")

  import streamlit as st
  import cv2
+ import tempfile
  import time
+ import os

+ if 'video_processed' not in st.session_state:
+     st.session_state.video_processed = False

+ class Video_View:
      def __init__(self, app, model):
          self.app = app
          self.model = model
+     def toggle_video_processing(self):
+         """Toggle video processing state."""
+         st.session_state.video_processed = False
      def show(self):
          # Top navigation
          col1_back, col2_back = st.columns([0.2, 0.8])
          with col1_back:
+             if st.button("Back", key='video_back', icon=':material/arrow_back:', type='primary'):
                  self.app.change_page("Main")

+         st.markdown("<h1 style='text-align: center;'>🧠 Video Detection</h1>", unsafe_allow_html=True)
          st.divider()

+         uploaded_file = st.file_uploader("Upload a video", type=["mp4", "avi", "mov"], on_change=self.toggle_video_processing)
+         if not st.session_state.video_processed:
+             if uploaded_file is not None:
+                 # Save to temp file
+                 tfile = tempfile.NamedTemporaryFile(delete=False, suffix=".mp4")
+                 tfile.write(uploaded_file.read())
+
+                 cap = cv2.VideoCapture(tfile.name)
+                 total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))  # Total number of frames
+                 width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
+                 height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
+                 fps = cap.get(cv2.CAP_PROP_FPS)
+
+                 out_path = os.path.join(tempfile.gettempdir(), "predicted_video.mp4")
+                 out = cv2.VideoWriter(out_path, cv2.VideoWriter_fourcc(*'XVID'), fps, (width, height))
+
+                 frame_count = 0  # To track the number of processed frames
+
+                 with st.spinner("Processing video... ⏳"):
+                     progress_bar = st.progress(0)  # Create a progress bar

+                     while cap.isOpened():
+                         ret, frame = cap.read()
+                         if not ret:
+                             break

+                         # Run YOLO model on the frame
+                         results = self.model(frame)[0]

+                         for result in results.boxes.data.tolist():
+                             x1, y1, x2, y2, score, _ = result
+                             color = (0, 0, 255) if score > 0.5 else (0, 255, 0)
+                             label = f"{score:.2f}"
+                             cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), color, 1)
+                             cv2.putText(frame, label, (int(x1), int(y1)-10), cv2.FONT_HERSHEY_SIMPLEX, 0.6, color, 1)

+                         out.write(frame)

+                         # Update the progress bar
+                         frame_count += 1
+                         progress_percentage = frame_count / total_frames  # Calculate the percentage
+                         progress_bar.progress(progress_percentage)  # Update the progress bar

+                 cap.release()
+                 out.release()
+                 cv2.destroyAllWindows()
+                 st.session_state.video_processed = True
+                 st.success("✅ Detection complete!")

+                 # Read and display the video
+                 with open(out_path, 'rb') as video_file:
+                     video_bytes = video_file.read()
+                 st.video(uploaded_file, loop=True, autoplay=True, muted=False)
+                 st.download_button("📥 Download Predicted Video", video_bytes, file_name="predicted_video.mp4",
+                                    mime="video/mp4")
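
The predicted file is written with the XVID fourcc, which browsers generally cannot decode inside an .mp4 container; presumably that is why the page plays back the original upload with st.video and offers the processed file only through the download button.

For orientation, below is a minimal sketch of how the new Video_View might be wired into the surrounding Streamlit app. The App class, the page names, and the "best.pt" weights path are assumptions made for illustration; only the Video_View(app, model) constructor and its show() method come from this commit.

# Hypothetical wiring sketch: App, the page names, and "best.pt" are assumptions;
# only Video_View(app, model).show() mirrors the interface shown in this commit.
import streamlit as st
from ultralytics import YOLO          # assumed: results.boxes suggests an Ultralytics YOLO model
from camera_view import Video_View

class App:
    def __init__(self):
        self.model = YOLO("best.pt")  # assumed weights path
        if "page" not in st.session_state:
            st.session_state.page = "Main"

    def change_page(self, page_name):
        # Video_View's Back button calls app.change_page("Main")
        st.session_state.page = page_name
        st.rerun()

    def run(self):
        if st.session_state.page == "Video":
            Video_View(self, self.model).show()
        else:
            st.title("Main")
            if st.button("Video Detection"):
                self.change_page("Video")

App().run()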