Browse Source

Added video streaming through RTSP

add-capture-card
mht 1 month ago
parent
commit
759e29d564
  1. 9
      core.py
  2. 91
      server.py

9
core.py

@ -1,4 +1,5 @@
import datetime
import threading
import pygame
import cv2
@ -17,6 +18,7 @@ import time
from PyQt5.QtCore import QObject, pyqtSignal
import ctypes
from ctypes import c_int64
from server import rtsp_server , run_server
showTrack = True
@ -31,6 +33,9 @@ class Core(QThread):
self.__detector = detector
self.__tracker = tracker
self.__rtspserver = rtsp_server
threading.Thread(target=run_server, daemon=True).start()
self.__video_sources = video_sources
self.__processing_source = video_sources[0]
@ -120,11 +125,13 @@ class Core(QThread):
self.coordsUpdated.emit(self.__processing_id, center, success)
self.newFrame.emit([bbox], self.__processing_id, False, ctime)
if showTrack:
x, y, w, h = map(int, bbox)
box_color = (0, 255, 0) if success else (255, 0, 0)
cv2.rectangle(frame, (x, y), (x + w, y + h), box_color, 2)
self.__rtspserver.update_frame(frame)
if showTrack:
# Convert OpenCV frame (BGR) to RGB
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)

91
server.py

@ -0,0 +1,91 @@
import threading
import time

import cv2
import gi
import numpy as np

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GLib
class VideoStream(GstRtspServer.RTSPMediaFactory):
    """RTSP media factory that serves externally supplied OpenCV frames.

    A producer thread feeds BGR frames in via :meth:`update_frame`; the
    GStreamer pipeline pulls them on demand through an ``appsrc`` element
    (``need-data`` -> :meth:`on_need_data`).
    """

    def __init__(self, fps=30, width=640, height=480):
        super(VideoStream, self).__init__()
        self.fps = fps
        self.width = width
        self.height = height
        # Black frame so the stream is valid before the first real frame.
        self.frame = np.zeros((height, width, 3), dtype=np.uint8)
        self.lock = threading.Lock()
        # Running presentation timestamp (ns) for pushed buffers. The appsrc
        # is configured with format=GST_FORMAT_TIME, so buffers must carry
        # monotonically increasing timestamps; the original pushed
        # CLOCK_TIME_NONE, which breaks downstream synchronisation.
        self._pts = 0

    def update_frame(self, frame):
        """Externally updates the current frame (BGR; resized to the stream size)."""
        # Resize outside the lock so the need-data callback is not blocked
        # by the (comparatively slow) resize.
        resized = cv2.resize(frame, (self.width, self.height))
        with self.lock:
            self.frame = resized

    def on_need_data(self, src, length):
        """Provides frames when the pipeline requests data."""
        # Take only the reference under the lock; update_frame() replaces
        # (never mutates) self.frame, so converting outside the lock is safe.
        with self.lock:
            frame = self.frame
        data = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB).tobytes()
        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        buf.duration = Gst.SECOND // self.fps
        buf.pts = buf.dts = self._pts
        self._pts += buf.duration
        retval = src.emit("push-buffer", buf)
        # Surface pipeline back-pressure/errors instead of dropping them.
        if retval != Gst.FlowReturn.OK:
            print(f"push-buffer returned {retval}")

    def do_create_element(self, url):
        """Creates the GStreamer pipeline for RTSP streaming."""
        pipeline_str = (
            "appsrc name=source is-live=true format=GST_FORMAT_TIME "
            "caps=video/x-raw,format=RGB,width={},height={},framerate={}/1 "
            "! videoconvert ! video/x-raw,format=I420 "
            "! x264enc tune=zerolatency bitrate=500 speed-preset=ultrafast "
            "! rtph264pay config-interval=1 name=pay0 pt=96"
        ).format(self.width, self.height, self.fps)
        pipeline = Gst.parse_launch(pipeline_str)
        src = pipeline.get_by_name("source")
        src.connect("need-data", self.on_need_data)
        return pipeline
class RTSPServer:
    """Thin wrapper around GstRtspServer exposing a single VideoStream mount.

    Args:
        ip: Address to bind the server to.
        port: TCP port for the RTSP service.
        mount_point: URL path the stream is served under.
    """

    def __init__(self, ip="0.0.0.0", port=8554, mount_point="/stream"):
        Gst.init(None)
        self.server = GstRtspServer.RTSPServer()
        self.server.set_address(ip)
        self.server.set_service(str(port))
        # Remember the mount point so start() reports the real URL — the
        # original hard-coded "/stream" even when a custom mount was passed.
        self.mount_point = mount_point
        self.video_stream = VideoStream()
        self.server.get_mount_points().add_factory(mount_point, self.video_stream)
        self.server.attach(None)

    def start(self):
        """Run the GLib main loop. Blocks the calling thread indefinitely."""
        print(
            f"RTSP Server running at "
            f"rtsp://{self.server.get_address()}:{self.server.get_service()}{self.mount_point}"
        )
        loop = GLib.MainLoop()
        loop.run()

    def update_frame(self, frame):
        """Externally updates the current frame for streaming."""
        self.video_stream.update_frame(frame)
# Global server instance
# NOTE(review): the IP and port are hard-coded deployment values — consider
# making them configurable (env var / CLI argument) instead of baking in
# 192.168.0.196:41231.
rtsp_server = RTSPServer("192.168.0.196",41231)
def run_server():
    """Start the global RTSP server; blocks in GLib.MainLoop until it exits.

    Intended to be run in a daemon thread (see the __main__ guard below).
    """
    rtsp_server.start()
def stream_webcam(source="/home/mht/Downloads/bcd2890d71caaf0e095b95c9b525973f61186656-360p.mp4"):
    """Read frames from *source* and feed them to the global RTSP server.

    Args:
        source: Anything cv2.VideoCapture accepts — the default is the
            original hard-coded test video; pass a camera index (e.g. 0)
            to stream a real webcam despite the function's name.
    """
    cap = cv2.VideoCapture(source)
    # Pace reads to the stream's frame rate instead of spinning as fast as
    # the decoder allows.
    delay = 1.0 / rtsp_server.video_stream.fps
    try:
        while cap.isOpened():
            ret, frame = cap.read()
            if not ret:
                # End of file or read failure. The original kept looping
                # forever here (isOpened() stays True after EOF).
                break
            rtsp_server.update_frame(frame)  # Send frame to RTSP server
            time.sleep(delay)
    finally:
        cap.release()
if __name__ == "__main__":
    # Serve RTSP from a background daemon thread, then pump frames from the
    # test video on the main thread.
    server_thread = threading.Thread(target=run_server, daemon=True)
    server_thread.start()
    stream_webcam()
Loading…
Cancel
Save