Compare commits
merge into: hs_tohidi:develop
pull from: hs_tohidi:add-capture-card
19 commits: develop...add-capture-card
21 changed files with 557 additions and 124 deletions
- Oxygen-Sys-Warning.wav (BIN)
- app.py (205 changes)
- config.yaml (2 changes)
- core.py (142 changes)
- cvStreamer.py (52 changes)
- detector/demo.py (2 changes)
- gpuMonitor.py (47 changes)
- message_queue/proto/SetCameraMessage.proto (1 change)
- message_queue/proto/SetCameraMessage_pb2.py (11 changes)
- server.py (112 changes)
- tracker/ltr/external/PreciseRoIPooling/pytorch/prroi_pool/functional.py (1 change)
- tracker/ltr/models/backbone/resnet.py (2 changes)
- tracker/ltr/models/bbreg/atom_iou_net.py (1 change)
- tracker/ltr/models/layers/distance.py (2 changes)
- tracker/ltr/models/target_classifier/features.py (2 changes)
- tracker/pytracking/features/augmentation.py (6 changes)
- tracker/pytracking/features/preprocessing.py (7 changes)
- tracker/pytracking/libs/dcf.py (1 change)
- tracker/pytracking/tracker/dimp/dimp.py (3 changes)
- tracker/pytracking/utils/params.py (2 changes)
- video_streamer/vision_service.cpython-37m-x86_64-linux-gnu.so (BIN)
cvStreamer.py  @@ -0,0 +1,52 @@

import cv2


class cvStreamer():
    def __init__(self, idx):
        self.cap = cv2.VideoCapture(idx)
        self.idx = idx

    def isOpened(self):
        isOpen = self.cap.isOpened()

        if not isOpen:
            self.release()
        else:
            print(f"usb cam open at {self.idx}")

        return isOpen

    def release(self):
        self.cap.release()

    def get_frame(self):
        ret, frame = self.cap.read()
        if not ret:
            return None

        # Get the original dimensions of the frame
        height, width = frame.shape[:2]

        # Define the maximum dimensions
        max_width = 1920
        max_height = 1080

        # Calculate the aspect ratio
        aspect_ratio = width / height

        # Resize the frame if it exceeds the maximum dimensions
        if width > max_width or height > max_height:
            if aspect_ratio > 1:  # Landscape orientation
                new_width = max_width
                new_height = int(new_width / aspect_ratio)
            else:  # Portrait orientation
                new_height = max_height
                new_width = int(new_height * aspect_ratio)

            # Resize the frame
            frame = cv2.resize(frame, (new_width, new_height))

        return frame

    def __del__(self):
        self.release()
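
A minimal usage sketch for the new cvStreamer wrapper, probing a few device indices until a capture card or USB camera opens. The probe range, the import path, and the snapshot write are assumptions for illustration, not part of this diff:

import cv2
from cvStreamer import cvStreamer  # assumed import path

# Try the first few /dev/video* indices until a device responds.
streamer = None
for idx in range(4):  # assumed probe range
    cam = cvStreamer(idx)
    if cam.isOpened():
        streamer = cam
        break

if streamer is not None:
    frame = streamer.get_frame()  # already resized to fit within 1920x1080
    if frame is not None:
        cv2.imwrite("capture_test.jpg", frame)  # hypothetical output path
    streamer.release()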
gpuMonitor.py  @@ -0,0 +1,47 @@

import pynvml
import time
from colorama import Fore, Style, init
import os

# Initialize colorama
init(autoreset=True)


def monitor_gpu_ram_usage(interval=2, threshold_gb=2):
    # Initialize NVML
    pynvml.nvmlInit()
    try:
        device_count = pynvml.nvmlDeviceGetCount()
        print(f"Found {device_count} GPU(s).")

        while True:
            for i in range(device_count):
                handle = pynvml.nvmlDeviceGetHandleByIndex(i)
                info = pynvml.nvmlDeviceGetMemoryInfo(handle)

                print(f"GPU {i}:")
                print(f"  Total RAM: {info.total / 1024 ** 2:.2f} MB")
                # Warn in red and play an audible alert once used memory exceeds the threshold
                if info.used / 1024 ** 2 >= threshold_gb * 1024:
                    print(Fore.RED + f"  Used RAM: {info.used / 1024 ** 2:.2f} MB")
                    os.system("aplay /home/rog/repos/Tracker/NE-Smart-Tracker/Oxygen-Sys-Warning.wav")
                else:
                    print(f"  Used RAM: {info.used / 1024 ** 2:.2f} MB")
                print(f"  Free RAM: {info.free / 1024 ** 2:.2f} MB")
                print(Fore.GREEN + "-" * 30)
                print(Fore.GREEN)

            time.sleep(interval)  # Wait for the specified interval before checking again

    except KeyboardInterrupt:
        print("Monitoring stopped by user.")

    finally:
        # Shutdown NVML
        pynvml.nvmlShutdown()


if __name__ == "__main__":
    monitor_gpu_ram_usage(interval=2, threshold_gb=2)  # Check every 2 seconds, threshold is 2 GB
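
gpuMonitor.py already has a __main__ entry point for standalone use. If the monitor is meant to run inside the tracker process instead, a minimal sketch is a daemon thread; the wrapper, import path, and the interval/threshold values here are assumptions, not part of this diff:

import threading
from gpuMonitor import monitor_gpu_ram_usage  # assumed import path

# Run the monitor in a daemon thread so it stops when the main process exits.
monitor_thread = threading.Thread(
    target=monitor_gpu_ram_usage,
    kwargs={"interval": 5, "threshold_gb": 2},  # assumed values for illustration
    daemon=True,
)
monitor_thread.start()

# ... main tracking / streaming work continues here ...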
server.py  @@ -0,0 +1,112 @@

import cv2
import gi
import threading
import numpy as np

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GLib
import socket


def get_local_ip():
    try:
        # Create a socket object
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        # Connect to an external server (doesn't actually send any data)
        s.connect(("8.8.8.8", 80))

        # Get the local IP address
        local_ip = s.getsockname()[0]

        # Close the socket
        s.close()

        return local_ip
    except Exception as e:
        return str(e)


class VideoStream(GstRtspServer.RTSPMediaFactory):
    def __init__(self, fps=25, width=1920, height=1080):
        super(VideoStream, self).__init__()
        self.fps = fps
        self.width = width
        self.height = height
        self.frame = np.zeros((height, width, 3), dtype=np.uint8)  # Black frame initially
        self.lock = threading.Lock()

    def update_frame(self, frame):
        """Externally updates the current frame."""
        with self.lock:
            self.frame = cv2.resize(frame, (self.width, self.height))  # Resize if necessary

    def on_need_data(self, src, length):
        """Provides frames when the pipeline requests data."""
        with self.lock:
            frame_rgb = cv2.cvtColor(self.frame, cv2.COLOR_BGR2RGB)  # Convert BGR to RGB
            data = frame_rgb.tobytes()  # Convert to bytes

        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        buf.duration = Gst.SECOND // self.fps
        buf.pts = buf.dts = Gst.CLOCK_TIME_NONE
        src.emit("push-buffer", buf)

    def do_create_element(self, url):
        """Creates the GStreamer pipeline for RTSP streaming."""
        pipeline_str = (
            "appsrc name=source is-live=true format=GST_FORMAT_TIME "
            "caps=video/x-raw,format=RGB,width={},height={},framerate={}/1 "
            "! videoconvert ! video/x-raw,format=I420 "
            "! nvh264enc preset=low-latency rc-mode=cbr bitrate=2500 gop-size=7 zerolatency=true "
            "! h264parse ! rtph264pay config-interval=1 name=pay0 pt=96"
        ).format(self.width, self.height, self.fps)

        pipeline = Gst.parse_launch(pipeline_str)
        src = pipeline.get_by_name("source")
        src.connect("need-data", self.on_need_data)
        return pipeline


class RTSPServer:
    def __init__(self, ip="0.0.0.0", port=8554, mount_point="/stream"):
        Gst.init(None)
        self.server = GstRtspServer.RTSPServer()
        self.server.set_address(ip)
        self.server.set_service(str(port))

        self.video_stream = VideoStream()
        self.server.get_mount_points().add_factory(mount_point, self.video_stream)
        self.server.attach(None)

    def start(self):
        print(f"RTSP Server running at rtsp://{self.server.get_address()}:{self.server.get_service()}/stream")
        loop = GLib.MainLoop()
        loop.run()

    def update_frame(self, frame):
        """Externally updates the current frame for streaming."""
        self.video_stream.update_frame(frame)


# Global server instance
rtsp_server = RTSPServer(get_local_ip(), 41231)


def run_server():
    rtsp_server.start()


# def stream_webcam():
#     cap = cv2.VideoCapture("/home/mht/Downloads/bcd2890d71caaf0e095b95c9b525973f61186656-360p.mp4")  # Open webcam
#     while cap.isOpened():
#         ret, frame = cap.read()
#         if ret:
#             rtsp_server.update_frame(frame)  # Send frame to RTSP server


# if __name__ == "__main__":
#     # Start RTSP server in a separate thread
#     threading.Thread(target=run_server, daemon=True).start()
#
#     # Stream webcam frames
#     stream_webcam()
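
The commented-out stream_webcam block hints at how frames are meant to reach the server. A sketch of the same pattern fed by the new cvStreamer wrapper instead of a video file; the device index, import paths, and loop structure are assumptions, not part of this diff:

import threading
from cvStreamer import cvStreamer      # assumed import path
from server import rtsp_server, run_server  # assumed import path

# Start the RTSP server in a background thread, then feed it capture-card frames.
threading.Thread(target=run_server, daemon=True).start()

cam = cvStreamer(0)  # assumed device index
if cam.isOpened():
    while True:
        frame = cam.get_frame()
        if frame is not None:
            rtsp_server.update_frame(frame)  # pulled by on_need_data at ~25 fps

With these defaults a client would connect to rtsp://<server-ip>:41231/stream, matching the port and mount point configured above.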