Required Info
Camera Model: Intel RealSense SR305
Firmware Version: v2.50.0
Operating System & Version: Windows 11
Platform: PC
SDK Version: v2.50.0
Language: Python (OpenCV)
Segment: YOLOv10 object detection
I want to detect objects with YOLOv10, but the RealSense camera does not connect to the depth sensor. Starting the pipeline fails with:

Error starting RealSense pipeline: No device connected
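A quick way to check whether librealsense can see the camera at all, independently of the pipeline, is to enumerate connected devices. This is a minimal diagnostic sketch (an assumption on my side, not part of the original script) using pyrealsense2 v2.50.0; the camera_info fields queried are the standard name, serial number, and firmware version.

    import pyrealsense2 as rs

    # List every RealSense device the SDK can currently see.
    ctx = rs.context()
    devices = ctx.query_devices()

    if devices.size() == 0:
        print("No RealSense devices detected - check the USB cable/port and drivers.")
    else:
        for dev in devices:
            name = dev.get_info(rs.camera_info.name)
            serial = dev.get_info(rs.camera_info.serial_number)
            fw = dev.get_info(rs.camera_info.firmware_version)
            print(f"Found {name} (serial {serial}, firmware {fw})")

Here is my full code: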
import cv2
import numpy as np            # needed for np.asanyarray below
import pyrealsense2 as rs     # needed for rs.pipeline / rs.config below
import torch
from ultralytics import YOLO

# Configure depth and color streams
pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)

# Load YOLO model (the weights path must be a string)
model = YOLO('yolov10x.pt')
try:
    # Start streaming and catch potential errors
    try:
        print("Starting RealSense pipeline...")
        pipeline.start(config)
        print("RealSense pipeline started successfully.")
    except RuntimeError as e:
        print(f"Error starting RealSense pipeline: {e}")
        pipeline = None

    if pipeline:
        while True:
            # Wait for a coherent pair of frames: depth and color
            frames = pipeline.wait_for_frames()
            color_frame = frames.get_color_frame()
            if not color_frame:
                continue

            # Convert images to numpy arrays
            color_image = np.asanyarray(color_frame.get_data())

            # Perform object detection
            results = model(color_image)

            # Display results
            for result in results:
                boxes = result.boxes
                for box in boxes:
                    x1, y1, x2, y2 = map(int, box.xyxy[0])
                    conf = box.conf[0]
                    cls = int(box.cls[0])
                    label = f'{model.names[cls]} {conf:.2f}'
                    cv2.rectangle(color_image, (x1, y1), (x2, y2), (255, 0, 0), 2)
                    cv2.putText(color_image, label, (x1, y1 - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (255, 0, 0), 2)

            # Show the image
            cv2.imshow('RealSense', color_image)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
finally:
    # Stop streaming if the pipeline was started
    if pipeline:
        print("Stopping RealSense pipeline...")
        pipeline.stop()
        print("RealSense pipeline stopped.")
    # Check if destroyAllWindows exists before calling it
    if hasattr(cv2, 'destroyAllWindows'):
        cv2.destroyAllWindows()