I stream a video webcamera from another computer with GStreamer with that commande line :
gst-launch-1.0 -v v4l2src device=/dev/<video4> ! image/jpeg,width=640,height=480,framerate=30/1 ! jpegdec ! videoconvert ! x264enc bitrate=2000 speed-preset=ultrafast tune=zerolatency ref=4 qp-min=5 qp-max=10 ! rtph264pay config-interval=1 ! udpsink host=<ip_destination> port=<port_dest>
Then in my code I get the video stream with Gst :
"self.pipeline = Gst.parse_launch(udpsrc address={ip_address} port={port} ! application/x-rtp, encoding-name=H264 ! rtph264depay ! decodebin ! videoconvert ! jpegenc ! appsink name={name}")
Then I make some treatment on the stream to display the image on pyside6 and i got this
My entire code :
import cv2
import gi
from PySide6.QtWidgets import QVBoxLayout, QWidget
from PySide6.QtCore import QTimer
from PySide6.QtGui import QImage, QPixmap
from PySide6.QtWidgets import QLabel
gi.require_version("Gst", "1.0")
from gi.repository import Gst
class VideoStreamWidget(QWidget):
def __init__(self, port, node, parent=None):
super(VideoStreamWidget, self).__init__(parent)
self.node = node
Gst.init(None)
# Set up the label to display the video frames
self.video_label = QLabel(self)
layout = QVBoxLayout()
layout.addWidget(self.video_label)
self.setLayout(layout)
self.create_pipeline_gs(port)
self.bus = self.pipeline.get_bus()
self.bus.add_signal_watch()
self.bus.connect("message::eos", self.on_eos)
self.bus.connect("message::error", self.on_error)
self.pipeline.set_state(Gst.State.PLAYING)
def create_pipeline_gs(self, port):
name = f"appsink{port}"
self.pipeline = Gst.parse_launch(
f"udpsrc address=127.0.0.1 port={port} ! application/x-rtp, encoding-name=H264 ! rtph264depay ! decodebin ! videoconvert ! jpegenc ! appsink name={name}"
)
self.appsink = self.pipeline.get_by_name(name)
self.appsink.set_property('emit-signals', True)
self.appsink.set_property('sync', False)
self.appsink.connect("new-sample", self.on_new_sample)
def on_new_sample(self, sink):
sample = sink.emit("pull-sample")
if sample:
caps = sample.get_caps()
buffer = sample.get_buffer()
structure = caps.get_structure(0)
width = structure.get_value("width")
height = structure.get_value("height")
success, map_info = buffer.map(Gst.MapFlags.READ)
if success:
image = QImage(
map_info.data, width, height, width * 3, QImage.Format_RGB888
)
pixmap = QPixmap.fromImage(image)
self.video_label.setPixmap(pixmap)
buffer.unmap(map_info)
return 0
def on_eos(self, bus, msg):
print("End of stream")
self.pipeline.set_state(Gst.State.NULL)
def on_error(self, bus, msg):
err, debug = msg.parse_error()
print(f"Error: {err}")
self.pipeline.set_state(Gst.State.NULL)
def closeEvent(self, event):
self.pipeline.set_state(Gst.State.NULL)
event.accept()
super(VideoStreamWidget, self).closeEvent(event)
I try my code without pyside6 just with flask and it’s working well.enter image description here
So i think it’s an issue with how I transform my image into QImage and QPixmap, but can not find solution to solve this issue.
user26714515 is a new contributor to this site. Take care in asking for clarification, commenting, and answering.
Check out our Code of Conduct.