OpenCV进行RTSP推流

OpenCV进行RTSP推流

1 概述

将 OpenCV 处理后的图像直接进行 RTSP 推流,支持多种实现方案。


2 测试命令

2.1 推流本地文件

1
gst-launch-1.0 filesrc location=D:\5.mp4 ! decodebin ! videoconvert ! x264enc ! rtspclientsink location=rtsp://127.0.0.1:8554/live

2.2 播放推流内容

1
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/live ! rtph264depay ! h264parse ! avdec_h264 ! autovideosink

3 方案一:GStreamer推流

3.1 核心代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <opencv2/opencv.hpp>

#include <chrono>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <iostream>

#define RTSP_SERVER_URL "rtsp://127.0.0.1:8554/live"
#define CAPTURE_WIDTH 640
#define CAPTURE_HEIGHT 480
#define CAPTURE_FPS 30
#define FRAME_INTERVAL_MS (1000 / CAPTURE_FPS)

// Grabs BGR frames from the default camera and pushes them through a
// GStreamer pipeline (appsrc -> videoconvert -> x264enc -> rtph264pay ->
// rtspclientsink) to the RTSP server at RTSP_SERVER_URL.
int main(int argc, char* argv[])
{
    // GStreamer must be initialized before any element is created.
    gst_init(&argc, &argv);

    // Create the pipeline and its elements.
    GstElement* pipeline = gst_pipeline_new("rtsp-pipeline");
    GstElement* appsrc = gst_element_factory_make("appsrc", "app-source");
    GstElement* videoconvert = gst_element_factory_make("videoconvert", "convert");
    GstElement* x264enc = gst_element_factory_make("x264enc", "encoder");
    GstElement* rtph264pay = gst_element_factory_make("rtph264pay", "payloader");
    GstElement* rtspclientsink = gst_element_factory_make("rtspclientsink", "sink");

    // Fail fast if a plugin is missing (e.g. rtspclientsink ships separately).
    if (!pipeline || !appsrc || !videoconvert || !x264enc || !rtph264pay || !rtspclientsink) {
        std::cerr << "Error: Failed to create GStreamer elements." << std::endl;
        return -1;
    }

    // Configure appsrc as a live, TIME-format source. do-timestamp makes
    // appsrc stamp a PTS on every pushed buffer; without it a TIME-format
    // pipeline receives untimestamped buffers and can stall.
    g_object_set(G_OBJECT(appsrc), "is-live", TRUE, NULL);
    g_object_set(G_OBJECT(appsrc), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(appsrc), "do-timestamp", TRUE, NULL);
    GstCaps* caps = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "BGR",
        "width", G_TYPE_INT, CAPTURE_WIDTH,
        "height", G_TYPE_INT, CAPTURE_HEIGHT,
        "framerate", GST_TYPE_FRACTION, CAPTURE_FPS, 1,
        NULL);
    g_object_set(G_OBJECT(appsrc), "caps", caps, NULL);
    gst_caps_unref(caps);  // g_object_set keeps its own reference; drop ours

    // Assemble and link the pipeline.
    gst_bin_add_many(GST_BIN(pipeline), appsrc, videoconvert, x264enc, rtph264pay, rtspclientsink, NULL);
    if (!gst_element_link_many(appsrc, videoconvert, x264enc, rtph264pay, rtspclientsink, NULL)) {
        std::cerr << "Error: Failed to link GStreamer elements." << std::endl;
        gst_object_unref(pipeline);
        return -1;
    }

    // Point the sink at the RTSP server.
    g_object_set(G_OBJECT(rtspclientsink), "location", RTSP_SERVER_URL, NULL);

    // Start streaming.
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    // Open the default camera.
    cv::VideoCapture cap(0);
    if (!cap.isOpened()) {
        std::cerr << "Error: Unable to open camera." << std::endl;
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        return -1;
    }

    // Capture/push loop.
    while (true)
    {
        cv::Mat frame;
        cap >> frame;
        if (frame.empty()) {
            std::cerr << "Warning: Empty frame." << std::endl;
            continue;
        }

        // Log a wall-clock timestamp with millisecond precision.
        auto now = std::chrono::system_clock::now();
        auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
                      now.time_since_epoch()).count() % 1000;
        auto in_time_t = std::chrono::system_clock::to_time_t(now);
        std::cout << std::put_time(std::localtime(&in_time_t), "%Y-%m-%d %X")
                  << "." << ms << std::endl;

        // Deep-copy the frame's pixels into a GstBuffer.
        const size_t size = frame.total() * frame.elemSize();
        GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
        GstMapInfo map;
        gst_buffer_map(buffer, &map, GST_MAP_WRITE);
        memcpy(map.data, frame.data, size);
        gst_buffer_unmap(buffer, &map);

        // Hand the buffer to appsrc, then release our reference.
        GstFlowReturn ret;
        g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref(buffer);
        if (ret != GST_FLOW_OK) {
            std::cerr << "Error: push-buffer returned " << ret << std::endl;
            break;
        }

        // Crude frame pacing; capture latency is not accounted for.
        g_usleep(FRAME_INTERVAL_MS * 1000);
    }

    // Tear down the pipeline.
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);

    return 0;
}

3.2 流程说明

步骤 操作
1 初始化 GStreamer
2 创建管道和元素(appsrc → videoconvert → x264enc → rtph264pay → rtspclientsink)
3 配置 appsrc 参数(分辨率、帧率、格式)
4 启动管道
5 循环读取摄像头帧并推送

4 方案二:FFmpeg推流

4.1 头文件

1
2
3
#include "rtspencoder.h"
#include <QCoreApplication>
#include <QDebug>

4.2 类定义

1
2
3
4
5
6
7
8
9
10
// Constructor: stores the target RTSP URL and output dimensions only.
// No FFmpeg state is created here — callers must invoke initialize()
// before pushing frames.
RTSPEncoder::RTSPEncoder(const char* rtspUrl, int outputWidth, int outputHeight, QObject* parent)
: QThread(parent), m_rtspUrl(rtspUrl), m_outputWidth(outputWidth), m_outputHeight(outputHeight)
{

}

// Destructor: releases all FFmpeg resources via cleanup().
RTSPEncoder::~RTSPEncoder()
{
cleanup();
}

4.3 初始化

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
int RTSPEncoder::initialize()
{
avformat_network_init();

// 初始化输出上下文
m_outFormatCtx = nullptr;
avformat_alloc_output_context2(&m_outFormatCtx, nullptr, "rtsp", m_rtspUrl.c_str());
if (!m_outFormatCtx)
{
std::cerr << "Error: Failed to allocate output context\n";
return -1;
}

// 查找H264编码器
m_codec = (AVCodec*)avcodec_find_encoder(AV_CODEC_ID_H264);
if (!m_codec) {
std::cerr << "Error: Failed to find H264 encoder\n";
return -1;
}

// 添加视频流
m_outStream = avformat_new_stream(m_outFormatCtx, m_codec);
if (!m_outStream) {
std::cerr << "Error: Failed to create new stream\n";
return -1;
}

// 初始化编码器上下文
m_codecCtx = avcodec_alloc_context3(m_codec);
if (!m_codecCtx) {
std::cerr << "Error: Failed to allocate codec context\n";
return -1;
}
m_codecCtx->codec_id = AV_CODEC_ID_H264;
m_codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
m_codecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
m_codecCtx->width = m_outputWidth;
m_codecCtx->height = m_outputHeight;
m_codecCtx->time_base = { 1, 25 }; // 25fps
m_codecCtx->bit_rate = 700000;
m_codecCtx->gop_size = 10;
m_codecCtx->max_b_frames = 0;
m_codecCtx->flags = AV_CODEC_FLAG_GLOBAL_HEADER | AV_CODEC_FLAG_LOW_DELAY;

// 配置编码器选项
AVDictionary* codecOptions = nullptr;
av_dict_set(&codecOptions, "preset", "ultrafast", 0);
av_dict_set(&codecOptions, "tune", "zerolatency", 0);

// 打开编码器
if (avcodec_open2(m_codecCtx, m_codec, &codecOptions) < 0) {
std::cerr << "Error: Failed to open encoder\n";
return -1;
}

// 复制编码器参数到输出流
avcodec_parameters_from_context(m_outStream->codecpar, m_codecCtx);

// 写入流头部
avformat_write_header(m_outFormatCtx, nullptr);

m_frame.create(m_outputHeight, m_outputWidth, CV_8UC3);
m_frame.setTo(0);
m_frameCount = 0;

m_avFrame = av_frame_alloc();

return 0;
}

4.4 编码与发送

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
// Encoder loop: blocks until AddFrame() publishes a new BGR frame, converts
// it to I420, encodes it as H.264 and writes the resulting packets to the
// RTSP output. Runs until avcodec_send_frame() fails.
// NOTE(review): the mutex is held for the whole convert/encode/write cycle,
// so AddFrame() callers stall while a frame is being encoded — consider
// copying the frame out and releasing the lock before encoding.
void RTSPEncoder::encodeFrameAndSend()
{
    while (1)
    {
        // Block until a new frame is ready.
        {
            std::unique_lock<std::mutex> lock(mtx);

            // Predicate loop guards against spurious wakeups.
            while (!m_isReady) {
                m_cv.wait(lock);
            }
            m_isReady = false;

            // Convert BGR -> I420. The original used COLOR_RGB2YUV_I420,
            // which treats the BGR data as RGB and swaps red and blue in
            // the encoded output.
            cv::cvtColor(m_frame, m_im420, cv::COLOR_BGR2YUV_I420);

            // Point the AVFrame planes into the packed I420 buffer:
            // Y is full-res, U and V are quarter-size planes following it.
            m_avFrame->data[0] = m_im420.data;
            m_avFrame->data[1] = m_avFrame->data[0] + m_codecCtx->width * m_codecCtx->height;
            m_avFrame->data[2] = m_avFrame->data[1] + m_codecCtx->width * m_codecCtx->height / 4;
            m_avFrame->linesize[0] = m_codecCtx->width;
            m_avFrame->linesize[1] = m_codecCtx->width / 2;
            m_avFrame->linesize[2] = m_codecCtx->width / 2;
            m_avFrame->width = m_codecCtx->width;
            m_avFrame->height = m_codecCtx->height;
            m_avFrame->format = AV_PIX_FMT_YUV420P;
            m_avFrame->pts = m_frameCount++;

            // Submit the frame to the encoder.
            auto ret = avcodec_send_frame(m_codecCtx, m_avFrame);
            if (ret < 0)
            {
                std::cerr << "Error: Failed to send frame for encoding\n";
                return;
            }

            // Drain all packets the encoder has ready.
            // NOTE(review): av_init_packet is deprecated in recent FFmpeg;
            // av_packet_alloc/av_packet_free is the modern replacement.
            AVPacket pkt;
            av_init_packet(&pkt);
            while (ret >= 0)
            {
                ret = avcodec_receive_packet(m_codecCtx, &pkt);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                {
                    break;  // need more input / encoder flushed
                }
                else if (ret < 0)
                {
                    std::cout << "Error: Failed to receive encoded packet\n";
                    continue;  // ret < 0 terminates the drain loop
                }
                // Rescale timestamps from encoder to stream time base and send.
                pkt.stream_index = m_outStream->index;
                av_packet_rescale_ts(&pkt, m_codecCtx->time_base, m_outStream->time_base);
                ret = av_interleaved_write_frame(m_outFormatCtx, &pkt);
                av_packet_unref(&pkt);

                if (ret < 0)
                {
                    std::cout << "Error: Failed to write packet to output stream\n";
                    continue;  // ret < 0 terminates the drain loop
                }
            }
        }
    }
}

4.5 添加帧

1
2
3
4
5
6
7
8
9
10
11
12
13
14
// Publish a new frame for the encoder thread.
// Frames whose dimensions do not match the configured output size are
// silently dropped. Assumes imGBR is a packed 8-bit 3-channel (BGR)
// continuous Mat — TODO confirm at the call sites.
void RTSPEncoder::AddFrame(cv::Mat& imGBR, int nFrame)
{
if (imGBR.cols != m_outputWidth || imGBR.rows != m_outputHeight)
return;

// Copy the pixels into the shared staging buffer under the lock,
// record the caller-supplied frame number, and mark the frame ready.
{
std::lock_guard<std::mutex> lock(mtx);
memcpy(m_frame.data, imGBR.data, imGBR.cols * imGBR.rows * 3);
m_frameCount = nFrame;
m_isReady = true;
}
// Notify after releasing the lock so the woken encoder thread can
// acquire it without contention.
m_cv.notify_one();
}

4.6 资源清理

1
2
3
4
5
6
7
8
9
10
11
12
13
// Release all FFmpeg resources. Safe to call when initialize() never ran
// or failed partway, and safe to call more than once (it is invoked from
// the destructor).
void RTSPEncoder::cleanup()
{
    // Only write the trailer if the output context exists; the original
    // called av_write_trailer unconditionally and would crash on nullptr.
    // NOTE(review): av_write_trailer also assumes the header was written —
    // confirm initialize() succeeded before relying on a clean teardown.
    if (m_outFormatCtx)
        av_write_trailer(m_outFormatCtx);

    // Free in reverse order of creation. The FFmpeg *_free helpers below
    // null their argument pointers for us.
    if (m_codecCtx)
        avcodec_free_context(&m_codecCtx);
    if (m_avFrame)
        av_frame_free(&m_avFrame);
    if (m_outFormatCtx)
    {
        avformat_free_context(m_outFormatCtx);
        m_outFormatCtx = nullptr;  // make a second cleanup() a no-op
    }
}

5 方案三:OpenCV GStreamer后端

5.1 推流代码

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
// Push loop built on OpenCV's VideoWriter: waits for the processing thread
// to publish img_push, snapshots it under the lock, and writes it out.
// Relies on push_str / fps / width / height / img_push / m_writer and the
// proc2push synchronization objects declared elsewhere in this file.
void push()
{
// NOTE(review): whether the 'HEVC' fourcc is accepted depends on the
// VideoWriter backend the build selects — confirm against the target.
int codec = cv::VideoWriter::fourcc('H', 'E', 'V', 'C');
m_writer.open(push_str, codec, fps, cv::Size(width, height));

cv::Mat tmp;
while (true)
{
{
std::unique_lock<std::mutex> lk(proc2push_mtx);
// NOTE(review): wait() has no predicate, so a spurious wakeup (or a
// notify that fires before this wait) re-sends the previous frame or
// blocks a cycle — consider a ready flag checked in a loop.
proc2push_cv.wait(lk);
// Deep-copy so the writer works on a frame the producer cannot mutate.
tmp = img_push.clone();
}

// Re-open lazily if the initial open failed or the writer dropped.
if (!m_writer.isOpened())
{
int codec = cv::VideoWriter::fourcc('H', 'E', 'V', 'C');
m_writer.open(push_str, codec, fps, cv::Size(width, height));
}

m_writer.write(tmp);
}
}

5.2 优缺点

方案 优点 缺点
GStreamer 灵活性高,延迟低 代码复杂
FFmpeg 功能强大,兼容性好 学习成本高
OpenCV GStreamer 实现简单 延迟较高

6 测试代码

6.1 视频源测试

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
// Smoke test: read frames from a USB camera through a GStreamer capture
// pipeline and display them until 'q' is pressed or the stream ends.
int main1()
{
    // USB camera as a GStreamer pipeline string. nvv4l2decoder/nvvidconv
    // are NVIDIA Jetson hardware elements — presumably this targets a
    // Jetson board; confirm before reusing elsewhere.
    std::string pipeline = "v4l2src device=/dev/video0 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! nvv4l2decoder mjpeg=1 ! nvvidconv ! appsink";
    cv::VideoCapture capture(pipeline, cv::CAP_GSTREAMER);

    if (!capture.isOpened())
    {
        std::cout << "Read video Failed !" << std::endl;
        return -1;
    }

    cv::Mat frame;
    cv::namedWindow("video test");

    // NOTE: CAP_PROP_FRAME_COUNT is not meaningful for a live source and
    // will typically report 0 or -1 here; kept for parity with file input.
    int frame_num = capture.get(cv::CAP_PROP_FRAME_COUNT);
    std::cout << "total frame number is: " << frame_num << std::endl;

    while (1)
    {
        capture >> frame;
        // An empty Mat means the device dropped out or the stream ended;
        // passing it to imshow would raise an error, so stop cleanly.
        if (frame.empty())
        {
            std::cout << "Empty frame, stopping." << std::endl;
            break;
        }
        imshow("video test", frame);
        if (cv::waitKey(30) == 'q')
        {
            break;
        }
    }

    cv::destroyWindow("video test");
    capture.release();
    return 0;
}

6.2 GStreamer命令

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# 文件推流
gst-launch-1.0 -v filesrc location=D:/1.mp4 ! decodebin ! x264enc ! rtph264pay ! udpsink host=127.0.0.1 port=8554

# RTSP播放
gst-launch-1.0 -v rtspsrc location=rtsp://127.0.0.1:10054/live/Fs4FsmKSR ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

# 测试源
gst-launch-1.0 -v videotestsrc ! video/x-raw, format=BGRx ! autovideosink

# AppSource推流
gst-launch-1.0 -v appsrc name=mysource is-live=true block=true format=GST_FORMAT_TIME \
! videoconvert \
! video/x-raw,format=NV12 \
! nvv4l2h264enc \
! video/x-h264,stream-format=byte-stream \
! h264parse \
! rtspclientsink location=rtsp://localhost:8554/mystream

7 总结

方案 适用场景
GStreamer 低延迟、高性能推流
FFmpeg 复杂编码需求、跨平台
OpenCV GStreamer 简单场景、快速实现