opencv获取视频图像在web端无插件播放解决方案
- 一、步骤说明
- 1、opencv获取图像Mat
- 2、Mat实时转H264
- 3、服务端使用Qt通过websocket发送
- 4、前端使用wfs.js进行接收,播放
- 二、具体分析
一、步骤说明
1、opencv获取图像Mat
2、Mat实时转H264
3、服务端使用Qt通过websocket发送
4、前端使用wfs.js进行接收,播放
综合分析以及心得:
由于接到的项目中需要对视频中某一区域进行截取并进行相关图像算法处理(此处不进行介绍),并且Web前端进行无插件显示播放,所以这里使用opencv进行视频图像的获取,之前的做法是通过ffmpeg将获取的视频帧进行推流到nginx-rtmp服务器中,前端通过http访问m3u8进行实时读取,最后发现该方法延迟性极高(大概10s左右),放弃了该方法,最后想到通过websocket进行前端直接通信,这样可以降低延迟,所以在github中发现了wfs.js该方法,但是这里面需要传入H264编码的裸流,所以经过周折终于把mat转为了H264,(并且图像大小可以任意,项目需要)。
二、具体分析
1、opencv获取图像Mat
opencv获取视频使用VideoCapture直接进行获取,具体代码如下
cv::VideoCapture cam;
cam.open(inUrl); // input source (e.g. an RTSP url)
//cam.open(0);   // local camera
if (!cam.isOpened())
{
    throw CException("cam open failed!");
}
// Use the frame rate reported by the stream; fall back to 25 fps when the
// backend cannot report it (returns 0 or a negative value). The original
// code queried CAP_PROP_FPS and then unconditionally overwrote it with 25,
// making the query dead code.
fps = cam.get(CAP_PROP_FPS);
if (fps <= 0)
{
    fps = 25;
}
Mat frame;
for (;;)
{
    if (isNeedStop)
    {
        break;
    }
    // Grab the next video frame from the stream; skip this iteration on failure.
    if (!cam.grab())
    {
        continue;
    }
    // Decode the grabbed frame (YUV -> BGR) into `frame`.
    if (!cam.retrieve(frame))
    {
        continue;
    }
    // Pace the loop to roughly the stream's frame rate.
    QThread::msleep(1000 / fps);
}
2、Mat实时转H264
Mat转H264裸流这里我们直接使用ffmpeg进行转码,废话不多说直接上代码
h264encoder.h
#ifndef H264ENCODER_H
#define H264ENCODER_H
#include <QObject>
#include "pch.h"
#include "opencv2/opencv.hpp"
// Tunable encoder parameters, consumed by h264encoder::Init().
typedef struct AvH264EncConfig_T {
int width = 320;        // output frame width in pixels
int height = 240;       // output frame height in pixels
int frame_rate = 25;    // frames per second (also used as the codec time base)
// int64_t bit_rate = 320000;
int64_t bit_rate = 50 * 1024 * 8;  // target bit rate; too low blurs the picture
int gop_size = 50;      // key-frame interval (GOP length)
int max_b_frames = 0;   // 0 B-frames keeps latency low for live streaming
}AvH264EncConfig;
// Qt wrapper around the FFmpeg H.264 encoder: receives cv::Mat frames via
// slot_encode() and emits the encoded bytes through sig_GetOneFrame().
class h264encoder : public QObject
{
Q_OBJECT
public:
explicit h264encoder(QObject *parent = nullptr);
~h264encoder();
// Allocates and opens the encoder; returns <0 on failure.
int Init(AvH264EncConfig h264_config);
// Encodes one BGR Mat; returns the internal packet, or NULL when no output is ready.
AVPacket *encode(const cv::Mat& mat);
// Frees everything allocated by Init(); safe to call more than once.
void Destory();
private:
// Converts a BGR cv::Mat into the (reused) YUV420P AVFrame.
AVFrame* cvmatToAvframe(const cv::Mat* image, AVFrame* frame);
AVCodec *cdc_;            // H.264 encoder descriptor (owned by FFmpeg)
AVCodecContext *cdc_ctx_; // encoder context, freed in Destory()
AVFrame *avf_;            // reusable input frame
AVPacket *avp_;           // reusable output packet
int frame_size_;          // width * height, set in Init()
int pts_;                 // presentation-timestamp counter, reset in Init()
signals:
// Emitted with the encoded H.264 bytes of one frame.
void sig_GetOneFrame(QByteArray);
public slots:
// Entry point: encode `mat` and emit the result via sig_GetOneFrame.
void slot_encode(const cv::Mat& mat);
};
#endif // H264ENCODER_H
h264encoder.cpp
#include "h264encoder.h"
#include <QDebug>
h264encoder::h264encoder(QObject *parent) : QObject(parent)
{
    // Start from a clean state; the FFmpeg objects are created lazily in Init().
    cdc_ = nullptr;
    cdc_ctx_ = nullptr;
    avf_ = nullptr;
    avp_ = nullptr;
    // Fixed: these two members were previously left uninitialized until Init()
    // ran, so reading them on a never-initialized encoder was undefined.
    frame_size_ = 0;
    pts_ = 0;
}
h264encoder::~h264encoder()
{
// Release all FFmpeg resources acquired by Init().
Destory();
}
int h264encoder::Init(AvH264EncConfig h264_config) {
    // Allocate and open the H.264 encoder with the given configuration.
    // Returns avcodec_open2()'s result (>= 0 on success) or -1 on earlier failure.
    qDebug() << "h264 init";
    pts_ = 0;
    cdc_ = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!cdc_) {
        return -1;
    }
    cdc_ctx_ = avcodec_alloc_context3(cdc_);
    if (!cdc_ctx_) {
        return -1;
    }
    cdc_ctx_->bit_rate = h264_config.bit_rate; // too low a bit rate blurs the picture
    cdc_ctx_->width = h264_config.width;
    cdc_ctx_->height = h264_config.height;
    cdc_ctx_->time_base = { 1, h264_config.frame_rate };
    cdc_ctx_->framerate = { h264_config.frame_rate, 1 };
    cdc_ctx_->gop_size = h264_config.gop_size;
    cdc_ctx_->max_b_frames = h264_config.max_b_frames; // 0 B-frames -> lower latency
    cdc_ctx_->pix_fmt = AV_PIX_FMT_YUV420P;
    cdc_ctx_->codec_id = AV_CODEC_ID_H264;
    cdc_ctx_->codec_type = AVMEDIA_TYPE_VIDEO;
    //cdc_ctx_->qmin = 10;
    //cdc_ctx_->qmax = 51;
    //cdc_ctx_->qcompress = 0.6;
    // x264 private options: fastest practical preset + zero-latency tuning
    // for live streaming over the websocket.
    AVDictionary *dict = nullptr;
    //av_dict_set(&dict, "preset", "slow", 0);
    av_dict_set(&dict, "preset", "veryfast", 0);
    av_dict_set(&dict, "tune", "zerolatency", 0);
    av_dict_set(&dict, "profile", "main", 0);
    // av_dict_set(&dict, "bufsize", "131072", 0);
    avf_ = av_frame_alloc();
    avp_ = av_packet_alloc(); // already zero-initialized; the old av_init_packet() call was redundant (and is deprecated)
    if (!avf_ || !avp_) {
        av_dict_free(&dict); // fixed: the dictionary was leaked on every exit path
        return -1;
    }
    frame_size_ = cdc_ctx_->width * cdc_ctx_->height;
    avf_->format = cdc_ctx_->pix_fmt;
    avf_->width = cdc_ctx_->width;
    avf_->height = cdc_ctx_->height;
    // Allocate the frame's pixel buffers.
    int r = av_frame_get_buffer(avf_, 0);
    if (r < 0) {
        av_dict_free(&dict);
        return -1;
    }
    r = av_frame_make_writable(avf_);
    if (r < 0) {
        av_dict_free(&dict);
        return -1;
    }
    // avcodec_open2() consumes the options it recognizes and leaves the rest
    // in `dict`; free whatever remains instead of leaking it.
    r = avcodec_open2(cdc_ctx_, cdc_, &dict);
    av_dict_free(&dict);
    return r;
}
void h264encoder::Destory() {
    // Tear down the codec context, the reusable frame and the reusable packet.
    // Each FFmpeg free helper nulls the pointer it receives, so this method
    // is safe to call repeatedly (e.g. from the destructor after manual cleanup).
    if (cdc_ctx_ != nullptr) {
        avcodec_free_context(&cdc_ctx_);
    }
    if (avf_ != nullptr) {
        av_frame_free(&avf_);
    }
    if (avp_ != nullptr) {
        av_packet_free(&avp_);
    }
}
AVPacket *h264encoder::encode(const cv::Mat& mat) {
    // Encode one BGR Mat into an H.264 packet.
    // Returns the internal packet (valid until the next call) or NULL when the
    // encoder has no output yet (EAGAIN), is flushed (EOF), or on error.
    if (mat.empty()) return NULL;
    avf_ = cvmatToAvframe(&mat, avf_);
    // Fixed: give each frame a monotonically increasing pts. The original code
    // declared pts_ but never used it, leaving every frame at pts 0.
    avf_->pts = pts_++;
    int r = avcodec_send_frame(cdc_ctx_, avf_);
    if (r >= 0) {
        r = avcodec_receive_packet(cdc_ctx_, avp_);
        if (r == 0) {
            // Fixed: single-stream output uses index 0; the original assigned
            // the frame pts to stream_index, which is meaningless.
            avp_->stream_index = 0;
            return avp_;
        }
        if (r == AVERROR(EAGAIN) || r == AVERROR_EOF) {
            // Encoder needs more input / is drained — not an error.
            return NULL;
        }
    }
    return NULL;
}
AVFrame* h264encoder::cvmatToAvframe(const cv::Mat* image, AVFrame* frame){
    // Convert a BGR24 cv::Mat into a YUV420P AVFrame (allocating one if needed).
    int width = image->cols;
    int height = image->rows;
    int cvLinesizes[1];
    cvLinesizes[0] = image->step1(); // bytes per source row, including padding
    if (frame == NULL){
        frame = av_frame_alloc();
        // Fixed: format/width/height must be filled in before the frame is
        // used — the original allocated the buffers but left frame->format
        // unset (AV_PIX_FMT_NONE), so sws_getContext() below would fail.
        frame->format = AVPixelFormat::AV_PIX_FMT_YUV420P;
        frame->width = width;
        frame->height = height;
        av_image_alloc(frame->data, frame->linesize, width, height, AVPixelFormat::AV_PIX_FMT_YUV420P, 1);
    }
    // NOTE(perf): the conversion context is rebuilt for every frame; caching it
    // in a member would be cheaper, but is kept local to preserve the interface.
    SwsContext* conversion = sws_getContext(width, height, AVPixelFormat::AV_PIX_FMT_BGR24, width, height, (AVPixelFormat) frame->format, SWS_FAST_BILINEAR, NULL, NULL, NULL);
    if (conversion == NULL) {
        // Fixed: avoid crashing inside sws_scale on a NULL context.
        return frame;
    }
    sws_scale(conversion, &image->data, cvLinesizes , 0, height, frame->data, frame->linesize);
    sws_freeContext(conversion);
    return frame;
}
void h264encoder::slot_encode(const cv::Mat& mat)
{
    // Encode one frame and forward the resulting H.264 bytes as a signal.
    AVPacket *packet = encode(mat);
    // Fixed: encode() legitimately returns NULL (empty mat, encoder delay,
    // send/receive errors) — the original dereferenced it unconditionally
    // and crashed on the very first frames while the encoder buffers input.
    if (packet == NULL || packet->data == NULL || packet->size <= 0) {
        return;
    }
    QByteArray data;
    data.append((char*)packet->data, packet->size);
    emit sig_GetOneFrame(data);
}
使用解释:这里使用Qt的信号槽机制,通过将opencv产生的Mat传入到槽函数slot_encode,然后进行编码:首先将Mat转换为AVFrame帧,之后编码为H264格式的视频帧,最后通过信号将数据转为二进制发送出去
3、服务端使用Qt通过websocket发送
Qt建立websocket进行发送webSocket->sendBinaryMessage(data);
4、前端使用wfs.js进行接收,播放
wfs具体内容见github 地址:https://github.com/ChihChengYang/wfs.js 此处给出测试使用方法:
index.html
<html>
<head>
<title>RTSP PLAY WITH MSE</title>
<style>
.video_bg {
background-image: url("video.png");
background-repeat: no-repeat;
background-size: 100% 100%;
}
.video_size {
height: 450px;
width: 800px;
}
#rtsp_url {
width: 400px;
}
/* hide the video element's default fullscreen button */
video::-webkit-media-controls-fullscreen-button {
display: none !important;
}
/* hide the default audio mute/volume button */
video::-webkit-media-controls-mute-button {
display: none !important;
}
/* hide the default settings (overflow) button */
video::-internal-media-controls-overflow-button {
display: none !important;
}
/* disable the video's native controls entirely (use with care: hiding them
   can make click-to-play impossible) */
video::-webkit-media-controls {
display: none !important;
}
</style>
</head>
<body>
<script type="text/javascript" src="jquery-1.12.1.min.js"></script>
<script type="text/javascript" src="wfs.js"></script>
<div class="wfsjs">
<!-- Fixed: `loop` is a boolean attribute, so loop="false" still ENABLED
     looping; the attribute is removed to get the intended no-loop behavior. -->
<video id="video1" class="video_size" autoplay="autoplay" poster="./video.png"></video>
<div class="inputDiv">
<input id="ws_url" type="text" placeholder="请输入WebSocket Url" value="ws://192.168.1.193:48780" />
<input id="rtsp_url" type="text" placeholder="请输入RTSP URL..." value="rtsp://admin:long7854016@192.168.1.20:554/h264/ch1/main/av_stream" />
<button id="play" class="plays" onclick="play()">播 放</button>
<button id="stop" onclick="stop()">停 止</button>
<button id="fullScreenBtn">全屏</button>
</div>
</div>
<script>
var G_isHiden = false;
var wfs = null;
window.onload = function() {
if (!Wfs.isSupported()) {
alert("您的浏览器不支持 MediaSource Extend,某些功能使用将受到影响!")
}
};
function play() {
    // Restart cleanly if a previous playback session exists.
    if (wfs) {
        stop();
    }
    wfs = new Wfs();
    var wsUrl = $("#ws_url").val();
    var rtspUrl = $("#rtsp_url").val();
    if (wsUrl == "") {
        alert("请填写WebSocket Url");
        return false;
    }
    if (rtspUrl == "") {
        alert("请填写RTSP Url");
        return false;
    }
    var videoEl = document.getElementsByClassName("video_size")[0];
    console.log("rtsp:" + rtspUrl);
    // Attach the raw H.264 stream (delivered over the websocket) to the <video>.
    wfs.attachMedia(videoEl, rtspUrl, "H264Raw", wsUrl);
}
function stop() {
    // Fixed: guard against stop-before-play and double clicks — the original
    // called methods on a null or already-destroyed Wfs instance — and drop
    // the reference so a destroyed instance is never reused.
    if (!wfs) {
        return;
    }
    wfs.closeWebSocket();
    wfs.destroy();
    wfs = null;
}
// Wire the fullscreen button to the cross-browser helper below.
var fullScreenBtn = document.getElementById('fullScreenBtn');
fullScreenBtn.addEventListener('click', function () {
    FullScreen('video1');
});
// Enter fullscreen on the element with the given id, trying each vendor-prefixed
// API in the same order as the original implementation.
function FullScreen(id) {
    var ele = document.getElementById(id);
    var request = ele.requestFullscreen ||
        ele.mozRequestFullScreen ||
        ele.webkitRequestFullScreen ||
        ele.msRequestFullscreen ||
        ele.oRequestFullscreen;
    if (request) {
        request.call(ele);
    }
}
// Exit fullscreen using whichever (possibly vendor-prefixed) API is available.
function exitFullscreen() {
    var de = document;
    if (de.exitFullscreen) {
        de.exitFullscreen();
    } else if (de.mozCancelFullScreen) {
        de.mozCancelFullScreen();
    } else if (de.webkitExitFullscreen) {
        de.webkitExitFullscreen();
    } else if (de.msExitFullscreen) {
        de.msExitFullscreen();
    } else if (de.oCancelFullScreen) {
        // Fixed: the original tested de.oRequestFullscreen (the ENTER-fullscreen
        // method) before calling de.oCancelFullScreen, so this branch either
        // never ran or threw a TypeError.
        de.oCancelFullScreen();
    }
}
document.addEventListener('visibilitychange', function() {
var isHidden = document.hidden;
if (isHidden) {
Wfs.DefaultConfig.isHidden = true;
} else {
Wfs.DefaultConfig.isHidden = false;
}
});
</script>
</body>
</html>