上次分享了视频录制代码的封装,这次分享一下同时将视频和音频推流到服务器上。注意:这次分享的内容不包括音视频同步方面的处理,所以会出现音视频不同步的情况。
音视频同时发送的策略主要是:利用一个链表存储音视频帧,然后开启两个线程,即音频录制线程和视频录制线程;在主线程中循环地从链表中读取音视频帧,我这边是先读取音频帧,然后再读取视频帧,如果两者同时为空就跳过本次循环,否则分别推流到nginx服务器上。
这里要注意:由于AVPacket 中默认的stream_index为0,所以如果没有对上次的SendFrame函数进行重构,就会出现音频推流失败的情况。流索引主要是在添加流(AddStream)的时候获取的。
AVStream中有个index字段,即该流在封装器中的索引,可以从这里获取。下面分享一下主函数的编写:
#include <QtCore/QCoreApplication>
#include <QThread>
#include <iostream>
#include "XMediaEncode.h"
#include "XRtmp.h"
#include "XAudioRecord.h"
#include "XVideoCapture.h"
using namespace std;
///Entry point: capture camera + microphone, encode (H.264/AAC) and push both
///streams to an rtmp server. No a/v synchronization is performed yet.
int main(int argc, char *argv[])
{
	QCoreApplication a(argc, argv);
	//push target (nginx-rtmp); string literals are const in C++
	const char *outUrl = "rtmp://192.168.198.128/live";
	//audio capture parameters
	int sampleRate = 44100;
	int channels = 2;
	int sampleByte = 2;  //bytes per sample (S16)
	int nbSample = 1024; //samples per channel in one audio frame

	///open the camera and start the video capture thread
	XVideoCapture *xv = XVideoCapture::Get();
	if (!xv->Init(0))
	{
		cout << "open camera failed!" << endl;
		getchar();
		return -1;
	}
	cout << "open camera success!" << endl;
	xv->Start();

	///start the qt audio capture thread
	XAudioRecord *ar = XAudioRecord::Get();
	ar->sampleRate = sampleRate;
	ar->channels = channels;
	ar->sampleByte = sampleByte;
	ar->nbSample = nbSample;
	if (!ar->Init())
	{
		cout << "XAudioRecord Init failed!" << endl;
		getchar();
		return -1;
	}
	ar->Start();

	///configure the encoder facade: pixel conversion follows the camera size
	XMediaEncode *xe = XMediaEncode::Get();
	xe->inWidth = xv->width;
	xe->inHeight = xv->height;
	xe->outHeight = xv->height;
	xe->outWidth = xv->width;
	if (!xe->InitScale())
	{
		cout << "InitScale Init failed!" << endl;
		getchar();
		return -1;
	}
	cout << "初始化视频像素转换上下文成功!" << endl;

	///audio resample: S16 interleaved (Qt capture) -> FLTP (AAC encoder input)
	xe->channels = channels;
	xe->nbSample = nbSample; //was a hard-coded 1024; keep in sync with the recorder
	xe->sampleRate = sampleRate;
	xe->inSampleFmt = XSampleFMT::X_S16;
	xe->outSampleFmt = XSampleFMT::X_FLTP;
	if (!xe->InitResample())
	{
		getchar();
		return -1;
	}

	///initialize the audio encoder
	if (!xe->InitAudioCodec())
	{
		getchar();
		return -1;
	}
	///initialize the video encoder
	if (!xe->InitVideoCodec())
	{
		getchar();
		return -1;
	}

	///create the output muxer context
	XRtmp *xr = XRtmp::Get(0);
	if (!xr->Init(outUrl))
	{
		getchar();
		return -1;
	}
	//add the video stream and keep its index for SendFrame
	int vindex = xr->AddStream(xe->vc);
	if (vindex < 0)
	{
		getchar();
		return -1;
	}
	//add the audio stream (the original comment here wrongly said "video")
	int aindex = xr->AddStream(xe->ac);
	if (aindex < 0)
	{
		getchar();
		return -1;
	}

	///open the rtmp network io and write the container header
	if (!xr->SendHead())
	{
		getchar();
		return -1;
	}

	///main loop: drain both capture lists and push every frame to the server.
	///When both lists are empty, sleep briefly to avoid busy-waiting.
	for (;;)
	{
		XData ad = ar->Pop();
		XData vd = xv->Pop();
		if (ad.size <= 0 && vd.size <= 0)
		{
			QThread::msleep(1);
			continue;
		}
		if (ad.size > 0)
		{
			//resample, encode and push one audio frame;
			//the stream index must be passed explicitly or the packet
			//would default to stream 0 (the video stream)
			AVFrame *pcm = xe->Resample(ad.data);
			ad.Drop(); //raw bytes were consumed by Resample
			AVPacket *pkt = xe->EncodeAudio(pcm);
			if (pkt && xr->SendFrame(pkt, aindex))
			{
				cout << "#" << flush;
			}
		}
		if (vd.size > 0)
		{
			//convert, encode and push one video frame
			AVFrame *yuv = xe->RGBToYUV(vd.data);
			vd.Drop();
			AVPacket *pkt = xe->EncodeVideo(yuv);
			if (pkt && xr->SendFrame(pkt, vindex))
			{
				cout << "@" << flush;
			}
		}
	}
	//unreachable: the loop above never exits
	getchar();
	return a.exec();
}
XAudioRecord.h
#pragma once
#include "XDataThread.h"
//Backend used for audio capture; only the Qt (QAudioInput) backend exists.
enum XAUDIOTYPE
{
	X_AUDIO_QT //capture through QAudioInput
};
//Abstract audio recorder: captures PCM frames on its own thread (XDataThread)
//and exposes them through Pop(). Configure the public fields before Init().
class XAudioRecord:public XDataThread
{
public:
	int sampleRate = 44100; //sample rate in Hz
	int channels = 2;       //channel count
	int sampleByte = 2;     //bytes per sample (2 -> 16 bit)
	int nbSample = 1024;    //samples per channel in one audio frame
	//Factory: one recorder instance per index; only the Qt backend exists.
	static XAudioRecord *Get(XAUDIOTYPE type= X_AUDIO_QT,unsigned char index = 0);
	//Open the capture device. Despite the old "start recording" comment this
	//only initializes; call Start() (from XDataThread) to begin capturing.
	virtual bool Init()=0;
	//Stop the capture thread and release the device.
	virtual void Stop()=0;
	virtual ~XAudioRecord();
protected:
	XAudioRecord();
};
XAudioRecord.cpp
#include "XAudioRecord.h"
#include <QAudioInput>
#include <iostream>
#include <list>
using namespace std;
///Qt implementation of XAudioRecord: reads PCM from QAudioInput on the
///capture thread and pushes fixed-size frames into the exchange list.
class CXAudioRecord :public XAudioRecord
{
public:
	///Capture loop: read exactly one readSize-byte frame per iteration.
	void run()
	{
		cout << "进入音频录制线程" << endl;
		const int readSize = nbSample*channels*sampleByte; //bytes per audio frame
		char *buf = new char[readSize];
		while (!isExit)
		{
			//wait until a whole frame of samples is buffered by Qt
			if (input->bytesReady() < readSize)
			{
				QThread::msleep(1);
				continue;
			}
			//a single read may return fewer bytes; loop until the frame is full
			int size = 0;
			while (size != readSize)
			{
				int len = io->read(buf + size, readSize - size);
				if (len < 0)break;
				size += len;
			}
			if (size != readSize)
			{
				continue;
			}
			//XData copies the bytes, so buf can be reused immediately
			Push(XData(buf, readSize));
		}
		delete[] buf; // BUG FIX: was `delete buf` for a new[] allocation
		cout << "退出音频录制线程" << endl;
	}
	///Configure the Qt audio format and start the input device.
	bool Init()
	{
		Stop(); //release any previous session before re-initializing
		QAudioFormat fmt;
		fmt.setSampleRate(sampleRate);
		fmt.setChannelCount(channels);
		fmt.setSampleSize(sampleByte * 8); //bits per sample
		fmt.setCodec("audio/pcm");
		fmt.setByteOrder(QAudioFormat::LittleEndian);
		// BUG FIX: the pipeline resamples from X_S16 (signed 16 bit);
		// capturing UnSignedInt samples would be reinterpreted as signed
		// and audibly distort the audio.
		fmt.setSampleType(QAudioFormat::SignedInt);
		QAudioDeviceInfo info = QAudioDeviceInfo::defaultInputDevice();
		if (!info.isFormatSupported(fmt))
		{
			cout << "Audio format not support!" << endl;
			fmt = info.nearestFormat(fmt);
		}
		cout << "Audio format success" << endl;
		input = new QAudioInput(fmt);
		//start capturing; Qt returns the io device to read samples from
		io = input->start();
		if (!io)return false;
		return true;
	}
	///Stop the thread, stop the device and free the QAudioInput.
	virtual void Stop()
	{
		XDataThread::Stop(); //signal isExit and join the thread first
		if (input)
			input->stop();
		if (io)
			io->close();
		delete input; // BUG FIX: the QAudioInput was leaked on every re-Init
		input = NULL;
		io = NULL; //owned by input (returned from start()); do not delete
	}
	QAudioInput *input = NULL; //Qt capture device
	QIODevice *io = NULL;      //pull-mode sample stream owned by input
};
//Factory: returns the per-index singleton (the `type` parameter is currently
//unused - only the Qt backend exists).
XAudioRecord *XAudioRecord::Get(XAUDIOTYPE type, unsigned char index)
{
	// BUG FIX: index ranges over 0..255, so the table needs 256 slots
	// (the old 255-element array was read out of bounds for index == 255).
	static CXAudioRecord record[256];
	return &record[index];
}
//Protected: instances are created only through Get().
XAudioRecord::XAudioRecord()
{
}
XAudioRecord::~XAudioRecord()
{
}
XData.h
#pragma once
//Byte buffer passed by value through the XDataThread exchange lists.
//NOTE: copies are SHALLOW (raw pointer member, no copy ctor/assignment) and
//the destructor frees nothing - whoever pops a value must call Drop()
//exactly once to release the buffer.
class XData
{
public:
	char *data = 0; //heap buffer (new[]), or 0 for the empty sentinel
	int size = 0;   //buffer size in bytes; 0 means "no data"
	//Free the owned buffer and reset to empty; safe on an empty object.
	void Drop();
	XData();
	//Allocates `size` bytes and copies the caller's buffer into them.
	XData(char *data,int size);
	~XData();
};
XData.cpp
#include "XData.h"
#include <stdlib.h>
#include <string.h>
//Deep-copy constructor: take ownership of a fresh `size`-byte copy of `data`.
XData::XData(char *data, int size)
{
	this->size = size;
	this->data = new char[size];
	memcpy(this->data, data, size);
}
//Release the buffer. The destructor intentionally does not free (values are
//shallow-copied through lists), so the consumer calls Drop() when done.
void XData::Drop()
{
	if (data)
		delete[] data; // BUG FIX: buffer comes from new[], so delete[] is required
	data = 0;
	size = 0;
}
//Default: empty payload (data == 0, size == 0) - the "nothing" value Pop() returns.
XData::XData()
{
}
//Deliberately does NOT free data: XData is shallow-copied through the lists
//and ownership is released explicitly via Drop().
XData::~XData()
{
}
XDataThread.h
#pragma once
#include <QThread>
#include "XData.h"
#include <list>
//Worker-thread base with a mutex-protected FIFO used to hand frames from the
//capture thread (Push) to the consumer (Pop).
class XDataThread : public QThread
{
public:
	//Maximum buffered frames; when exceeded the oldest entry is dropped.
	int maxList = 100;
	//Append one frame at the tail (thread safe).
	virtual void Push(XData d);
	//Pop the oldest frame; returns an empty XData (size == 0) when the list
	//is empty. The caller must call XData::Drop() on a non-empty result.
	virtual XData Pop();
	//Clear the exit flag and start the QThread.
	virtual bool Start();
	//Ask the worker loop to exit and block until it finishes.
	virtual void Stop();
	XDataThread();
	virtual ~XDataThread();
protected:
	//FIFO exchange list shared between producer and consumer threads.
	std::list<XData> datas;
	//Protects datas.
	QMutex mutex;
	//NOTE(review): never updated in the code visible here - confirm before use.
	int dataCount = 0;
	//Exit flag polled by run(); plain bool, not atomic.
	//NOTE(review): consider std::atomic<bool> for guaranteed visibility.
	bool isExit = false;
};
XDataThread.cpp
#include "XDataThread.h"
//Append one frame to the tail, evicting the oldest entries so the list
//never holds more than maxList items.
void XDataThread::Push(XData d)
{
	mutex.lock();
	// BUG FIX: the old `size() > maxList` test let the list grow to
	// maxList + 1 entries; >= keeps the documented maximum. The cast
	// avoids the signed/unsigned comparison.
	while (datas.size() >= (size_t)maxList)
	{
		datas.front().Drop(); //free the payload before discarding the node
		datas.pop_front();
	}
	datas.push_back(d);
	mutex.unlock();
}
//Remove and return the oldest frame; an empty XData when nothing is queued.
XData XDataThread::Pop()
{
	XData out; //default: empty payload
	mutex.lock();
	if (!datas.empty())
	{
		out = datas.front();
		datas.pop_front();
	}
	mutex.unlock();
	return out;
}
//Clear the exit flag and launch run() on a new thread. Always returns true.
bool XDataThread::Start()
{
	isExit = false;
	QThread::start();
	return true;
}
//Request the worker loop to exit, then block until run() returns.
//NOTE(review): isExit is a plain bool read from another thread - confirm
//visibility is acceptable or switch to std::atomic<bool>.
void XDataThread::Stop()
{
	isExit = true;
	wait();
}
XDataThread::XDataThread()
{
}
//Subclasses are responsible for stopping their thread before destruction.
XDataThread::~XDataThread()
{
}
XMediaEncode.h
#pragma once
struct AVFrame;
struct AVPacket;
class AVCodecContext;
//Subset of FFmpeg's AVSampleFormat, re-declared so the public header needs
//no FFmpeg includes. The numeric values mirror AVSampleFormat exactly.
enum XSampleFMT
{
	X_S16 = 1, //AV_SAMPLE_FMT_S16: packed signed 16-bit
	X_FLTP = 8 //AV_SAMPLE_FMT_FLTP: planar float
};
///Audio/video encoder facade: pixel conversion, audio resampling and
///H.264/AAC encoding. Hides all FFmpeg setup behind the Init* methods.
class XMediaEncode
{
public:
	///Input parameters - fill in before calling the Init* methods
	int inWidth = 1280;
	int inHeight = 720;
	int inPixSize = 3; //bytes per input pixel (BGR24)
	int channels = 2;
	int sampleRate = 44100;
	XSampleFMT inSampleFmt = X_S16;
	///Output parameters
	int outWidth = 1280;
	int outHeight = 720;
	int bitrate = 4000000; //NOTE(review): unused by the visible implementation - confirm
	int fps = 25;
	int nbSample = 1024;
	XSampleFMT outSampleFmt = X_FLTP;
	//Factory: one encoder instance per index (0..255).
	static XMediaEncode *Get(unsigned char index = 0);
	//Create the sws pixel conversion/scale context (in* sizes -> out* sizes).
	virtual bool InitScale() = 0;
	//Create the swr resample context (inSampleFmt -> outSampleFmt).
	virtual bool InitResample() = 0;
	//Resample one frame of interleaved input samples. Returns an internal
	//frame the caller must NOT free, or NULL on failure.
	virtual AVFrame* Resample(char *data) = 0;
	//Convert one packed BGR frame to YUV420P; same ownership rules as Resample.
	virtual AVFrame* RGBToYUV(char *rgb) = 0;
	//Create and open the H.264 encoder (fills vc).
	virtual bool InitVideoCodec() = 0;
	//Create and open the AAC encoder (fills ac).
	virtual bool InitAudioCodec() = 0;
	//Encode one video frame; returned packet is owned by the encoder.
	virtual AVPacket *EncodeVideo(AVFrame *frame) = 0;
	//Encode one audio frame; returned packet is owned by the encoder.
	virtual AVPacket *EncodeAudio(AVFrame *frame) = 0;
	virtual ~XMediaEncode();
	AVCodecContext *vc = 0; //video encoder context (valid after InitVideoCodec)
	AVCodecContext *ac = 0; //audio encoder context (valid after InitAudioCodec)
protected:
	XMediaEncode();
};
XMediaEncode.cpp
#include "XMediaEncode.h"
#include <iostream>
#include <stdexcept>
#include <thread>
extern "C"
{
#include <libswscale/swscale.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswresample/swresample.h>
}
#pragma comment(lib,"swscale.lib")
#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avutil.lib")
#pragma comment(lib,"swresample.lib")
using namespace std;
#if defined WIN32 || defined _WIN32
#include <windows.h>
#endif
//Number of logical CPUs; used to size the encoder thread pool.
static int XGetCpuNum()
{
	// std::thread::hardware_concurrency() covers Windows/Linux/macOS alike and
	// replaces the platform-specific branches (the old __APPLE__ path used
	// sysctl/CTL_HW without including <sys/sysctl.h>, and the __linux__ path
	// relied on sysconf without <unistd.h>).
	unsigned int n = std::thread::hardware_concurrency();
	return n > 0 ? (int)n : 1; //the call may return 0 when the count is unknown
}
class CXMediaEncode :public XMediaEncode
{
public:
void Close()
{
if (vsc)
{
sws_freeContext(vsc);
vsc = NULL;
}
if (yuv)
{
av_frame_free(&yuv);
}
if (asc)
{
swr_free(&asc);
}
if (vc)
{
avcodec_free_context(&vc);
}
if (vc)
{
avcodec_free_context(&vc);
}
if (pcm)
{
av_frame_free(&pcm);
}
vpts = 0;
apts = 0;
av_packet_unref(&vpack);
av_packet_unref(&apack);
}
bool InitVideoCodec()
{
///4.初始化编码上下文
//a 找到编码器
if (!(vc = CreateCodec(AV_CODEC_ID_H264)))
{
return false;
}
//vc->codec_id = codec->id;
vc->bit_rate = 50 * 1024 * 8; //压缩后每秒视频的bit位大小 200kB
vc->width = outWidth;
vc->height = outHeight;
vc->time_base = { 1,fps };
vc->framerate = { fps,1 };
//画面组的大小,多少帧一个关键帧
vc->gop_size = 50;
//设置不需要b帧
vc->max_b_frames = 0;
//设置像素格式
vc->pix_fmt = AV_PIX_FMT_YUV420P;
//d 打开编码器上下文
int ret = avcodec_open2(vc, 0, 0);
if (ret != 0)
{
char buf[1024] = { 0 };
av_strerror(ret, buf, sizeof(buf) - 1);
cout << buf << endl;
return false;
}
cout << "avcodec_open2 success!" << endl;
return true;
}
bool InitAudioCodec()
{
///4 初始化音频编码器
if (!(ac=CreateCodec(AV_CODEC_ID_AAC)))
{
return false;
}
//音频的参数
ac->bit_rate = 40000;
ac->sample_rate = sampleRate;
ac->sample_fmt = AV_SAMPLE_FMT_FLTP;
ac->channels = channels;
ac->channel_layout = av_get_default_channel_layout(channels);
//打开编码器
return OpenCodec(&ac);
}
AVPacket *EncodeVideo(AVFrame *frame)
{
av_packet_unref(&vpack);
frame->pts = vpts;
vpts++;
int ret = avcodec_send_frame(vc, frame);
if (ret != 0)
{
return NULL;
}
ret = avcodec_receive_packet(vc, &vpack);
if (ret != 0 || vpack.size < 0)
{
return NULL;
}
return &vpack;
}
AVPacket *EncodeAudio(AVFrame *frame)
{
pcm->pts = apts;
apts += av_rescale_q(pcm->nb_samples, { 1,sampleRate }, ac->time_base);
int ret = avcodec_send_frame(ac, pcm);
if (ret != 0)return NULL;
av_packet_unref(&apack);
ret = avcodec_receive_packet(ac, &apack);
//cout << "avcodec_receive_packet " << ret << endl;
if (ret != 0)return NULL;
return &apack;
}
bool InitScale()
{
///2.初始化格式转换上下文
vsc = sws_getCachedContext(vsc,
//源宽、高、像素格式
inWidth, inHeight, AV_PIX_FMT_BGR24,
//目标宽、高、像素格式
outWidth, outHeight, AV_PIX_FMT_YUV420P,
SWS_BICUBIC, //尺寸变化使用的算法
0, 0, 0
);
if (!vsc)
{
cout << "sws_getCachedContext failed";
return false;
}
///3.输出的数据结构
yuv = av_frame_alloc();
yuv->format = AV_PIX_FMT_YUV420P;
yuv->width = inWidth;
yuv->height = inHeight;
yuv->pts = 0;
//分配yuv空间
int ret = av_frame_get_buffer(yuv, 32);
if (ret != 0)
{
char buf[1024] = { 0 };
av_strerror(ret, buf, sizeof(buf) - 1);
throw exception(buf);
}
return true;
}
bool InitResample()
{
asc = NULL;
asc = swr_alloc_set_opts(asc,
av_get_default_channel_layout(channels), (AVSampleFormat)outSampleFmt, sampleRate, //输出格式
av_get_default_channel_layout(channels), (AVSampleFormat)inSampleFmt, sampleRate,//输入格式
0, 0);
if (!asc)
{
cout << "swr_alloc_set_opts failed!" << endl;
return false;
}
int ret = swr_init(asc);
if (ret != 0)
{
char err[1024] = { 0 };
av_strerror(ret, err, sizeof(err) - 1);
cout << err << endl;
return false;
}
cout << "音频重采样 上下文初始化成功" << endl;
///3 音频重采样输出空间分配
pcm = av_frame_alloc();
pcm->format = outSampleFmt;
pcm->channels = channels;
pcm->channel_layout = av_get_default_channel_layout(channels);
pcm->nb_samples = nbSample;//一帧音频一通道的采样数量
ret = av_frame_get_buffer(pcm, 0); //给pcm分配存储空间
if (ret != 0)
{
char err[1024] = { 0 };
av_strerror(ret, err, sizeof(err) - 1);
cout << err << endl;
return false;
}
return true;
}
AVFrame *RGBToYUV(char *rgb)
{
///rgb to yuv
//3.初始化输入的数据结构
uint8_t *indata[AV_NUM_DATA_POINTERS] = { 0 };
//indata[0] bgrbgrbgr
//plane indata[0] bbbbb indata[1]ggggg indata[2] rrrrr
indata[0] = (uint8_t*)rgb;
int insize[AV_NUM_DATA_POINTERS] = { 0 };
//一行(宽)数据的字节数
insize[0] = inWidth*inPixSize;
int h = sws_scale(vsc, indata, insize, 0, inHeight,//源数据
yuv->data, yuv->linesize);
if (h <= 0)
{
return NULL;
}
return yuv;
}
AVFrame* Resample(char *data)
{
const uint8_t *indata[AV_NUM_DATA_POINTERS] = { 0 };
indata[0] = (uint8_t *)data;
//已经读取一帧源数据
//重采样数据
int len = swr_convert(asc, pcm->data, pcm->nb_samples,//输出参数,输出存储地址,样本数
indata, pcm->nb_samples
);
if (len <= 0)
{
return NULL;
}
return pcm;
}
private:
bool OpenCodec(AVCodecContext **c)
{
int ret = avcodec_open2(*c, 0, 0);
if (ret != 0)
{
char err[1024] = { 0 };
av_strerror(ret, err, sizeof(err) - 1);
cout << err << endl;
avcodec_free_context(c);
return false;
}
cout << "avcodec_open2 success!" << endl;
return true;
}
AVCodecContext * CreateCodec(AVCodecID cid)
{
///4 初始化编码器
AVCodec *codec = avcodec_find_encoder(cid);
if (!codec)
{
cout << "avcodec_find_encoder failed!" << endl;
return NULL;
}
//音频编码器上下文
AVCodecContext * c = avcodec_alloc_context3(codec);
if (!c)
{
cout << "avcodec_alloc_context3 failed!" << endl;
return NULL;
}
cout << "avcodec_alloc_context3 success!" << endl;
c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
c->thread_count = XGetCpuNum();
return c;
}
SwsContext *vsc = NULL; //像素格式转换上下文
AVFrame *yuv = NULL; //输出的yuv
SwrContext *asc = NULL; //音频重采样上下文
AVPacket vpack = {0}; //视频帧
AVPacket apack = { 0 }; //音频帧
AVFrame *pcm = NULL; //重采样输出的pcm
int vpts = 0;
int apts = 0;
};
//Factory with lazy one-time codec registration (needed by older FFmpeg).
XMediaEncode *XMediaEncode::Get(unsigned char index)
{
	static bool isFirst = true;
	if (isFirst)
	{
		//register all codecs once
		avcodec_register_all();
		isFirst = false;
	}
	// BUG FIX: index ranges over 0..255, so the table needs 256 slots
	// (the old 255-element array was read out of bounds for index == 255).
	static CXMediaEncode cxm[256];
	return &cxm[index];
}
//Protected: instances are created only through Get().
XMediaEncode::XMediaEncode()
{
}
XMediaEncode::~XMediaEncode()
{
}
XRtmp.h
#pragma once
class AVCodecContext;
class AVPacket;
//rtmp/flv muxer facade: create the context, add streams, write the header,
//then push encoded packets with SendFrame.
class XRtmp
{
public:
	//Factory: one muxer instance per index (0..255).
	static XRtmp *Get(unsigned char index = 0);
	//Create the flv muxer context for the rtmp url.
	virtual bool Init(const char *url) = 0;
	//Add a video or audio stream configured from the encoder context.
	//Returns the stream index (pass it to SendFrame), or -1 on failure.
	virtual int AddStream(const AVCodecContext *c) = 0;
	//Open the rtmp network io and write the container header.
	virtual bool SendHead() = 0;
	//Push one encoded packet; streamIndex selects the audio or video stream
	//(defaulting to 0 made audio packets land on the video stream before).
	virtual bool SendFrame(AVPacket *pack,int streamIndex=0) = 0;
	//NOTE(review): dtor is not virtual; instances only come from Get()'s
	//statics and are never deleted through the base - confirm this stays true.
	~XRtmp();
protected:
	XRtmp();
};
XRtmp.cpp
#include "XRtmp.h"
#include <iostream>
#include <string>
using namespace std;
extern "C"
{
#include <libavformat/avformat.h>
}
#pragma comment(lib,"avformat.lib")
class CXRtmp :public XRtmp
{
public:
void Close()
{
if (ic)
{
avformat_close_input(&ic);
vs = NULL;
}
vc = NULL;
url = "";
}
bool Init(const char *url)
{
//a.创建输出封装器上下文
int ret = avformat_alloc_output_context2(&ic, 0, "flv", url);
this->url = url;
if (ret != 0)
{
char buf[1024] = { 0 };
av_strerror(ret, buf, sizeof(buf) - 1);
cout << buf << endl;
return false;
}
return true;
}
int AddStream(const AVCodecContext *c)
{
if (!c)return -1;
AVStream *st = avformat_new_stream(ic, NULL);
if (!st)
{
cout << ("avformat_new_stream failed!") << endl;
return -1;
}
st->codecpar->codec_tag = 0;
//从编码器复制参数
avcodec_parameters_from_context(st->codecpar, c);
av_dump_format(ic, 0, url.c_str(), 1);
if (c->codec_type == AVMEDIA_TYPE_VIDEO)
{
vc = c;
vs = st;
}
else if (c->codec_type == AVMEDIA_TYPE_AUDIO)
{
ac = c;
as = st;
}
return st->index;
}
bool SendHead()
{
int ret = avio_open(&ic->pb, url.c_str(), AVIO_FLAG_WRITE);
if (ret != 0)
{
char buf[1024] = { 0 };
av_strerror(ret, buf, sizeof(buf) - 1);
cout << buf << endl;
return false;
}
//写入封装头
ret = avformat_write_header(ic, NULL);
if (ret != 0)
{
char buf[1024] = { 0 };
av_strerror(ret, buf, sizeof(buf) - 1);
cout << buf << endl;
return false;
}
return true;
}
bool SendFrame(AVPacket *pack, int streamIndex)
{
if (!pack || pack->size <= 0 || !pack->data)return false;
//判断是音频还是视频
pack->stream_index = streamIndex;
AVRational stime;
AVRational dtime;
if (as && ac &&pack->stream_index == as->index)
{
stime = ac->time_base;
dtime = as->time_base;
}
else if (vs && vc && pack->stream_index == vs->index)
{
stime = vc->time_base;
dtime = vs->time_base;
}
else
{
return false;
}
//推流
pack->pts = av_rescale_q(pack->pts, stime, dtime);
pack->dts = av_rescale_q(pack->dts, stime, dtime);
pack->duration = av_rescale_q(pack->duration, stime, dtime);
int ret = av_interleaved_write_frame(ic, pack);
if (ret == 0)
{
cout << "#" << flush;
}
return true;
}
private:
AVFormatContext *ic = NULL; //rtmp flv 封装器
string url = "";
//视频编码器流
const AVCodecContext *vc = NULL;
//视频流
AVStream *vs = NULL;
//音频编码器流
const AVCodecContext *ac = NULL;
//音频流
AVStream *as = NULL;
};
//Factory with lazy one-time muxer/network registration.
XRtmp * XRtmp::Get(unsigned char index)
{
	// BUG FIX: index ranges over 0..255, so the table needs 256 slots
	// (the old 255-element array was read out of bounds for index == 255).
	static CXRtmp cxr[256];
	static bool isFirst = true;
	if (isFirst)
	{
		//register all muxers
		av_register_all();
		//register all network protocols (rtmp)
		avformat_network_init();
		isFirst = false;
	}
	return &cxr[index];
}
//Protected: instances are created only through Get().
XRtmp::XRtmp()
{
}
XRtmp::~XRtmp()
{
}
XVideoCapture.h
#pragma once
#include "XDataThread.h"
//Abstract camera/stream capture: grabs frames on its own thread (XDataThread)
//and exposes them through Pop(). Init() fills width/height/fps.
class XVideoCapture:public XDataThread
{
public:
	int width = 0;  //frame width, filled by Init()
	int height = 0; //frame height, filled by Init()
	int fps = 0;    //frames per second, filled by Init() (25 when the driver reports 0)
	//Factory: one capture object per index (0..255).
	static XVideoCapture *Get(unsigned char index = 0);
	//Open a local camera by device index.
	virtual bool Init(int camIdex = 0) = 0;
	//Open a stream or file by url.
	virtual bool Init(const char *url) = 0;
	//Stop the capture thread and release the device.
	virtual void Stop() = 0;
	virtual ~XVideoCapture();
protected:
	XVideoCapture();
};
XVideoCapture.cpp
#include "XVideoCapture.h"
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
using namespace std;
#pragma comment(lib,"opencv_world320.lib")
///OpenCV implementation of XVideoCapture.
class CXVideoCapture:public XVideoCapture
{
public:
	VideoCapture cam;
	///Capture loop: grab frames and push byte copies into the exchange list.
	void run()
	{
		Mat frame;
		cout << "进入视频线程" << endl;
		while (!isExit)
		{
			if (!cam.read(frame))
			{
				msleep(1);
				continue;
			}
			if (frame.empty())
			{
				msleep(1);
				continue;
			}
			// BUG FIX: XData copies rows*cols*elemSize raw bytes, so the
			// pixel data must actually be contiguous; the old code only had a
			// comment claiming it was. Clone when it is not.
			if (!frame.isContinuous())
			{
				frame = frame.clone();
			}
			XData d((char *)frame.data, (int)(frame.cols*frame.rows*frame.elemSize()));
			Push(d);
		}
		cout << "退出视频线程" << endl;
	}
	///Open a local camera by device index.
	bool Init(int camIdex = 0)
	{
		cam.open(camIdex);
		if (!cam.isOpened())
		{
			cout<<"cam open failed!";
			return false;
		}
		cout << "cam open success" << endl;
		ReadParams();
		return true;
	}
	///Open a stream or file by url.
	bool Init(const char *url)
	{
		cam.open(url);
		if (!cam.isOpened())
		{
			cout << "cam open failed!";
			return false;
		}
		cout << "cam open success" << endl;
		ReadParams();
		return true;
	}
	///Stop the capture thread, then release the device.
	void Stop()
	{
		XDataThread::Stop();
		if (cam.isOpened())
		{
			cam.release();
		}
	}
private:
	//Read width/height/fps from the opened device (shared by both Init overloads,
	//which previously duplicated this code).
	void ReadParams()
	{
		width = (int)cam.get(CAP_PROP_FRAME_WIDTH);
		height = (int)cam.get(CAP_PROP_FRAME_HEIGHT);
		fps = (int)cam.get(CAP_PROP_FPS);
		if (fps == 0)fps = 25; //some drivers report 0 - fall back to 25 fps
	}
};
//Factory: returns the per-index singleton.
XVideoCapture *XVideoCapture::Get(unsigned char index)
{
	// BUG FIX: index ranges over 0..255, so the table needs 256 slots
	// (the old 255-element array was read out of bounds for index == 255).
	static CXVideoCapture xc[256];
	return &xc[index];
}
//Protected: instances are created only through Get().
XVideoCapture::XVideoCapture()
{
}
XVideoCapture::~XVideoCapture()
{
}