本次分享的内容主要是针对上次分享的内容做一次封装,上次是利用c语言的特性来实现的,这次要利用c++的特性--封装来实现,后期可以封装成动态库,下次就可以直接使用。

本次封装后的类主要分成两个类XMediaEncode:音视频编解码类,XRtmp:网络推流类。

由于后面还会封装音频类,所以这两个类会作为抽象类来使用(c++中没有interface/abstract这样的关键字,含有纯虚函数的类就是抽象类,作用类似其他语言中的接口),然后将这两个类改成工厂模式进行使用,即将构造函数定义成protected权限,然后开放一个静态的函数提供给外部来创建对象。

XMediaEncode工厂类主要有rgb转yuv的方法,编码的方法,封装成了这几个函数

    //初始化像素格式转换的上下文
    virtual    bool InitScale() = 0;

virtual AVFrame* RGBToYUV(char *rgb) = 0;

    //视频编码器初始化
    virtual bool InitVideoCodec() = 0;

    //视频编码
    virtual AVPacket *EncodeVideo(AVFrame *frame) = 0;

将他们定义成纯虚函数让他们的子类都需要进行实现。

XRtmp工厂类主要负责将编码后的数据推流给服务器,主要函数如下:

    //初始化封装器上下文
    virtual bool Init(const char *url) = 0;

    //添加视频或者音频流
    virtual bool AddStream(const AVCodecContext *c) = 0;
    
    //打开rtmp网络IO,发送封装头
    virtual bool SendHead() = 0;

    //rtmp 帧推流
    virtual bool SendFrame(AVPacket *pack) = 0;

这边还要对上次的一个错误做纠正,上次说的这个函数:int64_t av_rescale_rnd(int64_t a, int64_t b, int64_t c, enum AVRounding rnd);(注:按参数类型,这个签名对应的是av_rescale_rnd;av_rescale_q_rnd的b、c两个参数是AVRational时间基)

解释应该是这样的:可以假设原来的dts为x,利用c来作为度量得到的值为a,利用b来作为度量得到的值为y,这样就可以列出两个方程式

a/c=x ,y/b=x ,所以a/c=y/b,即最后的y=a*b/c;上次说错了,后面再看解释的时候应该是

将以 "时钟基c" 表示的 数值a 转换成以 "时钟基b" 来表示。所以应该是这么解释的。

然后再分享一个获取电脑CPU数量的代码,之前是写死的,即写成8个。

#if defined WIN32 || defined _WIN32
#include <windows.h>
#elif defined __linux__
#include <unistd.h>
#elif defined __APPLE__
#include <sys/types.h>
#include <sys/sysctl.h>
#endif
// Get the number of logical CPUs on this machine (used to size the
// encoder thread pool instead of a hard-coded value).
// Fixes vs. the original snippet: the first line was missing the leading
// '#' of '#if', the function was missing its closing brace, and the
// Linux/Apple branches need <unistd.h> / <sys/sysctl.h>.
static int XGetCpuNum()
{
#if defined WIN32 || defined _WIN32
	SYSTEM_INFO sysinfo;
	GetSystemInfo(&sysinfo);

	return (int)sysinfo.dwNumberOfProcessors;
#elif defined __linux__
	// number of processors currently online
	return (int)sysconf(_SC_NPROCESSORS_ONLN);
#elif defined __APPLE__
	int numCPU = 0;
	int mib[4];
	size_t len = sizeof(numCPU);

	// set the mib for hw.ncpu
	mib[0] = CTL_HW;
	mib[1] = HW_AVAILCPU;  // alternatively, try HW_NCPU

	// get the number of CPUs from the system
	sysctl(mib, 2, &numCPU, &len, NULL, 0);

	if (numCPU < 1)
	{
		mib[1] = HW_NCPU;
		sysctl(mib, 2, &numCPU, &len, NULL, 0);

		if (numCPU < 1)
			numCPU = 1;
	}
	return (int)numCPU;
#else
	return 1;  // unknown platform: fall back to a single thread
#endif
}
由于本次的封装代码是基于之前的,所以这边就不做过多的解释了,代码中有注释,大家可以看看。

XMediaEncode.h

#pragma once
// FFmpeg types are C structs: forward-declare them with `struct`, not
// `class`, to match libavcodec's real definitions (AVCodecContext was
// declared `class`, inconsistent with the two declarations above it).
struct  AVFrame;
struct  AVPacket;
struct  AVCodecContext;
/// Audio/video encoding interface (abstract factory base class).
/// Concrete implementations are obtained through Get(); the constructor
/// is protected so instances can only be created by the factory.
class XMediaEncode
{
public:

	///Input parameters
	int inWidth = 1280;
	int inHeight = 720;
	int inPixSize = 3;          // bytes per input pixel (3 = BGR24)
	///Output parameters
	int outWidth = 1280;
	int outHeight = 720;
	int bitrate = 4000000; 
	int fps = 25;
	// Factory method: returns the encoder instance for the given index.
	static XMediaEncode *Get(unsigned char index = 0);
	// Init the pixel-format conversion context
	virtual	bool InitScale() = 0;

	// Convert one packed-RGB(BGR24) frame to YUV420P; NULL on failure.
	virtual AVFrame* RGBToYUV(char *rgb) = 0;

	// Init the video encoder
	virtual bool InitVideoCodec() = 0;

	// Encode one video frame; NULL when no packet is available.
	virtual AVPacket *EncodeVideo(AVFrame *frame) = 0;
	virtual ~XMediaEncode();
	AVCodecContext *vc = 0; 		// encoder context (owned by the implementation)
protected:
	XMediaEncode();
};

XMediaEncode.cpp

#include "XMediaEncode.h"
#include <iostream>
extern "C"
{
#include <libswscale/swscale.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
#pragma comment(lib,"swscale.lib")
#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avutil.lib")
using namespace std;

#if defined WIN32 || defined _WIN32
#include <windows.h>
#elif defined __linux__
#include <unistd.h>		// sysconf (was missing: the Linux branch did not compile)
#elif defined __APPLE__
#include <sys/types.h>
#include <sys/sysctl.h>	// sysctl/CTL_HW (was missing: the Apple branch did not compile)
#endif
// Get the number of logical CPUs on this machine (used to size the
// encoder thread pool instead of a hard-coded value).
static int XGetCpuNum()
{
#if defined WIN32 || defined _WIN32
	SYSTEM_INFO sysinfo;
	GetSystemInfo(&sysinfo);

	return (int)sysinfo.dwNumberOfProcessors;
#elif defined __linux__
	// number of processors currently online
	return (int)sysconf(_SC_NPROCESSORS_ONLN);
#elif defined __APPLE__
	int numCPU = 0;
	int mib[4];
	size_t len = sizeof(numCPU);

	// set the mib for hw.ncpu
	mib[0] = CTL_HW;
	mib[1] = HW_AVAILCPU;  // alternatively, try HW_NCPU

	// get the number of CPUs from the system
	sysctl(mib, 2, &numCPU, &len, NULL, 0);

	if (numCPU < 1)
	{
		mib[1] = HW_NCPU;
		sysctl(mib, 2, &numCPU, &len, NULL, 0);

		if (numCPU < 1)
			numCPU = 1;
	}
	return (int)numCPU;
#else
	return 1;	// unknown platform: fall back to a single thread
#endif
}

///Concrete encoder: converts BGR24 frames to YUV420P and encodes them as H264.
class CXMediaEncode :public XMediaEncode
{
public:
	// Free the scaler / frame / encoder contexts and reset the pts counter.
	void Close()
	{
		if (vsc)
		{
			sws_freeContext(vsc);
			vsc = NULL;
		}
		if (yuv)
		{
			av_frame_free(&yuv);
		}
		if (vc)
		{
			avcodec_free_context(&vc);
		}
		vpts = 0;
		av_packet_unref(&pack);
	}
	bool InitVideoCodec()
	{
		///4. Init the encoder context
		//a. find the encoder
		AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
		if (!codec)
		{
			cout << "Can't find h264 encoder!";
			return false;
		}
		//b. create the encoder context
		vc = avcodec_alloc_context3(codec);
		if (!vc)
		{
			cout << "avcodec_alloc_context3 failed!" << endl;
			return false;	// BUGFIX: was missing, vc was dereferenced below while NULL
		}
		//c. configure the encoder parameters
		vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;	// put SPS/PPS in extradata (needed for streaming)
		vc->codec_id = codec->id;
		cout << "cpunum=" << XGetCpuNum() << endl;
		vc->thread_count = XGetCpuNum();

		vc->bit_rate = 50 * 1024 * 8;	// compressed video bitrate: ~50KB per second
		vc->width = outWidth;
		vc->height = outHeight;
		vc->time_base = { 1,fps };
		vc->framerate = { fps,1 };

		// GOP size: one key frame every 50 frames
		vc->gop_size = 50;
		// no B frames (lower latency for live streaming)
		vc->max_b_frames = 0;
		// pixel format fed to the encoder
		vc->pix_fmt = AV_PIX_FMT_YUV420P;
		//d. open the encoder context
		int ret = avcodec_open2(vc, 0, 0);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			cout << buf << endl;
			return false;
		}
		cout << "avcodec_open2 success!" << endl;
		return true;
	}
	// Encode one YUV frame; returns NULL while the encoder buffers frames
	// or on error (the returned packet is owned by this object).
	AVPacket *EncodeVideo(AVFrame *frame)
	{
		av_packet_unref(&pack);
		frame->pts = vpts;
		vpts++;
		int ret = avcodec_send_frame(vc, frame);
		if (ret != 0)
		{
			return NULL;
		}

		ret = avcodec_receive_packet(vc, &pack);
		if (ret != 0 || pack.size <= 0)
		{
			return NULL;
		}
		return &pack;
	}
	bool InitScale()
	{
		///2. Init the pixel-format conversion context
		vsc = sws_getCachedContext(vsc,
			// source width, height, pixel format
			inWidth, inHeight, AV_PIX_FMT_BGR24,
			// destination width, height, pixel format
			outWidth, outHeight, AV_PIX_FMT_YUV420P,
			SWS_BICUBIC,	// rescaling algorithm
			0, 0, 0
			);

		if (!vsc)
		{
			cout << "sws_getCachedContext failed";
			return false;
		}

		///3. Output frame
		yuv = av_frame_alloc();
		yuv->format = AV_PIX_FMT_YUV420P;
		yuv->width = outWidth;	// BUGFIX: the buffer must match the scaler's
		yuv->height = outHeight;	// OUTPUT size, not the input size
		yuv->pts = 0;
		// allocate the yuv buffer (32-byte aligned)
		int ret = av_frame_get_buffer(yuv, 32);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			throw exception(buf);	// NOTE: MSVC-specific exception(const char*) ctor
		}
		return true;
	}
	AVFrame *RGBToYUV(char *rgb)
	{
		///rgb to yuv
		// source data layout: packed, single plane
		uint8_t *indata[AV_NUM_DATA_POINTERS] = { 0 };
		//indata[0] bgrbgrbgr
		//planar would be: indata[0] bbbbb indata[1] ggggg indata[2] rrrrr
		indata[0] = (uint8_t*)rgb;
		int insize[AV_NUM_DATA_POINTERS] = { 0 };
		// bytes per source row (width * bytes-per-pixel)
		insize[0] = inWidth*inPixSize;
		int h = sws_scale(vsc, indata, insize, 0, inHeight,	// source
			yuv->data, yuv->linesize);
		if (h <= 0)
		{
			return NULL;
		}

		return yuv;
	}
private:
	SwsContext *vsc = NULL;         // pixel-format conversion context
	AVFrame *yuv = NULL;            // converted yuv output frame (reused)

	AVPacket pack = {0};            // encoded output packet (reused)
	int vpts = 0;                   // monotonically increasing frame pts
}; 

// Factory method: lazily registers the codecs once, then hands out the
// singleton encoder instance for the given index.
XMediaEncode *XMediaEncode::Get(unsigned char index)
{
	static bool isFirst = true;
	if (isFirst)
	{
		// register all codecs (once per process)
		avcodec_register_all();
		isFirst = false;
	}
	// BUGFIX: index ranges 0..255, so 256 slots are needed (the original
	// 255-element array made Get(255) index past the end).
	static CXMediaEncode cxm[256];
	return &cxm[index];
}
// Protected: instances are only created through the Get() factory.
XMediaEncode::XMediaEncode()
{
}


// Virtual destructor (declared in the header); nothing to release here —
// the concrete implementation frees its resources in Close().
XMediaEncode::~XMediaEncode()
{
}

XRtmp.h

#pragma once
// FFmpeg types are C structs: forward-declare them with `struct`, not
// `class`, to match their real definitions (and to stay consistent with
// the declarations in XMediaEncode.h).
struct AVCodecContext;
struct AVPacket;
/// RTMP muxing/streaming interface (abstract factory base class).
/// Concrete implementations are obtained through Get(); the constructor
/// is protected so instances can only be created by the factory.
class XRtmp
{
public:
	// Factory method: returns the streamer instance for the given index.
	static XRtmp *Get(unsigned char index = 0);
	// Init the muxer context for the given rtmp url
	virtual bool Init(const char *url) = 0;

	// Add a video or audio stream copied from the encoder context
	virtual bool AddStream(const AVCodecContext *c) = 0;
	
	// Open the rtmp network IO and send the container header
	virtual bool SendHead() = 0;

	// Push one encoded frame to the server
	virtual bool SendFrame(AVPacket *pack) = 0;
	// BUGFIX: the destructor must be virtual on a polymorphic base so that
	// deleting through XRtmp* is safe (XMediaEncode already does this).
	virtual ~XRtmp();
protected:
	XRtmp();
};

XRtmp.cpp

#include "XRtmp.h"
#include <iostream>
#include <string>
using namespace std;
extern "C"
{
#include <libavformat/avformat.h>
}
#pragma comment(lib,"avformat.lib")
class CXRtmp :public XRtmp
{
public:

	void Close()
	{
		if (ic)
		{
			avformat_close_input(&ic);
			vs = NULL;
		}
		vc = NULL;
		url = "";
	}
	bool Init(const char *url)
	{
		//a.创建输出封装器上下文
		int ret = avformat_alloc_output_context2(&ic, 0, "flv", url);
		this->url = url;
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			cout << buf << endl;
			return false;
		}
		return true;
	}
	bool AddStream(const AVCodecContext *c)
	{
		if (!c)return false;
		if (c->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			vc = c;
		}
		AVStream *st = avformat_new_stream(ic, NULL);
		if (!st)
		{
			cout << ("avformat_new_stream failed!") << endl;
			return false;
		}
		st->codecpar->codec_tag = 0;
		//从编码器复制参数
		avcodec_parameters_from_context(st->codecpar, c);
		av_dump_format(ic, 0, url.c_str(), 1);
		if (c->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			vc = c;
			vs = st;
		}
		return true;
	}
	bool SendHead()
	{
		int ret = avio_open(&ic->pb, url.c_str(), AVIO_FLAG_WRITE);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			cout << buf << endl;
			return false;
		}
		//写入封装头
		ret = avformat_write_header(ic, NULL);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			cout << buf << endl;
			return false;
		}
		return true;
	}

	bool SendFrame(AVPacket *pack)
	{
		//推流
		pack->pts = av_rescale_q(pack->pts, vc->time_base, vs->time_base);
		pack->dts = av_rescale_q(pack->dts, vc->time_base, vs->time_base);
		int ret = av_interleaved_write_frame(ic, pack);
		if (ret == 0)
		{
			cout << "#" << flush;
		}
		return true;
	}
private:
	AVFormatContext *ic = NULL;  	//rtmp flv 封装器
	string url = "";
	AVStream *vs = NULL;
	//视频编码器流
	const AVCodecContext *vc = NULL;
};

// Factory method: lazily registers the muxers and network protocols once,
// then hands out the singleton streamer instance for the given index.
XRtmp * XRtmp::Get(unsigned char index)
{
	static bool isFirst = true;
	if (isFirst)
	{
		// register all muxers/demuxers (once per process)
		av_register_all();
		// register all network protocols (rtmp, tcp, ...)
		avformat_network_init();
		isFirst = false;
	}
	// BUGFIX: index ranges 0..255, so 256 slots are needed (the original
	// 255-element array made Get(255) index past the end).
	static CXRtmp cxr[256];
	return &cxr[index];
}
// Protected: instances are only created through the Get() factory.
XRtmp::XRtmp()
{
}


// Destructor; nothing to release here — the concrete implementation
// frees its resources in Close().
XRtmp::~XRtmp()
{
}

main.cpp

#include <opencv2/highgui.hpp>
#include <iostream>
#include "XMediaEncode.h"
#include "XRtmp.h"
using namespace std;
using namespace cv;

#pragma comment(lib,"opencv_world320.lib")
// Capture camera frames with OpenCV, convert BGR->YUV420P, encode H264
// and push the stream to an nginx-rtmp server.
int main(int argc,char *argv[])
{
	//nginx-rtmp live server rtmp push URL
	// FIX: string literals are const; binding them to char* is a
	// deprecated conversion (an error in conforming compilers).
	const char *outUrl = "rtmp://192.168.198.128/live";

	// encoder and pixel-format converter
	XMediaEncode *me = XMediaEncode::Get(0);

	// muxing and streaming object
	XRtmp *xr = XRtmp::Get(0);


	VideoCapture cam;
	Mat frame;
	namedWindow("video");

	try 
	{
		///1. open the local camera with opencv
		cam.open(0);
		if (!cam.isOpened())
		{
			throw exception("cam open failed!");
		}
		cout << "cam open success" << endl;
		int inWidth = cam.get(CAP_PROP_FRAME_WIDTH);
		int inHeight = cam.get(CAP_PROP_FRAME_HEIGHT);
		int fps = cam.get(CAP_PROP_FPS);
		cout << "fps=" << fps << endl;
		///2. init the conversion context
		///3. init the output frame
		me->inWidth = inWidth;
		me->inHeight = inHeight;
		me->outHeight = inHeight;
		me->outWidth = inWidth;
		// FIX: the result was silently ignored before
		if (!me->InitScale())
		{
			throw exception("InitScale failed!");
		}

		///4. init the encoder context
		if (!me->InitVideoCodec())
		{
			throw exception("InitVideoCodec failed!");
		}

		
		///5. muxer and video stream configuration
		// FIX: check the results that were silently ignored before
		if (!xr->Init(outUrl))
		{
			throw exception("XRtmp Init failed!");
		}
		// add the video stream
		if (!xr->AddStream(me->vc))
		{
			throw exception("AddStream failed!");
		}
		/// open the rtmp network IO and write the container header
		if (!xr->SendHead())
		{
			throw exception("SendHead failed!");
		}


		// frame loop
		for (;;)
		{
			/// grab and decode one camera frame
			if (!cam.grab())
			{
				continue;
			}
			if (!cam.retrieve(frame))
			{
				continue;
			}
			imshow("video", frame);
			waitKey(1);

			///rgb to yuv
			me->inPixSize = frame.elemSize();
			AVFrame *yuv = me->RGBToYUV((char *)frame.data);
			if (!yuv)
			{
				continue;
			}
			///h264 encode (NULL while the encoder is still buffering)
			AVPacket *pack = me->EncodeVideo(yuv);
			if (!pack)continue;
			// push to the server
			xr->SendFrame(pack);
		}
	}
	catch (exception &ex)
	{
		if (cam.isOpened())
			cam.release();
		cerr << ex.what() << endl;
	}
	getchar();
	return 0;
}