Installing FFmpeg on Ubuntu
T113VideoDemo.pro
#-------------------------------------------------
#
# Project created by QtCreator 2023-07-28T11:45:22
#
#-------------------------------------------------
QT += core gui
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
TARGET = T113VideoDemo
TEMPLATE = app
# The following define makes your compiler emit warnings if you use
# any feature of Qt which has been marked as deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if you use deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
SOURCES += \
main.cpp \
mainwindow.cpp
HEADERS += \
mainwindow.h
FORMS += \
mainwindow.ui
INCLUDEPATH += /usr/local/ffmpeg/include
LIBS += /usr/local/ffmpeg/lib/libavcodec.so \
/usr/local/ffmpeg/lib/libavdevice.so \
/usr/local/ffmpeg/lib/libavfilter.so \
/usr/local/ffmpeg/lib/libavformat.so \
/usr/local/ffmpeg/lib/libavutil.so \
/usr/local/ffmpeg/lib/libpostproc.so \
/usr/local/ffmpeg/lib/libswresample.so \
/usr/local/ffmpeg/lib/libswscale.so
mainwindow.h
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
namespace Ui {
class MainWindow;
}
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
explicit MainWindow(QWidget *parent = 0);
~MainWindow();
private slots:
void slotOpenFile();
void on_pushButton_clicked();
void on_close_clicked();
void on_screen_clicked();
private:
Ui::MainWindow *ui;
QString currentFileName;
};
#endif // MAINWINDOW_H
mainwindow.cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QTime>
#include <QFileDialog>
#include <QScreen>
#include <QCoreApplication>
// FFmpeg headers (plain C API, so they must be wrapped in extern "C")
extern "C"{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
// Delay helper: blocks for msec milliseconds while keeping the Qt event loop alive
void delay(int msec){
QTime dieTime = QTime::currentTime().addMSecs(msec);
while( QTime::currentTime() < dieTime )
QCoreApplication::processEvents(QEventLoop::AllEvents, 100);
}
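// Note: this busy-wait keeps the UI responsive through processEvents(), but a
// QTimer firing every 40 ms and decoding/showing one frame per tick would be
// the more idiomatic Qt design (hypothetical alternative, not implemented here).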
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
ui->setupUi(this);
this->setWindowTitle("hello"); // set the window title
QObject::connect(ui->open,SIGNAL(clicked()),this,SLOT(slotOpenFile()));
}
MainWindow::~MainWindow()
{
delete ui;
}
void MainWindow::slotOpenFile() // "Open file" button slot
{
currentFileName = QFileDialog::getOpenFileName(this, tr("open-file"), tr(""),
tr("Video files(*.rmvb *.rm *.avi *.wmv *.mkv *.asf *.3gp *.mov *.mp4 *.ogv* )"));
if( !currentFileName.isEmpty() )
{
on_pushButton_clicked();
}
}
void MainWindow::on_pushButton_clicked(){
AVFormatContext *pFormatCtx; // information contained in the media container (demuxer context)
int videoIndex = -1; // index of the video stream, initialized to -1
AVCodecContext *pCodecCtx; // codec context of the video stream
AVCodec *pCodec; // video decoder
AVFrame *pFrame, *pFrameRGB;
unsigned char *out_buffer;
AVPacket *packet;
int ret, got_picture;
struct SwsContext *img_convert_ctx; // used for pixel-format conversion and scaling
char file_path[1280] = {0};
strcpy(file_path,currentFileName.toUtf8().data());
// Register FFmpeg's muxers, demuxers and codecs
av_register_all();
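// Note: av_register_all() has been deprecated since FFmpeg 4.0 (registration is
// automatic there) and is removed in newer releases; see the sketch after this
// listing for the newer API.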
/*** (1) Open the media file and read its stream information ***/
// Allocate the AVFormatContext
pFormatCtx = avformat_alloc_context();
// Open the input stream
/* avformat_open_input() */
// Opens a file/URL and parses it. The parsed content includes: video streams, audio streams, their parameters, and the frame index.
// Param 1: AVFormatContext **ps, the format context allocated by avformat_alloc_context().
// Param 2: URL of the stream to open; it ends up stored in the AVFormatContext.
// Param 3: forces a specific input (demuxer) format; usually NULL so FFmpeg probes it itself.
// Param 4: dictionary of AVFormatContext and demuxer-private options; on return it is replaced by a dictionary containing the options that were not found.
if (avformat_open_input(&pFormatCtx, file_path, NULL, NULL) != 0)
{
printf("Couldn't open input stream.\n");
return;
}
// Read the stream information
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
printf("Couldn't find stream information.\n");
return;
}
/*** (2) Locate the video stream and find/open its decoder ***/
// Find the index of the first video stream (nb_streams is the number of streams)
for (int i = 0; i < (int)pFormatCtx->nb_streams; i++)
{
// stop at the first video stream found
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) // is this stream a video stream?
{
videoIndex = i;
break;
}
}
if (videoIndex == -1)
{
printf("Didn't find a video stream.\n");
return ;
}
// Find the decoder for the video stream
pCodecCtx = pFormatCtx->streams[videoIndex]->codec; // codec context of the video stream
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL)
{
printf("Codec not found.\n");
return ;
}
// Open the decoder
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
printf("Could not open codec.\n");
return ;
}
// Print information about the media file
printf("--------------- File Information ----------------\n");
av_dump_format(pFormatCtx, 0, file_path, 0); // prints detailed information about the input (or output)
printf("-------------------------------------------------\n");
/*** (3) Decode the video while converting the pixel data ***/
// Allocate frame structures; av_frame_alloc() only allocates the struct itself, the image data buffer must be allocated separately (with av_malloc)
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
// Allocate the buffer that will hold the converted image data (av_image_get_buffer_size returns the size needed for one frame)
out_buffer = (unsigned char *)av_malloc((size_t)av_image_get_buffer_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1));
// Attach the buffer to pFrameRGB's data/linesize arrays
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer,
AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1);
packet = (AVPacket *)av_malloc(sizeof(AVPacket));
// Initialize the SwsContext used for the conversion to RGB32
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);
// av_read_frame() reads one undecoded packet
while (av_read_frame(pFormatCtx, packet) >= 0)
{
// only handle packets that belong to the video stream
if (packet->stream_index == videoIndex)
{
// Decode one video frame
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0)
{
printf("Decode Error.\n");
return ;
}
if (got_picture)
{
sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
QImage img((uchar*)pFrameRGB->data[0],pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB32);
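// Note: passing only width/height to QImage assumes pFrameRGB->linesize[0] == width * 4,
// which holds here because av_image_fill_arrays() was called with align = 1; otherwise
// pFrameRGB->linesize[0] should be passed as the bytesPerLine argument.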
ui->label->setPixmap(QPixmap::fromImage(img)); // display the frame on the QLabel
delay(40);
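// Note: the fixed 40 ms delay assumes roughly 25 fps; a real player would pace
// frames from the stream's frame rate or the packets' timestamps instead.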
}
}
av_free_packet(packet);
}
/*** (4) Release everything that was allocated ***/
sws_freeContext(img_convert_ctx); // free the SwsContext
av_free(out_buffer); // buffer allocated with av_malloc above
av_free(packet); // AVPacket struct allocated with av_malloc above
av_frame_free(&pFrameRGB);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
}
void MainWindow::on_close_clicked(){
close();
}
void MainWindow::on_screen_clicked(){
// static bool max = false;
// static QRect location = this->geometry();
// if (max) {
// this->setGeometry(location);
// } else {
// location = this->geometry();
// this->setGeometry(qApp->desktop()->availableGeometry());
// }
// this->setProperty("canMove", max);
// max = !max;
}
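Note on newer FFmpeg versions: the decode path above (av_register_all, AVStream::codec, avcodec_decode_video2, av_free_packet) is deprecated in FFmpeg 3.x/4.x and removed in FFmpeg 5.x. If the build installed under /usr/local/ffmpeg is a newer release, the loop has to go through the send/receive API instead. The snippet below is only a minimal sketch of that replacement path; the function name decodeVideoModern and the abbreviated error handling are assumptions of mine, not part of the original demo.

// Sketch of the FFmpeg >= 4.0 decode path (mandatory on FFmpeg 5.x).
// Scaling with sws_scale and the QImage/QLabel display stay the same as above.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
static int decodeVideoModern(AVFormatContext *fmtCtx, int videoIndex)
{
    // Build the codec context from the stream's codec parameters instead of
    // using the removed AVStream::codec field.
    AVCodecParameters *par = fmtCtx->streams[videoIndex]->codecpar;
    const AVCodec *codec = avcodec_find_decoder(par->codec_id);
    if (!codec)
        return -1;
    AVCodecContext *codecCtx = avcodec_alloc_context3(codec);
    avcodec_parameters_to_context(codecCtx, par);
    if (avcodec_open2(codecCtx, codec, NULL) < 0)
    {
        avcodec_free_context(&codecCtx);
        return -1;
    }
    AVPacket *packet = av_packet_alloc(); // replaces av_malloc(sizeof(AVPacket))
    AVFrame *frame = av_frame_alloc();
    while (av_read_frame(fmtCtx, packet) >= 0)
    {
        if (packet->stream_index == videoIndex)
        {
            // avcodec_send_packet/avcodec_receive_frame replace avcodec_decode_video2
            if (avcodec_send_packet(codecCtx, packet) == 0)
            {
                while (avcodec_receive_frame(codecCtx, frame) == 0)
                {
                    // "frame" now holds one decoded picture: convert it with
                    // sws_scale and show it on the label exactly as above
                }
            }
        }
        av_packet_unref(packet); // replaces av_free_packet
    }
    av_frame_free(&frame);
    av_packet_free(&packet);
    avcodec_free_context(&codecCtx);
    return 0;
}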