QML + FFmpeg Video Player
Environment: Qt 5.13, ffmpeg-4.2.2-win64
The player is split into a QML front end (main.qml plus the ToolButton, MenuButton and FlatButton components below) and a C++ back end: VideoFFmpeg wraps demuxing, decoding and YUV-to-RGB conversion, VideoThread drives the decode loop, and VideoItem is a QQuickPaintedItem that paints the decoded frames and is exposed to QML through the KDMQuick 1.0 import.
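Before the code: the C++ back end links against the FFmpeg development package, so the .pro file needs the include path and import libraries. A minimal qmake sketch, assuming the dev package is unpacked next to the project (the FFMPEG_DIR value is an assumption, not taken from the original project):

QT += quick

# Assumed location of the unpacked ffmpeg-4.2.2-win64-dev package
FFMPEG_DIR = $$PWD/ffmpeg-4.2.2-win64-dev
INCLUDEPATH += $$FFMPEG_DIR/include
LIBS += -L$$FFMPEG_DIR/lib -lavformat -lavcodec -lswscale -lavutil

At runtime the matching DLLs from the ffmpeg-4.2.2-win64-shared package also need to sit next to the executable.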
main.qml
import QtQuick 2.12
import QtQuick.Window 2.12
import QtQuick.Controls 2.12
import QtQuick.Dialogs 1.2
import KDMQuick 1.0
/**
* @FileName main.qml
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
Window {
id: window
visible: true
width: 1280
height: 720
maximumHeight: 720
minimumHeight: 720
maximumWidth: 1280
minimumWidth: 1280
flags: Qt.Dialog
title: qsTr("QML+FFmpeg视频播放器")
property string filePath: ''
property string fileName: ''
Connections{
target: toolBtn
onValueChanged: {
if(toolBtn.sliderPressed){
videoItem.setSeek(toolBtn.value)
}
}
}
FileDialog {
id: fileDialog
visible: false
title: "Please choose a file"
modality: Qt.ApplicationModal
folder: shortcuts.movies
nameFilters: ["Video files(*.mp4 *.avi)", "All files (*)"]
selectedNameFilter: "All files (*)"
onAccepted: {
console.log("You chose: " + fileDialog.fileUrl)
filePath = fileDialog.fileUrl
window.fileName = fileDialog.fileUrl
videoItem.filePath = filePath
fileName.opacity = 1
toolBtn.totalTime = videoItem.totalTime()
toolBtn.opacity = 1
videoItem.play()
timer.start()
fileNameTimer.start()
}
onRejected: {
console.log("Canceled")
}
}
MouseArea {
anchors.fill: parent
hoverEnabled: true
onEntered: {
toolBtn.opacity = 0
}
}
VideoItem{
id: videoItem
x: 0
y: 0
width: 1280
height: 720
}
Timer {
id: timer
interval: 40; running: false; repeat: true
onTriggered: {
if(!toolBtn.sliderPressed){
videoItem.updatePaint()
toolBtn.currentTime = videoItem.currentTime()
toolBtn.seek = videoItem.seek()
}
}
}
Timer {
id: fileNameTimer
interval: 2000; running: false; repeat: false
onTriggered: {
fileName.opacity = 0
toolBtn.opacity = 0
}
}
Text {
id: fileName
x: 15
y: 550
font.family: "Helvetica"
font.pointSize: 20
text: window.fileName
}
ToolButton {
id: toolBtn
x: 0
y: window.height - toolBtn.height
width: parent.width
height: 100
opacity: 0
onPlayClicked: {
videoItem.play()
timer.start()
}
onPauseClicked: {
timer.stop()
videoItem.pause()
}
onStopClicked: {
timer.stop()
videoItem.pause()
videoItem.stop()
videoItem.updatePaint()
toolBtn.currentTime = '00:00'
toolBtn.totalTime = '00:00'
toolBtn.seek = 0
}
}
MenuButton {
id: menuBtn
x: 10; y: 10
opacity: 0.1
onClicked: {
drawer.open()
}
}
Drawer {
id: drawer
width: 150
height: parent.height
dragMargin: 10
Column{
spacing: 5
padding: 0
Image {
id: logo
width: parent.width
anchors.horizontalCenter: parent.horizontalCenter
fillMode: Image.PreserveAspectFit
source: "./qt-logo.png"
}
FlatButton {
id: open
text: "打开"
font.pixelSize: 23
font.family: "Helvetica"
width: drawer.width
height: 50
backgroundDefaultColor: '#5A6268'
onClicked: {
fileDialog.open()
drawer.close()
}
}
FlatButton {
id: close
text: "关闭"
font.pixelSize: 23
width: drawer.width
height: 50
backgroundDefaultColor: '#5A6268'
onClicked: {
timer.stop()
videoItem.pause()
videoItem.stop()
Qt.quit()
}
}
FlatButton {
id: about
text: "关于"
font.pixelSize: 23
width: drawer.width
height: 50
backgroundDefaultColor: '#5A6268'
onClicked: {
messageDialog.open()
drawer.close()
}
}
}
background: Rectangle {
Rectangle {
x: parent.width - 1
width: 1
height: parent.height
}
}
}
MessageDialog {
id: messageDialog
title: "About"
text: "QML+FFmpet视频播放器"
onAccepted: {
console.log("And of course you could only agree.")
}
Component.onCompleted: visible = false
}
onClosing: {
timer.stop()
videoItem.pause()
videoItem.stop()
}
Component.onCompleted: {}
}
ToolButton.qml
import QtQuick 2.0
import QtQuick.Controls 2.12
/**
* @FileName ToolButton.qml
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
Rectangle {
id: root
property bool containMouseing: false
property bool play: false
property string playImage: "/play.png"
property string pauseImage: "/pause.png"
property string currentTime: '00:00'
property string totalTime: '00:00'
property bool sliderPressed: false
property int seek: 0
property real value: 0
signal playClicked()
signal pauseClicked()
signal stopClicked()
width: 50
height: 50
color: 'gray'
MouseArea{
anchors.fill: parent
hoverEnabled: true
onContainsMouseChanged: {
root.containMouseing = containsMouse
if (root.containMouseing){
root.opacity = 1
}
else
root.opacity = 0
}
}
Slider {
id: slider
x: 0
y: 15
height: 15
width: root.width
from: 0
value: root.seek
to: 999
onHoveredChanged: {
root.opacity = 1
}
onValueChanged: {
root.value = value
}
onPressedChanged: {
root.sliderPressed = pressed
}
}
Button {
id: playBtn
x: 580
y: 40
height: 50
width: 50
flat: true
icon.source : playImage
icon.width: 20
icon.height: 26
background: Rectangle {
radius: 25
opacity: enabled ? 1 : 0.3
color: playBtn.down ? "#d0d0d0" : "#e0e0e0"
}
onHoveredChanged: {
root.opacity = 1
}
onClicked: {
root.play = !root.play
if(!root.play)
{
root.playClicked()
}
else{
root.pauseClicked()
}
}
}
Button {
id: stopBtn
x: 650
y: 40
height: 50
width: 50
flat: true
icon.source : "stop.png"
icon.width: 20
icon.height: 26
background: Rectangle {
radius: 25
opacity: enabled ? 1 : 0.3
color: stopBtn.down ? "#d0d0d0" : "#e0e0e0"
}
onHoveredChanged: {
root.opacity = 1
}
onClicked: {
root.stopClicked()
}
}
Text {
id: currentTime
x: 20
y: 40
font.pointSize: 12
text: root.currentTime
}
Text {
id: totalTime
x: 1210
y: 40
font.pointSize: 12
text: root.totalTime
}
Behavior on opacity {
NumberAnimation {
duration: 800
}
}
}
MenuButton.qml
import QtQuick 2.0
/**
* @FileName MenuButton.qml
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
Rectangle {
id: root
property alias text: text.text
property bool containMouseing: false
signal clicked()
width: 50
height: 50
color: 'gray'
radius: 25
Text {
id: text
anchors.centerIn: parent
font.pixelSize: 30
text: qsTr("\u2630")
}
MouseArea{
anchors.fill: parent
hoverEnabled: true
onClicked: root.clicked()
onContainsMouseChanged: {
root.containMouseing = containsMouse
if (root.containMouseing)
root.opacity = 1
else
root.opacity = 0.2
}
}
Behavior on opacity {
NumberAnimation {
duration: 500
}
}
}
FlatButton.qml
import QtQuick 2.0
import QtQuick.Controls 2.2
import QtGraphicalEffects 1.0
/**
* @FileName FlatButton.qml
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
Button {
id: root
property color backgroundDefaultColor: '#4E5BF2'
property color backgroundPressedColor: Qt.darker(backgroundDefaultColor, 1.2)
property color contentItemTextColor: 'white'
text: 'Button'
contentItem: Text {
text: root.text
color: root.contentItemTextColor
font.pixelSize: 15
font.family: 'Arial'
font.weight: Font.Thin
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
elide: Text.ElideRight
}
background: Rectangle {
implicitWidth: 83
implicitHeight: 37
color: root.down ? root.backgroundPressedColor : root.backgroundDefaultColor
radius: 3
layer.enabled: true
layer.effect: DropShadow {
transparentBorder: true
color: root.down ? root.backgroundPressedColor : root.backgroundDefaultColor
samples: 20
}
}
}
VideoFFmpeg.h
#ifndef VIDEOFFMPEG_H
#define VIDEOFFMPEG_H
#include <QObject>
#include <QMutex>
#include <string>
extern "C"{
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}
/**
* @FileName VideoFFmpeg.h
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
class VideoFFmpeg : public QObject
{
Q_OBJECT
Q_PROPERTY(int totalTim READ totalTim WRITE setTotalTim) // total duration in seconds
Q_PROPERTY(double fps READ fps WRITE setFps) // video frame rate
Q_PROPERTY(double pts READ pts WRITE setPts) // current presentation time in seconds
Q_PROPERTY(bool playStatus READ playStatus WRITE setPlayStatus) // playback state
public:
static VideoFFmpeg *GetInstance()
{
static VideoFFmpeg ffmpeg;
return &ffmpeg;
}
private:
explicit VideoFFmpeg(QObject *parent = nullptr);
virtual ~VideoFFmpeg();
public:
AVFrame *yuv; // decoded video frame
int videoStream; // index of the video stream
private:
char errorBuf[1024]; // last error message
QMutex mutex; // protects the FFmpeg contexts
AVFormatContext *ic; // demuxing (format) context
SwsContext *cCtx; // scaler context for pixel-format conversion
private:
int m_totalTim;
double m_fps;
double m_pts;
bool m_playStatus;
public:
bool _Open(const char * path); // open a media file
bool _Close(); // close and release all contexts
AVPacket _Read(); // read one packet
AVFrame *_Decode(const AVPacket *pkt); // decode one packet
bool _YUVToRGB(const AVFrame *yuv, char *out,int outwidth, int outheight); // convert a decoded frame to 32-bit RGB
bool _Seek(float pos); // seek to a relative position in [0,1]
std::string _GetErrorfo(); // last error message
public:
int totalTim() const;
double fps() const;
double pts() const;
bool playStatus() const;
public slots:
void setTotalTim(int totalTim);
void setFps(double fps);
void setPts(double pts);
void setPlayStatus(bool playStatus);
};
static double r2d(AVRational r)
{
return r.den == 0 ? 0 : (double)r.num / (double)r.den;
}
#endif // VIDEOFFMPEG_H
VideoFFmpeg.cpp
#include "VideoFFmpeg.h"
#include <cstring> // for memset
/**
* @FileName VideoFFmpeg.cpp
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
VideoFFmpeg::VideoFFmpeg(QObject *parent)
: QObject(parent),
yuv(nullptr),
videoStream(0),
ic(nullptr),
cCtx(nullptr),
m_totalTim(0),
m_fps(0),
m_pts(0),
m_playStatus(false)
{
errorBuf[0] = '\0';
av_register_all();
}
VideoFFmpeg::~VideoFFmpeg()
{
}
bool VideoFFmpeg::_Open(const char *path)
{
_Close();
mutex.lock();
int re = avformat_open_input(&ic, path, 0, 0);
if (re != 0)
{
mutex.unlock();
av_strerror(re, errorBuf, sizeof(errorBuf));
return false;
}
m_totalTim = ic->duration / (AV_TIME_BASE);
for (int i = 0; i < ic->nb_streams; i++)
{
AVCodecContext *enc = ic->streams[i]->codec;
if (enc->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoStream = i;
m_fps = r2d(ic->streams[i]->avg_frame_rate);
AVCodec *codec = avcodec_find_decoder(enc->codec_id);
if (!codec)
{
mutex.unlock();
return false;
}
int err = avcodec_open2(enc, codec, NULL);
if (err != 0)
{
mutex.unlock();
char buf[1024] = { 0 };
av_strerror(err, buf, sizeof(buf));
return false;
}
}
}
mutex.unlock();
return true;
}
bool VideoFFmpeg::_Close()
{
mutex.lock();
if (cCtx)
{
sws_freeContext(cCtx);
cCtx = nullptr;
}
if (yuv) av_frame_free(&yuv);
if (ic) avformat_close_input(&ic);
mutex.unlock();
return true;
}
AVPacket VideoFFmpeg::_Read()
{
AVPacket pkt;
memset(&pkt,0,sizeof(AVPacket));
mutex.lock();
if (!ic)
{
mutex.unlock();
return pkt;
}
int err = av_read_frame(ic, &pkt);
if (err != 0)
{
av_strerror(err,errorBuf,sizeof(errorBuf));
}
mutex.unlock();
return pkt;
}
AVFrame *VideoFFmpeg::_Decode(const AVPacket *pkt)
{
mutex.lock();
if (!ic)
{
mutex.unlock();
return nullptr;
}
if (yuv == nullptr)
{
yuv = av_frame_alloc();
}
int re = avcodec_send_packet(ic->streams[pkt->stream_index]->codec,pkt);
if (re != 0)
{
mutex.unlock();
return nullptr;
}
re = avcodec_receive_frame(ic->streams[pkt->stream_index]->codec,yuv);
if (re != 0)
{
mutex.unlock();
return nullptr;
}
int pts = yuv->pts*r2d(ic->streams[pkt->stream_index]->time_base);
setPts(pts);
mutex.unlock();
return yuv;
}
bool VideoFFmpeg::_YUVToRGB(const AVFrame *yuv, char *out, int outwidth, int outheight)
{
mutex.lock();
if (!ic)
{
mutex.unlock();
return false;
}
AVCodecContext *videoCtx = ic->streams[this->videoStream]->codec;
cCtx = sws_getCachedContext(cCtx, videoCtx->width,
videoCtx->height,
videoCtx->pix_fmt,
outwidth, outheight,
AV_PIX_FMT_BGRA,
SWS_BICUBIC,
nullptr, nullptr, nullptr);
if (!cCtx)
{
mutex.unlock();
return false;
}
uint8_t *data[AV_NUM_DATA_POINTERS] = { 0 };
data[0] = (uint8_t *)out;
int linesize[AV_NUM_DATA_POINTERS] = { 0 };
linesize[0] = outwidth * 4;
int h = sws_scale(cCtx, yuv->data,
yuv->linesize,
0, videoCtx->height,
data,
linesize
);
mutex.unlock();
return true;
}
bool VideoFFmpeg::_Seek(float pos)
{
mutex.lock();
if (!ic)
{
mutex.unlock();
return false;
}
int64_t stamp = 0;
stamp = pos * ic->streams[videoStream]->duration;
int pts = stamp * r2d(ic->streams[videoStream]->time_base);
setPts(pts);
// stamp is expressed in the stream's time_base units, so AVSEEK_FLAG_FRAME does not apply
int re = av_seek_frame(ic, videoStream, stamp, AVSEEK_FLAG_BACKWARD);
avcodec_flush_buffers(ic->streams[videoStream]->codec);
mutex.unlock();
return re >= 0; // av_seek_frame returns >= 0 on success
}
std::string VideoFFmpeg::_GetErrorfo()
{
mutex.lock();
std::string re = this->errorBuf;
mutex.unlock();
return re;
}
int VideoFFmpeg::totalTim() const
{
return m_totalTim;
}
double VideoFFmpeg::fps() const
{
return m_fps;
}
double VideoFFmpeg::pts() const
{
return m_pts;
}
bool VideoFFmpeg::playStatus() const
{
return m_playStatus;
}
void VideoFFmpeg::setTotalTim(int totalTim)
{
m_totalTim = totalTim;
}
void VideoFFmpeg::setFps(double fps)
{
m_fps = fps;
}
void VideoFFmpeg::setPts(double pts)
{
m_pts = pts;
}
void VideoFFmpeg::setPlayStatus(bool playStatus)
{
m_playStatus = playStatus;
}
VideoThread.h
#ifndef VIDEOTHREAD_H
#define VIDEOTHREAD_H
#include <QThread>
#include <QObject>
/**
* @FileName VideoThread.h
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
class VideoThread : public QThread
{
Q_OBJECT
Q_PROPERTY(bool status READ status WRITE setStatus) // thread exit flag
public:
static VideoThread *GetInstance()
{
static VideoThread videoThread;
return &videoThread;
}
void run() override;
bool status() const
{
return m_status;
}
private:
explicit VideoThread(QObject *parent = nullptr);
~VideoThread();
bool m_status;
public slots:
void setStatus(bool status)
{
m_status = status;
}
};
#endif // VIDEOTHREAD_H
VideoThread.cpp
#include "VideoThread.h"
#include "VideoFFmpeg.h"
#include <QDebug>
/**
* @FileName VideoThread.cpp
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
VideoThread::VideoThread(QObject *parent)
: QThread(parent),
m_status(false)
{
}
VideoThread::~VideoThread()
{
}
void VideoThread::run()
{
while (!m_status)
{
if (!VideoFFmpeg::GetInstance()->playStatus())
{
msleep(10);
continue;
}
AVPacket pkt = VideoFFmpeg::GetInstance()->_Read();
if (pkt.size <= 0)
{
msleep(10);
continue;
}
if (pkt.stream_index != VideoFFmpeg::GetInstance()->videoStream)
{
av_packet_unref(&pkt);
continue;
}
VideoFFmpeg::GetInstance()->_Decode(&pkt);
av_packet_unref(&pkt);
if (VideoFFmpeg::GetInstance()->fps() > 0)
msleep(1000/VideoFFmpeg::GetInstance()->fps());
}
}
VideoItem.h
#ifndef VIDEOITEM_H
#define VIDEOITEM_H
#include <QQuickPaintedItem>
#include <QPainter>
/**
* @FileName VideoItem.h
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
class VideoItem : public QQuickPaintedItem
{
Q_OBJECT
Q_PROPERTY(QString filePath READ filePath WRITE setFilePath NOTIFY filePathChanged)
Q_PROPERTY(bool playStatus READ playStatus WRITE setPlayStatus)
public:
VideoItem(QQuickItem *parent = nullptr);
QString filePath() const;
bool playStatus() const;
signals:
void filePathChanged(QString filePath);
public slots:
void paint(QPainter *painter) override;
Q_INVOKABLE void updatePaint();
Q_INVOKABLE void play();
Q_INVOKABLE void pause();
Q_INVOKABLE void stop();
Q_INVOKABLE QString totalTime();
Q_INVOKABLE QString currentTime();
Q_INVOKABLE int seek();
Q_INVOKABLE void setSeek(int pos);
void setFilePath(QString filePath);
void setPlayStatus(bool playStatus);
private:
QString m_filePath;
bool m_playStatus;
};
#endif // VIDEOITEM_H
VideoItem.cpp
#include "VideoItem.h"
#include "VideoFFmpeg.h"
#include "VideoThread.h"
#include <QPainter>
#include <QImage>
#include <QFont>
#include <QDebug>
/**
* @FileName VideoItem.cpp
* @brief File Description
* @author Kongdemin
* @date 2020-05-20
*/
VideoItem::VideoItem(QQuickItem *parent)
: QQuickPaintedItem(parent),
m_filePath(QString()),
m_playStatus(false)
{
connect(this, &VideoItem::filePathChanged,[=](QString file){
if (file.isEmpty())
{
return;
}
// strip the "file:///" prefix from the URL delivered by the QML FileDialog
bool open = VideoFFmpeg::GetInstance()->_Open(file.mid(8).toLocal8Bit().constData());
if (!open)
{
qDebug()<< "file open failed!";
return;
}
});
}
QString VideoItem::filePath() const
{
return m_filePath;
}
bool VideoItem::playStatus() const
{
return m_playStatus;
}
void VideoItem::paint(QPainter *painter)
{
static QImage *image = nullptr;
static int w = 0;
static int h = 0;
if (w != width() || h != height())
{
if (image)
{
delete [] image->bits(); // buffer was allocated with new[]
delete image;
image = nullptr;
}
w = width();
h = height();
}
if (image == nullptr)
{
uchar *buf = new uchar[w * h * 4];
image = new QImage(buf, w, h, QImage::Format_ARGB32);
}
AVFrame *yuv = VideoFFmpeg::GetInstance()->yuv;
if (yuv == nullptr) {
painter->save();
QFont font;
font.setFamily("Helvetica");
font.setPixelSize(38);
painter->setFont(font);
painter->drawText(QRect(0,0,this->width(),this->height()), Qt::AlignCenter, QString("Please open a video file"));
painter->restore();
return;
}
VideoFFmpeg::GetInstance()->_YUVToRGB(yuv, (char *)image->bits(),width(),height());
painter->drawImage(QPoint(0, 0), *image);
}
void VideoItem::updatePaint()
{
update();
}
void VideoItem::play()
{
VideoFFmpeg::GetInstance()->setPlayStatus(true);
if (VideoThread::GetInstance()->isRunning())
return;
else {
VideoThread::GetInstance()->setStatus(false);
VideoThread::GetInstance()->start();
}
}
void VideoItem::pause()
{
VideoFFmpeg::GetInstance()->setPlayStatus(false);
}
void VideoItem::stop()
{
if (VideoThread::GetInstance()->isRunning())
{
VideoThread::GetInstance()->setStatus(true);
VideoThread::GetInstance()->quit();
VideoThread::GetInstance()->wait();
}
VideoFFmpeg::GetInstance()->_Close();
setFilePath(QString());
}
QString VideoItem::totalTime()
{
int min = (VideoFFmpeg::GetInstance()->totalTim())/60;
int sec = (VideoFFmpeg::GetInstance()->totalTim()) % 60;
QString alltime = QString("%1:%2").arg(min, 2, 10, QLatin1Char('0')).arg(sec, 2, 10, QLatin1Char('0'));
return alltime;
}
QString VideoItem::currentTime()
{
int min = (VideoFFmpeg::GetInstance()->pts() ) / 60;
int sec = (int)(VideoFFmpeg::GetInstance()->pts() ) % 60;
QString currenttime = QString("%1:%2").arg(min, 2, 10, QLatin1Char('0')).arg(sec, 2, 10, QLatin1Char('0'));
return currenttime;
}
int VideoItem::seek()
{
if (VideoFFmpeg::GetInstance()->totalTim() > 0)
{
return int(VideoFFmpeg::GetInstance()->pts()*1000/VideoFFmpeg::GetInstance()->totalTim());
}
else
return 0;
}
void VideoItem::setSeek(int pos)
{
VideoFFmpeg::GetInstance()->_Seek(float(pos)/1000);
}
void VideoItem::setFilePath(QString filePath)
{
if (m_filePath == filePath)
return;
m_filePath = filePath;
emit filePathChanged(m_filePath);
}
void VideoItem::setPlayStatus(bool playStatus)
{
m_playStatus = playStatus;
}
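main.qml imports KDMQuick 1.0 to get the VideoItem element, so the type has to be registered with the QML engine before the file is loaded. The original post does not show main.cpp; the sketch below is a minimal assumption of what it looks like (the qrc:/main.qml resource path is assumed):

#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QtQml>
#include "VideoItem.h"

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);
    // Expose VideoItem to QML under the URI used by "import KDMQuick 1.0" in main.qml
    qmlRegisterType<VideoItem>("KDMQuick", 1, 0, "VideoItem");
    QQmlApplicationEngine engine;
    // Assumed resource path; adjust to however main.qml is packaged in the project
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
    if (engine.rootObjects().isEmpty())
        return -1;
    return app.exec();
}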