diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..930fd7b
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,23 @@
+set(SRCS interface/cxffmpeg/interface.h
+    interface/cxffmpeg/qmlplayer.h
+    impl/qmlplayer.cpp
+    impl/VideoDecoder.h
+    impl/VideoDecoder.cpp
+    )
+
+set(DEFS PLAYER_FFMPEG_DLL)
+set(INCS ${CMAKE_CURRENT_SOURCE_DIR}/impl/ ${CMAKE_CURRENT_SOURCE_DIR}/interface)
+set(LIBS Qt5::Core Qt5::Gui Qt5::Widgets Qt5::Quick Qt5::Multimedia avutil avcodec avformat swscale)
+
+find_package(Qt5 COMPONENTS Multimedia REQUIRED)
+__add_real_target(qcxffmpeg dll SOURCE ${SRCS}
+    DEF ${DEFS}
+    INC ${INCS}
+    LIB ${LIBS}
+    INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/interface
+    )
+
+if(CC_INSTALL_QCXFFMPEG)
+    __install_directory_specif(${CMAKE_CURRENT_SOURCE_DIR}/interface/ qcxffmpeg)
+endif()
+
diff --git a/impl/VideoDecoder.cpp b/impl/VideoDecoder.cpp
new file mode 100644
index 0000000..66c45c4
--- /dev/null
+++ b/impl/VideoDecoder.cpp
@@ -0,0 +1,223 @@
+#include "VideoDecoder.h"
+#include <QDebug>
+#include <cstdio>
+
+/* Enable or disable frame reference counting. You are not supposed to support
+ * both paths in your application but pick the one most appropriate to your
+ * needs. Look for the use of refcount in this example to see what the
+ * differences in API usage are between them. */
+static int gRefCount = 0;
+
+static void pgm_save(unsigned char* buf, int wrap, int xsize, int ysize,
+    char* filename)
+{
+    FILE* f;
+    int i;
+
+    f = fopen(filename, "wb");  // binary mode: PGM pixel data must not be newline-translated
+    fprintf(f, "P5\n%d %d\n%d\n", xsize, ysize, 255);
+    for (i = 0; i < ysize; i++)
+        fwrite(buf + i * wrap, 1, xsize, f);
+    fclose(f);
+}
+
+static void outError(int num)
+{
+    char errorStr[1024];
+    av_strerror(num, errorStr, sizeof errorStr);
+    qDebug() << "FFMPEG ERROR:" << QString(errorStr);
+}
+
+static int open_codec_context(int& streamIndex
+    , AVCodecContext** decCtx
+    , AVFormatContext* fmtCtx
+    , enum AVMediaType type)
+{
+    int ret;
+    int index;
+    AVStream* st;
+    AVCodec* codec = nullptr;
+    AVDictionary* opts = nullptr;
+
+    ret = av_find_best_stream(fmtCtx, type, -1, -1, nullptr, 0);
+    if (ret < 0)
+    {
+        qWarning() << "Could not find stream " << av_get_media_type_string(type);
+        return ret;
+    }
+    index = ret;
+    st = fmtCtx->streams[index];
+
+    codec = avcodec_find_decoder(st->codecpar->codec_id);
+    if (!codec)
+    {
+        qWarning() << "Could not find codec " << av_get_media_type_string(type);
+        return AVERROR(EINVAL);
+    }
+    *decCtx = avcodec_alloc_context3(codec);
+    if (!*decCtx)
+    {
+        qWarning() << "Failed to allocate codec context " << av_get_media_type_string(type);
+        return AVERROR(ENOMEM);
+    }
+    ret = avcodec_parameters_to_context(*decCtx, st->codecpar);
+    if (ret < 0)
+    {
+        qWarning() << "Failed to copy codec parameters to decoder context" << av_get_media_type_string(type);
+        return ret;
+    }
+    av_dict_set(&opts, "refcounted_frames", gRefCount ? "1" : "0", 0);
"1" : "0", 0); + + ret = avcodec_open2(*decCtx, codec, &opts); + if (ret < 0) + { + qWarning() << "Failed to open codec " << av_get_media_type_string(type); + return ret; + } + streamIndex = index; + return 0; +} +void VideoDecoder::init() +{ + avformat_network_init(); +} + +void VideoDecoder::uninit() +{ + avformat_network_deinit(); +} + +void VideoDecoder::stopplay() +{ + isStop = true; +} + +void VideoDecoder::load(const QString& file) +{ + isStop = false; + int ret = 0; + AVDictionary* options = NULL; + av_dict_set(&options, "rtsp_transport", "tcp", 0); + ret = avformat_open_input(&m_fmtCtx, file.toStdString().data(), nullptr, &options); + if (0 > ret) + { + qWarning() << "open url error"; + outError(ret); + return; + } + ret = avformat_find_stream_info(m_fmtCtx, nullptr); + if (0 > ret) + { + qWarning() << "find stream failed"; + outError(ret); + return; + } + ret = open_codec_context(m_videoStreamIndex, &m_videoCodecCtx, m_fmtCtx, AVMEDIA_TYPE_VIDEO); + if (ret < 0) + { + qWarning() << "open_codec_context failed"; + return; + } + m_videoStream = m_fmtCtx->streams[m_videoStreamIndex]; + m_width = m_videoCodecCtx->width; + m_height = m_videoCodecCtx->height; + m_pixFmt = m_videoCodecCtx->pix_fmt; + + emit videoInfoReady(m_width, m_height, m_pixFmt); + + av_dump_format(m_fmtCtx, 0, file.toStdString().data(), 0); + do { + if (!m_videoStream) + { + qWarning() << "Could not find audio or video stream in the input, aborting"; + break; + } + m_frame = av_frame_alloc(); + if (!m_frame) + { + qWarning() << "Could not allocate frame"; + break; + } + readframe(); + } while (0); + avcodec_free_context(&m_videoCodecCtx); + avformat_close_input(&m_fmtCtx); + av_frame_free(&m_frame); + emit videoFrameDataFinish(); + +} + + + +void VideoDecoder::readframe() +{ + AVPacket* pPacket = av_packet_alloc(); + AVFrame* pFrame = av_frame_alloc(); + + int width = m_width; + int height = m_height; + + SwsContext* sws_conotext = sws_getContext(m_width, m_height, m_pixFmt, + width, height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL); + AVFrame* yuvFrame = av_frame_alloc(); + + int size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, width, height, 1); + uint8_t* buff = (uint8_t*)av_malloc(size); + av_image_fill_arrays(yuvFrame->data, yuvFrame->linesize, buff, AV_PIX_FMT_RGB24, width, height, 1); + yuvFrame->width = width; + yuvFrame->height = height; + int16_t* outputBuffer = NULL; + while (av_read_frame(m_fmtCtx, pPacket) == 0 && !isStop) + { + if (pPacket->stream_index == m_videoStreamIndex) + { + if (avcodec_send_packet(m_videoCodecCtx, pPacket)) + { + printf("error send packet!"); + continue; + } + if (!avcodec_receive_frame(m_videoCodecCtx, pFrame)) + { + sws_scale(sws_conotext, pFrame->data, pFrame->linesize, 0, height, yuvFrame->data, yuvFrame->linesize); + emit videoFrameDataReady(yuvFrame->data[0], yuvFrame->width, yuvFrame->height); + } + } + av_packet_unref(pPacket); + } +} + +//--------------------------------------------------------------------------------// +VideoDecoderController::VideoDecoderController(QObject* parent) : QObject(parent) +{ + m_decoder = new VideoDecoder; + m_decoder->moveToThread(&m_thread); + connect(&m_thread, &QThread::finished, m_decoder, &VideoDecoder::deleteLater); + connect(this, &VideoDecoderController::init, m_decoder, &VideoDecoder::init); + connect(this, &VideoDecoderController::uninit, m_decoder, &VideoDecoder::uninit); + connect(this, &VideoDecoderController::load, m_decoder, &VideoDecoder::load); + + connect(m_decoder, &VideoDecoder::videoFrameDataReady, this, 
+    connect(m_decoder, &VideoDecoder::videoFrameDataFinish, this, &VideoDecoderController::videoFrameDataFinish);
+
+    m_thread.start();
+    // emit init();
+}
+
+VideoDecoderController::~VideoDecoderController()
+{
+    if (m_thread.isRunning())
+    {
+        emit uninit();
+        m_thread.quit();
+        m_thread.wait();
+    }
+}
+
+void VideoDecoderController::startThread()
+{
+    //m_thread.start();
+    emit init();
+}
+
+void VideoDecoderController::stopThread()
+{
+    //emit uninit();
+}
+
+void VideoDecoderController::stopplay()
+{
+    m_decoder->stopplay();
+}
\ No newline at end of file
diff --git a/impl/VideoDecoder.h b/impl/VideoDecoder.h
new file mode 100644
index 0000000..ab75b1f
--- /dev/null
+++ b/impl/VideoDecoder.h
@@ -0,0 +1,78 @@
+#ifndef PLAYER_FFMPEG_VIDEO_DECODER_H_
+#define PLAYER_FFMPEG_VIDEO_DECODER_H_
+
+extern "C"
+{
+#define __STDC_CONSTANT_MACROS
+
+#include <libavformat/avformat.h>
+#include <libavcodec/avcodec.h>
+#include <libavutil/avutil.h>
+#include <libavutil/imgutils.h>
+#include <libswscale/swscale.h>
+}
+
+#include <QObject>
+#include <QString>
+#include <QThread>
+#include <atomic>
+
+class VideoDecoder : public QObject
+{
+    Q_OBJECT
+public:
+    void stopplay();
+
+public slots:
+    void init();
+    void uninit();
+    void load(const QString& file);
+
+signals:
+    void videoInfoReady(int width, int height, int format);
+    void videoFrameDataReady(unsigned char* data, int width, int height);
+    void videoFrameDataFinish();
+
+protected:
+    void demuxing();
+    void decodeFrame();
+    void readframe();
+
+private:
+    AVFormatContext* m_fmtCtx = nullptr;
+    AVCodecContext* m_videoCodecCtx = nullptr;
+    AVStream* m_videoStream = nullptr;
+    AVFrame* m_frame = nullptr;
+    AVPacket m_packet;
+    int m_videoStreamIndex = 0;
+
+    AVPixelFormat m_pixFmt = AV_PIX_FMT_NONE;
+    int m_width = 0, m_height = 0;
+    std::atomic<bool> isStop{ false };  // written by stopplay() from the GUI thread, read in the decoder thread
+};
+
+class VideoDecoderController : public QObject
+{
+    Q_OBJECT
+public:
+    VideoDecoderController(QObject* parent = nullptr);
+    ~VideoDecoderController();
+
+    void startThread();
+    void stopThread();
+    void stopplay();
+
+signals:
+    void init();
+    void uninit();
+    void pause(bool);
+    void load(const QString& file);
+
+    void videoInfoReady(int width, int height, int format);
+    void videoFrameDataReady(unsigned char* data, int width, int height);
+    void videoFrameDataFinish();
+
+private:
+    VideoDecoder* m_decoder = nullptr;
+    QThread m_thread;
+};
+
+#endif // !PLAYER_FFMPEG_VIDEO_DECODER_H_
diff --git a/impl/qmlplayer.cpp b/impl/qmlplayer.cpp
new file mode 100644
index 0000000..ea98f9f
--- /dev/null
+++ b/impl/qmlplayer.cpp
@@ -0,0 +1,156 @@
+#include "cxffmpeg/qmlplayer.h"
+#include <QPainter>
+#include <QTimer>
+#include <QAudioOutput>
+#include <QDebug>
+#include "VideoDecoder.h"
+
+QMLPlayer::QMLPlayer(QQuickItem *parent)
+    : QQuickPaintedItem(parent)
+{
+    m_decoderController = new VideoDecoderController(this);
+    connect(m_decoderController, &VideoDecoderController::videoFrameDataReady, this, &QMLPlayer::onVideoFrameDataReady);
+    connect(m_decoderController, &VideoDecoderController::videoFrameDataFinish, this, &QMLPlayer::onVideoFrameDataFinish);
+    m_linkState = false;
+}
+
+QMLPlayer::~QMLPlayer()
+{
+    if (m_decoderController)
+    {
+        delete m_decoderController;
+    }
+}
+
+void QMLPlayer::paint(QPainter *painter)
+{
+    if (!image.isNull())
+    {
+        int imageH = image.height();
+        int imageW = image.width();
+        int screenH = this->height();
+        int screenW = this->width();
+
+        int scaledH = this->height();
+        int scaledW = this->width();
+
+        int offsetX = 0;
+        int offsetY = 0;
+
+        // Fit the frame into the item while keeping its aspect ratio, centring it on the unused axis.
+        if (imageW > imageH)
+        {
+            scaledH = imageH * screenW / imageW;
+            if (scaledH > screenH)
+            {
+                scaledH = screenH;
+                scaledW = imageW * scaledH / imageH;
+
+                offsetX = (screenW - scaledW) / 2;
+                offsetY = 0;
+            }
+            else
+            {
+                offsetX = 0;
+                offsetY = (screenH - scaledH) / 2;
+            }
+        }
+        else
+        {
+            scaledW = imageW * scaledH / imageH;
+            if (scaledW > screenW)
+            {
+                scaledW = screenW;
+                scaledH = imageH * screenW / imageW;
+                offsetX = 0;
+                offsetY = (screenH - scaledH) / 2;
+            }
+            else
+            {
+                offsetX = (screenW - scaledW) / 2;
+                offsetY = 0;
+            }
+        }
+
+        QImage img = image.scaled(scaledW, scaledH);
+        painter->drawImage(QPoint(offsetX, offsetY), img);
+    }
+    else
+    {
+        // No frame yet: paint a black placeholder.
+        image = QImage(this->width(), this->height(), QImage::Format_RGB888);
+        image.fill(Qt::black);
+        painter->drawImage(QPoint(0, 0), image);
+    }
+}
+
+void QMLPlayer::rowVideoData(unsigned char *data, int width, int height)
+{
+    // Deep-copy the frame: the pointer refers to a buffer owned and reused by the
+    // decoder thread. The decoder fills it with a stride of width * 3 bytes
+    // (alignment 1), so pass bytesPerLine explicitly.
+    image = QImage(data, width, height, width * 3, QImage::Format_RGB888).copy();
+}
+
+void QMLPlayer::rowAudioData(unsigned char *data, unsigned int size)
+{
+    audioOutputIO->write((const char *)data, size);
+}
+
+QString QMLPlayer::getUrl() const
+{
+    return url;
+}
+
+void QMLPlayer::setUrl(const QString &value)
+{
+    url = value;
+}
+
+void QMLPlayer::start(QString urlStr)
+{
+    // Repaint at roughly 25 fps while the stream is playing.
+    QTimer* timer = new QTimer(this);
+    timer->setInterval(40);
+    connect(timer, &QTimer::timeout, this, [this]() { update(); });
+    timer->start();
+
+    QAudioFormat format;
+    // Set up the format, e.g. 48 kHz, 16-bit stereo PCM.
+    format.setSampleRate(48000);
+    format.setChannelCount(2);
+    format.setSampleSize(16);
+    format.setCodec("audio/pcm");
+    format.setByteOrder(QAudioFormat::LittleEndian);
+    format.setSampleType(QAudioFormat::UnSignedInt);
+
+    QAudioOutput *audio_output = new QAudioOutput(format, this);
+    audioOutputIO = audio_output->start();
+
+    m_decoderController->startThread();  // emits init() so avformat_network_init() runs in the decoder thread
+    emit m_decoderController->load(urlStr);
+}
+
+void QMLPlayer::stop()
+{
+    qDebug() << "QMLPlayer::stop()";
+    m_decoderController->stopplay();
+}
+
+void QMLPlayer::onVideoFrameDataReady(unsigned char* data, int width, int height)
+{
+    //qDebug() << "onVideoFrameDataReady data.width:" << width << " data.height:" << height;
+    rowVideoData(data, width, height);
+    m_linkState = true;
+
+    emit sigVideoFrameDataReady();
+}
+
+void QMLPlayer::onVideoFrameDataFinish()
+{
+    // Clear the last decoded frame once the stream has finished.
+    image.fill(Qt::black);
+    update();
+}
+
+bool QMLPlayer::getLinkState()
+{
+    return m_linkState;
+}
diff --git a/interface/cxffmpeg/interface.h b/interface/cxffmpeg/interface.h
new file mode 100644
index 0000000..978400e
--- /dev/null
+++ b/interface/cxffmpeg/interface.h
@@ -0,0 +1,11 @@
+#ifndef PLAYER_FFMPEG_VIDEO_FFMPEG_EXPORT_H_
+#define PLAYER_FFMPEG_VIDEO_FFMPEG_EXPORT_H_
+#include <QtGlobal>
+
+#if defined(PLAYER_FFMPEG_DLL)
+# define PLAYER_FFMPEG_API Q_DECL_EXPORT
+#else
+# define PLAYER_FFMPEG_API Q_DECL_IMPORT
+#endif
+
+#endif // PLAYER_FFMPEG_VIDEO_FFMPEG_EXPORT_H_
diff --git a/interface/cxffmpeg/qmlplayer.h b/interface/cxffmpeg/qmlplayer.h
new file mode 100644
index 0000000..a837631
--- /dev/null
+++ b/interface/cxffmpeg/qmlplayer.h
@@ -0,0 +1,43 @@
+#ifndef QMLPLAYER_H
+#define QMLPLAYER_H
+
+#include <QQuickPaintedItem>
+#include <QImage>
+#include "cxffmpeg/interface.h"
+
+class VideoDecoderController;
+class PLAYER_FFMPEG_API QMLPlayer : public QQuickPaintedItem
+{
+    Q_OBJECT
+public:
+    QMLPlayer(QQuickItem *parent = nullptr);
+    ~QMLPlayer();
+
+    QString getUrl() const;
+    void setUrl(const QString &value);
+
+    Q_INVOKABLE void start(QString urlStr);
+    Q_INVOKABLE void stop();
+    Q_INVOKABLE bool getLinkState();
+
+private:
+    void paint(QPainter *painter) override;
+    void rowVideoData(unsigned char *data, int width, int height);
+    void rowAudioData(unsigned char *data, unsigned int size);
+
+protected slots:
+    void onVideoFrameDataReady(unsigned char* data, int width, int height);
+    void onVideoFrameDataFinish();
+
+signals:
+    void sigVideoFrameDataReady();
+
+private:
+    QImage image;
+    QString url;
+    QIODevice *audioOutputIO = nullptr;
+    VideoDecoderController* m_decoderController = nullptr;
+    bool m_linkState = false;
+};
+
+#endif // QMLPLAYER_H
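
The patch exports QMLPlayer from the qcxffmpeg DLL but never registers the type with the QML engine, so registration presumably lives in the consuming application. A minimal sketch of such a registration is shown below; the module URI "CxFFmpeg" and the 1.0 version are placeholder assumptions, not names taken from this patch.

    // In the application that links against qcxffmpeg, before the QML scene is loaded:
    #include <QtQml>
    #include "cxffmpeg/qmlplayer.h"

    qmlRegisterType<QMLPlayer>("CxFFmpeg", 1, 0, "QMLPlayer");  // placeholder URI and version

    // Hypothetical QML usage:
    //   import CxFFmpeg 1.0
    //   QMLPlayer { id: player; anchors.fill: parent }
    //   ... player.start("rtsp://...") to begin playback, player.stop() to end it.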