FFmpeg 4 Tutorial 12: Using Intel Media SDK (QSV) hardware decoding + displaying AV_PIX_FMT_NV12 with Qt5 OpenGL

Tags: ffmpeg

https://blog.csdn.net/java_lilin/article/details/86527478

Discussion group: 261074724
1. Install the Intel Media SDK version that matches your processor generation (codename).
2. The code below is adapted from qsvdec.c in FFmpeg's official examples.
If the compiler cannot find the #include <mfx/mfxvideo.h> header, rename the include directory under the SDK installation to mfx and copy it into the project.
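
Before adapting the full example it is worth confirming that FFmpeg can actually open a QSV device on your machine. A minimal sketch (not part of the original article; it assumes your FFmpeg build was configured with --enable-libmfx):

#include <cstdio>
extern "C" {
#include "libavutil/hwcontext.h"
#include "libavutil/error.h"
}

int main()
{
    AVBufferRef *dev = NULL;
    /* "auto" lets FFmpeg choose a hardware or software MFX implementation */
    int ret = av_hwdevice_ctx_create(&dev, AV_HWDEVICE_TYPE_QSV, "auto", NULL, 0);
    if (ret < 0) {
        char buf[256];
        av_strerror(ret, buf, sizeof(buf));
        std::fprintf(stderr, "QSV device creation failed: %s\n", buf);
        return 1;
    }
    std::fprintf(stderr, "QSV device created successfully\n");
    av_buffer_unref(&dev);
    return 0;
}

If this fails, the h264_qsv decoder used below will fail the same way, so fix the driver/Media SDK installation first.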

 

The main addition is converting the decoded frames to RGB24 and displaying them in a window (the code uses BGR24 to match the Windows DIB byte order).


#include "pch.h"
#include <iostream>
#include <opencv2/core/utility.hpp> 
#include <opencv2/opencv.hpp>
#include <windows.h>

extern "C" {
#include "libavcodec/avcodec.h" 
#include "libavformat/avformat.h"
#include "libavformat/avio.h"
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libavutil/audio_fifo.h"  
#include "libavutil/time.h"
#include "libavutil/mathematics.h"
#include "libavutil/channel_layout.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libavutil/opt.h"  

#include "libavutil/mem.h"
#include "libavutil/buffer.h"
#include "libavutil/error.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#define HAVE_STRUCT_TIMESPEC
#include "pthread.h" 
}
 
typedef struct DecodeContext {
    AVBufferRef *hw_device_ref;
} DecodeContext;
static AVPixelFormat get_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts)
{
    while (*pix_fmts != AV_PIX_FMT_NONE) {
        if (*pix_fmts == AV_PIX_FMT_QSV) {
            DecodeContext *decode =(DecodeContext *) avctx->opaque;
            AVHWFramesContext  *frames_ctx;
            AVQSVFramesContext *frames_hwctx;
            int ret;

            /* create a pool of surfaces to be used by the decoder */
            avctx->hw_frames_ctx = av_hwframe_ctx_alloc(decode->hw_device_ref);
            if (!avctx->hw_frames_ctx)
                return AV_PIX_FMT_NONE;
            frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
            frames_hwctx =(AVQSVFramesContext *) frames_ctx->hwctx;

            frames_ctx->format = AV_PIX_FMT_QSV;
            frames_ctx->sw_format = avctx->sw_pix_fmt;
            frames_ctx->width = FFALIGN(avctx->coded_width, 32);
            frames_ctx->height = FFALIGN(avctx->coded_height, 32);
            frames_ctx->initial_pool_size = 32;

            frames_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

            ret = av_hwframe_ctx_init(avctx->hw_frames_ctx);
            if (ret < 0)
                return AV_PIX_FMT_NONE;

            return AV_PIX_FMT_QSV;
        }

        pix_fmts++;
    }

    fprintf(stderr, "The QSV pixel format not offered in get_format()\n");

    return AV_PIX_FMT_NONE;
}
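
As a side check (not in the original code), you can confirm that the h264_qsv decoder in your libavcodec build actually advertises AV_PIX_FMT_QSV before relying on the get_format() callback above. A sketch, assuming FFmpeg 4.0 or later:

/* Enumerate the hardware configurations a decoder advertises.
   Needs "libavutil/pixdesc.h" for av_get_pix_fmt_name(). */
static void print_hw_configs(const AVCodec *codec)
{
    for (int i = 0;; i++) {
        const AVCodecHWConfig *cfg = avcodec_get_hw_config(codec, i);
        if (!cfg)
            break;
        fprintf(stderr, "hw config %d: pix_fmt=%s, device_type=%s\n", i,
                av_get_pix_fmt_name(cfg->pix_fmt),
                av_hwdevice_get_type_name(cfg->device_type));
    }
}

Calling print_hw_configs(avcodec_find_decoder_by_name("h264_qsv")) should list an entry whose pix_fmt is qsv.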
static void Show(HWND hwnd, unsigned char* rgb, int w, int h, bool fill)
{
    HDC hdc = GetDC(hwnd);              // get the window's display device context

    RECT rect;
    GetClientRect(hwnd, &rect);
    int cxClient = rect.right;
    int cyClient = rect.bottom;

    if (cxClient <= 0 || cyClient <= 0) {
        ReleaseDC(hwnd, hdc);           /* release the DC before the early return to avoid a leak */
        return;
    }

    HDC  hdcsource = CreateCompatibleDC(NULL);              // create an off-screen DC used as a back buffer
    HBITMAP bitmap = CreateCompatibleBitmap(hdc, cxClient, cyClient);

    SelectObject(hdcsource, bitmap);    // select the bitmap into the back-buffer DC


    SetStretchBltMode(hdcsource, COLORONCOLOR);

    BITMAPINFO  bmi = { 0 };            /* zero-initialize so the unused header fields stay 0 */
    bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    bmi.bmiHeader.biWidth = w;
    bmi.bmiHeader.biHeight = -h;
    bmi.bmiHeader.biCompression = BI_RGB;
    bmi.bmiHeader.biBitCount = 24;
    bmi.bmiHeader.biPlanes = 1;
    bmi.bmiHeader.biClrUsed = 0;
    bmi.bmiHeader.biClrImportant = 0;
    bmi.bmiHeader.biSizeImage = 0;


    if (!fill) {

        int des_x = 0;
        int des_y = 0;
        int des_w = 0;
        int des_h = 0;


        if (1.0*cxClient / cyClient > 1.0*w / h) {
            des_h = cyClient;
            des_w = des_h * w / h;
            des_x = (cxClient - des_w) / 2;
            des_y = 0;
        }
        else {
            des_w = cxClient;
            des_h = des_w * h / w;
            des_x = 0;
            des_y = (cyClient - des_h) / 2;
        }


        PatBlt(hdcsource, 0, 0, cxClient, cyClient, BLACKNESS);    /* clear the back buffer so the letterbox borders are black */
        StretchDIBits(hdcsource, des_x, des_y, des_w, des_h, \
            0, 0, w, h, rgb, &bmi, DIB_RGB_COLORS, SRCCOPY);
        BitBlt(hdc, 0, 0, cxClient, cyClient, hdcsource, 0, 0, SRCCOPY);
    }
    else {
        StretchDIBits(hdcsource, 0, 0, rect.right - rect.left, rect.bottom - rect.top, \
            0, 0, w, h, rgb, &bmi, DIB_RGB_COLORS, SRCCOPY);

        BitBlt(hdc, 0, 0, cxClient, cyClient, hdcsource, 0, 0, SRCCOPY);    // copy the back buffer to the screen
    }

    DeleteObject(bitmap);
    DeleteDC(hdcsource);
    ReleaseDC(hwnd, hdc);
}
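
/* Note (added): StretchDIBits assumes each BGR24 scanline is padded to a multiple of 4
   bytes, i.e. a stride of ((w * 3 + 3) & ~3). The BGR24 AVFrame allocated below may use a
   different linesize for some widths, so for arbitrary resolutions it is safer to copy the
   image row by row using bgrFrame->linesize[0] before handing it to Show(). */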

LRESULT CALLBACK WinProc(HWND hwnd, UINT umsg, WPARAM wparam, LPARAM lparam)
{
    switch (umsg)
    {
     
        case WM_DESTROY:
            PostQuitMessage(0);
            return 0;
    }
    return DefWindowProc(hwnd, umsg, wparam, lparam);
}  


static int decode_packet(DecodeContext *decode, AVCodecContext *decoder_ctx,
    AVFrame *frame, AVFrame *sw_frame,
    AVPacket *pkt, AVIOContext *output_ctx,HWND hwnd , SwsContext *img_convert_ctx, AVFrame *bgrFrame)
{
    int ret = 0;
    ret = avcodec_send_packet(decoder_ctx, pkt);
    if (ret < 0) {
        fprintf(stderr, "Error during decoding\n");
        return ret;
    }
    while (ret >= 0) {
        int i, j;
        ret = avcodec_receive_frame(decoder_ctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            break;
        else if (ret < 0) {
            fprintf(stderr, "Error during decoding\n");
            return ret;
        } 
         
        /* transfer the decoded surface from GPU memory: AV_PIX_FMT_QSV -> AV_PIX_FMT_NV12 */
        ret = av_hwframe_transfer_data(sw_frame, frame, 0);
        if (ret < 0) {
            fprintf(stderr, "Error transferring the data to system memory\n");
            goto fail;
        }    
        sws_scale(img_convert_ctx, (const unsigned char* const*)sw_frame->data, sw_frame->linesize, 0, sw_frame->height, bgrFrame->data, bgrFrame->linesize);
         
        Show(hwnd, bgrFrame->data[0], bgrFrame->width, bgrFrame->height, true);
        
         
    fail:
        av_frame_unref(sw_frame);
        av_frame_unref(frame);
        if (ret < 0)
            return ret;
    }
    return 0;
}

static AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
    AVFrame *picture;
    int ret;

    picture = av_frame_alloc();
    if (!picture)
        return NULL;

    picture->format = pix_fmt;
    picture->width = width;
    picture->height = height;

    /* allocate the buffers for the frame data */
    ret = av_frame_get_buffer(picture, 4);
    if (ret < 0) {
        fprintf(stderr, "Could not allocate frame data.\n");
        return NULL;
    }

    return picture;
}
void *VideoReadThread1(void*p) { 
    HWND hwnd =(HWND) p;
    int ret = 0;
    const char*url = "F:\\source\\ffmpge\\mux\\1.mp4";
    AVFormatContext* pInputFormatCtx = avformat_alloc_context();
    AVStream *video_st = NULL;

    DecodeContext decode = { NULL };
    const AVCodec *decoder;
    AVCodecContext *decoder_ctx = NULL;
    AVPacket pkt = { 0 }; av_init_packet(&pkt);
    if ((ret = avformat_open_input(&pInputFormatCtx, url, NULL, NULL)) < 0) {
        printf("Could not open input file.");
        return 0;
    }
    if ((ret = avformat_find_stream_info(pInputFormatCtx, 0)) < 0) {
        printf("Failed to retrieve input stream information");
        return 0;
    }
     
    /* find the first H.264 video stream */
    for (int i = 0; i < pInputFormatCtx->nb_streams; i++) {
        AVStream *st = pInputFormatCtx->streams[i];
        if (st->codecpar->codec_id == AV_CODEC_ID_H264 && !video_st)
        { 
            video_st = st;
        }
        else
            st->discard = AVDISCARD_ALL;
    }
 
    AVCodecContext *envideocodecCtx = NULL;
    AVIOContext *output_ctx = NULL;

    AVFrame *frame = NULL, *sw_frame = NULL;

    if (!video_st) {
        fprintf(stderr, "No H.264 video stream in the input file\n");
        avformat_close_input(&pInputFormatCtx);
        return 0;
    }

    /* NV12 (the sw_format av_hwframe_transfer_data() produces for 8-bit H.264 over QSV) -> BGR24 for display */
    SwsContext *img_convert_ctx = sws_getContext(video_st->codecpar->width, video_st->codecpar->height,
        AV_PIX_FMT_NV12, video_st->codecpar->width, video_st->codecpar->height, AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
    AVFrame *bgrFrame = alloc_picture(AV_PIX_FMT_BGR24, video_st->codecpar->width, video_st->codecpar->height);

    /* open the hardware device */
    ret = av_hwdevice_ctx_create(&decode.hw_device_ref, AV_HWDEVICE_TYPE_QSV, "auto", NULL, 0);
    if (ret < 0) {
        fprintf(stderr, "Cannot open the hardware device\n");
        goto finish;
    }
    /* initialize the decoder */
    decoder = avcodec_find_decoder_by_name("h264_qsv");
    if (!decoder) {
        fprintf(stderr, "The QSV decoder is not present in libavcodec\n");
        goto finish;
    }
    decoder_ctx = avcodec_alloc_context3(decoder);
    if (!decoder_ctx) {
        ret = AVERROR(ENOMEM);
        goto finish;
    }
    decoder_ctx->codec_id = AV_CODEC_ID_H264;
    if (video_st->codecpar->extradata_size) {
        decoder_ctx->extradata = (uint8_t *)av_mallocz(video_st->codecpar->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!decoder_ctx->extradata) {
            ret = AVERROR(ENOMEM);
            goto finish;
        }
        memcpy(decoder_ctx->extradata, video_st->codecpar->extradata,
            video_st->codecpar->extradata_size);
        decoder_ctx->extradata_size = video_st->codecpar->extradata_size;
    }
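    /* Alternative (not in the original article): avcodec_parameters_to_context(decoder_ctx,
       video_st->codecpar) copies extradata together with the other stream parameters in a
       single call; the manual copy above mirrors FFmpeg's qsvdec.c example. */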

    decoder_ctx->opaque = &decode;
    decoder_ctx->get_format = get_format;

    ret = avcodec_open2(decoder_ctx, NULL, NULL);
    if (ret < 0) {
        fprintf(stderr, "Error opening the decoder: ");
        goto finish;
    }
    av_dump_format(pInputFormatCtx, 0, url, 0);
     
    frame = av_frame_alloc();
    sw_frame = av_frame_alloc();
    if (!frame || !sw_frame) {
        ret = AVERROR(ENOMEM);
        goto finish;
    }


    time_t tt;                      /* take a timestamp before decoding, for a rough benchmark */
    struct tm t;
    time(&tt);
    localtime_s(&t, &tt);
    printf("%d-%02d-%02d %02d:%02d:%02d\n",
        t.tm_year + 1900,
        t.tm_mon + 1,
        t.tm_mday,
        t.tm_hour,
        t.tm_min,
        t.tm_sec);

    
    /* actual decoding */
    while (ret >= 0) {
        ret = av_read_frame(pInputFormatCtx, &pkt);
        if (ret < 0)
            break;
        if (pkt.stream_index == video_st->index)
            ret = decode_packet(&decode, decoder_ctx, frame, sw_frame, &pkt, output_ctx, hwnd, img_convert_ctx, bgrFrame);
        av_packet_unref(&pkt);
    }

    /* flush the decoder: a packet with NULL data drains the remaining frames */
    pkt.data = NULL;
    pkt.size = 0;
    ret = decode_packet(&decode, decoder_ctx, frame, sw_frame, &pkt, output_ctx, hwnd, img_convert_ctx, bgrFrame);

    time_t tt1;                     /* timestamp after decoding */
    struct tm t1;
    time(&tt1);
    localtime_s(&t1, &tt1);
    printf("%d-%02d-%02d %02d:%02d:%02d\n",
        t1.tm_year + 1900,
        t1.tm_mon + 1,
        t1.tm_mday,
        t1.tm_hour,
        t1.tm_min,
        t1.tm_sec);

finish:
    if (ret < 0) {
        char buf[1024];
        av_strerror(ret, buf, sizeof(buf));
        fprintf(stderr, "%s\n", buf);
    }

    avformat_close_input(&pInputFormatCtx);

    av_frame_free(&frame);
    av_frame_free(&sw_frame);
    av_frame_free(&bgrFrame);
    sws_freeContext(img_convert_ctx);

    avcodec_free_context(&decoder_ctx);

    av_buffer_unref(&decode.hw_device_ref);

    avio_close(output_ctx);
 
    return 0;
}

int main()
{
    HINSTANCE hInstance;
    hInstance = GetModuleHandle(NULL);
    WNDCLASSEX wce = { 0 };
    wce.cbSize = sizeof(wce);
    wce.cbClsExtra = 0;
    wce.cbWndExtra = 0;
    wce.hbrBackground = (HBRUSH)(COLOR_WINDOW + 1);
    wce.hCursor = NULL;
    wce.hIcon = NULL;
    wce.hIconSm = NULL;
    wce.hInstance = hInstance;
    wce.lpfnWndProc = WinProc;
    wce.lpszClassName = L"Main";
    wce.lpszMenuName = NULL;
    wce.style = CS_HREDRAW | CS_VREDRAW;
    ATOM nAtom = RegisterClassEx(&wce);
    if (!nAtom)
    {
        MessageBox(NULL, L"RegisterClassEx failed", L"Error", MB_OK);
        return 0;
    }
    const char*  szStr1 = "Main";
    WCHAR wszClassName[256];
    memset(wszClassName, 0, sizeof(wszClassName));
    MultiByteToWideChar(CP_ACP, 0, szStr1, strlen(szStr1) + 1, wszClassName,
        sizeof(wszClassName) / sizeof(wszClassName[0]));
    HWND hwnd = CreateWindow(wszClassName, L"Video Playback", WS_OVERLAPPEDWINDOW, 38, 20, 640, 480, NULL, NULL, hInstance, NULL);
    // show the window
    ShowWindow(hwnd, SW_SHOW);
    // update the window
    UpdateWindow(hwnd);

    //init

    pthread_t t1;
    pthread_create(&t1, NULL, VideoReadThread1, hwnd);
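    /* Note (added): the decode thread is never joined; closing the window ends the process
       while the thread may still be decoding. A more robust version would signal the thread
       to stop and call pthread_join(t1, NULL) before returning. */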

    // message loop
    MSG msg;
    while (GetMessage(&msg, NULL, 0, 0))
    {
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    } 
    return 0;
}
Test results

Hardware decoding of an 88 MB 1280x720 file: 1 min 41 s, memory usage around 250 MB.
Software decoding of the same file: 1 min 31 s, memory usage around 90 MB.

Inside Qt the BGR conversion can be skipped entirely and the NV12 frames rendered directly with OpenGL.
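
As a rough illustration of that approach (not from the original article; the texture setup and the exact YUV coefficients are assumptions, here BT.601 limited range), the two NV12 planes can be uploaded as separate textures, a single-channel one for Y and a two-channel one for the interleaved UV plane at half resolution, and converted to RGB in a fragment shader:

// Sketch only: NV12 -> RGB fragment shader kept as a C++ string, e.g. for QOpenGLShaderProgram.
// texY is a GL_R8 texture holding the Y plane, texUV a GL_RG8 texture holding the UV plane.
static const char *kNv12FragmentShader = R"(
    #version 330 core
    in vec2 texCoord;
    out vec4 fragColor;
    uniform sampler2D texY;
    uniform sampler2D texUV;
    void main()
    {
        float y  = (texture(texY, texCoord).r - 16.0 / 255.0) * (255.0 / 219.0); // expand limited range
        vec2  uv = texture(texUV, texCoord).rg - vec2(0.5);
        fragColor = vec4(y + 1.596 * uv.y,
                         y - 0.391 * uv.x - 0.813 * uv.y,
                         y + 2.018 * uv.x,
                         1.0);
    }
)";

Full-range or BT.709 material needs different constants, and the textures would be filled from sw_frame->data[0] and sw_frame->data[1] while respecting sw_frame->linesize.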

 

 

Discussion group: 261074724
---------------------
Author: 月下家宴
Source: CSDN
Original: https://blog.csdn.net/java_lilin/article/details/86527478
Copyright notice: this is the author's original work; please include a link to the original post when reposting.

