A Deep Dive into RK3568 Hardware Decoding with Fused Display in a Qt UI

Lately I've had a real headache because the project switched platforms. After a lot of back and forth, as of today I've finally worked through most of the pitfalls.

There are several ways to implement hardware decoding on the RK platform:

1) OpenCV + GStreamer plugins with hardware decoding; this only yields decoded images and cannot extract audio

2) FFmpeg + Rockchip's MPP, converting the output to cv::Mat or QImage

3) The rkmpp hardware decoders (h264_rkmpp/hevc_rkmpp) built into FFmpeg 6.0.x + DRM rendering

4) The Rockit rendering framework

I implemented essentially all four. The process was genuinely painful: I lost a pile of hair and cursed the Rockit vendor countless times. The Rockit approach gives the best and most complete results; it is also the hardest and the most general.

Enough talk; here's the code:

Approach 1:

#include "GstDecoder.h"
#include "LogUtils.h"
using namespace phm_decoder;
GstDecoder::GstDecoder(std::string root_ftp_url, std::string local_ftp_path)
    : root_ftp_url_(root_ftp_url), local_ftp_path_(local_ftp_path) {}
GstDecoder::~GstDecoder() {}

void GstDecoder::decode(const std::string &rtsp, const int &train_index, const int &car_index, const int &pos_index) {
    std::string pipeline = " rtspsrc  location=" + rtsp + " latency=1000 protocols=tcp  " +
                           "! rtph265depay "
                           "! h265parse  "
                           "! mppvideodec "
                           "! videoconvert "
                           "! video/x-raw,format=(string)BGR "
                           "! appsink ";

    Logger::info("pipeline : {}", pipeline);
    cv::VideoCapture video_capture;
    DecoderData data;
    data.online = false;
    data.decoder_state = false;
    data.train_index = train_index;
    data.car_index = car_index;
    data.pos_index = pos_index;
    Logger::info("GstDecoder decode train_index={}", train_index);

    data.device_type = 3;
    date_dirs_[rtsp] = get_cur_date() + "/";
    video_capture.open(pipeline, cv::CAP_GSTREAMER);
    int reop = 3;
    while (!video_capture.isOpened() && reop > 0) {
        Logger::error("video_capture open error reop times={}!", reop);
        std::unique_lock<std::mutex> lock(image_mutex_);
        images_.push(data);
        reop--;
        if (reop <= 0) {
            return;
        } else {

            std::this_thread::sleep_for(std::chrono::milliseconds(1000));
            video_capture.release();
            video_capture.open(pipeline, cv::CAP_GSTREAMER);
            Logger::error("video_capture open error return");
        }
    }

    cv::Mat src;
    int count = 0;
    int error_count = 0;
    // an I-frame should show up within the first 50 good frames
    while (count < 50) {
        // read from the camera, i.e. pull frames from the RTSP stream
        video_capture >> src;
        if (check_mat(src)) {
            count++;
        } else {
            error_count++;
            if (error_count > 75) {
                Logger::info("75 frames is error ,but is ok!");
                break;
            }
            // Logger::info("mat is green read rtsp: {}, count:{}!", rtsp, count);
        }
    }
    std::string time_str = get_cur_time();
    char car_index_str[32] = {0};
    snprintf(car_index_str, sizeof(car_index_str), "_%d_%d.jpg", car_index, pos_index);
    std::string filename = time_str + std::string(car_index_str);
    std::string dir_str = local_ftp_path_ + date_dirs_[rtsp];
    std::string mkdir_cmd = "mkdir " + dir_str + " -p";
    system(mkdir_cmd.c_str());
    std::string local_path = dir_str + filename;
    Logger::info("gst img local_path:{}!", local_path);
    cv::imwrite(local_path.c_str(), src);
    system("sync");
    data.mat = std::make_shared<cv::Mat>(std::move(src));

    data.ftp_path = root_ftp_url_ + date_dirs_[rtsp] + filename;
    data.online = true;
    data.decoder_state = true;
    Logger::info("data.ftp_path:{}!", data.ftp_path);
    // data.ftp_url = root_ftp_url + +date_dirs_[rtsp] + filename;
    std::unique_lock<std::mutex> lock(image_mutex_);
    images_.push(data);

    std::unique_lock<std::mutex> lock_c(decoder_mutex_);
    decoder_threads_.erase(rtsp);
}

bool GstDecoder::check_mat(const cv::Mat &mat) {
    // Sample a 200x200 ROI in the top-left corner; before the decoder produces a
    // valid picture, the frame is typically solid green.
    if (mat.empty() || mat.rows < 200 || mat.cols < 200 || mat.type() != CV_8UC3) {
        return false;
    }
    int gree_count = 0;
    int roi_col = 200;
    int roi_row = 200;

    int all_count = 10000;
    for (int y = 0; y < roi_row; ++y) {
        for (int x = 0; x < roi_col; ++x) {
            // read the pixel's BGR value
            cv::Vec3b pixel = mat.at<cv::Vec3b>(y, x);
            int b = (int)pixel[0];
            int g = (int)pixel[1];
            int r = (int)pixel[2];
            if (g < 150 && b <= 10 && r <= 10) {
                gree_count++;
                if (gree_count > all_count) {
                    return false;
                }
            }
        }
    }
    return true;
}

This pattern is fairly common in the Rockchip documentation. In my case I only need to grab a frame for detection, not display it, so there is no further processing; it is the simplest of the four. As for H.264 versus H.265, just change the GStreamer element names to match the camera configuration, as shown below.
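For example, for an H.264 camera the pipeline would look like this (a minimal variant of the one above: only the depay/parse elements change, and mppvideodec accepts both codecs):

    std::string pipeline = " rtspsrc  location=" + rtsp + " latency=1000 protocols=tcp  " +
                           "! rtph264depay "
                           "! h264parse  "
                           "! mppvideodec "
                           "! videoconvert "
                           "! video/x-raw,format=(string)BGR "
                           "! appsink ";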

Approach 2: treat this code as a rough sketch; part of the RTSP-opening logic is not included. The hard part of this approach is the dump_mat function, i.e. how to convert an MppFrame into a cv::Mat or QImage once you have it. For details you can also refer to the source another developer posted on GitHub: https://github.com/MUZLATAN/ffmpeg_rtsp_mpp/blob/master/main.cpp
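The code below only covers the cv::Mat path. If you also want a QImage, a minimal sketch (my addition, assuming the 3-channel BGR cv::Mat that dump_mat produces) is to wrap the buffer and deep-copy it:

#include <QImage>
#include <opencv2/opencv.hpp>

// Hypothetical helper: convert a BGR cv::Mat into a QImage.
// rgbSwapped() reorders BGR -> RGB and returns a deep copy, so the QImage
// stays valid after the cv::Mat is released.
QImage mat_to_qimage(const cv::Mat &mat) {
    if (mat.empty() || mat.type() != CV_8UC3) {
        return QImage();
    }
    QImage img(mat.data, mat.cols, mat.rows,
               static_cast<int>(mat.step), QImage::Format_RGB888);
    return img.rgbSwapped();
}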

Rk3568mpp.h

#pragma once


#include "Demultiplex.h"
#include "rockchip/rk_mpi.h"
#include <iostream>
#include <memory>
#include <mutex>
#include <opencv2/opencv.hpp>
#include <queue>
#include <thread>

#pragma pack(1)
// multicast 234.1.1.2, port 12306
typedef struct {
    unsigned char head;           // packet header, 0xFB
    unsigned char func;           // 0x01 = request image, 0x02 = image uploaded successfully
    unsigned char ip[16];         // LCD IP address
    unsigned char file_path[128]; // full path reported after the image is uploaded to the RK3568
    unsigned char tail;           // = 0xFC
} lcd_img_msg;

#pragma pack()

namespace phm_decoder {

class Rk3568mpp {
    // just for decode h264 or h265 video
  public:
    Rk3568mpp();
    ~Rk3568mpp();
    bool init(MppCodingType type = MPP_VIDEO_CodingHEVC);
    void quit();
    bool send_packet(AVPacket *pav_pack);
    bool read_frame(MppFrame &frame);

  private:
    MppCtx ctx_;
    MppApi *mpi_;

    MppDecCfg cfg_;
    MppBufferGroup frm_grp_;
};
} // namespace phm_decoder


Rk3568mpp.cpp

#include "Rk3568mpp.h"
#include "LogUtils.h"
extern "C" {
#include <string.h>
#include <unistd.h>
}
using namespace phm_decoder;
Rk3568mpp::Rk3568mpp() : ctx_(nullptr), cfg_(nullptr), frm_grp_(nullptr), mpi_(nullptr) {
    bool res = init();
    if (!res) {
        Logger::error("Decoder init error!");
        return;
    }

    Logger::info("Decoder init success!");
}

Rk3568mpp::~Rk3568mpp() {
    Logger::info(" ~Rk3568mpp!");
    quit();
}

bool Rk3568mpp::init(MppCodingType type) {

    MPP_RET ret = MPP_OK;
    RK_U32 need_split = 0;
    // split by h264 or h265 data type

    ret = mpp_create(&ctx_, &mpi_);
    if (ret) {
        Logger::error("mpp_create failed");
        quit();
        return false;
    }
    MppParam param = &need_split;
    ret = mpi_->control(ctx_, MPP_DEC_SET_PARSER_SPLIT_MODE, param);
    if (ret) {
        std::string info = fmt::format("Failed to set MPP_DEC_SET_PARSER_SPLIT_MODE :{} ,ret:{}", (int)ret);
        Logger::error(info);
        quit();
        return false;
    }

    int fast = 1;
    param = &fast;
    ret = mpi_->control(ctx_, MPP_DEC_SET_PARSER_FAST_MODE, param);
    if (ret) {
        std::string info = fmt::format("Failed to set MPP_DEC_SET_PARSER_SPLIT_MODE :{} ,ret:{}", (int)ret);
        Logger::error(info);
        quit();
        return false;
    }

    MppPollType timeout = MPP_POLL_NON_BLOCK;
    param = &timeout;

    ret = mpi_->control(ctx_, MPP_SET_OUTPUT_TIMEOUT, param);
    if (ret) {
        std::string info = fmt::format("Failed to set output timeout :{} ,ret:{}", (int)timeout, (int)ret);
        Logger::error(info);
        quit();
        return false;
    }

    // MppCodingType type = MPP_VIDEO_CodingAVC;
    ret = mpp_init(ctx_, MPP_CTX_DEC, type);
    if (ret) {
        std::string info = fmt::format("mpp_init failed type :{},ret:{}", (int)type, (int)ret);
        Logger::error(info);
        quit();
        return false;
    }

    // mpp_dec_cfg_init(&cfg_);

    /* get default config from decoder context */
    // ret = mpi_->control(ctx_, MPP_DEC_GET_CFG, cfg_);
    // if (ret) {
    //     std::string info = fmt::format("Failed mpi control ret:{}", (int)ret);
    //     Logger::error(info);
    //     quit();
    //     return false;
    // }

    // /*
    //  * split_parse is to enable mpp internal frame spliter when the input
    //  * packet is not aplited into frames.
    //  */
    // ret = mpp_dec_cfg_set_u32(cfg_, "base:split_parse", need_split);
    // if (ret) {

    //     std::string info = fmt::format("mpp_dec_cfg_set_u32ret ret:{}", (int)ret);
    //     Logger::error(info);
    //     quit();
    //     return false;
    // }

    // ret = mpi_->control(ctx_, MPP_DEC_SET_CFG, cfg_);
    // if (ret) {
    //     // mpp_err("%p failed to set cfg %p ret %d\n", ctx, cfg, ret);
    //     std::string info = fmt::format("mpp_dec_cfg_set_u32ret ret:{}", (int)ret);
    //     Logger::error(info);
    //     quit();
    //     return false;
    // }

    return true;
}

void Rk3568mpp::quit() {
    if (ctx_) {
        MPP_RET ret = mpi_->reset(ctx_);
        if (ret) {
            Logger::error("mpi->reset failed");
        }
    }

    if (ctx_) {
        mpp_destroy(ctx_);
        ctx_ = nullptr;
    }

    if (cfg_) {
        mpp_dec_cfg_deinit(cfg_);
        cfg_ = nullptr;
    }
    if (frm_grp_) {
        mpp_buffer_group_put(frm_grp_);
        frm_grp_ = nullptr;
    }
}

bool Rk3568mpp::send_packet(AVPacket *av_pack) {
    MPP_RET ret = MPP_OK;
    MppPacket mpp_packet = nullptr;
    std::string info_av = fmt::format("decode av packet size:{}", av_pack->size);
    Logger::info(info_av);
    ret = mpp_packet_init(&mpp_packet, av_pack->data, av_pack->size);
    if (ret < 0) {
        std::string init_inf = fmt::format("mpp_packet_init fail ret:{}", (int)ret);
        Logger::error(init_inf);
        return false;
    }

    Logger::info(fmt::format("mpp_packet_init success"));
    mpp_packet_set_pts(mpp_packet, av_pack->pts);

    int put_times = 2;
    bool put_res = false;
    do {
        MPP_RET ret = mpi_->decode_put_packet(ctx_, mpp_packet);
        if (MPP_OK == ret) {
            if (0 != mpp_packet_get_length(mpp_packet)) {
                Logger::error("put data error!");
            }
            Logger::info(fmt::format("decode_put_packet success"));
            put_res = true;
            break;
        }
        usleep(2 * 1000);
        put_times--;
        Logger::info(fmt::format("mpi_->decode_put_packet error{}", (int)ret));
    } while (put_times > 0);
    mpp_packet_deinit(&mpp_packet);
    return put_res;
}

bool Rk3568mpp::read_frame(MppFrame &frame) {
    int get_frame_error_time = 2;
    do {
        Logger::info("decode_get_frame start");
        RK_U32 frm_eos = 0;
        MPP_RET ret = mpi_->decode_get_frame(ctx_, &frame);
        if (ret) {
            std::string info = fmt::format("decodes_get_frame ret:{}", (int)ret);
            Logger::error(info);
            get_frame_error_time--;
            if (get_frame_error_time < 0) {
                break;
            }
            usleep(2 * 1000);
            continue;
        }

        if (!frame) {
            Logger::info("read frame error,need send new pack");
            return false;
        }
        Logger::info("read decode_get_frame  success!");

        if (mpp_frame_get_info_change(frame)) {
            Logger::info("mpp_frame_get_info_change");
            // found info change and create buffer group for decoding
            RK_U32 width = mpp_frame_get_width(frame);
            RK_U32 height = mpp_frame_get_height(frame);
            RK_U32 hor_stride = mpp_frame_get_hor_stride(frame);
            RK_U32 ver_stride = mpp_frame_get_ver_stride(frame);
            RK_U32 buf_size = mpp_frame_get_buf_size(frame);

            Logger::info("decode_get_frame get info changed found");

            if (!frm_grp_) {
                /* If buffer group is not set create one and limit it */
                ret = mpp_buffer_group_get_internal(&frm_grp_, MPP_BUFFER_TYPE_ION);
                if (ret) {
                    std::string error_info = fmt::format("get mpp buffer group failed:{}", (int)ret);
                    Logger::info(error_info);
                    break;
                }

                /* Set buffer to mpp decoder */
                ret = mpi_->control(ctx_, MPP_DEC_SET_EXT_BUF_GROUP, frm_grp_);
                if (ret) {
                    std::string error_info = fmt::format("set buffer group failed ret:{}", (int)ret);
                    Logger::info(error_info);
                    break;
                }
            } else {
                /* If old buffer group exist clear it */
                ret = mpp_buffer_group_clear(frm_grp_);
                if (ret) {
                    std::string error_info = fmt::format("clear buffer group failed ret:{}", (int)ret);
                    Logger::info(error_info);
                    break;
                }
            }

            /* Use limit config to limit buffer count to 24 */
            ret = mpp_buffer_group_limit_config(frm_grp_, buf_size, 24);
            if (ret) {
                std::string error_info = fmt::format("limit buffer group failed ret:{}", (int)ret);
                Logger::info(error_info);
                break;
            }

            ret = mpi_->control(ctx_, MPP_DEC_SET_INFO_CHANGE_READY, NULL);
            if (ret) {
                std::string error_info = fmt::format("info change ready failed ret:{}", (int)ret);
                Logger::info(error_info);
                return false;
            }
        } else {
            std::string error_info = fmt::format("read frame success");
            Logger::info(error_info);

            RK_U32 err_info = mpp_frame_get_errinfo(frame);
            RK_U32 discard = mpp_frame_get_discard(frame);

            if (!err_info && !discard) {
                return true;
            } else {
                std::string error_info =
                    fmt::format("mpp_frame_get_errinfo err_info:{},discard:{}", (int)err_info, (int)discard);
                Logger::info(error_info);
                return false;
            }
        }

    } while (true);
    return false;
}


FfpDecoder.h
#pragma once

#include "DataDecoder.h"
#include "Demultiplex.h"
#include "rockchip/rk_mpi.h"
#include <iostream>
#include <memory>
#include <mutex>
#include <opencv2/opencv.hpp>
#include <queue>
#include <thread>
namespace phm_decoder {

class FfpDecoder : public DataDecoder {
    // just for decode h264 or h265 video
  public:
    FfpDecoder(std::string root_ftp_url, std::string local_ftp_path);
    ~FfpDecoder();

    virtual void decode(const std::string &rtsp, const int &train_index, const int &car_index, const int &pos_index);

    cv::Mat dump_mat(MppFrame frame, int ca_index);

  private:
    std::string root_ftp_url_;
    std::string local_ftp_path_;
    std::map<std::string, std::string> date_dirs_;
};
} // namespace phm_decoder


FfpDecoder.cpp
#include "FfpDecoder.h"
#include "LogUtils.h"
#include "Rk3568mpp.h"
extern "C" {
#include <string.h>
#include <unistd.h>
}
using namespace phm_decoder;
FfpDecoder::FfpDecoder(std::string root_ftp_url, std::string local_ftp_path)
    : root_ftp_url_(root_ftp_url), local_ftp_path_(local_ftp_path) {}

FfpDecoder::~FfpDecoder() {}

void FfpDecoder::decode(const std::string &rtsp, const int &train_index, const int &car_index, const int &pos_index) {
    bool get_frame_ret = false;

    DecoderData data;
    data.train_index = train_index;
    data.car_index = car_index;
    data.pos_index = pos_index;
    data.device_type = 3;
    std::string d_info = fmt::format("decode rtsp url:{}, train_index:{}, car_index:{}, pos_index:{}", rtsp, train_index, car_index, pos_index);
    Logger::info(d_info);

    bool decode_res = false;

    Logger::error("rtsp:{}", rtsp);
    std::string rtsp_url = rtsp;

    std::string info_url = fmt::format("ready open rtsp_url rtsp:{}", rtsp_url);
    Logger::error(info_url);
    int reconnect_times = 3;
    Demultiplex dp;
    dp.init();
    date_dirs_[rtsp] = get_cur_date() + "/";
    while (active_ && reconnect_times > 0) {
        decode_res = dp.open_rtsp(rtsp_url);
        if (!decode_res) {
            std::string info = fmt::format("open rtsp error! reconnect_times:{}, rtsp:{}", reconnect_times, rtsp_url);
            Logger::error(info);
            usleep(100 * 1000);
            reconnect_times--;
            continue;
        } else {
            break;
        }
    }

    if (!decode_res) {

        data.online = false;
        data.decoder_state = false;
        std::unique_lock<std::mutex> lock(image_mutex_);
        images_.push(data);

        std::unique_lock<std::mutex> lock_c(decoder_mutex_);
        decoder_threads_.erase(rtsp);
        return;
    }

    Logger::info("open rtsp success!");
    int get_frame_count = 2;
    dp.seed_i_frame();

    int read_times = 4;
    Rk3568mpp rkmpp;
    bool res = rkmpp.init();
    if (!res) {
        std::string info = fmt::format("rkmpp init error");
        Logger::info(info);
        data.online = false;
        data.decoder_state = false;
        std::unique_lock<std::mutex> lock(image_mutex_);
        images_.push(data);

        std::unique_lock<std::mutex> lock_c(decoder_mutex_);
        decoder_threads_.erase(rtsp);
        return;
    }
    // read_times = 0;
    while (active_ && read_times > 0) {
        read_times--;
        std::queue<AVPacket *> read_packet = dp.read_packet(); // fasong I zhen
        while (!read_packet.empty() && active_) {
            AVPacket *av_pack = read_packet.front();
            if (av_pack->size <= 0) {
                read_packet.pop();
                av_packet_unref(av_pack);
                av_packet_free(&av_pack);
                Logger::error("decode av packet size->size is error");
                continue;
            }
            res = rkmpp.send_packet(av_pack);
            if (res) {
                read_packet.pop();
                av_packet_unref(av_pack);
                av_packet_free(&av_pack);
            }
            int get_frame_error_time = 2;
            Logger::info(fmt::format("decode_get_frame start"));

            MppFrame frame = nullptr;

            res = rkmpp.read_frame(frame);
            if (!res) {
                Logger::info("read frame error,need send new pack");
                continue;
            }
            get_frame_count--;
            auto image = std::make_shared<cv::Mat>(std::move(dump_mat(frame, pos_index)));
            if (get_frame_count == 0) {
                data.mat = image;
                get_frame_ret = true;

                std::string time_str = get_cur_time();
                char car_index_str[32] = {0};
                snprintf(car_index_str, sizeof(car_index_str), "_%d_%d.jpg", car_index, pos_index);
                std::string filename = time_str + std::string(car_index_str);
                std::string dir_str = local_ftp_path_ + date_dirs_[rtsp];
                std::string mkdir_cmd = "mkdir " + dir_str + " -p";
                system(mkdir_cmd.c_str());
                std::string local_path = dir_str + filename;
                Logger::info("gst img local_path:{}!", local_path);
                cv::imwrite(local_path.c_str(), *(data.mat.get()));
                system("sync");
                data.ftp_path = root_ftp_url_ + date_dirs_[rtsp] + filename;
                Logger::info("ffmppeg get data.ftp_path:{}!", data.ftp_path);
            } else {
                image.reset();
            }

            mpp_frame_deinit(&frame);
            if (get_frame_ret) {
                break;
            }
        }

        while (!read_packet.empty()) {
            AVPacket *pack = read_packet.front();
            read_packet.pop();
            Logger::info(fmt::format("release read_packet data"));
            av_packet_unref(pack);
            av_packet_free(&pack);
        }
        if (get_frame_ret) {
            break;
        } else {
            Logger::info(fmt::format("read pack times :{}", read_times));
            usleep(200 * 1000);
        }
    }
    dp.close();

    data.online = true;
    data.decoder_state = get_frame_ret;
    if (!get_frame_ret) {
        Logger::info("send error to server");
    } else {
        Logger::info("capture camera image success!");
    }

    std::unique_lock<std::mutex> lock(image_mutex_);
    images_.push(data);

    std::unique_lock<std::mutex> lock_c(decoder_mutex_);
    decoder_threads_.erase(rtsp);
}

cv::Mat FfpDecoder::dump_mat(MppFrame frame, int ca_index) {

    cv::Mat image_data;
    if (NULL == frame) {
        Logger::error("dump_mat error:frame is null");
        return image_data;
    }
    RK_U32 width = 0;
    RK_U32 height = 0;
    MppFrameFormat fmt = MPP_FMT_YUV420SP;
    MppBuffer buffer = NULL;
    RK_U8 *base = NULL;
    width = mpp_frame_get_width(frame);
    height = mpp_frame_get_height(frame);
    RK_U32 h_stride = mpp_frame_get_hor_stride(frame);
    RK_U32 v_stride = mpp_frame_get_ver_stride(frame);
    RK_U32 i = 0;

    fmt = mpp_frame_get_fmt(frame);
    buffer = mpp_frame_get_buffer(frame);
    std::string fmt_info = fmt::format("video mpp_frame_get_fmt format :{}", (int)fmt);
    Logger::info(fmt_info);
    if (NULL == buffer) {
        Logger::error("dump_mat error:frame is null");
        return image_data;
    }

    base = (RK_U8 *)mpp_buffer_get_ptr(buffer);

    if (MPP_FRAME_FMT_IS_RGB(fmt) && MPP_FRAME_FMT_IS_LE(fmt)) {
        fmt = (MppFrameFormat)(fmt & MPP_FRAME_FMT_MASK);
        std::string fmt_info = fmt::format("video  reset frame format :{}", (int)fmt);
        Logger::info(fmt_info);
    }

    switch (fmt) {
    case MPP_FMT_YUV422SP: {

    } break;
    case MPP_FMT_YUV420SP_VU: {
        RK_U8 *base_y = base;
        RK_U8 *base_c = base + h_stride * v_stride;
        cv::Mat yuvMat;
        yuvMat.create(height * 3 / 2, width, CV_8UC1);
        // copy Y and interleaved chroma into a contiguous buffer (strip the stride padding)
        int idx = 0;
        for (i = 0; i < height; i++, base_y += h_stride) {
            memcpy(yuvMat.data + idx, base_y, width);
            idx += width;
        }
        for (i = 0; i < height / 2; i++, base_c += h_stride) {
            memcpy(yuvMat.data + idx, base_c, width);
            idx += width;
        }
        // the conversion must produce 3-channel BGR; 4-channel RGBA fails the detection step
        cv::cvtColor(yuvMat, image_data, cv::COLOR_YUV2BGR_NV12);
    } break;
    case MPP_FMT_YUV420SP: {
        RK_U8 *base_y = base;
        RK_U8 *base_c = base + h_stride * v_stride;
        cv::Mat yuvMat;
        yuvMat.create(height * 3 / 2, width, CV_8UC1);
        // copy Y and interleaved chroma into a contiguous buffer (strip the stride padding)
        int idx = 0;
        for (i = 0; i < height; i++, base_y += h_stride) {
            memcpy(yuvMat.data + idx, base_y, width);
            idx += width;
        }
        for (i = 0; i < height / 2; i++, base_c += h_stride) {
            memcpy(yuvMat.data + idx, base_c, width);
            idx += width;
        }
        // the conversion must produce 3-channel BGR; 4-channel RGBA fails the detection step
        cv::cvtColor(yuvMat, image_data, cv::COLOR_YUV420sp2BGR);
        static int index = 0;
        if (index > 10) {
            index = 0;
        }
        // std::string filename = fmt::format("./index{}_ca{}.jpg", index++, ca_index);
        // cv::imwrite(filename, image_data);

    } break;
    case MPP_FMT_YUV420P: {
        RK_U8 *base_y = base;
        RK_U8 *base_c = base + h_stride * v_stride;
        cv::Mat yuvMat;
        yuvMat.create(height * 3 / 2, width, CV_8UC1);
        // copy Y and chroma into a contiguous buffer (strip the stride padding)
        int idx = 0;
        for (i = 0; i < height; i++, base_y += h_stride) {
            memcpy(yuvMat.data + idx, base_y, width);
            idx += width;
        }
        for (i = 0; i < height / 2; i++, base_c += h_stride) {
            memcpy(yuvMat.data + idx, base_c, width);
            idx += width;
        }
        // the conversion must produce 3-channel BGR; 4-channel RGBA fails the detection step
        cv::cvtColor(yuvMat, image_data, cv::COLOR_YUV420p2BGR);

    } break;
    case MPP_FMT_YUV444SP: {

    } break;
    case MPP_FMT_YUV400: {
    } break;
    case MPP_FMT_ARGB8888:
    case MPP_FMT_ABGR8888:
    case MPP_FMT_BGRA8888:
    case MPP_FMT_RGBA8888: {
    } break;
    case MPP_FMT_RGB565:
    case MPP_FMT_BGR565:
    case MPP_FMT_RGB555:
    case MPP_FMT_BGR555:
    case MPP_FMT_RGB444:
    case MPP_FMT_BGR444: {
    } break;
    default: {
        std::string info = fmt::format("not supported format :{}", (int)fmt);
        Logger::error(info);
    } break;
    }
    Logger::error("dump_mat success!!");
    return image_data;
}


#include "FfpDecoder.h"
#include "LogUtils.h"
#include "Rk3568mpp.h"
extern "C" {
#include <string.h>
#include <unistd.h>
}
using namespace phm_decoder;
FfpDecoder::FfpDecoder(std::string root_ftp_url, std::string local_ftp_path)
    : root_ftp_url_(root_ftp_url), local_ftp_path_(local_ftp_path) {}

FfpDecoder::~FfpDecoder() {}

void FfpDecoder::decode(const std::string &rtsp, const int &train_index, const int &car_index, const int &pos_index) {
    bool get_frame_ret = false;

    DecoderData data;
    data.train_index = train_index;
    data.car_index = car_index;
    data.pos_index = pos_index;
    data.device_type = 3;
    std::string d_info = fmt::format("decode rtsp url:{},camera_index:{}", rtsp, train_index, car_index, pos_index);
    Logger::info(d_info);

    bool decode_res = false;

    Logger::error("rtsp:{}", rtsp);
    std::string rtsp_url = rtsp;

    std::string info_url = fmt::format("ready open rtsp_url rtsp:{}", rtsp_url);
    Logger::error(info_url);
    int reconnect_times = 3;
    Demultiplex dp;
    dp.init();
    date_dirs_[rtsp] = get_cur_date() + "/";
    while (active_ && reconnect_times > 0) {
        decode_res = dp.open_rtsp(rtsp_url);
        if (!decode_res) {
            std::string info = fmt::format("open rtsp error! reconnect_times:{}, rtsp:{}", reconnect_times, rtsp_url);
            Logger::error(info);
            usleep(100 * 1000);
            reconnect_times--;
            continue;
        } else {
            break;
        }
    }

    if (!decode_res) {

        data.online = false;
        data.decoder_state = false;
        std::unique_lock<std::mutex> lock(image_mutex_);
        images_.push(data);

        std::unique_lock<std::mutex> lock_c(decoder_mutex_);
        decoder_threads_.erase(rtsp);
        return;
    }

    Logger::info("open rtsp success!");
    int get_frame_count = 2;
    dp.seed_i_frame();

    int read_times = 4;
    Rk3568mpp rkmpp;
    bool res = rkmpp.init();
    if (!res) {
        std::string info = fmt::format("rkmpp init error");
        Logger::info(info);
        data.online = false;
        data.decoder_state = false;
        std::unique_lock<std::mutex> lock(image_mutex_);
        images_.push(data);

        std::unique_lock<std::mutex> lock_c(decoder_mutex_);
        decoder_threads_.erase(rtsp);
        return;
    }
    // read_times = 0;
    while (active_ && read_times > 0) {
        read_times--;
        std::queue<AVPacket *> read_packet = dp.read_packet(); // fasong I zhen
        while (!read_packet.empty() && active_) {
            AVPacket *av_pack = read_packet.front();
            if (av_pack->size <= 0) {
                read_packet.pop();
                av_packet_unref(av_pack);
                av_packet_free(&av_pack);
                Logger::error("decode av packet size->size is error");
                continue;
            }
            res = rkmpp.send_packet(av_pack);
            if (res) {
                read_packet.pop();
                av_packet_unref(av_pack);
                av_packet_free(&av_pack);
            }
            int get_frame_error_time = 2;
            Logger::info(fmt::format("decode_get_frame start"));

            MppFrame frame = nullptr;

            res = rkmpp.read_frame(frame);
            if (!res) {
                Logger::info("read frame error,need send new pack");
                continue;
            }
            get_frame_count--;
            auto image = std::make_shared<cv::Mat>(std::move(dump_mat(frame, pos_index)));
            if (get_frame_count == 0) {
                data.mat = image;
                get_frame_ret = true;

                std::string time_str = get_cur_time();
                char car_index_str[12] = {0};
                sprintf(car_index_str, "_%d_%d.jpg", car_index, pos_index);
                std::string filename = time_str + std::string(car_index_str);
                std::string dir_str = local_ftp_path_ + date_dirs_[rtsp];
                std::string mkdir_cmd = "mkdir " + dir_str + " -p";
                system(mkdir_cmd.c_str());
                std::string local_path = dir_str + filename;
                Logger::info("gst img local_path:{}!", local_path);
                cv::imwrite(local_path.c_str(), *(data.mat.get()));
                system("sync");
                data.ftp_path = root_ftp_url_ + date_dirs_[rtsp] + filename;
                Logger::info("ffmppeg get data.ftp_path:{}!", data.ftp_path);
            } else {
                image.reset();
            }

            mpp_frame_deinit(&frame);
            if (get_frame_ret) {
                break;
            }
        }

        while (!read_packet.empty()) {
            AVPacket *pack = read_packet.front();
            read_packet.pop();
            Logger::info(fmt::format("release read_packet data"));
            av_packet_unref(pack);
            av_packet_free(&pack);
        }
        if (get_frame_ret) {
            break;
        } else {
            Logger::info(fmt::format("read pack times :{}", read_times));
            usleep(200 * 1000);
        }
    }
    dp.close();

    data.online = true;
    data.decoder_state = get_frame_ret;
    if (!get_frame_ret) {
        Logger::info("send error to server");
    } else {
        Logger::info("capture camera image success!");
    }

    std::unique_lock<std::mutex> lock(image_mutex_);
    images_.push(data);

    std::unique_lock<std::mutex> lock_c(decoder_mutex_);
    decoder_threads_.erase(rtsp);
}

cv::Mat FfpDecoder::dump_mat(MppFrame frame, int ca_index) {

    cv::Mat image_data;
    if (NULL == frame) {
        Logger::error("dump_mat error:frame is null");
        return image_data;
    }
    RK_U32 width = 0;
    RK_U32 height = 0;
    MppFrameFormat fmt = MPP_FMT_YUV420SP;
    MppBuffer buffer = NULL;
    RK_U8 *base = NULL;
    width = mpp_frame_get_width(frame);
    height = mpp_frame_get_height(frame);
    RK_U32 h_stride = mpp_frame_get_hor_stride(frame);
    RK_U32 v_stride = mpp_frame_get_ver_stride(frame);
    RK_U32 i = 0;

    fmt = mpp_frame_get_fmt(frame);
    buffer = mpp_frame_get_buffer(frame);
    std::string fmt_info = fmt::format("video mpp_frame_get_fmt format :{}", (int)fmt);
    Logger::info(fmt_info);
    if (NULL == buffer) {
        Logger::error("dump_mat error:frame is null");
        return image_data;
    }

    base = (RK_U8 *)mpp_buffer_get_ptr(buffer);

    if (MPP_FRAME_FMT_IS_RGB(fmt) && MPP_FRAME_FMT_IS_LE(fmt)) {
        fmt = (MppFrameFormat)(fmt & MPP_FRAME_FMT_MASK);
        std::string fmt_info = fmt::format("video  reset frame format :{}", (int)fmt);
        Logger::info(fmt_info);
    }

    switch (fmt) {
    case MPP_FMT_YUV422SP: {

    } break;
    case MPP_FMT_YUV420SP_VU: {
        RK_U8 *base_y = base;
        RK_U8 *base_c = base + h_stride * v_stride;
        cv::Mat yuvMat;
        yuvMat.create(height * 3 / 2, width, CV_8UC1);
        // 转为YUV420p格式
        int idx = 0;
        for (i = 0; i < height; i++, base_y += h_stride) {
            memcpy(yuvMat.data + idx, base_y, width);
            idx += width;
        }
        for (i = 0; i < height / 2; i++, base_c += h_stride) {
            memcpy(yuvMat.data + idx, base_c, width);
            idx += width;
        }
        // 这里的转码需要转为RGB 3通道, RGBA四通道则不能检测成功
        cv::cvtColor(yuvMat, image_data, cv::COLOR_YUV2BGR_NV12);
    } break;
    case MPP_FMT_YUV420SP: {
        RK_U8 *base_y = base;
        RK_U8 *base_c = base + h_stride * v_stride;
        cv::Mat yuvMat;
        yuvMat.create(height * 3 / 2, width, CV_8UC1);
        // 转为YUV420p格式
        int idx = 0;
        for (i = 0; i < height; i++, base_y += h_stride) {
            memcpy(yuvMat.data + idx, base_y, width);
            idx += width;
        }
        for (i = 0; i < height / 2; i++, base_c += h_stride) {
            memcpy(yuvMat.data + idx, base_c, width);
            idx += width;
        }
        // 这里的转码需要转为RGB 3通道, RGBA四通道则不能检测成功
        cv::cvtColor(yuvMat, image_data, cv::COLOR_YUV420sp2BGR);
        static int index = 0;
        if (index > 10) {
            index = 0;
        }
        // std::string filename = fmt::format("./index{}_ca{}.jpg", index++, ca_index);
        // cv::imwrite(filename, image_data);

    } break;
    case MPP_FMT_YUV420P: {
        RK_U8 *base_y = base;
        RK_U8 *base_c = base + h_stride * v_stride;
        cv::Mat yuvMat;
        yuvMat.create(height * 3 / 2, width, CV_8UC1);
        // 转为YUV420p格式
        int idx = 0;
        for (i = 0; i < height; i++, base_y += h_stride) {
            memcpy(yuvMat.data + idx, base_y, width);
            idx += width;
        }
        for (i = 0; i < height / 2; i++, base_c += h_stride) {
            memcpy(yuvMat.data + idx, base_c, width);
            idx += width;
        }
        // 这里的转码需要转为RGB 3通道, RGBA四通道则不能检测成功
        cv::cvtColor(yuvMat, image_data, cv::COLOR_YUV420p2BGR);

    } break;
    case MPP_FMT_YUV444SP: {

    } break;
    case MPP_FMT_YUV400: {
    } break;
    case MPP_FMT_ARGB8888:
    case MPP_FMT_ABGR8888:
    case MPP_FMT_BGRA8888:
    case MPP_FMT_RGBA8888: {
    } break;
    case MPP_FMT_RGB565:
    case MPP_FMT_BGR565:
    case MPP_FMT_RGB555:
    case MPP_FMT_BGR555:
    case MPP_FMT_RGB444:
    case MPP_FMT_BGR444: {
    } break;
    default: {
        std::string info = fmt::format("not supported format :{}", (int)fmt);
        Logger::error(info);
    } break;
    }
    Logger::error("dump_mat success!!");
    return image_data;
}

Approach 3:

The headache here is building an FFmpeg 6.0 that contains the h264_rkmpp and hevc_rkmpp hardware decoders. You will have to build it yourself; mine was given to me by someone else, and I have declined the people who messaged me asking for it, so please don't ask. It shouldn't be difficult.
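Once you have such a build, a quick sanity check (my own sketch, not from the original project) is to ask libavcodec whether the two decoders were actually compiled in:

extern "C" {
#include <libavcodec/avcodec.h>
}
#include <cstdio>

int main() {
    // Both lookups return NULL if the build does not contain the rkmpp decoders.
    const AVCodec *h264 = avcodec_find_decoder_by_name("h264_rkmpp");
    const AVCodec *hevc = avcodec_find_decoder_by_name("hevc_rkmpp");
    printf("h264_rkmpp: %s\n", h264 ? "available" : "missing");
    printf("hevc_rkmpp: %s\n", hevc ? "available" : "missing");
    return (h264 && hevc) ? 0 : 1;
}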

Beyond FFmpeg 6.0 itself, the difficulty of this approach is DRM rendering, and above all choosing the plane (layer). A typical device has only one CRTC and one connector, but it has several planes, and you have to decide yourself which plane to render onto; see the plane-listing sketch below.
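The FindPlaneForCrtc function further down hard-codes plane_id == 101 for my board. To find the right plane on another board, a minimal sketch (my own, using standard libdrm calls) that prints every plane together with its "type" property (0 = overlay, 1 = primary, 2 = cursor) so you can pick an overlay plane:

#include <xf86drm.h>
#include <xf86drmMode.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

// Print plane_id, possible_crtcs and the "type" property of every plane.
void list_planes(int fd)
{
    // Without this cap the kernel hides primary/cursor planes from the legacy API.
    drmSetClientCap(fd, DRM_CLIENT_CAP_UNIVERSAL_PLANES, 1);

    drmModePlaneResPtr pres = drmModeGetPlaneResources(fd);
    if (!pres)
        return;

    for (uint32_t i = 0; i < pres->count_planes; i++) {
        drmModePlanePtr plane = drmModeGetPlane(fd, pres->planes[i]);
        if (!plane)
            continue;

        uint64_t type = 0;
        drmModeObjectPropertiesPtr props =
            drmModeObjectGetProperties(fd, plane->plane_id, DRM_MODE_OBJECT_PLANE);
        if (props) {
            for (uint32_t j = 0; j < props->count_props; j++) {
                drmModePropertyPtr prop = drmModeGetProperty(fd, props->props[j]);
                if (prop && strcmp(prop->name, "type") == 0)
                    type = props->prop_values[j];
                drmModeFreeProperty(prop);
            }
            drmModeFreeObjectProperties(props);
        }

        printf("plane_id=%u possible_crtcs=0x%x type=%llu\n",
               plane->plane_id, plane->possible_crtcs,
               (unsigned long long)type);
        drmModeFreePlane(plane);
    }
    drmModeFreePlaneResources(pres);
}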

#include "player_th.h"
#include <stdio.h>
#include <QDebug>
#include "form_video_player.h"
player_th::player_th(QObject *parent,int x,int y,int width,int height) :
    QThread(parent),x_(x),y_(y),width_(width),height_(height)
{


    video_stream_idx = -1;
    audio_stream_idx=-1;
    quit_=false;

    frame=NULL;
    scale=1;
    last_scale=0;
    seek_times=0;
    last_seek_times=0;


    need_switch_ = true;

    fmt_ctx=NULL;
    video_dec_ctx=NULL;


    pSwsContext=NULL;
    qDebug()<<"player_th";


    video_track_=new VideoTrack();
    video_track_->set_display_area(x_,y_,width_,height_);
    Init();
    parent_ = (form_video_player*)(parent);
}

player_th::~player_th()
{
    stop_ = true;
    quit_ = true;
    this->msleep(1000);
}
void player_th::set_scale(float s)
{
    cmd_id=CMD_SCALE;
    scale=s;
}

QString player_th::ffmpeg_error_string(int errnum)
{
    char error_buf[AV_ERROR_MAX_STRING_SIZE] = {0};
    return  QString(QLatin1String(av_make_error_string(error_buf, AV_ERROR_MAX_STRING_SIZE, errnum)));
}


void player_th::SetUrl(QString str){
    QMutexLocker locker(&url_lock_);
    // compare before overwriting, otherwise need_switch_ can never become true
    need_switch_ = (cur_rtsp_url_ != str);
    cur_rtsp_url_ = str;
}

bool player_th::Init()
{
    qDebug()<<"start Init";
    total_playtime_s=0;
    video_stream=NULL;
    cmd_id=CMD_PLAY;
    last_cmd_id=CMD_PLAY;

    if(fmt_ctx==NULL)
    {
        fmt_ctx = avformat_alloc_context();
        // allocate an AVFormatContext and do basic initialization
    }

    return true;
}


bool player_th::open_rtsp(QString rtsp_url){
    AVDictionary*  dco = NULL;
    av_dict_set(&dco, "rtsp_transport", "tcp", 0);
    av_dict_set(&dco, "max_analyze_duration", "3", 0);
    av_dict_set(&dco, "stimeout", "3000000", 0);
    if (avformat_open_input(&fmt_ctx, rtsp_url.toStdString().c_str(), NULL, &dco) < 0)
    {
        fprintf(stderr, "Could not open source file %s\n",rtsp_url.toStdString().c_str());
        return false;
    }

    av_dict_free(&dco);

    /* retrieve stream information */
    if (avformat_find_stream_info(fmt_ctx, NULL) < 0)
    {
        fprintf(stderr, "Could not find stream information\n");
        return false;
    }



    if (open_codec_context(&video_stream_idx, &video_dec_ctx, fmt_ctx, AVMEDIA_TYPE_VIDEO) >= 0)
    {
        video_stream = fmt_ctx->streams[video_stream_idx];
        /* allocate image where the decoded image will be put */
        video_width = video_dec_ctx->width;
        video_height = video_dec_ctx->height;
        pix_fmt = video_dec_ctx->pix_fmt;
    }
    else{
        return false;
    }


    if (!video_stream)
    {
        fprintf(stderr, "Could not find audio or video stream in the input, aborting\n");
        return false;
    }



    if(fmt_ctx->duration!=AV_NOPTS_VALUE)
    {
        qDebug()<<"pAVFormatContext->duration"<<fmt_ctx->duration/AV_TIME_BASE;

        total_playtime_s=fmt_ctx->duration/AV_TIME_BASE;
        emit realplay_time(0,total_playtime_s);
    }

    if(frame==NULL)
    {
        frame=av_frame_alloc();
    }


    pkt = av_packet_alloc();
    if (!pkt)
    {
        fprintf(stderr, "Could not allocate packet\n");
        return false;
    }
    if(!video_track_)
    {
        video_track_=new VideoTrack();
        video_track_->set_display_area(x_,y_,width_,height_);
    }
    qDebug()<<"fmt_ctxduration:"<<fmt_ctx->duration;
    qDebug()<<"width:"<<video_width<<"height"<<video_height<<"total_playtime_s:"<<total_playtime_s<<"AV_NOPTS_VALUE"<<AV_NOPTS_VALUE;
    return true;
}

void player_th::run()
{
    quit_=false;
    while(!quit_)
    {
        if(stop_ || !parent_->isVisible()){
            msleep(1000);
            continue;
        }
        QMutexLocker locker(&url_lock_);

        if(cmd_id==CMD_PLAY && !open_rtsp(cur_rtsp_url_))
        {
            msleep(1000);

            continue;
        }

        need_switch_ = false;
        while (!quit_&&!stop_ && !need_switch_ &&parent_->isVisible())
        {
            if(cmd_id==CMD_PAUSE&&last_cmd_id!=CMD_PAUSE)
            {
                av_read_pause(fmt_ctx);
                last_cmd_id=CMD_PAUSE;
                qDebug()<<"pause!!!!!!!!!"<<endl;
            }
            else if(cmd_id==CMD_PLAY&&last_cmd_id!=CMD_PLAY)
            {
                av_read_play(fmt_ctx);
                last_cmd_id=CMD_PLAY;
            }
            else if(cmd_id==CMD_SEEK)//&&last_cmd_id!=CMD_PAUSE
            {
                if(seek_times!=last_seek_times)
                {
                    seek_replay_sec(seek_times);
                    last_seek_times=seek_times;
                    //              qDebug()<<"seek change:"<<seek_times;
                }
            }
            else if (cmd_id == CMD_SCALE&& last_cmd_id!=CMD_PAUSE)
            {
                if(last_scale!=scale)
                {
                    qDebug()<<"cmd_id == CMD_SCALE  :"<<scale;
                    av_read_pause(fmt_ctx);
                    fmt_ctx->scale=scale;
                    av_read_play(fmt_ctx);
                    last_scale=scale;
                }
            }
            else if(cmd_id==CMD_STOP)
            {
                stop_=true;
                break;
            }else if(cmd_id == CMD_RESUME){
                av_read_play(fmt_ctx);
                last_cmd_id=CMD_RESUME;
            }

            if(cmd_id==CMD_PAUSE)
            {
                msleep(25);
                continue;
            }
            if (av_read_frame(fmt_ctx, pkt) >= 0)
            {

                int ret=0;
                if (pkt->stream_index == video_stream_idx)
                {
                    ret = decode_packet(video_dec_ctx, pkt);
                    float play_position=pkt->pts*r2d(video_stream->time_base);

                    emit realplay_time(play_position,total_playtime_s);
                }

                av_packet_unref(pkt);
                if (ret < 0)
                {
                    qDebug()<<"break!!!!"<<ret;
                    break;
                }

                msleep(5);//play local video need control speed.
            }
            else
            {

                break;
            }

        }
        free_content();
    }
}

bool player_th::seek_replay_sec(int seek_time)
{ // seek to the specified second
    qDebug()<<"seek_replay_sec:"<<seek_time;

    cmd_id=CMD_SEEK;
    int64_t seek_target = seek_time * AV_TIME_BASE;

    int stream_index=video_stream_idx;
    AVStream * s = NULL;
    s=video_stream;

    //seek_target = av_rescale_q(seek_target, AV_TIME_BASE_Q,s->time_base);
    AVRational bq = {1, AV_TIME_BASE};
    seek_target = av_rescale_q(seek_target, bq,s->time_base);

    if (s->start_time != AV_NOPTS_VALUE)
        seek_target += s->start_time;

    //     start_io_timeout_check();
    int ret = av_seek_frame(fmt_ctx, stream_index, seek_target, AVSEEK_FLAG_BACKWARD);
    //   stop_io_timeout_check();

    if (ret < 0)
    {
        //  LOG_ERROR() << "av_seek_frame failed, " << ffmpeg_error_string(ret);
        qDebug()<<"av_seek_frame failed!!!!";
        return false;
    }
    else
    {
        return true;
    }

}

void player_th::stop_play(){
    stop_ = true;
}

void player_th::start_play()
{
    stop_ = false;

}
int player_th::open_codec_context(int *stream_idx,AVCodecContext **dec_ctx, AVFormatContext *fmt_ctx, enum AVMediaType type)
{
    int ret, stream_index;
    AVStream *st;
    const AVCodec *dec = NULL;

    ret = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0);
    if (ret < 0) {
        fprintf(stderr, "Could not find %s stream in input file\n",
                av_get_media_type_string(type));
        return ret;
    } else {
        stream_index = ret;
        st = fmt_ctx->streams[stream_index];

        /* find decoder for the stream */
        //     dec = avcodec_find_decoder(st->codecpar->codec_id);

        dec = avcodec_find_decoder_by_name("h264_rkmpp");//hevc_rkmpp
        //  dec = avcodec_find_decoder_by_name("hevc_rkmpp");
        if (!dec) {
            fprintf(stderr, "Failed to find %s codec\n",
                    av_get_media_type_string(type));
            return AVERROR(EINVAL);
        }

        /* Allocate a codec context for the decoder */
        *dec_ctx = avcodec_alloc_context3(dec);
        if (!*dec_ctx) {
            fprintf(stderr, "Failed to allocate the %s codec context\n",
                    av_get_media_type_string(type));
            return AVERROR(ENOMEM);
        }

        /* Copy codec parameters from input stream to output codec context */
        if ((ret = avcodec_parameters_to_context(*dec_ctx, st->codecpar)) < 0) {
            fprintf(stderr, "Failed to copy %s codec parameters to decoder context\n",
                    av_get_media_type_string(type));
            return ret;
        }

        /* Init the decoders */
        if ((ret = avcodec_open2(*dec_ctx, dec, NULL)) < 0) {
            fprintf(stderr, "Failed to open %s codec\n",
                    av_get_media_type_string(type));
            return ret;
        }
        *stream_idx = stream_index;
    }

    return 0;
}

int player_th::decode_packet(AVCodecContext *dec, const AVPacket *pkt)
{
    int ret = -1;

    AVFrame *pFrameOK = NULL;

    // submit the packet to the decoder
    ret = avcodec_send_packet(dec, pkt);
    if (ret < 0)
    {
        qDebug()<<"avcodec_send_packet error:"<<ffmpeg_error_string(ret);
    }


    // get all the available frames from the decoder
    while (!quit_&&!stop_&&ret >= 0)
    {
        if (!(pFrameOK = av_frame_alloc()))
        {
            fprintf(stderr, "Can not alloc frame\n");
            ret = AVERROR(ENOMEM);
            qDebug()<<"av_frame_alloc error";
            // goto fail;
        }
        ret = avcodec_receive_frame(dec, frame);
        if (ret < 0)
        {
            // those two return values are special and mean there is no output
            // frame available, but there were no errors during decoding

            if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN))
            { // no frame available yet
                //qDebug()<<"avcodec_receive_frame error:"<<ffmpeg_error_string(ret);
                return 0;
            }
            else
                qDebug()<<"avcodec_receive_frame error:"<<ffmpeg_error_string(ret);
            return ret;
        }
        else
        { // got a decoded frame, hand it to the renderer
            // qDebug()<<"frame format:"<<frame->format<<"AV_PIX_FMT_NV12:"<<AV_PIX_FMT_NV12<<"AV_PIX_FMT_DRM_PRIME"<<AV_PIX_FMT_DRM_PRIME;;
            if(video_track_!=NULL)
                video_track_->DisplayImage(frame);
        }


        av_frame_unref(frame);
        av_frame_free(&pFrameOK);
        if (ret < 0)
            return ret;
    }

    return 0;
}

void player_th::free_content()
{
    qDebug()<<"free_content";

    if(video_dec_ctx!=NULL)
    {
        qDebug()<<"avcodec_free_context";
        avcodec_free_context(&video_dec_ctx);
        video_dec_ctx=NULL;

    }
    if(fmt_ctx!=NULL)
    {
        qDebug()<<"fmt_ctx free";
        avformat_close_input(&fmt_ctx);
    }
    if(pkt!=NULL)
    {
        qDebug()<<"pkt free";
        av_packet_free(&pkt);
        pkt=NULL;
    }
    if(frame!=NULL)
    {
        qDebug()<<"frame free";
        av_frame_free(&frame);
        frame=NULL;
    }
    if(video_track_!=NULL){
       delete video_track_;
        video_track_ = NULL;
    }
}

void player_th::stop_display()
{
    quit_=true;
}


DRM rendering:
extern "C"{
#include <libavutil/hwcontext_drm.h>
}
#include <unistd.h>
#include <sys/types.h>
#include <fcntl.h>
#include "DirectKmsSink.h"
//#include "log.h"

bool DirectKmsSink::Init(int w, int h, AVPixelFormat format)
{
    if (inited_)
        return true;

	pix_fmt_ = format;

    int ret;
	fd_ = open("/dev/dri/card0", O_RDWR | O_CLOEXEC);
    if (fd_ < 0) 
    {
        printf("failed to open card0, %s", strerror(errno));
        goto err;
    }
    res_ = drmModeGetResources(fd_);
    if (!res_) 
    {
        printf("drmModeGetResources failed: %s (%d)\n", strerror(errno), errno);
        goto err;
    }


    printf("!!!!!res_->count_crtcs:%d,!!!!res_->count_connectors:%d\n",res_->count_crtcs,res_->count_connectors);
    if (!res_->count_crtcs || !res_->count_connectors) 
    {
        printf("count_crtcs(%d) or count_connectors(%d) failed\n", res_->count_crtcs, res_->count_connectors);
        goto err;
    }

    crtc_id_ = res_->crtcs[0];//3
	conn_id_ = res_->connectors[0];
    printf("kms crtc_id=%u  conn_id=%u\n", crtc_id_, conn_id_);

	crtc_ = drmModeGetCrtc (fd_, crtc_id_);
	if (!crtc_) 
    {
        printf("drmModeGetCrtc failed: %s (%d)", strerror(errno), errno);
	    goto err;
	}

    mode_display_w_ = crtc_->mode.hdisplay;
	mode_display_h_ = crtc_->mode.vdisplay;


      printf("kms mode_display_w_=%u  mode_display_h_=%u\n", mode_display_w_, mode_display_h_);

    plane_res_ = drmModeGetPlaneResources(fd_);
    if (!plane_res_) 
    {
        printf("drmModeGetPlaneResources failed: %s (%d)", strerror(errno), errno);
        goto err;
    }

    plane_id_ = FindPlaneForCrtc();
    if (plane_id_ == -1)
    {
        printf("FindPlaneForCrtc failed: %u", plane_id_);
        goto err;
    }
    printf("kms plane_id=%u\n", plane_id_);
    inited_ = true;
    return true;

err:
    if(plane_res_)
	{
        drmModeFreePlaneResources(plane_res_);
        plane_res_ = nullptr;
    }	
    if(crtc_)
	{
        drmModeFreeCrtc(crtc_);
        crtc_ = nullptr;
    }	
	if(res_)
	{
        drmModeFreeResources(res_);
        res_ = nullptr;
    }	
	if(fd_ >= 0)
	{
        close(fd_);
        fd_ = -1;
    }	
    return false;
}


DirectKmsSink::~DirectKmsSink()
{
    for (int i = 0; i < kBufferCount; ++i)
    {
        DrmBuffer* next_buf = &buffers_[i];
        if (next_buf->fb_id != -1)
        {
            if (drmModeRmFB(fd_, next_buf->fb_id) != 0)
                printf("cant remove fb %d\n", next_buf->fb_id);
        }
    }
	if(plane_res_)
		drmModeFreePlaneResources(plane_res_);
    if(crtc_)
        drmModeFreeCrtc(crtc_);
	if(res_)
		drmModeFreeResources(res_);
	if(fd_ >= 0)
		close(fd_);
}


uint32_t DirectKmsSink::FindPlaneForCrtc()
{
	drmModePlane *plane;
	int i, pipe;
	plane = NULL;
	pipe = -1;
	for (i = 0; i < res_->count_crtcs; i++) {
		if (crtc_id_ == res_->crtcs[i]) {
			pipe = i;
			break;
		}
	}

	if (pipe == -1)
		return -1;

    printf("-----plane_res_->count_planes:%d\n",plane_res_->count_planes);

    for (i = 0; i < plane_res_->count_planes; i++)
    {
        plane = drmModeGetPlane (fd_, plane_res_->planes[i]);
        if(plane)
        {
             printf("-----drmModeGetPlane success:plane crtc_id %d\n",plane->crtc_id);
        }

    }

	for (i = 0; i < plane_res_->count_planes; i++) 
    {
		plane = drmModeGetPlane (fd_, plane_res_->planes[i]);
        if (plane )
		{
            printf("-----drmModeGetPlane success:planes pipe %d\n",pipe);

            if (plane->possible_crtcs & (1 << pipe) && plane->plane_id==101)
			{
                uint32_t plane_id = plane->plane_id;
                drmModeFreePlane(plane);
                return plane_id;
            }  
		    drmModeFreePlane(plane);
        }
	}

	return -1;
}

void DirectKmsSink::SetDisplayArea(int x, int y, int w, int h)
{
	display_x_ = x;
	display_y_ = y;
	display_w_ = w;
	display_h_ = h;

//    display_x_ = 0;
//    display_y_ = 0;
//    display_w_ = 1024;
//    display_h_ = 768;

	// clamp when the display area exceeds the screen size
	if ((display_x_ + display_w_) > mode_display_w_)
		display_w_ = mode_display_w_ - display_x_;

	if ((display_y_ + display_h_) > mode_display_h_)
		display_h_ = mode_display_h_ - display_y_;
    printf("set display area x:%d y:%d w:%d h:%d\n", display_x_, display_y_, display_w_, display_h_);
}

bool DirectKmsSink::ShowFrame(AVFrame *frame)
{
    DrmBuffer* drm_buf = &buffers_[drm_buf_idx_];
    AVDRMFrameDescriptor* desc = (AVDRMFrameDescriptor *) frame->data[0];
	drm_buf->dbuf_fd = desc->objects[0].fd;
    for (int i = 0; i < desc->layers->nb_planes && i < 4; i++ ) 
    {
        drm_buf->pitches[i] = desc->layers->planes[i].pitch;
        drm_buf->offsets[i] = desc->layers->planes[i].offset;
    }
    drm_format_ = desc->layers[0].format;
    drm_buf->fourcc = drm_format_;
    if (!Display(drm_buf, frame->width, frame->height))
        return false;
    drm_buf_idx_ = next_buffer_index();
    return true;

}

bool DirectKmsSink::Display(DrmBuffer* drm_buf, int width, int height)
{
    int ret;
    uint32_t bo_handle;
	ret = drmPrimeFDToHandle(fd_, drm_buf->dbuf_fd, &bo_handle);
	if (ret < 0) 
    {
        printf("cannot import dmabuf %d, fd=%d\n", ret, drm_buf->dbuf_fd);
		return false;
	}

    uint32_t bo_handles[4];
	bo_handles[0] = bo_handle;
	bo_handles[1] = bo_handle;
	bo_handles[2] = 0;
	bo_handles[3] = 0;

    ret = drmModeAddFB2(fd_, width, height, drm_buf->fourcc, bo_handles,
			    drm_buf->pitches, drm_buf->offsets, &drm_buf->fb_id, 0);
	if (ret < 0) 
    {
        printf("cannot add framebuffer %d\n", ret);
	}

    struct drm_gem_close gem_close;
    memset(&gem_close, 0, sizeof gem_close);
    gem_close.handle = bo_handle;
    if (drmIoctl(fd_, DRM_IOCTL_GEM_CLOSE, &gem_close) < 0)
        printf("cant close gem: %s", strerror(errno));
    if (ret < 0)
        return false;

	
    ret = drmModeSetPlane(fd_, plane_id_, crtc_id_,
		      drm_buf->fb_id, 0,
		      display_x_, display_y_, display_w_, display_h_,
		      0, 0, width << 16, height << 16);
    if (ret != 0)
    {
        printf("drmModeSetPlane failed %d\n", ret);
    }
	

    DrmBuffer* next_buf = &buffers_[next_buffer_index()];
    if (next_buf->fb_id != -1)
    {
        if (drmModeRmFB(fd_, next_buf->fb_id) != 0)
            printf("cant remove fb %d\n", next_buf->fb_id);
        else
            next_buf->fb_id = -1;
    }

    return ret == 0;

}

int DirectKmsSink::next_buffer_index()
{
    int index = drm_buf_idx_ + 1;
    if (index >= kBufferCount)
        index = 0;
    return index;
}

Approach 4: this is the real answer to the problem. The official RK documentation is quite poor, and the demo I originally got was an old version; I debugged it for ages and it simply didn't work. Only after asking Rockchip did I learn there is a newer release, i.e. the Linux 5.1 SDK plus the latest Rockit. In fact you don't need to update the kernel; using the newest Rockit library is enough.

I won't paste large amounts of code here, there is simply too much; refer to the demo instead, the new demo is already quite detailed.

Instead, let me focus on the problems I ran into and how to debug them.

If no video shows up at first and sending the bitstream fails, it is likely a problem with how the decode buffers are initialized. Doing it the way the official demo does works; the main things to get right are the decode format, the buffer sizes, H.264 versus H.265, and the frame-submission mode.

RK_S32 rk_vdec::mpi_create_vdec(RK_S32 s32Ch, VIDEO_MODE_E enMode, int max_pic_width, int max_pic_height,
                                bool enable_h264)
{
    RK_S32 s32Ret = RK_SUCCESS;
    VDEC_CHN_ATTR_S stAttr;
    VDEC_CHN_PARAM_S stVdecParam;
    MB_POOL_CONFIG_S stMbPoolCfg;
    VDEC_PIC_BUF_ATTR_S stVdecPicBufAttr;
    MB_PIC_CAL_S stMbPicCalResult;
    VDEC_MOD_PARAM_S stModParam;
    std::cout << "RK_S32 rk_vdec::mpi_create_vdec s32Ch: " << s32Ch << std::endl;

    memset(&stAttr, 0, sizeof(VDEC_CHN_ATTR_S));
    memset(&stVdecParam, 0, sizeof(VDEC_CHN_PARAM_S));
    memset(&stModParam, 0, sizeof(VDEC_MOD_PARAM_S));
    RK_BOOL bEnableMbPool = RK_TRUE;
    if (bEnableMbPool)
    {
        stModParam.enVdecMBSource = MB_SOURCE_USER;
        s32Ret = RK_MPI_VDEC_SetModParam(&stModParam);
        if (s32Ret != RK_SUCCESS)
        {
            RK_LOGE("vdec %d SetModParam failed! errcode %x", s32Ch, s32Ret);
            std::cout << "RK_MPI_VDEC_SetModParam failed " << std::endl;

            return s32Ret;
        }
    }

    stAttr.enMode = VIDEO_MODE_FRAME;
    if (enable_h264)
    {
        stAttr.enType = RK_VIDEO_ID_AVC;
    }
    else
    {
        stAttr.enType = RK_VIDEO_ID_HEVC;
    }
    stAttr.u32PicWidth = max_pic_width;
    stAttr.u32PicHeight = max_pic_height;
    stAttr.u32PicVirWidth = max_pic_width;
    stAttr.u32PicVirHeight = max_pic_height;
    stAttr.stVdecVideoAttr.u32TmvBufSize = max_pic_width * max_pic_height * 4;
    //*3 / 2;
    stAttr.stVdecVideoAttr.bTemporalMvpEnable = RK_TRUE;
    stAttr.stVdecVideoAttr.u32RefFrameNum = 5;
    stAttr.u32FrameBufSize = max_pic_width * max_pic_height * 4;
    // / 2;
    stAttr.u32FrameBufCnt = 8;
    stAttr.u32StreamBufCnt = 8;
    stVdecParam.stVdecVideoParam.enOutputOrder = VIDEO_OUTPUT_ORDER_DISP;

    chn_ = s32Ch;
    s32Ret = RK_MPI_VDEC_CreateChn(chn_, &stAttr);
    if (s32Ret != RK_SUCCESS)
    {
        RK_LOGE("create chn_ %d vdec failed! ", chn_);
        std::cout << "RK_MPI_VDEC_CreateChn failed " << std::endl;

        return s32Ret;
    }

    if (enable_h264)
    {
        stVdecParam.enType = RK_VIDEO_ID_AVC;
    }
    else
    {
        stVdecParam.enType = RK_VIDEO_ID_HEVC;
    }
    stVdecParam.stVdecVideoParam.enDecMode = VIDEO_DEC_MODE_IPB;
    // VIDEO_OUTPUT_ORDER_DISP
    stVdecParam.stVdecVideoParam.enOutputOrder = VIDEO_OUTPUT_ORDER_DEC;
    stVdecParam.stVdecVideoParam.enCompressMode = COMPRESS_MODE_NONE; //(COMPRESS_MODE_E)ctx->u32CompressMode;
    s32Ret = RK_MPI_VDEC_SetChnParam(chn_, &stVdecParam);
    if (s32Ret != RK_SUCCESS)
    {
        RK_LOGE("set chn %d param failed %x! ", chn_, s32Ret);
        std::cout << "RK_MPI_VDEC_SetChnParam failed " << std::endl;

        return s32Ret;
    }

    int s32ChnFd = RK_MPI_VDEC_GetFd(chn_);
    if (s32ChnFd <= 0)
    {
        RK_LOGE("get fd chn %d failed %d", chn_, s32ChnFd);
        std::cout << "RK_MPI_VDEC_GetFd failed " << std::endl;

        return RK_FAILURE;
    }

    if (bEnableMbPool)
    {
        memset(&stMbPoolCfg, 0, sizeof(MB_POOL_CONFIG_S));
        if (enable_h264)
        {
            stVdecPicBufAttr.enCodecType = RK_VIDEO_ID_AVC;
        }
        else
        {
            stVdecPicBufAttr.enCodecType = RK_VIDEO_ID_HEVC;
        }
        stVdecPicBufAttr.stPicBufAttr.u32Width = max_pic_width;
        stVdecPicBufAttr.stPicBufAttr.u32Height = max_pic_height;

        std::cout << "stVdecPicBufAttr :width=" << max_pic_width << " height=" << max_pic_height << std::endl;

        stVdecPicBufAttr.stPicBufAttr.enPixelFormat = RK_FMT_RGBA8888;
        // RK_FMT_YUV420SP;
        stVdecPicBufAttr.stPicBufAttr.enCompMode = COMPRESS_MODE_NONE;
        s32Ret = RK_MPI_CAL_VDEC_GetPicBufferSize(&stVdecPicBufAttr, &stMbPicCalResult);
        if (s32Ret != RK_SUCCESS)
        {
            RK_LOGE("get picture buffer size failed. err 0x%x", s32Ret);
            std::cout << "RK_MPI_CAL_VDEC_GetPicBufferSize failed " << std::endl;

            return s32Ret;
        }
        std::cout << " stMbPoolCfg.u32MBCnt= " << stMbPoolCfg.u32MBCnt << std::endl;

        stMbPoolCfg.u64MBSize = stMbPicCalResult.u32MBSize;
        stMbPoolCfg.u32MBCnt = 10;
        stMbPoolCfg.enAllocType = MB_ALLOC_TYPE_DMA;
        stMbPoolCfg.enDmaType = MB_DMA_TYPE_NONE;
        stMbPoolCfg.enRemapMode = MB_REMAP_MODE_CACHED;
        stMbPoolCfg.bPreAlloc = RK_TRUE;
        MB_POOL s32Pool = RK_MPI_MB_CreatePool(&stMbPoolCfg);
        if (s32Pool == MB_INVALID_POOLID)
        {
            RK_LOGE("create pool failed!");
            std::cout << "RK_MPI_MB_CreatePool failed " << std::endl;

            return RK_FAILURE;
        }
        s32Ret = RK_MPI_VDEC_AttachMbPool(chn_, s32Pool);
        if (s32Ret != RK_SUCCESS)
        {
            std::cout << "RK_MPI_VDEC_AttachMbPool failed " << std::endl;

            RK_LOGE("attatc vdec mb pool %d failed! ", chn_);
            return s32Ret;
        }
    }

    s32Ret = RK_MPI_VDEC_StartRecvStream(chn_);
    if (s32Ret != RK_SUCCESS)
    {
        RK_LOGE("start recv chn %d failed %x! ", chn_, s32Ret);
        std::cout << "RK_MPI_VDEC_StartRecvStream failed " << std::endl;

        return s32Ret;
    }
    std::cout << "RK_MPI_VDEC_StartRecvStream::RK_MPI_VDEC_StartRecvStream(" << std::endl;

    s32Ret = RK_MPI_VDEC_SetDisplayMode(chn_, VIDEO_DISPLAY_MODE_PLAYBACK);
    if (s32Ret != RK_SUCCESS)
    {
        RK_LOGE("RK_MPI_VDEC_SetDisplayMode failed with %#x!", s32Ret);
        return s32Ret;
    }

    std::cout << "chn_ VDEC SUCESS ----------------- :" << chn_ << std::endl;

    return RK_SUCCESS;
}

Problem 2: video stuttering. This one was genuinely baffling; at first I assumed it was an RK bug. After a lot of debugging the picture did come up, but it would stutter and show corrupted frames at random, and I was close to giving up. With too much other work on my plate it sat for two weeks. Eventually I asked RK; they said that since I was pulling from a camera, frames might be lost on the network, and suggested saving the pulled stream so it could be compared.

Thinking it over: I had already switched the pull to TCP, so would frames really be dropped? Software decoding had no such problem. Infuriating. In the end I decided to simply play a local file instead, and even that took two days of debugging before it played correctly, because when too few channels are initialized at once nothing is displayed at all. I never dug into why; initializing five VO channels each time is enough.

With local video there was no frame corruption, but the stuttering remained.

Then, almost on a whim, I changed playback from two threads to a single thread, and everything worked. I was overjoyed: finally fixed.

Below is the frame-sending example. Attentive readers will already have spotted something: what is that free_stream_Data callback for?

It is, in fact, the real cause of the stuttering. Previously, each time av_read_frame produced an AVPacket I immediately sent its data off for decoding, and right after the send I freed the AVPacket's memory. RK does not consume the data immediately, so by the time the decoder actually read the buffer it had already been freed, which amounts to a dropped frame.

The callback exists precisely so that the memory is freed at the right time. That is why the example code deliberately makes a copy of the data and lets RK release it only after it has finished with it.

bool rk_vdec::send_stream(const char *buf, int size, int64_t pts /* = 0*/)
{

    VDEC_STREAM_S stStream;
    MB_EXT_CONFIG_S pstMbExtConfig;
    MB_BLK buffer = RK_NULL;

    memset(&stStream, 0, sizeof(VDEC_STREAM_S));
    memset(&pstMbExtConfig, 0, sizeof(MB_EXT_CONFIG_S));

    char *data_copy = new char[size];
    memcpy(data_copy, buf, size);

    pstMbExtConfig.pFreeCB = free_stream_Data;
    //printf("send frame 0x%x\n",data_copy);
    pstMbExtConfig.pOpaque = (RK_U8 *)data_copy;
    pstMbExtConfig.pu8VirAddr = (RK_U8 *)data_copy;
    pstMbExtConfig.u64Size = size;
    RK_MPI_SYS_CreateMB(&buffer, &pstMbExtConfig);
    stStream.u64PTS = pts;
    stStream.pMbBlk = buffer;
    stStream.u32Len = size;
    stStream.bEndOfStream = RK_FALSE;
    stStream.bBypassMbBlk = RK_TRUE;

    int ret = RK_MPI_VDEC_SendStream(chn_, &stStream, HI_IO_BLOCK);

    if (ret != RK_SUCCESS)
    {
        //std::cout << "RK_MPI_VDEC_SendStream failed : 0x" << std::hex << ret << std::endl;
        RK_MPI_MB_ReleaseMB(stStream.pMbBlk);
        return false;
    }
    else
    {
        //std::cout << "RK_MPI_VDEC_SendStream SUCCESS 1111111111111111111111111111111" << std::endl;
        RK_MPI_MB_ReleaseMB(stStream.pMbBlk);
    }
    return true;
}
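
The free_stream_Data callback itself is not shown above. A minimal sketch, assuming the callback type declared for MB_EXT_CONFIG_S::pFreeCB in your rockit SDK headers takes the pOpaque pointer and returns RK_S32 (check the header, as the exact signature can vary between SDK drops):

// rockit invokes this only after the decoder has finished reading the buffer,
// so the heap copy made in send_stream() stays valid for as long as it is needed.
static RK_S32 free_stream_Data(void *opaque)
{
    char *data_copy = static_cast<char *>(opaque);
    delete[] data_copy;   // matches the new char[size] in send_stream()
    return RK_SUCCESS;
}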

Finally, a few words on integrating with Qt, focusing on the Rockit approach. The other approaches can simply hand frames to QImage for rendering; it is less efficient, but good enough.

Qt on the RK platform normally runs on Weston, i.e. a Wayland compositor. Many integration methods are floating around for the rockit approach: RGA, RGN overlays, VO send_frame, even colorKey tricks with the video layer on top and the Qt layer underneath combined with a special Qt background colour, and so on.

In my experience, none of those methods work well.

The approach that actually works is this: Qt normally renders on the primary layer, while the rockit pipeline uses a separate layer. The idea is to stack the two layers and make the regions of the Qt surface that don't need to show UI fully transparent, so the rockit layer shows through.

In other words, the Qt primary layer sits on top, with a larger zpos than the rockit display layer.

The Qt primary layer's background must then support an alpha channel. The default weston.ini does not, so an extra setting is required:

[output]
name=LVDS-1
mode=1920x1080@60
gbm-format=argb8888

This alpha format is mandatory; without it the Qt layer stays opaque and the video underneath can never show through. With it, transparent regions of the Qt UI can sit anywhere alongside ordinary widgets, so the Qt interface is effectively overlaid on top of the video layer (a minimal Qt-side sketch follows below).
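
On the Qt side the only requirement is that the top-level window carries an alpha channel and that the video region is painted fully transparent. A minimal sketch using QWidget, assuming a plain widget UI; the class name and the example rectangle are placeholders:

#include <QApplication>
#include <QWidget>
#include <QPainter>

class OverlayWindow : public QWidget
{
public:
    OverlayWindow()
    {
        // Request a per-pixel-alpha surface from the compositor; this only
        // works together with the gbm-format=argb8888 output setting above.
        setAttribute(Qt::WA_TranslucentBackground);
        setAutoFillBackground(false);
    }

protected:
    void paintEvent(QPaintEvent *) override
    {
        QPainter p(this);
        // Paint the window transparent; the rockit VO layer shows through
        // wherever nothing opaque is drawn on top of it.
        p.setCompositionMode(QPainter::CompositionMode_Source);
        p.fillRect(rect(), Qt::transparent);
        // Opaque controls can still be drawn outside the video area, e.g.:
        // p.fillRect(QRect(0, 600, width(), 100), QColor(30, 30, 30));
    }
};

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    OverlayWindow w;
    w.showFullScreen();
    return app.exec();
}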

The idea is simple, but getting it to work cost me a week and a fair amount of hair. In the end I found the way.

My reasoning was: when the video layer sat on top of the Qt layer it also blocked Qt at first, and once the video layer's background colour format was set to RGBA the Qt content became visible, so the reverse arrangement had to be possible too. Working with a colleague on the driver side, digging through driver code, we eventually got it working.

At that point everything seemed to be solved.

Or so I thought, until I found that on first startup the video would show only a few frames and then appear to freeze once Qt came up.

That cost another day. It turned out that Weston, while starting, always interrupts the rockit rendering path.

After asking RK, the fix turned out to be a single environment variable: export rt_vo_disable_vop=0

Which gives you an idea of how many landmines the rockit vendor has buried.

Finally, here is my sample code for initializing the rockit layer and device. If you run into problems, ask in the comments.

void rk_video_player::init_rk_system()
{
    std::cout << "start RK_MPI_SYS_Init" << std::endl;
    RK_U32 s32Ret = RK_MPI_SYS_Init();
    if (s32Ret != RK_SUCCESS)
    {
        std::cout << "RK_MPI_SYS_Init failed" << std::endl;
    }
    else
    {
        std::cout << "RK_MPI_SYS_Init SUCCESS" << std::endl;
    }
    init_device(0);
    init_device_layer_camera(layer_, 0, 32, 896, 552);
    set_qt_layer_priority();
    // init_device_layer_playback(layer2_, 200, 52, 700, 400);
}
bool rk_video_player::init_device(int dev)
{
    //    enable_device(false);
    std::cout << "rk_vo::init_device =" << dev << std::endl;
    RK_S32 s32Ret;
    device_ = dev;
    VO_LAYER VoLayer = layer_;

    VO_PUB_ATTR_S VoPubAttr;
    VO_DEV VoDev = dev;
    memset(&VoPubAttr, 0, sizeof(VO_PUB_ATTR_S));
    std::cout << "RK_MPI_VO_BindLayer :layer_=" << dev << " layer_ " << layer_ << std::endl;
    s32Ret = RK_MPI_VO_BindLayer(VoLayer, VoDev, RK356X_LAYER_MODE);
    if (s32Ret != RK_SUCCESS)
    {
        std::cout << "RK_MPI_VO_BindLayer layer_ failed,s32Ret:" << std::hex << s32Ret << std::endl;
        return false;
    }

    std::cout << "RK_MPI_VO_SetPubAttr::open VoPubAttr.enIntfType =" << VoPubAttr.enIntfType
              << " VoPubAttr.enIntfSync =" << VoPubAttr.enIntfSync << std::endl;
    if (VoPubAttr.enIntfType != VO_INTF_LVDS && VoPubAttr.enIntfSync != VO_OUTPUT_1024x768_60)
    {
        std::cout << "RK_MPI_VO_SetPubAttr VO_INTF_LVDS dev =" << std::hex << s32Ret << std::dec << std::endl;
        VoPubAttr.enIntfType = VO_INTF_LVDS;          // VO_INTF_LVDS;
        VoPubAttr.enIntfSync = VO_OUTPUT_1024x768_60; // VO_OUTPUT_1024x768_60;
        VoPubAttr.u32BgColor = 0x00000000;

        s32Ret = RK_MPI_VO_SetPubAttr(device_, &VoPubAttr);
        if (s32Ret != RK_SUCCESS)
        {
            std::cout << "RK_MPI_VO_SetPubAttr false dev =" << std::hex << s32Ret << std::dec << std::endl;
            return false;
        }
        std::cout << "RK_MPI_VO_SetPubAttr success dev =" << std::hex << s32Ret << std::dec << std::endl;
    }

    std::cout << "RK_MPI_VO_enable_device success ,then RK_MPI_VO_BindLayer";

    std::cout << "enable_device true::open dev =" << dev << " layer_ = " << layer_ << std::endl;
    if (!enable_device(true))
    {
        std::cout << "enable_device failed" << std::endl;
        return false;
    }
    return true;
}

bool rk_video_player::init_device_layer_camera(int layer1, int top_x, int top_y, int width, int height)
{
    RK_S32 s32Ret;
    VO_LAYER VoLayer = layer1;
    s32Ret = RK_MPI_VO_SetLayerPriority(layer1, 0);

    s32Ret = RK_MPI_VO_SetLayerDispBufLen(layer1, 6);
    std::cout << "RK_MPI_VO_SetLayerDispBufLen success" << std::endl;

    std::cout << "RK_MPI_VO_GetLayerAttr :open dev =" << device_ << " layer1 " << layer1 << std::endl;

    VO_VIDEO_LAYER_ATTR_S stLayerAttr;
    s32Ret = RK_MPI_VO_GetLayerAttr(VoLayer, &stLayerAttr);

    stLayerAttr.enCompressMode = COMPRESS_MODE_NONE;
    stLayerAttr.enPixFormat = RK_FMT_RGBA8888;

    stLayerAttr.u32DispFrmRt = 25;

    stLayerAttr.stDispRect.s32X = top_x;
    stLayerAttr.stDispRect.s32Y = top_y;

    stLayerAttr.stDispRect.u32Width = width;
    stLayerAttr.stDispRect.u32Height = height;

    stLayerAttr.stImageSize.u32Width = width;
    stLayerAttr.stImageSize.u32Height = height;
    stLayerAttr.bBypassFrame = RK_TRUE;
    stLayerAttr.bLowDelay = RK_FALSE;

    std::cout << "RK_MPI_VO_SetLayerAttr :width=" << std::dec << width << " height=" << height << std::endl;

    std::cout << "RK_MPI_VO_SetLayerAttr :open dev =" << device_ << " layer1 " << layer1 << std::endl;
    std::cout << "RK_MPI_SYS_Init height:" << height << std::endl;
    s32Ret = RK_MPI_VO_SetLayerAttr(VoLayer, &stLayerAttr);
    if (s32Ret != RK_SUCCESS)
    {
        std::cout << "RK_MPI_VO_SetLayerAttr failed,s32Ret:" << std::hex << s32Ret;
        return false;
    }

    RK_MPI_VO_SetLayerSpliceMode(VoLayer, VO_SPLICE_MODE_GPU);
    // VO_SPLICE_MODE_RGA
    std::cout << "RK_MPI_VO_EnableLayer :open dev =" << device_ << " layer1 " << layer1 << std::endl;

    s32Ret = RK_MPI_VO_EnableLayer(VoLayer);
    if (s32Ret != RK_SUCCESS)
    {
        std::cout << "RK_MPI_VO_EnableLayer failed,s32Ret:0x" << std::hex << s32Ret << std::endl;

        return false;
    }
    VO_CSC_S VideoCSC;
    memset(&VideoCSC, 0, sizeof(VO_CSC_S));
    VideoCSC.enCscMatrix = VO_CSC_MATRIX_IDENTITY;
    VideoCSC.u32Contrast = 50;
    VideoCSC.u32Hue = 50;
    VideoCSC.u32Luma = 50;
    VideoCSC.u32Satuature = 50;

    s32Ret = RK_MPI_VO_SetLayerCSC(VoLayer, &VideoCSC);
    if (s32Ret != RK_SUCCESS)
    {
        std::cout << "RK_MPI_VO_SetLayerCSC failed " << std::hex << s32Ret << std::endl;

        return false;
    }
    std::cout << "RK_MPI_VO_EnableLayer SUCCESS " << std::endl;

    return true;
}
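
The snippet above only sets up the device and layer; decoded frames still have to reach it. One common rockit pattern is to enable a VO channel on the layer and bind the VDEC channel to it with RK_MPI_SYS_Bind. A minimal sketch, with the caveat that bind_vdec_to_vo is a hypothetical helper and only the VO_CHN_ATTR_S fields I am reasonably sure of are set here; check the headers of your SDK:

bool rk_video_player::bind_vdec_to_vo(int vdec_chn, int vo_chn,
                                      int x, int y, int w, int h)
{
    // Place the channel inside the layer configured by init_device_layer_camera().
    VO_CHN_ATTR_S stChnAttr;
    memset(&stChnAttr, 0, sizeof(stChnAttr));
    stChnAttr.stRect.s32X = x;
    stChnAttr.stRect.s32Y = y;
    stChnAttr.stRect.u32Width = w;
    stChnAttr.stRect.u32Height = h;
    stChnAttr.u32Priority = 0;          // remaining fields left at their defaults

    if (RK_MPI_VO_SetChnAttr(layer_, vo_chn, &stChnAttr) != RK_SUCCESS)
        return false;
    if (RK_MPI_VO_EnableChn(layer_, vo_chn) != RK_SUCCESS)
        return false;

    // Bind the decoder output to the VO channel so frames are displayed
    // without an extra copy in application code.
    MPP_CHN_S stSrcChn, stDstChn;
    stSrcChn.enModId = RK_ID_VDEC;
    stSrcChn.s32DevId = 0;
    stSrcChn.s32ChnId = vdec_chn;
    stDstChn.enModId = RK_ID_VO;
    stDstChn.s32DevId = layer_;         // for VO the "device" slot carries the layer
    stDstChn.s32ChnId = vo_chn;
    return RK_MPI_SYS_Bind(&stSrcChn, &stDstChn) == RK_SUCCESS;
}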

A side note:

There is actually a fifth option: use Qt's QMediaPlayer (or the QML video playback element), set the source URL directly to a GStreamer pipeline, and let qtvideosink render straight into Qt/QML. Unfortunately the Qt build shipped with rockit does not include that plugin, so you would have to build it yourself. The NVIDIA Jetson examples are a good reference for this approach.
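
For completeness, a minimal sketch of that fifth option, assuming a Qt 5 build whose multimedia backend is GStreamer (5.12.2 or later for the gst-pipeline: URL scheme) and whose plugin set actually provides the video sink; the RTSP URL is a placeholder and the exact sink element depends on what your Qt/GStreamer build ships:

#include <QApplication>
#include <QMediaPlayer>
#include <QVideoWidget>

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    QVideoWidget video;
    QMediaPlayer player;
    player.setVideoOutput(&video);

    // mppvideodec does the hardware decode; the sink named qtvideosink is what
    // Qt's GStreamer backend looks for when rendering into the QVideoWidget.
    player.setMedia(QUrl("gst-pipeline: rtspsrc location=rtsp://192.168.1.10/stream latency=200 "
                         "! rtph265depay ! h265parse ! mppvideodec ! videoconvert ! qtvideosink"));

    video.show();
    player.play();
    return app.exec();
}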
