2

I want to convert the output of the DecodeFrameNoDelay function (the yuvData buffers and bufferInfo) into an OpenCV matrix so that I can use the imshow function to display the frame in a window.

Link to the DecodeFrameNoDelay documentation: https://github.com/cisco/openh264/wiki/ISVCDecoder#decodeframenodelay

Below is the code I use to decode frames with OpenH264:

// OpenH264 decoder instance, created by WelsCreateDecoder() in init().
ISVCDecoder *decoder;
// Output metadata filled in by DecodeFrameNoDelay (iBufferStatus, stride, dimensions).
SBufferInfo bufferInfo;
// Decoder configuration passed to ISVCDecoder::Initialize().
SDecodingParam decodingParam;
// Pointer array for the three YUV planes (Y, U, V) produced by the decoder.
uint8_t** yuvData;

/**
 * Create and initialize the OpenH264 decoder and the YUV plane-pointer array.
 *
 * @param width  Expected frame width (kept for interface compatibility;
 *               the decoder reports the real dimensions in SBufferInfo).
 * @param height Expected frame height (see @p width).
 */
void init(int width, int height) {
    (void)width;   // dimensions come from the decoder at decode time
    (void)height;

    WelsCreateDecoder(&decoder);
    decodingParam = {0};
    decodingParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_AVC;
    decoder->Initialize(&decodingParam);

    // iBufferStatus is an OUTPUT field: the decoder sets it to 1 when a frame
    // is ready. Pre-setting it here has no effect, so just zero the struct.
    bufferInfo = {0};

    // DecodeFrameNoDelay does NOT copy pixels into caller-supplied buffers:
    // it overwrites yuvData[0..2] with pointers to the decoder's internal
    // frame planes. Only the pointer array itself is needed; pre-allocating
    // pixel buffers here (as the original code did) leaks them the moment
    // the decoder replaces the pointers.
    yuvData = new uint8_t*[3]{nullptr, nullptr, nullptr};
}

/**
 * Feed one encoded H.264 unit to the decoder.
 *
 * @param rawEncodedData       Pointer to the encoded bitstream data.
 * @param rawEncodedDataLength Length of @p rawEncodedData in bytes.
 * @param yuvData              3-element array; on success the decoder stores
 *                             pointers to its internal Y/U/V planes here.
 * @param name                 Label used only for the success log line.
 * @return true only when decoding succeeded AND a frame is actually
 *         available in @p yuvData; false otherwise.
 */
bool decode(const unsigned char* rawEncodedData, int rawEncodedDataLength, uint8_t** yuvData, char *name) {
    int err = decoder->DecodeFrameNoDelay(rawEncodedData, rawEncodedDataLength, yuvData, &bufferInfo);

    if (err != 0) {
        std::cout << "H264 decoding failed. Error code: " << err << "." << std::endl;
        parseErrorCode(err);
        return false;
    }

    // err == 0 only means "no decoder error". A frame is actually available
    // only when iBufferStatus == 1 — the decoder may consume input without
    // emitting a picture, in which case yuvData must not be read.
    if (bufferInfo.iBufferStatus != 1) {
        return false;
    }

    printf("H264 decoding success, err: %d, name: %s, status: %d\n", err, name, bufferInfo.iBufferStatus);

    return true;
}

Any help is appreciated.

Thanks and best regards.

Max
  • 81
  • 3
  • You need to create `Mat` object either by constructor version which sets image sizes or with `create()` function, and then copy pixel information from `yuvData` buffers to Mat's `data` buffer plane by plane. Then may be convert it to some other encoding, e.g. RGB, since I'm not sure if OpenCV can do anything except conversion with YUV images... – sklott Jul 26 '19 at 06:51

1 Answer

0

There is a stride parameter that you should take into account when performing the copy. Here is code that decodes a frame into an OpenCV `cv::Mat`:

/**
 * Copy a width x height pixel plane from a strided source into a tightly
 * packed destination (destination row pitch == width).
 *
 * @param dst    Destination buffer of at least width * height bytes.
 * @param src    Source buffer whose rows are @p stride bytes apart.
 * @param width  Number of visible bytes per row to copy.
 * @param height Number of rows.
 * @param stride Source row pitch in bytes (>= width).
 */
inline void copyWithStride(
        void* dst, const void* src,
        size_t width, size_t height, size_t stride
) {
    // Const-correct typed views of the raw buffers; the original C-style
    // cast silently stripped const from src.
    const uint8_t* srcRow = static_cast<const uint8_t*>(src);
    uint8_t* dstRow = static_cast<uint8_t*>(dst);
    for (size_t row = 0; row < height; ++row) {
        std::memcpy(dstRow, srcRow, width);  // copy only the visible pixels
        srcRow += stride;                    // skip source padding
        dstRow += width;                     // destination is packed
    }
}

/**
 * Decode one encoded H.264 unit and convert the resulting frame to a BGR
 * cv::Mat suitable for cv::imshow.
 *
 * Fixes over the original answer: width/height/strides are read from
 * SBufferInfo (they were undefined); the destination Mats are allocated
 * before copying (Mat::data was null); the decoder return code and
 * iBufferStatus are checked; the `imgYUV` typo is corrected.
 *
 * @param rawEncodedData       Pointer to the encoded bitstream data.
 * @param rawEncodedDataLength Length of @p rawEncodedData in bytes.
 * @return BGR image, or an empty Mat on error / when no frame is ready yet.
 */
cv::Mat decode(const unsigned char* rawEncodedData, int rawEncodedDataLength) {
    SBufferInfo sDstBufInfo;
    memset(&sDstBufInfo, 0, sizeof(SBufferInfo));

    // The decoder fills these with pointers to its INTERNAL Y/U/V planes.
    unsigned char *pData[3] = {nullptr, nullptr, nullptr};

    int rv = decoder->DecodeFrameNoDelay(
        rawEncodedData,
        rawEncodedDataLength,
        pData,
        &sDstBufInfo
    );

    // Bail out on decoder error, or when input was consumed but no output
    // picture is available yet (iBufferStatus != 1).
    if (rv != 0 || sDstBufInfo.iBufferStatus != 1) {
        return cv::Mat();
    }

    // Actual frame geometry comes from the decoder, not the caller.
    const int width   = sDstBufInfo.UsrData.sSystemBuffer.iWidth;
    const int height  = sDstBufInfo.UsrData.sSystemBuffer.iHeight;
    const int stride0 = sDstBufInfo.UsrData.sSystemBuffer.iStride[0]; // luma
    const int stride1 = sDstBufInfo.UsrData.sSystemBuffer.iStride[1]; // chroma

    // Allocate destination planes before copying into their data pointers.
    cv::Mat imageYuvCh[3];
    imageYuvCh[0].create(height, width, CV_8UC1);

    // I420: U and V are quarter-size; copy them at half resolution first.
    cv::Mat imageYuvMiniCh[2];
    imageYuvMiniCh[0].create(height / 2, width / 2, CV_8UC1);
    imageYuvMiniCh[1].create(height / 2, width / 2, CV_8UC1);

    copyWithStride(imageYuvCh[0].data,     pData[0], width,     height,     stride0);
    copyWithStride(imageYuvMiniCh[0].data, pData[1], width / 2, height / 2, stride1);
    copyWithStride(imageYuvMiniCh[1].data, pData[2], width / 2, height / 2, stride1);

    // Upsample chroma to full resolution so all three planes can be merged.
    cv::resize(imageYuvMiniCh[0], imageYuvCh[1], cv::Size(width, height));
    cv::resize(imageYuvMiniCh[1], imageYuvCh[2], cv::Size(width, height));

    cv::Mat resultYuv;
    cv::merge(imageYuvCh, 3, resultYuv);

    cv::Mat result;
    cv::cvtColor(resultYuv, result, cv::COLOR_YUV2BGR);
    return result;
}
Fippo
  • 51
  • 1
  • 5