Software and hardware decoding with FFmpeg on Android (FFmpeg 3.3.4)

1. Soft decoding implementation:

JNIEXPORT int JNICALL Java_h264_Native_PlayLocalVideo(JNIEnv *env, jobject obj,jstring inputFilePath_,jobject surface) {
    const char *path = env->GetStringUTFChars(inputFilePath_, 0);

    int ret;
    AVFormatContext *fmt_ctx = avformat_alloc_context();
    if (avformat_open_input(&fmt_ctx, path, NULL, NULL) < 0) {
        LOGD("can not open file");
        return -1;

    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env,surface);
    if (nativeWindow == NULL) {
        LOGD("ANativeWindow_fromSurface error");
        return -3;
    //Buffer when drawing
    ANativeWindow_Buffer outBuffer;
    //Get video stream decoder

    AVCodecContext *codec_ctx = avcodec_alloc_context3(NULL);
    codec_ctx->width = 1280;
    codec_ctx->height = 720;
    codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;

    AVCodec *avCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    LOGD("mcodec is %d \n ",avCodec);
    //Open decoder
    if ((ret = avcodec_open2(codec_ctx, avCodec, NULL)) < 0) {
        ret = -3;
        return -4;
    //Loop reads a frame of compressed data from a file
    //Start reading video
    int y_size = codec_ctx->width * codec_ctx->height;
    AVPacket *pkt = (AVPacket *) malloc(sizeof(AVPacket));//Assign a packet
    av_new_packet(pkt, y_size);//Allocate packet data
    AVFrame *yuvFrame = av_frame_alloc();
    AVFrame *rgbFrame = av_frame_alloc();
    // Color converter
    struct SwsContext *m_swsCtx = sws_getContext(codec_ctx->width, codec_ctx->height,
                                                 codec_ctx->pix_fmt, codec_ctx->width,
                                                 codec_ctx->height, AV_PIX_FMT_RGBA, SWS_BICUBIC,
                                                 NULL, NULL, NULL);
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, codec_ctx->width, codec_ctx->height, 1);
    uint8_t *out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    LOGD("Start decoding");
    int index = 0;
    while (1) {
        if (av_read_frame(fmt_ctx, pkt) < 0) {
            //This is the end of the video
        FILE * pFile;
        pFile = fopen("/sdcard/h264.txt", "wb");
        if(pFile != NULL){
            fwrite (pkt->data ,1,pkt->size, pFile);
        LOGD("avcodec_send_packet index is = %d size=%d",index,pkt->size);
        ret = avcodec_send_packet(codec_ctx, pkt);
        if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
            LOGD("avcodec_send_packet ret=%d", ret);
        //Return decoded output data from decoder
        ret = avcodec_receive_frame(codec_ctx, yuvFrame);
        if (ret < 0 && ret != AVERROR_EOF) {
            LOGD("avcodec_receive_frame ret=%d", ret);

        LOGD("frame pkt dts is %lld", yuvFrame->pkt_dts);
        LOGD("frame pkt pts is %lld", yuvFrame->pkt_pts);
        LOGD("frame pkt is %lld", yuvFrame->pts);

        sws_scale(m_swsCtx, (const uint8_t *const *) yuvFrame->data, yuvFrame->linesize, 0,codec_ctx->height, rgbFrame->data, rgbFrame->linesize);
        //Set properties of buffer
        ANativeWindow_setBuffersGeometry(nativeWindow, codec_ctx->width, codec_ctx->height, WINDOW_FORMAT_RGBA_8888);
        ret = ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
        if (ret != 0) {
            LOGD("ANativeWindow_lock error");
            return -5;
        av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize,
                             (const uint8_t *) outBuffer.bits, AV_PIX_FMT_RGBA,
                             codec_ctx->width, codec_ctx->height, 1);
        // Show buffer data to surfaceView
        ret = ANativeWindow_unlockAndPost(nativeWindow);
        if (ret != 0) {
            LOGD("ANativeWindow_unlockAndPost error");
            return -6;
        LOGD("Successfully displayed to buffer%d second", ++index);
        usleep(150000);//stop 1/6 second
    env->ReleaseStringUTFChars(inputFilePath_, path);
    LOGD("Parsing complete");
    return 1;

2. Hardware decoding implementation
Step 1: Modify the FFmpeg configure options to enable hardware decoding, then recompile:

--enable-jni \
--enable-mediacodec \
--enable-decoder=h264_mediacodec \
--enable-hwaccel=h264_mediacodec \

Step 2: Change how the decoder is obtained:
codec = avcodec_find_decoder_by_name("h264_mediacodec");

Using FFmpeg itself is simple enough; the build process is the truly difficult part — it took hundreds of attempts, before and after compilation, to enable NEON and hardware decoding, and roughly three months of spare time to complete the full software and hardware decoding pipeline. Getting single-frame hardware decoding working even required some small modifications to the FFmpeg source. When a call misbehaves, the fastest route to a solution is to read the source code. My configuration should still be usable, but the Ubuntu version, NDK version, and FFmpeg version can each cause problems, so be prepared to work through those yourself. The main takeaway I can offer is that FFmpeg hardware decoding on Android is feasible, including multiple streams decoded concurrently in multiple threads. If you only need to play a single video, none of this is necessary — using Android's MediaCodec hardware decoding directly works fine; in fact, FFmpeg's implementation is itself a JNI reflection of Java's MediaCodec. In real commercial projects, enabling NEON acceleration and multithreaded decoding greatly improves decoding efficiency.

Keywords: Android SurfaceView codec Ubuntu

Added by ericorx on Fri, 01 May 2020 21:12:02 +0300