This section is a continuation of «Detailed Explanation of the ijkplayer Player Startup Process (Code Reading)».
We are walking through how the ijkplayer workflow operates. To keep the ffplay workflow clear, we only read parts 2.5, 2.6 and 2.8 of the code. Part 2.0 is mainly the ffplay control-interface thread, which is implemented on top of the SDL event mechanism; we will not expand on that part of the code here.
///>2.6 Create the read_thread() input-data reading thread. Source path: ijkmedia/ijkplayer/ff_ffplay.c

```c
/* this thread gets the stream from the disk or the network */
static int read_thread(void *arg)
{
    FFPlayer *ffp = arg;                    ///> arg = the FFPlayer instance
    VideoState *is = ffp->is;
    AVFormatContext *ic = NULL;
    int err, i, ret __unused;
    int st_index[AVMEDIA_TYPE_NB];
    AVPacket pkt1, *pkt = &pkt1;
    int64_t stream_start_time;
    int completed = 0;
    int pkt_in_play_range = 0;
    AVDictionaryEntry *t;
    SDL_mutex *wait_mutex = SDL_CreateMutex();
    int scan_all_pmts_set = 0;
    int64_t pkt_ts;
    int last_error = 0;
    int64_t prev_io_tick_counter = 0;
    int64_t io_tick_counter = 0;
    int init_ijkmeta = 0;

    if (!wait_mutex) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateMutex(): %s\n", SDL_GetError());
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    memset(st_index, -1, sizeof(st_index));
    is->last_video_stream = is->video_stream = -1;
    is->last_audio_stream = is->audio_stream = -1;
    is->last_subtitle_stream = is->subtitle_stream = -1;
    is->eof = 0;

    ic = avformat_alloc_context();          ///> Create the avformat context
    if (!ic) {
        av_log(NULL, AV_LOG_FATAL, "Could not allocate context.\n");
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    ic->interrupt_callback.callback = decode_interrupt_cb;  ///> Set the decode interrupt callback
    ic->interrupt_callback.opaque = is;
    if (!av_dict_get(ffp->format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
        av_dict_set(&ffp->format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);  ///> Explained in detail in a separate post
        scan_all_pmts_set = 1;
    }
    if (av_stristart(is->filename, "rtmp", NULL) ||
        av_stristart(is->filename, "rtsp", NULL)) {         ///> Parameters specific to rtmp/rtsp data sources
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }
    if (ffp->skip_calc_frame_rate) {                        ///> Configure frame-rate calculation for the player
        av_dict_set_int(&ic->metadata, "skip-calc-frame-rate", ffp->skip_calc_frame_rate, 0);
        av_dict_set_int(&ffp->format_opts, "skip-calc-frame-rate", ffp->skip_calc_frame_rate, 0);
    }
    if (ffp->iformat_name)
        is->iformat = av_find_input_format(ffp->iformat_name);

    err = avformat_open_input(&ic, is->filename, is->iformat, &ffp->format_opts);  ///> Open the data source and probe its container format
    if (err < 0) {
        print_error(is->filename, err);
        ret = -1;
        goto fail;
    }
    ffp_notify_msg1(ffp, FFP_MSG_OPEN_INPUT);               ///> Notify the app layer that the input is open

    if (scan_all_pmts_set)
        av_dict_set(&ffp->format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE);

    if ((t = av_dict_get(ffp->format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
        av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
#ifdef FFP_MERGE
        ret = AVERROR_OPTION_NOT_FOUND;
        goto fail;
#endif
    }
    is->ic = ic;

    if (ffp->genpts)
        ic->flags |= AVFMT_FLAG_GENPTS;     ///> Generate missing pts even if it requires parsing future frames.
```
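The interrupt callback set above is what lets ijkplayer abort a blocking open or read. As a minimal standalone sketch of this standard FFmpeg pattern (the `abort_requested` flag is a hypothetical stand-in for ijkplayer's abort state, not its actual code):

```c
#include <libavformat/avformat.h>

static volatile int abort_requested = 0;  // hypothetical flag, set by the UI thread

// Returning non-zero makes any blocking avformat I/O call fail with AVERROR_EXIT.
static int interrupt_cb(void *opaque)
{
    (void)opaque;
    return abort_requested;
}

static int open_source(const char *url)
{
    AVFormatContext *ic = avformat_alloc_context();
    if (!ic)
        return AVERROR(ENOMEM);
    ic->interrupt_callback.callback = interrupt_cb;
    ic->interrupt_callback.opaque = NULL;

    int err = avformat_open_input(&ic, url, NULL, NULL);  // may block on network I/O
    if (err < 0)
        return err;                 // avformat_open_input frees ic on failure
    avformat_close_input(&ic);
    return 0;
}
```

Without the callback, a stalled network source could block `avformat_open_input()` indefinitely and the player could never shut the read thread down cleanly.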
```c
    av_format_inject_global_side_data(ic);

    //AVDictionary **opts;
    //int orig_nb_streams;
    //opts = setup_find_stream_info_opts(ic, ffp->codec_opts);
    //orig_nb_streams = ic->nb_streams;
    if (ffp->find_stream_info) {   ///> Probe the streams; codec_opts supplies per-codec options for formats such as flv and MPEG
        AVDictionary **opts = setup_find_stream_info_opts(ic, ffp->codec_opts);
        int orig_nb_streams = ic->nb_streams;

        do {
            if (av_stristart(is->filename, "data:", NULL) && orig_nb_streams > 0) {
                for (i = 0; i < orig_nb_streams; i++) {
                    if (!ic->streams[i] || !ic->streams[i]->codecpar ||
                        ic->streams[i]->codecpar->profile == FF_PROFILE_UNKNOWN) {
                        break;
                    }
                }
                if (i == orig_nb_streams) {
                    break;
                }
            }
            err = avformat_find_stream_info(ic, opts);  ///> The key probing call
        } while (0);
        ffp_notify_msg1(ffp, FFP_MSG_FIND_STREAM_INFO); ///> Notify the app layer that stream info has been found

        for (i = 0; i < orig_nb_streams; i++)
            av_dict_free(&opts[i]);
        av_freep(&opts);

        if (err < 0) {
            av_log(NULL, AV_LOG_WARNING, "%s: could not find codec parameters\n", is->filename);
            ret = -1;
            goto fail;
        }
    }

    if (ic->pb)
        ic->pb->eof_reached = 0; // FIXME hack, ffplay maybe should not use avio_feof() to test for the end

    if (ffp->seek_by_bytes < 0)
        ffp->seek_by_bytes = !!(ic->iformat->flags & AVFMT_TS_DISCONT) && strcmp("ogg", ic->iformat->name);

    is->max_frame_duration = (ic->iformat->flags & AVFMT_TS_DISCONT) ? 10.0 : 3600.0;
    is->max_frame_duration = 10.0;
    av_log(ffp, AV_LOG_INFO, "max_frame_duration: %.3f\n", is->max_frame_duration);

#ifdef FFP_MERGE
    if (!window_title && (t = av_dict_get(ic->metadata, "title", NULL, 0)))
        window_title = av_asprintf("%s - %s", t->value, input_filename);
#endif

    /* if seeking requested, we execute it */
    if (ffp->start_time != AV_NOPTS_VALUE) {
        int64_t timestamp;

        timestamp = ffp->start_time;
        /* add the stream start time */
        if (ic->start_time != AV_NOPTS_VALUE)
            timestamp += ic->start_time;
        ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
        if (ret < 0) {
            av_log(NULL, AV_LOG_WARNING, "%s: could not seek to position %0.3f\n",
                   is->filename, (double)timestamp / AV_TIME_BASE);
        }
    }

    is->realtime = is_realtime(ic);

    av_dump_format(ic, 0, is->filename, 0);

    int video_stream_count = 0;
    int h264_stream_count = 0;
    int first_h264_stream = -1;
    for (i = 0; i < ic->nb_streams; i++) {
        AVStream *st = ic->streams[i];
        enum AVMediaType type = st->codecpar->codec_type;
        st->discard = AVDISCARD_ALL;
        if (type >= 0 && ffp->wanted_stream_spec[type] && st_index[type] == -1)
            if (avformat_match_stream_specifier(ic, st, ffp->wanted_stream_spec[type]) > 0)
                st_index[type] = i;

        // choose first h264
        if (type == AVMEDIA_TYPE_VIDEO) {
            enum AVCodecID codec_id = st->codecpar->codec_id;
            video_stream_count++;
            if (codec_id == AV_CODEC_ID_H264) {
                h264_stream_count++;
                if (first_h264_stream < 0)
                    first_h264_stream = i;
            }
        }
    }
    if (video_stream_count > 1 && st_index[AVMEDIA_TYPE_VIDEO] < 0) {
        st_index[AVMEDIA_TYPE_VIDEO] = first_h264_stream;
        av_log(NULL, AV_LOG_WARNING, "multiple video stream found, prefer first h264 stream: %d\n", first_h264_stream);
    }
    if (!ffp->video_disable)   ///> Pick the best video stream for the player
        st_index[AVMEDIA_TYPE_VIDEO] =
            av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
                                st_index[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
    if (!ffp->audio_disable)   ///> Pick the best audio stream for the player
        st_index[AVMEDIA_TYPE_AUDIO] =
            av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
                                st_index[AVMEDIA_TYPE_AUDIO],
                                st_index[AVMEDIA_TYPE_VIDEO],
                                NULL, 0);
    if (!ffp->video_disable && !ffp->subtitle_disable)   ///> Pick the subtitle stream
        st_index[AVMEDIA_TYPE_SUBTITLE] =
            av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
                                st_index[AVMEDIA_TYPE_SUBTITLE],
                                (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
                                 st_index[AVMEDIA_TYPE_AUDIO] :
                                 st_index[AVMEDIA_TYPE_VIDEO]),
                                NULL, 0);
```
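`avformat_find_stream_info()` and `av_find_best_stream()` are stock FFmpeg API. A minimal sketch of the probe-then-select flow used above, with ijkplayer's extra preferences stripped away:

```c
#include <libavformat/avformat.h>

// Returns the selected video stream index, or a negative AVERROR code.
static int pick_video_stream(AVFormatContext *ic)
{
    int err = avformat_find_stream_info(ic, NULL);  // fills codecpar for each stream
    if (err < 0)
        return err;
    // -1, -1: no preferred stream, no related stream; NULL: decoder not needed back
    return av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
}
```

ijkplayer layers two things on top of this: a user-supplied stream specifier (`wanted_stream_spec`), and a "prefer the first H.264 stream" rule when a file carries several video streams.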
```c
    is->show_mode = ffp->show_mode;
#ifdef FFP_MERGE // bbc: dunno if we need this
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
        AVStream *st = ic->streams[st_index[AVMEDIA_TYPE_VIDEO]];
        AVCodecParameters *codecpar = st->codecpar;
        AVRational sar = av_guess_sample_aspect_ratio(ic, st, NULL);
        if (codecpar->width)
            set_default_window_size(codecpar->width, codecpar->height, sar);
    }
#endif

    /* open the streams */
    if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {    ///> If an audio stream exists, open it
        stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]);
    } else {
        ffp->av_sync_type = AV_SYNC_VIDEO_MASTER;
        is->av_sync_type  = ffp->av_sync_type;
    }

    ret = -1;
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {    ///> If a video stream exists, open it
        ret = stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]);
    }
    if (is->show_mode == SHOW_MODE_NONE)
        is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;

    if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) { ///> If a subtitle stream exists, open it
        stream_component_open(ffp, st_index[AVMEDIA_TYPE_SUBTITLE]);
    }
    ffp_notify_msg1(ffp, FFP_MSG_COMPONENT_OPEN);

    if (!ffp->ijkmeta_delay_init) {
        ijkmeta_set_avformat_context_l(ffp->meta, ic);  ///> Publish playback parameters and subtitle language metadata
    }
    ffp->stat.bit_rate = ic->bit_rate;                  ///> Record the stream bit rate
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0)
        ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_VIDEO_STREAM, st_index[AVMEDIA_TYPE_VIDEO]);
    if (st_index[AVMEDIA_TYPE_AUDIO] >= 0)
        ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_AUDIO_STREAM, st_index[AVMEDIA_TYPE_AUDIO]);
    if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0)
        ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_TIMEDTEXT_STREAM, st_index[AVMEDIA_TYPE_SUBTITLE]);

    if (is->video_stream < 0 && is->audio_stream < 0) {
        av_log(NULL, AV_LOG_FATAL, "Failed to open file '%s' or configure filtergraph\n", is->filename);
        ret = -1;
        goto fail;
    }

    if (is->audio_stream >= 0) {    ///> Choose the buffer-indicator queue
        is->audioq.is_buffer_indicator = 1;
        is->buffer_indicator_queue = &is->audioq;
    } else if (is->video_stream >= 0) {
        is->videoq.is_buffer_indicator = 1;
        is->buffer_indicator_queue = &is->videoq;
    } else {
        assert("invalid streams");
    }

    if (ffp->infinite_buffer < 0 && is->realtime)
        ffp->infinite_buffer = 1;

    if (!ffp->render_wait_start && !ffp->start_on_prepared)
        toggle_pause(ffp, 1);
    if (is->video_st && is->video_st->codecpar) {
        AVCodecParameters *codecpar = is->video_st->codecpar;
        ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, codecpar->width, codecpar->height);
        ffp_notify_msg3(ffp, FFP_MSG_SAR_CHANGED, codecpar->sample_aspect_ratio.num, codecpar->sample_aspect_ratio.den);
    }
    ffp->prepared = true;
    ffp_notify_msg1(ffp, FFP_MSG_PREPARED);     ///> Notify that the player is prepared
    if (!ffp->render_wait_start && !ffp->start_on_prepared) {
        while (is->pause_req && !is->abort_request) {
            SDL_Delay(20);
        }
    }
    if (ffp->auto_resume) {
        ffp_notify_msg1(ffp, FFP_REQ_START);
        ffp->auto_resume = 0;
    }
    /* offset should be seeked */
    if (ffp->seek_at_start > 0) {
        ffp_seek_to_l(ffp, (long)(ffp->seek_at_start));
    }
```
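`ffp_seek_to_l()` above eventually bottoms out in `avformat_seek_file()`, the same call used earlier for the `start_time` seek. A minimal sketch of seeking a whole file to an offset given in milliseconds (the millisecond input is an assumption; ijkplayer's wrappers add clamping and app-layer messaging on top):

```c
#include <libavformat/avformat.h>

// stream_index -1 means the timestamps are in AV_TIME_BASE (microsecond) units.
static int seek_to_msec(AVFormatContext *ic, int64_t msec)
{
    int64_t ts = msec * (AV_TIME_BASE / 1000);  // ms -> microseconds
    if (ic->start_time != AV_NOPTS_VALUE)
        ts += ic->start_time;                   // offsets are relative to stream start
    return avformat_seek_file(ic, -1, INT64_MIN, ts, INT64_MAX, 0);
}
```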
```c
    ///> The loop body of the read thread
    for (;;) {
        if (is->abort_request)
            break;
#ifdef FFP_MERGE
        if (is->paused != is->last_paused) {
            is->last_paused = is->paused;
            if (is->paused)
                is->read_pause_return = av_read_pause(ic);
            else
                av_read_play(ic);
        }
#endif
#if CONFIG_RTSP_DEMUXER || CONFIG_MMSH_PROTOCOL
        if (is->paused &&
                (!strcmp(ic->iformat->name, "rtsp") ||
                 (ic->pb && !strncmp(ffp->input_filename, "mmsh:", 5)))) {
            /* wait 10 ms to avoid trying to get another packet */
            /* XXX: horrible */
            SDL_Delay(10);
            continue;
        }
#endif
        ///> 1. Handle a pending seek request
        if (is->seek_req) {
            int64_t seek_target = is->seek_pos;
            int64_t seek_min = is->seek_rel > 0 ? seek_target - is->seek_rel + 2 : INT64_MIN;
            int64_t seek_max = is->seek_rel < 0 ? seek_target - is->seek_rel - 2 : INT64_MAX;
            // FIXME the +-2 is due to rounding being not done in the correct direction in generation
            //       of the seek_pos/seek_rel variables

            ffp_toggle_buffering(ffp, 1);       ///> Notify a buffering update
            ffp_notify_msg3(ffp, FFP_MSG_BUFFERING_UPDATE, 0, 0);
            ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
            if (ret < 0) {
                av_log(NULL, AV_LOG_ERROR, "%s: error while seeking\n", is->ic->filename);
            } else {
                if (is->audio_stream >= 0) {
                    packet_queue_flush(&is->audioq);
                    packet_queue_put(&is->audioq, &flush_pkt);
                    // TODO: clear invalid audio data
                    // SDL_AoutFlushAudio(ffp->aout);
                }
                if (is->subtitle_stream >= 0) {
                    packet_queue_flush(&is->subtitleq);
                    packet_queue_put(&is->subtitleq, &flush_pkt);
                }
                if (is->video_stream >= 0) {
                    if (ffp->node_vdec) {
                        ffpipenode_flush(ffp->node_vdec);
                    }
                    packet_queue_flush(&is->videoq);
                    packet_queue_put(&is->videoq, &flush_pkt);
                }
                if (is->seek_flags & AVSEEK_FLAG_BYTE) {
                    set_clock(&is->extclk, NAN, 0);
                } else {
                    set_clock(&is->extclk, seek_target / (double)AV_TIME_BASE, 0);
                }
                is->latest_video_seek_load_serial = is->videoq.serial;
                is->latest_audio_seek_load_serial = is->audioq.serial;
                is->latest_seek_load_start_at = av_gettime();
            }
            ffp->dcc.current_high_water_mark_in_ms = ffp->dcc.first_high_water_mark_in_ms;
            is->seek_req = 0;
            is->queue_attachments_req = 1;
            is->eof = 0;
#ifdef FFP_MERGE
            if (is->paused)
                step_to_next_frame(is);
#endif
            completed = 0;
            SDL_LockMutex(ffp->is->play_mutex);
            if (ffp->auto_resume) {
                is->pause_req = 0;
                if (ffp->packet_buffering)
                    is->buffering_on = 1;
                ffp->auto_resume = 0;
                stream_update_pause_l(ffp);
            }
            if (is->pause_req)
                step_to_next_frame_l(ffp);
            SDL_UnlockMutex(ffp->is->play_mutex);

            if (ffp->enable_accurate_seek) {
                is->drop_aframe_count = 0;
                is->drop_vframe_count = 0;
                SDL_LockMutex(is->accurate_seek_mutex);
                if (is->video_stream >= 0) {
                    is->video_accurate_seek_req = 1;
                }
                if (is->audio_stream >= 0) {
                    is->audio_accurate_seek_req = 1;
                }
                SDL_CondSignal(is->audio_accurate_seek_cond);
                SDL_CondSignal(is->video_accurate_seek_cond);
                SDL_UnlockMutex(is->accurate_seek_mutex);
            }

            ffp_notify_msg3(ffp, FFP_MSG_SEEK_COMPLETE, (int)fftime_to_milliseconds(seek_target), ret);
            ffp_toggle_buffering(ffp, 1);
        }
        ///> 2. Handle an attached-picture queue request
        if (is->queue_attachments_req) {
            if (is->video_st && (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC)) {
                AVPacket copy = { 0 };
                if ((ret = av_packet_ref(&copy, &is->video_st->attached_pic)) < 0)
                    goto fail;
                packet_queue_put(&is->videoq, &copy);
                packet_queue_put_nullpacket(&is->videoq, is->video_stream);
            }
            is->queue_attachments_req = 0;
        }
```
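The flush logic in step 1 above relies on ffplay's "serial" pattern: flushing a queue bumps its generation counter, and pushing the special `flush_pkt` tells the decoder to reset and adopt the new serial. A condensed sketch of the idea (simplified types, not ijkplayer's actual queue; real ffplay also stores the serial alongside each queued packet so stale data can be recognized and dropped):

```c
#include <libavcodec/avcodec.h>

#define QCAP 256

typedef struct MyPacketQueue {    // simplified stand-in for ffplay's PacketQueue
    AVPacket pkts[QCAP];
    int head, count;
    int serial;                   // generation counter, bumped on every flush
} MyPacketQueue;

// Seek path: drop everything buffered and start a new generation.
// Decoders that pop packets from an older generation discard them
// instead of decoding leftovers from before the seek.
static void my_queue_flush(MyPacketQueue *q)
{
    for (int i = 0; i < q->count; i++)
        av_packet_unref(&q->pkts[(q->head + i) % QCAP]);
    q->head = 0;
    q->count = 0;
    q->serial++;
}
```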
```c
        ///> 3. If the queues are full, there is no need to read more
        if (ffp->infinite_buffer < 1 && !is->seek_req &&
#ifdef FFP_MERGE
              (is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
#else
              (is->audioq.size + is->videoq.size + is->subtitleq.size > ffp->dcc.max_buffer_size
#endif
            || (   stream_has_enough_packets(is->audio_st,    is->audio_stream,    &is->audioq,    MIN_FRAMES)
                && stream_has_enough_packets(is->video_st,    is->video_stream,    &is->videoq,    MIN_FRAMES)
                && stream_has_enough_packets(is->subtitle_st, is->subtitle_stream, &is->subtitleq, MIN_FRAMES)))) {
            if (!is->eof) {
                ffp_toggle_buffering(ffp, 0);
            }
            /* wait 10 ms */
            SDL_LockMutex(wait_mutex);
            SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10);
            SDL_UnlockMutex(wait_mutex);
            continue;
        }
        ///> 4. Judge whether playback has completed and whether to exit
        if ((!is->paused || completed) &&
            (!is->audio_st || (is->auddec.finished == is->audioq.serial && frame_queue_nb_remaining(&is->sampq) == 0)) &&
            (!is->video_st || (is->viddec.finished == is->videoq.serial && frame_queue_nb_remaining(&is->pictq) == 0))) {
            if (ffp->loop != 1 && (!ffp->loop || --ffp->loop)) {
                stream_seek(is, ffp->start_time != AV_NOPTS_VALUE ? ffp->start_time : 0, 0, 0);
            } else if (ffp->autoexit) {
                ret = AVERROR_EOF;
                goto fail;
            } else {
                ffp_statistic_l(ffp);
                if (completed) {
                    av_log(ffp, AV_LOG_INFO, "ffp_toggle_buffering: eof\n");
                    SDL_LockMutex(wait_mutex);
                    // infinite wait may block shutdown
                    while (!is->abort_request && !is->seek_req)
                        SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 100);
                    SDL_UnlockMutex(wait_mutex);
                    if (!is->abort_request)
                        continue;
                } else {
                    completed = 1;
                    ffp->auto_resume = 0;

                    // TODO: 0 it's a bit early to notify complete here
                    ffp_toggle_buffering(ffp, 0);
                    toggle_pause(ffp, 1);
                    if (ffp->error) {
                        av_log(ffp, AV_LOG_INFO, "ffp_toggle_buffering: error: %d\n", ffp->error);
                        ffp_notify_msg1(ffp, FFP_MSG_ERROR);
                    } else {
                        av_log(ffp, AV_LOG_INFO, "ffp_toggle_buffering: completed: OK\n");
                        ffp_notify_msg1(ffp, FFP_MSG_COMPLETED);
                    }
                }
            }
        }
```
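Step 3 is a classic bounded-producer pattern: instead of busy-looping when the queues are full, the read thread sleeps on a condition variable with a 10 ms timeout, and the decoders signal it whenever a queue drains. A minimal sketch with SDL2 primitives (`queue_is_full()` is a hypothetical placeholder; ijksdl's `SDL_CondWaitTimeout` mirrors SDL2's):

```c
#include <SDL2/SDL.h>

static SDL_mutex *wait_mutex;
static SDL_cond  *continue_read;

static int queue_is_full(void) { return 0; }  // hypothetical placeholder

static void throttle_producer(void)
{
    while (queue_is_full()) {
        SDL_LockMutex(wait_mutex);
        // Wake early if a consumer signals, otherwise re-check every 10 ms.
        SDL_CondWaitTimeout(continue_read, wait_mutex, 10);
        SDL_UnlockMutex(wait_mutex);
    }
}
```

The timeout matters: even if a signal is lost, the thread still re-checks the abort and seek flags every 10 ms, so shutdown can never deadlock here.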
```c
        ///> 5. Read the audio/video data stream
        pkt->flags = 0;
        ret = av_read_frame(ic, pkt);   ///> Reads one packet, whether from a local file or from the network.
                                        ///> The protocol-specific work happens inside read_frame_internal();
                                        ///> this call is the key entrance!
        if (ret < 0) {
            int pb_eof = 0;
            int pb_error = 0;
            if ((ret == AVERROR_EOF || avio_feof(ic->pb)) && !is->eof) {
                ffp_check_buffering_l(ffp);
                pb_eof = 1;
                // check error later
            }
            if (ic->pb && ic->pb->error) {
                pb_eof = 1;
                pb_error = ic->pb->error;
            }
            if (ret == AVERROR_EXIT) {
                pb_eof = 1;
                pb_error = AVERROR_EXIT;
            }

            if (pb_eof) {
                if (is->video_stream >= 0)
                    packet_queue_put_nullpacket(&is->videoq, is->video_stream);
                if (is->audio_stream >= 0)
                    packet_queue_put_nullpacket(&is->audioq, is->audio_stream);
                if (is->subtitle_stream >= 0)
                    packet_queue_put_nullpacket(&is->subtitleq, is->subtitle_stream);
                is->eof = 1;
            }
            if (pb_error) {
                if (is->video_stream >= 0)
                    packet_queue_put_nullpacket(&is->videoq, is->video_stream);
                if (is->audio_stream >= 0)
                    packet_queue_put_nullpacket(&is->audioq, is->audio_stream);
                if (is->subtitle_stream >= 0)
                    packet_queue_put_nullpacket(&is->subtitleq, is->subtitle_stream);
                is->eof = 1;
                ffp->error = pb_error;
                av_log(ffp, AV_LOG_ERROR, "av_read_frame error: %s\n", ffp_get_error_string(ffp->error));
                // break;
            } else {
                ffp->error = 0;
            }
            if (is->eof) {
                ffp_toggle_buffering(ffp, 0);
                SDL_Delay(100);
            }
            SDL_LockMutex(wait_mutex);
            SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10);
            SDL_UnlockMutex(wait_mutex);
            ffp_statistic_l(ffp);
            continue;
        } else {
            is->eof = 0;
        }

        ///> 6. If discontinuous frames appear, put a flush_pkt into the data queues
        if (pkt->flags & AV_PKT_FLAG_DISCONTINUITY) {
            if (is->audio_stream >= 0) {
                packet_queue_put(&is->audioq, &flush_pkt);
            }
            if (is->subtitle_stream >= 0) {
                packet_queue_put(&is->subtitleq, &flush_pkt);
            }
            if (is->video_stream >= 0) {
                packet_queue_put(&is->videoq, &flush_pkt);
            }
        }

        ///> 7. Check if the packet is in the play range specified by the user; if so queue it, otherwise discard it
        stream_start_time = ic->streams[pkt->stream_index]->start_time;
        pkt_ts = pkt->pts == AV_NOPTS_VALUE ? pkt->dts : pkt->pts;
        pkt_in_play_range = ffp->duration == AV_NOPTS_VALUE ||
                (pkt_ts - (stream_start_time != AV_NOPTS_VALUE ? stream_start_time : 0)) *
                av_q2d(ic->streams[pkt->stream_index]->time_base) -
                (double)(ffp->start_time != AV_NOPTS_VALUE ? ffp->start_time : 0) / 1000000
                <= ((double)ffp->duration / 1000000);
        if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {   ///> In-range packets go to their queue
            packet_queue_put(&is->audioq, pkt);
        } else if (pkt->stream_index == is->video_stream && pkt_in_play_range
                   && !(is->video_st && (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC))) {
            packet_queue_put(&is->videoq, pkt);
        } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
            packet_queue_put(&is->subtitleq, pkt);
        } else {
            av_packet_unref(pkt);
        }

        ///> 8. Update queue/cache statistics
        ffp_statistic_l(ffp);

        ///> 9. Publish the audio/video context parameters (decoder type and working parameters) once the first frames are out
        if (ffp->ijkmeta_delay_init && !init_ijkmeta &&
                (ffp->first_video_frame_rendered || !is->video_st) &&
                (ffp->first_audio_frame_rendered || !is->audio_st)) {
            ijkmeta_set_avformat_context_l(ffp->meta, ic);
            init_ijkmeta = 1;
        }
```
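The step 7 expression just converts the packet timestamp to seconds via the stream's time base and compares it against the user's start/duration window. A worked sketch of the same arithmetic (the 1/90000 time base is an assumption, typical of MPEG-TS):

```c
#include <libavutil/rational.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    AVRational time_base = { 1, 90000 };       // assumed MPEG-TS style time base
    int64_t pkt_ts       = 2700000;            // packet pts in stream time-base units
    int64_t stream_start = 900000;             // stream start_time in the same units
    double start_s = 10.0, duration_s = 30.0;  // user-requested play window

    // Same shape as the pkt_in_play_range expression in read_thread:
    // (2700000 - 900000) * (1/90000) = 20.0 seconds into the stream.
    double pos_s = (pkt_ts - stream_start) * av_q2d(time_base);
    int in_range = (pos_s - start_s) <= duration_s;
    printf("pos=%.1fs in_range=%d\n", pos_s, in_range);  // pos=20.0s in_range=1
    return 0;
}
```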
```c
        ///> 10. Track the interval between packets for buffering control
        if (ffp->packet_buffering) {
            io_tick_counter = SDL_GetTickHR();
            if ((!ffp->first_video_frame_rendered && is->video_st) ||
                (!ffp->first_audio_frame_rendered && is->audio_st)) {
                if (abs((int)(io_tick_counter - prev_io_tick_counter)) > FAST_BUFFERING_CHECK_PER_MILLISECONDS) {
                    prev_io_tick_counter = io_tick_counter;
                    ffp->dcc.current_high_water_mark_in_ms = ffp->dcc.first_high_water_mark_in_ms;
                    ffp_check_buffering_l(ffp);
                }
            } else {
                if (abs((int)(io_tick_counter - prev_io_tick_counter)) > BUFFERING_CHECK_PER_MILLISECONDS) {
                    prev_io_tick_counter = io_tick_counter;
                    ffp_check_buffering_l(ffp);
                }
            }
        }
    }

    ret = 0;
 fail:
    if (ic && !is->ic)
        avformat_close_input(&ic);

    if (!ffp->prepared || !is->abort_request) {
        ffp->last_error = last_error;
        ffp_notify_msg2(ffp, FFP_MSG_ERROR, last_error);
    }
    SDL_DestroyMutex(wait_mutex);
    return 0;
}
```
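Stripped of seeking, buffering control and statistics, the skeleton of read_thread() is the standard FFmpeg demux loop. A minimal, self-contained sketch (`enqueue_packet()` is a hypothetical consumer; real code must unref every packet it does not keep):

```c
#include <libavformat/avformat.h>

static void enqueue_packet(AVPacket *pkt) { av_packet_unref(pkt); }  // hypothetical consumer

static int demux_loop(AVFormatContext *ic, int video_index)
{
    AVPacket *pkt = av_packet_alloc();
    if (!pkt)
        return AVERROR(ENOMEM);

    int ret;
    for (;;) {
        ret = av_read_frame(ic, pkt);       // blocks until one packet is demuxed
        if (ret < 0)
            break;                          // AVERROR_EOF at end of stream, or a real error
        if (pkt->stream_index == video_index)
            enqueue_packet(pkt);            // consumer takes over / unrefs the payload
        else
            av_packet_unref(pkt);           // discard streams we don't play
    }
    av_packet_free(&pkt);
    return ret == AVERROR_EOF ? 0 : ret;
}
```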
Video refresh code
///>2.5 Create the video_refresh_thread() video refresh thread

```c
static int video_refresh_thread(void *arg)
{
    FFPlayer *ffp = arg;
    VideoState *is = ffp->is;
    double remaining_time = 0.0;
    while (!is->abort_request) {
        if (remaining_time > 0.0)
            av_usleep((int)(int64_t)(remaining_time * 1000000.0));
        remaining_time = REFRESH_RATE;
        if (is->show_mode != SHOW_MODE_NONE && (!is->paused || is->force_refresh))
            video_refresh(ffp, &remaining_time);
    }
    return 0;
}

/* called to display each frame */
static void video_refresh(FFPlayer *opaque, double *remaining_time)
{
    FFPlayer *ffp = opaque;
    VideoState *is = ffp->is;
    double time;

    Frame *sp, *sp2;

    if (!is->paused && get_master_sync_type(is) == AV_SYNC_EXTERNAL_CLOCK && is->realtime)
        check_external_clock_speed(is);

    if (!ffp->display_disable && is->show_mode != SHOW_MODE_VIDEO && is->audio_st) {
        time = av_gettime_relative() / 1000000.0;
        if (is->force_refresh || is->last_vis_time + ffp->rdftspeed < time) {
            video_display2(ffp);            ///> 2.5.1 Refresh the video
            is->last_vis_time = time;
        }
        *remaining_time = FFMIN(*remaining_time, is->last_vis_time + ffp->rdftspeed - time);
    }

    if (is->video_st) {
retry:
        if (frame_queue_nb_remaining(&is->pictq) == 0) {
            // nothing to do, no picture to display in the queue
        } else {
            double last_duration, duration, delay;
            Frame *vp, *lastvp;

            /* dequeue the picture */
            lastvp = frame_queue_peek_last(&is->pictq);
            vp = frame_queue_peek(&is->pictq);

            if (vp->serial != is->videoq.serial) {
                frame_queue_next(&is->pictq);
                goto retry;
            }

            if (lastvp->serial != vp->serial)
                is->frame_timer = av_gettime_relative() / 1000000.0;

            if (is->paused)
                goto display;

            /* compute nominal last_duration */
            last_duration = vp_duration(is, lastvp, vp);
            delay = compute_target_delay(ffp, last_duration, is);

            time = av_gettime_relative() / 1000000.0;
            if (isnan(is->frame_timer) || time < is->frame_timer)
                is->frame_timer = time;
            if (time < is->frame_timer + delay) {
                *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
                goto display;
            }

            is->frame_timer += delay;
            if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX)
                is->frame_timer = time;

            SDL_LockMutex(is->pictq.mutex);
            if (!isnan(vp->pts))
                update_video_pts(is, vp->pts, vp->pos, vp->serial);  ///> Refresh the video PTS clock
            SDL_UnlockMutex(is->pictq.mutex);

            if (frame_queue_nb_remaining(&is->pictq) > 1) {
                Frame *nextvp = frame_queue_peek_next(&is->pictq);
                duration = vp_duration(is, vp, nextvp);
                if (!is->step &&
                    (ffp->framedrop > 0 || (ffp->framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) &&
                    time > is->frame_timer + duration) {
                    frame_queue_next(&is->pictq);
                    goto retry;
                }
            }

            if (is->subtitle_st) {          ///> Refresh subtitles
                while (frame_queue_nb_remaining(&is->subpq) > 0) {
                    sp = frame_queue_peek(&is->subpq);

                    if (frame_queue_nb_remaining(&is->subpq) > 1)
                        sp2 = frame_queue_peek_next(&is->subpq);
                    else
                        sp2 = NULL;

                    if (sp->serial != is->subtitleq.serial
                            || (is->vidclk.pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
                            || (sp2 && is->vidclk.pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000)))) {
                        if (sp->uploaded) {
                            ffp_notify_msg4(ffp, FFP_MSG_TIMED_TEXT, 0, 0, "", 1);
                        }
                        frame_queue_next(&is->subpq);
                    } else {
                        break;
                    }
                }
            }

            frame_queue_next(&is->pictq);
            is->force_refresh = 1;

            SDL_LockMutex(ffp->is->play_mutex);
            if (is->step) {
                is->step = 0;
                if (!is->paused)
                    stream_update_pause_l(ffp);
            }
            SDL_UnlockMutex(ffp->is->play_mutex);
        }
display:
        /* display picture */
        if (!ffp->display_disable && is->force_refresh && is->show_mode == SHOW_MODE_VIDEO && is->pictq.rindex_shown)
            video_display2(ffp);
    }
    is->force_refresh = 0;
```
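compute_target_delay() above is where A/V sync actually happens: the nominal frame duration is stretched or shrunk according to how far the video clock drifts from the master clock. A condensed sketch of ffplay's rule (clock plumbing and the frame-duplication branch omitted; `diff` is the video clock minus the master clock, in seconds):

```c
#include <math.h>

#define SYNC_THRESHOLD_MIN 0.04   /* ffplay's AV_SYNC_THRESHOLD_MIN */
#define SYNC_THRESHOLD_MAX 0.1    /* ffplay's AV_SYNC_THRESHOLD_MAX */

// delay: nominal duration of the last frame; returns the adjusted wait time.
static double target_delay(double delay, double diff, double max_frame_duration)
{
    double th = fmax(SYNC_THRESHOLD_MIN, fmin(SYNC_THRESHOLD_MAX, delay));
    if (!isnan(diff) && fabs(diff) < max_frame_duration) {
        if (diff <= -th)
            delay = fmax(0, delay + diff);   // video is late: show the next frame sooner
        else if (diff >= th)
            delay = 2 * delay;               // video is early: hold the current frame longer
    }
    return delay;                            // |diff| >= max_frame_duration: timestamps broken, keep nominal delay
}
```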
```c
    if (ffp->show_status) {   ///> Periodically log player status
        static int64_t last_time;
        int64_t cur_time;
        int aqsize, vqsize, sqsize __unused;
        double av_diff;

        cur_time = av_gettime_relative();
        if (!last_time || (cur_time - last_time) >= 30000) {
            aqsize = 0;
            vqsize = 0;
            sqsize = 0;
            if (is->audio_st)
                aqsize = is->audioq.size;
            if (is->video_st)
                vqsize = is->videoq.size;
#ifdef FFP_MERGE
            if (is->subtitle_st)
                sqsize = is->subtitleq.size;
#else
            sqsize = 0;
#endif
            av_diff = 0;
            if (is->audio_st && is->video_st)
                av_diff = get_clock(&is->audclk) - get_clock(&is->vidclk);
            else if (is->video_st)
                av_diff = get_master_clock(is) - get_clock(&is->vidclk);
            else if (is->audio_st)
                av_diff = get_master_clock(is) - get_clock(&is->audclk);
            av_log(NULL, AV_LOG_INFO,
                   "%7.2f %s:%7.3f fd=%4d aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
                   get_master_clock(is),
                   (is->audio_st && is->video_st) ? "A-V" : (is->video_st ? "M-V" : (is->audio_st ? "M-A" : " ")),
                   av_diff,
                   is->frame_drops_early + is->frame_drops_late,
                   aqsize / 1024,
                   vqsize / 1024,
                   sqsize,
                   is->video_st ? is->viddec.avctx->pts_correction_num_faulty_dts : 0,
                   is->video_st ? is->viddec.avctx->pts_correction_num_faulty_pts : 0);
            fflush(stdout);
            last_time = cur_time;
        }
    }
}

///> Display the current picture, if any
static void video_display2(FFPlayer *ffp)
{
    VideoState *is = ffp->is;
    if (is->video_st)
        video_image_display2(ffp);   ///> 2.5.2 Image display
}

///> The picture display path
static void video_image_display2(FFPlayer *ffp)
{
    VideoState *is = ffp->is;
    Frame *vp;
    Frame *sp = NULL;

    vp = frame_queue_peek_last(&is->pictq);
    if (vp->bmp) {
        if (is->subtitle_st) {       ///> Subtitles
            if (frame_queue_nb_remaining(&is->subpq) > 0) {
                sp = frame_queue_peek(&is->subpq);
                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
                    if (!sp->uploaded) {
                        if (sp->sub.num_rects > 0) {
                            char buffered_text[4096];
                            if (sp->sub.rects[0]->text) {
                                strncpy(buffered_text, sp->sub.rects[0]->text, 4096);
                            } else if (sp->sub.rects[0]->ass) {
                                parse_ass_subtitle(sp->sub.rects[0]->ass, buffered_text);
                            }
                            ffp_notify_msg4(ffp, FFP_MSG_TIMED_TEXT, 0, 0, buffered_text, sizeof(buffered_text));
                        }
                        sp->uploaded = 1;
                    }
                }
            }
        }
        if (ffp->render_wait_start && !ffp->start_on_prepared && is->pause_req) {  ///> 2.5.3 If rendering should not start yet, wait
            if (!ffp->first_video_frame_rendered) {
                ffp->first_video_frame_rendered = 1;
                ffp_notify_msg1(ffp, FFP_MSG_VIDEO_RENDERING_START);
            }
            while (is->pause_req && !is->abort_request) {
                SDL_Delay(20);
            }
        }
        SDL_VoutDisplayYUVOverlay(ffp->vout, vp->bmp);   ///> 2.5.4 Call the SDL YUV display output
        ffp->stat.vfps = SDL_SpeedSamplerAdd(&ffp->vfps_sampler, FFP_SHOW_VFPS_FFPLAY, "vfps[ffplay]");
        if (!ffp->first_video_frame_rendered) {
            ffp->first_video_frame_rendered = 1;
            ffp_notify_msg1(ffp, FFP_MSG_VIDEO_RENDERING_START);
        }

        if (is->latest_video_seek_load_serial == vp->serial) {
            int latest_video_seek_load_serial = __atomic_exchange_n(&(is->latest_video_seek_load_serial), -1, memory_order_seq_cst);
            if (latest_video_seek_load_serial == vp->serial) {
                ffp->stat.latest_seek_load_duration = (av_gettime() - is->latest_seek_load_start_at) / 1000;
                if (ffp->av_sync_type == AV_SYNC_VIDEO_MASTER) {
                    ffp_notify_msg2(ffp, FFP_MSG_VIDEO_SEEK_RENDERING_START, 1);
                } else {
                    ffp_notify_msg2(ffp, FFP_MSG_VIDEO_SEEK_RENDERING_START, 0);
                }
            }
        }
    }
}
```

So when is the vout->display_overlay function pointer assigned?
```c
int SDL_VoutDisplayYUVOverlay(SDL_Vout *vout, SDL_VoutOverlay *overlay)
{
    if (vout && overlay && vout->display_overlay)
        return vout->display_overlay(vout, overlay);
    return -1;
}
```

The assignment happens when the JNI interface function IjkMediaPlayer_native_setup() creates the player; the entry point is marked at the window-creation step 2.8.2-1 below. The SDL_VoutAndroid_CreateForAndroidSurface() function is as follows:

```c
SDL_Vout *SDL_VoutAndroid_CreateForAndroidSurface()
{
    return SDL_VoutAndroid_CreateForANativeWindow();
}

SDL_Vout *SDL_VoutAndroid_CreateForANativeWindow()
{
    SDL_Vout *vout = SDL_Vout_CreateInternal(sizeof(SDL_Vout_Opaque));
    if (!vout)
        return NULL;

    SDL_Vout_Opaque *opaque = vout->opaque;
    opaque->native_window = NULL;
    if (ISDL_Array__init(&opaque->overlay_manager, 32))
        goto fail;
    if (ISDL_Array__init(&opaque->overlay_pool, 32))
        goto fail;

    opaque->egl = IJK_EGL_create();
    if (!opaque->egl)
        goto fail;

    vout->opaque_class    = &g_nativewindow_class;
    vout->create_overlay  = func_create_overlay;   ///> 2.5.5 Create a display-output (overlay) instance
    vout->free_l          = func_free_l;
    vout->display_overlay = func_display_overlay;  ///> 2.5.6 Function pointer to the display method
    return vout;
fail:
    func_free_l(vout);
    return NULL;
}
```

Which overlay gets created depends on the player type chosen in the Android program space: ijkplayer supports the Exo, AndroidMediaCodec and ijkplayer (ffplay) player types, and video output falls into two categories, MediaCodec decoding (Exo, AndroidMediaCodec) and FFmpeg decoding (ijkplayer). We take the ijkplayer type as the example.

```c
static SDL_VoutOverlay *func_create_overlay(int width, int height, int frame_format, SDL_Vout *vout)
{
    SDL_LockMutex(vout->mutex);
    SDL_VoutOverlay *overlay = func_create_overlay_l(width, height, frame_format, vout);
    SDL_UnlockMutex(vout->mutex);
    return overlay;
}

static SDL_VoutOverlay *func_create_overlay_l(int width, int height, int frame_format, SDL_Vout *vout)
{
    switch (frame_format) {
    case IJK_AV_PIX_FMT__ANDROID_MEDIACODEC:
        return SDL_VoutAMediaCodec_CreateOverlay(width, height, vout);           //> Android MediaCodec decoding
    default:
        return SDL_VoutFFmpeg_CreateOverlay(width, height, frame_format, vout);  //> FFmpeg decoding
    }
}
```
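SDL_Vout here is a hand-rolled vtable: behavior is selected at creation time by filling in function pointers, and callers only ever go through the pointers. A minimal sketch of the pattern (simplified types, not ijkplayer's actual definitions):

```c
#include <stdio.h>
#include <stdlib.h>

typedef struct Vout Vout;
struct Vout {
    int  (*display_overlay)(Vout *vout, const void *overlay);  // "virtual" method
    void (*free_l)(Vout *vout);
};

static int dummy_display(Vout *vout, const void *overlay)
{
    (void)vout; (void)overlay;
    printf("display one frame\n");
    return 0;
}

static void dummy_free(Vout *vout) { free(vout); }

static Vout *vout_create_dummy(void)
{
    Vout *vout = calloc(1, sizeof(*vout));
    if (!vout)
        return NULL;
    vout->display_overlay = dummy_display;   // pick the backend at creation time
    vout->free_l          = dummy_free;
    return vout;
}
```

SDL_VoutDisplayYUVOverlay() is then just a null-checked call through vout->display_overlay, which is why swapping the Android surface backend for the dummy backend requires no changes at the call sites.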
```c
#ifndef __clang_analyzer__
SDL_VoutOverlay *SDL_VoutFFmpeg_CreateOverlay(int width, int height, int frame_format, SDL_Vout *display)
{
    Uint32 overlay_format = display->overlay_format;
    switch (overlay_format) {
        case SDL_FCC__GLES2: {
            switch (frame_format) {
                case AV_PIX_FMT_YUV444P10LE:
                    overlay_format = SDL_FCC_I444P10LE;
                    break;
                case AV_PIX_FMT_YUV420P:
                case AV_PIX_FMT_YUVJ420P:
                default:
#if defined(__ANDROID__)
                    overlay_format = SDL_FCC_YV12;
#else
                    overlay_format = SDL_FCC_I420;
#endif
                    break;
            }
            break;
        }
    }

    SDLTRACE("SDL_VoutFFmpeg_CreateOverlay(w=%d, h=%d, fmt=%.4s(0x%x, dp=%p)\n",
             width, height, (const char*) &overlay_format, overlay_format, display);
    SDL_VoutOverlay *overlay = SDL_VoutOverlay_CreateInternal(sizeof(SDL_VoutOverlay_Opaque));
    if (!overlay) {
        ALOGE("overlay allocation failed");
        return NULL;
    }

    SDL_VoutOverlay_Opaque *opaque = overlay->opaque;
    opaque->mutex     = SDL_CreateMutex();
    opaque->sws_flags = SWS_BILINEAR;

    overlay->opaque_class    = &g_vout_overlay_ffmpeg_class;
    overlay->format          = overlay_format;
    overlay->pitches         = opaque->pitches;
    overlay->pixels          = opaque->pixels;
    overlay->w               = width;
    overlay->h               = height;
    overlay->free_l          = func_free_l;
    overlay->lock            = func_lock;
    overlay->unlock          = func_unlock;
    overlay->func_fill_frame = func_fill_frame;   ///> 2.5.7 Fill-frame callback function

    enum AVPixelFormat ff_format = AV_PIX_FMT_NONE;
    int buf_width  = width;
    int buf_height = height;
    switch (overlay_format) {
        case SDL_FCC_I420:
        case SDL_FCC_YV12: {
            ff_format = AV_PIX_FMT_YUV420P;
            // FIXME: need runtime config
#if defined(__ANDROID__)
            // 16 bytes align pitch for arm-neon image-convert
            buf_width = IJKALIGN(width, 16); // 1 bytes per pixel for Y-plane
#elif defined(__APPLE__)
            // 2^n align for width
            buf_width = width;
            if (width > 0)
                buf_width = 1 << (sizeof(int) * 8 - __builtin_clz(width));
#else
            buf_width = IJKALIGN(width, 16); // unknown platform
#endif
            opaque->planes = 3;
            break;
        }
        case SDL_FCC_I444P10LE: {
            ff_format = AV_PIX_FMT_YUV444P10LE;
            // FIXME: need runtime config
#if defined(__ANDROID__)
            // 16 bytes align pitch for arm-neon image-convert
            buf_width = IJKALIGN(width, 16); // 1 bytes per pixel for Y-plane
#elif defined(__APPLE__)
            // 2^n align for width
            buf_width = width;
            if (width > 0)
                buf_width = 1 << (sizeof(int) * 8 - __builtin_clz(width));
#else
            buf_width = IJKALIGN(width, 16); // unknown platform
#endif
            opaque->planes = 3;
            break;
        }
        case SDL_FCC_RV16: {
            ff_format = AV_PIX_FMT_RGB565;
            buf_width = IJKALIGN(width, 8); // 2 bytes per pixel
            opaque->planes = 1;
            break;
        }
        case SDL_FCC_RV24: {
            ff_format = AV_PIX_FMT_RGB24;
#if defined(__ANDROID__)
            // 16 bytes align pitch for arm-neon image-convert
            buf_width = IJKALIGN(width, 16); // 1 bytes per pixel for Y-plane
#elif defined(__APPLE__)
            buf_width = width;
#else
            buf_width = IJKALIGN(width, 16); // unknown platform
#endif
            opaque->planes = 1;
            break;
        }
        case SDL_FCC_RV32: {
            ff_format = AV_PIX_FMT_0BGR32;
            buf_width = IJKALIGN(width, 4); // 4 bytes per pixel
            opaque->planes = 1;
            break;
        }
        default:
            ALOGE("SDL_VoutFFmpeg_CreateOverlay(...): unknown format %.4s(0x%x)\n", (char*)&overlay_format, overlay_format);
            goto fail;
    }

    opaque->managed_frame = opaque_setup_frame(opaque, ff_format, buf_width, buf_height);  ///> 2.5.8 Tie the frame data structure to the output queue
    if (!opaque->managed_frame) {
        ALOGE("overlay->opaque->frame allocation failed\n");
        goto fail;
    }
    overlay_fill(overlay, opaque->managed_frame, opaque->planes);  ///> 2.5.9 Point the overlay display memory at the managed frame

    return overlay;

fail:
    func_free_l(overlay);
    return NULL;
}
#endif//__clang_analyzer__

///> 2.5.8
static AVFrame *opaque_setup_frame(SDL_VoutOverlay_Opaque* opaque, enum AVPixelFormat format, int width, int height)
{
    AVFrame *managed_frame = av_frame_alloc();
    if (!managed_frame) {
        return NULL;
    }

    AVFrame *linked_frame = av_frame_alloc();
    if (!linked_frame) {
        av_frame_free(&managed_frame);
        return NULL;
    }

    /*-
     * Lazily allocate frame buffer in opaque_obtain_managed_frame_buffer
     *
     * For refererenced frame management, we use buffer allocated by decoder
     *
    int frame_bytes = avpicture_get_size(format, width, height);
    AVBufferRef *frame_buffer_ref = av_buffer_alloc(frame_bytes);
    if (!frame_buffer_ref)
        return NULL;
    opaque->frame_buffer = frame_buffer_ref;
    */

    managed_frame->format = format;
    managed_frame->width  = width;
    managed_frame->height = height;
    av_image_fill_arrays(managed_frame->data, managed_frame->linesize, NULL, format, width, height, 1);
    opaque->managed_frame = managed_frame;
    opaque->linked_frame  = linked_frame;
    return managed_frame;
}
```
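The 16-byte pitch alignment above exists so arm-neon conversion routines can process whole vectors per row. IJKALIGN is a round-up-to-multiple macro; a worked sketch (the macro body is an assumption matching how it is used here, check ijksdl for the exact definition):

```c
#include <stdio.h>

// Assumed definition: round x up to the next multiple of `align`.
#define IJKALIGN(x, align) ((((x) + (align) - 1) / (align)) * (align))

int main(void)
{
    printf("%d\n", IJKALIGN(718, 16));   // 720: a 718-px Y-plane row is padded to 720 bytes
    printf("%d\n", IJKALIGN(1080, 16));  // 1088
    printf("%d\n", IJKALIGN(1920, 16));  // 1920, already a multiple of 16
    return 0;
}
```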
```c
int av_image_fill_arrays(uint8_t *dst_data[4], int dst_linesize[4],
                         const uint8_t *src, enum AVPixelFormat pix_fmt,
                         int width, int height, int align)
{
    int ret, i;

    ret = av_image_check_size(width, height, 0, NULL);
    if (ret < 0)
        return ret;

    ret = av_image_fill_linesizes(dst_linesize, pix_fmt, width);
    if (ret < 0)
        return ret;

    for (i = 0; i < 4; i++)
        dst_linesize[i] = FFALIGN(dst_linesize[i], align);

    return av_image_fill_pointers(dst_data, pix_fmt, height, (uint8_t *)src, dst_linesize);  //> Fill the per-plane data pointers
}

///> 2.5.9
static void overlay_fill(SDL_VoutOverlay *overlay, AVFrame *frame, int planes)
{
    overlay->planes = planes;

    for (int i = 0; i < AV_NUM_DATA_POINTERS; ++i) {
        overlay->pixels[i]  = frame->data[i];
        overlay->pitches[i] = frame->linesize[i];
    }
}
```

///>2.5.6 The display-method function pointer; the source file path of this implementation is ijksdl/dummy/ijksdl_vout_dummy.c

```c
static int func_display_overlay(SDL_Vout *vout, SDL_VoutOverlay *overlay)
{
    SDL_LockMutex(vout->mutex);
    int retval = func_display_overlay_l(vout, overlay);
    SDL_UnlockMutex(vout->mutex);
    return retval;
}

static int func_display_overlay_l(SDL_Vout *vout, SDL_VoutOverlay *overlay)
{
    return 0;
}
```
Summary of Section 2.5:
- When the user creates an ijkplayer in the Android space, the JNI interface function IjkMediaPlayer_native_setup() is triggered, which calls the SDL_VoutAndroid_CreateForAndroidSurface() function.
- That function creates an SDL_Vout instance and assigns its function pointers as follows:
```c
vout->create_overlay  = func_create_overlay;   ///> 2.5.5 Create a display-output (overlay) instance
vout->display_overlay = func_display_overlay;  ///> 2.5.6 Function pointer to the display method
```
- In the video refresh thread, the SDL_VoutDisplayYUVOverlay() function calls vout->display_overlay() to refresh the display through this method.
At present one question remains: when the FFmpeg decoding mode is selected, this function has no execution content (func_display_overlay_l() above simply returns 0), while when the Android MediaCodec mode is adopted, this method is called to execute the actual refresh code. Is it because the FFmpeg path hands rendering off elsewhere, so no operation is needed here? This doubt will be supplemented and resolved after further investigation.
///>2.8 Initialize the decoder video pipeline

```c
IJKFF_Pipenode* ffpipeline_init_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    return pipeline->func_init_video_decoder(pipeline, ffp);
}
```

///> 2.8.1 The pipeline->func_init_video_decoder pointer is initialized by the following function:

```c
IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
    ALOGD("ffpipeline_create_from_android()\n");
    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return pipeline;

    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp           = ffp;
    opaque->surface_mutex = SDL_CreateMutex();
    opaque->left_volume   = 1.0f;
    opaque->right_volume  = 1.0f;
    if (!opaque->surface_mutex) {
        ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
        goto fail;
    }

    pipeline->func_destroy              = func_destroy;
    pipeline->func_open_video_decoder   = func_open_video_decoder;
    pipeline->func_open_audio_output    = func_open_audio_output;
    pipeline->func_init_video_decoder   = func_init_video_decoder;
    pipeline->func_config_video_decoder = func_config_video_decoder;

    return pipeline;
fail:
    ffpipeline_free_p(&pipeline);
    return NULL;
}
```

///>2.8.2 ffpipeline_create_from_android() is in turn called by the following function:

```c
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;

    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();         ///> 2.8.2-1 Create the window (vout)
    if (!mp->ffplayer->vout)
        goto fail;

    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);  ///> 2.8.2-2 Create the Android (MediaCodec-capable) pipeline
    if (!mp->ffplayer->pipeline)
        goto fail;

    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);

    return mp;
fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
```

///>2.8.3 The whole chain is initiated by the JNI interface function IjkMediaPlayer_native_setup(), which is called when the user's Java program creates the player.

```c
static void IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);

    jni_set_media_player(env, thiz, mp);
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
```
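Note the weak_this handling in native_setup(): the Java layer passes a WeakReference to the player object, and the native side pins that WeakReference itself with NewGlobalRef so later callbacks can reach back into Java without keeping the player strongly alive. A minimal sketch of the pattern (the function names here are hypothetical; only NewGlobalRef/DeleteGlobalRef are real JNI API):

```c
#include <jni.h>

static jobject g_weak_thiz;  // global ref to a java.lang.ref.WeakReference

// Called once from native_setup(); pins the WeakReference object itself.
static void store_weak_thiz(JNIEnv *env, jobject weak_this)
{
    g_weak_thiz = (*env)->NewGlobalRef(env, weak_this);
}

// Called on teardown; releases the pin so the referent can be collected.
static void release_weak_thiz(JNIEnv *env)
{
    if (g_weak_thiz) {
        (*env)->DeleteGlobalRef(env, g_weak_thiz);
        g_weak_thiz = NULL;
    }
}
```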
Through part 2.8, the process we have traced is summarized as follows:

- When the user's Android program creates an ijkplayer, it calls the JNI interface function IjkMediaPlayer_native_setup(), which calls ijkmp_android_create(message_loop); that in turn calls the two functions above to create the player.
- ffpipeline_create_from_android() creates the pipeline and assigns the function pointers that connect its stages.
- When the player is in the 'prepared' state, the stream_open() function calls ffpipeline_init_video_decoder(ffp->pipeline, ffp), which dispatches through pipeline->func_init_video_decoder(pipeline, ffp). The function behind that pointer is:
```c
static IJKFF_Pipenode *func_init_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    IJKFF_Pipenode *node = NULL;

    if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2)
        node = ffpipenode_init_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);

    return node;
}
```

///>As the function name shows, this is where android_mediacodec and ffplay are wired into one pipeline:

```c
IJKFF_Pipenode *ffpipenode_init_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;

    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState            *is     = ffp->is;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    JNIEnv                *env    = NULL;

    node->func_destroy = func_destroy;
    if (ffp->mediacodec_sync) {
        node->func_run_sync = func_run_sync_loop;
    } else {
        node->func_run_sync = func_run_sync;
    }
    node->func_flush  = func_flush;
    opaque->pipeline  = pipeline;
    opaque->ffp       = ffp;
    opaque->decoder   = &is->viddec;
    opaque->weak_vout = vout;

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    opaque->any_input_mutex                   = SDL_CreateMutex();
    opaque->any_input_cond                    = SDL_CreateCond();

    if (!opaque->acodec_cond || !opaque->acodec_cond || !opaque->acodec_first_dequeue_output_mutex || !opaque->acodec_first_dequeue_output_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    opaque->codecpar = avcodec_parameters_alloc();
    if (!opaque->codecpar)
        goto fail;

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    ALOGI("%s:use default mediacodec name: %s\n", __func__, ffp->mediacodec_default_name);
    strcpy(opaque->mcc.codec_name, ffp->mediacodec_default_name);

    opaque->acodec = SDL_AMediaCodecJava_createByCodecName(env, ffp->mediacodec_default_name);
    if (!opaque->acodec) {
        goto fail;
    }

    return node;
fail:
    ALOGW("%s: init fail\n", __func__);
    ffpipenode_free_p(&node);
    return NULL;
}
```
This process opens up the channel between MediaCodec in the Android space and the ffplay instance inside the .so library. Recall the ijkplayer demo: several Android MediaCodec methods are overridden there, and that is essentially what binds MediaCodec to the ffplay instance in the .so library. At this point, the player's 'prepared' state is complete.
The summary is as follows:

- Establish three packet queues: video, audio and subtitle;
- Create the data-reading thread read_thread();
- Create the video refresh thread video_refresh_thread();
- Open up the channel between Android MediaCodec and the ffplay player;
- Open the pipeline between the video display output surface and the output queue.

With this, the player has everything it needs to start playback.