Cross-compiling the FFmpeg and x264 libraries under Linux. Target board: FriendlyARM Tiny4412 development board, EXYNOS4412 (ARMv7, 32-bit)

1, Environment introduction

PC operating system: RedHat 6.3 (32bit)

Cross compiler: 4.5.1 (the compiler comes with the development board CD)

2, X264 Library Download, compilation and installation

2.1 download X264 Library

Download address: https://www.videolan.org/developers/x264.html

2.2 decompress and configure X264 Library

[wbyq@wbyq tiny4412]$ tar xvf /mnt/hgfs/linux-share-dir/x264-master.tar.bz2
[wbyq@wbyq tiny4412]$ cd x264-master/
[wbyq@wbyq x264-master]$ ./configure --prefix=$PWD/_install --disable-asm --enable-shared --enable-static --host=arm-none-linux-gnueabi

2.3 modifying configuration files

Open the config.mak file in the current directory and prefix the compiler-related commands with "arm-linux-", because plain gcc is used by default.

[wbyq@wbyq x264-master]$ gedit config.mak 

The modification is shown in the following figure:

2.4 compiling and installing X264

[wbyq@wbyq x264-master]$ make && make install

To view the files compiled and installed:

[wbyq@wbyq x264-master]$ tree _install/
_install/
├── bin
│   └── x264
├── include
│   ├── x264_config.h
│   └── x264.h
└── lib
    ├── libx264.a
    ├── libx264.so -> libx264.so.160
    ├── libx264.so.160
    └── pkgconfig
        └── x264.pc

4 directories, 7 files
[wbyq@wbyq x264-master]$

3, Download, compile and install FFMPEG Library

3.1 download FFMPEG

Download address: http://www.ffmpeg.org/download.html

3.2 compiling FFMPEG

[wbyq@wbyq ffmpeg-4.2.2]$ ./configure --enable-shared --enable-static --prefix=$PWD/_install --cross-prefix=arm-linux- --arch=arm --target-os=linux --enable-gpl --extra-cflags=-I/home/wbyq/work/tiny4412/x264-master/_install/include --extra-ldflags=-L/home/wbyq/work/tiny4412/x264-master/_install/lib --enable-ffmpeg --enable-libx264

[wbyq@wbyq ffmpeg-4.2.2]$ make && make install

3.3 view files compiled successfully

[wbyq@wbyq ffmpeg-4.2.2]$ tree _install/
_install/
├── bin
│   ├── ffmpeg
│   └── ffprobe
├── include
│   ├── libavcodec
│   │   ├── ac3_parser.h
│   │   ├── adts_parser.h
│   │   ├── avcodec.h
│   │   ├── avdct.h
│   │   ├── avfft.h
│   │   ├── d3d11va.h
│   │   ├── dirac.h
│   │   ├── dv_profile.h
│   │   ├── dxva2.h
│   │   ├── jni.h
│   │   ├── mediacodec.h
│   │   ├── qsv.h
│   │   ├── vaapi.h
│   │   ├── vdpau.h
│   │   ├── version.h
│   │   ├── videotoolbox.h
│   │   ├── vorbis_parser.h
│   │   └── xvmc.h
│   ├── libavdevice
│   │   ├── avdevice.h
│   │   └── version.h
│   ├── libavfilter
│   │   ├── avfilter.h
│   │   ├── buffersink.h
│   │   ├── buffersrc.h
│   │   └── version.h
│   ├── libavformat
│   │   ├── avformat.h
│   │   ├── avio.h
│   │   └── version.h
│   ├── libavutil
│   │   ├── adler32.h
│   │   ├── aes_ctr.h
│   │   ├── aes.h
│   │   ├── attributes.h
│   │   ├── audio_fifo.h
│   │   ├── avassert.h
│   │   ├── avconfig.h
│   │   ├── avstring.h
│   │   ├── avutil.h
│   │   ├── base64.h
│   │   ├── blowfish.h
│   │   ├── bprint.h
│   │   ├── bswap.h
│   │   ├── buffer.h
│   │   ├── camellia.h
│   │   ├── cast5.h
│   │   ├── channel_layout.h
│   │   ├── common.h
│   │   ├── cpu.h
│   │   ├── crc.h
│   │   ├── des.h
│   │   ├── dict.h
│   │   ├── display.h
│   │   ├── downmix_info.h
│   │   ├── encryption_info.h
│   │   ├── error.h
│   │   ├── eval.h
│   │   ├── ffversion.h
│   │   ├── fifo.h
│   │   ├── file.h
│   │   ├── frame.h
│   │   ├── hash.h
│   │   ├── hdr_dynamic_metadata.h
│   │   ├── hmac.h
│   │   ├── hwcontext_cuda.h
│   │   ├── hwcontext_d3d11va.h
│   │   ├── hwcontext_drm.h
│   │   ├── hwcontext_dxva2.h
│   │   ├── hwcontext.h
│   │   ├── hwcontext_mediacodec.h
│   │   ├── hwcontext_qsv.h
│   │   ├── hwcontext_vaapi.h
│   │   ├── hwcontext_vdpau.h
│   │   ├── hwcontext_videotoolbox.h
│   │   ├── imgutils.h
│   │   ├── intfloat.h
│   │   ├── intreadwrite.h
│   │   ├── lfg.h
│   │   ├── log.h
│   │   ├── lzo.h
│   │   ├── macros.h
│   │   ├── mastering_display_metadata.h
│   │   ├── mathematics.h
│   │   ├── md5.h
│   │   ├── mem.h
│   │   ├── motion_vector.h
│   │   ├── murmur3.h
│   │   ├── opt.h
│   │   ├── parseutils.h
│   │   ├── pixdesc.h
│   │   ├── pixelutils.h
│   │   ├── pixfmt.h
│   │   ├── random_seed.h
│   │   ├── rational.h
│   │   ├── rc4.h
│   │   ├── replaygain.h
│   │   ├── ripemd.h
│   │   ├── samplefmt.h
│   │   ├── sha512.h
│   │   ├── sha.h
│   │   ├── spherical.h
│   │   ├── stereo3d.h
│   │   ├── tea.h
│   │   ├── threadmessage.h
│   │   ├── timecode.h
│   │   ├── time.h
│   │   ├── timestamp.h
│   │   ├── tree.h
│   │   ├── twofish.h
│   │   ├── tx.h
│   │   ├── version.h
│   │   └── xtea.h
│   ├── libpostproc
│   │   ├── postprocess.h
│   │   └── version.h
│   ├── libswresample
│   │   ├── swresample.h
│   │   └── version.h
│   └── libswscale
│       ├── swscale.h
│       └── version.h
├── lib
│   ├── libavcodec.a
│   ├── libavcodec.so -> libavcodec.so.58.54.100
│   ├── libavcodec.so.58 -> libavcodec.so.58.54.100
│   ├── libavcodec.so.58.54.100
│   ├── libavdevice.a
│   ├── libavdevice.so -> libavdevice.so.58.8.100
│   ├── libavdevice.so.58 -> libavdevice.so.58.8.100
│   ├── libavdevice.so.58.8.100
│   ├── libavfilter.a
│   ├── libavfilter.so -> libavfilter.so.7.57.100
│   ├── libavfilter.so.7 -> libavfilter.so.7.57.100
│   ├── libavfilter.so.7.57.100
│   ├── libavformat.a
│   ├── libavformat.so -> libavformat.so.58.29.100
│   ├── libavformat.so.58 -> libavformat.so.58.29.100
│   ├── libavformat.so.58.29.100
│   ├── libavutil.a
│   ├── libavutil.so -> libavutil.so.56.31.100
│   ├── libavutil.so.56 -> libavutil.so.56.31.100
│   ├── libavutil.so.56.31.100
│   ├── libpostproc.a
│   ├── libpostproc.so -> libpostproc.so.55.5.100
│   ├── libpostproc.so.55 -> libpostproc.so.55.5.100
│   ├── libpostproc.so.55.5.100
│   ├── libswresample.a
│   ├── libswresample.so -> libswresample.so.3.5.100
│   ├── libswresample.so.3 -> libswresample.so.3.5.100
│   ├── libswresample.so.3.5.100
│   ├── libswscale.a
│   ├── libswscale.so -> libswscale.so.5.5.100
│   ├── libswscale.so.5 -> libswscale.so.5.5.100
│   ├── libswscale.so.5.5.100
│   └── pkgconfig
│       ├── libavcodec.pc
│       ├── libavdevice.pc
│       ├── libavfilter.pc
│       ├── libavformat.pc
│       ├── libavutil.pc
│       ├── libpostproc.pc
│       ├── libswresample.pc
│       └── libswscale.pc
└── share
    ├── ffmpeg
    │   ├── examples
    │   │   ├── avio_dir_cmd.c
    │   │   ├── avio_reading.c
    │   │   ├── decode_audio.c
    │   │   ├── decode_video.c
    │   │   ├── demuxing_decoding.c
    │   │   ├── encode_audio.c
    │   │   ├── encode_video.c
    │   │   ├── extract_mvs.c
    │   │   ├── filter_audio.c
    │   │   ├── filtering_audio.c
    │   │   ├── filtering_video.c
    │   │   ├── http_multiclient.c
    │   │   ├── hw_decode.c
    │   │   ├── Makefile
    │   │   ├── metadata.c
    │   │   ├── muxing.c
    │   │   ├── qsvdec.c
    │   │   ├── README
    │   │   ├── remuxing.c
    │   │   ├── resampling_audio.c
    │   │   ├── scaling_video.c
    │   │   ├── transcode_aac.c
    │   │   ├── transcoding.c
    │   │   ├── vaapi_encode.c
    │   │   └── vaapi_transcode.c
    │   ├── ffprobe.xsd
    │   ├── libvpx-1080p50_60.ffpreset
    │   ├── libvpx-1080p.ffpreset
    │   ├── libvpx-360p.ffpreset
    │   ├── libvpx-720p50_60.ffpreset
    │   └── libvpx-720p.ffpreset
    └── man
        ├── man1
        │   ├── ffmpeg.1
        │   ├── ffmpeg-all.1
        │   ├── ffmpeg-bitstream-filters.1
        │   ├── ffmpeg-codecs.1
        │   ├── ffmpeg-devices.1
        │   ├── ffmpeg-filters.1
        │   ├── ffmpeg-formats.1
        │   ├── ffmpeg-protocols.1
        │   ├── ffmpeg-resampler.1
        │   ├── ffmpeg-scaler.1
        │   ├── ffmpeg-utils.1
        │   ├── ffprobe.1
        │   └── ffprobe-all.1
        └── man3
            ├── libavcodec.3
            ├── libavdevice.3
            ├── libavfilter.3
            ├── libavformat.3
            ├── libavutil.3
            ├── libswresample.3
            └── libswscale.3

18 directories, 208 files
[wbyq@wbyq ffmpeg-4.2.2]$

4, Deploy the compiled library to the target development board

4.1 copy the library to the development board

To facilitate deployment to the development board, you can first create a "run_lib" directory, copy the dynamic libraries built from x264 and FFmpeg into it, and then transfer the "run_lib" directory to the target development board through an NFS server. After copying the libraries to the development board, you can either copy the libraries from the "run_lib" directory directly into the /lib directory, or add the path of the "run_lib" directory to the system's library search environment variable. Either way, the ultimate goal is that the libraries can be found when the program runs.

The copying process is as follows:

[wbyq@wbyq tiny4412]$ ls
ffmpeg-4.2.2  x264-master
[wbyq@wbyq tiny4412]$ mkdir run_lib
[wbyq@wbyq tiny4412]$ cd run_lib/
[wbyq@wbyq run_lib]$ cp ../ffmpeg-4.2.2/_install/lib/*.so* ./
[wbyq@wbyq run_lib]$ cp ../x264-master/_install/lib/*.so* ./
[wbyq@wbyq run_lib]$ ls
libavcodec.so            libavfilter.so            libavutil.so             libswresample.so          libx264.so
libavcodec.so.58         libavfilter.so.7          libavutil.so.56          libswresample.so.3        libx264.so.160
libavcodec.so.58.54.100  libavfilter.so.7.57.100   libavutil.so.56.31.100   libswresample.so.3.5.100
libavdevice.so           libavformat.so            libpostproc.so           libswscale.so
libavdevice.so.58        libavformat.so.58         libpostproc.so.55        libswscale.so.5
libavdevice.so.58.8.100  libavformat.so.58.29.100  libpostproc.so.55.5.100  libswscale.so.5.5.100

4.2 copy executable commands to the development board for testing

After copying the library to the development board, copy the "ffmpeg-4.2.2/_install/bin/ffmpeg" executable file to the bin directory of the development board.

The following is the effect of running ffmpeg command on the terminal of the development board:

[root@tiny4412 ]# ls
bin         etc         linuxrc     nfs.sh      root        tmp         work
code        home        lost+found  opt         sbin        usr
dev         lib         mnt         proc        sys         var
[root@tiny4412 ]# ffmpeg 
ffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers
  built with gcc 4.5.1 (ctng-1.8.1-FA)
  configuration: --enable-shared --enable-static --prefix=/home/wbyq/work/tiny4412/ffmpeg-4.2.2/_install --cross-prefix=arm-linux- --arch=arm --target-os=linux --enable-gpl --extra-cflags=-I/home/wbyq/work/tiny4412/x264-master/_install/include --extra-ldflags=-L/home/wbyq/work/tiny4412/x264-master/_install/lib --enable-ffmpeg --enable-libx264
  libavutil      56. 31.100 / 56. 31.100
  libavcodec     58. 54.100 / 58. 54.100
  libavformat    58. 29.100 / 58. 29.100
  libavdevice    58.  8.100 / 58.  8.100
  libavfilter     7. 57.100 /  7. 57.100
  libswscale      5.  5.100 /  5.  5.100
  libswresample   3.  5.100 /  3.  5.100
  libpostproc    55.  5.100 / 55.  5.100
Hyper fast Audio and Video encoder
usage: ffmpeg [options] [[infile options] -i infile]... {[outfile options] outfile}...

Use -h to get full help or, even better, run 'man ffmpeg'
[root@tiny4412 ]# 

Now that the command runs, you can proceed with audio- and video-related program development.

5, Test ffmpeg Library: use ffmpeg to collect camera data, encode it into MP4 and save it locally

5.1 complete code

#include <stdlib.h>  
#include <stdio.h>  
#include <string.h>  
#include <math.h>  
#include <unistd.h>  
#include <libavutil/avassert.h>  
#include <libavutil/channel_layout.h>  
#include <libavutil/imgutils.h>  
#include <libavutil/opt.h>  
#include <libavutil/mathematics.h>  
#include <libavutil/timestamp.h>  
#include <libavdevice/avdevice.h>  
#include <libavformat/avformat.h>  
#include <libswscale/swscale.h>  
#include <libswresample/swresample.h>  
 
/* Duration of the recorded video, in seconds.
 * (Fix: the original line was garbled as "#define STREAM_ Duration 50.0 / * ..."
 * which never defined STREAM_DURATION, breaking get_video_frame().) */
#define STREAM_DURATION   50.0
/* Output frame rate (images/s); tune to match the camera's capture speed. */
#define STREAM_FRAME_RATE 5
#define STREAM_PIX_FMT    AV_PIX_FMT_YUV420P /* default pix_fmt */
#define SCALE_FLAGS       SWS_BICUBIC

/* Width and height of the stored video; set in main() from the camera's
 * reported resolution and read by add_stream(). */
int video_width;
int video_height;
 
// Wrapper for a single output AVStream and the encoder state that feeds it.
typedef struct OutputStream
{  
	AVStream *st;         // muxer stream this wrapper writes to
	AVCodecContext *enc;  // encoder context for this stream
	/* pts value of the next frame that will be generated */  
	int64_t next_pts;  
	int samples_count;    // audio sample counter (not used in this video-only path)
	AVFrame *frame;       // reusable frame in the encoder's pixel format
	AVFrame *tmp_frame;   // temporary YUV420P frame when format conversion is needed
	float t, tincr, tincr2;       // tone-generator state (appears unused here — TODO confirm)
	struct SwsContext *sws_ctx;   // software scaler context (freed in close_stream)
	struct SwrContext *swr_ctx;   // audio resampler context (freed in close_stream)
}OutputStream;  
 
 
// Bundles everything needed to read and decode frames from the camera.
// (The tag "IntputDev" is a typo for "InputDev" but is referenced throughout
// the file, so the name is kept.)
typedef struct IntputDev
{  
	AVCodecContext  *pCodecCtx;         // decoder context of the camera's video stream
	AVCodec         *pCodec;            // decoder matching pCodecCtx->codec_id
	AVFormatContext *v_ifmtCtx;         // video4linux2 input format context
	int  videoindex;                    // index of the video stream inside v_ifmtCtx
	struct SwsContext *img_convert_ctx; // converts camera pixels to YUV420P
	AVPacket *in_packet;                // packet buffer filled by av_read_frame
	AVFrame *pFrame,*pFrameYUV;         // decoded camera frame / converted YUV frame
}IntputDev;  
 
/* Trace helper: print a packet's pts/dts/duration both in time-base ticks
 * and in seconds, plus the stream index it belongs to. */
static void log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt)  
{  
	AVRational *tb = &fmt_ctx->streams[pkt->stream_index]->time_base;  

	printf("pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",  
	       av_ts2str(pkt->pts),      av_ts2timestr(pkt->pts, tb),  
	       av_ts2str(pkt->dts),      av_ts2timestr(pkt->dts, tb),  
	       av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, tb),  
	       pkt->stream_index);  
}  
 
/* Rescale the packet's timestamps from the codec time base to the stream
 * time base, tag it with the stream index, and interleave it into the file.
 * Returns the result of av_interleaved_write_frame (0 on success). */
static int write_frame(AVFormatContext *fmt_ctx, const AVRational *time_base, AVStream *st, AVPacket *pkt)  
{  
	pkt->stream_index = st->index;  
	av_packet_rescale_ts(pkt, *time_base, st->time_base);  

	/* Trace the packet before handing it to the muxer. */
	log_packet(fmt_ctx, pkt);  
	return av_interleaved_write_frame(fmt_ctx, pkt);  
}  
 
/*
 * Add an output stream for codec_id to the muxer context oc and configure
 * its encoder context with default parameters.
 *
 *  ost      - wrapper that receives the new stream and encoder context
 *  oc       - output (muxer) format context
 *  codec    - out: the encoder found for codec_id
 *  codec_id - codec to encode with (the container's default in this program)
 *
 * Exits the process on any failure. For video, the resolution is taken from
 * the video_width/video_height globals set in main().
 */
static void add_stream(OutputStream *ost, AVFormatContext *oc,AVCodec **codec,enum AVCodecID codec_id)  
{  
	AVCodecContext *c;  
	int i;  

	/* find the encoder */
	*codec = avcodec_find_encoder(codec_id);  
	if (!(*codec))
	{  
		fprintf(stderr, "Could not find encoder for '%s'\n",  
				avcodec_get_name(codec_id));  
		exit(1);  
	}  

	ost->st = avformat_new_stream(oc, NULL);  
	if (!ost->st) {  
		fprintf(stderr, "Could not allocate stream\n");  
		exit(1);  
	}  
	ost->st->id = oc->nb_streams-1;  

	c = avcodec_alloc_context3(*codec);  
	if (!c) {  
		fprintf(stderr, "Could not alloc an encoding context\n");  
		exit(1);  
	}  
	ost->enc = c; 	

	switch((*codec)->type)
	{  
		case AVMEDIA_TYPE_AUDIO:  
			c->sample_fmt  = (*codec)->sample_fmts ?  
				(*codec)->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;  
			c->bit_rate    = 64000;  
			/* Prefer 44100 Hz if the encoder supports it, otherwise take the
			 * first supported rate. */
			c->sample_rate = 44100;  
			if ((*codec)->supported_samplerates) {  
				c->sample_rate = (*codec)->supported_samplerates[0];  
				for (i = 0; (*codec)->supported_samplerates[i]; i++) {  
					if ((*codec)->supported_samplerates[i] == 44100)  
						c->sample_rate = 44100;  
				}  
			}  
			/* Pick a channel layout (prefer stereo), then derive the channel
			 * count from it.  (Fix: the original also assigned c->channels
			 * from the still-unset channel_layout before this point; that
			 * redundant assignment was removed.) */
			c->channel_layout = AV_CH_LAYOUT_STEREO;  
			if ((*codec)->channel_layouts) {  
				c->channel_layout = (*codec)->channel_layouts[0];  
				for (i = 0; (*codec)->channel_layouts[i]; i++) {  
					if ((*codec)->channel_layouts[i] == AV_CH_LAYOUT_STEREO)  
						c->channel_layout = AV_CH_LAYOUT_STEREO;  
				}  
			}  
			c->channels        = av_get_channel_layout_nb_channels(c->channel_layout);  
			ost->st->time_base = (AVRational){ 1, c->sample_rate };  
			break;  
	  
		case AVMEDIA_TYPE_VIDEO:  
			c->codec_id = codec_id;  
			c->bit_rate = 2500000;  /* average bit rate; FFmpeg's muxing example uses 400000 */
			/* The resolution must be a multiple of 2. */  
			c->width=video_width;  
			c->height=video_height;  
			/* Time base: the fundamental unit of time (in seconds) in which
			 * frame timestamps are expressed.  For fixed-fps content the
			 * time base should be 1/framerate and timestamps increment by 1. */
			ost->st->time_base = (AVRational){1,STREAM_FRAME_RATE};  /* frame rate */
			c->time_base       = ost->st->time_base;  
			c->gop_size      = 12; /* at most one intra frame every 12 frames */  
			c->pix_fmt       = STREAM_PIX_FMT;  
			if(c->codec_id == AV_CODEC_ID_MPEG2VIDEO)
			{  
				/* just for testing: also add B frames */  
				c->max_b_frames = 2;  
			}  
			if(c->codec_id == AV_CODEC_ID_MPEG1VIDEO)
			{  
				/* Needed to avoid using macroblocks in which some coefficients
				 * overflow. This does not happen with normal video; it happens
				 * here because the motion of the chroma plane does not match
				 * the luma plane. */
				c->mb_decision = 2;  
			}	
		break;  
	  
		default:  
			break;  
	}  
  
	/* Some formats want stream headers to be separate. */  
	if (oc->oformat->flags & AVFMT_GLOBALHEADER)  
		c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;    
}  
 
/* Allocate an AVFrame of the given pixel format and size, including its
 * data buffers (32-byte aligned).  Returns NULL if the frame itself cannot
 * be allocated; exits the process if the buffers cannot. */
static AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)  
{  
	AVFrame *frm = av_frame_alloc();  
	if (!frm)  
		return NULL;  

	frm->format = pix_fmt;  
	frm->width  = width;  
	frm->height = height;  

	/* Allocate the backing buffers for the frame data. */
	if (av_frame_get_buffer(frm, 32) < 0)
	{  
		fprintf(stderr, "Could not allocate frame data.\n");  
		exit(1);  
	}  
	return frm;  
}  
  
/* Open the video encoder for ost, allocate its reusable frame(s), and copy
 * the encoder parameters into the muxer's stream.  Exits on any failure. */
static void open_video(AVFormatContext *oc, AVCodec *codec, OutputStream *ost, AVDictionary *opt_arg)  
{  
	AVCodecContext *c = ost->enc;  
	AVDictionary *opt = NULL;  
	int ret;  

	/* Open the codec with a private copy of the caller's options. */
	av_dict_copy(&opt, opt_arg, 0);  
	ret = avcodec_open2(c, codec, &opt);  
	av_dict_free(&opt);  
	if (ret < 0)
	{  
		fprintf(stderr, "Could not open video codec: %s\n", av_err2str(ret));  
		exit(1);  
	}  

	/* Reusable frame in the encoder's own pixel format. */
	ost->frame = alloc_picture(c->pix_fmt, c->width, c->height);  
	if (!ost->frame)
	{  
		fprintf(stderr, "Could not allocate video frame\n");  
		exit(1);  
	}  
	printf("ost->frame alloc success fmt=%d w=%d h=%d\n",c->pix_fmt,c->width, c->height);  

	/* If the encoder's format is not YUV420P, keep a temporary YUV420P
	 * frame as the source for conversion into the encoder's format. */
	ost->tmp_frame = NULL;  
	if (c->pix_fmt != AV_PIX_FMT_YUV420P)
	{  
		ost->tmp_frame = alloc_picture(AV_PIX_FMT_YUV420P, c->width, c->height);  
		if (!ost->tmp_frame)
		{  
			fprintf(stderr, "Could not allocate temporary picture\n");  
			exit(1);  
		}  
	}  

	/* Copy the stream parameters to the muxer. */
	if (avcodec_parameters_from_context(ost->st->codecpar, c) < 0)
	{  
		fprintf(stderr, "Could not copy the stream parameters\n");  
		exit(1);  
	}  
}
 
/*
 * Encode one video frame and write it to the output file.
 * Returns 1 when the stream is finished (frame == NULL), 0 otherwise,
 * so the caller's loop condition `encode_video = !write_video_frame(...)`
 * keeps running until the NULL end-of-stream frame arrives.
 */
static int write_video_frame(AVFormatContext *oc, OutputStream *ost,AVFrame *frame)  
{  
	AVCodecContext *c;  
	AVPacket pkt = { 0 };  
	int got_packet = 0;  
	int ret;  

	/* A NULL frame marks the end of the recording. */
	if (frame == NULL)  
		return 1;  

	c = ost->enc;  
	av_init_packet(&pkt);  

	/* Encode the image. */
	ret = avcodec_encode_video2(c, &pkt, frame, &got_packet);  
	if (ret < 0)
	{  
		fprintf(stderr, "Error encoding video frame: %s\n", av_err2str(ret));  
		exit(1);  
	}  

	printf("--------------video- pkt.pts=%s\n",av_ts2str(pkt.pts));  
	printf("----st.num=%d st.den=%d codec.num=%d codec.den=%d---------\n",ost->st->time_base.num,ost->st->time_base.den,  
			c->time_base.num,c->time_base.den);  		

	/* The encoder may buffer frames; only mux when a packet came out. */
	ret = got_packet ? write_frame(oc, &c->time_base, ost->st, &pkt) : 0;  
	if (ret < 0)
	{  
		fprintf(stderr, "Error while writing video frame: %s\n", av_err2str(ret));  
		exit(1);  
	}  

	/* frame is non-NULL here, so this always reports "keep encoding". */
	return (frame || got_packet) ? 0 : 1;  
}  
  
  
/*
 * Read one packet from the camera, decode it, and convert the decoded
 * picture into ost->frame (YUV420P at the encoder's resolution).
 *
 * Returns ost->frame (with its pts assigned) when a picture was produced;
 * NULL when no picture is available on this call or when STREAM_DURATION
 * has been reached.  *got_pic tells the caller whether it has something
 * to hand to write_video_frame().
 *
 * Fix: *got_pic was previously left untouched when the read failed, when
 * the packet belonged to a non-video stream, and on the duration-reached
 * path — the caller then read an indeterminate value (main declares
 * `int got_pic;` uninitialized).  It is now always assigned: 0 on the
 * "nothing yet" paths, and 1 on the duration-reached path so the caller
 * passes the NULL end-of-stream frame to write_video_frame() and the
 * encode loop terminates deterministically.
 */
static AVFrame *get_video_frame(OutputStream *ost,IntputDev* input,int *got_pic)  
{  
	int ret, got_picture;  
	AVCodecContext *c = ost->enc;  
	AVFrame * ret_frame=NULL;  

	*got_pic = 0;  

	/* Stop producing frames once next_pts covers STREAM_DURATION seconds;
	 * report "got a result" so the caller forwards the NULL frame. */
	if(av_compare_ts(ost->next_pts, c->time_base,STREAM_DURATION, (AVRational){1,1})>=0)
	{  
		*got_pic = 1;  
		return NULL;  
	}  

	/* The encoder may keep an internal reference to ost->frame; make sure
	 * it is writable before overwriting it. */
	if (av_frame_make_writable(ost->frame)<0)  
		exit(1);  

	if(av_read_frame(input->v_ifmtCtx, input->in_packet)>=0)
	{  
		if(input->in_packet->stream_index==input->videoindex)
		{  
			/* NOTE(review): avcodec_decode_video2 is deprecated in FFmpeg 4.x;
			 * avcodec_send_packet/avcodec_receive_frame is the replacement. */
			ret = avcodec_decode_video2(input->pCodecCtx, input->pFrame, &got_picture, input->in_packet);  
			*got_pic=got_picture;  
			if(ret<0)
			{  
				printf("Decode Error.\n");  
				av_packet_unref(input->in_packet);  
				return NULL;  
			}  
			if(got_picture)
			{  
				/* Convert the camera frame straight into ost->frame. */
				sws_scale(input->img_convert_ctx, (const unsigned char* const*)input->pFrame->data, input->pFrame->linesize, 0, input->pCodecCtx->height, ost->frame->data,  ost->frame->linesize);  
				ost->frame->pts =ost->next_pts++;  
				ret_frame= ost->frame;    
			}  
		}  
		av_packet_unref(input->in_packet);  
	}  
	return ret_frame;  
}  
/* Release every encoder-side resource owned by an OutputStream.
 * (All frees are independent; safe on NULL members.) */
static void close_stream(AVFormatContext *oc, OutputStream *ost)  
{  
	swr_free(&ost->swr_ctx);  
	sws_freeContext(ost->sws_ctx);  
	av_frame_free(&ost->tmp_frame);  
	av_frame_free(&ost->frame);  
	avcodec_free_context(&ost->enc);  
}  
 
/*
 * Capture camera frames and encode them into an MP4 video.
 *
 * Flow: open the V4L2 camera -> find/open its decoder -> set up sws
 * conversion to YUV420P -> create the output muxer and its video encoder
 * -> loop reading/decoding/encoding until STREAM_DURATION is reached ->
 * write the trailer and release everything.
 *
 * usage: ./app <camera device node> <output file>, e.g. ./app /dev/video0 123.mp4
 */  
int main(int argc, char **argv)  
{  
	OutputStream video_st = { 0 }, audio_st = { 0 };  
	const char *filename;             /* output file name (argv[2]) */
	AVOutputFormat *fmt;  
	AVFormatContext *oc;              /* output (muxer) context */
	AVCodec *audio_codec, *video_codec;  
	int ret;  
	int have_video = 0, have_audio = 0;  
	int encode_video = 0, encode_audio = 0;  
	AVDictionary *opt = NULL;  
	int i;  
  
	if(argc<3)
	{  
		/* e.g.  ./app /dev/video0 123.mp4 */
		printf("usage: %s <Camera device node> <File name> \n", argv[0]);  
		return 1;  
	}  
  
	filename = argv[2];  
	printf("Name of the currently stored video file:%s\n",filename);
	/* Allocate the output media context; the container format is guessed
	 * from the file name extension, falling back to MPEG. */
	avformat_alloc_output_context2(&oc, NULL, NULL, filename);  
	if(!oc)
	{  
		printf("Cannot infer output format from file extension: use MPEG. \n");  
		avformat_alloc_output_context2(&oc, NULL, "mpeg", filename);  
	}  
	if(!oc)return 1;  
	/* ---------------- camera (V4L2) input setup ---------------- */
	IntputDev video_input={0};  
	AVCodecContext  *pCodecCtx;  
	AVCodec         *pCodec;  
	AVFormatContext *v_ifmtCtx;  
	avdevice_register_all();  
	v_ifmtCtx = avformat_alloc_context();  
	/* Select the Linux video4linux2 capture backend. */
	AVInputFormat *ifmt=av_find_input_format("video4linux2");  
	if(avformat_open_input(&v_ifmtCtx,argv[1],ifmt,NULL)!=0)
	{  
		printf("Unable to open input stream.%s\n",argv[1]);  
		return -1;  
	}
	if(avformat_find_stream_info(v_ifmtCtx,NULL)<0)  
	{  
		printf("Stream information not found.\n");  
		return -1;  
	}
	/* Locate the first video stream exposed by the camera. */
	int videoindex=-1;  
	for(i=0; i<v_ifmtCtx->nb_streams; i++)   
	if(v_ifmtCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)  
	{  
		videoindex=i;  
		printf("videoindex=%d\n",videoindex);
		break;  
	}
	if(videoindex==-1)  
	{  
		printf("Video stream not found.\n");  
		return -1;  
	}  
	/* NOTE(review): streams[i]->codec is deprecated in FFmpeg 4.x; the
	 * codecpar + avcodec_parameters_to_context API is the replacement. */
	pCodecCtx=v_ifmtCtx->streams[videoindex]->codec; 
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);  
	if(pCodec==NULL)  
	{  
		printf("Codec not found.\n");  
		return -1;  
	}  
	if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)  
	{  
		printf("Unable to open codec.\n");  
		return -1;  
	}  
  
	AVFrame *pFrame,*pFrameYUV;  
	pFrame=av_frame_alloc();      /* receives raw decoded camera frames */
	pFrameYUV=av_frame_alloc();   /* backed by out_buffer, YUV420P layout */
	
	/* Buffer sized for one YUV420P frame at the camera resolution
	 * (alignment 16). */
	unsigned char *out_buffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height,16));  
	av_image_fill_arrays((AVPicture *)pFrameYUV->data,(AVPicture *)pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height,16);  
	printf("Camera size(WxH): %d x %d \n",pCodecCtx->width, pCodecCtx->height);  
	/* Publish the camera resolution; add_stream() reads these globals. */
	video_width=pCodecCtx->width;
	video_height=pCodecCtx->height;
	struct SwsContext *img_convert_ctx;  
	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);   
	AVPacket *in_packet=(AVPacket *)av_malloc(sizeof(AVPacket));  
	video_input.img_convert_ctx=img_convert_ctx;  
	video_input.in_packet=in_packet;  
	video_input.pCodecCtx=pCodecCtx;  
	video_input.pCodec=pCodec;  
	video_input.v_ifmtCtx=v_ifmtCtx;  
	video_input.videoindex=videoindex;  
	video_input.pFrame=pFrame;  
	video_input.pFrameYUV=pFrameYUV;  
    /* ---------------- camera input setup done ---------------- */
	fmt=oc->oformat;  
	/* Add the video stream using the container's default codec and
	 * initialize the encoder. */
	printf("fmt->video_codec = %d\n", fmt->video_codec);  
	if(fmt->video_codec != AV_CODEC_ID_NONE)
	{  
		add_stream(&video_st,oc,&video_codec,fmt->video_codec);  
		have_video=1;  
		encode_video=1;  
	}  
	/* All parameters are set: open the encoder and allocate its buffers. */
	if(have_video)open_video(oc, video_codec, &video_st, opt);  
	av_dump_format(oc,0,filename,1);  
	/* Open the output file, if the format needs one. */  
	if(!(fmt->flags & AVFMT_NOFILE))
	{  
		ret=avio_open(&oc->pb,filename,AVIO_FLAG_WRITE);  
		if(ret<0)
		{  
			fprintf(stderr, "Cannot be opened'%s': %s\n", filename,av_err2str(ret));  
			return 1;  
		}  
	}  
	/* Write the container header (if any). */  
	ret=avformat_write_header(oc, &opt);  
	if(ret<0)
	{  
		fprintf(stderr, "An error occurred while opening the output file: %s\n",av_err2str(ret));  
		return 1;  
	}  
	int got_pic;  
	/* Main loop: grab/convert one frame, then encode it.
	 * write_video_frame() returns non-zero (ending the loop) once
	 * get_video_frame() hands back the end-of-stream NULL frame. */
	while(encode_video)
	{  
		AVFrame *frame=get_video_frame(&video_st,&video_input,&got_pic);  
		if(!got_pic)  
		{  
			/* No decoded picture this round — back off briefly and retry. */
			usleep(10000);  
			continue;  
		}
		encode_video=!write_video_frame(oc,&video_st,frame);  
	}  
	av_write_trailer(oc);  
	/* Release the camera-side resources. */
	sws_freeContext(video_input.img_convert_ctx);  
	avcodec_close(video_input.pCodecCtx);  
	av_free(video_input.pFrameYUV);  
	av_free(video_input.pFrame);      
	avformat_close_input(&video_input.v_ifmtCtx); 
	/* Close the encoder and its frames. */  
	if (have_video)close_stream(oc, &video_st);
	/* Close the output file, if one was opened. */ 
	if (!(fmt->flags & AVFMT_NOFILE))avio_closep(&oc->pb);  
	/* Free the muxer context. */  
	avformat_free_context(oc); 
	return 0;  
}

5.2 Makefile file

# Cross-compile the camera-to-MP4 demo against the cross-built FFmpeg and
# x264 install trees.  -lm is appended because ffmpeg_video.c includes
# <math.h>; some toolchains do not link libm implicitly.
# (Output name stays the default a.out, matching the run instructions.)
app1:
	arm-linux-gcc ffmpeg_video.c -I /home/wbyq/work/tiny4412/ffmpeg-4.2.2/_install/include -L /home/wbyq/work/tiny4412/ffmpeg-4.2.2/_install/lib -I/home/wbyq/work/tiny4412/x264-master/_install/include -L/home/wbyq/work/tiny4412/x264-master/_install/lib -lavcodec -lavfilter -lavutil -lswresample -lavdevice -lavformat -lpostproc -lswscale -lx264 -lm

5.3 operation test

[root@tiny4412 code]# ./a.out /dev/video15 123.mp4

Since there is no player installed on the development board, the resulting 123.mp4 file is copied to a PC for playback below.

Added by djlfreak on Wed, 12 Jan 2022 05:45:54 +0200