## Cross-Compiling FFmpeg for Multiple Platforms and Extracting Video Frames

This document covers building FFmpeg runtime libraries for the armeabi, armeabi-v7a, arm64-v8a, x86, and x86_64 ABIs on an x86 host.

To build a newer FFmpeg release, switch to NDK r17c or later, change the value of the `extra-cflags` parameter in the build script to `"$CFALGS -Os -fPIC -DANDROID -Wfatal-errors -Wno-deprecated -isysroot $NDK_PATH/sysroot -I$NDK_PATH/sysroot/usr/include/$4"`, and append `--enable-avresample --enable-nonfree --enable-postproc` to the end of the `./configure` command.

### Development environment

- Build host: Ubuntu 18.10 x64
- Development machine: Windows 10
- IDE: Android Studio 3.4.1
- Android: 7.1
- FFmpeg: 3.4.6

### Build procedure

Download the FFmpeg source: [Download FFmpeg](http://ffmpeg.org/download.html#releases)

Extract the archive, enter the source directory, then create a `build.sh` file and make it executable:

```bash
tar zxvf ffmpeg-3.4.6.tar.gz
cd ffmpeg-3.4.6
touch build.sh
chmod +x build.sh
```

Put the following script into `build.sh`:

> NDK_PATH: NDK Revision 15c is recommended
> Adjust NDK_PATH, TOOLCHAIN_VERSION, and ANDROID_VERSION to match your environment

```bash
#!/bin/sh
MY_LIBS_NAME=ffmpeg-3.4.6
# Directory for intermediate build artifacts
MY_BUILD_DIR=binary
# NDK directory
NDK_PATH=/usr/android-sdk-linux/android-ndk-r15c
# Build platform
BUILD_PLATFORM=linux-x86_64
# Cross-compiler toolchain version inside the NDK
TOOLCHAIN_VERSION=4.9
# Android API level
ANDROID_VERSION=26

ANDROID_ARMV5_CFLAGS="-march=armv5te"
ANDROID_ARMV7_CFLAGS="-march=armv7-a -mfloat-abi=softfp -mfpu=neon"
ANDROID_ARMV8_CFLAGS="-march=armv8-a"
ANDROID_X86_CFLAGS="-march=i686 -mtune=intel -mssse3 -mfpmath=sse -m32"
ANDROID_X86_64_CFLAGS="-march=x86-64 -msse4.2 -mpopcnt -m64 -mtune=intel"

# params($1: arch, $2: arch_abi, $3: host, $4: cross_prefix, $5: cflags)
build_bin() {
    echo "------------------- Start build $2 -------------------------"

    ARCH=$1                 # arm arm64 x86 x86_64
    ANDROID_ARCH_ABI=$2     # armeabi armeabi-v7a x86 mips
    PREFIX=$(pwd)/dist/${MY_LIBS_NAME}/${ANDROID_ARCH_ABI}/
    HOST=$3
    SYSROOT=${NDK_PATH}/platforms/android-${ANDROID_VERSION}/arch-${ARCH}
    CFALGS=$5
    TOOLCHAIN=${NDK_PATH}/toolchains/${HOST}-${TOOLCHAIN_VERSION}/prebuilt/${BUILD_PLATFORM}
    CROSS_PREFIX=${TOOLCHAIN}/bin/$4-

    # Intermediate build directory
    mkdir -p ${MY_BUILD_DIR}/${ANDROID_ARCH_ABI}
    BUILD_DIR=./${MY_BUILD_DIR}/${ANDROID_ARCH_ABI}

    echo "pwd==$(pwd)"
    echo "ARCH==${ARCH}"
    echo "PREFIX==${PREFIX}"
    echo "HOST==${HOST}"
    echo "SYSROOT=${SYSROOT}"
    echo "CFALGS=${CFALGS}"
    echo "TOOLCHAIN==${TOOLCHAIN}"
    echo "CROSS_PREFIX=${CROSS_PREFIX}"

    mkdir -p ${BUILD_DIR}
    cd ${BUILD_DIR}

    sh ../../configure \
        --prefix=${PREFIX} \
        --target-os=linux \
        --arch=${ARCH} \
        --sysroot=$SYSROOT \
        --enable-cross-compile \
        --cross-prefix=${CROSS_PREFIX} \
        --extra-cflags="$CFALGS -Os -fPIC -DANDROID -Wfatal-errors -Wno-deprecated" \
        --extra-cxxflags="-D__thumb__ -fexceptions -frtti" \
        --extra-ldflags="-L${SYSROOT}/usr/lib" \
        --enable-shared \
        --enable-asm \
        --enable-neon \
        --disable-encoders \
        --enable-encoder=aac \
        --enable-encoder=mjpeg \
        --enable-encoder=png \
        --disable-decoders \
        --enable-decoder=aac \
        --enable-decoder=aac_latm \
        --enable-decoder=h264 \
        --enable-decoder=mpeg4 \
        --enable-decoder=mjpeg \
        --enable-decoder=png \
        --disable-demuxers \
        --enable-demuxer=image2 \
        --enable-demuxer=h264 \
        --enable-demuxer=aac \
        --disable-parsers \
        --enable-parser=aac \
        --enable-parser=ac3 \
        --enable-parser=h264 \
        --enable-gpl \
        --disable-doc \
        --disable-ffmpeg \
        --disable-ffplay \
        --disable-ffprobe \
        --disable-symver \
        --disable-debug \
        --enable-small

    make clean
    make
    make install

    cd ../../
    echo "------------------- $2 Build finish -------------------------"
}

# build for armeabi
#build_bin arm armeabi arm-linux-androideabi arm-linux-androideabi "$ANDROID_ARMV5_CFLAGS"

# build for armeabi-v7a
#build_bin arm armeabi-v7a arm-linux-androideabi arm-linux-androideabi "$ANDROID_ARMV7_CFLAGS"

# build for arm64-v8a
build_bin arm64 arm64-v8a aarch64-linux-android aarch64-linux-android "$ANDROID_ARMV8_CFLAGS"

# build for x86
#build_bin x86 x86 x86 i686-linux-android "$ANDROID_X86_CFLAGS"

# build for x86_64
#build_bin x86_64 x86_64 x86_64 x86_64-linux-android "$ANDROID_X86_64_CFLAGS"
```

Pick the build targets you need by commenting or uncommenting the `build_bin` lines at the end of the script, then simply run the script and the build proceeds automatically.
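With only the arm64-v8a target enabled (as in the script above), a typical run might look like the sketch below; the `file` call at the end is merely an optional sanity check and assumes the `file` utility is installed on the build host:

```bash
./build.sh
# Libraries and headers are installed under dist/<MY_LIBS_NAME>/<ABI>/
ls dist/ffmpeg-3.4.6/arm64-v8a/lib
# Optional: confirm the shared objects really target AArch64
file dist/ffmpeg-3.4.6/arm64-v8a/lib/libavcodec-57.so
```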
> **Note: JNI only accepts library files whose names end in `.so`, but FFmpeg's configure names the shared libraries with the version number as the suffix, so the configure settings have to be modified.**

In FFmpeg 3.4.6 the following settings are located at line 3416 of `configure`:

```ini
SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)'
SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR) $(SLIBNAME)'
```

Change them to the following:

```ini
SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)-$(LIBMAJOR)$(SLIBSUF)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)'
SLIB_INSTALL_LINKS='$(SLIBNAME)'
```

> **Note: some FFmpeg versions fail to compile with errors like the following**

```bash
libavcodec/aaccoder.c: In function 'search_for_ms':
libavcodec/aaccoder.c:803:25: error: expected identifier or '(' before numeric constant
libavcodec/hevc_mvs.c: In function 'derive_spatial_merge_candidates':
libavcodec/hevc_mvs.c:368:23: error: 'y0000000' undeclared (first use in this function)
```

If this happens, edit `aaccoder.c` and `hevc_mvs.c` under `path_to_ffmpeg_src/libavcodec/` and rename the `B0` identifier (it collides with a macro in the NDK headers) to some other name, for example `BB`; a bulk rename is sketched below.
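One way to do the rename in bulk is a sed one-liner; this is only a sketch, assuming GNU sed on the Ubuntu build host, and it is worth reviewing the result afterwards since a blind word-boundary replace could in principle touch other occurrences:

```bash
# Rename the conflicting B0 identifier to BB in both affected files
sed -i 's/\bB0\b/BB/g' libavcodec/aaccoder.c libavcodec/hevc_mvs.c
```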
Directory layout after a successful build:

```bash
ffmpeg-3.4.6
├─ binary                    # intermediate build artifacts
├─ build.sh                  # build script
├─ Changelog
├─ compat
├─ configure
├─ CONTRIBUTING.md
├─ COPYING.GPLv2
├─ COPYING.GPLv3
├─ COPYING.LGPLv2.1
├─ COPYING.LGPLv3
├─ CREDITS
├─ dist                      # installed libraries and headers
│  └─ ffmpeg-3.4.6           # directory name comes from MY_LIBS_NAME
│     └─ arm64-v8a           # matches the target ABI name
│        ├─ bin
│        ├─ include          # header files
│        │  ├─ libavcodec
│        │  ├─ libavdevice
│        │  ├─ libavfilter
│        │  ├─ libavformat
│        │  ├─ libavutil
│        │  ├─ libpostproc
│        │  ├─ libswresample
│        │  └─ libswscale
│        ├─ lib              # shared and static libraries
│        │  ├─ libavcodec-57.so
│        │  ├─ libavcodec.a
│        │  ├─ libavcodec.so -> libavcodec-57.so
│        │  ├─ libavdevice-57.so
│        │  ├─ libavdevice.a
│        │  ├─ libavdevice.so -> libavdevice-57.so
│        │  ├─ libavfilter-6.so
│        │  ├─ libavfilter.a
│        │  ├─ libavfilter.so -> libavfilter-6.so
│        │  ├─ libavformat-57.so
│        │  ├─ libavformat.a
│        │  ├─ libavformat.so -> libavformat-57.so
│        │  ├─ libavutil-55.so
│        │  ├─ libavutil.a
│        │  ├─ libavutil.so -> libavutil-55.so
│        │  ├─ libpostproc-54.so
│        │  ├─ libpostproc.a
│        │  ├─ libpostproc.so -> libpostproc-54.so
│        │  ├─ libswresample-2.so
│        │  ├─ libswresample.a
│        │  ├─ libswresample.so -> libswresample-2.so
│        │  ├─ libswscale-4.so
│        │  ├─ libswscale.a
│        │  ├─ libswscale.so -> libswscale-4.so
│        │  └─ pkgconfig
│        └─ share
├─ doc
├─ ffbuild
├─ fftools
├─ INSTALL.md
├─ libavcodec
├─ libavdevice
├─ libavfilter
├─ libavformat
├─ libavresample
├─ libavutil
├─ libpostproc
├─ libswresample
├─ libswscale
├─ LICENSE.md
├─ MAINTAINERS
├─ Makefile
├─ presets
├─ README.md
├─ RELEASE
├─ RELEASE_NOTES
├─ tests
├─ tools
└─ VERSION
```

### Importing the libraries into the project

Target directory layout:

```bash
ffmpegtest
├─ app
│  ├─ build
│  ├─ libs
│  └─ src
│     ├─ androidTest
│     ├─ main
│     │  ├─ java
│     │  │  └─ com
│     │  │     └─ example
│     │  │        └─ ffmpegtest
│     │  │           └─ MainActivity.java
│     │  ├─ jni                     # C/C++ sources
│     │  │  └─ include              # imported FFmpeg headers
│     │  │     ├─ libavcodec
│     │  │     ├─ libavdevice
│     │  │     ├─ libavfilter
│     │  │     ├─ libavformat
│     │  │     ├─ libavutil
│     │  │     ├─ libpostproc
│     │  │     ├─ libswresample
│     │  │     └─ libswscale
│     │  ├─ jniLibs                 # shared libraries loaded through JNI
│     │  │  └─ arm64-v8a            # one folder per target ABI
│     │  │     ├─ libavcodec-57.so
│     │  │     ├─ libavdevice-57.so
│     │  │     ├─ libavfilter-6.so
│     │  │     ├─ libavformat-57.so
│     │  │     ├─ libavutil-55.so
│     │  │     ├─ libpostproc-54.so
│     │  │     ├─ libswresample-2.so
│     │  │     └─ libswscale-4.so
│     │  └─ res
│     └─ test
└─ gradle
```
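Populating these directories from the build output can be done by hand; a minimal sketch of the copy step is shown below, assuming the FFmpeg source tree sits next to the `ffmpegtest` project (adjust the relative paths to your actual layout):

```bash
cd ffmpegtest/app
mkdir -p src/main/jni/include src/main/jniLibs/arm64-v8a
# Headers from the build output go into the JNI include directory
cp -r ../../ffmpeg-3.4.6/dist/ffmpeg-3.4.6/arm64-v8a/include/. src/main/jni/include/
# Versioned shared libraries go into the ABI-specific jniLibs directory
cp ../../ffmpeg-3.4.6/dist/ffmpeg-3.4.6/arm64-v8a/lib/lib*-*.so src/main/jniLibs/arm64-v8a/
```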
Add the following to `CMakeLists.txt`:

```cmake
include_directories(${PROJECT_SOURCE_DIR}/src/main/jni/include)

add_library(ffmpegTest
        SHARED
        src/main/jni/ffmpegTest.cpp)

# Android NDK log library, referenced as ${log-lib} below
find_library(log-lib log)

add_library(avcodec-57 SHARED IMPORTED)
set_target_properties(avcodec-57
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavcodec-57.so)

add_library(avfilter-6 SHARED IMPORTED)
set_target_properties(avfilter-6
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavfilter-6.so)

add_library(avformat-57 SHARED IMPORTED)
set_target_properties(avformat-57
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavformat-57.so)

add_library(avutil-55 SHARED IMPORTED)
set_target_properties(avutil-55
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavutil-55.so)

add_library(postproc-54 SHARED IMPORTED)
set_target_properties(postproc-54
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libpostproc-54.so)

add_library(avdevice-57 SHARED IMPORTED)
set_target_properties(avdevice-57
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavdevice-57.so)

add_library(swscale-4 SHARED IMPORTED)
set_target_properties(swscale-4
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libswscale-4.so)

add_library(swresample-2 SHARED IMPORTED)
set_target_properties(swresample-2
        PROPERTIES IMPORTED_LOCATION
        ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libswresample-2.so)

target_link_libraries(ffmpegTest
        ${log-lib}
        avcodec-57
        avfilter-6
        avformat-57
        avutil-55
        postproc-54
        avdevice-57
        swscale-4
        swresample-2)
```

### Extracting video frames and saving them as images

```cpp
#include <jni.h>
#include <android/log.h>
#include <ctime>
#include <cstdio>

extern "C" {
#include <libavformat/avformat.h>
}

#define DEBUG

#ifdef DEBUG
#define LOG "ffmpegLOG"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG, __VA_ARGS__)
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL, LOG, __VA_ARGS__)
#else
#define LOG
#define LOGD(...)
#define LOGI(...)
#define LOGW(...)
#define LOGE(...)
#define LOGF(...)
#endif

int writeJPEG(AVFrame *frame, int width, int height, char *output_path, int image_index);

extern "C"
JNIEXPORT jint JNICALL
Java_com_example_ffmpegtest_MainActivity_videoFrame(JNIEnv *env, jobject instance,
                                                    jstring filePath_, jstring outputPath_) {
    const char *filePath = env->GetStringUTFChars(filePath_, 0);
    const char *outputPath = env->GetStringUTFChars(outputPath_, 0);

    LOGE("======================= ffmpeg start =======================");
    clock_t time_start, time_finish;
    double total_time;
    time_start = clock();

    // Register all muxers, demuxers and codecs
    av_register_all();

    AVFormatContext *formatContext = nullptr;
    int ret = 0;

    LOGD("Video path: [%s]", filePath);

    // Open the media file
    ret = avformat_open_input(&formatContext, filePath, nullptr, nullptr);
    if (ret < 0) {
        LOGE("Cannot open file, error code: [%d]", ret);
        return -1;
    }

    // Read the stream information
    ret = avformat_find_stream_info(formatContext, nullptr);
    if (ret < 0) {
        LOGE("Cannot find stream, error code: [%d]", ret);
        return -1;
    }

    int video_index = -1;
    // Walk the streams and remember the first video stream
    for (int i = 0; i < formatContext->nb_streams; i++) {
        if (formatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_index = i;
            break;
        }
    }
    if (video_index == -1) {
        LOGE("Cannot find video stream");
        return -1;
    }

    // Take the stream's AVCodecID and look up the matching decoder
    AVCodecContext *codecContext = formatContext->streams[video_index]->codec;
    enum AVCodecID codecId = codecContext->codec_id;
    AVCodec *codec = avcodec_find_decoder(codecId);
    if (!codec) {
        LOGE("Cannot find decoder");
        return -1;
    }

    // Open the decoder
    ret = avcodec_open2(codecContext, codec, nullptr);
    if (ret < 0) {
        LOGE("Cannot open decoder, error code: [%d]", ret);
        return -1;
    }

    // Allocate packet and frame buffers
    AVPacket *packet = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    int image_index = 0;

    // Read packets until the end of the file
    while (av_read_frame(formatContext, packet) >= 0) {
        if (packet->stream_index == video_index) {
            int gotFrame = 0;
            // Decode the AVPacket into raw data (YUV/RGB for video, PCM for audio) stored in the AVFrame
            avcodec_decode_video2(codecContext, frame, &gotFrame, packet);
            if (gotFrame) {
                image_index++;
                // Save the decoded frame to local storage
                ret = writeJPEG(frame, codecContext->width, codecContext->height,
                                (char *) outputPath, image_index);
                if (ret == 0) {
                    LOGI("Save frame in %s and rename to video_frame_%d.jpg", outputPath, image_index);
                }
            }
        }
        // Release the packet payload before reading the next one
        av_packet_unref(packet);
    }

    time_finish = clock();
    total_time = (double) (time_finish - time_start) / CLOCKS_PER_SEC;
    LOGE("Total time: [%f]s --- ffmpeg", total_time);

    av_packet_free(&packet);
    if (frame) {
        av_frame_free(&frame);
    }
    avcodec_close(codecContext);
    avformat_close_input(&formatContext);

    LOGE("======================= ffmpeg finish =======================");

    env->ReleaseStringUTFChars(filePath_, filePath);
    env->ReleaseStringUTFChars(outputPath_, outputPath);
    return 0;
}

int writeJPEG(AVFrame *frame, int width, int height, char *output_path, int image_index) {
    char out_file[1024];
    sprintf(out_file, "%s/video_frame_%d.jpg", output_path, image_index);

    // Allocate and initialise an output AVFormatContext for a single JPEG image
    AVFormatContext *formatContext = nullptr;
    avformat_alloc_output_context2(&formatContext, nullptr, "singlejpeg", out_file);

    // Open (create) the output file
    if (avio_open2(&formatContext->pb, out_file, AVIO_FLAG_READ_WRITE, nullptr, nullptr) < 0) {
        LOGE("Open file failed---write JPEG");
        return -1;
    }

    // Create the output stream (e.g. video - H.264, audio - AAC; here a single MJPEG stream)
    AVStream *stream = avformat_new_stream(formatContext, nullptr);
    if (stream == nullptr) {
        LOGE("Create stream failed---write JPEG");
        return -1;
    }

    AVCodecContext *codecContext = stream->codec;
    // Fill in the stream parameters (frame information)
    codecContext->codec_id = formatContext->oformat->video_codec;
    codecContext->codec_type = AVMEDIA_TYPE_VIDEO;
    codecContext->pix_fmt = AV_PIX_FMT_YUVJ420P;
    codecContext->height = height;
    codecContext->width = width;
    codecContext->time_base.num = 1;
    codecContext->time_base.den = 25;

    // Find the encoder for the output codec (MJPEG)
    AVCodec *codec = avcodec_find_encoder(codecContext->codec_id);
    if (!codec) {
        LOGE("Cannot find encoder---write JPEG");
        return -1;
    }
    // Open the encoder
    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        LOGE("Cannot open encoder---write JPEG");
        return -1;
    }
    // Copy the codec parameters into the stream's codecpar
    avcodec_parameters_from_context(stream->codecpar, codecContext);

    // Write the container header
    avformat_write_header(formatContext, nullptr);

    int size = codecContext->width * codecContext->height;
    // Allocate and initialise the output AVPacket
    AVPacket *packet = av_packet_alloc();
    av_new_packet(packet, size * 3);

    int got_image = 0;
    // Run the encoder to produce a frame in the target format
    int result = avcodec_encode_video2(codecContext, packet, frame, &got_image);
    if (result < 0) {
        LOGE("Encode failed---write JPEG");
        return -1;
    }
    if (got_image == 1) {
        // Write the encoded frame
        av_write_frame(formatContext, packet);
    }

    // Release the packet
    av_packet_free(&packet);
    // Write the trailer
    av_write_trailer(formatContext);

    // Close the output file
    avio_close(formatContext->pb);
    // Close the encoder and free the format context
    avcodec_close(codecContext);
    avformat_free_context(formatContext);

    return 0;
}
```

### Calling it from an Activity

```java
public class MainActivity extends AppCompatActivity {

    // Load the native library
    static {
        System.loadLibrary("ffmpegTest");
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Path of the external storage device
        String storagePath = Environment.getExternalStorageDirectory().getPath();
        File videoPath = new File(storagePath + "/Download/testVideo.mp4");
        videoFrame(videoPath.toString(), storagePath + "/Download/video_frames/");
    }

    // Native method exported by the runtime library
    public native int videoFrame(String filePath, String outputPath);
}
```
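To try the demo end to end, the sample video has to exist at the path the Activity expects and the output directory must already exist, because the native code opens `<outputPath>/video_frame_N.jpg` directly without creating directories; on Android 6.0+ the app also needs the storage permission granted at runtime. A possible sequence with adb looks like this (a sketch; `testVideo.mp4` is whatever clip you use for testing):

```bash
# Push a test clip to the location hard-coded in MainActivity
adb push testVideo.mp4 /sdcard/Download/testVideo.mp4
# Create the output directory the native code writes into
adb shell mkdir -p /sdcard/Download/video_frames
# Launch the app on the device, then pull the extracted frames back to the host
adb pull /sdcard/Download/video_frames ./video_frames
```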