Android FFmpeg 音视频系列:
前面 FFmpeg 系列的文章中,已经实现了 FFmpeg 的编译和集成,基于 FFmpeg 实现音视频的播放、录制,并结合 OpenGL 添加丰富的滤镜等功能,这些 demo 基本上将 FFmpeg 使用涉及到的知识点覆盖了。
学完这些的你肯定有一些想法,比如使用 FFmpeg 打造一个自己的通用播放器、 做一个音视频剪辑软件等等,那么接下来推荐做的是学习一些优秀的开源项目,音视频的开源项目首推 ExoPlayer、 ijkplayer。
但是这些著名的开源项目代码量比较大且功能繁多,对一些刚入门的开发者来说学习起来比较吃力,也不容易坚持看下来。
Fanplayer
所以我们可以从一些中等代码量的优秀开源项目开始学习起来,基于此,在学完本文 FFmpeg 系列的基础上,接下来可以学习和研究开源跨平台播放器 Fanplayer 。
项目地址:https://github.com/rockcarry/fanplayer
fanplayer 是一个基于 FFmpeg 实现的支持 Android 和 Windows 平台的通用播放器,支持硬解码、倍速播放、流媒体播放等功能,播放器常用的功能基本上都支持,项目结构清晰,非常方便入手学习。
但是 fanplayer 需要你自己在 Linux 环境下编译一遍 FFmpeg 源码生成依赖库,不过编译脚本作者都写好了,只需要自己动手编译并集成到项目中去。
实在嫌麻烦的同学,我这里已经把项目编译和集成完毕,直接拉下来看项目代码即可
项目代码:https://github.com/githubhaohao/Fanplayer-android
接下来简单讲解下 fanplayer 项目的源码供你参考,其中 Java 代码比较简单,就是 SurfaceView 的 surface 传下来构建 NativeWindow ,这里重点讲解 C 部分实现。
JNI 入口函数定义在文件 fanplayer_jni.cpp ,定义了几个播放器常用的 API:
// Native method table registered against com/rockcarry/fanplayer/MediaPlayer
// (see JNI_OnLoad below). Each entry: Java name, JNI signature, C entry point.
// The leading J in most signatures is the native player handle returned by nativeOpen.
static const JNINativeMethod g_methods[] = {
{ "nativeOpen" , "(Ljava/lang/String;Ljava/lang/Object;IILjava/lang/String;)J", (void*)nativeOpen }, // (url, surface, w, h, params) -> player handle
{ "nativeClose" , "(J)V" , (void*)nativeClose }, // (handle)
{ "nativePlay" , "(J)V" , (void*)nativePlay }, // (handle)
{ "nativePause" , "(J)V" , (void*)nativePause }, // (handle)
{ "nativeSeek" , "(JJ)V" , (void*)nativeSeek }, // (handle, position)
{ "nativeSetParam" , "(JIJ)V", (void*)nativeSetParam }, // (handle, id, value)
{ "nativeGetParam" , "(JI)J" , (void*)nativeGetParam }, // (handle, id) -> value
{ "nativeSetDisplaySurface" , "(JLjava/lang/Object;)V", (void*)nativeSetDisplaySurface }, // (handle, surface)
};
/*
 * Library entry point: called by the VM when the .so is loaded.
 * Registers the native methods of com.rockcarry.fanplayer.MediaPlayer,
 * stashes the JavaVM for later thread attachment, and hands the VM to
 * FFmpeg (av_jni_set_java_vm) so its Android codecs can reach Java.
 * Returns the required JNI version on success, -1 on failure.
 */
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved)
{
    DO_USE_VAR(reserved);
    JNIEnv* env = NULL;
    if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK || !env) {
        __android_log_print(ANDROID_LOG_ERROR, "fanplayer_jni", "ERROR: GetEnv failed\n");
        return -1;
    }
    jclass cls = env->FindClass("com/rockcarry/fanplayer/MediaPlayer");
    if (!cls) {
        // FindClass returns NULL (with a pending ClassNotFoundException) when the
        // class is absent; the original code passed that NULL to RegisterNatives.
        __android_log_print(ANDROID_LOG_ERROR, "fanplayer_jni", "ERROR: failed to find class MediaPlayer !\n");
        return -1;
    }
    int ret = env->RegisterNatives(cls, g_methods, sizeof(g_methods)/sizeof(g_methods[0]));
    if (ret != JNI_OK) {
        __android_log_print(ANDROID_LOG_ERROR, "fanplayer_jni", "ERROR: failed to register native methods !\n");
        return -1;
    }
    // keep the VM so worker threads can attach later (g_jvm is a file-scope global)
    g_jvm = vm;
    av_jni_set_java_vm(vm, NULL);
    return JNI_VERSION_1_4;
}
接下来就是 ffplayer.c 文件,封装了整个播放器,包含了三个子模块,分别是解复用、视频解码和音频解码模块,三个模块分别位于三个子线程中:
// 函数实现
/*
 * Opens a media file/URL and starts playback machinery.
 * NOTE(review): this listing is abridged ("代码省略") — the omitted code
 * presumably allocates and initializes `player`, opens the demuxer/decoders,
 * and jumps to error_handler on failure; as printed, `player` would still be
 * NULL when the threads are created. Refer to the full ffplayer.c source.
 *
 * file   - path or URL of the media to play
 * win    - native window handle used for video output
 * params - initial player parameters
 * returns an opaque PLAYER* handle, or NULL on failure.
 */
void* player_open(char *file, void *win, PLAYER_INIT_PARAMS *params)
{
PLAYER *player = NULL;
//........代码省略
// spin up the three pipeline threads: demux -> audio decode / video decode,
// connected by packet queues (producer/consumer model)
pthread_create(&player->avdemux_thread, NULL, av_demux_thread_proc, player);
pthread_create(&player->adecode_thread, NULL, audio_decode_thread_proc, player);
pthread_create(&player->vdecode_thread, NULL, video_decode_thread_proc, player);
return player; // return
error_handler:
// reached via goto from the omitted init code above on any failure
player_close(player);
return NULL;
}
解复用、视频解码和音频解码模块三个子线程是通过 packet 队列进行通信,生产者和消费者模型。
文件 adev-android.cpp ,音频播放是通过 JNI 创建了 AudioTrack 对象,开启了一个子线程不断地从保存 PCM 数据的队列(链表)中读取数据:
// 接口函数实现
/*
 * Creates the Android audio output device: builds a java AudioTrack via JNI,
 * starts it, and launches a rendering thread that drains the PCM queue into it.
 * NOTE(review): listing is abridged ("省略代码") — the omitted code presumably
 * allocates `ctxt` and obtains `env`; see the full adev-android.cpp source.
 *
 * type/bufnum/buflen - device type and PCM buffer pool configuration
 * cmnvars            - shared player state
 * returns the device context, presumably NULL on failure (can't tell from here).
 */
void* adev_create(int type, int bufnum, int buflen, CMNVARS *cmnvars)
{
//.......省略代码
// cache the AudioTrack method IDs we need
jclass jcls = env->FindClass("android/media/AudioTrack");
ctxt->jmid_at_init = env->GetMethodID(jcls, "<init>" , "(IIIIII)V");
ctxt->jmid_at_close = env->GetMethodID(jcls, "release", "()V");
ctxt->jmid_at_play = env->GetMethodID(jcls, "play" , "()V");
ctxt->jmid_at_pause = env->GetMethodID(jcls, "pause" , "()V");
ctxt->jmid_at_write = env->GetMethodID(jcls, "write" , "([BII)I");
// new AudioTrack(STREAM_MUSIC, rate, channels, format, bufferSize, MODE_STREAM)
// NOTE(review): value 3 matches the legacy AudioFormat.CHANNEL_CONFIGURATION_STEREO
// constant, not the modern CHANNEL_OUT_STEREO (12) — confirm against the ctor used
#define STREAM_MUSIC 3
#define ENCODING_PCM_16BIT 2
#define CHANNEL_STEREO 3
#define MODE_STREAM 1
jobject at_obj = env->NewObject(jcls, ctxt->jmid_at_init, STREAM_MUSIC, ADEV_SAMPLE_RATE, CHANNEL_STEREO, ENCODING_PCM_16BIT, ctxt->buflen * 2, MODE_STREAM);
// keep a global ref: the object is used from the rendering thread beyond this call
ctxt->jobj_at = env->NewGlobalRef(at_obj);
env->DeleteLocalRef(at_obj);
// start audiotrack
env->CallVoidMethod(ctxt->jobj_at, ctxt->jmid_at_play);
// create mutex & cond guarding the PCM buffer queue
pthread_mutex_init(&ctxt->lock, NULL);
pthread_cond_init (&ctxt->cond, NULL);
// create audio rendering thread (drains the PCM queue via AudioTrack.write)
pthread_create(&ctxt->thread, NULL, audio_render_thread_proc, ctxt);
return ctxt;
}
解码后的视频图像直接进行渲染,视频渲染走的是 ffrender.c 的 render_video ,然后调用 vdev-android.cpp 中的 vdev_android_lock:
static void vdev_android_lock(void *ctxt, uint8_t *buffer[8], int linesize[8], int64_t pts)
{
VDEVCTXT *c = (VDEVCTXT*)ctxt;
if (c->status & VDEV_ANDROID_UPDATE_WIN) {
if (c->win ) { ANativeWindow_release(c->win); c->win = NULL; }
if (c->surface) c->win = ANativeWindow_fromSurface(get_jni_env(), (jobject)c->surface);
if (c->win ) ANativeWindow_setBuffersGeometry(c->win, c->vw, c->vh, DEF_WIN_PIX_FMT);
c->status &= ~VDEV_ANDROID_UPDATE_WIN;
}
if (c->win) {
ANativeWindow_Buffer winbuf;
if (0 == ANativeWindow_lock(c->win, &winbuf, NULL)) {
buffer [0] = (uint8_t*)winbuf.bits;
linesize[0] = winbuf.stride * 4;
linesize[6] = c->vw;
linesize[7] = c->vh;
}
}
c->cmnvars->vpts = pts;
}
音视频同步用的是视频向音频同步的方式,并且参考 2 帧的理论渲染间隔进行微调,代码位于 vdev-cmn.c 中的 vdev_avsync_and_complete:
/*
 * Per-frame a/v sync: adjusts the sleep time (ticksleep) so video tracks the
 * audio clock (or the system clock when no audio pts is available), then
 * sleeps for the computed interval unless running in live no-sync mode.
 */
void vdev_avsync_and_complete(void *ctxt)
{
    LOGCATE("vdev_avsync_and_complete");
    VDEV_COMMON_CTXT *c = (VDEV_COMMON_CTXT*)ctxt;
    int     frame_interval, render_gap, clock_gap, av_gap = -1;
    int64_t now_ms, sysclock;

    if (c->status & VDEV_PAUSE) {
        // paused: just wait one nominal frame period
        c->ticksleep = c->tickframe;
    } else {
        //++ frame rate & av sync control ++//
        // theoretical inter-frame interval, scaled by playback speed (speed is a percentage, default 100)
        frame_interval = 100 * c->tickframe / c->speed;
        now_ms     = av_gettime_relative() / 1000;      // current system time in ms
        render_gap = (int)(now_ms - c->ticklast);       // measured gap between the last two renders
        c->ticklast = now_ms;

        // wall-clock time mapped into stream time: start_pts + elapsed * speed
        sysclock  = c->cmnvars->start_pts + (now_ms - c->cmnvars->start_tick) * c->speed / 100;
        clock_gap = (int)(sysclock - c->cmnvars->vpts - c->tickavdiff); // system clock vs video pts
        av_gap    = (int)(c->cmnvars->apts - c->cmnvars->vpts - c->tickavdiff); // audio pts vs video pts
        if (c->cmnvars->apts <= 0) av_gap = clock_gap;  // apts invalid: sync video to system clock

        // nudge the sleep time toward the theoretical frame interval
        if (render_gap - frame_interval >  5) c->ticksleep--;
        if (render_gap - frame_interval < -5) c->ticksleep++;

        // larger drift => larger correction step
        if (c->cmnvars->vpts >= 0) {
            if      (av_gap >  500) c->ticksleep -= 3;
            else if (av_gap >  50 ) c->ticksleep -= 2;
            else if (av_gap >  30 ) c->ticksleep -= 1;
            else if (av_gap < -500) c->ticksleep += 3;
            else if (av_gap < -50 ) c->ticksleep += 2;
            else if (av_gap < -30 ) c->ticksleep += 1;
        }
        if (c->ticksleep < 0) c->ticksleep = 0;
        LOGCATE("vdev_avsync_and_complete tickdiff=%d, tickframe=%d, c->ticksleep=%d", render_gap, frame_interval, c->ticksleep);
        //-- frame rate & av sync control --//
    }

    if (c->ticksleep > 0 && c->cmnvars->init_params->avts_syncmode != AVSYNC_MODE_LIVE_SYNC0) av_usleep(c->ticksleep * 1000);
    av_log(NULL, AV_LOG_INFO, "d: %3d, s: %3d\n", av_gap, c->ticksleep);
}
以上即是 fanplayer 项目的核心代码片段。
-- END --
Copyright© 2013-2020
All Rights Reserved 京ICP备2023019179号-8