ffmpeg 解码库在android中的使用

前面有博客讲到在 Linux 下编译 ffmpeg。

那么编译完成之后应该怎么使用呢?

在参考了 ffmpeg 官方解码 demo 之后,这里给出一个解码 so 库的 JNI 实现方法。

在编译完成ffmpeg的源码后,新建一个工程如下图目录结构



在ffmpeg编译后的源码中include文件夹中拷贝上述头文件,然后将编译出的so库拷贝至prebuilt文件夹

新建Android.mk

内容如下:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE :=  avutil-54-prebuilt
LOCAL_SRC_FILES := prebuilt/libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE :=  avswresample-1-prebuilt
LOCAL_SRC_FILES := prebuilt/libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE :=  swscale-3-prebuilt
LOCAL_SRC_FILES := prebuilt/libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avcodec-56-prebuilt
LOCAL_SRC_FILES := prebuilt/libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)

# avdevice is intentionally not shipped; keep it commented out AND out of
# LOCAL_SHARED_LIBRARIES below, otherwise ndk-build fails on the missing module.
#include $(CLEAR_VARS)
#LOCAL_MODULE := avdevice-56-prebuilt
#LOCAL_SRC_FILES := prebuilt/libavdevice-56.so
#include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat-56-prebuilt
LOCAL_SRC_FILES := prebuilt/libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter-5-prebuilt
LOCAL_SRC_FILES := prebuilt/libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)

# Module name must match System.loadLibrary("ffmpegutils") in FFmpeg.java
# (the "lib" prefix is stripped by loadLibrary).
LOCAL_MODULE := libffmpegutils
LOCAL_SRC_FILES := FFmpeg.c
LOCAL_LDLIBS := -llog -ljnigraphics -lz -landroid -lm -pthread
# NOTE: avdevice-56-prebuilt removed from this list — its prebuilt block
# above is commented out, so referencing it would break the build.
LOCAL_SHARED_LIBRARIES := avcodec-56-prebuilt avfilter-5-prebuilt avformat-56-prebuilt avutil-54-prebuilt avswresample-1-prebuilt swscale-3-prebuilt

include $(BUILD_SHARED_LIBRARY)

新建Application.mk 内容:

APP_ABI := armeabi-v7a
APP_PLATFORM := android-9


新建接口FFmpeg.java :

package com.android;

import android.util.Log;

/**
 * JNI bridge to the native H.264 decoder built on top of the FFmpeg
 * prebuilt shared libraries. All native methods are implemented in FFmpeg.c.
 */
public class FFmpeg {

	/** Native libraries in dependency order: avutil first, the JNI wrapper last. */
	private static final String[] NATIVE_LIBS = {
			"avutil-54",
			"swresample-1",
			"swscale-3",
			"avcodec-56",
			"avformat-56",
			"avfilter-5",
			"ffmpegutils"
	};

	static {
		try {
			for (String lib : NATIVE_LIBS) {
				System.loadLibrary(lib);
			}
		} catch (UnsatisfiedLinkError ule) {
			Log.d("FFMPEG", ule.getMessage());
		}
	}

	/** Initializes the decoder for the given output size; returns 1 on success, negative on failure. */
	public native int H264DecoderInit(int width, int height);

	/** Releases all native decoder state; returns 1. */
	public native int H264DecoderRelease();

	/** Decodes one H.264 packet from {@code in} into RGB565 pixels in {@code out}; returns bytes consumed or a negative error. */
	public native int H264Decode(byte[] in, int insize, byte[] out);

	/** Returns the linked libavcodec version integer. */
	public native int GetFFmpegVersion();

}
生成头文件:

 定位到编译输出的 class 根目录(如 bin/classes),在 cmd 中运行 javah -jni com.android.FFmpeg(注意参数是全限定类名,不带 .class 后缀),生成 com_android_FFmpeg.h



新建接口实现类FFmpeg.c:

#include <math.h>

#include <libavutil/opt.h>
#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
#include <libavutil/common.h>
#include <libavutil/imgutils.h>
#include <libavutil/mathematics.h>
#include <libavutil/samplefmt.h>
#include <android/log.h>
#include "com_android_FFmpeg.h"
#define LOG_TAG "H264Android.c"

#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)

#ifdef __cplusplus
extern "C" {
#endif
//Video decoder state. One global decoder instance shared by all JNI calls
//(not guarded by any lock — callers must serialize access).
struct AVCodecContext *pAVCodecCtx = NULL;	// created in H264DecoderInit, closed in H264DecoderRelease
struct AVCodec *pAVCodec;	// H.264 decoder looked up via avcodec_find_decoder
struct AVPacket mAVPacket;	// reused packet wrapping each input buffer
struct AVFrame *pAVFrame = NULL;	// reused decoded-frame buffer

//Audio decoder state — declared but never used in this file.
struct AVCodecContext *pAUCodecCtx = NULL;
struct AVCodec *pAUCodec;
struct AVPacket mAUPacket;
struct AVFrame *pAUFrame = NULL;

// Output surface size, set from Java in H264DecoderInit; used by
// DisplayYUV_16 to clip and stride the destination buffer.
int iWidth = 0;
int iHeight = 0;

// YUV->RGB contribution tables; colortab is one allocation of 4*256 ints
// and the four named pointers index its quarters (see CreateYUVTab_16).
int *colortab = NULL;
int *u_b_tab = NULL;
int *u_g_tab = NULL;
int *v_g_tab = NULL;
int *v_r_tab = NULL;

//short *tmp_pic=NULL;

// RGB565 component tables with clamping; rgb_2_pix is one allocation of
// 3*768 unsigned ints, and r/g/b_2_pix point 256 entries in so they can be
// indexed with values in [-256, 511] without a branch (see CreateYUVTab_16).
unsigned int *rgb_2_pix = NULL;
unsigned int *r_2_pix = NULL;
unsigned int *g_2_pix = NULL;
unsigned int *b_2_pix = NULL;

void DeleteYUVTab() {
//	av_free(tmp_pic);

	av_free(colortab);
	av_free(rgb_2_pix);
}

/*
 * Builds the lookup tables used by DisplayYUV_16 for YUV420 -> RGB565
 * conversion: per-value chroma contributions (BT.601 coefficients) and
 * clamped 5/6/5-bit component tables.
 * NOTE(review): results are never checked — av_malloc failure would crash
 * on the first table write.
 */
void CreateYUVTab_16() {
	int i;
	int u, v;

//	tmp_pic = (short*)av_malloc(iWidth*iHeight*2); // scratch picture: iWidth * iHeight * 16 bits

	// One allocation split into four 256-entry tables, indexed by the raw
	// 8-bit chroma sample.
	colortab = (int *) av_malloc(4 * 256 * sizeof(int));
	u_b_tab = &colortab[0 * 256];
	u_g_tab = &colortab[1 * 256];
	v_g_tab = &colortab[2 * 256];
	v_r_tab = &colortab[3 * 256];

	for (i = 0; i < 256; i++) {
		u = v = (i - 128); // chroma samples are biased by 128

		// BT.601 conversion coefficients, precomputed per sample value.
		u_b_tab[i] = (int) (1.772 * u);
		u_g_tab[i] = (int) (0.34414 * u);
		v_g_tab[i] = (int) (0.71414 * v);
		v_r_tab[i] = (int) (1.402 * v);
	}

	// Three 768-entry tables laid out as [256 underflow | 256 valid | 256
	// overflow] so an index in [-256, 511] clamps without branching.
	rgb_2_pix = (unsigned int *) av_malloc(3 * 768 * sizeof(unsigned int));

	r_2_pix = &rgb_2_pix[0 * 768];
	g_2_pix = &rgb_2_pix[1 * 768];
	b_2_pix = &rgb_2_pix[2 * 768];

	// Underflow region: any negative component clamps to 0.
	for (i = 0; i < 256; i++) {
		r_2_pix[i] = 0;
		g_2_pix[i] = 0;
		b_2_pix[i] = 0;
	}

	// Valid region: pack into RGB565 bit positions (R: bits 11-15,
	// G: bits 5-10, B: bits 0-4).
	for (i = 0; i < 256; i++) {
		r_2_pix[i + 256] = (i & 0xF8) << 8;
		g_2_pix[i + 256] = (i & 0xFC) << 3;
		b_2_pix[i + 256] = (i) >> 3;
	}

	// Overflow region: any component > 255 clamps to full intensity.
	for (i = 0; i < 256; i++) {
		r_2_pix[i + 512] = 0xF8 << 8;
		g_2_pix[i + 512] = 0xFC << 3;
		b_2_pix[i + 512] = 0x1F;
	}

	// Advance base pointers so table[x] is valid for x in [-256, 511];
	// DeleteYUVTab must free rgb_2_pix (the original base), not these.
	r_2_pix += 256;
	g_2_pix += 256;
	b_2_pix += 256;
}

/*
 * Converts a YUV420 planar frame to RGB565, writing two 16-bit pixels per
 * 32-bit word into pdst1. Processes a 2x2 pixel block per inner iteration
 * (the four luma samples sharing one chroma pair).
 *
 *  y/u/v          : source planes
 *  width/height   : source frame dimensions
 *  src_ystride    : luma plane stride (bytes); src_uvstride: chroma stride
 *  dst_ystride    : destination stride, presumably in 16-bit pixels (callers
 *                   pass iWidth) — TODO confirm
 * Output is clipped to the iWidth x iHeight surface set at init time.
 */
void DisplayYUV_16(unsigned int *pdst1, unsigned char *y, unsigned char *u,
		unsigned char *v, int width, int height, int src_ystride,
		int src_uvstride, int dst_ystride) {
	int i, j;
	int r, g, b, rgb;

	int yy, ub, ug, vg, vr;

	unsigned char* yoff;
	unsigned char* uoff;
	unsigned char* voff;

	unsigned int* pdst = pdst1;

	// Loop counts are in 2x2 blocks, hence half the pixel dimensions.
	int width2 = width / 2;
	int height2 = height / 2;

	// Source wider than the display: clip and center horizontally.
	if (width2 > iWidth / 2) {
		width2 = iWidth / 2;

		y += (width - iWidth) / 4 * 2;
		u += (width - iWidth) / 4;
		v += (width - iWidth) / 4;
	}

	// NOTE(review): compares block rows against full iHeight (not iHeight/2),
	// unlike the width check above — verify this asymmetry is intended.
	if (height2 > iHeight)
		height2 = iHeight;

	for (j = 0; j < height2; j++) {
		yoff = y + j * 2 * src_ystride; // top luma row of this 2x2 block row
		uoff = u + j * src_uvstride;
		voff = v + j * src_uvstride;

		for (i = 0; i < width2; i++) {
			// Chroma contributions shared by all four pixels of the block.
			yy = *(yoff + (i << 1));
			ub = u_b_tab[*(uoff + i)];
			ug = u_g_tab[*(uoff + i)];
			vg = v_g_tab[*(voff + i)];
			vr = v_r_tab[*(voff + i)];

			// Top-left pixel; table lookups clamp to [0,255] implicitly.
			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

			// Top-right pixel, packed into the high 16 bits of the word.
			yy = *(yoff + (i << 1) + 1);
			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			pdst[(j * dst_ystride + i)] = (rgb)
					+ ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);

			// Bottom-left pixel (next luma row).
			yy = *(yoff + (i << 1) + src_ystride);
			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

			// Bottom-right pixel; index is in words ((row*stride+col)/2).
			yy = *(yoff + (i << 1) + src_ystride + 1);
			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			pdst[((2 * j + 1) * dst_ystride + i * 2) >> 1] = (rgb)
					+ ((r_2_pix[r] + g_2_pix[g] + b_2_pix[b]) << 16);
		}
	}
}
/*
 * Class:     com_android_FFmpeg
 * Method:    H264DecoderInit
 * Signature: (II)I
 *
 * Creates the global H.264 decoder for a width x height output surface.
 * Safe to call repeatedly: any previous decoder state is torn down first.
 * Returns 1 on success, a negative value on failure.
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_H264DecoderInit(
		JNIEnv * env, jobject jobj, jint width, jint height) {
	iWidth = width;
	iHeight = height;

	if (pAVCodecCtx != NULL) {
		avcodec_close(pAVCodecCtx);
		pAVCodecCtx = NULL;
	}
	if (pAVFrame != NULL) {
		// Use av_frame_free (not av_free) to match av_frame_alloc below.
		av_frame_free(&pAVFrame);
	}
	// Register all formats and codecs
	av_register_all();
	LOGD("avcodec register success");

	pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
	if (pAVCodec == NULL)
		return -1;

	//init AVCodecContext
	pAVCodecCtx = avcodec_alloc_context3(pAVCodec);
	if (pAVCodecCtx == NULL)
		return -1;

	/* we do not send complete frames */
	if (pAVCodec->capabilities & CODEC_CAP_TRUNCATED)
		pAVCodecCtx->flags |= CODEC_FLAG_TRUNCATED;

	/* Open the codec once and reuse the result. The original called
	 * avcodec_open2 a second time just to obtain the error code, which
	 * is invalid on an already-failed context. Also free the context on
	 * failure instead of leaking it. */
	int ret = avcodec_open2(pAVCodecCtx, pAVCodec, NULL);
	if (ret < 0) {
		av_free(pAVCodecCtx);
		pAVCodecCtx = NULL;
		return ret;
	}

	av_init_packet(&mAVPacket);

	pAVFrame = av_frame_alloc();
	if (pAVFrame == NULL)
		return -1;

	LOGD("avcodec context  success");

	CreateYUVTab_16();
	LOGD("create yuv table success");

	return 1;
}

/*
 * Class:     com_android_FFmpeg
 * Method:    H264DecoderRelease
 * Signature: ()I
 *
 * Releases the decoder context, the reusable frame and the YUV lookup
 * tables. Always returns 1; safe to call even if init was never run.
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_H264DecoderRelease(
		JNIEnv * env, jobject jobj) {
	if (pAVCodecCtx != NULL) {
		avcodec_close(pAVCodecCtx);
		pAVCodecCtx = NULL;
	}

	if (pAVFrame != NULL) {
		// Frames from av_frame_alloc must be freed with av_frame_free,
		// which also releases any referenced frame data (av_free alone
		// leaks the frame's internals and leaves a dangling pointer style).
		av_frame_free(&pAVFrame);
	}
	DeleteYUVTab();
	return 1;
}

/*
 * Class:     com_android_FFmpeg
 * Method:    H264Decode
 * Signature: ([BI[B)I
 *
 * Decodes one H.264 packet of inbuf_size bytes from `in`. If a full frame
 * is produced, it is converted to RGB565 into `out`. Returns the number of
 * bytes consumed, or a negative error code from libavcodec.
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_H264Decode(JNIEnv* env,
		jobject thiz, jbyteArray in, jint inbuf_size, jbyteArray out) {
	jbyte *inbuf = (*env)->GetByteArrayElements(env, in, 0);
	jbyte *Picture = (*env)->GetByteArrayElements(env, out, 0);

	av_frame_unref(pAVFrame);
	mAVPacket.data = (uint8_t *) inbuf;
	mAVPacket.size = inbuf_size;
	LOGD("mAVPacket.size:%d\n ", mAVPacket.size);

	int got_picture = 0;
	int len = avcodec_decode_video2(pAVCodecCtx, pAVFrame, &got_picture,
			&mAVPacket);
	LOGD("len:%d\n", len);

	if (len < 0) {
		LOGD("len=-1,decode error");
	} else if (got_picture > 0) {
		LOGD("GOT PICTURE");
		DisplayYUV_16((unsigned int *) Picture, pAVFrame->data[0],
				pAVFrame->data[1], pAVFrame->data[2], pAVCodecCtx->width,
				pAVCodecCtx->height, pAVFrame->linesize[0],
				pAVFrame->linesize[1], iWidth);
	} else {
		LOGD("GOT PICTURE fail");
	}

	/* Always release the pinned arrays — the original returned early on
	 * decode errors and leaked both. JNI_ABORT for the input: it is only
	 * read, so there is nothing to copy back. */
	(*env)->ReleaseByteArrayElements(env, in, inbuf, JNI_ABORT);
	(*env)->ReleaseByteArrayElements(env, out, Picture, 0);

	return len;
}

/*
 * Class:     com_android_FFmpeg
 * Method:    GetFFmpegVersion
 * Signature: ()I
 *
 * Returns the linked libavcodec version integer.
 * FIX: the JNI symbol previously used the wrong package
 * (Java_com_android_concox_FFmpeg_...) while the Java class is
 * com.android.FFmpeg — calling it would throw UnsatisfiedLinkError.
 */
JNIEXPORT jint JNICALL Java_com_android_FFmpeg_GetFFmpegVersion(
		JNIEnv * env, jobject jobj) {
	return avcodec_version();
}


#ifdef __cplusplus
}
#endif


 配置好 Eclipse 中的 NDK 后,执行 build 即可生成 ffmpegutil 库文件。





郑重声明:本站内容如果来自互联网及其他传播媒体,其版权均属原媒体及文章作者所有。转载目的在于传递更多信息及用于网络分享,并不代表本站赞同其观点和对其真实性负责,也不构成任何其他建议。