Using C/C++ code or libraries on Android has to go through JNI; see the related articles for the details.
1. Add the C/C++ source files
Copy the following files from /opt/android/ndk/android-ndk-r10e/resources/x264:
x264.h
x264_config.h
into the jni directory.
Then add these three source files to the jni directory:
x264encoder.h
x264encoder.cpp
main.cpp
The full source code is listed below:
/*
* \File
* x264encoder.h
* \Author
* Hank
*/
#ifndef X264ENCODER_H
#define X264ENCODER_H
extern "C"
{
#include <stdint.h>
#include "x264.h"
}
enum bitrate_level
{
HIGH_LEVEL = 0,
STANDARD_LEVEL = 1,
MEDIUM_LEVEL = 2,
LOW_LEVEL = 3,
};
class X264Encoder
{
public:
X264Encoder();
~X264Encoder();
bool openX264Encoder();
// long x264EncoderProcess(uint8_t *pSrcData, int srcDataSize, x264_nal_t **nals, int& nalsCount);
long x264EncoderProcess(x264_picture_t *pPicture, x264_nal_t **nals, int& nalsCount);
bool closeX264Encoder();
void setSourceFormat(unsigned int sourceFormat);
void setResolution(unsigned int w, unsigned int h);
void setBitrate(unsigned int i_bitrate);
void setFps(unsigned int fps);
void setI_KeyInt_Max(unsigned int i_frame_max);
void setQp_Max(unsigned int qp_max);
void setQp_Min(unsigned int qp_min);
void forceIDRFrame();
void upgradeBitrateLevel();
void declineBitrateLevel();
void setLeastBitrateLevel();
private:
x264_param_t *pParameter;
x264_t *x264EncoderHandle;
// x264_picture_t *pPicture;
x264_picture_t *pOutput;
unsigned int sourceFormat;
// unsigned int i_bitrate;
unsigned int bitratelevel;
unsigned int i_fps;
unsigned int i_keyint_max;
unsigned int width;
unsigned int height;
unsigned int qp_max;
unsigned int qp_min;
unsigned int current_f_rf_constant;
unsigned int userSetting_f_rf_constant;
int64_t frameNo;
bool isForceIDRFrameEnabled;
};
#endif
/*
* \File
* x264encoder.cpp
* \Author
* Hank
*/
#include <stdlib.h>
#include <string.h>
#include "x264encoder.h"
// new version for x264 encoder
X264Encoder::X264Encoder()
{
this->bitratelevel = STANDARD_LEVEL;
qp_max = 30;
qp_min = 0;
i_fps = 20;
i_keyint_max = 300;
width = 352;
height = 288;
frameNo = 0;
isForceIDRFrameEnabled = false;
pParameter = NULL;
x264EncoderHandle = NULL;
// pPicture = NULL;
pOutput = NULL;
}
X264Encoder::~X264Encoder()
{
this->closeX264Encoder();
}
void X264Encoder::setSourceFormat(unsigned int sourceFormat)
{
this->sourceFormat = sourceFormat;
}
void X264Encoder::setResolution(unsigned int w, unsigned int h)
{
width = w;
height = h;
}
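// setBitrate() maps the requested bitrate (the sample passes values such as 512, i.e. kbit/s)
// onto the coarse levels above; the actual rate control is CRF-based, see openX264Encoder().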
void X264Encoder::setBitrate(unsigned int i_bitrate)
{
if (i_bitrate > 0 && i_bitrate <= 64) {
this->bitratelevel = LOW_LEVEL;
}else if(i_bitrate > 64 && i_bitrate <= 128){
this->bitratelevel = MEDIUM_LEVEL;
}else if (i_bitrate > 128 && i_bitrate <= 256) {
this->bitratelevel = STANDARD_LEVEL;
}else if (i_bitrate > 256 && i_bitrate <= 384) {
this->bitratelevel = HIGH_LEVEL;
}else if (i_bitrate > 384 && i_bitrate <= 512) {
this->bitratelevel = HIGH_LEVEL;
}else {
this->bitratelevel = STANDARD_LEVEL;
}
}
void X264Encoder::setFps(unsigned int fps)
{
i_fps = fps;
}
void X264Encoder::setI_KeyInt_Max(unsigned int i_frame_max)
{
i_keyint_max = i_frame_max;
}
void X264Encoder::setQp_Max(unsigned int qp_max)
{
this->qp_max = qp_max;
}
void X264Encoder::setQp_Min(unsigned int qp_min)
{
this->qp_min = qp_min;
}
bool X264Encoder::openX264Encoder() {
this->closeX264Encoder();
if(!pParameter)
{
pParameter = (x264_param_t *)malloc(sizeof(x264_param_t));
if (!pParameter) {
this->closeX264Encoder();
return false;
}
memset(pParameter, 0, sizeof(x264_param_t));
}
int ret = x264_param_default_preset(pParameter, "ultrafast", "zerolatency");
if (ret != 0) {
this->closeX264Encoder();
return false;
}
pParameter->i_level_idc = 30;
pParameter->i_width = width;
pParameter->i_height = height;
pParameter->b_deterministic = 1;
// pParameter->b_sliced_threads = 1;
pParameter->i_threads = 1;
pParameter->i_csp = X264_CSP_I420;//X264_CSP_NV12;//X264_CSP_I420;
pParameter->i_fps_num = i_fps;
pParameter->i_fps_den = 1;
pParameter->i_bframe = 0;
pParameter->i_keyint_max = i_keyint_max;
// pParameter->b_open_gop = 1;
// pParameter->rc.i_bitrate = i_bitrate;
pParameter->rc.i_rc_method = X264_RC_CRF;//X264_RC_CQP;
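// CRF rate control: a lower rc.f_rf_constant means higher quality (and a higher bitrate).
// The mapping below from bitrate level to CRF value is this sample's own tuning.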
if (this->bitratelevel == LOW_LEVEL) {
pParameter->rc.f_rf_constant = 32;
}else if(this->bitratelevel == MEDIUM_LEVEL){
pParameter->rc.f_rf_constant = 29;
}else if (this->bitratelevel == STANDARD_LEVEL) {
pParameter->rc.f_rf_constant = 26;
}else if (this->bitratelevel == HIGH_LEVEL) {
pParameter->rc.f_rf_constant = 24;
}else {
pParameter->rc.f_rf_constant = 24;
}
current_f_rf_constant = pParameter->rc.f_rf_constant;
userSetting_f_rf_constant = pParameter->rc.f_rf_constant;
// from huxiaopeng
pParameter->analyse.b_transform_8x8 = 1;
pParameter->rc.f_aq_strength = 1.5;
pParameter->rc.i_aq_mode = 0;
pParameter->rc.f_qcompress = 0.0;
pParameter->rc.f_ip_factor = 0.5;
pParameter->rc.f_rate_tolerance = 0.1;
pParameter->analyse.i_direct_mv_pred = X264_DIRECT_PRED_AUTO;
pParameter->analyse.i_me_method = X264_ME_DIA;
pParameter->analyse.i_me_range = 16;
pParameter->analyse.i_subpel_refine = 2;
// pParameter->analyse.i_noise_reduction = 1;
pParameter->i_slice_max_size = 1200;
// pParameter->i_nal_hrd = X264_NAL_HRD_NONE;
pParameter->b_deblocking_filter = 1;
pParameter->i_deblocking_filter_alphac0 = 4;
pParameter->i_deblocking_filter_beta = 4;
pParameter->rc.b_mb_tree = 0;
pParameter->i_log_level = X264_LOG_NONE;
//if (x264_param_apply_profile(pParameter, "main"))
if (x264_param_apply_profile(pParameter, "baseline"))
{
this->closeX264Encoder();
return false;
}
if (!x264EncoderHandle) {
x264EncoderHandle = x264_encoder_open(pParameter);
if (!x264EncoderHandle) {
this->closeX264Encoder();
return false;
}
}
if (!pOutput) {
pOutput = (x264_picture_t *)malloc(sizeof(x264_picture_t));
if (!pOutput) {
this->closeX264Encoder();
return false;
}
memset(pOutput, 0, sizeof(x264_picture_t));
}
return true;
}
void X264Encoder::forceIDRFrame()
{
isForceIDRFrameEnabled = true;
}
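// "Upgrading" the bitrate level lowers the CRF value by one step (more bits, better quality),
// but never past the CRF the user originally configured.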
void X264Encoder::upgradeBitrateLevel()
{
/*
if (this->bitratelevel == HIGH_LEVEL) {
return;
}
this->bitratelevel++;
if (this->bitratelevel == LOW_LEVEL) {
pParameter->rc.f_rf_constant = 30;
}else if(this->bitratelevel == MEDIUM_LEVEL){
pParameter->rc.f_rf_constant = 27;
}else if (this->bitratelevel == STANDARD_LEVEL) {
pParameter->rc.f_rf_constant = 24;
}else if (this->bitratelevel == HIGH_LEVEL) {
pParameter->rc.f_rf_constant = 22;
}else {
pParameter->rc.f_rf_constant = 23;
}
*/
if (userSetting_f_rf_constant >= current_f_rf_constant) {
return;
}
pParameter->rc.f_rf_constant--;
current_f_rf_constant = pParameter->rc.f_rf_constant;
x264_encoder_reconfig(x264EncoderHandle, pParameter);
}
void X264Encoder::setLeastBitrateLevel()
{
pParameter->rc.f_rf_constant = 32;
current_f_rf_constant = pParameter->rc.f_rf_constant;
x264_encoder_reconfig(x264EncoderHandle, pParameter);
}
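// "Declining" the bitrate level raises the CRF value by one step (fewer bits), capped at 32,
// which is also the value setLeastBitrateLevel() jumps to directly.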
void X264Encoder::declineBitrateLevel()
{
if (32 <= current_f_rf_constant) {
return;
}
pParameter->rc.f_rf_constant++;
current_f_rf_constant = pParameter->rc.f_rf_constant;
x264_encoder_reconfig(x264EncoderHandle, pParameter);
}
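// Encode one picture. The pts is derived from a simple frame counter; a pending
// forceIDRFrame() request turns the next frame into an IDR keyframe. Returns the
// encoded size in bytes, or a value <= 0 on failure / no output.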
long X264Encoder::x264EncoderProcess(x264_picture_t *pPicture, x264_nal_t **nals, int& nalsCount)
{
pPicture->i_pts = (int64_t)(frameNo * pParameter->i_fps_den);
pPicture->i_type = X264_TYPE_AUTO;
pPicture->i_qpplus1 = 0;//X264_QP_AUTO;
if (isForceIDRFrameEnabled) {
pPicture->i_type = X264_TYPE_IDR;
isForceIDRFrameEnabled = false;
}
int32_t framesize = -1;
framesize = x264_encoder_encode(x264EncoderHandle, nals, &nalsCount, pPicture, pOutput);
if (framesize>0) {
frameNo++;
}
return framesize;
}
bool X264Encoder::closeX264Encoder()
{
if (pOutput) {
free(pOutput);
pOutput = NULL;
}
/*
if (pPicture) {
free(pPicture);
pPicture = NULL;
}
*/
if (pParameter) {
free(pParameter);
pParameter = NULL;
}
if (x264EncoderHandle) {
x264_encoder_close(x264EncoderHandle);
x264EncoderHandle = NULL;
}
return true;
}
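One thing the class never does is drain the encoder at end of stream. With the "ultrafast"/"zerolatency" preset and i_bframe = 0, x264 normally buffers nothing, so this is harmless here; still, a flush loop along the lines of the following sketch (standard x264 API calls, assumed to run while x264EncoderHandle and pOutput are still valid, i.e. before closeX264Encoder()) would pick up any frames still held inside the encoder:
// Sketch: drain frames still buffered inside the encoder at end of stream.
// Passing NULL as the input picture asks x264 to emit a buffered frame.
while (x264_encoder_delayed_frames(x264EncoderHandle) > 0) {
    x264_nal_t *nals = NULL;
    int nalsCount = 0;
    int size = x264_encoder_encode(x264EncoderHandle, &nals, &nalsCount, NULL, pOutput);
    if (size <= 0)
        break;
    // write nals[0] .. nals[nalsCount - 1] to the output here
}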
/*
* \File
* main.cpp
* \Author
* Hank
*/
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include <time.h>
#include <sys/time.h> /* gettimeofday() */
#include <jni.h>
/*for android logs*/
#include <android/log.h>
#include "x264.h"
#include "x264_config.h"
#include "x264encoder.h"
#define LOG_TAG "android-ffmpeg-tutorial01"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
jint naMain(JNIEnv *pEnv, jobject pObj, jobject pMainAct,
jstring pFileName, jint pNumOfFrames,
jint picWidth, jint picHeight) {
/* Preset */
int numOfFrames = pNumOfFrames;
int pictureWidth = picWidth;
int pictureHeight = picHeight;
int presetBitrate = 512;
int presetFps = 25;
int pictureSize = pictureWidth * pictureHeight;
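// For I420 input each frame occupies pictureSize * 3 / 2 bytes: a full-resolution Y plane
// plus quarter-resolution U and V planes (read separately below).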
int encodeTime = 0;
X264Encoder x264Encoder;
x264Encoder.setBitrate(presetBitrate);
x264Encoder.setResolution(pictureWidth,pictureHeight);
x264Encoder.setFps(presetFps);
char *yuvFilename = NULL;
char *avcFilename = NULL;
char str[512];
FILE *inputFile = NULL;
FILE *outputFile = NULL;
// Get C string from JNI jstring
yuvFilename = (char *)pEnv->GetStringUTFChars(pFileName, NULL);
if((inputFile = fopen(yuvFilename,"rb")) == NULL){
LOGI("Can not open inputfile %s", yuvFilename);
pEnv->ReleaseStringUTFChars(pFileName, yuvFilename);
return -1;
}
snprintf(str, sizeof(str), "%s.h264", yuvFilename);
avcFilename = str;
LOGI("Output file: %s", avcFilename);
if ((outputFile = fopen(avcFilename,"wb")) == NULL) {
LOGI("Can not open outputfile %s", avcFilename);
pEnv->ReleaseStringUTFChars(pFileName, yuvFilename);
fclose(inputFile);
return -1;
}
x264_picture_t inputPicture;
x264_picture_alloc(&inputPicture, X264_CSP_I420, pictureWidth, pictureHeight);
x264_nal_t *p_nals = NULL;
int nalsCount = 0;
int ret = 0;
int j = 0;
if(x264Encoder.openX264Encoder())
{
struct timeval start, end;
struct timeval sum;
sum.tv_sec = 0;
sum.tv_usec = 0;
/* Including the time of read & write file
gettimeofday(&start, NULL);
LOGI("Encode start time : %ld.%ld", start.tv_sec, start.tv_usec);
*/
for(j=0; j<numOfFrames; j++)
{
ret = fread(inputPicture.img.plane[0],1,pictureSize, inputFile);
if (ret < pictureSize)
{
break;
}
fread(inputPicture.img.plane[1],1,pictureSize/4, inputFile);
fread(inputPicture.img.plane[2],1,pictureSize/4, inputFile);
gettimeofday(&start, NULL);
x264Encoder.x264EncoderProcess(&inputPicture,&p_nals,nalsCount);
gettimeofday(&end, NULL);
if (end.tv_sec > start.tv_sec) {
sum.tv_sec += (end.tv_sec - (start.tv_sec + 1));
sum.tv_usec += ((1000000 - start.tv_usec) + end.tv_usec);
LOGI("A Spend time: %ld.%ld", sum.tv_sec, sum.tv_usec);
} else {
sum.tv_sec += (end.tv_sec - start.tv_sec);
sum.tv_usec += (end.tv_usec-start.tv_usec);
LOGI("B Spend time: %ld.%ld", sum.tv_sec, sum.tv_usec);
}
if(p_nals)
{
for(int i=0; i<nalsCount; i++)
{
ret = fwrite(p_nals[i].p_payload, 1, p_nals[i].i_payload, outputFile);
LOGI("Write NO %d frame", j);
LOGI("Should write %d bytes, in fact wrote %d bytes into %s", p_nals[i].i_payload, ret, avcFilename);
}
}
}
/* Including the time of read & write file
gettimeofday(&end, NULL);
LOGI("Encode end time : %ld.%ld", end.tv_sec, end.tv_usec);
if (end.tv_sec > start.tv_sec){
encodeTime = (end.tv_sec - (start.tv_sec+1))*1000000 + ((1000000-start.tv_usec) + end.tv_usec);
LOGI("Spend time: %ld.%ld", (end.tv_sec - (start.tv_sec+1)), ((1000000-start.tv_usec) + end.tv_usec));
} else {
encodeTime = (end.tv_sec - start.tv_sec)*1000000+(end.tv_usec-start.tv_usec);
LOGI("Spend time: %ld.%ld", (end.tv_sec - start.tv_sec), end.tv_usec-start.tv_usec);
}
*/
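// Total encode time in microseconds, accumulated over the x264 encode calls only (file I/O excluded).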
encodeTime = (sum.tv_sec + (sum.tv_usec/1000000))*1000000 + (sum.tv_usec%1000000);
LOGI("Spend time: %ld.%ld", sum.tv_sec, sum.tv_usec);
}
pEnv->ReleaseStringUTFChars(pFileName, yuvFilename);
fclose(inputFile);
fclose(outputFile);
LOGI("Closed the files!");
x264_picture_clean(&inputPicture);
x264Encoder.closeX264Encoder();
LOGI("Closed the handler of encoder!");
return encodeTime;
//return 0;
}
static const JNINativeMethod gMethods[] = {
{"naMain",
"(Lroman10/tutorial/android_ffmpeg_tutorial01/MainActivity;Ljava/lang/String;III)I",
(void*)naMain
},
};
jint JNI_OnLoad(JavaVM* vm, void *reserved)
{
JNIEnv* env = NULL;
if (vm->GetEnv((void**)&env, JNI_VERSION_1_6) != JNI_OK) {
return -1;
}
static const char* const kClassName = "roman10/tutorial/android_ffmpeg_tutorial01/MainActivity";
jclass clazz = env->FindClass(kClassName);
if (clazz == NULL) {
LOGE("cannot get class: %s", kClassName);
return -1;
}
if (env->RegisterNatives(clazz, gMethods, sizeof(gMethods)/sizeof(gMethods[0])) != JNI_OK) {
LOGE("register native method failed!");
return -1;
}
return JNI_VERSION_1_6;
}
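The natives above are bound dynamically in JNI_OnLoad() through RegisterNatives(), so the C++ function name can be anything. For reference, the same binding could also be done statically by exporting the entry point under the name JNI derives from the package, class and method; a sketch, assuming the roman10.tutorial.android_ffmpeg_tutorial01 package used throughout this tutorial (underscores in the package name are escaped as _1):
// Alternative to RegisterNatives: export naMain under the JNI-derived name so the
// runtime resolves it by naming convention. Only one of the two binding styles is needed.
extern "C" JNIEXPORT jint JNICALL
Java_roman10_tutorial_android_1ffmpeg_1tutorial01_MainActivity_naMain(
        JNIEnv *pEnv, jclass clazz, jobject pMainAct,
        jstring pFileName, jint pNumOfFrames, jint picWidth, jint picHeight)
{
    return naMain(pEnv, clazz, pMainAct, pFileName, pNumOfFrames, picWidth, picHeight);
}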
2. Modify the build configuration and compile
1). Modify jni/Android.mk as follows:
#
# \File
# Android.mk
#
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := tutorial01
#LOCAL_SRC_FILES := tutorial01.c
LOCAL_SRC_FILES := x264encoder.cpp main.cpp
LOCAL_LDLIBS := -llog -ljnigraphics -lz
#LOCAL_SHARED_LIBRARIES := libavformat libavcodec libswscale libavutil
LOCAL_SHARED_LIBRARIES := libx264
include $(BUILD_SHARED_LIBRARY)
$(call import-module,x264-snapshot-20151015-2245/android/arm)
2). Modify jni/Application.mk as follows:
#
# \File
# Application.mk
#
APP_ABI := armeabi
#APP_ABI := armeabi-v7a
APP_PLATFORM := android-10
3). Modify src/build.gradle as follows:
apply plugin: 'com.android.application'
android {
compileSdkVersion 23
buildToolsVersion "23.0.1"
defaultConfig {
applicationId "roman10.tutorial.android_ffmpeg_tutorial01"
minSdkVersion 19
targetSdkVersion 23
sourceSets.main{
jni.srcDirs=[]
jniLibs.srcDir "src/main/libs"
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
debug {
debuggable true
jniDebuggable true
renderscriptDebuggable false
}
}
}
dependencies {
compile 'com.android.support:support-v4:18.0.0'
}
4). Build
From the main directory:
$ cd app/src/main
$ ndk-build
[armeabi] Compile++ thumb: tutorial01 <= x264encoder.cpp
[armeabi] Compile++ thumb: tutorial01 <= main.cpp
[armeabi] Prebuilt : libx264.so <= <NDK>/sources/x264-snapshot-20151015-2245/android/arm/lib/
[armeabi] SharedLibrary : libtutorial01.so
[armeabi] Install: libtutorial01.so => libs/armeabi/libtutorial01.so
[armeabi] Install: libx264.so => libs/armeabi/libx264.so
Output like this indicates the build succeeded.
3. Modify the Java source
Edit MainActivity.java and add the calling interface:
public class MainActivity extends Activity {
// Predefined call parameters
private static final String FRAME_DUMP_FOLDER_PATH = Environment.getExternalStorageDirectory()
+ File.separator + "android-264-encoder";
private static final String INPUT_YUV_FILENAME = "test_1920x1080.yuv";
private static int pWidth = 1920;
private static int pHeight = 1080;
// Inside the AsyncTask that drives the encode (excerpt):
@Override
protected Void doInBackground(Void... params){
// The actual native call
encodeTime = naMain(mlOuterAct,
FRAME_DUMP_FOLDER_PATH + File.separator + INPUT_YUV_FILENAME,
mlNumOfFrames,
pWidth, pHeight );
return null;
}
/*
* Native interface declaration
* pVideoFileName : the input YUV file name
* pNumOfFrame : number of frames to encode
* Width : image width of the YUV source file
* Height : image height of the YUV source file
*/
private static native int naMain(MainActivity pObject,
String pVideoFileName, int pNumOfFrame,
int Width, int Height);
// Load the native libraries (libx264.so must be loaded before libtutorial01.so)
static {
System.loadLibrary("x264");
System.loadLibrary("tutorial01");
}
}
Then build and run the project as a normal Android app.