android 基於FFmpeg的視頻播放
爲了學習android jni方面的姿勢,利用之前編好的ffmpeg庫so文件,實現一下android上面調用ffmpeg來解碼播放視頻。相信大家都看過雷神的博客了,但是對於現在ffmpeg3.0以上,雷神之前提供的代碼也過時了,運行也有問題,再者現在用jni也不通過mk的方式了,有as自帶的cmake方式,所以本篇介紹的方法都是用最新的方式去實現的。
首先介紹一下整體的環境目錄
上圖爲整體項目的目錄
這裏簡單介紹一下cmake配置文件:
# Minimum CMake version (the one bundled with Android Studio).
cmake_minimum_required(VERSION 3.4.1)

# Root directory holding the prebuilt FFmpeg headers and .so files.
set(ffmpeg_DIR ${CMAKE_SOURCE_DIR}/libs)

# Make the FFmpeg headers visible to the native sources.
include_directories(${ffmpeg_DIR}/include)

# The JNI bridge library built from our single C++ source file.
add_library(native-lib SHARED src/main/cpp/native-lib.cpp)

# Declare each prebuilt FFmpeg shared library as an IMPORTED target and
# point it at the ABI-specific file libs/<ABI>/lib<name>.so.  A loop
# replaces the eight hand-written add_library/set_target_properties pairs.
foreach(ffmpeg_lib
        avcodec-57
        avdevice-57
        avfilter-6
        avformat-57
        avutil-55
        postproc-54
        swresample-2
        swscale-4)
    add_library(${ffmpeg_lib} SHARED IMPORTED)
    set_target_properties(${ffmpeg_lib} PROPERTIES IMPORTED_LOCATION
        ${ffmpeg_DIR}/${ANDROID_ABI}/lib${ffmpeg_lib}.so)
endforeach()

# NDK log library, needed by __android_log_print.
find_library(log-lib log)

# Link the JNI library against FFmpeg, the NDK "android" library
# (ANativeWindow) and the log library.  PRIVATE: nothing consumes
# native-lib at build time, Java loads it at runtime.
# "android" (a library name CMake turns into -landroid) replaces the
# raw linker flag -landroid.
target_link_libraries(native-lib
    PRIVATE
    avcodec-57
    avfilter-6
    avformat-57
    avutil-55
    android
    postproc-54
    swresample-2
    swscale-4
    ${log-lib})
native-lib.cpp代碼,這段代碼主要參考雷神的代碼,然後更換掉過時的方法,並加入了渲染視頻的邏輯。ffmpeg3.3版本,調用ffmpeg視頻解碼最簡單方法。
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
// FFmpeg is a C library: wrap its headers in extern "C" so the C++
// compiler uses C linkage for the imported symbols.
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
}
// Logging helpers: route to logcat when building for Android,
// fall back to plain printf elsewhere.
#ifdef ANDROID
#include <android/log.h>
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, "(^_^)", format, ##__VA_ARGS__)
#else
#define LOGE(format, ...) printf("(>_<) " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...) printf("(^_^) " format "\n", ##__VA_ARGS__)
#endif
extern "C"
// JNI entry point: opens the video at input_jstr, decodes the first video
// stream with FFmpeg (send_packet/receive_frame API) and renders each frame
// to the given android.view.Surface via ANativeWindow.
// Returns 0 on success, -1 on any setup failure.  Blocks until playback ends.
JNIEXPORT jint JNICALL Java_com_lake_ndktest_FFmpeg_play
(JNIEnv *env, jobject obj, jstring input_jstr, jobject surface) {
    LOGI("play");
    // Video file path handed over from the Java layer.
    const char *file_name = env->GetStringUTFChars(input_jstr, NULL);
    LOGI("file_name:%s\n", file_name);
    av_register_all();
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    // Open video file
    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGE("Couldn't open file:%s\n", file_name);
        // fix: the JNI string was leaked on every early return
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1; // Couldn't open file
    }
    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1;
    }
    // Find the first video stream; the container may also hold audio or
    // other streams we do not care about here.
    int videoStream = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = (int) i;
            break;
        }
    }
    if (videoStream == -1) {
        LOGE("Didn't find a video stream or audio steam.");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1; // Didn't find a video stream
    }
    LOGI("找到視頻流");
    AVCodecParameters *pCodecPar = pFormatCtx->streams[videoStream]->codecpar;
    // Find a decoder for the video stream.
    AVCodec *pCodec = avcodec_find_decoder(pCodecPar->codec_id);
    if (pCodec == NULL) {
        LOGE("Couldn't find Codec.\n");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1;
    }
    LOGI("獲取解碼器");
    // Allocate a codec context and fill it from the stream parameters
    // (the post-3.0 replacement for the deprecated stream->codec).
    AVCodecContext *pCodecCtx = avcodec_alloc_context3(pCodec);
    if (avcodec_parameters_to_context(pCodecCtx, pCodecPar) != 0) {
        // fix: was fprintf(stderr, ...), which is invisible on Android
        LOGE("Couldn't copy codec context");
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1; // Error copying codec context
    }
    // fix: guard against r_frame_rate.den == 0 (unknown frame rate)
    AVRational frame_rate = pFormatCtx->streams[videoStream]->r_frame_rate;
    if (frame_rate.den != 0) {
        LOGI("視頻流幀率:%d fps\n", frame_rate.num / frame_rate.den);
    }
    // fix: divide BEFORE narrowing to int; the original cast truncated the
    // 64-bit microsecond duration first and overflowed for videos > ~35 min.
    int iTotalSeconds = (int) (pFormatCtx->duration / 1000000);
    int iHour = iTotalSeconds / 3600;        // hours
    int iMinute = iTotalSeconds % 3600 / 60; // minutes
    int iSecond = iTotalSeconds % 60;        // seconds
    LOGI("持續時間:%02d:%02d:%02d\n", iHour, iMinute, iSecond);
    // fix: durations are int64_t; passing them to %d through varargs is
    // undefined behavior — print via %lld with an explicit cast.
    LOGI("視頻時長:%lld微秒\n", (long long) pFormatCtx->streams[videoStream]->duration);
    LOGI("持續時間:%lld微秒\n", (long long) pFormatCtx->duration);
    LOGI("獲取解碼器SUCESS");
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.");
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1; // Could not open codec
    }
    LOGI("獲取native window");
    // Get the native window backing the Java Surface.
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (nativeWindow == NULL) { // fix: fromSurface can fail; was dereferenced blindly
        LOGE("Couldn't get native window from surface.\n");
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1;
    }
    LOGI("獲取視頻寬高");
    // Video dimensions.
    int videoWidth = pCodecCtx->width;
    int videoHeight = pCodecCtx->height;
    LOGI("設置native window的buffer大小,可自動拉伸");
    // Size the window buffers to the video; the compositor scales to fit.
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,
                                     WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer windowBuffer;
    LOGI("Allocate video frame");
    // Decoded (native pixel format) frame.
    AVFrame *pFrame = av_frame_alloc();
    LOGI("用於渲染");
    // RGBA frame used for rendering.
    AVFrame *pFrameRGBA = av_frame_alloc();
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGE("Could not allocate video frame.");
        av_frame_free(&pFrame);
        av_frame_free(&pFrameRGBA);
        ANativeWindow_release(nativeWindow);
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1;
    }
    LOGI("Determine required buffer size and allocate buffer");
    // Pixel buffer backing pFrameRGBA.
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height,
                                            1);
    uint8_t *buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
                         pCodecCtx->width, pCodecCtx->height, 1);
    LOGI("由於解碼出來的幀格式不是RGBA的,在渲染之前需要進行格式轉換");
    // Decoded frames are not RGBA; set up a swscale converter to RGBA.
    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                                pCodecCtx->pix_fmt,
                                                pCodecCtx->width,
                                                pCodecCtx->height, AV_PIX_FMT_RGBA,
                                                SWS_BICUBIC, NULL, NULL, NULL);
    if (sws_ctx == NULL) {
        LOGE("Cannot initialize the conversion context!\n");
        av_free(buffer);
        av_frame_free(&pFrameRGBA);
        av_frame_free(&pFrame);
        ANativeWindow_release(nativeWindow);
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, file_name);
        return -1;
    }
    LOGI("格式轉換成功");
    LOGE("開始播放");
    int ret;
    AVPacket packet;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Presentation time of this packet in seconds.
            float timestamp = packet.pts * av_q2d(pFormatCtx->streams[videoStream]->time_base);
            LOGI("timestamp=%f", timestamp);
            // Decode using the send/receive API (one packet may yield 0..n frames).
            ret = avcodec_send_packet(pCodecCtx, &packet);
            if (ret < 0) {
                av_packet_unref(&packet); // fix: packet was leaked when breaking here
                break;
            }
            while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
                // lock native window buffer
                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
                // Convert the decoded frame to RGBA.
                sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);
                uint8_t *dst = (uint8_t *) windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4;
                uint8_t *src = pFrameRGBA->data[0];
                int srcStride = pFrameRGBA->linesize[0];
                // fix: copy only the visible row width — linesize may include
                // alignment padding and can exceed the window's row capacity.
                int rowBytes = videoWidth * 4;
                // The window stride and the frame stride differ, so copy row by row.
                for (int h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, rowBytes);
                }
                ANativeWindow_unlockAndPost(nativeWindow);
            }
        }
        av_packet_unref(&packet);
    }
    LOGE("播放完成");
    // Native allocations are not garbage-collected: release everything,
    // in roughly reverse order of acquisition.
    sws_freeContext(sws_ctx);              // fix: was leaked
    av_free(buffer);
    av_frame_free(&pFrameRGBA);            // fix: av_free leaked the frame's internals
    av_frame_free(&pFrame);
    avcodec_free_context(&pCodecCtx);      // fix: avcodec_close alone leaked the context
    avformat_close_input(&pFormatCtx);
    ANativeWindow_release(nativeWindow);   // fix: window reference was leaked
    env->ReleaseStringUTFChars(input_jstr, file_name); // fix: JNI string was leaked
    return 0;
}
- 上述代碼遺留問題
當解碼1080p以上視頻時候,因爲解碼後就進行了渲染,所以視頻播放起來會很慢,1080p以下ffmpeg解碼效率還是可以的。
沒有進行音頻解碼和播放,後續博客我會補上。
通過NativeWindow實現視頻渲染確實很簡單,只需要把android層的surfaceview傳進來就可以了。
具體代碼請繼續往下看。
ackage com.lake.ndktest;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
public class MainActivity extends AppCompatActivity {
private SurfaceView surfaceView;
private EditText urlEdittext_input;
private SurfaceHolder surfaceHolder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button startButton = (Button) this.findViewById(R.id.button_start);
urlEdittext_input= (EditText) this.findViewById(R.id.input_url);
surfaceView = (SurfaceView)findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
startButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View arg0){
String folderurl= Environment.getExternalStorageDirectory().getPath();
String urltext_input=urlEdittext_input.getText().toString();
final String inputurl=folderurl+"/MyLocalPlayer/"+urltext_input;
new Thread(new Runnable() {
@Override
public void run() {
FFmpeg.play(inputurl,surfaceHolder.getSurface());
}
}).start();
}
});
}
}
FFmpeg類:
/**
 * Created by lake on 2017/8/23.
 * [email protected]
 */
public class FFmpeg {
    // JNI entry point implemented in native-lib: decodes the video at
    // inputurl and renders it onto the given Surface.
    public static native int play(String inputurl,Object surface);

    // FFmpeg shared libraries in dependency order; they must all be
    // loaded before native-lib, and before any native call is made.
    private static final String[] NATIVE_LIBS = {
            "avutil-55",
            "swresample-2",
            "avcodec-57",
            "avdevice-57",
            "avformat-57",
            "postproc-54",
            "swscale-4",
            "avfilter-6",
            "native-lib",
    };

    static {
        for (String lib : NATIVE_LIBS) {
            System.loadLibrary(lib);
        }
    }
}
以上就是全部代碼,詳細的說明都在代碼註釋裏了,作爲菜鳥c基礎不太好,請諒解。cmake使用方式有了這個例子相信大家也知道怎麼用了。as新建項目時候,勾選上support c/c++選項,項目會自動配置一個ndk環境出來,在這個基礎上進行開發會非常快。
demo 源碼 https://github.com/lakehubo/NDKtest
[1]: http://blog.csdn.net/leixiaohua1020/article/details/47010637 雷神例子
[2]: http://blog.csdn.net/glouds/article/details/50937266 nativewindow