歡迎加入技術討論羣: 714476794
1、播放視頻
1.1、新建Android項目
筆者用的AndroidStudio 3.6.1版本,新建項目選Native C++,其他版本可能不太一樣,語言這裏選擇的是Kotlin
1.2、配置項目
這裏我們只需要armeabi-v7架構的FFmpeg,如需要其他架構,自行編譯,將編譯好的armv7/lib下的so複製到app/libs/armeabi-v7a,筆者一般都習慣把第三方so庫放到module/libs下面,這裏需要注意在module的build.gradle加入配置
// Tell Gradle to pick up prebuilt .so files from module/libs
// (instead of the default src/main/jniLibs) so they are packaged
// into the APK.
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
然後複製include文件夾到cpp文件夾下,複製到哪裏都無所謂,只要在CMakeLists.txt配置對就行。
1.3、編寫CMakeLists.txt
如果AndroidStudio創建項目沒有自動生成的需要手動新建
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

# Output directory for the generated .so. The current project packages it
# automatically (no manual copy to libs/jniLibs needed); the copy written
# here can be reused by other projects.
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/libs/${ANDROID_ABI})

# Directory holding the prebuilt FFmpeg shared libraries (module/libs/<abi>).
set(JNI_LIBS_DIR ${CMAKE_SOURCE_DIR}/../../../libs)

# The JNI library built from our own source.
add_library( video
        SHARED
        video.cpp)

find_library( log-lib
        log )

# Import every prebuilt FFmpeg .so as a SHARED IMPORTED target.
# One foreach replaces eight identical add_library/set_target_properties
# pairs; the target names are unchanged.
set(FFMPEG_LIBS
        avcodec
        avdevice
        avfilter
        avformat
        avutil
        postproc
        swresample
        swscale)
foreach(ffmpeg_lib ${FFMPEG_LIBS})
    add_library(${ffmpeg_lib} SHARED IMPORTED)
    set_target_properties(
            ${ffmpeg_lib}
            PROPERTIES IMPORTED_LOCATION
            ${JNI_LIBS_DIR}/${ANDROID_ABI}/lib${ffmpeg_lib}.so)
endforeach()

# FFmpeg headers.
include_directories(${CMAKE_SOURCE_DIR}/include)

target_link_libraries(
        video
        ${FFMPEG_LIBS}
        android
        ${log-lib} )
1.4 配置build.gradle
android {
...
defaultConfig {
...
externalNativeBuild {
cmake {
cppFlags ""
}
ndk {
// Build native code only for armeabi-v7a, matching the
// prebuilt FFmpeg .so files shipped in module/libs.
abiFilters "armeabi-v7a"
}
}
}
externalNativeBuild {
cmake {
path "src/main/cpp/CMakeLists.txt"// path to the CMakeLists.txt file
}
}
sourceSets {
main {
// Pick up the prebuilt FFmpeg .so files from module/libs.
jniLibs.srcDirs = ['libs']
}
}
}
1.5、編寫C代碼
在src/main/cpp新建video.cpp
參考文章https://blog.csdn.net/johanman/article/details/83091706
//
// Created by k on 2020-05-13.
//
#include <jni.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <android/log.h>
// FFmpeg is a C library: wrap its headers in extern "C" so the C++
// compiler does not mangle the symbol names.
extern "C" {
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}
// Android error-log macro (tag "FFmpeg_log" in logcat).
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR, "FFmpeg_log", FORMAT, ##__VA_ARGS__);
// Decodes the video at `source` with FFmpeg and blits each frame as RGBA
// into the given Android Surface. Blocks until playback finishes or an
// error occurs, so it must be called from a background thread.
//
// Fixes over the original:
//  - AVERROR(EAGAIN) from avcodec_receive_frame() is not an error; the
//    original aborted playback on it. We now drain frames in a loop and
//    keep feeding packets (send/receive is not 1:1).
//  - Every early return leaked the UTF chars, format context, codec
//    context, native window, frames, packet and sws context; a single
//    cleanup label now releases exactly what was acquired.
//  - avcodec_open2() failure logged the wrong message.
//  - The codec context is now freed (avcodec_free_context), not just closed.
extern "C"
JNIEXPORT void JNICALL
Java_com_mrkzs_android_ffmpegplayvideo_MainActivity_playVideo(JNIEnv *env, jobject instance, jstring source, jobject surface) {
    // Return code of FFmpeg / NDK calls.
    int result;
    // R1: Java String -> C string; released at cleanup.
    const char *path = env->GetStringUTFChars(source, 0);
    // Declare all resources up front, NULL-initialized, so `goto cleanup`
    // never crosses an initialization and the cleanup code can test each one.
    AVFormatContext *format_context = NULL;
    AVCodecContext *video_codec_context = NULL;
    AVCodec *video_codec = NULL;
    ANativeWindow *native_window = NULL;
    ANativeWindow_Buffer window_buffer;
    AVPacket *packet = NULL;
    AVFrame *frame = NULL;
    AVFrame *rgba_frame = NULL;
    uint8_t *out_buffer = NULL;
    struct SwsContext *data_convert_context = NULL;
    int video_stream_index = -1;
    int videoWidth = 0;
    int videoHeight = 0;
    int buffer_size = 0;
    // Register FFmpeg components (deprecated no-op since FFmpeg 4.0).
    av_register_all();
    // R2: demuxer context. On avformat_open_input failure FFmpeg frees it
    // and NULLs the pointer, so cleanup stays safe.
    format_context = avformat_alloc_context();
    result = avformat_open_input(&format_context, path, NULL, NULL);
    if (result < 0) {
        LOGE("Player Error : Can not open video file");
        goto cleanup;
    }
    // Probe the container for stream information.
    result = avformat_find_stream_info(format_context, NULL);
    if (result < 0) {
        LOGE("Player Error : Can not find video file stream info");
        goto cleanup;
    }
    // Locate the first video stream.
    for (int i = 0; i < format_context->nb_streams; i++) {
        if (format_context->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;
        }
    }
    if (video_stream_index == -1) {
        LOGE("Player Error : Can not find video stream");
        goto cleanup;
    }
    // Build a decoder context from the stream's codec parameters.
    video_codec_context = avcodec_alloc_context3(NULL);
    avcodec_parameters_to_context(video_codec_context, format_context->streams[video_stream_index]->codecpar);
    video_codec = avcodec_find_decoder(video_codec_context->codec_id);
    if (video_codec == NULL) {
        LOGE("Player Error : Can not find video codec");
        goto cleanup;
    }
    // R3: open the decoder. (Original logged "Can not find video stream"
    // here by mistake.)
    result = avcodec_open2(video_codec_context, video_codec, NULL);
    if (result < 0) {
        LOGE("Player Error : Can not open video codec");
        goto cleanup;
    }
    videoWidth = video_codec_context->width;
    videoHeight = video_codec_context->height;
    // R4: native window backing the Java Surface.
    native_window = ANativeWindow_fromSurface(env, surface);
    if (native_window == NULL) {
        LOGE("Player Error : Can not create native window");
        goto cleanup;
    }
    // Buffer geometry sets the pixel count of the window buffer, not the
    // physical view size; if they differ the image is scaled on display.
    result = ANativeWindow_setBuffersGeometry(native_window, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
    if (result < 0) {
        LOGE("Player Error : Can not set native window buffer");
        goto cleanup;
    }
    // R5: encoded data container; R6: decoded frame; R7: RGBA frame used
    // for display (decoded pixel data must be converted before blitting).
    packet = av_packet_alloc();
    frame = av_frame_alloc();
    rgba_frame = av_frame_alloc();
    // R8: pixel buffer that rgba_frame's data planes point into.
    buffer_size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, videoWidth, videoHeight, 1);
    out_buffer = (uint8_t *) av_malloc(buffer_size * sizeof(uint8_t));
    av_image_fill_arrays(rgba_frame->data, rgba_frame->linesize, out_buffer, AV_PIX_FMT_RGBA, videoWidth, videoHeight, 1);
    // R9: pixel-format converter (decoder format -> RGBA).
    data_convert_context = sws_getContext(
            videoWidth, videoHeight, video_codec_context->pix_fmt,
            videoWidth, videoHeight, AV_PIX_FMT_RGBA,
            SWS_BICUBIC, NULL, NULL, NULL);
    // Demux / decode / render loop.
    while (av_read_frame(format_context, packet) >= 0) {
        if (packet->stream_index == video_stream_index) {
            result = avcodec_send_packet(video_codec_context, packet);
            if (result < 0 && result != AVERROR(EAGAIN) && result != AVERROR_EOF) {
                LOGE("Player Error : codec step 1 fail");
                goto cleanup;
            }
            // Drain every frame this packet produced. EAGAIN here just
            // means "feed more input" and ends the inner loop normally.
            while ((result = avcodec_receive_frame(video_codec_context, frame)) >= 0) {
                // Convert the decoded frame to RGBA.
                result = sws_scale(
                        data_convert_context,
                        (const uint8_t *const *) frame->data, frame->linesize,
                        0, videoHeight,
                        rgba_frame->data, rgba_frame->linesize);
                if (result <= 0) {
                    LOGE("Player Error : data convert fail");
                    goto cleanup;
                }
                // Blit into the window. Copy row by row because the
                // window stride (pixels) may differ from the frame's
                // linesize (bytes); a flat memcpy would shear the image.
                result = ANativeWindow_lock(native_window, &window_buffer, NULL);
                if (result < 0) {
                    LOGE("Player Error : Can not lock native window");
                } else {
                    uint8_t *bits = (uint8_t *) window_buffer.bits;
                    for (int h = 0; h < videoHeight; h++) {
                        memcpy(bits + h * window_buffer.stride * 4,
                               out_buffer + h * rgba_frame->linesize[0],
                               rgba_frame->linesize[0]);
                    }
                    ANativeWindow_unlockAndPost(native_window);
                }
            }
            if (result != AVERROR(EAGAIN) && result != AVERROR_EOF) {
                LOGE("Player Error : codec step 2 fail");
                goto cleanup;
            }
        }
        // Drop our reference to the packet's payload before reusing it.
        av_packet_unref(packet);
    }

cleanup:
    // Release in reverse acquisition order; every guard tolerates NULL so
    // this label is safe to reach from any failure point above.
    if (data_convert_context) sws_freeContext(data_convert_context);  // R9
    if (out_buffer) av_free(out_buffer);                              // R8
    if (rgba_frame) av_frame_free(&rgba_frame);                       // R7
    if (frame) av_frame_free(&frame);                                 // R6
    if (packet) av_packet_free(&packet);                              // R5
    if (native_window) ANativeWindow_release(native_window);          // R4
    if (video_codec_context) {                                        // R3
        avcodec_close(video_codec_context);
        avcodec_free_context(&video_codec_context);
    }
    if (format_context) avformat_close_input(&format_context);        // R2
    env->ReleaseStringUTFChars(source, path);                         // R1
}
1.6、佈局文件
<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen: URL input + play button on top, video surface below. -->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<!-- Video URL / file path input.
     NOTE(review): no constraint attributes — in a ConstraintLayout an
     unconstrained child renders at (0,0); margins substitute for real
     constraints here. Verify the intended placement. -->
<EditText
android:id="@+id/url"
android:layout_width="match_parent"
android:layout_marginEnd="100dp"
android:layout_height="60dp" />
<!-- Play button, placed to the right of the URL field. -->
<Button
android:id="@+id/play"
android:layout_width="100dp"
android:layout_height="60dp"
app:layout_constraintLeft_toRightOf="@id/url"/>
<!-- Surface the native code renders decoded RGBA frames into. -->
<SurfaceView
android:id="@+id/surface"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginTop="60dp"
app:layout_constraintTop_toBottomOf="@+id/url" />
</androidx.constraintlayout.widget.ConstraintLayout>
1.7、MainActivity
kotlin代碼
package com.mrkzs.android.ffmpegplayvideo
import android.os.Bundle
import android.text.TextUtils
import android.view.Surface
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.widget.Button
import android.widget.EditText
import androidx.appcompat.app.AppCompatActivity
/**
 * Demo activity: reads a video URL/path from the EditText and plays it on
 * the SurfaceView via the native FFmpeg decoder in video.cpp.
 */
class MainActivity : AppCompatActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        val urlEt = findViewById<EditText>(R.id.url)
        val playBtn = findViewById<Button>(R.id.play)
        val surfaceView = findViewById<SurfaceView>(R.id.surface)
        val holder: SurfaceHolder = surfaceView.holder
        playBtn.setOnClickListener {
            // The listener is attached to the play button only, so the
            // original `view.id == R.id.play` check was redundant.
            val url = urlEt.text.trim().toString()
            if (!TextUtils.isEmpty(url)) {
                // playVideo blocks until playback ends, so run it off the
                // main thread.
                // NOTE(review): repeated taps spawn concurrent decoder
                // threads writing to the same Surface — consider disabling
                // the button while a playback is in progress.
                Thread(Runnable {
                    playVideo(url, holder.surface)
                }).start()
            }
        }
    }

    /** Native entry point implemented in video.cpp (libvideo.so). */
    external fun playVideo(url: String, surface: Surface)

    companion object {
        init {
            // Load the JNI library produced by CMakeLists.txt.
            System.loadLibrary("video")
        }
    }
}
這只是一個小小demo,後續學習中會不斷深入。
2、總結
GitHub demo:https://github.com/hiongyend/FFmpegPlayVideo
上一篇文章:Android使用FFmpeg播放視頻(一)編譯FFmpeg