爲了方便沒有準備好梯子的同學,我把項目在CSDN上打包下載,不過不會繼續更新(保留在初始版本)
讀取OpenGL渲染結果
讀取OpenGL渲染結果就是把渲染結果的像素矩陣導出(從本地內存複製到java數組,但都在內存中)
然而,就是這麼簡單的操作也並沒有什麼很好的解決方案
目前常見的主要有這幾種:
- glReadPixels :最常用,但是慢得要死,會阻塞整個GL線程,在三星的Exynos 4412上尤其慢(mali 400mp4)
- Pixel Buffer Object :速度是glReadPixels 的8-10倍,但是要求至少OpenGL ES 3.0
- EGLImage :只要求OpenGL-ES 1.1/2.0,但是使用比較複雜
在之後使用實時濾鏡並錄製疊加了濾鏡的視頻時,我們會詳細討論導出OpenGL渲染結果的方法,這裏只使用了glReadPixels
PS:別忘了檢查權限哦~尤其是Marshmallow以上版本
代碼
需要注意的是,glReadPixels 的結果是上下顛倒的,所以我們要繞X軸旋轉180度
由於我們使用了GLSurfaceView,所以應該在onDrawFrame完成時調用
package com.martin.ads.vrlib.utils;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.opengl.GLES20;
import android.os.AsyncTask;
import android.os.Environment;
import android.util.Log;
import android.widget.Toast;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Created by Ads on 2016/11/8.
*/
public class BitmapUtils {
    private static final String TAG = "TryOpenGL";

    /**
     * Reads the current GL framebuffer with glReadPixels and saves it to a JPEG
     * file asynchronously. Must be called on the GL thread while a GL context is
     * current (e.g. at the end of GLSurfaceView.Renderer#onDrawFrame).
     *
     * @param width   framebuffer width in pixels
     * @param height  framebuffer height in pixels
     * @param context any Context; only its application context is retained, so
     *                an Activity is not leaked across the background task
     */
    public static void sendImage(int width, int height, Context context) {
        final IntBuffer pixelBuffer = IntBuffer.allocate(width * height);

        // glReadPixels stalls the GL thread until the pipeline drains;
        // measured at roughly 20-50 ms on the devices this was tested on.
        long start = System.nanoTime();
        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
                pixelBuffer);
        long end = System.nanoTime();
        Log.d(TAG, "glReadPixels time: " + (end - start) / 1000000 + " ms");

        // JPEG is used instead of PNG to cut compression time
        // (~200-1000 ms vs ~700-4000 ms), at the cost of lossy output.
        // Compression may consume large amounts of memory depending on the phone.
        new SaveBitmapTask(pixelBuffer, width, height, context).execute();
    }

    /** Compresses the raw RGBA pixel buffer to a JPEG file off the UI thread. */
    static class SaveBitmapTask extends AsyncTask<Void, Integer, Boolean> {
        long start;
        IntBuffer rgbaBuf;
        int width, height;
        Context context;
        String filePath;

        public SaveBitmapTask(IntBuffer rgbaBuf, int width, int height, Context context) {
            this.rgbaBuf = rgbaBuf;
            this.width = width;
            this.height = height;
            // Retain the application context, not the caller's (possibly an
            // Activity), so the AsyncTask cannot leak it while running.
            this.context = context.getApplicationContext();
            // BUGFIX: ':' is not a legal filename character on FAT/exFAT
            // external storage, so "HH:mm:ss" made the save fail silently on
            // many devices; use '-' separators in the time portion instead.
            SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
            File sdRoot = Environment.getExternalStorageDirectory();
            File dir = new File(sdRoot, "Pano360Screenshots");
            // mkdirs() also creates missing parent directories, whereas
            // mkdir() fails outright when any intermediate level is absent.
            if (!dir.exists())
                dir.mkdirs();
            String filename = "PanoScreenShot_" + width + "_" + height + "_"
                    + simpleDateFormat.format(new Date()) + ".jpg";
            // File(parent, child) joins the path correctly and avoids the
            // doubled '/' the previous string concatenation produced.
            filePath = new File(dir, filename).getAbsolutePath();
        }

        @Override
        protected void onPreExecute() {
            start = System.nanoTime();
            super.onPreExecute();
        }

        @Override
        protected Boolean doInBackground(Void... params) {
            saveRgb2Bitmap(rgbaBuf, filePath, width, height);
            return true;
        }

        @Override
        protected void onPostExecute(Boolean aBoolean) {
            Log.d(TAG, "saveBitmap time: " + (System.nanoTime() - start) / 1000000 + " ms");
            Toast.makeText(context, "ScreenShot is saved to " + filePath, Toast.LENGTH_LONG).show();
            super.onPostExecute(aBoolean);
        }
    }

    /**
     * Writes an RGBA pixel buffer (as produced by glReadPixels) to
     * {@code filePath} as a JPEG. glReadPixels returns rows bottom-up, so the
     * image is flipped vertically before compression.
     *
     * @param buf      RGBA pixels, one int per pixel, bottom row first
     * @param filePath absolute destination path of the JPEG file
     * @param width    image width in pixels
     * @param height   image height in pixels
     */
    public static void saveRgb2Bitmap(IntBuffer buf, String filePath, int width, int height) {
        final int[] pixelMirroredArray = new int[width * height];
        Log.d(TAG, "Creating " + filePath);
        BufferedOutputStream bos = null;
        try {
            int[] pixelArray = buf.array();
            // Flip vertically by copying whole rows at once instead of
            // one pixel at a time.
            for (int i = 0; i < height; i++) {
                System.arraycopy(pixelArray, i * width,
                        pixelMirroredArray, (height - i - 1) * width, width);
            }
            bos = new BufferedOutputStream(new FileOutputStream(filePath));
            Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            // This pairing relies on ARGB_8888 bitmaps accepting the
            // GL_RGBA/GL_UNSIGNED_BYTE byte layout via copyPixelsFromBuffer.
            bmp.copyPixelsFromBuffer(IntBuffer.wrap(pixelMirroredArray));
            bmp.compress(Bitmap.CompressFormat.JPEG, 90, bos);
            bmp.recycle();
        } catch (IOException e) {
            // Log with the cause instead of printStackTrace(), consistent
            // with the rest of this class's logging.
            Log.e(TAG, "Failed to save " + filePath, e);
        } finally {
            if (bos != null) {
                try {
                    bos.close();
                } catch (IOException e) {
                    Log.e(TAG, "Failed to close " + filePath, e);
                }
            }
        }
    }
}