通過RS實現美顏功能
前言
最近在做一個相機APP,通過OpenGL實現了實時美顏功能,但是網上很難找到通過RS實現美顏的例子,因爲相機預覽通過OpenGL實現美顏後,用戶點擊拍照後的數據依然是原始數據,並沒有美顏,所以我通過同樣的算法用RS實現了一遍。有需要的同學可以參考一下
Java調用代碼
/**
 * Applies the skin-smoothing ("beauty") filter to a bitmap using RenderScript.
 *
 * <p>Creates a one-shot RenderScript context, runs the {@code beauty} kernel over
 * every pixel, and returns a new bitmap of the same size. The input bitmap is not
 * modified.
 *
 * @param context  Android context used to create the RenderScript context.
 * @param inBitmap source bitmap; expected to be an RGBA/ARGB_8888-style bitmap,
 *                 since the kernel reads RGBA_8888 pixels.
 * @return a new bitmap containing the filtered image.
 */
public static Bitmap beauty(Context context, Bitmap inBitmap) {
    // Creates a RS context. Destroyed in the finally block so a failure in any
    // step below cannot leak the native context.
    RenderScript mRS = RenderScript.create(context);
    Allocation inAllocation = null;
    Allocation outAllocation = null;
    ScriptC_beauty processScript = null;
    try {
        // Input Allocation: copies all Bitmap contents into RS-accessible memory.
        inAllocation = Allocation.createFromBitmap(mRS, inBitmap);
        // Output Type: one RGBA pixel = four unsigned chars (0-255) per element.
        // A Type-based approach is required for multi-dimensional sizing (X, Y).
        int bitmapWidth = inBitmap.getWidth();
        int bitmapHeight = inBitmap.getHeight();
        Type.Builder outType = new Type.Builder(mRS, Element.RGBA_8888(mRS))
                .setX(bitmapWidth)
                .setY(bitmapHeight);
        // Output Allocation wherein to store the kernel result.
        outAllocation = Allocation.createTyped(mRS, outType.create(), Allocation.USAGE_SCRIPT);
        // Script wrapper generated from beauty.rs.
        processScript = new ScriptC_beauty(mRS);
        // Bind the input; the kernel reads neighbors via rsGetElementAt.
        processScript.set_inAllocation(inAllocation);
        // Run the kernel; RS iterates over outAllocation's dimensions.
        processScript.forEach_beauty(outAllocation);
        // getConfig() may return null (e.g. unknown/hardware configs); fall back
        // to ARGB_8888, which matches the RGBA_8888 output element.
        Bitmap.Config config = inBitmap.getConfig();
        if (config == null) {
            config = Bitmap.Config.ARGB_8888;
        }
        Bitmap outBitmap = Bitmap.createBitmap(bitmapWidth, bitmapHeight, config);
        // Copy the kernel result into the output Bitmap.
        outAllocation.copyTo(outBitmap);
        return outBitmap;
    } finally {
        // Explicitly release native resources; relying on GC delays reclamation.
        if (inAllocation != null) {
            inAllocation.destroy();
        }
        if (outAllocation != null) {
            outAllocation.destroy();
        }
        if (processScript != null) {
            processScript.destroy();
        }
        mRS.destroy();
    }
}
RenderScript代碼
// Needed directive for RS to work
#pragma version(1)
// Change java_package_name directive to match your Activity's package path
#pragma rs java_package_name(com.dong.opencamera)
rs_allocation inAllocation;
static const float beautyParam = 0.5f;
static const float4 weight = {0.299f, 0.587f, 0.114f, 0.0f};
/*
 * Hard-light blend of a channel value with itself (Photoshop "hard light"
 * with identical base and blend layers). Pushes values away from 0.5,
 * increasing contrast: inputs <= 0.5 are darkened (2*c*c), inputs > 0.5
 * are brightened (1 - 2*(1-c)^2). Input and output are in [0, 1].
 *
 * All literals carry the 'f' suffix: without it they are doubles and the
 * arithmetic is promoted to double precision inside the kernel (this file
 * does not declare #pragma rs_fp_relaxed), which is needlessly slow on GPUs.
 */
static float hardLight(float color) {
    if (color <= 0.5f) {
        color = color * color * 2.0f;
    } else {
        color = 1.0f - ((1.0f - color) * (1.0f - color) * 2.0f);
    }
    return color;
}
/*
 * Per-pixel skin-smoothing kernel.
 *
 * Pipeline: blur the green channel with a sparse 20-tap kernel, build a
 * high-pass image (original minus blur, re-centered at 0.5), amplify it
 * with 5 hard-light passes, then blend the original and a smoothed color
 * using luminance^beautyParam as a skin mask (bright skin-toned pixels get
 * the most smoothing; dark pixels are mostly passed through).
 *
 * Bug fixed vs. the original: the tap at (x + 8, y - 5) was sampled twice
 * and (x + 8, y + 5) never sampled, biasing the blur while still dividing
 * by 48. Double literals were also suffixed with 'f' to keep all math in
 * single precision (no rs_fp_relaxed pragma is declared).
 */
uchar4 __attribute__((kernel)) beauty(uint32_t x, uint32_t y) {
    const uint32_t imageWidth = rsAllocationGetDimX(inAllocation);
    const uint32_t imageHeight = rsAllocationGetDimY(inAllocation);
    float4 in = rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x, y));
    // Pass border pixels through untouched: the blur taps reach up to 10 px
    // away, so pixels closer than that to an edge would read out of bounds.
    if (x < 10 || y < 10 || x >= (imageWidth - 10) || y >= (imageHeight - 10)) {
        return rsPackColorTo8888(in);
    }
    // Sparse blur of the green channel. Weights: center 20, outer ring
    // (12 taps) x1, inner ring (8 taps) x2 -> total weight 48.
    // The scalar splats across all four lanes; only .g is used below.
    float4 sampleColor = in.g * 20.0f;
    // Outer ring, weight 1 (12 taps at distance ~10 from the center).
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x, y - 10));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x, y + 10));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 10, y));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 10, y));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 5, y - 8));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 5, y + 8));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 5, y + 8));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 5, y - 8));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 8, y - 5));
    // Fixed: was a duplicate of (x + 8, y - 5); symmetry requires (x + 8, y + 5).
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 8, y + 5));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 8, y + 5));
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 8, y - 5));
    // Inner ring, weight 2 (8 taps at distance ~6 from the center).
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x, y - 6)) * 2.0f;
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x, y + 6)) * 2.0f;
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 6, y)) * 2.0f;
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 6, y)) * 2.0f;
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 4, y - 4)) * 2.0f;
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x - 4, y + 4)) * 2.0f;
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 4, y - 4)) * 2.0f;
    sampleColor += rsUnpackColor8888(rsGetElementAt_uchar4(inAllocation, x + 4, y + 4)) * 2.0f;
    sampleColor = sampleColor / 48.0f;
    // High-pass: original green minus its blur, re-centered at 0.5
    // (Photoshop "High Pass" filter on the green channel).
    float highPass = in.g - sampleColor.g + 0.5f;
    highPass = clamp(highPass, 0.0f, 1.0f);
    // Repeated hard-light passes sharpen the high-pass mask so blemishes and
    // noise stand out from flat skin.
    for (int i = 0; i < 5; i++) {
        highPass = hardLight(highPass);
    }
    highPass = clamp(highPass, 0.0f, 1.0f);
    // Luminance (BT.601 weights; alpha weight is 0) raised to beautyParam acts
    // as the skin mask / smoothing strength.
    float luminance = dot(in, weight);
    float alpha = pow(luminance, beautyParam);
    alpha = clamp(alpha, 0.0f, 1.0f);
    // Pull the color toward flatness in proportion to the mask.
    float3 offset = (in.rgb - (float3){highPass, highPass, highPass}) * alpha * 0.1f;
    float3 smoothColor = in.rgb + offset;
    smoothColor.r = clamp(smoothColor.r, 0.0f, 1.0f);
    smoothColor.g = clamp(smoothColor.g, 0.0f, 1.0f);
    smoothColor.b = clamp(smoothColor.b, 0.0f, 1.0f);
    // Linear blend between the smoothed color and a lighten-only variant,
    // weighted by the skin mask; alpha channel is preserved.
    float4 out;
    out.r = mix(smoothColor.r, max(in.r, smoothColor.r), alpha);
    out.g = mix(smoothColor.g, max(in.g, smoothColor.g), alpha);
    out.b = mix(smoothColor.b, max(in.b, smoothColor.b), alpha);
    out.a = in.a;
    return rsPackColorTo8888(out);
}
相機美顏的原理
1.取出綠色通道,對綠色通道進行模糊處理,例如高斯模糊,得到模糊後的值sampleColor
2.用原圖綠色通道值減去sampleColor,加上0.5(即128),1+2兩個步驟即PS中的高反差保留
3.對上述結果值進行多次強光處理(本文代碼中爲5次),此步驟可以使得噪聲更加突出
4.計算原圖的灰度值,公式爲 0.299*R + 0.587*G + 0.114*B
5.將灰度值作爲閾值,用來排除非皮膚部分,根據灰度值計算,將原圖與1-3後的結果圖合成
6.對混合後結果增加亮度
7.以灰度值作爲透明度將原圖與混合後結果進行濾色、柔光等混合,並調節飽和度