我嘗試使用OpenGL ES 2.0在Android NDK上使用非屏幕渲染來獲取圖像。爲什麼OpenGL ES覆蓋所有Android視圖?只是我想離線渲染〜
的Android版本:4.4.2
設備:三星Galaxy Tab SM-T705
- 我在做安卓
- 一個簡單的按鈕範例:按下按鈕時,程式會在 NDK 中呼叫 eglCreatePbufferSurface() 建立離屏表面
- 本程序繪製一個三角形。
glReadPixels()
獲取結果圖像。- 並保存位圖類
- 該程序在Android窗口中繪製結果圖像。
我不希望 OpenGL 覆蓋狀態列(bar)和按鈕,也不希望顯示 OpenGL ES 視圖。
NDK的C代碼
#include <jni.h>

#include <android/bitmap.h>
#include <android/log.h>

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <EGL/eglplatform.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "tga.h"
#include "jpge.h"
#define LOG_TAG "libgl2jni"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
//static void printGLString(const char *name, GLenum s) {
// const char *v = (const char *) glGetString(s);
// LOGI("GL %s = %s\n", name, v);
//}
static void checkGlError(const char* op) {
for (GLint error = glGetError(); error; error
= glGetError()) {
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
// Minimal pass-through vertex shader: positions arrive already in clip space.
static const char gVertexShader[] =
"attribute vec4 vPosition;\n"
"void main() {\n"
" gl_Position = vPosition;\n"
"}\n";
// Fragment shader: paints every fragment opaque green.
static const char gFragmentShader[] =
"precision mediump float;\n"
"void main() {\n"
" gl_FragColor = vec4(0.0, 1.0, 0.0, 1.0);\n"
"}\n";
/**
* Initialize an EGL context for the current display.
*/
static int engine_init_display(int width, int height) {
// initialize OpenGL ES and EGL
/*
* Here specify the attributes of the desired configuration.
* Below, we select an EGLConfig with at least 8 bits per color
* component compatible with on-screen windows
*/
const EGLint attribs[] = {
//EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_NONE
};
EGLint w, h, dummy, format;
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
checkGlError("eglGetDisplay");
eglInitialize(display, 0, 0);
checkGlError("eglInitialize");
/* Here, the application chooses the configuration it desires. In this
* sample, we have a very simplified selection process, where we pick
* the first EGLConfig that matches our criteria */
eglChooseConfig(display, attribs, &config, 1, &numConfigs);
checkGlError("eglChooseConfig");
/* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
* guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
* As soon as we picked a EGLConfig, we can safely reconfigure the
* ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
checkGlError("eglGetConfigAttrib");
// using PexelBuffer
EGLint attribList[] =
{
EGL_WIDTH, width,
EGL_HEIGHT, height,
EGL_LARGEST_PBUFFER, EGL_TRUE,
EGL_NONE
};
surface = eglCreatePbufferSurface(display, config, attribList);
checkGlError("eglCreatePbufferSurface");
// surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
const EGLint attrib_list[] = {
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL_NONE
};
context = eglCreateContext(display, config, NULL, attrib_list);
checkGlError("eglCreateContext");
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
// LOGW("Unable to eglMakeCurrent");
return -1;
}
// eglQuerySurface(display, surface, EGL_WIDTH, &w);
// eglQuerySurface(display, surface, EGL_HEIGHT, &h);
// Initialize GL state.
glEnable(GL_CULL_FACE);
// glDisable(GL_DEPTH_TEST);
glEnable(GL_DEPTH_TEST);
return 0;
}
/*
 * Compile one shader of the given type from GLSL source text.
 * Returns the shader handle, or 0 on failure.
 */
GLuint loadShader(GLenum shaderType, const char* pSource) {
    GLuint shader = glCreateShader(shaderType);
    if (shader) {
        glShaderSource(shader, 1, &pSource, NULL);
        glCompileShader(shader);
        GLint compiled = 0;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint infoLen = 0;
            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
            if (infoLen) {
                char* buf = (char*) malloc(infoLen);
                if (buf) {
                    glGetShaderInfoLog(shader, infoLen, NULL, buf);
                    LOGE("Could not compile shader %d:\n%s\n",
                            shaderType, buf);
                    free(buf);
                }
            }
            /* BUG FIX: delete the failed shader even when the driver
             * reports an empty info log; previously the delete was inside
             * `if (infoLen)`, so a failed shader leaked and was returned
             * to the caller as if it were valid. */
            glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}
/*
 * Compile the two shaders and link them into a program.
 * Returns the program handle, or 0 on failure. The shader objects are
 * always released (after a successful link they are only marked for
 * deletion and live on until the program itself is deleted).
 */
GLuint createProgram(const char* pVertexSource, const char* pFragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
    if (!vertexShader) {
        return 0;
    }
    GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
    if (!pixelShader) {
        /* BUG FIX: the vertex shader leaked on this path. */
        glDeleteShader(vertexShader);
        return 0;
    }
    GLuint program = glCreateProgram();
    if (program) {
        glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        glLinkProgram(program);
        GLint linkStatus = GL_FALSE;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (linkStatus != GL_TRUE) {
            GLint bufLength = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
            if (bufLength) {
                char* buf = (char*) malloc(bufLength);
                if (buf) {
                    glGetProgramInfoLog(program, bufLength, NULL, buf);
                    LOGE("Could not link program:\n%s\n", buf);
                    free(buf);
                }
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    /* BUG FIX: shader objects were never released before. */
    glDeleteShader(vertexShader);
    glDeleteShader(pixelShader);
    return program;
}
// Linked GLSL program handle (0 until setupGraphics succeeds).
GLuint gProgram;
// Location of the "vPosition" attribute within gProgram.
GLuint gvPositionHandle;
bool setupGraphics(int w, int h) {
// printGLString("Version", GL_VERSION);
// printGLString("Vendor", GL_VENDOR);
// printGLString("Renderer", GL_RENDERER);
// printGLString("Extensions", GL_EXTENSIONS);
LOGI("setupGraphics(%d, %d)", w, h);
gProgram = createProgram(gVertexShader, gFragmentShader);
if (!gProgram) {
LOGE("Could not create program.");
return false;
}
gvPositionHandle = glGetAttribLocation(gProgram, "vPosition");
checkGlError("glGetAttribLocation");
LOGI("glGetAttribLocation(\"vPosition\") = %d\n",
gvPositionHandle);
glViewport(0, 0, w, h);
checkGlError("glViewport");
return true;
}
// Triangle vertex positions, three (x, y, z) triples in clip space.
// NOTE(review): renderFrame() scales this array in place on EVERY call,
// so the triangle shrinks cumulatively across frames -- confirm whether
// a one-time size adjustment was intended instead.
GLfloat gTriangleVertices[] = { 1.0f, 1.0f, 1.0f, 0.5f, 0.5f, 0.5f,
-1.5f, 1.5f, 1.5f };
char* renderFrame(int width, int height) {
static float grey;
grey += 0.01f;
if (grey > 1.0f) {
grey = 0.0f;
}
// 크기 조정
for(int count = 0; count < 9; ++count)
{
gTriangleVertices[count] *= 0.8f;
}
char* pixelData = (char*)malloc(4 * width * height * sizeof(char));
for(int count = 0; count < width * height * 4; ++count) {
pixelData[count] = 0;
}
// 깊이 버퍼 활성화
glEnable(GL_DEPTH_TEST);
// 깊이 버퍼 초기화
glClearDepthf(1.F);
// glClearColor(grey, grey, grey, 1.0f);
glClearColor(1.0, 0.0, 0.0, 1.0f);
checkGlError("glClearColor");
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
checkGlError("glClear");
glUseProgram(gProgram);
checkGlError("glUseProgram");
glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, 0, gTriangleVertices);
checkGlError("glVertexAttribPointer");
glEnableVertexAttribArray(gvPositionHandle);
checkGlError("glEnableVertexAttribArray");
glDrawArrays(GL_TRIANGLES, 0, 3);
checkGlError("glDrawArrays");
glReadPixels(
0, 0,
width, height,
GL_RGBA,
GL_UNSIGNED_BYTE,
pixelData
);
// int jpgCount = 0;
// for(int count = 0; count < width * height * 4; ++count) {
// if (count % 4 != 3) {
// pixelJpgData[jpgCount] = pixelData[count];
// ++jpgCount;
// }
// }
// tgaGrabScreenSeries("/storage/emulated/0/Pictures/CTgaTest", 0, 0, width, height);
jpge::compress_image_to_jpeg_file(
"/storage/emulated/0/Pictures/CJpgTest.jpg",
width, height,
4,
(jpge::uint8*)pixelData
);
// LOGI("%s", (char const *)pixelData);
// for(int count = 0; count < width*height*4; ++count) {
// LOGI("%x", pixelData[count]);
// }
return pixelData;
// if(pixelData) {
// free(pixelData);
//// delete[] pixelData;
// }
}
/*
 * Decode an encoded image byte stream into an android.graphics.Bitmap by
 * calling BitmapFactory.decodeByteArray() through JNI.
 *
 * NOTE(review): decodeByteArray expects a COMPRESSED stream (JPEG, PNG,
 * ...). Feeding it raw RGBA pixels from glReadPixels returns null; to
 * fill a Bitmap with raw pixels use AndroidBitmap_lockPixels + memcpy.
 *
 * Returns 1 on success, 2 on any JNI failure. On success *bitmap holds a
 * local reference valid for the current JNI frame only.
 */
int
decodeMemory(JNIEnv* env, const void* data, size_t len, jobject* bitmap)
{
    jclass clazz = env->FindClass("android/graphics/BitmapFactory");
    if (clazz == NULL || env->ExceptionCheck()) {
        env->ExceptionClear();
        return 2;
    }
    /* BUG FIX: BitmapFactory has no "decodeArray" method -- the static
     * factory is decodeByteArray(byte[], int, int), so GetStaticMethodID
     * always threw NoSuchMethodError and this function always failed. */
    jmethodID mid = env->GetStaticMethodID(clazz, "decodeByteArray",
            "([BII)Landroid/graphics/Bitmap;");
    if (mid == NULL || env->ExceptionCheck()) {
        env->ExceptionClear();
        env->DeleteLocalRef(clazz);
        return 2;
    }
    jbyteArray jarray = env->NewByteArray((jsize) len);
    if (jarray == NULL) {
        env->ExceptionClear();
        env->DeleteLocalRef(clazz);
        return 2;
    }
    env->SetByteArrayRegion(jarray, 0, (jsize) len, (const jbyte*) data);
    *bitmap = env->CallStaticObjectMethod(clazz, mid, jarray, 0, (jint) len);
    /* Release local refs promptly to avoid local-reference table growth. */
    env->DeleteLocalRef(jarray);
    env->DeleteLocalRef(clazz);
    return 1;
}
extern "C"
{
/* JNI entry point for GL2JNILib.init(): create the off-screen EGL
 * context/surface and build the shader program. The return values of
 * both helpers are ignored, so a failed setup goes unnoticed here. */
JNIEXPORT void JNICALL Java_com_javacodegeeks_android_buttonexample_GL2JNILib_init
(JNIEnv * env, jobject obj, jint width, jint height)
{
engine_init_display(width, height);
setupGraphics(width, height);
}
}
extern "C"
{
/*
 * JNI entry point for GL2JNILib.step(): render one frame off-screen and
 * copy the resulting RGBA pixels into the Java-side Bitmap, which must
 * be ARGB_8888 and exactly width x height.
 *
 * BUG FIX: the old code handed the pixels to decodeMemory(), which
 * (a) wrote its result into a local copy of the jobject parameter, so
 * the caller's Bitmap was never touched, and (b) used BitmapFactory,
 * which cannot decode raw pixel data anyway. Writing through
 * AndroidBitmap_lockPixels() actually fills the caller's Bitmap.
 */
JNIEXPORT void JNICALL Java_com_javacodegeeks_android_buttonexample_GL2JNILib_step
(JNIEnv * env, jobject obj, jobject jBitmap, jint width, jint height)
{
    char* pixelData = renderFrame(width, height);
    if (pixelData == NULL) {
        return;  /* allocation failure already logged */
    }
    AndroidBitmapInfo info;
    void* dst = NULL;
    if (AndroidBitmap_getInfo(env, jBitmap, &info) == ANDROID_BITMAP_RESULT_SUCCESS
            && info.format == ANDROID_BITMAP_FORMAT_RGBA_8888
            && info.width == (uint32_t) width
            && info.height == (uint32_t) height
            && AndroidBitmap_lockPixels(env, jBitmap, &dst) == ANDROID_BITMAP_RESULT_SUCCESS) {
        /* glReadPixels rows are bottom-up; flip while copying so the
         * Bitmap comes out top-down as Android expects. Copy row by row
         * because the Bitmap stride may exceed width*4. */
        for (uint32_t row = 0; row < info.height; ++row) {
            memcpy((char*) dst + (size_t) row * info.stride,
                   pixelData + (size_t) (info.height - 1 - row) * width * 4,
                   (size_t) width * 4);
        }
        AndroidBitmap_unlockPixels(env, jBitmap);
    } else {
        LOGE("step: bitmap is not a %dx%d RGBA_8888 bitmap", width, height);
    }
    free(pixelData);
}
}
Java代碼
package com.javacodegeeks.android.buttonexample;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Environment;
import android.util.Log;
import android.view.View;
class GL2View extends View
{
// View that asks native code to render a frame off-screen into mBitmap,
// then draws that bitmap onto its own canvas and saves it as a JPEG.
public GL2View(Context context, int w, int h) {
super(context);
// TODO Auto-generated constructor stub
width = w;
height = h;
// OpenGL test section start (translated from Korean) ////////////////
// Target bitmap the native renderer's output is meant to land in.
mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
GL2JNILib.init(width, height);
mCanvas = new Canvas();
mCanvas.setBitmap(mBitmap);
// OpenGL test section end (translated from Korean) //////////////////
mPaint = new Paint();
}
/* Image SDCard Save (input Bitmap -> saved file JPEG)
 * Writer intruder(Kwangseob Kim)
 * @param bitmap : input bitmap file
 * @param folder : input folder name
 * @param name : output file name
 * NOTE(review): the output stream is closed inside try, so it leaks if
 * compress() throws; getMessage() may be null and NPE inside Log.e.
 */
public static void saveBitmaptoJpeg(Bitmap bitmap,String folder, String name){
// String ex_storage =Environment.getExternalStorageDirectory().getAbsolutePath(); // absolute path (translated)
String ex_storage = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath(); // image path (translated)
// Get Absolute Path in External Sdcard
String foler_name = "/"+folder+"/";
String file_name = name+".jpg";
String string_path = ex_storage+foler_name;
Log.d(VIEW_LOG_TAG, ex_storage);
File file_path;
try{
file_path = new File(string_path);
if(!file_path.isDirectory()){
// mkdirs() result is ignored; a failure surfaces later as
// FileNotFoundException from the stream constructor.
file_path.mkdirs();
}
FileOutputStream out = new FileOutputStream(string_path+file_name);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out);
out.close();
}catch(FileNotFoundException exception){
Log.e("FileNotFoundException", exception.getMessage());
}catch(IOException exception){
Log.e("IOException", exception.getMessage());
}
}
// Save the current off-screen bitmap under Pictures as "JavaTest.jpg".
public void SaveBitmapToSDcard()
{
// File path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
saveBitmaptoJpeg(mBitmap, "./", "JavaTest");
}
// @Override
// protected void onSizeChanged(int w, int h, int oldw, int oldh)
// {
// mBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
// mCanvas = new Canvas();
// mCanvas.setBitmap(mBitmap);
//
// testDrawing();
// }
//
// private void testDrawing()
// {
// mCanvas.drawColor(Color.WHITE);
// mPaint.setColor(Color.RED);
// mCanvas.drawRect(100, 100, 200, 200, mPaint);
// }
//
@Override
protected void onDraw(Canvas canvas)
{
// Ask native code to render and (intended) fill mBitmap, then blit it.
GL2JNILib.step(mBitmap, width, height);
// GL2JNILib.step(width, height);
if(mBitmap != null)
{
canvas.drawBitmap(mBitmap, 0, 0, null);
}
// NOTE(review): writing a JPEG to disk on every draw pass blocks the
// UI thread; move this out of onDraw in production code.
SaveBitmapToSDcard();
}
// Off-screen render target shared with native code.
private Bitmap mBitmap;
private Canvas mCanvas;
private Paint mPaint;
int width;
int height;
}
本地(JNI)包裝類別程式碼
//package com.android.gl2jni;
package com.javacodegeeks.android.buttonexample;
import android.graphics.Bitmap;
// Wrapper for native library
public class GL2JNILib {
static {
System.loadLibrary("gl2jni");
}
/**
* Create the off-screen EGL context and shader program in native code.
* @param width the current view width
* @param height the current view height
*/
public static native void init(int width, int height);
/**
* Render one frame off-screen; the native side is expected to copy the
* resulting pixels into {@code bitmap} (ARGB_8888, width x height).
*/
public static native void step(Bitmap bitmap, int width, int height);
// public static native void step(int width, int height);
}
如果你不想顯示視圖,你爲什麼還要建立一個視圖?我沒有看到你的程式碼需要在視圖中呼叫本地程式碼的原因。如果需要,你可以完全用本地程式碼建立上下文和 PBuffer 表面。 – 2015-03-19 04:00:40
恩......我不想顯示OpenGL視圖。我只想獲取位圖數據。如果我不調用「protected void onDraw(Canvas canvas)」,OpenGL視圖會覆蓋我的渲染視圖(導航欄和按鈕)。我認爲eglCreatePBufferSurface()問題或深度緩衝區問題, – 2015-03-19 04:27:17
不知道我的觀點是否清楚。我甚至不會爲此創建一個視圖。 – 2015-03-19 04:53:02