I am facing a problem when trying to display an image using OpenGL ES in the Android NDK. I am reading the RGB data of a 720x480 BMP file and trying to display it on the emulator.
The application runs without errors (Logcat confirms this), but I cannot see the image on the emulator display.
I have pasted my JNI code below.
Additional information: RGB[0] holds the RGB data of the BMP file.
Texture size: 1024 x 512
Frame size: 720 x 480
grabFrame(); // draws the frame texture
StartDecoding(); // reads the raw RGB file (no headers)
Some parts of the code were taken from:
http://quirkygba.blogspot.com/2010/10/android-native-coding-in-c.html
Please refer to that link for the original example.
I would really appreciate some pointers so that I can proceed further.
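To make the sizes explicit, here is a small stand-alone sketch of the arithmetic I am assuming (the macro names in this sketch are only for illustration; in the JNI code below, FRAME_X/FRAME_Y actually hold the texture size, and RGB[0] holds the 720x480 frame data):

    #include <stdio.h>

    /* Illustrative names only; not the macros used in the JNI code below. */
    #define IMG_W 720      /* BMP frame width  */
    #define IMG_H 480      /* BMP frame height */
    #define TEX_W 1024     /* next power of two >= 720 */
    #define TEX_H 512      /* next power of two >= 480 */

    int main(void)
    {
        /* Raw 24-bit RGB frame: no header, no row padding. */
        unsigned long frame_bytes = (unsigned long)IMG_W * IMG_H * 3; /* 1,036,800 */

        /* Bytes a full TEX_W x TEX_H RGB upload would consume. */
        unsigned long tex_bytes = (unsigned long)TEX_W * TEX_H * 3;   /* 1,572,864 */

        printf("frame bytes  : %lu\n", frame_bytes);
        printf("texture bytes: %lu\n", tex_bytes);
        return 0;
    }

These are the two sizes behind the 1036800/1036900 values in malloc()/fread() and the FRAME_X/FRAME_Y values passed to glTexImage2D() below.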
#include "com_sample_GlBufferView.h";
#include <GLES/gl.h>
#include <GLES/glext.h>
#include <string.h>
#include <pthread.h>
#include <android/log.h>
#include <stdio.h>
#include <time.h>
#include <stdlib.h> /* for malloc() */
/* Texture dimensions (power of two), not the 720x480 frame size */
#define FRAME_X 1024
#define FRAME_Y 512
#define FPS_TRIGGER 5
static pthread_cond_t s_vsync_cond;
static pthread_mutex_t s_vsync_mutex;
GLuint textures[1];
GLuint textureId;
int frame_counter = 0;
time_t last_time;
int StartDecoding();
void grabFrame();
void printFramesPerSecond();
/*****************************/
FILE *fp;
#define No_Threads 1 /* only RGB[0] is used in this sample */
unsigned char *RGB[No_Threads];
/******************************/
static const GLfloat vertices[] = {
-0.5f, 1.0f,
0.5f, 1.0f,
-0.5f, -0.5f,
0.5f, -0.5f
};
static const GLfloat texCoords[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f
};
static void wait_vsync()
{
pthread_mutex_lock(&s_vsync_mutex);
pthread_cond_wait(&s_vsync_cond, &s_vsync_mutex);
pthread_mutex_unlock(&s_vsync_mutex);
}
JNIEXPORT void JNICALL Java_com_sample_GlBufferView_native_1start(JNIEnv * env, jobject obj)
{
/* init conditions */
pthread_cond_init(&s_vsync_cond, NULL);
pthread_mutex_init(&s_vsync_mutex, NULL);
while (1) {
/* game code goes here */
wait_vsync();
}
}
JNIEXPORT void JNICALL Java_com_sample_GlBufferView_native_1gl_1resize(JNIEnv * env, jobject obj, jint w, jint h)
{
// THIS IS CALLED WHEN CHANGING FROM PORTRAIT TO LANDSCAPE...
// height = h;
// width = w;
}
JNIEXPORT void JNICALL Java_com_sample_GlBufferView_native_1gl_1render(JNIEnv * env, jobject obj)
{
__android_log_print(ANDROID_LOG_INFO,"JNI", "native_gl_render function is called");
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
//glOrthof(0.0f, 1.0f, 0.0f, 1.0f, -1.0f, 1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisable(GL_DEPTH_TEST);
grabFrame();
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
printFramesPerSecond();
/* tell the other thread to carry on */
pthread_cond_signal(&s_vsync_cond);
}
void printFramesPerSecond() {
frame_counter++;
if(time(NULL)-last_time >= FPS_TRIGGER) {
__android_log_print(ANDROID_LOG_DEBUG,"FPS #: ", "%.2f", (float)frame_counter/(float)(time(NULL)-last_time));
frame_counter = 0;
last_time = time(NULL);
}
}
void grabFrame() {
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, FRAME_X, FRAME_Y, 0, GL_RGB, GL_UNSIGNED_BYTE, (void*)RGB[0]);
}
JNIEXPORT void JNICALL Java_com_sample_GlBufferView_native_1init(JNIEnv * env, jobject obj, jstring filename)
{
__android_log_print(ANDROID_LOG_INFO,"JNI", "native_init function is called");
// TURN ON 2D TEXTURE
glEnable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
glGenTextures(1, &textureId);
// BIND THE TEXTURE
glBindTexture(GL_TEXTURE_2D, textureId);
// SET TEXTURE PARAMS
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
RGB[0]=(unsigned char *)malloc(1036900); /* 720*480*3 = 1,036,800 bytes of raw RGB, plus a little slack */
//ref[thread_no]=obj1;
StartDecoding();
}
int StartDecoding()
{
__android_log_print(ANDROID_LOG_INFO,"JNI", "StartDecoding function is called");
fp=fopen("data/Image","rb");
if(fp!=NULL)
{
fread(RGB[0],1,1036800,fp); /* 720*480*3 bytes of raw RGB */
fclose(fp);
return 0;
}
__android_log_print(ANDROID_LOG_ERROR,"JNI", "StartDecoding: could not open data/Image");
return -1;
}
And my Android code is as follows:
import android.app.Activity;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class OpenGL_SampleActivity extends Activity {
/** Called when the activity is first created. */
private GlBufferView _glBufferview;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
_glBufferview = new GlBufferView(this);
setContentView(_glBufferview);
}
}
class GlBufferView extends GLSurfaceView {
private String filename="/data/Tiger.mpg";
native void native_start();
native void native_gl_resize(int w, int h);
native void native_gl_render();
native void native_init(String filename);
public GlBufferView(Context context) {
super(context);
(new Thread() {
@Override
public void run() {
native_start();
}
}).start();
setRenderer(new MyRenderer());
requestFocus();
setFocusableInTouchMode(true);
}
class MyRenderer implements GLSurfaceView.Renderer {
@Override
public void onDrawFrame(GL10 gl) {
native_gl_render();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
native_gl_resize(width, height);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
native_init(filename);
}
}
static {
System.loadLibrary("itv");
}
}
Can anybody please tell me where the problem in my code is?