2

UPDATE: got rid of the line GLES20.glEnable(GLES20.GL_TEXTURE_2D); But the line GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_BYTE, ByteBuffer.wrap(pixels)); gives GL_INVALID_ENUM... pixel buffer length is 196608.

Project files: http://godofgod.co.uk/my_files/NightCamPrj.zip

I am trying to get camera data to a OpenGL ES2 shader and the camera stuff appears to work but I cannot get the texture to work even when I try my own values. I get a black screen. Here is the code:

package com.matthewmitchell.nightcam;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Scanner;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.content.res.AssetManager;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

public class MyRenderer implements GLSurfaceView.Renderer{
    private FloatBuffer vertices;
    private FloatBuffer texcoords;
    private int mProgram;
    private int maPositionHandle;
    private int gvTexCoordHandle;
    private int gvSamplerHandle;
    private static Context context;
    int[] camera_texture;
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        initShapes();
        GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
        Debug.out("Hello init.");
        //Shaders
        int vertexShader = 0;
        int fragmentShader = 0;
        try {
            vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
            fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
        } catch (IOException e) {
            Debug.out("The shaders could not be found.");
            e.printStackTrace();
        }
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        Debug.out("VS LOG: " + GLES20.glGetShaderInfoLog(vertexShader)); 
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        Debug.out("FS LOG: " + GLES20.glGetShaderInfoLog(fragmentShader)); 
        GLES20.glLinkProgram(mProgram);                  // creates OpenGL program executables
        Debug.out("PROG LOG: " + GLES20.glGetProgramInfoLog(mProgram)); 
        // get handles
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
        gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
        camera_texture = null;
        GLES20.glEnable(GLES20.GL_TEXTURE_2D);
    }


    private void initShapes(){
        float triangleCoords[] = {
            // X, Y, Z
            -1.0f, -1.0f, 0.0f,
             1.0f, -1.0f, 0.0f,
             -1.0f, 1.0f, 0.0f,
             1.0f,  1.0f, 0.0f,
        }; 
        float texcoordf[] = {
            // X, Y, Z
            -1.0f,-1.0f,
            1.0f,-1.0f,
            -1.0f,1.0f,
            1.0f,1.0f,
        }; //Even if wrong way around it should produce a texture with these coordinates on the screen.

        // initialize vertex Buffer for vertices
        ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        vertices = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        vertices.put(triangleCoords);    // add the coordinates to the FloatBuffer
        vertices.position(0);            // set the buffer to read the first coordinate
        // initialize vertex Buffer for texcoords 
        vbb = ByteBuffer.allocateDirect(texcoordf.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        texcoords = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        texcoords.put(texcoordf);    // add the coordinates to the FloatBuffer
        texcoords.position(0);            // set the buffer to read the first coordinate
    }

    private static String readFile(String path) throws IOException {
        //Load file from assets folder using context given by the activity class
        AssetManager assetManager = context.getAssets();
        InputStream stream = assetManager.open(path);
        try {
            return new Scanner(stream).useDelimiter("\\A").next();
        }
        finally {
            stream.close();
        }
    }

    private int loadShader(int type, String shaderCode){
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if(camera_texture == null){
            return;
        }
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // Prepare the triangle data
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
        GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
        //Bind texture
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
        GLES20.glUniform1i(gvSamplerHandle, 0);
        // Draw the triangle
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        //Disable arrays
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    public void takeContext(Context mcontext) {
        context = mcontext;
    }

    void bindCameraTexture(byte[] data,int w,int h) {
        //Takes pixel data from camera and makes texture
        byte[] pixels = new byte[256*256*3]; //Testing simple 256x256 texture. Will update for camera resolution
        for(int x = 0;x < 256;x++){
            for(int y = 0;y < 256;y++){
                //Ignore camera data, use test values.
                pixels[(x*256+y)*3] = 0;
                pixels[(x*256+y)*3+1] = 100;
                pixels[(x*256+y)*3+2] = 120;
            }
        }
        //Debug.out("PX:" + pixels[0] + " " + pixels[1] + " " + pixels[2]);
        //Make new texture for new data
        if (camera_texture == null){
            camera_texture = new int[1];
        }else{
            GLES20.glDeleteTextures(1, camera_texture, 0);
        }
        GLES20.glGenTextures(1, camera_texture, 0);
        int tex = camera_texture[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_BYTE, ByteBuffer.wrap(pixels));
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    }
}

Here is the vertex shader code:

attribute vec4 vPosition;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main(){ 
    gl_Position = vPosition;
    v_texCoord = a_texCoord;
}    

Here is the fragment shader code:

precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D s_texture;
void main(){
    gl_FragColor = texture2D(s_texture, v_texCoord);
}

We can ignore the camera stuff here because I'm using test values. I'm using a test 256x256 texture. I've done everything I've seen in examples.

Why it is black and how can I make it show?

4
  • OK, but where is your shader code?
    – Max
    Commented Nov 7, 2011 at 15:02
  • Sorry. I forgot that didn't I. I'll be right onto getting it. Commented Nov 7, 2011 at 18:25
  • And what happens if you just set some constant color to fragment instead of texture sample?
    – Max
    Commented Nov 7, 2011 at 19:19
  • Yes that works providing I remove the calls that set-up the texture for drawing. Commented Nov 7, 2011 at 19:33

3 Answers 3

3
+100

I see that you're using glGetAttribLocation() to retrieve the location of s_texture. This is a uniform variable, not an attribute. Try using glGetUniformLocation() instead for this one.

I don't know if this will solve all of your problems, but it's something that needs to be done for sure.

4
  • OK, thanks I did this but it indeed did not solve it completely. Commented Nov 10, 2011 at 22:48
  • Too bad. I just saw your update about the invalid enum: this is probably caused by using GL_BYTE for the 'type' parameter in glTexImage2D, which is not a valid type. Use GL_UNSIGNED_BYTE here. See khronos.org/opengles/sdk/docs/man/xhtml/glTexImage2D.xml for accepted values.
    – svdree
    Commented Nov 11, 2011 at 7:53
  • Ah, it is working! Thank you. Though it flickers with black parts which is odd. Commented Nov 11, 2011 at 18:49
  • Well I sorted it all out except the rendering but it's a separate issue. I'll give the bounty to you since your help finally sorted it. Thanks to Max as well. Commented Nov 11, 2011 at 19:04
3

it is not seen from your code but it seems to me that you're not calling the bindCameraTexture from the place where there is rendering context (while you should do that in onSurfaceCreated or onSurfaceChanged).

4
  • Thanks for the answer. The method gets called. I'll post the entire project in a file... Commented Nov 7, 2011 at 21:37
  • Yep, the method is called in onPreviewFrame where there is no rendering context (that is why the texture is actually not created). you should save the byte array and then assign it to the texture in onDrawFrame.
    – Max
    Commented Nov 7, 2011 at 22:02
  • Oh, I see. I'll make sure I do this later and I'll report back. Thank you very much. Commented Nov 7, 2011 at 22:45
  • OK, I move the texture calls to onDrawFrame but it is still black. :( Commented Nov 8, 2011 at 1:24
0

I finished the sample with camera preview as the texture. the key difference with your code is :

  1. I use SurfaceTexture to connect the camera preview to the texture used in the openGL ES.

  2. I use the matrix generated by SurfaceTexture to adjust the output of camera preview, otherwise there is black flicker area.

  3. I do not call the glBindTexture() explicitly on the texture used for camera preview.

    Good Luck.

Your Answer

By clicking “Post Your Answer”, you agree to our terms of service and acknowledge you have read our privacy policy.

Not the answer you're looking for? Browse other questions tagged or ask your own question.