Android Question

How to draw Buffer[] to a TextureView on Android?

I'm using JavaCV's FFmpegFrameGrabber to retrieve frames from a video file. FFmpegFrameGrabber returns a Frame, which basically contains a Buffer[] holding the image pixels for a video frame.
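
For reference, the basic grab loop looks roughly like this (a minimal sketch; the file path is a placeholder, and the grabber calls throw FrameGrabber.Exception):

FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("/path/to/video.mp4"); // placeholder path
grabber.start();
Frame frame = grabber.grabImage(); // next video frame; frame.image is the Buffer[]
// ... use frame.image[0] ...
grabber.stop();
grabber.release();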

Because performance is my top priority, I would like to use OpenGL ES to display this Buffer[] directly, without converting it into a Bitmap.

The view to be displayed takes up less than half of the screen, and according to the OpenGL ES documentation:

Developers who want to incorporate OpenGL ES graphics in a small portion of their layouts should take a look at TextureView.

So I guess TextureView is the right choice for this task. However, I haven't found many resources about it (most of them are Camera Preview examples).

I would like to ask: how can I draw a Buffer[] to a TextureView? And if this is not the most efficient way to do this, I'm willing to try alternatives.




Update: Currently I have it set up like this:

In my VideoActivity, I repeatedly extract the video's Frame, which contains a ByteBuffer, and then send it to my MyGLRenderer2 to be converted into an OpenGL ES texture:

...
mGLSurfaceView = (GLSurfaceView)findViewById(R.id.gl_surface_view);
mGLSurfaceView.setEGLContextClientVersion(2);
mRenderer = new MyGLRenderer2(this);
mGLSurfaceView.setRenderer(mRenderer);
mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
...

private void grabCurrentFrame(final long currentPosition){
    if(mCanSeek){
        new AsyncTask(){

            @Override
            protected void onPreExecute() {
                super.onPreExecute();
                mCanSeek = false;
            }

            @Override
            protected Object doInBackground(Object[] params) {
                try {
                    Frame frame = mGrabber.grabImage();
                    setCurrentFrame((ByteBuffer)frame.image[0]);
                }
                catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            }

            @Override
            protected void onPostExecute(Object o) {
                super.onPostExecute(o);
                mCanSeek = true;
            }
        }.execute();
    }
}


MyGLRenderer2 looks like this:

public class MyGLRenderer2 implements GLSurfaceView.Renderer {
    private static final String TAG = "MyGLRenderer2";
    private Square square;

    public MyGLRenderer2(Context context){
        super();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        GLES20.glClearColor(0, 0, 0, 1);
        square = new Square();
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        createFrameTexture(mCurrentBuffer, 64, 64, GLES20.GL_RGB); // not sure about the size of this yet
        square.draw(textureHandle);
        if(mCurrentBuffer != null){
            mCurrentBuffer.clear();
        }
    }

    //test
    private ByteBuffer mCurrentBuffer;
    public void setTexture(ByteBuffer buffer){
        mCurrentBuffer = buffer.duplicate();
        mCurrentBuffer.position(0);

        Log.d(TAG, mCurrentBuffer.toString());
    }

    private int[] textureHandles = new int[1];
    private int textureHandle;

    public void createFrameTexture(ByteBuffer data, int width, int height, int format) {
        GLES20.glGenTextures(1, textureHandles, 0);
        textureHandle = textureHandles[0];
        GlUtil.checkGlError("glGenTextures");

        // Bind the texture handle to the 2D texture target.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);

        // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering
        // is smaller or larger than the source image.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GlUtil.checkGlError("loadImageTexture");

        // Load the data from the buffer into the texture handle.
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
                width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
        GlUtil.checkGlError("loadImageTexture");
    }
}

And Square looks like this:

public class Square {
    private float vertices[] = {
            -1f, -1f,
            1f, -1f,
            -1f, 1f,
            1f, 1f,
    };

    private float textureVertices[] = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f
    };

    private FloatBuffer verticesBuffer;
    private FloatBuffer textureBuffer;

    public Square(){
        initializeBuffers();
        initializeProgram();
    }

    private void initializeBuffers(){
        ByteBuffer buff = ByteBuffer.allocateDirect(vertices.length * 4);
        buff.order(ByteOrder.nativeOrder());
        verticesBuffer = buff.asFloatBuffer();
        verticesBuffer.put(vertices);
        verticesBuffer.position(0);

        buff = ByteBuffer.allocateDirect(textureVertices.length * 4);
        buff.order(ByteOrder.nativeOrder());
        textureBuffer = buff.asFloatBuffer();
        textureBuffer.put(textureVertices);
        textureBuffer.position(0);
    }

    private final String vertexShaderCode =
            "attribute vec4 aPosition;" +
            "attribute vec2 aTexPosition;" +
            "varying vec2 vTexPosition;" +
            "void main() {" +
            " gl_Position = aPosition;" +
            " vTexPosition = aTexPosition;" +
            "}";

    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform sampler2D uTexture;" +
            "varying vec2 vTexPosition;" +
            "void main() {" +
            " gl_FragColor = texture2D(uTexture, vTexPosition);" +
            "}";

    private int vertexShader;
    private int fragmentShader;
    private int program;

    private void initializeProgram(){
        vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
        GLES20.glShaderSource(vertexShader, vertexShaderCode);
        GLES20.glCompileShader(vertexShader);

        fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
        GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
        GLES20.glCompileShader(fragmentShader);

        program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);

        GLES20.glLinkProgram(program);
    }

    public void draw(int texture){
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        GLES20.glUseProgram(program);
        GLES20.glDisable(GLES20.GL_BLEND);

        int positionHandle = GLES20.glGetAttribLocation(program, "aPosition");
        int textureHandle = GLES20.glGetUniformLocation(program, "uTexture");
        int texturePositionHandle = GLES20.glGetAttribLocation(program, "aTexPosition");

        GLES20.glVertexAttribPointer(texturePositionHandle, 2, GLES20.GL_FLOAT, false, 0, textureBuffer);
        GLES20.glEnableVertexAttribArray(texturePositionHandle);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
        GLES20.glUniform1i(textureHandle, 0);

        GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, verticesBuffer);
        GLES20.glEnableVertexAttribArray(positionHandle);

        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}

I'm not sure where I went wrong, but what I'm getting now is white noise.

Answer

The most efficient way to do what you ask will be to convert your pixels to an OpenGL ES texture, and render that on the TextureView. The function to use is glTexImage2D().
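
Roughly, the upload could look like this (a generic sketch, not Grafika's exact code; frameWidth, frameHeight, and pixelBuffer stand in for your frame's real dimensions and data, assuming tightly packed RGBA pixels — allocate the texture once, then just update its contents each frame instead of regenerating it):

// One-time setup (e.g. in onSurfaceCreated): create and configure the texture.
int[] handles = new int[1];
GLES20.glGenTextures(1, handles, 0);
int textureId = handles[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
// Allocate storage once, using the real frame dimensions (pixels may be null here).
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
        frameWidth, frameHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

// Per frame: re-upload only the pixel data into the existing texture.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
        frameWidth, frameHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);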

You can find some examples in Grafika, which uses the function to upload some generated textures. Take a look at createImageTexture(). Grafika's gles package may be of use if you don't already have GLES code in your app.

FWIW, it would be more efficient to decode video frames directly to a Surface created from the TextureView's SurfaceTexture, but I don't know if JavaCV supports that.
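
Getting that Surface is straightforward on the Java side (a sketch; R.id.texture_view is a placeholder id, and whether JavaCV can decode into the Surface is the open question):

TextureView textureView = (TextureView) findViewById(R.id.texture_view); // placeholder id
SurfaceTexture surfaceTexture = textureView.getSurfaceTexture(); // null until onSurfaceTextureAvailable() fires
Surface surface = new Surface(surfaceTexture);
// A decoder that can target a Surface (e.g. MediaCodec) could render frames straight into it.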

Edit: Another approach, if you don't mind working with the NDK, is to use ANativeWindow. Create a Surface for the TextureView's SurfaceTexture, pass it to native code, then call ANativeWindow_fromSurface() to get the ANativeWindow. Use ANativeWindow_setBuffersGeometry() to set the size and color format. Lock the buffer, copy the pixels in, unlock the buffer to post it. I don't think this requires an extra data copy internally, and potentially has some advantages over the glTexImage2D() approach.
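
The Java side of that might look like this (a sketch; nativeInit() and nativeDrawFrame() are hypothetical JNI methods, and the real work happens in C with the NDK calls named above):

// Hypothetical JNI boundary; the native side does the ANativeWindow work.
private native void nativeInit(Surface surface, int width, int height); // ANativeWindow_fromSurface() + ANativeWindow_setBuffersGeometry()
private native void nativeDrawFrame(ByteBuffer pixels); // ANativeWindow_lock(), copy pixels, ANativeWindow_unlockAndPost()

Surface surface = new Surface(textureView.getSurfaceTexture());
nativeInit(surface, frameWidth, frameHeight);
nativeDrawFrame((ByteBuffer) frame.image[0]); // called once per decoded frame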