/** * Copyright 2012 JogAmp Community. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list * of conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are those of the * authors and should not be interpreted as representing official policies, either expressed * or implied, of JogAmp Community. 
*/ package jogamp.opengl.android.av; import java.io.IOException; import javax.media.opengl.GL; import javax.media.opengl.GLES2; import javax.media.opengl.GLException; import com.jogamp.common.os.AndroidVersion; import com.jogamp.common.os.Platform; import com.jogamp.opengl.util.TimeFrameI; import com.jogamp.opengl.util.av.GLMediaPlayer; import com.jogamp.opengl.util.texture.Texture; import com.jogamp.opengl.util.texture.TextureSequence; import jogamp.common.os.android.StaticContext; import jogamp.opengl.util.av.GLMediaPlayerImpl; import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture.OnFrameAvailableListener; import android.media.MediaPlayer; import android.media.MediaPlayer.OnCompletionListener; import android.net.Uri; import android.view.Surface; /*** * Android implementation utilizes API level 14 (4.0.? ICS) features * as listed below. *
* We utilize the {@link MediaPlayer} with direct to texture streaming.
* The MediaPlayer uses libstagefright
to access the OpenMAX AL implementation
* for hardware decoding.
*
* Since the MediaPlayer API can only deal w/ one SurfaceTexture,
* we enforce textureCount
= 2 via {@link #validateTextureCount(int)}
* and duplicate the single texture via {@link #createTexFrames(GL, int)} .. etc.
 * Two instances of TextureFrame are required due to our framework implementation w/ Ringbuffer and 'lastFrame' access.
*
* Returns 2 - implementation duplicates single texture *
*/ @Override protected int validateTextureCount(int desiredTextureCount) { return 2; } @Override protected final int getNextTextureImpl(GL gl, TextureFrame nextFrame) { int pts = TimeFrameI.INVALID_PTS; if(null != mp) { final SurfaceTextureFrame sTexFrame = (SurfaceTextureFrame) nextFrame; final SurfaceTexture surfTex = sTexFrame.surfaceTex; if( sTexFrame != singleSTexFrame ) { throw new InternalError("XXX: sTexFrame: "+sTexFrame+", singleSTexFrame "+singleSTexFrame); } if( !sTexFrameAttached ) { sTexFrameAttached = true; final Surface surface = new Surface(sTexFrame.surfaceTex); mp.setSurface(surface); surface.release(); surfTex.setOnFrameAvailableListener(onFrameAvailableListener); } if( eos || !mp.isPlaying() ) { eos = true; pts = TimeFrameI.END_OF_STREAM_PTS; } else { // Only block once, no while-loop. // This relaxes locking code of non crucial resources/events. boolean update = updateSurface; if( !update ) { synchronized(updateSurfaceLock) { if(!updateSurface) { // volatile OK. try { updateSurfaceLock.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } update = updateSurface; updateSurface = false; } } if(update) { surfTex.updateTexImage(); // nextFrame.setPTS( (int) ( nextSTex.getTimestamp() / 1000000L ) ); // nano -9 -> milli -3 pts = mp.getCurrentPosition(); // stex.getTransformMatrix(atex.getSTMatrix()); } } nextFrame.setPTS( pts ); } return pts; } /** * {@inheritDoc} ** Creates only one single texture and duplicated content to 2 TextureFrames *
*/ @Override protected TextureFrame[] createTexFrames(GL gl, final int count) { final int[] texNames = new int[1]; gl.glGenTextures(1, texNames, 0); final int err = gl.glGetError(); if( GL.GL_NO_ERROR != err ) { throw new RuntimeException("TextureNames creation failed (num: 1/"+count+"): err "+toHexString(err)); } final TextureFrame[] texFrames = new TextureFrame[count]; for(int i=0; i* Destroys the single texture at last call. *
*/ @Override protected final void destroyTexFrame(GL gl, TextureSequence.TextureFrame frame) { sTexFrameCount--; if( 0 == sTexFrameCount ) { singleSTexFrame = null; sTexFrameAttached = false; final SurfaceTextureFrame sFrame = (SurfaceTextureFrame) frame; sFrame.surfaceTex.release(); super.destroyTexFrame(gl, frame); } } private OnFrameAvailableListener onFrameAvailableListener = new OnFrameAvailableListener() { @Override public void onFrameAvailable(SurfaceTexture surfaceTexture) { wakeUp(true); } }; private OnCompletionListener onCompletionListener = new OnCompletionListener() { @Override public void onCompletion(MediaPlayer mp) { eos = true; } }; }