/**
 * Copyright 2012 JogAmp Community. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this list of
 *    conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
 *    of conditions and the following disclaimer in the documentation and/or other materials
 *    provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * The views and conclusions contained in the software and documentation are those of the
 * authors and should not be interpreted as representing official policies, either expressed
 * or implied, of JogAmp Community.
 */
package jogamp.opengl.util.av;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import com.jogamp.nativewindow.AbstractGraphicsDevice;
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GL2GL3;
import com.jogamp.opengl.GLContext;
import com.jogamp.opengl.GLDrawable;
import com.jogamp.opengl.GLDrawableFactory;
import com.jogamp.opengl.GLES2;
import com.jogamp.opengl.GLException;
import com.jogamp.opengl.GLProfile;

import jogamp.opengl.Debug;

import com.jogamp.common.net.UriQueryProps;
import com.jogamp.common.ExceptionUtils;
import com.jogamp.common.net.Uri;
import com.jogamp.common.os.Platform;
import com.jogamp.common.util.InterruptSource;
import com.jogamp.common.util.InterruptedRuntimeException;
import com.jogamp.common.util.LFRingbuffer;
import com.jogamp.common.util.Ringbuffer;
import com.jogamp.common.util.SourcedInterruptedException;
import com.jogamp.opengl.GLExtensions;
import com.jogamp.opengl.util.TimeFrameI;
import com.jogamp.opengl.util.av.AudioSink;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.glsl.ShaderCode;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

/**
 * After object creation an implementation may customize the behavior:
 * <p>
 * See {@link GLMediaPlayer}.
 * </p>
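 * <p>
 * A rough usage sketch of the resulting player (stream and GL initialization
 * abbreviated, see {@link GLMediaPlayer} for the full protocol):
 * </p>
 * <pre>{@code
 *   final GLMediaPlayer mp = GLMediaPlayerFactory.createDefault();
 *   // ... initialize the stream and the GL resources, then:
 *   mp.play();          // Paused -> Playing
 *   mp.seek(10*1000);   // jump to 10s, clamped to the stream duration
 *   mp.pause(false);    // Playing -> Paused, w/o flushing queued frames
 *   mp.destroy(gl);     // -> Uninitialized, releases all GL and A/V resources
 * }</pre>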
 */
public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
    private static final int STREAM_WORKER_DELAY = Debug.getIntProperty("jogl.debug.GLMediaPlayer.StreamWorker.delay", false, 0);

    private static final String unknown = "unknown";

    private volatile State state;
    private final Object stateLock = new Object();

    private int textureCount;
    private int textureTarget;
    private int textureFormat;
    private int textureInternalFormat;
    private int textureType;
    private int texUnit;

    private int textureFragmentShaderHashCode;

    private final int[] texMinMagFilter = { GL.GL_NEAREST, GL.GL_NEAREST };
    private final int[] texWrapST = { GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE };

    /** User requested URI stream location. */
    private Uri streamLoc = null;

    /**
     * In case {@link #streamLoc} is a {@link GLMediaPlayer#CameraInputScheme},
     * {@link #cameraPath} holds the URI's path portion
     * as parsed in {@link #initStream(Uri, int, int, int)}.
     * @see #cameraProps
     */
    protected Uri.Encoded cameraPath = null;
    /** Optional camera properties, see {@link #cameraPath}. */
    protected Map<String, String> cameraProps = null;

    /**
     * {@inheritDoc}
     * <p>
     * This implementation simply returns the built-in function name <code>texture2D</code>,
     * if not overridden by specialization.
     * </p>
     */
@Override
public String getTextureLookupFunctionName(final String desiredFuncName) throws IllegalStateException {
checkGLInit();
return "texture2D";
}
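    // Example lookup from a fragment shader, with an arbitrary sampler name:
    //   vec4 col = texture2D(mySampler2D, texCoord);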
    /**
     * {@inheritDoc}
     * <p>
     * This implementation simply returns an empty string since it's using
     * the built-in function <code>texture2D</code>,
     * if not overridden by specialization.
     * </p>
     */
@Override
public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
checkGLInit();
return "";
}
@Override
public final int getTextureFragmentShaderHashCode() {
if( !isTextureAvailable() ) {
textureFragmentShaderHashCode = 0;
return 0;
} else if( 0 == textureFragmentShaderHashCode ) {
int hash = 31 + getTextureLookupFragmentShaderImpl().hashCode();
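            // ((hash << 5) - hash) == 31 * hash, the classic hash-combining multiplier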
hash = ((hash << 5) - hash) + getTextureSampler2DType().hashCode();
textureFragmentShaderHashCode = hash;
}
return textureFragmentShaderHashCode;
}
@Override
public final int getDecodedFrameCount() { return decodedFrameCount; }
@Override
public final int getPresentedFrameCount() { return presentedFrameCount; }
@Override
public final int getVideoPTS() { return video_pts_last; }
@Override
public final int getAudioPTS() {
if( State.Uninitialized != state ) {
return getAudioPTSImpl();
}
return 0;
}
/** Override if not using audioSink! */
protected int getAudioPTSImpl() {
if( null != audioSink ) {
return audioSink.getPTS();
} else {
return 0;
}
}
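    // Note: with an AudioSink present its PTS effectively acts as the playback clock
    // for A/V synchronization; without one, 0 is returned.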
@Override
public final State getState() { return state; }
protected final void setState(final State s) { state=s; }
@Override
public final State play() {
synchronized( stateLock ) {
final State preState = state;
switch( state ) {
case Paused:
if( playImpl() ) {
resetAVPTS();
if( null != audioSink ) {
audioSink.play(); // cont. w/ new data
}
if( null != streamWorker ) {
streamWorker.doResume();
}
changeState(0, State.Playing);
}
default:
}
if(DEBUG) { System.err.println("Play: "+preState+" -> "+state+", "+toString()); }
return state;
}
}
protected abstract boolean playImpl();
@Override
public final State pause(final boolean flush) {
return pauseImpl(flush, 0);
}
private final State pauseImpl(final boolean flush, int event_mask) {
synchronized( stateLock ) {
final State preState = state;
if( State.Playing == state ) {
event_mask = addStateEventMask(event_mask, GLMediaPlayer.State.Paused);
setState( State.Paused );
if( null != streamWorker ) {
streamWorker.doPause(true);
}
if( flush ) {
resetAVPTSAndFlush();
} else if( null != audioSink ) {
audioSink.pause();
}
attributesUpdated( event_mask );
if( !pauseImpl() ) {
play();
}
}
if(DEBUG) { System.err.println("Pause: "+preState+" -> "+state+", "+toString()); }
return state;
}
}
protected abstract boolean pauseImpl();
@Override
public final State destroy(final GL gl) {
return destroyImpl(gl, 0);
}
private final State destroyImpl(final GL gl, final int event_mask) {
synchronized( stateLock ) {
if( null != streamWorker ) {
streamWorker.doStop();
streamWorker = null;
}
destroyImpl(gl);
removeAllTextureFrames(gl);
textureCount=0;
changeState(event_mask, State.Uninitialized);
attachedObjects.clear();
return state;
}
}
protected abstract void destroyImpl(GL gl);
@Override
public final int seek(int msec) {
synchronized( stateLock ) {
final State preState = state;
final int pts1;
switch(state) {
case Playing:
case Paused:
final State _state = state;
setState( State.Paused );
if( null != streamWorker ) {
streamWorker.doPause(true);
}
                    // Adjust target: clamp into [0, duration - one frame duration]
if( msec >= duration ) {
msec = duration - (int)Math.floor(frame_duration);
} else if( msec < 0 ) {
msec = 0;
}
pts1 = seekImpl(msec);
resetAVPTSAndFlush();
if( null != audioSink && State.Playing == _state ) {
audioSink.play(); // cont. w/ new data
}
if(DEBUG) {
System.err.println("Seek("+msec+"): "+getPerfString());
}
if( null != streamWorker ) {
streamWorker.doResume();
}
setState( _state );
break;
default:
pts1 = 0;
}
if(DEBUG) { System.err.println("Seek("+msec+"): "+preState+" -> "+state+", "+toString()); }
return pts1;
}
}
protected abstract int seekImpl(int msec);
@Override
public final float getPlaySpeed() { return playSpeed; }
@Override
public final boolean setPlaySpeed(float rate) {
synchronized( stateLock ) {
final float preSpeed = playSpeed;
boolean res = false;
if(State.Uninitialized != state ) {
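                // Reject rates <= 0.01f; snap rates within 1% of 1.0f to exactly 1.0f (normal speed).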
if( rate > 0.01f ) {
if( Math.abs(1.0f - rate) < 0.01f ) {
rate = 1.0f;
}
if( setPlaySpeedImpl(rate) ) {
resetAVPTS();
playSpeed = rate;
res = true;
}
}
}
if(DEBUG) { System.err.println("setPlaySpeed("+rate+"): "+state+", "+preSpeed+" -> "+playSpeed+", "+toString()); }
return res;
}
}
/**
* Override if not using AudioSink, or AudioSink's {@link AudioSink#setPlaySpeed(float)} is not sufficient!
     * <p>
     * AudioSink shall respect <code>!audioSinkPlaySpeedSet</code> to determine <code>data_size</code>
     * at {@link AudioSink#enqueueData(com.jogamp.opengl.util.av.AudioSink.AudioFrame)}.
     * </p>
     */
    protected boolean setPlaySpeedImpl(final float rate) {
        if( null != audioSink ) {
            audioSinkPlaySpeedSet = audioSink.setPlaySpeed(rate);
        }
        // Always true, even if the AudioSink rejects the command, since video sync
        // deals with it and the AudioSink honors audioSinkPlaySpeedSet at enqueueData(..).
        return true;
    }

    /**
     * Shall also take care of {@link AudioSink} initialization if appropriate.
     * @param gl null for audio-only, otherwise a valid and current GL object.
     * @throws IOException
     * @throws GLException
     */
    protected abstract void initGLImpl(GL gl) throws IOException, GLException;

    /**
     * Returns the validated number of textures to be handled.
     * <p>
     * Default is {@link #TEXTURE_COUNT_DEFAULT} minimum textures; if <code>desiredTextureCount</code>
     * is &lt; {@link #TEXTURE_COUNT_MIN}, {@link #TEXTURE_COUNT_MIN} is returned.
     * </p>
     * <p>
     * Implementation must at least return a texture count of {@link #TEXTURE_COUNT_MIN}, i.e. two:
     * the last texture and the decoding texture.
     * </p>
     */
    protected int validateTextureCount(final int desiredTextureCount) {
        return desiredTextureCount < TEXTURE_COUNT_MIN ? TEXTURE_COUNT_MIN : desiredTextureCount;
    }

    protected TextureFrame[] createTexFrames(final GL gl, final int count) {
        final int[] texNames = new int[count];
        gl.glGenTextures(count, texNames, 0);
        final int err = gl.glGetError();
        if( GL.GL_NO_ERROR != err ) {
            throw new RuntimeException("TextureNames creation failed (num: "+count+"): err "+toHexString(err));
        }
        final TextureFrame[] texFrames = new TextureFrame[count];
        for(int i=0; i<count; i++) {
            texFrames[i] = createTexImage(gl, texNames[i]);
        }
        return texFrames;
    }

    /**
     * <p>
     * Video frames shall be ignored, if {@link #getVID()} is {@link #STREAM_ID_NONE}.
     * </p>
     * <p>
     * Audio frames shall be ignored, if {@link #getAID()} is {@link #STREAM_ID_NONE}.
     * </p>
     * <p>
     * Method may be invoked on the StreamWorker decoding thread.
     * </p>
     * <p>
     * Implementation shall care of OpenGL synchronization as required, e.g. glFinish()/glFlush()!
     * </p>
     * @param gl valid and current GL instance, shall be <code>null</code> for audio only.
     * @param nextFrame the {@link TextureFrame} to store the video PTS and texture data,
     *                  shall be <code>null</code> for audio only.
     * @return the last processed video PTS value, maybe {@link TimeFrameI#INVALID_PTS} if video frame is invalid or n/a.
     *         Will be {@link TimeFrameI#END_OF_STREAM_PTS} if end of stream reached.
     * @throws InterruptedException if waiting for next frame fails
     */
protected abstract int getNextTextureImpl(GL gl, TextureFrame nextFrame) throws InterruptedException;
protected final int getNextSingleThreaded(final GL gl, final TextureFrame nextFrame, final boolean[] gotVFrame) throws InterruptedException {
final int pts;
if( STREAM_ID_NONE != vid ) {
preNextTextureImpl(gl);
pts = getNextTextureImpl(gl, nextFrame);
postNextTextureImpl(gl);
if( TimeFrameI.INVALID_PTS != pts ) {
newFrameAvailable(nextFrame, Platform.currentTimeMillis());
gotVFrame[0] = true;
} else {
gotVFrame[0] = false;
}
} else {
// audio only
pts = getNextTextureImpl(null, null);
gotVFrame[0] = false;
}
return pts;
}
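    // Synchronous decode path, used when no StreamWorker thread was created;
    // the StreamWorker below performs the equivalent decode loop asynchronously
    // on its own shared GL context.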
/**
* {@inheritDoc}
     * <p>
     * Note: All {@link AudioSink} operations are performed from {@link GLMediaPlayerImpl},
     * i.e. {@link #play()}, {@link #pause(boolean)}, {@link #seek(int)}, {@link #setPlaySpeed(float)}, {@link #getAudioPTS()}.
     * </p>
     * <p>
     * Implementations using an {@link AudioSink} shall write its instance to {@link #audioSink}
     * from within their {@link #initStreamImpl(int, int)} implementation.
     * </p>
     */
    @Override
    public final AudioSink getAudioSink() { return audioSink; }

    /**
     * To be called from implementation at 1st PTS after start
     * w/ current pts value in milliseconds.
     * @param pts the first audio PTS in milliseconds
     */
    protected void setFirstAudioPTS2SCR(final int pts) {
        if( audioSCR_reset ) {
            audio_scr_t0 = Platform.currentTimeMillis() - pts;
            audioSCR_reset = false;
        }
    }

    private void flushAllVideoFrames() {
        if( null != videoFramesFree ) {
            videoFramesFree.resetFull(videoFramesOrig);
            lastFrame = videoFramesFree.get();
            if( null == lastFrame ) {
                throw new InternalError("XXX");
            }
            videoFramesDecoded.clear();
        }
        cachedFrame = null;
    }

    private void resetAVPTSAndFlush() {
        video_dpts_cum = 0;
        video_dpts_count = 0;
        resetAVPTS();
        flushAllVideoFrames();
        if( null != audioSink ) {
            audioSink.flush();
        }
    }

    private void resetAVPTS() {
        nullFrameCount = 0;
        presentedFrameCount = 0;
        displayedFrameCount = 0;
        decodedFrameCount = 0;
        audioSCR_reset = true;
        videoSCR_reset = true;
    }

    private final int getVideoDPTSAvg() {
        // Scale the exponentially weighted accumulator back to an average; +0.5f rounds.
        return (int) ( video_dpts_cum * (1.0f - VIDEO_DPTS_COEFF) + 0.5f );
    }

    private final void newFrameAvailable(final TextureFrame frame, final long currentTimeMillis) {
        decodedFrameCount++; // safe: only written-to either from stream-worker or user thread
        if( 0 == frame.getDuration() ) { // patch frame duration if not set already
            frame.setDuration( (int) frame_duration );
        }
        synchronized(eventListenersLock) {
            for(final Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
                i.next().newFrameAvailable(this, frame, currentTimeMillis);
            }
        }
    }

    class StreamWorker extends InterruptSource.Thread {
        private volatile boolean isRunning = false;
        private volatile boolean isActive = false;
        private volatile boolean isBlocked = false;

        private volatile boolean shallPause = true;
        private volatile boolean shallStop = false;

        private volatile GLDrawable dummyDrawable = null;
        private volatile GLContext sharedGLCtx = null;
        private boolean sharedGLCtxCurrent = false;

        /**
         * <p>
         * This thread pauses after it's started!
         * </p>
         **/
        StreamWorker() {
            setDaemon(true);
            synchronized(this) {
                start();
                try {
                    this.notifyAll(); // wake-up startup-block
                    while( !isRunning && !shallStop ) {
                        this.wait(); // wait until started
                    }
                } catch (final InterruptedException e) {
                    throw new InterruptedRuntimeException(e);
                }
            }
        }

        private void makeCurrent(final GLContext ctx) {
            if( GLContext.CONTEXT_NOT_CURRENT >= ctx.makeCurrent() ) {
                throw new GLException("Couldn't make ctx current: "+ctx);
            }
        }

        private void destroySharedGL() {
            if( null != sharedGLCtx ) {
                if( sharedGLCtx.isCreated() ) {
                    // Catch dispose GLExceptions by GLEventListener, just 'print' them
                    // so we can continue with the destruction.
                    try {
                        sharedGLCtx.destroy();
                    } catch (final GLException gle) {
                        gle.printStackTrace();
                    }
                }
                sharedGLCtx = null;
            }
            if( null != dummyDrawable ) {
                final AbstractGraphicsDevice device = dummyDrawable.getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
                dummyDrawable.setRealized(false);
                dummyDrawable = null;
                device.close();
            }
        }

        public final synchronized void initGL(final GL gl) {
            final GLContext glCtx = gl.getContext();
            final boolean glCtxCurrent = glCtx.isCurrent();
            final GLProfile glp = gl.getGLProfile();
            final GLDrawableFactory factory = GLDrawableFactory.getFactory(glp);
            final AbstractGraphicsDevice device = glCtx.getGLDrawable().getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
            dummyDrawable = factory.createDummyDrawable(device, true, glCtx.getGLDrawable().getChosenGLCapabilities(), null); // own device!
            dummyDrawable.setRealized(true);
            sharedGLCtx = dummyDrawable.createContext(glCtx);
            makeCurrent(sharedGLCtx);
            if( glCtxCurrent ) {
                makeCurrent(glCtx);
            } else {
                sharedGLCtx.release();
            }
        }

        public final synchronized void doPause(final boolean waitUntilDone) {
            if( isActive ) {
                shallPause = true;
                if( java.lang.Thread.currentThread() != this ) {
                    if( isBlocked && isActive ) {
                        this.interrupt();
                    }
                    if( waitUntilDone ) {
                        try {
                            while( isActive && isRunning ) {
                                this.wait(); // wait until paused
                            }
                        } catch (final InterruptedException e) {
                            throw new InterruptedRuntimeException(e);
                        }
                    }
                }
            }
        }

        public final synchronized void doResume() {
            if( isRunning && !isActive ) {
                shallPause = false;
                if( java.lang.Thread.currentThread() != this ) {
                    try {
                        this.notifyAll(); // wake-up pause-block
                        while( !isActive && !shallPause && isRunning ) {
                            this.wait(); // wait until resumed
                        }
                    } catch (final InterruptedException e) {
                        final InterruptedException e2 = SourcedInterruptedException.wrap(e);
                        doPause(false);
                        throw new InterruptedRuntimeException(e2);
                    }
                }
            }
        }

        public final synchronized void doStop() {
            if( isRunning ) {
                shallStop = true;
                if( java.lang.Thread.currentThread() != this ) {
                    if( isBlocked && isRunning ) {
                        this.interrupt();
                    }
                    try {
                        this.notifyAll(); // wake-up pause-block (opt)
                        while( isRunning ) {
                            this.wait(); // wait until stopped
                        }
                    } catch (final InterruptedException e) {
                        throw new InterruptedRuntimeException(e);
                    }
                }
            }
        }

        public final boolean isRunning() { return isRunning; }
        public final boolean isActive() { return isActive; }

        @Override
        public final void run() {
            setName(getName()+"-StreamWorker_"+StreamWorkerInstanceId);
            StreamWorkerInstanceId++;

            synchronized ( this ) {
                isRunning = true;
                this.notifyAll(); // wake-up ctor()
            }

            while( !shallStop ) {
                TextureFrame nextFrame = null;
                try {
                    if( shallPause ) {
                        synchronized ( this ) {
                            if( sharedGLCtxCurrent ) {
                                postNextTextureImpl(sharedGLCtx.getGL());
                                sharedGLCtx.release();
                            }
                            while( shallPause && !shallStop ) {
                                isActive = false;
                                this.notifyAll(); // wake-up doPause()
                                try {
                                    this.wait(); // wait until resumed
                                } catch (final InterruptedException e) {
                                    if( !shallPause ) {
                                        throw SourcedInterruptedException.wrap(e);
                                    }
                                }
                            }
                            if( sharedGLCtxCurrent ) {
                                makeCurrent(sharedGLCtx);
                                preNextTextureImpl(sharedGLCtx.getGL());
                            }
                            isActive = true;
                            this.notifyAll(); // wake-up doResume()
                        }
                    }
                    if( !sharedGLCtxCurrent && null != sharedGLCtx ) {
                        synchronized ( this ) {
                            if( null != sharedGLCtx ) {
                                makeCurrent( sharedGLCtx );
                                preNextTextureImpl(sharedGLCtx.getGL());
                                sharedGLCtxCurrent = true;
                            }
                            if( null == videoFramesFree ) {
                                throw new InternalError("XXX videoFramesFree is null");
                            }
                        }
                    }

                    if( !shallStop ) {
                        isBlocked = true;
                        final GL gl;
                        if( STREAM_ID_NONE != vid ) {
                            nextFrame = videoFramesFree.getBlocking();
                            nextFrame.setPTS( TimeFrameI.INVALID_PTS ); // mark invalid until processed!
                            gl = sharedGLCtx.getGL();
                        } else {
                            gl = null;
                        }
                        isBlocked = false;
                        final int vPTS = getNextTextureImpl(gl, nextFrame);
                        boolean audioEOS = false;
                        if( TimeFrameI.INVALID_PTS != vPTS ) {
                            if( null != nextFrame ) {
                                if( STREAM_WORKER_DELAY > 0 ) {
                                    java.lang.Thread.sleep(STREAM_WORKER_DELAY);
                                }
                                if( !videoFramesDecoded.put(nextFrame) ) {
                                    throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
                                }
                                newFrameAvailable(nextFrame, Platform.currentTimeMillis());
                                nextFrame = null;
                            } else {
                                // audio only
                                if( TimeFrameI.END_OF_STREAM_PTS == vPTS || ( duration > 0 && duration < vPTS ) ) {
                                    audioEOS = true;
                                } else {
                                    nullFrameCount = 0;
                                }
                            }
                        } else if( null == nextFrame ) {
                            // audio only
                            audioEOS = maxNullFrameCountUntilEOS <= nullFrameCount;
                            if( null == audioSink || 0 == audioSink.getEnqueuedFrameCount() ) {
                                nullFrameCount++;
                            }
                        }
                        if( audioEOS ) {
                            // state transition incl. notification
                            synchronized ( this ) {
                                shallPause = true;
                                isActive = false;
                                this.notifyAll(); // wake-up potential do*()
                            }
                            if( DEBUG ) {
                                System.err.println( "AV-EOS (StreamWorker): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == vPTS)+", "+GLMediaPlayerImpl.this);
                            }
                            pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
                        }
                    }
                } catch (final InterruptedException e) {
                    if( !isBlocked ) { // !shallStop && !shallPause
                        streamErr = new StreamException("InterruptedException while decoding: "+GLMediaPlayerImpl.this.toString(),
                                                        SourcedInterruptedException.wrap(e));
                    }
                    isBlocked = false;
                } catch (final Throwable t) {
                    streamErr = new StreamException(t.getClass().getSimpleName()+" while decoding: "+GLMediaPlayerImpl.this.toString(), t);
                } finally {
                    if( null != nextFrame ) { // put back
                        videoFramesFree.put(nextFrame);
                    }
                    if( null != streamErr ) {
                        if( DEBUG ) {
                            ExceptionUtils.dumpThrowable("handled", streamErr);
                        }
                        // state transition incl. notification
                        synchronized ( this ) {
                            shallPause = true;
                            isActive = false;
                            this.notifyAll(); // wake-up potential do*()
                        }
                        pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_ERR);
                    }
                }
            }
            synchronized ( this ) {
                if( sharedGLCtxCurrent ) {
                    postNextTextureImpl(sharedGLCtx.getGL());
                }
                destroySharedGL();
                isRunning = false;
                isActive = false;
                this.notifyAll(); // wake-up doStop()
            }
        }
    }
    static int StreamWorkerInstanceId = 0;

    private volatile StreamWorker streamWorker = null;
    private volatile StreamException streamErr = null;

    protected final int addStateEventMask(int event_mask, final State newState) {
        if( state != newState ) {
            switch( newState ) {
                case Uninitialized:
                    event_mask |= GLMediaEventListener.EVENT_CHANGE_UNINIT;
                    break;
                case Initialized:
                    event_mask |= GLMediaEventListener.EVENT_CHANGE_INIT;
                    break;
                case Playing:
                    event_mask |= GLMediaEventListener.EVENT_CHANGE_PLAY;
                    break;
                case Paused:
                    event_mask |= GLMediaEventListener.EVENT_CHANGE_PAUSE;
                    break;
            }
        }
        return event_mask;
    }

    protected final void attributesUpdated(final int event_mask) {
        if( 0 != event_mask ) {
            final long now = Platform.currentTimeMillis();
            synchronized(eventListenersLock) {
                for(final Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
                    i.next().attributesChanged(this, event_mask, now);
                }
            }
        }
    }

    /**
     * <p>
     * The latter catches an occurring exception, sets the error state and delivers the error events.
     * </p>
     * <p>
     * Further calls are issued off-thread by the decoder implementation.
     * </p>
     */
    protected final void updateAttributes(int vid, final int aid, final int width, final int height, final int bps_stream,
                                          final int bps_video, final int bps_audio, final float fps,
                                          final int videoFrames, final int audioFrames, final int duration,
                                          final String vcodec, final String acodec) {
        int event_mask = 0;
        final boolean wasUninitialized = state == State.Uninitialized;

        if( wasUninitialized ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_INIT;
            setState( State.Initialized );
        }
        if( STREAM_ID_AUTO == vid ) {
            vid = STREAM_ID_NONE;
        }
        if( this.vid != vid ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_VID;
            this.vid = vid;
        }
        if( this.aid != aid ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_AID;
            this.aid = aid;
        }
        if( this.width != width || this.height != height ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_SIZE;
            this.width = width;
            this.height = height;
        }
        if( this.fps != fps ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_FPS;
            this.fps = fps;
            if( 0 != fps ) {
                this.frame_duration = 1000f / fps;
                final int fdurI = (int)this.frame_duration;
                if( 0 < fdurI ) {
                    this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_MS_UNTIL_EOS / fdurI;
                } else {
                    this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
                }
            } else {
                this.frame_duration = 0;
                this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
            }
        }
        if( this.bps_stream != bps_stream || this.bps_video != bps_video || this.bps_audio != bps_audio ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_BPS;
            this.bps_stream = bps_stream;
            this.bps_video = bps_video;
            this.bps_audio = bps_audio;
        }
        if( this.videoFrames != videoFrames || this.audioFrames != audioFrames || this.duration != duration ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_LENGTH;
            this.videoFrames = videoFrames;
            this.audioFrames = audioFrames;
            this.duration = duration;
        }
        if( null!=acodec && acodec.length()>0 && !this.acodec.equals(acodec) ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_CODEC;
            this.acodec = acodec;
        }
        if( null!=vcodec && vcodec.length()>0 && !this.vcodec.equals(vcodec) ) {
            event_mask |= GLMediaEventListener.EVENT_CHANGE_CODEC;
            this.vcodec = vcodec;
        }
        if( 0==event_mask ) {
            return;
        }
        if( wasUninitialized ) {
            if( null != streamWorker ) {
                throw new InternalError("XXX: StreamWorker not null - "+this);
            }
            if( TEXTURE_COUNT_MIN < textureCount || STREAM_ID_NONE == vid ) { // Enable StreamWorker for 'audio only' as well (Bug 918).
                streamWorker = new StreamWorker();
            }
            if( DEBUG ) {
                System.err.println("XXX Initialize @ updateAttributes: "+this);
            }
        }
        attributesUpdated(event_mask);
    }

    protected void setIsGLOriented(final boolean isGLOriented) {
        if( isInGLOrientation != isGLOriented ) {
            if( DEBUG ) {
                System.err.println("XXX gl-orient "+isInGLOrientation+" -> "+isGLOriented);
            }
            isInGLOrientation = isGLOriented;
            if( null != videoFramesOrig ) {
                for(int i=0; i