/**
 * Copyright 2012-2024 JogAmp Community. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this list of
 *    conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
 *    of conditions and the following disclaimer in the documentation and/or other materials
 *    provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * The views and conclusions contained in the software and documentation are those of the
 * authors and should not be interpreted as representing official policies, either expressed
 * or implied, of JogAmp Community.
 */
package jogamp.opengl.util.av;

import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URLConnection;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

import com.jogamp.nativewindow.AbstractGraphicsDevice;
import com.jogamp.nativewindow.DefaultGraphicsDevice;
import com.jogamp.nativewindow.NativeWindowFactory;
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GL2GL3;
import com.jogamp.opengl.GLContext;
import com.jogamp.opengl.GLDrawable;
import com.jogamp.opengl.GLDrawableFactory;
import com.jogamp.opengl.GLES2;
import com.jogamp.opengl.GLException;
import com.jogamp.opengl.GLProfile;

import jogamp.opengl.Debug;

import com.jogamp.common.net.UriQueryProps;
import com.jogamp.common.nio.Buffers;
import com.jogamp.common.av.AudioSink;
import com.jogamp.common.av.PTS;
import com.jogamp.common.av.TimeFrameI;
import com.jogamp.common.net.Uri;
import com.jogamp.common.os.Clock;
import com.jogamp.common.util.IOUtil;
import com.jogamp.common.util.InterruptSource;
import com.jogamp.common.util.LFRingbuffer;
import com.jogamp.common.util.Ringbuffer;
import com.jogamp.common.util.TSPrinter;
import com.jogamp.common.util.WorkerThread;
import com.jogamp.math.FloatUtil;
import com.jogamp.math.Vec2i;
import com.jogamp.math.Vec4f;
import com.jogamp.opengl.GLExtensions;
import com.jogamp.opengl.util.av.SubtitleEventListener;
import com.jogamp.opengl.util.av.CodecID;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.av.SubTextEvent;
import com.jogamp.opengl.util.av.SubEmptyEvent;
import com.jogamp.opengl.util.av.SubBitmapEvent;
import com.jogamp.opengl.util.glsl.ShaderCode;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureData;
import com.jogamp.opengl.util.texture.TextureIO;
import com.jogamp.opengl.util.texture.TextureSequence;
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

/**
 * After object creation an implementation may customize the behavior:
 * <p>
 * See {@link GLMediaPlayer}.
 * </p>
 */
public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
    private static final int STREAM_WORKER_DELAY = Debug.getIntProperty("jogl.debug.GLMediaPlayer.StreamWorker.delay", false, 0);
    private static final TSPrinter logout;
    private static final String unknown = "unknown";
    static {
        if( DEBUG || DEBUG_AVSYNC || DEBUG_NATIVE ) {
            logout = TSPrinter.stderr();
        } else {
            logout = null;
        }
    }

    private volatile State state;
    private final Object stateLock = new Object();
    private final AtomicBoolean oneVideoFrameOnce = new AtomicBoolean(false);

    private int textureCount;
    private int textureTarget;
    private int textureFormat;
    private int textureInternalFormat;
    private int textureType;
    private int texUnit;
    private int userMaxChannels = -1; // not set
    private int textureFragmentShaderHashCode;

    private final int[] texMinMagFilter = { GL.GL_NEAREST, GL.GL_NEAREST };
    private final int[] texWrapST = { GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE };
    private boolean aRatioLbox = false;
    private final Vec4f aRatioLboxBackColor = new Vec4f();

    /** User requested URI stream location. */
    private Uri streamLoc;
    /**
     * In case {@link #streamLoc} is a {@link GLMediaPlayer#CameraInputScheme},
     * {@link #cameraPath} holds the URI's path portion
     * as parsed in {@link #playStream(Uri, int, int, int, int)}.
     * @see #cameraProps
     */
    protected Uri.Encoded cameraPath = null;
    /** Optional camera properties, see {@link #cameraPath}. */
    protected Map<String, String> cameraProps = null;

    /**
     * {@inheritDoc}
     * <p>
     * This implementation simply sets and returns the built-in function name <code>texture2D</code>,
     * if not overridden by specialization, e.g. using the ffmpeg implementation.
     * </p>
     */
    @Override
public String setTextureLookupFunctionName(final String texLookupFuncName) throws IllegalStateException {
textureLookupFunctionName = "texture2D";
resetTextureFragmentShaderHashCode();
return textureLookupFunctionName;
}
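// Note: this base implementation ignores the requested texLookupFuncName and
// always installs the built-in texture2D; specializations (e.g. the ffmpeg
// implementation) may honor the argument instead.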
/**
 * {@inheritDoc}
 * <p>
 * This implementation simply returns the built-in function name of <code>texture2D</code>,
 * if not overridden by specialization, e.g. using the ffmpeg implementation.
 * </p>
 */
@Override
public final String getTextureLookupFunctionName() {
return textureLookupFunctionName;
}
/**
 * {@inheritDoc}
 * <p>
 * This implementation simply returns an empty string, since it uses
 * the built-in function <code>texture2D</code>,
 * if not overridden by specialization.
 * </p>
 */
@Override
public String getTextureLookupFragmentShaderImpl() {
return "";
}
@Override
public String getTextureFragmentShaderHashID() {
// return getTextureSampler2DType()+";"+getTextureLookupFunctionName()+";"+getTextureLookupFragmentShaderImpl();
return getTextureSampler2DType()+";"+getTextureLookupFunctionName();
}
@Override
public final int getTextureFragmentShaderHashCode() {
if( State.Uninitialized == state ) {
resetTextureFragmentShaderHashCode();
return 0;
} else if( 0 == textureFragmentShaderHashCode ) {
final int hash = getTextureFragmentShaderHashID().hashCode();
textureFragmentShaderHashCode = hash;
}
return textureFragmentShaderHashCode;
}
protected final void resetTextureFragmentShaderHashCode() { textureFragmentShaderHashCode = 0; }
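// The hash above is intended as a key for caching compiled fragment shaders across
// players sharing the same sampler type and lookup function. A minimal caller-side
// sketch; the cache and createFragShader() helper are hypothetical, not part of this class:
//
//   final Map<Integer, ShaderCode> fragShaderCache = new HashMap<>();
//   final int key = player.getTextureFragmentShaderHashCode();
//   ShaderCode fs = fragShaderCache.get(key);
//   if( null == fs ) {
//       fs = createFragShader(player); // hypothetical: builds shader via getTextureLookupFragmentShaderImpl()
//       fragShaderCache.put(key, fs);
//   }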
@Override
public final int getDecodedFrameCount() { return decodedFrameCount; }
@Override
public final int getPresentedFrameCount() { return presentedFrameCount; }
@Override
public final PTS getPTS() { return av_scr_cpy; }
@Override
public final int getVideoPTS() { return video_pts_last.getCurrent(); }
@Override
public final int getAudioPTS() {
if( State.Uninitialized != state && null != audioSink ) {
return audioSink.getPTS().getCurrent();
}
return 0;
}
/** Override if not using audioSink! */
protected PTS getAudioPTSImpl() {
if( null != audioSink ) {
return audioSink.getPTS();
} else {
return dummy_audio_pts;
}
}
/** Override if not using audioSink! */
protected PTS getUpdatedAudioPTS() {
if( null != audioSink ) {
return audioSink.updateQueue();
} else {
return dummy_audio_pts;
}
}
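/** Fallback PTS used when no {@link AudioSink} is present; its speed supplier reports the play speed while playing, zero otherwise. */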
private final PTS dummy_audio_pts = new PTS( () -> { return State.Playing == state ? playSpeed : 0f; } );
/** Override if not using audioSink! */
protected int getAudioQueuedDuration() {
if( null != audioSink ) {
return (int)(audioSink.getQueuedDuration()*1000f); // seconds -> milliseconds
} else {
return 0;
}
}
/** Override if not using audioSink! */
protected int getLastBufferedAudioPTS() {
if( null != audioSink ) {
return audioSink.getLastBufferedPTS();
} else {
return 0;
}
}
@Override
public final State getState() { return state; }
protected final void setState(final State s) { state=s; }
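// 'state' is volatile for lock-free reads; all transitions below are serialized via stateLock.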
@Override
public final State resume() {
synchronized( stateLock ) {
final State preState = state;
if( State.Paused == state ) {
if( resumeImpl() ) {
if( null != audioSink ) {
audioSink.play(); // cont. w/ new data
}
if( null != streamWorker ) {
streamWorker.resume();
}
changeState(new GLMediaPlayer.EventMask(), State.Playing);
{
final int _pending_seek = pending_seek;
pending_seek = -1;
if( 0 <= _pending_seek ) {
this.seek(_pending_seek);
}
}
}
}
if(DEBUG) { logout.println("Play: "+preState+" -> "+state+", "+toString()); }
return state;
}
}
protected abstract boolean resumeImpl();
@Override
public final State pause(final boolean flush) {
return pauseImpl(flush, new GLMediaPlayer.EventMask());
}
private final State pauseImpl(final boolean flush, GLMediaPlayer.EventMask eventMask) {
synchronized( stateLock ) {
final State preState = state;
if( State.Playing == state ) {
eventMask = addStateEventMask(eventMask, State.Paused);
setState( State.Paused );
if( null != streamWorker ) {
streamWorker.pause(true);
}
if( flush ) {
resetAVPTSAndFlush(false);
} else if( null != audioSink ) {
audioSink.pause();
}
attributesUpdated( eventMask );
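// if the implementation refuses to pause, roll the state machine back to Playing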
if( !pauseImpl() ) {
resume();
}
}
if(DEBUG) { logout.println("Pause: "+preState+" -> "+state+", "+toString()); }
return state;
}
}
protected abstract boolean pauseImpl();
@Override
public final State stop() {
synchronized( stateLock ) {
final State preState = state;
if( null != streamWorker ) {
streamWorker.stop(true);
streamWorker = null;
}
resetAVPTSAndFlush(true);
stopImpl();
changeState(new GLMediaPlayer.EventMask(), State.Uninitialized);
// attachedObjects.clear();
if(DEBUG) { logout.println("Stop: "+preState+" -> "+state+", "+toString()); }
return state;
}
}
protected abstract void stopImpl();
@Override
public final State destroy(final GL gl) {
return destroyImpl(gl, new GLMediaPlayer.EventMask(), true);
}
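// Unlike stop(), destroy() additionally releases all GL texture frames and clears attached objects.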
private final State destroyImpl(final GL gl, final GLMediaPlayer.EventMask eventMask, final boolean wait) {
synchronized( stateLock ) {
if( null != streamWorker ) {
streamWorker.stop(wait);
streamWorker = null;
}
resetAVPTSAndFlush(true);
destroyImpl();
removeAllTextureFrames(gl);
lastFrame = null;
textureCount=0;
changeState(eventMask, State.Uninitialized);
attachedObjects.clear();
return state;
}
}
protected abstract void destroyImpl();
@Override
public final int seek(int msec) {
final int pts1;
final State preState;
synchronized( stateLock ) {
preState = state;
switch(state) {
case Playing:
case Paused:
final State _state = state;
setState( State.Paused );
// Adjust target ..
if( msec >= duration ) {
msec = duration - (int)Math.floor(frame_duration);
} else if( msec < 0 ) {
msec = 0;
}
if( null != streamWorker ) {
streamWorker.pause(true);
}
pts1 = seekImpl(msec);
resetAVPTSAndFlush(false);
if( null != audioSink && State.Playing == _state ) {
audioSink.play(); // cont. w/ new data
}
if(DEBUG) {
logout.println("Seek("+msec+"): "+getPerfString());
}
if( null != streamWorker ) {
streamWorker.resume();
}
setState( _state );
attributesUpdated(new GLMediaPlayer.EventMask(GLMediaPlayer.EventMask.Bit.Seek));
break;
default:
pending_seek = msec;
pts1 = 0;
}
}
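// allow a single video frame to be presented even while paused, so the seek target becomes visible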
oneVideoFrameOnce.set(true);
if(DEBUG) { logout.println("Seek("+msec+"): "+preState+" -> "+state+", "+toString()); }
return pts1;
}
protected int pending_seek = -1;
protected abstract int seekImpl(int msec);
@Override
public final float getPlaySpeed() { return playSpeed; }
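/** Snaps rates below 0.01 to 0.0f and rates within 0.01 of 1.0f to exactly 1.0f; all other values pass through. */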
private static final float clipZeroOneAllowMax(final float v) {
if( v < 0.01f ) {
return 0.0f;
} else if( Math.abs(1.0f - v) < 0.01f ) {
return 1.0f;
}
return v;
}
@Override
public final boolean setPlaySpeed(float rate) {
synchronized( stateLock ) {
final float preSpeed = playSpeed;
boolean res = false;
rate = clipZeroOneAllowMax(rate);
if( rate > 0.1f ) {
if(State.Uninitialized != state ) {
if( setPlaySpeedImpl(rate) ) {
resetAVPTS();
playSpeed = rate;
res = true;
}
} else {
// earmark ..
playSpeed = rate;
res = true;
}
}
if(DEBUG) { logout.println("setPlaySpeed("+rate+"): "+state+", "+preSpeed+" -> "+playSpeed+", "+toString()); }
return res;
}
}
/**
 * Override if not using AudioSink, or AudioSink's {@link AudioSink#setPlaySpeed(float)} is not sufficient!
 * <p>
 * AudioSink shall respect <code>!audioSinkPlaySpeedSet</code> to determine <code>data_size</code>
 * at {@link AudioSink#enqueueData(int, ByteBuffer, int)}.
 * </p>
 * <p>
 * Shall also take care of {@link AudioSink} initialization if appropriate.
 * </p>
 * @param gl null for audio-only, otherwise a valid and current GL object.
 * @throws IOException
 * @throws GLException
 */
protected abstract void initGLImpl(GL gl) throws IOException, GLException;

/**
 * Returns the validated number of textures to be handled.
 * <p>
 * Default is {@link #TEXTURE_COUNT_DEFAULT} minimum textures; if <code>desiredTextureCount</code>
 * is &lt; {@link #TEXTURE_COUNT_MIN}, {@link #TEXTURE_COUNT_MIN} is returned.
 * </p>
 * <p>
 * Implementation must at least return a texture count of {@link #TEXTURE_COUNT_MIN}, two,
 * the last texture and the decoding texture.
 * </p>
 */
protected int validateTextureCount(final int desiredTextureCount) {
    return desiredTextureCount < TEXTURE_COUNT_MIN ? TEXTURE_COUNT_MIN : desiredTextureCount;
}

protected TextureFrame[] createTexFrames(final GL gl, final int count) {
    final int[] texNames = new int[count];
    gl.glGenTextures(count, texNames, 0);
    final int err = gl.glGetError();
    if( GL.GL_NO_ERROR != err ) {
        throw new RuntimeException("TextureNames creation failed (num: "+count+"): err "+toHexString(err));
    }
    final TextureFrame[] texFrames = new TextureFrame[count];
    for(int i=0; i<count; i++) {
        texFrames[i] = createTexImage(gl, texNames[i]);
    }
    return texFrames;
}

/**
 * <p>
 * Video frames shall be ignored, if {@link #getVID()} is {@link #STREAM_ID_NONE}.
 * </p>
 * <p>
 * Audio frames shall be ignored, if {@link #getAID()} is {@link #STREAM_ID_NONE}.
 * </p>
 * <p>
 * Subtitle frames shall be ignored, if {@link #getSID()} is {@link #STREAM_ID_NONE}.
 * </p>
 * <p>
 * Method may be invoked on the StreamWorker decoding thread.
 * </p>
 * <p>
 * Implementation shall care of OpenGL synchronization as required, e.g. glFinish()/glFlush()!
 * </p>
 * @param gl valid and current GL instance, shall be <code>null</code> for audio only.
 * @param vFrame next video {@link TextureFrame} to store the video PTS and texture data,
 *               shall be <code>null</code> for audio only.
 * @param sTex {@link Texture} instance as bitmap subtitle target element.
 *             May be {@code null} for no desired bitmap subtitle.
 * @param sTexUsed Result value. If the {@link Texture} {@code sTex} is used and {@link #pushSubtitleTex(Object, int, int, int, int, int, int, int, int, int)},
 *                 {@code true} must be written into {@code sTexUsed}.
 * @return the last processed video PTS value, maybe {@link TimeFrameI#INVALID_PTS} if video frame is invalid or n/a.
 *         Will be {@link TimeFrameI#END_OF_STREAM_PTS} if end of stream reached.
 * @throws InterruptedException if waiting for next frame fails
 */
protected abstract int getNextTextureImpl(GL gl, TextureFrame vFrame, Texture sTex, boolean[] sTexUsed) throws InterruptedException;
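/**
 * Single-threaded variant of the StreamWorker decode step: decodes on the caller's thread,
 * returns an unused subtitle texture to the free queue and signals listeners via
 * {@link #newVideoFrameAvailable(TextureFrame, long)} when a valid video frame was produced.
 */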
protected final int getNextSingleThreaded(final GL gl, final TextureFrame nextVFrame, final boolean[] gotVFrame) throws InterruptedException {
final int pts;
if( STREAM_ID_NONE != vid ) {
preNextTextureImpl(gl);
final boolean[] sTexUsed = { false };
final Texture subTex = ( null != subTexFree && STREAM_ID_NONE != sid ) ? subTexFree.get() : null;
pts = getNextTextureImpl(gl, nextVFrame, subTex, sTexUsed);
postNextTextureImpl(gl);
if( null != subTex && !sTexUsed[0] ) {
subTexFree.putBlocking(subTex); // return unused
}
if( TimeFrameI.INVALID_PTS != pts ) {
newVideoFrameAvailable(nextVFrame, Clock.currentMillis());
gotVFrame[0] = true;
} else {
gotVFrame[0] = false;
}
} else {
// audio only
pts = getNextTextureImpl(null, null, null, null);
gotVFrame[0] = false;
}
return pts;
}
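// Subtitle texture ownership: a Texture taken from 'subTexFree' must either be consumed
// by the decoder (sTexUsed[0] == true; later released through subTexRelease) or returned
// via putBlocking(), otherwise the free queue would eventually drain.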
/**
* {@inheritDoc}
 * <p>
 * Note: All {@link AudioSink} operations are performed from {@link GLMediaPlayerImpl},
 * i.e. {@link #resume()}, {@link #pause(boolean)}, {@link #seek(int)}, {@link #setPlaySpeed(float)}, {@link #getAudioPTS()}.
 * </p>
 * <p>
 * Implementations using an {@link AudioSink} shall write its instance to {@link #audioSink}
 * from within their {@link #initStreamImpl(int, String, int, String, int)} implementation.
 * </p>
 */
@Override
public final AudioSink getAudioSink() { return audioSink; }

private void flushAllVideoFrames() {
    if( null != videoFramesFree ) {
        videoFramesFree.resetFull(videoFramesOrig);
        lastFrame = videoFramesFree.get();
        if( null == lastFrame ) { throw new InternalError("XXX"); }
        videoFramesDecoded.clear();
    }
    cachedFrame = null;
}
private void resetAVPTSAndFlush(final boolean set_scr_cpy) {
    resetSCR(av_scr);
    if( set_scr_cpy ) {
        av_scr_cpy.set(av_scr);
    }
    audio_queued_last_ms = 0;
    audio_dequeued_last = 0;
    resetAVPTS();
    flushAllVideoFrames();
    if( null != audioSink ) {
        audioSink.flush();
    }
}
private void resetSCR(final PTS pts) {
    av_scr.set(pts);
    audio_dpts_cum = 0;
    audio_dpts_count = 0;
    video_dpts_cum = 0;
    video_dpts_count = 0;
}
private void resetAVPTS() {
    nullFrameCount = 0;
    presentedFrameCount = 0;
    displayedFrameCount = 0;
    decodedFrameCount = 0;
    video_scr_reset = true;
    audio_scr_reset = true;
}
private static final int getDPTSAvg(final float dpts_cum, final int dpts_count) {
    final int dpts_avg = (int) ( dpts_cum * (1.0f - AV_DPTS_COEFF) + 0.5f );
    final int dpts_avg_diff = dpts_count >= AV_DPTS_NUM ? dpts_avg : 0;
    return dpts_avg_diff;
}
private final void newVideoFrameAvailable(final TextureFrame frame, final long currentMillis) {
    decodedFrameCount++; // safe: only written-to either from stream-worker or user thread
    if( 0 == frame.getDuration() ) { // patch frame duration if not set already
        frame.setDuration( (int) frame_duration );
    }
    synchronized(frameListenerLock) {
        final int sz = frameListener.size();
        for(int i=0; i<sz; i++) {
            frameListener.get(i).newFrameAvailable(this, frame, currentMillis);
        }
    }
}

class StreamWorker {
    private final WorkerThread wt; // drives the asynchronous decode loop

    /**
     * <p>
     * This thread pauses after it's started!
     * </p>
     **/
    StreamWorker() {
        wt.start( true );
    }
}
private volatile StreamWorker streamWorker = null;
private StreamException streamErr = null;

private static final boolean singleDEBUG = false;
private static final Object singleLock = new Object();
private static DefaultGraphicsDevice singleDevice = null;
private static DefaultGraphicsDevice singleOwner = null;
private static int singleCount = 0;

protected final void pushSound(final ByteBuffer sampleData, final int data_size, final int audio_pts) {
    if( audioStreamEnabled() ) {
        audioSink.enqueueData( audio_pts, sampleData, data_size);
    }
}
protected final void pushSubtitleEmpty(final int start_display_pts, final int end_display_pts) {
    if( null != subEventListener ) {
        subEventListener.run( new SubEmptyEvent(start_display_pts, end_display_pts) );
    }
}
protected final void pushSubtitleText(final String text, final int start_display_pts, final int end_display_pts) {
    if( null != subEventListener ) {
        subEventListener.run( new SubTextEvent(this.scodecID, getLang(getSID()), SubTextEvent.TextFormat.TEXT, text, start_display_pts, end_display_pts) );
    }
}
protected final void pushSubtitleASS(final String ass, final int start_display_pts, final int end_display_pts) {
    if( null != subEventListener ) {
        subEventListener.run( new SubTextEvent(this.scodecID, getLang(getSID()), SubTextEvent.TextFormat.ASS, ass, start_display_pts, end_display_pts) );
    }
}
/** {@link GLMediaPlayerImpl#pushSubtitleTex(Object, int, int, int, int, int, int, int, int, int)} */
private final SubBitmapEvent.TextureOwner subTexRelease = new SubBitmapEvent.TextureOwner() {
    @Override
    public void release(final Texture subTex) {
        if( null != subTexFree && null != subTex ) { // return unused
            try {
                subTexFree.putBlocking(subTex);
                if( subDEBUG ) {
                    System.err.println("GLMediaPlayer: Released SubTex: sid "+sid+", free "+subTexFree+", subTex "+subTex);
                }
            } catch (final InterruptedException e) {
                throw new InternalError("GLMediaPlayer.SubTexRelease: Release failed, all full: sid "+sid+", free "+subTexFree+", subTex "+subTex+", "+GLMediaPlayerImpl.this, e);
            }
        }
    }
};
protected final void pushSubtitleTex(final Object texObj, final int texID, final int texWidth, final int texHeight,
                                     final int x, final int y, final int width, final int height,
                                     final int start_display_pts, final int end_display_pts) {
    final Texture subTex = (Texture)texObj;
    if( null != subTex ) {
        subTex.set(texWidth, texHeight, width, height);
    }
    if( null != subEventListener ) {
        subEventListener.run( new SubBitmapEvent(this.scodecID, getLang(getSID()), new Vec2i(x, y), new Vec2i(width, height),
                                                 subTex, start_display_pts, end_display_pts, subTexRelease) );
    } else {
        subTexRelease.release(subTex); // release right away
    }
}
protected final GLMediaPlayer.EventMask addStateEventMask(final GLMediaPlayer.EventMask eventMask, final State newState) {
    if( state != newState ) {
        switch( newState ) {
            case Uninitialized:
                eventMask.setBit(GLMediaPlayer.EventMask.Bit.Uninit);
                break;
            case Initialized:
                eventMask.setBit(GLMediaPlayer.EventMask.Bit.Init);
                break;
            case Playing:
                eventMask.setBit(GLMediaPlayer.EventMask.Bit.Play);
                break;
            case Paused:
                eventMask.setBit(GLMediaPlayer.EventMask.Bit.Pause);
                break;
        }
    }
    return eventMask;
}
protected final void attributesUpdated(final GLMediaPlayer.EventMask eventMask) {
    if( !eventMask.isZero() ) {
        final long now = Clock.currentMillis();
        if( DEBUG ) {
            logout.println("GLMediaPlayer.AttributesChanged: "+eventMask+", state "+state+", when "+now);
        }
        synchronized(eventListenerLock) {
            final int sz = eventListener.size();
            for(int i=0; i<sz; i++) {
                eventListener.get(i).attributesChanged(this, eventMask, now);
            }
        }
    }
}

/**
 * <p>
 * Further calls are issued off-thread by the decoder implementation.
 * </p>
 */
protected final void updateAttributes(final String title,
                                      final int[] v_streams, final String[] v_langs, int vid,
                                      final int[] a_streams, final String[] a_langs, int aid,
                                      final int[] s_streams, final String[] s_langs, int sid,
                                      final int width, final int height,
                                      final int bps_stream, final int bps_video, final int bps_audio,
                                      final float fps, final int videoFrames, final int audioFrames, final int duration,
                                      final String vcodec, final String acodec, final String scodec,
                                      final int ffmpegVCodecID, final int ffmpegACodecID, final int ffmpegSCodecID) {
    final GLMediaPlayer.EventMask eventMask = new GLMediaPlayer.EventMask();
    final boolean wasUninitialized = state == State.Uninitialized;

    if( wasUninitialized ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.Init);
        setState( State.Initialized );
    }
    if( null == title ) {
        final String basename;
        final String s = getUri().path.decode();
        final int li = s.lastIndexOf('/');
        if( 0 < li ) {
            basename = s.substring(li+1);
        } else {
            basename = s;
        }
        final int di = basename.lastIndexOf('.');
        if( 0 < di ) {
            this.title = basename.substring(0, di);
        } else {
            this.title = basename;
        }
    } else {
        this.title = title;
    }
    this.v_streams = v_streams;
    this.v_langs = v_langs;
    this.a_streams = a_streams;
    this.a_langs = a_langs;
    this.s_streams = s_streams;
    this.s_langs = s_langs;
    if( STREAM_ID_AUTO == vid || 0 == v_streams.length ) {
        vid = STREAM_ID_NONE;
    }
    if( this.vid != vid ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.VID);
        this.vid = vid;
    }
    if( STREAM_ID_AUTO == aid || 0 == a_streams.length ) {
        aid = STREAM_ID_NONE;
    }
    if( this.aid != aid ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.AID);
        this.aid = aid;
    }
    if( STREAM_ID_AUTO == sid || 0 == s_streams.length ) {
        sid = STREAM_ID_NONE;
    }
    if( this.sid != sid ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.SID);
        this.sid = sid;
    }
    if( this.width != width || this.height != height ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.Size);
        this.width = width;
        this.height = height;
    }
    if( this.fps != fps ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.FPS);
        this.fps = fps;
        if( 0 != fps ) {
            this.frame_duration = 1000f / fps;
            final int fdurI = (int)this.frame_duration;
            if( 0 < fdurI ) {
                this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_MS_UNTIL_EOS / fdurI;
            } else {
                this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
            }
        } else {
            this.frame_duration = 0;
            this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
        }
    }
    if( this.bps_stream != bps_stream || this.bps_video != bps_video || this.bps_audio != bps_audio ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.BPS);
        this.bps_stream = bps_stream;
        this.bps_video = bps_video;
        this.bps_audio = bps_audio;
    }
    if( this.videoFrames != videoFrames || this.audioFrames != audioFrames || this.duration != duration ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.Length);
        this.videoFrames = videoFrames;
        this.audioFrames = audioFrames;
        this.duration = duration;
    }
    if( (null!=acodec && acodec.length()>0 && !this.acodec.equals(acodec)) ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.ACodec);
        this.acodec = acodec;
    }
    if( (null!=vcodec && vcodec.length()>0 && !this.vcodec.equals(vcodec)) ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.VCodec);
        this.vcodec = vcodec;
    }
    if( (null!=scodec && scodec.length()>0 && !this.scodec.equals(scodec)) ) {
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
        eventMask.setBit(GLMediaPlayer.EventMask.Bit.SCodec);
        this.scodec = scodec;
    }
    final CodecID acodecID = CodecID.fromFFmpeg(ffmpegACodecID);
    final CodecID vcodecID = CodecID.fromFFmpeg(ffmpegVCodecID);
    final CodecID scodecID = CodecID.fromFFmpeg(ffmpegSCodecID);
    if( (0