Diffstat (limited to 'src/jogl/classes/jogamp/opengl/util/av')
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java                  77
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java                 1598
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java                 229
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java                      129
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java                  137
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java 439
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java            1029
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java                 281
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java            41
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java               78
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java               78
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv10Natives.java               78
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java              130
13 files changed, 3376 insertions(+), 948 deletions(-)
diff --git a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
index 31f13297b..c39b78bb8 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
@@ -3,14 +3,14 @@
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
- *
+ *
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
- *
+ *
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
- *
+ *
* THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
@@ -20,7 +20,7 @@
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
+ *
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of JogAmp Community.
@@ -44,43 +44,40 @@ import jogamp.opengl.egl.EGLExt;
public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
final protected TextureType texType;
final protected boolean useKHRSync;
-
+
public enum TextureType {
- GL(0), KHRImage(1);
-
+ GL(0), KHRImage(1);
+
public final int id;
TextureType(int id){
this.id = id;
}
- }
-
+ }
+
public static class EGLTextureFrame extends TextureSequence.TextureFrame {
-
+
public EGLTextureFrame(Buffer clientBuffer, Texture t, long khrImage, long khrSync) {
super(t);
this.clientBuffer = clientBuffer;
this.image = khrImage;
this.sync = khrSync;
}
-
+
public final Buffer getClientBuffer() { return clientBuffer; }
- public final long getImage() { return image; }
+ public final long getImage() { return image; }
public final long getSync() { return sync; }
-
+
+ @Override
public String toString() {
- return "EGLTextureFrame[" + texture + ", img "+ image + ", sync "+ sync+", clientBuffer "+clientBuffer+"]";
+ return "EGLTextureFrame[pts " + pts + " ms, l " + duration + " ms, texID "+ texture.getTextureObject() + ", img "+ image + ", sync "+ sync+", clientBuffer "+clientBuffer+"]";
}
protected final Buffer clientBuffer;
protected final long image;
protected final long sync;
}
-
- protected EGLMediaPlayerImpl() {
- this(TextureType.GL, false);
- }
-
+
protected EGLMediaPlayerImpl(TextureType texType, boolean useKHRSync) {
super();
this.texType = texType;
@@ -88,36 +85,36 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
}
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- final Texture texture = super.createTexImageImpl(gl, idx, tex, width, height, false);
+ protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+ final Texture texture = super.createTexImageImpl(gl, texName, width, height);
final Buffer clientBuffer;
final long image;
final long sync;
- final boolean eglUsage = TextureType.KHRImage == texType || useKHRSync ;
+ final boolean eglUsage = TextureType.KHRImage == texType || useKHRSync ;
final EGLContext eglCtx;
final EGLExt eglExt;
final EGLDrawable eglDrawable;
-
+
if(eglUsage) {
eglCtx = (EGLContext) gl.getContext();
eglExt = eglCtx.getEGLExt();
- eglDrawable = (EGLDrawable) eglCtx.getGLDrawable();
+ eglDrawable = (EGLDrawable) eglCtx.getGLDrawable();
} else {
eglCtx = null;
eglExt = null;
eglDrawable = null;
}
-
+
if(TextureType.KHRImage == texType) {
IntBuffer nioTmp = Buffers.newDirectIntBuffer(1);
// create EGLImage from texture
clientBuffer = null; // FIXME
nioTmp.put(0, EGL.EGL_NONE);
- image = eglExt.eglCreateImageKHR( eglDrawable.getDisplay(), eglCtx.getHandle(),
+ image = eglExt.eglCreateImageKHR( eglDrawable.getNativeSurface().getDisplayHandle(), eglCtx.getHandle(),
EGLExt.EGL_GL_TEXTURE_2D_KHR,
clientBuffer, nioTmp);
if (0==image) {
- throw new RuntimeException("EGLImage creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", tex "+tex[idx]+", err "+toHexString(EGL.eglGetError()));
+ throw new RuntimeException("EGLImage creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", tex "+texName+", err "+toHexString(EGL.eglGetError()));
}
} else {
clientBuffer = null;
@@ -125,12 +122,12 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
}
if(useKHRSync) {
- int[] tmp = new int[1];
+ IntBuffer tmp = Buffers.newDirectIntBuffer(1);
// Create sync object so that we can be sure that gl has finished
// rendering the EGLImage texture before we tell OpenMAX to fill
// it with a new frame.
- tmp[0] = EGL.EGL_NONE;
- sync = eglExt.eglCreateSyncKHR(eglDrawable.getDisplay(), EGLExt.EGL_SYNC_FENCE_KHR, tmp, 0);
+ tmp.put(0, EGL.EGL_NONE);
+ sync = eglExt.eglCreateSyncKHR(eglDrawable.getNativeSurface().getDisplayHandle(), EGLExt.EGL_SYNC_FENCE_KHR, tmp);
if (0==sync) {
throw new RuntimeException("EGLSync creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", err "+toHexString(EGL.eglGetError()));
}
@@ -139,31 +136,31 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
}
return new EGLTextureFrame(clientBuffer, texture, image, sync);
}
-
+
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- final boolean eglUsage = TextureType.KHRImage == texType || useKHRSync ;
+ protected void destroyTexFrame(GL gl, TextureSequence.TextureFrame frame) {
+ final boolean eglUsage = TextureType.KHRImage == texType || useKHRSync ;
final EGLContext eglCtx;
final EGLExt eglExt;
final EGLDrawable eglDrawable;
-
+
if(eglUsage) {
eglCtx = (EGLContext) gl.getContext();
eglExt = eglCtx.getEGLExt();
- eglDrawable = (EGLDrawable) eglCtx.getGLDrawable();
+ eglDrawable = (EGLDrawable) eglCtx.getGLDrawable();
} else {
eglCtx = null;
eglExt = null;
eglDrawable = null;
}
- final EGLTextureFrame eglTex = (EGLTextureFrame) imgTex;
-
+ final EGLTextureFrame eglTex = (EGLTextureFrame) frame;
+
if(0!=eglTex.getImage()) {
- eglExt.eglDestroyImageKHR(eglDrawable.getDisplay(), eglTex.getImage());
+ eglExt.eglDestroyImageKHR(eglDrawable.getNativeSurface().getDisplayHandle(), eglTex.getImage());
}
if(0!=eglTex.getSync()) {
- eglExt.eglDestroySyncKHR(eglDrawable.getDisplay(), eglTex.getSync());
+ eglExt.eglDestroySyncKHR(eglDrawable.getNativeSurface().getDisplayHandle(), eglTex.getSync());
}
- super.destroyTexImage(gl, imgTex);
+ super.destroyTexFrame(gl, frame);
}
}
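
Note on the EGLMediaPlayerImpl hunks above: the patch replaces the int[]-plus-offset
attribute lists with direct NIO buffers and resolves the EGL display through the
drawable's native surface instead of eglDrawable.getDisplay(). A minimal sketch of
that per-frame KHR fence pattern, using only the calls shown in the hunks; the
helper names are illustrative and it assumes com.jogamp.common.nio.Buffers and the
jogamp.opengl.egl classes imported by this file:

    static long createFrameFence(final EGLExt eglExt, final EGLDrawable eglDrawable) {
        // Empty attribute list, passed as a direct buffer as in the patch above.
        final IntBuffer attribs = Buffers.newDirectIntBuffer(1);
        attribs.put(0, EGL.EGL_NONE);
        final long dpy = eglDrawable.getNativeSurface().getDisplayHandle();
        final long sync = eglExt.eglCreateSyncKHR(dpy, EGLExt.EGL_SYNC_FENCE_KHR, attribs);
        if (0 == sync) {
            throw new RuntimeException("EGLSync creation failed, err 0x"+Integer.toHexString(EGL.eglGetError()));
        }
        return sync;
    }

    static void destroyFrameFence(final EGLExt eglExt, final EGLDrawable eglDrawable, final long sync) {
        if (0 != sync) { // fence is optional, only created when useKHRSync is set
            eglExt.eglDestroySyncKHR(eglDrawable.getNativeSurface().getDisplayHandle(), sync);
        }
    }

As in destroyTexFrame above, destruction tolerates a zero handle, since the fence
only exists when the player was constructed with useKHRSync.
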
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index d3d45e692..7cea51dc8 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -3,14 +3,14 @@
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
- *
+ *
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
- *
+ *
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
- *
+ *
* THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
@@ -20,7 +20,7 @@
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
+ *
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of JogAmp Community.
@@ -28,331 +28,670 @@
package jogamp.opengl.util.av;
import java.io.IOException;
-import java.net.URLConnection;
+import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.Map;
+import javax.media.nativewindow.AbstractGraphicsDevice;
import javax.media.opengl.GL;
import javax.media.opengl.GL2;
+import javax.media.opengl.GLContext;
+import javax.media.opengl.GLDrawable;
+import javax.media.opengl.GLDrawableFactory;
import javax.media.opengl.GLES2;
import javax.media.opengl.GLException;
+import javax.media.opengl.GLProfile;
+
+import jogamp.opengl.Debug;
+import com.jogamp.common.net.URIQueryProps;
+import com.jogamp.common.os.Platform;
+import com.jogamp.common.util.LFRingbuffer;
+import com.jogamp.common.util.Ringbuffer;
+import com.jogamp.opengl.GLExtensions;
+import com.jogamp.opengl.util.TimeFrameI;
+import com.jogamp.opengl.util.av.AudioSink;
import com.jogamp.opengl.util.av.GLMediaPlayer;
+import com.jogamp.opengl.util.glsl.ShaderCode;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
+import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
/**
* After object creation an implementation may customize the behavior:
* <ul>
- * <li>{@link #setTextureCount(int)}</li>
+ * <li>{@link #setDesTextureCount(int)}</li>
* <li>{@link #setTextureTarget(int)}</li>
* <li>{@link EGLMediaPlayerImpl#setEGLTexImageAttribs(boolean, boolean)}.</li>
* </ul>
- *
+ *
* <p>
* See {@link GLMediaPlayer}.
* </p>
*/
public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
+ private static final int STREAM_WORKER_DELAY = Debug.getIntProperty("jogl.debug.GLMediaPlayer.StreamWorker.delay", false, 0);
protected static final String unknown = "unknown";
- protected State state;
+ protected volatile State state;
+ private final Object stateLock = new Object();
+
protected int textureCount;
protected int textureTarget;
protected int textureFormat;
+ protected int textureInternalFormat;
protected int textureType;
protected int texUnit;
-
-
+
+
protected int[] texMinMagFilter = { GL.GL_NEAREST, GL.GL_NEAREST };
protected int[] texWrapST = { GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE };
-
- protected URLConnection urlConn = null;
-
- protected float playSpeed = 1.0f;
-
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+
+ /** User requested URI stream location. */
+ protected URI streamLoc = null;
+ /**
+ * In case {@link #streamLoc} is a {@link GLMediaPlayer#CameraInputScheme},
+ * {@link #cameraPath} holds the URI's path portion
+ * as parsed in {@link #initStream(URI, int, int, int)}.
+ * @see #cameraProps
+ */
+ protected String cameraPath = null;
+ /** Optional camera properties, see {@link #cameraPath}. */
+ protected Map<String, String> cameraProps = null;
+
+ protected volatile float playSpeed = 1.0f;
+ protected float audioVolume = 1.0f;
+
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ protected int vid = GLMediaPlayer.STREAM_ID_AUTO;
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ protected int aid = GLMediaPlayer.STREAM_ID_AUTO;
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int width = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int height = 0;
- /** Video fps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Video avg. fps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected float fps = 0;
- /** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Video avg. frame duration in ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ protected float frame_duration = 0f;
+ /** Stream bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_stream = 0;
- /** Video bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Video bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_video = 0;
- /** Audio bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Audio bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_audio = 0;
- /** In frames. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
- protected int totalFrames = 0;
- /** In ms. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ protected int videoFrames = 0;
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ protected int audioFrames = 0;
+ /** In ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int duration = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected String acodec = unknown;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected String vcodec = unknown;
-
- protected int frameNumber = 0;
-
- protected TextureSequence.TextureFrame[] texFrames = null;
- protected HashMap<Integer, TextureSequence.TextureFrame> texFrameMap = new HashMap<Integer, TextureSequence.TextureFrame>();
- private ArrayList<GLMediaEventListener> eventListeners = new ArrayList<GLMediaEventListener>();
+
+ protected volatile int decodedFrameCount = 0;
+ protected int presentedFrameCount = 0;
+ protected int displayedFrameCount = 0;
+ protected volatile int video_pts_last = 0;
+
+ /**
+ * Help detect EOS, limit is {@link #MAX_FRAMELESS_MS_UNTIL_EOS}.
+ * To be used either by getNextTexture(..) or StreamWorker for audio-only.
+ */
+ private int nullFrameCount = 0;
+ private int maxNullFrameCountUntilEOS = 0;
+ /**
+ * Help detect EOS, limit {@value} milliseconds without a valid frame.
+ */
+ private static final int MAX_FRAMELESS_MS_UNTIL_EOS = 5000;
+ private static final int MAX_FRAMELESS_UNTIL_EOS_DEFAULT = MAX_FRAMELESS_MS_UNTIL_EOS / 30; // default value assuming 30fps
+
+ /** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initStreamImpl(int, int)}! */
+ protected AudioSink audioSink = null;
+ protected boolean audioSinkPlaySpeedSet = false;
+
+ /** System Clock Reference (SCR) of first audio PTS at start time. */
+ private long audio_scr_t0 = 0;
+ private boolean audioSCR_reset = true;
+
+ /** System Clock Reference (SCR) of first video frame at start time. */
+ private long video_scr_t0 = 0;
+ /** System Clock Reference (SCR) PTS offset, i.e. first video PTS at start time. */
+ private int video_scr_pts = 0;
+ /** Cumulative video pts diff. */
+ private float video_dpts_cum = 0;
+ /** Cumulative video frames. */
+ private int video_dpts_count = 0;
+ /** Number of min frame count required for video cumulative sync. */
+ private static final int VIDEO_DPTS_NUM = 20;
+ /** Cumulative coefficient, value {@value}. */
+ private static final float VIDEO_DPTS_COEFF = 0.7943282f; // (float) Math.exp(Math.log(0.01) / VIDEO_DPTS_NUM);
+ /** Maximum valid video pts diff. */
+ private static final int VIDEO_DPTS_MAX = 5000; // 5s max diff
+ /** Trigger video PTS reset with given cause as bitfield. */
+ private boolean videoSCR_reset = false;
+
+ protected TextureFrame[] videoFramesOrig = null;
+ protected Ringbuffer<TextureFrame> videoFramesFree = null;
+ protected Ringbuffer<TextureFrame> videoFramesDecoded = null;
+ protected volatile TextureFrame lastFrame = null;
+ /**
+ * @see #isGLOriented()
+ */
+ protected boolean isInGLOrientation = false;
+
+ private final ArrayList<GLMediaEventListener> eventListeners = new ArrayList<GLMediaEventListener>();
protected GLMediaPlayerImpl() {
- this.textureCount=3;
+ this.textureCount=0;
this.textureTarget=GL.GL_TEXTURE_2D;
this.textureFormat = GL.GL_RGBA;
- this.textureType = GL.GL_UNSIGNED_BYTE;
+ this.textureInternalFormat = GL.GL_RGBA;
+ this.textureType = GL.GL_UNSIGNED_BYTE;
this.texUnit = 0;
this.state = State.Uninitialized;
}
@Override
- public void setTextureUnit(int u) { texUnit = u; }
-
+ public final void setTextureUnit(int u) { texUnit = u; }
+
@Override
- public int getTextureUnit() { return texUnit; }
-
- protected final void setTextureCount(int textureCount) {
- this.textureCount=textureCount;
- }
+ public final int getTextureUnit() { return texUnit; }
+
+ @Override
+ public final int getTextureTarget() { return textureTarget; }
+
@Override
public final int getTextureCount() { return textureCount; }
-
+
protected final void setTextureTarget(int target) { textureTarget=target; }
- protected final void setTextureFormat(int f) { textureFormat=f; }
+ protected final void setTextureFormat(int internalFormat, int format) {
+ textureInternalFormat=internalFormat;
+ textureFormat=format;
+ }
protected final void setTextureType(int t) { textureType=t; }
+ @Override
public final void setTextureMinMagFilter(int[] minMagFilter) { texMinMagFilter[0] = minMagFilter[0]; texMinMagFilter[1] = minMagFilter[1];}
+ @Override
public final int[] getTextureMinMagFilter() { return texMinMagFilter; }
-
- public final void setTextureWrapST(int[] wrapST) { texWrapST[0] = wrapST[0]; texWrapST[1] = wrapST[1];}
- public final int[] getTextureWrapST() { return texWrapST; }
@Override
- public final TextureSequence.TextureFrame getLastTexture() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
- return getLastTextureImpl();
- }
- protected abstract TextureSequence.TextureFrame getLastTextureImpl();
-
+ public final void setTextureWrapST(int[] wrapST) { texWrapST[0] = wrapST[0]; texWrapST[1] = wrapST[1];}
@Override
- public final synchronized TextureSequence.TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
- if(State.Playing == state) {
- final TextureSequence.TextureFrame f = getNextTextureImpl(gl, blocking);
- return f;
+ public final int[] getTextureWrapST() { return texWrapST; }
+
+ private final void checkGLInit() {
+ if(State.Uninitialized == state || State.Initialized == state ) {
+ throw new IllegalStateException("GL not initialized: "+this);
}
- return getLastTextureImpl();
}
- protected abstract TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking);
-
+
@Override
public String getRequiredExtensionsShaderStub() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
if(GLES2.GL_TEXTURE_EXTERNAL_OES == textureTarget) {
- return TextureSequence.GL_OES_EGL_image_external_Required_Prelude;
+ return ShaderCode.createExtensionDirective(GLExtensions.OES_EGL_image_external, ShaderCode.ENABLE);
}
return "";
}
-
+
@Override
public String getTextureSampler2DType() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
switch(textureTarget) {
case GL.GL_TEXTURE_2D:
- case GL2.GL_TEXTURE_RECTANGLE:
+ case GL2.GL_TEXTURE_RECTANGLE:
return TextureSequence.sampler2D;
case GLES2.GL_TEXTURE_EXTERNAL_OES:
return TextureSequence.samplerExternalOES;
default:
- throw new GLException("Unsuported texture target: "+toHexString(textureTarget));
+ throw new GLException("Unsuported texture target: "+toHexString(textureTarget));
}
}
-
+
/**
* {@inheritDoc}
- *
+ *
* This implementation simply returns the build-in function name of <code>texture2D</code>,
* if not overridden by specialization.
*/
@Override
public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
return "texture2D";
}
-
+
/**
* {@inheritDoc}
- *
- * This implementation simply returns an empty string since it's using
+ *
+ * This implementation simply returns an empty string since it's using
* the build-in function <code>texture2D</code>,
* if not overridden by specialization.
*/
@Override
public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
- return "";
+ checkGLInit();
+ return "";
}
-
+
@Override
- public final synchronized float getPlaySpeed() {
- return playSpeed;
- }
-
+ public final int getDecodedFrameCount() { return decodedFrameCount; }
+
@Override
- public final synchronized void setPlaySpeed(float rate) {
- if(State.Uninitialized != state && setPlaySpeedImpl(rate)) {
- playSpeed = rate;
+ public final int getPresentedFrameCount() { return presentedFrameCount; }
+
+ @Override
+ public final int getVideoPTS() { return video_pts_last; }
+
+ @Override
+ public final int getAudioPTS() {
+ if( State.Uninitialized != state ) {
+ return getAudioPTSImpl();
}
- if(DEBUG) { System.err.println("SetPlaySpeed: "+toString()); }
+ return 0;
}
- protected abstract boolean setPlaySpeedImpl(float rate);
-
- public final State start() {
- switch(state) {
- case Stopped:
- case Paused:
- if(startImpl()) {
- state = State.Playing;
- }
+ /** Override if not using audioSink! */
+ protected int getAudioPTSImpl() {
+ if( null != audioSink ) {
+ return audioSink.getPTS();
+ } else {
+ return 0;
}
- if(DEBUG) { System.err.println("Start: "+toString()); }
- return state;
}
- protected abstract boolean startImpl();
-
- public final State pause() {
- if(State.Playing == state && pauseImpl()) {
- state = State.Paused;
+
+ @Override
+ public final State getState() { return state; }
+
+ @Override
+ public final State play() {
+ synchronized( stateLock ) {
+ final State preState = state;
+ switch( state ) {
+ case Paused:
+ if( playImpl() ) {
+ resetAVPTS();
+ if( null != audioSink ) {
+ audioSink.play(); // cont. w/ new data
+ }
+ if( null != streamWorker ) {
+ streamWorker.doResume();
+ }
+ changeState(0, State.Playing);
+ }
+ default:
+ }
+ if(DEBUG) { System.err.println("Play: "+preState+" -> "+state+", "+toString()); }
+ return state;
}
- if(DEBUG) { System.err.println("Pause: "+toString()); }
- return state;
}
- protected abstract boolean pauseImpl();
-
- public final State stop() {
- switch(state) {
- case Playing:
- case Paused:
- if(stopImpl()) {
- state = State.Stopped;
+ protected abstract boolean playImpl();
+
+ @Override
+ public final State pause(boolean flush) {
+ return pauseImpl(flush, 0);
+ }
+ private final State pauseImpl(boolean flush, int event_mask) {
+ synchronized( stateLock ) {
+ final State preState = state;
+ if( State.Playing == state ) {
+ event_mask = addStateEventMask(event_mask, GLMediaPlayer.State.Paused);
+ state = State.Paused;
+ if( null != streamWorker ) {
+ streamWorker.doPause();
+ }
+ if( flush ) {
+ resetAVPTSAndFlush();
+ } else if( null != audioSink ) {
+ audioSink.pause();
}
+ attributesUpdated( event_mask );
+ if( !pauseImpl() ) {
+ play();
+ }
+ }
+ if(DEBUG) { System.err.println("Pause: "+preState+" -> "+state+", "+toString()); }
+ return state;
}
- if(DEBUG) { System.err.println("Stop: "+toString()); }
- return state;
}
- protected abstract boolean stopImpl();
-
+ protected abstract boolean pauseImpl();
+
@Override
- public final int getCurrentPosition() {
- if(State.Uninitialized != state) {
- return getCurrentPositionImpl();
+ public final State destroy(GL gl) {
+ return destroyImpl(gl, 0);
+ }
+ private final State destroyImpl(GL gl, int event_mask) {
+ synchronized( stateLock ) {
+ if( null != streamWorker ) {
+ streamWorker.doStop();
+ streamWorker = null;
+ }
+ destroyImpl(gl);
+ removeAllTextureFrames(gl);
+ textureCount=0;
+ changeState(event_mask, State.Uninitialized);
+ attachedObjects.clear();
+ return state;
}
- return 0;
}
- protected abstract int getCurrentPositionImpl();
-
+ protected abstract void destroyImpl(GL gl);
+
+ @Override
public final int seek(int msec) {
- final int cp;
- switch(state) {
- case Stopped:
- case Playing:
- case Paused:
- cp = seekImpl(msec);
- break;
- default:
- cp = 0;
+ synchronized( stateLock ) {
+ final State preState = state;
+ final int pts1;
+ switch(state) {
+ case Playing:
+ case Paused:
+ final State _state = state;
+ state = State.Paused;
+ if( null != streamWorker ) {
+ streamWorker.doPause();
+ }
+ // Adjust target ..
+ if( msec >= duration ) {
+ msec = duration - (int)Math.floor(frame_duration);
+ } else if( msec < 0 ) {
+ msec = 0;
+ }
+ pts1 = seekImpl(msec);
+ resetAVPTSAndFlush();
+ if( null != audioSink && State.Playing == _state ) {
+ audioSink.play(); // cont. w/ new data
+ }
+ if(DEBUG) {
+ System.err.println("Seek("+msec+"): "+getPerfString());
+ }
+ if( null != streamWorker ) {
+ streamWorker.doResume();
+ }
+ state = _state;
+ break;
+ default:
+ pts1 = 0;
+ }
+ if(DEBUG) { System.err.println("Seek("+msec+"): "+preState+" -> "+state+", "+toString()); }
+ return pts1;
}
- if(DEBUG) { System.err.println("Seek("+msec+"): "+toString()); }
- return cp;
}
protected abstract int seekImpl(int msec);
-
- public final State getState() { return state; }
-
- @Override
- public final State initGLStream(GL gl, URLConnection urlConn) throws IllegalStateException, GLException, IOException {
- if(State.Uninitialized != state) {
- throw new IllegalStateException("Instance not in state "+State.Uninitialized+", but "+state+", "+this);
- }
- this.urlConn = urlConn;
- if (this.urlConn != null) {
- try {
- if(null != gl) {
- if(null!=texFrames) {
- // re-init ..
- removeAllImageTextures(gl);
- } else {
- texFrames = new TextureSequence.TextureFrame[textureCount];
- }
- final int[] tex = new int[textureCount];
- {
- gl.glGenTextures(textureCount, tex, 0);
- final int err = gl.glGetError();
- if( GL.GL_NO_ERROR != err ) {
- throw new RuntimeException("TextureNames creation failed (num: "+textureCount+"): err "+toHexString(err));
- }
+
+ @Override
+ public final float getPlaySpeed() {
+ return playSpeed;
+ }
+
+ @Override
+ public final boolean setPlaySpeed(float rate) {
+ synchronized( stateLock ) {
+ final float preSpeed = playSpeed;
+ boolean res = false;
+ if(State.Uninitialized != state ) {
+ if( rate > 0.01f ) {
+ if( Math.abs(1.0f - rate) < 0.01f ) {
+ rate = 1.0f;
}
- initGLStreamImpl(gl, tex);
-
- for(int i=0; i<textureCount; i++) {
- final TextureSequence.TextureFrame tf = createTexImage(gl, i, tex);
- texFrames[i] = tf;
- texFrameMap.put(tex[i], tf);
+ if( setPlaySpeedImpl(rate) ) {
+ resetAVPTS();
+ playSpeed = rate;
+ res = true;
}
}
- state = State.Stopped;
- return state;
- } catch (Throwable t) {
- throw new GLException("Error initializing GL resources", t);
}
+ if(DEBUG) { System.err.println("setPlaySpeed("+rate+"): "+state+", "+preSpeed+" -> "+playSpeed+", "+toString()); }
+ return res;
+ }
+ }
+ /**
+ * Override if not using AudioSink, or AudioSink's {@link AudioSink#setPlaySpeed(float)} is not sufficient!
+ * <p>
+ * AudioSink shall respect <code>!audioSinkPlaySpeedSet</code> to determine data_size
+ * at {@link AudioSink#enqueueData(com.jogamp.opengl.util.av.AudioSink.AudioFrame)}.
+ * </p>
+ */
+ protected boolean setPlaySpeedImpl(float rate) {
+ if( null != audioSink ) {
+ audioSinkPlaySpeedSet = audioSink.setPlaySpeed(rate);
+ }
+ // still true, even if audioSink rejects command since we deal w/ video sync
+ // and AudioSink w/ audioSinkPlaySpeedSet at enqueueData(..).
+ return true;
+ }
+
+ @Override
+ public final float getAudioVolume() {
+ getAudioVolumeImpl();
+ return audioVolume;
+ }
+ /**
+ * Override if not using AudioSink, or AudioSink's {@link AudioSink#getVolume()} is not sufficient!
+ */
+ protected void getAudioVolumeImpl() {
+ if( null != audioSink ) {
+ audioVolume = audioSink.getVolume();
+ }
+ }
+
+ @Override
+ public boolean setAudioVolume(float v) {
+ synchronized( stateLock ) {
+ final float preVolume = audioVolume;
+ boolean res = false;
+ if(State.Uninitialized != state ) {
+ if( Math.abs(v) < 0.01f ) {
+ v = 0.0f;
+ } else if( Math.abs(1.0f - v) < 0.01f ) {
+ v = 1.0f;
+ }
+ if( setAudioVolumeImpl(v) ) {
+ audioVolume = v;
+ res = true;
+ }
+ }
+ if(DEBUG) { System.err.println("setAudioVolume("+v+"): "+state+", "+preVolume+" -> "+audioVolume+", "+toString()); }
+ return res;
}
- return state;
}
-
/**
- * Implementation shall set the following set of data here
- * @param gl TODO
- * @param texNames TODO
+ * Override if not using AudioSink, or AudioSink's {@link AudioSink#setVolume(float)} is not sufficient!
+ */
+ protected boolean setAudioVolumeImpl(float v) {
+ if( null != audioSink ) {
+ return audioSink.setVolume(v);
+ }
+ // still true, even if audioSink rejects command ..
+ return true;
+ }
+
+ @Override
+ public final void initStream(URI streamLoc, final int vid, final int aid, int reqTextureCount) throws IllegalStateException, IllegalArgumentException {
+ synchronized( stateLock ) {
+ if(State.Uninitialized != state) {
+ throw new IllegalStateException("Instance not in state unintialized: "+this);
+ }
+ if(null == streamLoc) {
+ throw new IllegalArgumentException("streamLock is null");
+ }
+ if( STREAM_ID_NONE != vid ) {
+ textureCount = validateTextureCount(reqTextureCount);
+ if( textureCount < TEXTURE_COUNT_MIN ) {
+ throw new InternalError("Validated texture count < "+TEXTURE_COUNT_MIN+": "+textureCount);
+ }
+ } else {
+ textureCount = 0;
+ }
+ decodedFrameCount = 0;
+ presentedFrameCount = 0;
+ displayedFrameCount = 0;
+ nullFrameCount = 0;
+ maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
+ this.streamLoc = streamLoc;
+
+ // Pre-parse for camera-input scheme
+ cameraPath = null;
+ cameraProps = null;
+ final String streamLocScheme = streamLoc.getScheme();
+ if( null != streamLocScheme && streamLocScheme.equals(CameraInputScheme) ) {
+ final String rawPath = streamLoc.getRawPath();
+ if( null != rawPath && rawPath.length() > 0 ) {
+ // cut-off root fwd-slash
+ cameraPath = rawPath.substring(1);
+ final URIQueryProps props = URIQueryProps.create(streamLoc, ';');
+ cameraProps = props.getProperties();
+ } else {
+ throw new IllegalArgumentException("Camera path is empty: "+streamLoc.toString());
+ }
+ }
+
+ this.vid = vid;
+ this.aid = aid;
+ if ( this.streamLoc != null ) {
+ new Thread() {
+ public void run() {
+ try {
+ // StreamWorker may be used, see API-doc of StreamWorker
+ initStreamImpl(vid, aid);
+ } catch (Throwable t) {
+ streamErr = new StreamException(t.getClass().getSimpleName()+" while initializing: "+GLMediaPlayerImpl.this.toString(), t);
+ changeState(GLMediaEventListener.EVENT_CHANGE_ERR, GLMediaPlayer.State.Uninitialized);
+ } // also initializes width, height, .. etc
+ }
+ }.start();
+ }
+ }
+ }
+ /**
+ * Implementation shall set the following set of data here
+ * @see #vid
+ * @see #aid
* @see #width
* @see #height
* @see #fps
* @see #bps_stream
- * @see #totalFrames
+ * @see #videoFrames
+ * @see #audioFrames
* @see #acodec
* @see #vcodec
*/
- protected abstract void initGLStreamImpl(GL gl, int[] texNames) throws IOException;
-
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- return new TextureSequence.TextureFrame( createTexImageImpl(gl, idx, tex, width, height, false) );
+ protected abstract void initStreamImpl(int vid, int aid) throws Exception;
+
+ @Override
+ public final StreamException getStreamException() {
+ final StreamException e;
+ synchronized( stateLock ) {
+ e = streamErr;
+ streamErr = null;
+ }
+ return e;
}
-
- protected Texture createTexImageImpl(GL gl, int idx, int[] tex, int tWidth, int tHeight, boolean mustFlipVertically) {
- if( 0 > tex[idx] ) {
- throw new RuntimeException("TextureName "+toHexString(tex[idx])+" invalid.");
+
+ @Override
+ public final void initGL(GL gl) throws IllegalStateException, StreamException, GLException {
+ synchronized( stateLock ) {
+ if(State.Initialized != state ) {
+ throw new IllegalStateException("Stream not in state initialized: "+this);
+ }
+ if( null != streamWorker ) {
+ final StreamException streamInitErr = getStreamException();
+ if( null != streamInitErr ) {
+ streamWorker = null; // already terminated!
+ destroy(null);
+ throw streamInitErr;
+ }
+ }
+ try {
+ if( STREAM_ID_NONE != vid ) {
+ removeAllTextureFrames(gl);
+ initGLImpl(gl);
+ if(DEBUG) {
+ System.err.println("initGLImpl.X "+this);
+ }
+ videoFramesOrig = createTexFrames(gl, textureCount);
+ if( TEXTURE_COUNT_MIN == textureCount ) {
+ videoFramesFree = null;
+ videoFramesDecoded = null;
+ lastFrame = videoFramesOrig[0];
+ } else {
+ videoFramesFree = new LFRingbuffer<TextureFrame>(videoFramesOrig);
+ videoFramesDecoded = new LFRingbuffer<TextureFrame>(TextureFrame[].class, textureCount);
+ lastFrame = videoFramesFree.getBlocking( );
+ }
+ if( null != streamWorker ) {
+ streamWorker.initGL(gl);
+ }
+ } else {
+ removeAllTextureFrames(null);
+ initGLImpl(null);
+ setTextureFormat(-1, -1);
+ setTextureType(-1);
+ videoFramesOrig = null;
+ videoFramesFree = null;
+ videoFramesDecoded = null;
+ lastFrame = null;
+ }
+ changeState(0, State.Paused);
+ } catch (Throwable t) {
+ destroyImpl(gl, GLMediaEventListener.EVENT_CHANGE_ERR); // -> GLMediaPlayer.State.Uninitialized
+ throw new GLException("Error initializing GL resources", t);
+ }
+ }
+ }
+ /**
+ * Shall initialize all GL related resources, if not audio-only.
+ * <p>
+ * Shall also take care of {@link AudioSink} initialization if appropriate.
+ * </p>
+ * @param gl null for audio-only, otherwise a valid and current GL object.
+ * @throws IOException
+ * @throws GLException
+ */
+ protected abstract void initGLImpl(GL gl) throws IOException, GLException;
+
+ /**
+ * Returns the validated number of textures to be handled.
+ * <p>
+ * Default is {@link #TEXTURE_COUNT_DEFAULT} minimum textures, if <code>desiredTextureCount</code>
+ * is < {@link #TEXTURE_COUNT_MIN}, {@link #TEXTURE_COUNT_MIN} is returned.
+ * </p>
+ * <p>
+ * Implementation must at least return a texture count of {@link #TEXTURE_COUNT_MIN}, <i>two</i>, the last texture and the decoding texture.
+ * </p>
+ */
+ protected int validateTextureCount(int desiredTextureCount) {
+ return desiredTextureCount < TEXTURE_COUNT_MIN ? TEXTURE_COUNT_MIN : desiredTextureCount;
+ }
+
+ protected TextureFrame[] createTexFrames(GL gl, final int count) {
+ final int[] texNames = new int[count];
+ gl.glGenTextures(count, texNames, 0);
+ final int err = gl.glGetError();
+ if( GL.GL_NO_ERROR != err ) {
+ throw new RuntimeException("TextureNames creation failed (num: "+count+"): err "+toHexString(err));
+ }
+ final TextureFrame[] texFrames = new TextureFrame[count];
+ for(int i=0; i<count; i++) {
+ texFrames[i] = createTexImage(gl, texNames[i]);
+ }
+ return texFrames;
+ }
+ protected abstract TextureFrame createTexImage(GL gl, int texName);
+
+ protected final Texture createTexImageImpl(GL gl, int texName, int tWidth, int tHeight) {
+ if( 0 > texName ) {
+ throw new RuntimeException("TextureName "+toHexString(texName)+" invalid.");
}
gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
- gl.glBindTexture(textureTarget, tex[idx]);
+ gl.glBindTexture(textureTarget, texName);
{
final int err = gl.glGetError();
if( GL.GL_NO_ERROR != err ) {
- throw new RuntimeException("Couldn't bind textureName "+toHexString(tex[idx])+" to 2D target, err "+toHexString(err));
+ throw new RuntimeException("Couldn't bind textureName "+toHexString(texName)+" to 2D target, err "+toHexString(err));
}
}
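
Note on the VIDEO_DPTS_* fields introduced in the hunk above: the A/V drift
estimation is an exponential moving average. Each rendered frame folds its
video-PTS delta in via video_dpts_cum = d_vpts + VIDEO_DPTS_COEFF * video_dpts_cum,
and VIDEO_DPTS_COEFF = exp(ln(0.01)/VIDEO_DPTS_NUM) makes a sample's weight decay
to 1% after 20 frames. Because the geometric weights sum to 1/(1 - COEFF),
multiplying by (1 - COEFF) recovers the average, which is exactly what
getVideoDPTSAvg() does further below. A self-contained sketch of the arithmetic
(the 40 ms drift value is illustrative, not from the patch):

    public class VideoDptsAvgDemo {
        static final int   NUM   = 20;
        static final float COEFF = (float) Math.exp(Math.log(0.01) / NUM); // ~0.7943282f

        public static void main(String[] args) {
            float cum = 0f;
            for (int i = 0; i < 100; i++) {
                final int d_vpts = 40;          // pretend video runs 40 ms ahead of SCR
                cum = d_vpts + COEFF * cum;     // same update as video_dpts_cum
            }
            final int avg = (int) (cum * (1.0f - COEFF) + 0.5f); // as getVideoDPTSAvg()
            System.out.println("avg drift ~ "+avg+" ms");        // converges to 40
        }
    }
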
@@ -361,9 +700,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
gl.glTexImage2D(
textureTarget, // target
0, // level
- GL.GL_RGBA, // internal format
- tWidth, // width
- tHeight, // height
+ textureInternalFormat, // internal format
+ tWidth, // width
+ tHeight, // height
0, // border
textureFormat,
textureType,
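
Note on the hunk above: the texture upload is now parameterized — internal format,
pixel format and pixel type are no longer hard-coded GL_RGBA/GL_UNSIGNED_BYTE but
taken from the fields set via setTextureFormat(int,int) and setTextureType(int).
An illustrative initGLImpl override (not taken from this patch) showing how a
decoder subclass might select them:

    @Override
    protected void initGLImpl(GL gl) throws IOException, GLException {
        // Tightly packed 8-bit RGBA upload; a YUV-producing decoder would differ.
        setTextureFormat(GL.GL_RGBA, GL.GL_RGBA); // internal format, pixel format
        setTextureType(GL.GL_UNSIGNED_BYTE);
        // ... initialize decoder output buffers and the AudioSink here ...
    }
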
@@ -371,55 +710,691 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
{
final int err = gl.glGetError();
if( GL.GL_NO_ERROR != err ) {
- throw new RuntimeException("Couldn't create TexImage2D RGBA "+tWidth+"x"+tHeight+", err "+toHexString(err));
+ throw new RuntimeException("Couldn't create TexImage2D RGBA "+tWidth+"x"+tHeight+", target "+toHexString(textureTarget)+
+ ", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+
+ ", err "+toHexString(err));
}
}
if(DEBUG) {
System.err.println("Created TexImage2D RGBA "+tWidth+"x"+tHeight+", target "+toHexString(textureTarget)+
- ", ifmt "+toHexString(GL.GL_RGBA)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType));
+ ", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType));
}
}
gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_MIN_FILTER, texMinMagFilter[0]);
- gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_MAG_FILTER, texMinMagFilter[1]);
+ gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_MAG_FILTER, texMinMagFilter[1]);
gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_S, texWrapST[0]);
gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_T, texWrapST[1]);
-
- return com.jogamp.opengl.util.texture.TextureIO.newTexture(tex[idx],
- textureTarget,
+
+ return com.jogamp.opengl.util.texture.TextureIO.newTexture(
+ texName, textureTarget,
tWidth, tHeight,
width, height,
- mustFlipVertically);
- }
-
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- imgTex.getTexture().destroy(gl);
- }
-
- protected void removeAllImageTextures(GL gl) {
- if(null != texFrames) {
- for(int i=0; i<textureCount; i++) {
- final TextureSequence.TextureFrame imgTex = texFrames[i];
- if(null != imgTex) {
- destroyTexImage(gl, imgTex);
+ !isInGLOrientation);
+ }
+
+ protected void destroyTexFrame(GL gl, TextureFrame frame) {
+ frame.getTexture().destroy(gl);
+ }
+
+ @Override
+ public final TextureFrame getLastTexture() throws IllegalStateException {
+ if( State.Paused != state && State.Playing != state ) {
+ throw new IllegalStateException("Instance not paused or playing: "+this);
+ }
+ return lastFrame;
+ }
+
+ private final void removeAllTextureFrames(GL gl) {
+ final TextureFrame[] texFrames = videoFramesOrig;
+ videoFramesOrig = null;
+ videoFramesFree = null;
+ videoFramesDecoded = null;
+ lastFrame = null;
+ if( null != texFrames ) {
+ for(int i=0; i<texFrames.length; i++) {
+ final TextureFrame frame = texFrames[i];
+ if(null != frame) {
+ if( null != gl ) {
+ destroyTexFrame(gl, frame);
+ }
texFrames[i] = null;
}
+ if( DEBUG ) {
+ System.err.println(Thread.currentThread().getName()+"> Clear TexFrame["+i+"]: "+frame+" -> null");
+ }
+ }
+ }
+ }
+
+ protected TextureFrame cachedFrame = null;
+ protected long lastTimeMillis = 0;
+
+ private final boolean[] stGotVFrame = { false };
+
+ @Override
+ public final TextureFrame getNextTexture(GL gl) throws IllegalStateException {
+ synchronized( stateLock ) {
+ if( State.Paused != state && State.Playing != state ) {
+ throw new IllegalStateException("Instance not paused or playing: "+this);
+ }
+ if(State.Playing == state) {
+ boolean dropFrame = false;
+ try {
+ do {
+ final boolean droppedFrame;
+ if( dropFrame ) {
+ presentedFrameCount--;
+ dropFrame = false;
+ droppedFrame = true;
+ } else {
+ droppedFrame = false;
+ }
+ final boolean playCached = null != cachedFrame;
+ final int video_pts;
+ final boolean hasVideoFrame;
+ TextureFrame nextFrame;
+ if( playCached ) {
+ nextFrame = cachedFrame;
+ cachedFrame = null;
+ presentedFrameCount--;
+ video_pts = nextFrame.getPTS();
+ hasVideoFrame = true;
+ } else {
+ if( null != videoFramesDecoded ) {
+ // multi-threaded and video available
+ nextFrame = videoFramesDecoded.get();
+ if( null != nextFrame ) {
+ video_pts = nextFrame.getPTS();
+ hasVideoFrame = true;
+ } else {
+ video_pts = TimeFrameI.INVALID_PTS;
+ hasVideoFrame = false;
+ }
+ } else {
+ // single-threaded or audio-only
+ video_pts = getNextSingleThreaded(gl, lastFrame, stGotVFrame);
+ nextFrame = lastFrame;
+ hasVideoFrame = stGotVFrame[0];
+ }
+ }
+ final long currentTimeMillis = Platform.currentTimeMillis();
+
+ if( TimeFrameI.END_OF_STREAM_PTS == video_pts ||
+ ( duration > 0 && duration <= video_pts ) || maxNullFrameCountUntilEOS <= nullFrameCount )
+ {
+ // EOS
+ if( DEBUG ) {
+ System.err.println( "AV-EOS (getNextTexture): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == video_pts)+", "+this);
+ }
+ pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
+
+ } else if( TimeFrameI.INVALID_PTS == video_pts ) { // no audio or video frame
+ if( null == videoFramesDecoded || !videoFramesDecoded.isEmpty() ) {
+ nullFrameCount++;
+ }
+ if( DEBUG ) {
+ final int audio_pts = getAudioPTSImpl();
+ final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
+ final int d_apts;
+ if( audio_pts != TimeFrameI.INVALID_PTS ) {
+ d_apts = audio_pts - audio_scr;
+ } else {
+ d_apts = 0;
+ }
+ final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed );
+ final int d_vpts = video_pts - video_scr;
+ System.err.println( "AV~: dT "+(currentTimeMillis-lastTimeMillis)+", nullFrames "+nullFrameCount+
+ getPerfStringImpl( video_scr, video_pts, d_vpts, audio_scr, audio_pts, d_apts, 0 ) + ", droppedFrame "+droppedFrame);
+ }
+ } else { // valid pts: has audio or video frame
+ nullFrameCount=0;
+
+ if( hasVideoFrame ) { // has video frame
+ presentedFrameCount++;
+
+ final int audio_pts = getAudioPTSImpl();
+ final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
+ final int d_apts;
+ if( audio_pts != TimeFrameI.INVALID_PTS ) {
+ d_apts = audio_pts - audio_scr;
+ } else {
+ d_apts = 0;
+ }
+
+ final int frame_period_last = video_pts - video_pts_last; // rendering loop interrupted ?
+ if( videoSCR_reset || frame_period_last > frame_duration*10 ) {
+ videoSCR_reset = false;
+ video_scr_t0 = currentTimeMillis;
+ video_scr_pts = video_pts;
+ }
+ final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed );
+ final int d_vpts = video_pts - video_scr;
+ // final int d_avpts = d_vpts - d_apts;
+ if( -VIDEO_DPTS_MAX > d_vpts || d_vpts > VIDEO_DPTS_MAX ) {
+ // if( -VIDEO_DPTS_MAX > d_avpts || d_avpts > VIDEO_DPTS_MAX ) {
+ if( DEBUG ) {
+ System.err.println( "AV*: dT "+(currentTimeMillis-lastTimeMillis)+", "+
+ getPerfStringImpl( video_scr, video_pts, d_vpts, audio_scr, audio_pts, d_apts, 0 ) + ", "+nextFrame+", playCached " + playCached+ ", dropFrame "+dropFrame);
+ }
+ } else {
+ final int dpy_den = displayedFrameCount > 0 ? displayedFrameCount : 1;
+ final int avg_dpy_duration = ( (int) ( currentTimeMillis - video_scr_t0 ) ) / dpy_den ; // ms/f
+ final int maxVideoDelay = Math.min(avg_dpy_duration, MAXIMUM_VIDEO_ASYNC);
+ video_dpts_count++;
+ // video_dpts_cum = d_avpts + VIDEO_DPTS_COEFF * video_dpts_cum;
+ video_dpts_cum = d_vpts + VIDEO_DPTS_COEFF * video_dpts_cum;
+ final int video_dpts_avg_diff = video_dpts_count >= VIDEO_DPTS_NUM ? getVideoDPTSAvg() : 0;
+ final int dt = (int) ( video_dpts_avg_diff / playSpeed + 0.5f );
+ // final int dt = (int) ( d_vpts / playSpeed + 0.5f );
+ // final int dt = (int) ( d_avpts / playSpeed + 0.5f );
+ final TextureFrame _nextFrame = nextFrame;
+ if( dt > maxVideoDelay ) {
+ cachedFrame = nextFrame;
+ nextFrame = null;
+ } else if ( !droppedFrame && dt < -maxVideoDelay && null != videoFramesDecoded && videoFramesDecoded.size() > 0 ) {
+ // only drop if prev. frame has not been dropped and
+ // frame is too late and one decoded frame is already available.
+ dropFrame = true;
+ }
+ video_pts_last = video_pts;
+ if( DEBUG ) {
+ System.err.println( "AV_: dT "+(currentTimeMillis-lastTimeMillis)+", "+
+ getPerfStringImpl( video_scr, video_pts, d_vpts,
+ audio_scr, audio_pts, d_apts,
+ video_dpts_avg_diff ) +
+ ", avg dpy-fps "+avg_dpy_duration+" ms/f, maxD "+maxVideoDelay+" ms, "+_nextFrame+", playCached " + playCached + ", dropFrame "+dropFrame);
+ }
+ }
+ } // has video frame
+ } // has audio or video frame
+
+ if( null != videoFramesFree && null != nextFrame ) {
+ // Had frame and not single threaded ? (TEXTURE_COUNT_MIN < textureCount)
+ final TextureFrame _lastFrame = lastFrame;
+ lastFrame = nextFrame;
+ if( null != _lastFrame ) {
+ videoFramesFree.putBlocking(_lastFrame);
+ }
+ }
+ lastTimeMillis = currentTimeMillis;
+ } while( dropFrame );
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ displayedFrameCount++;
+ return lastFrame;
+ }
+ }
+ protected void preNextTextureImpl(GL gl) {}
+ protected void postNextTextureImpl(GL gl) {}
+ /**
+ * Process stream until the next video frame, i.e. {@link TextureFrame}, has been reached.
+ * Audio frames, i.e. {@link AudioSink.AudioFrame}, shall be handled in the process.
+ * <p>
+ * Video frames shall be ignored, if {@link #getVID()} is {@link #STREAM_ID_NONE}.
+ * </p>
+ * <p>
+ * Audio frames shall be ignored, if {@link #getAID()} is {@link #STREAM_ID_NONE}.
+ * </p>
+ * <p>
+ * Method may be invoked on the <a href="#streamworker"><i>StreamWorker</i> decoding thread</a>.
+ * </p>
+ * <p>
+ * Implementation shall care of OpenGL synchronization as required, e.g. glFinish()/glFlush()!
+ * </p>
+ * @param gl valid and current GL instance, shall be <code>null</code> for audio only.
+ * @param nextFrame the {@link TextureFrame} to store the video PTS and texture data,
+ * shall be <code>null</code> for audio only.
+ * @return the last processed video PTS value, maybe {@link TimeFrameI#INVALID_PTS} if video frame is invalid or n/a.
+ * Will be {@link TimeFrameI#END_OF_STREAM_PTS} if end of stream reached.
+ */
+ protected abstract int getNextTextureImpl(GL gl, TextureFrame nextFrame);
+
+ protected final int getNextSingleThreaded(final GL gl, final TextureFrame nextFrame, boolean[] gotVFrame) throws InterruptedException {
+ final int pts;
+ if( STREAM_ID_NONE != vid ) {
+ preNextTextureImpl(gl);
+ pts = getNextTextureImpl(gl, nextFrame);
+ postNextTextureImpl(gl);
+ if( TimeFrameI.INVALID_PTS != pts ) {
+ newFrameAvailable(nextFrame, Platform.currentTimeMillis());
+ gotVFrame[0] = true;
+ } else {
+ gotVFrame[0] = false;
+ }
+ } else {
+ // audio only
+ pts = getNextTextureImpl(null, null);
+ gotVFrame[0] = false;
+ }
+ return pts;
+ }
+
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * Note: All {@link AudioSink} operations are performed from {@link GLMediaPlayerImpl},
+ * i.e. {@link #play()}, {@link #pause(boolean)}, {@link #seek(int)}, {@link #setPlaySpeed(float)}, {@link #getAudioPTS()}.
+ * </p>
+ * <p>
+ * Implementations using an {@link AudioSink} shall write it's instance to {@link #audioSink}
+ * from within their {@link #initStreamImpl(int, int)} implementation.
+ * </p>
+ */
+ @Override
+ public final AudioSink getAudioSink() { return audioSink; }
+
+ /**
+ * To be called from implementation at 1st PTS after start
+ * w/ current pts value in milliseconds.
+ * @param audio_scr_t0
+ */
+ protected void setFirstAudioPTS2SCR(int pts) {
+ if( audioSCR_reset ) {
+ audio_scr_t0 = Platform.currentTimeMillis() - pts;
+ audioSCR_reset = false;
+ }
+ }
+ private void flushAllVideoFrames() {
+ if( null != videoFramesFree ) {
+ videoFramesFree.resetFull(videoFramesOrig);
+ lastFrame = videoFramesFree.get();
+ if( null == lastFrame ) { throw new InternalError("XXX"); }
+ videoFramesDecoded.clear();
+ }
+ cachedFrame = null;
+ }
+ private void resetAVPTSAndFlush() {
+ video_dpts_cum = 0;
+ video_dpts_count = 0;
+ resetAVPTS();
+ flushAllVideoFrames();
+ if( null != audioSink ) {
+ audioSink.flush();
+ }
+ }
+ private void resetAVPTS() {
+ nullFrameCount = 0;
+ presentedFrameCount = 0;
+ displayedFrameCount = 0;
+ decodedFrameCount = 0;
+ audioSCR_reset = true;
+ videoSCR_reset = true;
+ }
+ private final int getVideoDPTSAvg() {
+ return (int) ( video_dpts_cum * (1.0f - VIDEO_DPTS_COEFF) + 0.5f );
+ }
+
+ private final void newFrameAvailable(TextureFrame frame, long currentTimeMillis) {
+ decodedFrameCount++;
+ if( 0 == frame.getDuration() ) { // patch frame duration if not set already
+ frame.setDuration( (int) frame_duration );
+ }
+ synchronized(eventListenersLock) {
+ for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
+ i.next().newFrameAvailable(this, frame, currentTimeMillis);
+ }
+ }
+ }
+
+ /**
+ * After {@link GLMediaPlayerImpl#initStreamImpl(int, int) initStreamImpl(..)} is completed via
+ * {@link GLMediaPlayerImpl#updateAttributes(int, int, int, int, int, int, int, float, int, int, int, String, String) updateAttributes(..)},
+ * the latter decides whether StreamWorker is being used.
+ */
+ class StreamWorker extends Thread {
+ private volatile boolean isRunning = false;
+ private volatile boolean isActive = false;
+ private volatile boolean isBlocked = false;
+
+ private volatile boolean shallPause = true;
+ private volatile boolean shallStop = false;
+
+ private volatile GLContext sharedGLCtx = null;
+ private boolean sharedGLCtxCurrent = false;
+ private GLDrawable dummyDrawable = null;
+
+ /**
+ * Starts this daemon thread,
+ * <p>
+ * This thread pauses after it's started!
+ * </p>
+ **/
+ StreamWorker() {
+ setDaemon(true);
+ synchronized(this) {
+ start();
+ while( !isRunning ) {
+ this.notifyAll(); // wake-up startup-block
+ try {
+ this.wait(); // wait until started
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ private void makeCurrent(GLContext ctx) {
+ if( GLContext.CONTEXT_NOT_CURRENT >= ctx.makeCurrent() ) {
+ throw new GLException("Couldn't make ctx current: "+ctx);
+ }
+ }
+
+ private void destroySharedGL() {
+ if( null != sharedGLCtx ) {
+ if( sharedGLCtx.isCreated() ) {
+ // Catch dispose GLExceptions by GLEventListener, just 'print' them
+ // so we can continue with the destruction.
+ try {
+ sharedGLCtx.destroy();
+ } catch (GLException gle) {
+ gle.printStackTrace();
+ }
+ }
+ sharedGLCtx = null;
+ }
+ if( null != dummyDrawable ) {
+ final AbstractGraphicsDevice device = dummyDrawable.getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
+ dummyDrawable.setRealized(false);
+ dummyDrawable = null;
+ device.close();
+ }
+ }
+
+ public final synchronized void initGL(GL gl) {
+ final GLContext glCtx = gl.getContext();
+ final boolean glCtxCurrent = glCtx.isCurrent();
+ final GLProfile glp = gl.getGLProfile();
+ final GLDrawableFactory factory = GLDrawableFactory.getFactory(glp);
+ final AbstractGraphicsDevice device = glCtx.getGLDrawable().getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
+ dummyDrawable = factory.createDummyDrawable(device, true, glCtx.getGLDrawable().getChosenGLCapabilities(), null); // own device!
+ dummyDrawable.setRealized(true);
+ sharedGLCtx = dummyDrawable.createContext(glCtx);
+ makeCurrent(sharedGLCtx);
+ if( glCtxCurrent ) {
+ makeCurrent(glCtx);
+ } else {
+ sharedGLCtx.release();
+ }
+ }
+ public final synchronized void doPause() {
+ if( isActive ) {
+ shallPause = true;
+ if( Thread.currentThread() != this ) {
+ if( isBlocked && isActive ) {
+ this.interrupt();
+ }
+ while( isActive ) {
+ try {
+ this.wait(); // wait until paused
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ }
+ public final synchronized void doResume() {
+ if( isRunning && !isActive ) {
+ shallPause = false;
+ if( Thread.currentThread() != this ) {
+ while( !isActive ) {
+ this.notifyAll(); // wake-up pause-block
+ try {
+ this.wait(); // wait until resumed
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ }
+ public final synchronized void doStop() {
+ if( isRunning ) {
+ shallStop = true;
+ if( Thread.currentThread() != this ) {
+ if( isBlocked && isRunning ) {
+ this.interrupt();
+ }
+ while( isRunning ) {
+ this.notifyAll(); // wake-up pause-block (opt)
+ try {
+ this.wait(); // wait until stopped
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ }
+ public final boolean isRunning() { return isRunning; }
+ public final boolean isActive() { return isActive; }
+
+ @Override
+ public final void run() {
+ setName(getName()+"-StreamWorker_"+StreamWorkerInstanceId);
+ StreamWorkerInstanceId++;
+
+ synchronized ( this ) {
+ isRunning = true;
+ this.notifyAll(); // wake-up ctor()
+ }
+
+ while( !shallStop ){
+ if( shallPause ) {
+ synchronized ( this ) {
+ if( sharedGLCtxCurrent ) {
+ postNextTextureImpl(sharedGLCtx.getGL());
+ sharedGLCtx.release();
+ }
+ while( shallPause && !shallStop ) {
+ isActive = false;
+ this.notifyAll(); // wake-up doPause()
+ try {
+ this.wait(); // wait until resumed
+ } catch (InterruptedException e) {
+ if( !shallPause ) {
+ e.printStackTrace();
+ }
+ }
+ }
+ if( sharedGLCtxCurrent ) {
+ makeCurrent(sharedGLCtx);
+ preNextTextureImpl(sharedGLCtx.getGL());
+ }
+ isActive = true;
+ this.notifyAll(); // wake-up doResume()
+ }
+ }
+ if( !sharedGLCtxCurrent && null != sharedGLCtx ) {
+ synchronized ( this ) {
+ if( null != sharedGLCtx ) {
+ makeCurrent( sharedGLCtx );
+ preNextTextureImpl(sharedGLCtx.getGL());
+ sharedGLCtxCurrent = true;
+ }
+ if( null == videoFramesFree ) {
+ throw new InternalError("XXX videoFramesFree is null");
+ }
+ }
+ }
+
+ if( !shallStop ) {
+ TextureFrame nextFrame = null;
+ try {
+ isBlocked = true;
+ final GL gl;
+ if( STREAM_ID_NONE != vid ) {
+ nextFrame = videoFramesFree.getBlocking();
+ nextFrame.setPTS( TimeFrameI.INVALID_PTS ); // mark invalid until processed!
+ gl = sharedGLCtx.getGL();
+ } else {
+ gl = null;
+ }
+ isBlocked = false;
+ final int vPTS = getNextTextureImpl(gl, nextFrame);
+ boolean audioEOS = false;
+ if( TimeFrameI.INVALID_PTS != vPTS ) {
+ if( null != nextFrame ) {
+ if( STREAM_WORKER_DELAY > 0 ) {
+ Thread.sleep(STREAM_WORKER_DELAY);
+ }
+ if( !videoFramesDecoded.put(nextFrame) ) {
+ throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
+ }
+ newFrameAvailable(nextFrame, Platform.currentTimeMillis());
+ nextFrame = null;
+ } else {
+ // audio only
+ if( TimeFrameI.END_OF_STREAM_PTS == vPTS || ( duration > 0 && duration < vPTS ) ) {
+ audioEOS = true;
+ } else {
+ nullFrameCount = 0;
+ }
+ }
+ } else if( null == nextFrame ) {
+ // audio only
+ audioEOS = maxNullFrameCountUntilEOS <= nullFrameCount;
+ if( null == audioSink || 0 == audioSink.getEnqueuedFrameCount() ) {
+ nullFrameCount++;
+ }
+ }
+ if( audioEOS ) {
+ // state transition incl. notification
+ synchronized ( this ) {
+ shallPause = true;
+ isActive = false;
+ this.notifyAll(); // wake-up potential do*()
+ }
+ if( DEBUG ) {
+ System.err.println( "AV-EOS (StreamWorker): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == vPTS)+", "+GLMediaPlayerImpl.this);
+ }
+ pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
+ }
+ } catch (InterruptedException e) {
+ isBlocked = false;
+ if( !shallStop && !shallPause ) {
+ streamErr = new StreamException("InterruptedException while decoding: "+GLMediaPlayerImpl.this.toString(), e);
+ }
+ } catch (Throwable t) {
+ streamErr = new StreamException(t.getClass().getSimpleName()+" while decoding: "+GLMediaPlayerImpl.this.toString(), t);
+ } finally {
+ if( null != nextFrame ) { // put back
+ videoFramesFree.put(nextFrame);
+ }
+ if( null != streamErr ) {
+ if( DEBUG ) {
+ final Throwable t = null != streamErr.getCause() ? streamErr.getCause() : streamErr;
+ System.err.println("Caught StreamException: "+t.getMessage());
+ t.printStackTrace();
+ }
+ // state transition incl. notification
+ synchronized ( this ) {
+ shallPause = true;
+ isActive = false;
+ this.notifyAll(); // wake-up potential do*()
+ }
+ pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_ERR);
+ }
+ }
+ }
+ }
+ synchronized ( this ) {
+ if( sharedGLCtxCurrent ) {
+ postNextTextureImpl(sharedGLCtx.getGL());
+ }
+ destroySharedGL();
+ isRunning = false;
+ isActive = false;
+ this.notifyAll(); // wake-up doStop()
+ }
+ }
+ }
+ static int StreamWorkerInstanceId = 0;
+ private volatile StreamWorker streamWorker = null;
+ private volatile StreamException streamErr = null;
+
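The StreamWorker above coordinates pause, resume and stop through plain monitor handshakes: the caller flips a flag, wakes the worker, then waits until the worker confirms the new state. A minimal, self-contained sketch of that handshake pattern follows; all names are illustrative (the real worker additionally manages the shared GL context and frame queues):

    // Sketch of the flag + wait/notify handshake used by StreamWorker.
    // Hypothetical class; not part of this patch.
    final class PausableWorker extends Thread {
        private boolean shallPause = true;
        private boolean isActive = false;

        public synchronized void resumeWorker() {
            shallPause = false;
            while( !isActive ) {
                notifyAll();                 // wake-up pause-block
                try { wait(); } catch (InterruptedException e) { }
            }
        }

        @Override
        public void run() {
            while( true ) {
                synchronized ( this ) {
                    while( shallPause ) {
                        isActive = false;
                        notifyAll();         // wake-up resumeWorker()
                        try { wait(); } catch (InterruptedException e) { }
                    }
                    isActive = true;
                    notifyAll();             // confirm resume to caller
                }
                // ... decode one frame outside the monitor ...
            }
        }
    }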
+ protected final int addStateEventMask(int event_mask, State newState) {
+ if( state != newState ) {
+ switch( newState ) {
+ case Uninitialized:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_UNINIT;
+ break;
+ case Initialized:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_INIT;
+ break;
+ case Playing:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_PLAY;
+ break;
+ case Paused:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_PAUSE;
+ break;
+ }
+ }
+ return event_mask;
+ }
+
+ protected final void attributesUpdated(int event_mask) {
+ if( 0 != event_mask ) {
+ final long now = Platform.currentTimeMillis();
+ synchronized(eventListenersLock) {
+ for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
+ i.next().attributesChanged(this, event_mask, now);
+ }
}
}
- texFrameMap.clear();
}
- protected final void updateAttributes(int width, int height, int bps_stream, int bps_video, int bps_audio,
- float fps, int totalFrames, int duration,
- String vcodec, String acodec) {
+ protected final void changeState(int event_mask, State newState) {
+ event_mask = addStateEventMask(event_mask, newState);
+ if( 0 != event_mask ) {
+ state = newState;
+ attributesUpdated( event_mask );
+ }
+ }
+
+ protected final void updateAttributes(int vid, int aid, int width, int height, int bps_stream,
+ int bps_video, int bps_audio, float fps,
+ int videoFrames, int audioFrames, int duration, String vcodec, String acodec) {
int event_mask = 0;
+ final boolean wasUninitialized = state == State.Uninitialized;
+
+ if( wasUninitialized ) {
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_INIT;
+ state = State.Initialized;
+ }
+ if( STREAM_ID_AUTO == vid ) {
+ vid = STREAM_ID_NONE;
+ }
+ if( this.vid != vid ) {
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_VID;
+ this.vid = vid;
+ }
+        if( STREAM_ID_AUTO == aid ) {
+            aid = STREAM_ID_NONE;
+        }
+ if( this.aid != aid ) {
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_AID;
+ this.aid = aid;
+ }
if( this.width != width || this.height != height ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_SIZE;
this.width = width;
this.height = height;
- }
+ }
if( this.fps != fps ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_FPS;
this.fps = fps;
+ if( 0 != fps ) {
+ this.frame_duration = 1000f / fps;
+ this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_MS_UNTIL_EOS / (int)this.frame_duration;
+ } else {
+ this.frame_duration = 0;
+ this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
+ }
}
if( this.bps_stream != bps_stream || this.bps_video != bps_video || this.bps_audio != bps_audio ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_BPS;
@@ -427,12 +1402,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
this.bps_video = bps_video;
this.bps_audio = bps_audio;
}
- if( this.totalFrames != totalFrames || this.duration != duration ) {
+ if( this.videoFrames != videoFrames || this.audioFrames != audioFrames || this.duration != duration ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_LENGTH;
- this.totalFrames = totalFrames;
+ this.videoFrames = videoFrames;
+ this.audioFrames = audioFrames;
this.duration = duration;
}
- if( (null!=acodec && acodec.length()>0 && !this.acodec.equals(acodec)) ) {
+ if( (null!=acodec && acodec.length()>0 && !this.acodec.equals(acodec)) ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_CODEC;
this.acodec = acodec;
}
@@ -443,97 +1419,152 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
if(0==event_mask) {
return;
}
- attributesUpdated(event_mask);
- }
-
- protected final void attributesUpdated(int event_mask) {
- synchronized(eventListenersLock) {
- for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
- i.next().attributesChanges(this, event_mask, System.currentTimeMillis());
+ if( wasUninitialized ) {
+ if( null != streamWorker ) {
+ throw new InternalError("XXX: StreamWorker not null - "+this);
+ }
+ if( TEXTURE_COUNT_MIN < textureCount || STREAM_ID_NONE == vid ) { // Enable StreamWorker for 'audio only' as well (Bug 918).
+ streamWorker = new StreamWorker();
+ }
+ if( DEBUG ) {
+ System.err.println("XXX Initialize @ updateAttributes: "+this);
}
}
+ attributesUpdated(event_mask);
}
- protected final void newFrameAvailable() {
- frameNumber++;
- synchronized(eventListenersLock) {
- for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
- i.next().newFrameAvailable(this, System.currentTimeMillis());
+
+ protected void setIsGLOriented(boolean isGLOriented) {
+ if( isInGLOrientation != isGLOriented ) {
+ if( DEBUG ) {
+ System.err.println("XXX gl-orient "+isInGLOrientation+" -> "+isGLOriented);
}
+ isInGLOrientation = isGLOriented;
+ for(int i=0; i<videoFramesOrig.length; i++) {
+ videoFramesOrig[i].getTexture().setMustFlipVertically(!isGLOriented);
+ }
+ attributesUpdated(GLMediaEventListener.EVENT_CHANGE_SIZE);
}
}
-
+
@Override
- public final synchronized State destroy(GL gl) {
- destroyImpl(gl);
- removeAllImageTextures(gl);
- state = State.Uninitialized;
- return state;
+ public final URI getURI() {
+ return streamLoc;
}
- protected abstract void destroyImpl(GL gl);
@Override
- public final synchronized URLConnection getURLConnection() {
- return urlConn;
- }
+ public final int getVID() { return vid; }
+
+ @Override
+ public final int getAID() { return aid; }
@Override
- public final synchronized String getVideoCodec() {
+ public final String getVideoCodec() {
return vcodec;
}
@Override
- public final synchronized String getAudioCodec() {
+ public final String getAudioCodec() {
return acodec;
}
@Override
- public final synchronized long getTotalFrames() {
- return totalFrames;
+ public final int getVideoFrames() {
+ return videoFrames;
+ }
+
+ @Override
+ public final int getAudioFrames() {
+ return audioFrames;
}
@Override
- public final synchronized int getDuration() {
+ public final int getDuration() {
return duration;
}
-
+
@Override
- public final synchronized long getStreamBitrate() {
+ public final long getStreamBitrate() {
return bps_stream;
}
@Override
- public final synchronized int getVideoBitrate() {
+ public final int getVideoBitrate() {
return bps_video;
}
-
+
@Override
- public final synchronized int getAudioBitrate() {
+ public final int getAudioBitrate() {
return bps_audio;
}
-
+
@Override
- public final synchronized float getFramerate() {
+ public final float getFramerate() {
return fps;
}
@Override
- public final synchronized int getWidth() {
+ public final boolean isGLOriented() {
+ return isInGLOrientation;
+ }
+
+ @Override
+ public final int getWidth() {
return width;
}
@Override
- public final synchronized int getHeight() {
+ public final int getHeight() {
return height;
}
@Override
- public final synchronized String toString() {
- final float ct = getCurrentPosition() / 1000.0f, tt = getDuration() / 1000.0f;
- final String loc = ( null != urlConn ) ? urlConn.getURL().toExternalForm() : "<undefined stream>" ;
- return "GLMediaPlayer["+state+", "+frameNumber+"/"+totalFrames+" frames, "+ct+"/"+tt+"s, speed "+playSpeed+", "+bps_stream+" bps, "+
- "Texture[count "+textureCount+", target "+toHexString(textureTarget)+", format "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
- "Stream[Video[<"+vcodec+">, "+width+"x"+height+", "+fps+" fps, "+bps_video+" bsp], "+
- "Audio[<"+acodec+">, "+bps_audio+" bsp]], "+loc+"]";
+ public final String toString() {
+ final float tt = getDuration() / 1000.0f;
+ final String loc = ( null != streamLoc ) ? streamLoc.toString() : "<undefined stream>" ;
+ final int freeVideoFrames = null != videoFramesFree ? videoFramesFree.size() : 0;
+ final int decVideoFrames = null != videoFramesDecoded ? videoFramesDecoded.size() : 0;
+ final int video_scr = video_scr_pts + (int) ( ( Platform.currentTimeMillis() - video_scr_t0 ) * playSpeed );
+ final String camPath = null != cameraPath ? ", camera: "+cameraPath : "";
+ return "GLMediaPlayer["+state+", vSCR "+video_scr+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s), z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+
+ "speed "+playSpeed+", "+bps_stream+" bps, hasSW "+(null!=streamWorker)+
+ ", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", tagt "+toHexString(textureTarget)+", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
+ "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
+ "Audio[id "+aid+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], uri "+loc+camPath+"]";
+ }
+
+ @Override
+ public final String getPerfString() {
+ final long currentTimeMillis = Platform.currentTimeMillis();
+ final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed );
+ final int d_vpts = video_pts_last - video_scr;
+ final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
+ final int audio_pts = getAudioPTSImpl();
+ final int d_apts = audio_pts - audio_scr;
+ return getPerfStringImpl( video_scr, video_pts_last, d_vpts, audio_scr, audio_pts, d_apts, getVideoDPTSAvg() );
+ }
+ private final String getPerfStringImpl(final int video_scr, final int video_pts, final int d_vpts,
+ final int audio_scr, final int audio_pts, final int d_apts,
+ final int video_dpts_avg_diff) {
+ final float tt = getDuration() / 1000.0f;
+ final String audioSinkInfo;
+ final AudioSink audioSink = getAudioSink();
+ if( null != audioSink ) {
+ audioSinkInfo = "AudioSink[frames [p "+audioSink.getEnqueuedFrameCount()+", q "+audioSink.getQueuedFrameCount()+", f "+audioSink.getFreeFrameCount()+", c "+audioSink.getFrameCount()+"], time "+audioSink.getQueuedTime()+", bytes "+audioSink.getQueuedByteCount()+"]";
+ } else {
+ audioSinkInfo = "";
+ }
+ final int freeVideoFrames, decVideoFrames;
+ if( null != videoFramesFree ) {
+ freeVideoFrames = videoFramesFree.size();
+ decVideoFrames = videoFramesDecoded.size();
+ } else {
+ freeVideoFrames = 0;
+ decVideoFrames = 0;
+ }
+ return state+", frames[(p "+presentedFrameCount+", d "+decodedFrameCount+") / "+videoFrames+", "+tt+" s, z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+
+ "speed " + playSpeed+", dAV "+( d_vpts - d_apts )+", vSCR "+video_scr+", vpts "+video_pts+", dSCR["+d_vpts+", avrg "+video_dpts_avg_diff+"], "+
+ "aSCR "+audio_scr+", apts "+audio_pts+" ( "+d_apts+" ), "+audioSinkInfo+
+ ", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+"]";
}
@Override
@@ -557,13 +1588,30 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
- public final synchronized GLMediaEventListener[] getEventListeners() {
+ public final GLMediaEventListener[] getEventListeners() {
synchronized(eventListenersLock) {
return eventListeners.toArray(new GLMediaEventListener[eventListeners.size()]);
}
}
- private Object eventListenersLock = new Object();
+ private final Object eventListenersLock = new Object();
+
+ @Override
+ public final Object getAttachedObject(String name) {
+ return attachedObjects.get(name);
+ }
+
+ @Override
+ public final Object attachObject(String name, Object obj) {
+ return attachedObjects.put(name, obj);
+ }
+
+ @Override
+ public final Object detachObject(String name) {
+ return attachedObjects.remove(name);
+ }
+
+ private final HashMap<String, Object> attachedObjects = new HashMap<String, Object>();
protected static final String toHexString(long v) {
return "0x"+Long.toHexString(v);
@@ -571,5 +1619,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected static final String toHexString(int v) {
return "0x"+Integer.toHexString(v);
}
-
-} \ No newline at end of file
+ protected static final int getPropIntVal(Map<String, String> props, String key) {
+ final String val = props.get(key);
+ try {
+ return Integer.valueOf(val).intValue();
+ } catch (NumberFormatException nfe) {
+ if(DEBUG) {
+ System.err.println("Not a valid integer for <"+key+">: <"+val+">");
+ }
+ }
+ return 0;
+ }
+}
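The vSCR value printed by toString() and getPerfString() above is an extrapolated system clock reference: the video PTS at the last clock anchor plus the wall-clock time elapsed since that anchor, scaled by the play speed. A worked example of the arithmetic, with invented values:

    // Invented values: video_scr = video_scr_pts + (now - video_scr_t0) * playSpeed
    final int   video_scr_pts = 40000;      // PTS at the last clock anchor [ms]
    final long  video_scr_t0  = 1000000L;   // wall clock at that anchor [ms]
    final long  now           = 1000500L;   // current wall clock [ms]
    final float playSpeed     = 1.0f;
    final int video_scr = video_scr_pts + (int) ( ( now - video_scr_t0 ) * playSpeed );
    // video_scr == 40500; d_vpts = video_pts_last - video_scr measures video drift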
diff --git a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
new file mode 100644
index 000000000..6e006d9c0
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
@@ -0,0 +1,229 @@
+package jogamp.opengl.util.av;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
+import javax.sound.sampled.AudioSystem;
+import javax.sound.sampled.DataLine;
+import javax.sound.sampled.SourceDataLine;
+
+import com.jogamp.opengl.util.av.AudioSink;
+
+/***
+ * JavaSound Audio Sink
+ * <p>
+ * FIXME: Parameterize .. all configs .. best via an init-method, passing requested
+ * audio capabilities
+ * </p>
+ */
+public class JavaSoundAudioSink implements AudioSink {
+
+ // Chunk of audio processed at one time
+ public static final int BUFFER_SIZE = 1000;
+ public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
+ private static final boolean staticAvailable;
+
+ // Sample time values
+ // public static final double SAMPLE_TIME_IN_SECS = 1.0 / DEFAULT_SAMPLE_RATE;
+ // public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+
+ private javax.sound.sampled.AudioFormat format;
+ private DataLine.Info info;
+ private SourceDataLine auline;
+ private int bufferCount;
+ private byte [] sampleData = new byte[BUFFER_SIZE];
+ private boolean initialized = false;
+ private AudioSink.AudioFormat chosenFormat = null;
+
+ private volatile boolean playRequested = false;
+ private float volume = 1.0f;
+
+ static {
+ boolean ok = false;
+ try {
+ AudioSystem.getAudioFileTypes();
+ ok = true;
+        } catch (Throwable t) {
+            // JavaSound not available (e.g. headless or stripped-down JRE)
+        }
+ staticAvailable=ok;
+ }
+
+ @Override
+ public String toString() {
+ return "JavaSoundSink[init "+initialized+", dataLine "+info+", source "+auline+", bufferCount "+bufferCount+
+               ", chosen "+chosenFormat+", jsFormat "+format+"]";
+ }
+
+ @Override
+ public final float getPlaySpeed() { return 1.0f; } // FIXME
+
+ @Override
+ public final boolean setPlaySpeed(float rate) {
+ return false; // FIXME
+ }
+
+ @Override
+ public final float getVolume() {
+ // FIXME
+ return volume;
+ }
+
+ @Override
+ public final boolean setVolume(float v) {
+ // FIXME
+ volume = v;
+ return true;
+ }
+
+ @Override
+ public AudioSink.AudioFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public final int getMaxSupportedChannels() {
+ return 2;
+ }
+
+ @Override
+ public final boolean isSupported(AudioSink.AudioFormat format) {
+ return true;
+ }
+
+ @Override
+ public boolean init(AudioSink.AudioFormat requestedFormat, float frameDuration, int initialQueueSize, int queueGrowAmount, int queueLimit) {
+ if( !staticAvailable ) {
+ return false;
+ }
+ // Create the audio format we wish to use
+ format = new javax.sound.sampled.AudioFormat(requestedFormat.sampleRate, requestedFormat.sampleSize, requestedFormat.channelCount, requestedFormat.signed, !requestedFormat.littleEndian);
+
+ // Create dataline info object describing line format
+ info = new DataLine.Info(SourceDataLine.class, format);
+
+ // Clear buffer initially
+ Arrays.fill(sampleData, (byte) 0);
+ try{
+ // Get line to write data to
+ auline = (SourceDataLine) AudioSystem.getLine(info);
+ auline.open(format);
+ auline.start();
+ System.out.println("JavaSound audio sink");
+ initialized=true;
+ chosenFormat = requestedFormat;
+ } catch (Exception e) {
+ initialized=false;
+ }
+        return initialized;
+ }
+
+ @Override
+ public boolean isPlaying() {
+ return playRequested && auline.isRunning();
+ }
+
+ @Override
+ public void play() {
+ if( null != auline ) {
+ playRequested = true;
+ playImpl();
+ }
+ }
+ private void playImpl() {
+ if( playRequested && !auline.isRunning() ) {
+ auline.start();
+ }
+ }
+
+ @Override
+ public void pause() {
+ if( null != auline ) {
+ playRequested = false;
+ auline.stop();
+ }
+ }
+
+ @Override
+ public void flush() {
+ if( null != auline ) {
+ playRequested = false;
+ auline.stop();
+ auline.flush();
+ }
+ }
+
+ @Override
+ public final int getEnqueuedFrameCount() {
+ return 0; // FIXME
+ }
+
+ @Override
+ public int getFrameCount() {
+ return 1;
+ }
+
+ @Override
+ public int getQueuedFrameCount() {
+ return 0;
+ }
+
+ @Override
+ public boolean isInitialized() {
+ return initialized;
+ }
+
+ @Override
+ public void destroy() {
+ initialized = false;
+ chosenFormat = null;
+        // FIXME: complete code!
+ }
+
+ @Override
+ public AudioFrame enqueueData(AudioDataFrame audioDataFrame) {
+ int byteSize = audioDataFrame.getByteSize();
+ final ByteBuffer byteBuffer = audioDataFrame.getData();
+ final byte[] bytes = new byte[byteSize];
+ final int p = byteBuffer.position();
+ byteBuffer.get(bytes, 0, byteSize);
+ byteBuffer.position(p);
+
+ int written = 0;
+ int len;
+ while (byteSize > 0) {
+ len = auline.write(bytes, written, byteSize);
+ byteSize -= len;
+ written += len;
+ }
+ playImpl();
+ return audioDataFrame;
+ }
+
+ @Override
+ public AudioFrame enqueueData(int pts, ByteBuffer bytes, int byteCount) {
+ return enqueueData(new AudioDataFrame(pts, chosenFormat.getBytesDuration(byteCount), bytes, byteCount));
+ }
+
+ @Override
+ public int getQueuedByteCount() {
+ return auline.getBufferSize() - auline.available();
+ }
+
+ @Override
+ public int getFreeFrameCount() {
+ return auline.available();
+ }
+
+ @Override
+ public int getQueuedTime() {
+ return getQueuedTimeImpl( getQueuedByteCount() );
+ }
+ private final int getQueuedTimeImpl(int byteCount) {
+ final int bytesPerSample = chosenFormat.sampleSize >>> 3; // /8
+ return byteCount / ( chosenFormat.channelCount * bytesPerSample * ( chosenFormat.sampleRate / 1000 ) );
+ }
+
+ @Override
+ public final int getPTS() { return 0; } // FIXME
+}
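A hedged usage sketch of the AudioSink contract implemented above, reproducing the queued-time arithmetic of getQueuedTimeImpl(); the 16-bit stereo 44.1 kHz parameters are invented for illustration:

    // Invented format: 16-bit signed stereo PCM at 44100 Hz.
    // getQueuedTimeImpl: ms = bytes / (channels * bytesPerSample * samplesPerMs)
    final int channelCount   = 2;
    final int sampleSize     = 16;                  // bits
    final int sampleRate     = 44100;               // Hz
    final int bytesPerSample = sampleSize >>> 3;    // 16 bit -> 2 bytes
    final int queuedBytes    = 17640;
    final int queuedMs = queuedBytes / ( channelCount * bytesPerSample * ( sampleRate / 1000 ) );
    // queuedMs == 17640 / (2 * 2 * 44) == 100 (integer division, as above)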
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
new file mode 100644
index 000000000..8d3dbdf44
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
@@ -0,0 +1,129 @@
+package jogamp.opengl.util.av;
+
+
+import java.nio.ByteBuffer;
+
+import com.jogamp.opengl.util.av.AudioSink;
+
+public class NullAudioSink implements AudioSink {
+
+ @Override
+ public boolean isInitialized() {
+ return true;
+ }
+
+ private volatile float playSpeed = 1.0f;
+ private volatile boolean playRequested = false;
+ private float volume = 1.0f;
+
+ @Override
+ public final float getPlaySpeed() { return playSpeed; }
+
+ @Override
+ public final boolean setPlaySpeed(float rate) {
+ if( Math.abs(1.0f - rate) < 0.01f ) {
+ rate = 1.0f;
+ }
+ playSpeed = rate;
+ return true;
+ }
+
+ @Override
+ public final float getVolume() {
+ // FIXME
+ return volume;
+ }
+
+ @Override
+ public final boolean setVolume(float v) {
+ // FIXME
+ volume = v;
+ return true;
+ }
+
+ @Override
+ public AudioFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public final int getMaxSupportedChannels() {
+ return 8;
+ }
+
+ @Override
+ public final boolean isSupported(AudioFormat format) {
+ return true;
+ }
+
+ @Override
+ public boolean init(AudioFormat requestedFormat, float frameDuration, int initialQueueSize, int queueGrowAmount, int queueLimit) {
+ return true;
+ }
+
+ @Override
+ public boolean isPlaying() {
+ return playRequested;
+ }
+
+ @Override
+ public void play() {
+ playRequested = true;
+ }
+
+ @Override
+ public void pause() {
+ playRequested = false;
+ }
+
+ @Override
+ public void flush() {
+ }
+
+ @Override
+ public void destroy() {
+ }
+
+ @Override
+ public final int getEnqueuedFrameCount() {
+ return 0;
+ }
+
+ @Override
+ public int getFrameCount() {
+ return 0;
+ }
+
+ @Override
+ public int getQueuedFrameCount() {
+ return 0;
+ }
+
+ @Override
+ public int getQueuedByteCount() {
+ return 0;
+ }
+
+ @Override
+ public int getQueuedTime() {
+ return 0;
+ }
+
+ @Override
+ public final int getPTS() { return 0; }
+
+ @Override
+ public int getFreeFrameCount() {
+ return 1;
+ }
+
+ @Override
+ public AudioFrame enqueueData(AudioDataFrame audioDataFrame) {
+ return null;
+ }
+
+ @Override
+ public AudioFrame enqueueData(int pts, ByteBuffer bytes, int byteCount) {
+ return null;
+ }
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
index 3d740d6b2..79129e563 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
@@ -3,14 +3,14 @@
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
- *
+ *
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
- *
+ *
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
- *
+ *
* THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
@@ -20,7 +20,7 @@
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
+ *
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of JogAmp Community.
@@ -32,12 +32,15 @@ import java.net.URLConnection;
import java.nio.ByteBuffer;
import javax.media.opengl.GL;
+import javax.media.opengl.GLException;
import javax.media.opengl.GLProfile;
import jogamp.opengl.util.av.GLMediaPlayerImpl;
import com.jogamp.common.nio.Buffers;
+import com.jogamp.common.os.Platform;
import com.jogamp.common.util.IOUtil;
+import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureData;
import com.jogamp.opengl.util.texture.TextureIO;
@@ -49,114 +52,126 @@ import com.jogamp.opengl.util.texture.TextureSequence;
*/
public class NullGLMediaPlayer extends GLMediaPlayerImpl {
private TextureData texData = null;
- private TextureSequence.TextureFrame frame = null;
private int pos_ms = 0;
- private int pos_start = 0;
-
+ private long pos_start = 0;
+
public NullGLMediaPlayer() {
super();
- this.setTextureCount(1);
+
}
@Override
- protected boolean setPlaySpeedImpl(float rate) {
+ protected final boolean setPlaySpeedImpl(float rate) {
return false;
}
@Override
- protected boolean startImpl() {
- pos_start = (int)System.currentTimeMillis();
+ protected final boolean playImpl() {
+ pos_start = Platform.currentTimeMillis();
return true;
}
@Override
- protected boolean pauseImpl() {
+ protected final boolean pauseImpl() {
return true;
}
@Override
- protected boolean stopImpl() {
- return true;
- }
-
- @Override
- protected int seekImpl(int msec) {
+ protected final int seekImpl(int msec) {
pos_ms = msec;
validatePos();
return pos_ms;
}
-
- @Override
- protected TextureSequence.TextureFrame getLastTextureImpl() {
- return frame;
- }
@Override
- protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
- return frame;
+ protected final int getNextTextureImpl(GL gl, TextureFrame nextFrame) {
+ final int pts = getAudioPTSImpl();
+ nextFrame.setPTS( pts );
+ return pts;
}
-
+
@Override
- protected int getCurrentPositionImpl() {
- pos_ms = (int)System.currentTimeMillis() - pos_start;
+ protected final int getAudioPTSImpl() {
+ pos_ms = (int) ( Platform.currentTimeMillis() - pos_start );
validatePos();
return pos_ms;
}
@Override
- protected void destroyImpl(GL gl) {
+ protected final void destroyImpl(GL gl) {
+ if(null != texData) {
+ texData.destroy();
+ texData = null;
+ }
}
-
- @Override
- protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+
+ public final static TextureData createTestTextureData() {
+ TextureData res = null;
try {
- URLConnection urlConn = IOUtil.getResource("jogl/util/data/av/test-ntsc01-160x90.png", NullGLMediaPlayer.class.getClassLoader());
+ URLConnection urlConn = IOUtil.getResource("jogl/util/data/av/test-ntsc01-57x32.png", NullGLMediaPlayer.class.getClassLoader());
if(null != urlConn) {
- texData = TextureIO.newTextureData(GLProfile.getGL2ES2(), urlConn.getInputStream(), false, TextureIO.PNG);
+ res = TextureIO.newTextureData(GLProfile.getGL2ES2(), urlConn.getInputStream(), false, TextureIO.PNG);
}
} catch (Exception e) {
e.printStackTrace();
}
- if(null != texData) {
- width = texData.getWidth();
- height = texData.getHeight();
- } else {
- width = 640;
- height = 480;
- ByteBuffer buffer = Buffers.newDirectByteBuffer(width*height*4);
+ if(null == res) {
+ final int w = 160;
+ final int h = 90;
+ ByteBuffer buffer = Buffers.newDirectByteBuffer(w*h*4);
while(buffer.hasRemaining()) {
buffer.put((byte) 0xEA); buffer.put((byte) 0xEA); buffer.put((byte) 0xEA); buffer.put((byte) 0xEA);
}
buffer.rewind();
- texData = new TextureData(GLProfile.getGL2ES2(),
- GL.GL_RGBA, width, height, 0,
- GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, false,
+ res = new TextureData(GLProfile.getGL2ES2(),
+ GL.GL_RGBA, w, h, 0,
+ GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, false,
false, false, buffer, null);
}
- fps = 24f;
- duration = 10*60*1000; // msec
- totalFrames = (int) ( (duration/1000)*fps );
- vcodec = "png-static";
+ return res;
+ }
+
+ @Override
+ protected final void initStreamImpl(int vid, int aid) throws IOException {
+ texData = createTestTextureData();
+ final float _fps = 24f;
+ final int _duration = 10*60*1000; // msec
+ final int _totalFrames = (int) ( (_duration/1000)*_fps );
+ updateAttributes(0 /* fake */, GLMediaPlayer.STREAM_ID_NONE,
+ texData.getWidth(), texData.getHeight(), 0,
+ 0, 0, _fps,
+ _totalFrames, 0, _duration, "png-static", null);
+ }
+ @Override
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
+ isInGLOrientation = true;
+ }
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * Returns {@link GLMediaPlayer#TEXTURE_COUNT_MIN}.
+ * </p>
+ */
+ @Override
+ protected int validateTextureCount(int desiredTextureCount) {
+ return TEXTURE_COUNT_MIN;
}
-
+
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- Texture texture = super.createTexImageImpl(gl, idx, tex, width, height, false);
+ protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+ final Texture texture = super.createTexImageImpl(gl, texName, width, height);
if(null != texData) {
texture.updateImage(gl, texData);
- texData.destroy();
- texData = null;
- }
- frame = new TextureSequence.TextureFrame( texture );
- return frame;
+ }
+ return new TextureSequence.TextureFrame( texture );
}
-
+
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- frame = null;
- super.destroyTexImage(gl, imgTex);
+ protected final void destroyTexFrame(GL gl, TextureSequence.TextureFrame frame) {
+ super.destroyTexFrame(gl, frame);
}
-
+
private void validatePos() {
boolean considerPausing = false;
if( 0 > pos_ms) {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index ce9df21cf..a6a6fba97 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -3,14 +3,14 @@
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
- *
+ *
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
- *
+ *
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
- *
+ *
* THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
@@ -20,20 +20,20 @@
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
+ *
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of JogAmp Community.
*/
-
+
package jogamp.opengl.util.av.impl;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import javax.media.opengl.GLProfile;
@@ -41,208 +41,377 @@ import javax.media.opengl.GLProfile;
import com.jogamp.common.os.DynamicLibraryBundle;
import com.jogamp.common.os.DynamicLibraryBundleInfo;
import com.jogamp.common.util.RunnableExecutor;
+import com.jogamp.common.util.VersionNumber;
/**
- * FIXME: We need native structure access methods to deal with API changes
- * in the libav headers, which break binary compatibility!
- * Currently we are binary compatible w/ [0.6 ?, ] 0.7 and 0.8 but not w/ trunk.
- *
- * ChangeList for trunk:
- * Thu Jan 12 11:21:02 2012 a17479dfce67fbea2d0a1bf303010dce1e79059f major 53 -> 54
- * Mon Feb 27 22:40:11 2012 ee42df8a35c2b795f524c856834d0823dbd4e75d reorder AVStream and AVFormatContext
- * Tue Feb 28 12:07:53 2012 322537478b63c6bc01e640643550ff539864d790 minor 1 -> 2
+ * See {@link FFMPEGMediaPlayer#compatibility}.
*/
class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
- private static List<String> glueLibNames = new ArrayList<String>(); // none
-
- private static final int symbolCount = 31;
- private static String[] symbolNames = {
- "avcodec_version",
+ private static final boolean DEBUG = FFMPEGMediaPlayer.DEBUG || DynamicLibraryBundleInfo.DEBUG;
+
+ private static final List<String> glueLibNames = new ArrayList<String>(); // none
+
+ private static final int symbolCount = 65;
+ private static final String[] symbolNames = {
+ "avutil_version",
"avformat_version",
-/* 3 */ "avutil_version",
-
+ "avcodec_version",
+ "avresample_version",
+/* 5 */ "swresample_version",
+
// libavcodec
- "avcodec_close",
- "avcodec_string",
- "avcodec_find_decoder",
- "avcodec_open2", // 53.6.0 (opt)
- "avcodec_open",
- "avcodec_alloc_frame",
- "avcodec_default_get_buffer",
- "avcodec_default_release_buffer",
- "av_free_packet",
+ "avcodec_register_all",
+ "avcodec_close",
+ "avcodec_string",
+ "avcodec_find_decoder",
+ "avcodec_open2", // 53.6.0 (opt)
+ "avcodec_alloc_frame",
+ "avcodec_get_frame_defaults",
+ "avcodec_free_frame", // 54.28.0 (opt)
+ "avcodec_default_get_buffer", // <= 54 (opt), else sp_avcodec_default_get_buffer2
+ "avcodec_default_release_buffer", // <= 54 (opt), else sp_av_frame_unref
+ "avcodec_default_get_buffer2", // 55 (opt)
+ "avcodec_get_edge_width",
+ "av_image_fill_linesizes",
+ "avcodec_align_dimensions",
+ "avcodec_align_dimensions2",
+ "avcodec_flush_buffers",
+ "av_init_packet",
+ "av_new_packet",
+ "av_destruct_packet",
+ "av_free_packet",
"avcodec_decode_audio4", // 53.25.0 (opt)
- "avcodec_decode_audio3", // 52.23.0
-/* 15 */ "avcodec_decode_video2", // 52.23.0
-
+/* 27 */ "avcodec_decode_video2", // 52.23.0
+
// libavutil
- "av_pix_fmt_descriptors",
- "av_free",
-/* 18 */ "av_get_bits_per_pixel",
-
+ "av_pix_fmt_descriptors",
+ "av_frame_unref", // 55.0.0 (opt)
+ "av_realloc",
+ "av_free",
+ "av_get_bits_per_pixel",
+ "av_samples_get_buffer_size",
+ "av_get_bytes_per_sample", // 51.4.0
+ "av_opt_set_int", // 51.12.0
+ "av_dict_get",
+ "av_dict_count", // 54.* (opt)
+ "av_dict_set",
+/* 39 */ "av_dict_free",
+
// libavformat
"avformat_alloc_context",
"avformat_free_context", // 52.96.0 (opt)
"avformat_close_input", // 53.17.0 (opt)
- "av_close_input_file",
- "av_register_all",
- "avformat_open_input",
- "av_dump_format",
+ "av_register_all",
+ "av_find_input_format",
+ "avformat_open_input",
+ "av_dump_format",
"av_read_frame",
"av_seek_frame",
+ "avformat_seek_file", // ??? (opt)
+ "av_read_play",
+ "av_read_pause",
"avformat_network_init", // 53.13.0 (opt)
"avformat_network_deinit", // 53.13.0 (opt)
- "avformat_find_stream_info", // 53.3.0 (opt)
-/* 29 */ "av_find_stream_info",
- };
-
- // alternate symbol names
- private static String[][] altSymbolNames = {
- { "avcodec_open", "avcodec_open2" }, // old, 53.6.0
- { "avcodec_decode_audio3", "avcodec_decode_audio4" }, // old, 53.25.0
- { "av_close_input_file", "avformat_close_input" }, // old, 53.17.0
- { "av_find_stream_info", "avformat_find_stream_info" }, // old, 53.3.0
+/* 54 */ "avformat_find_stream_info", // 53.3.0 (opt)
+
+ // libavdevice
+/* 55 */ "avdevice_register_all", // ???
+
+ // libavresample
+ "avresample_alloc_context", // 1.0.1
+ "avresample_open",
+ "avresample_close",
+ "avresample_free",
+/* 60 */ "avresample_convert",
+
+        // libswresample
+        "av_opt_set_sample_fmt",          // actually lavu .. but exists only w/ swresample!
+ "swr_alloc",
+ "swr_init",
+ "swr_free",
+/* 65 */ "swr_convert",
+
};
-
+
// optional symbol names
- private static String[] optionalSymbolNames = {
- "avformat_free_context", // 52.96.0 (opt)
- "avformat_network_init", // 53.13.0 (opt)
- "avformat_network_deinit", // 53.13.0 (opt)
+ private static final String[] optionalSymbolNames = {
+ "avformat_seek_file", // ??? (opt)
+ "avcodec_free_frame", // 54.28.0 (opt)
+ "av_frame_unref", // 55.0.0 (opt)
+ "av_dict_count", // 54.* (opt)
+ "avcodec_default_get_buffer", // <= 54 (opt), else sp_avcodec_default_get_buffer2
+ "avcodec_default_release_buffer", // <= 54 (opt), else sp_av_frame_unref
+ "avcodec_default_get_buffer2", // 55 (opt)
+
+ // libavdevice
+ "avdevice_register_all", // 53.0.0 (opt)
+
+ // libavresample
+ "avresample_version", // 1.0.1
+ "avresample_alloc_context", // 1.0.1
+ "avresample_open",
+ "avresample_close",
+ "avresample_free",
+ "avresample_convert",
+
+        // libswresample
+        "av_opt_set_sample_fmt", // actually lavu .. but exists only w/ swresample!
+ "swresample_version", // 0
+ "swr_alloc",
+ "swr_init",
+ "swr_free",
+ "swr_convert",
};
-
- private static long[] symbolAddr;
+
+ private static final long[] symbolAddr = new long[symbolCount];
private static final boolean ready;
-
+ private static final boolean libsUFCLoaded;
+ private static final boolean avresampleLoaded; // optional
+ private static final boolean swresampleLoaded; // optional
+ private static final boolean avdeviceLoaded; // optional
+ static final VersionNumber avCodecVersion;
+ static final VersionNumber avFormatVersion;
+ static final VersionNumber avUtilVersion;
+ static final VersionNumber avResampleVersion;
+ static final VersionNumber swResampleVersion;
+ private static final FFMPEGNatives natives;
+
+ private static final int LIB_IDX_UTI = 0;
+ private static final int LIB_IDX_FMT = 1;
+ private static final int LIB_IDX_COD = 2;
+ private static final int LIB_IDX_DEV = 3;
+ private static final int LIB_IDX_AVR = 4;
+ private static final int LIB_IDX_SWR = 5;
+
static {
- // native ffmpeg media player implementation is included in jogl_desktop and jogl_mobile
+ // native ffmpeg media player implementation is included in jogl_desktop and jogl_mobile
GLProfile.initSingleton();
boolean _ready = false;
+ /** util, format, codec, device, avresample, swresample */
+ boolean[] _loaded= new boolean[6];
+ /** util, format, codec, avresample, swresample */
+ VersionNumber[] _versions = new VersionNumber[5];
try {
- _ready = initSymbols();
+ _ready = initSymbols(_loaded, _versions);
} catch (Throwable t) {
t.printStackTrace();
}
- ready = _ready;
- if(!ready) {
- System.err.println("FFMPEG: Not Available");
+ libsUFCLoaded = _loaded[LIB_IDX_UTI] && _loaded[LIB_IDX_FMT] && _loaded[LIB_IDX_COD];
+ avdeviceLoaded = _loaded[LIB_IDX_DEV];
+ avresampleLoaded = _loaded[LIB_IDX_AVR];
+ swresampleLoaded = _loaded[LIB_IDX_SWR];
+ avUtilVersion = _versions[0];
+ avFormatVersion = _versions[1];
+ avCodecVersion = _versions[2];
+ avResampleVersion = _versions[3];
+ swResampleVersion = _versions[4];
+ if(!libsUFCLoaded) {
+            System.err.println("LIB_AV Not Available: lavu, lavf, lavc");
+ natives = null;
+ ready = false;
+ } else if(!_ready) {
+ System.err.println("LIB_AV Not Matching");
+ natives = null;
+ ready = false;
+ } else {
+ if( avCodecVersion.getMajor() == 53 && avFormatVersion.getMajor() == 53 && avUtilVersion.getMajor() == 51 ) {
+ // lavc53.lavf53.lavu51
+ natives = new FFMPEGv08Natives();
+ } else if( avCodecVersion.getMajor() == 54 && avFormatVersion.getMajor() == 54 && avUtilVersion.getMajor() == 52 ) {
+ // lavc54.lavf54.lavu52.lavr01
+ natives = new FFMPEGv09Natives();
+ } else if( avCodecVersion.getMajor() == 55 && avFormatVersion.getMajor() == 55 && avUtilVersion.getMajor() == 52 ) {
+ // lavc55.lavf55.lavu52.lavr01
+ natives = new FFMPEGv10Natives();
+ } else {
+ System.err.println("LIB_AV No Version/Native-Impl Match");
+ natives = null;
+ }
+ if( null != natives && FFMPEGStaticNatives.initIDs0() ) {
+ ready = natives.initSymbols0(symbolAddr, symbolCount);
+ } else {
+ ready = false;
+ }
}
}
-
+
+ static boolean libsLoaded() { return libsUFCLoaded; }
+ static boolean avDeviceLoaded() { return avdeviceLoaded; }
+ static boolean avResampleLoaded() { return avresampleLoaded; }
+ static boolean swResampleLoaded() { return swresampleLoaded; }
+ static FFMPEGNatives getNatives() { return natives; }
static boolean initSingleton() { return ready; }
-
- private static boolean initSymbols() {
- final DynamicLibraryBundle dl = new DynamicLibraryBundle(new FFMPEGDynamicLibraryBundleInfo());
- final boolean avutilLoaded = dl.isToolLibLoaded(0);
- final boolean avformatLoaded = dl.isToolLibLoaded(1);
- final boolean avcodecLoaded = dl.isToolLibLoaded(2);
- if(!avutilLoaded || !avformatLoaded || !avcodecLoaded) {
- throw new RuntimeException("FFMPEG Tool library incomplete: [ avutil "+avutilLoaded+", avformat "+avformatLoaded+", avcodec "+avcodecLoaded+"]");
- }
- if(!dl.isToolLibComplete()) {
- throw new RuntimeException("FFMPEG Tool libraries incomplete");
+
+ /**
+ * @param loaded 6: util, format, codec, device, avresample, swresample
+ * @param versions 5: util, format, codec, avresample, swresample
+     * @return true if all required (non-optional) symbols could be resolved, otherwise false
+ */
+ private static final boolean initSymbols(boolean[] loaded, VersionNumber[] versions) {
+ for(int i=0; i<6; i++) {
+ loaded[i] = false;
+ }
+ final DynamicLibraryBundle dl = AccessController.doPrivileged(new PrivilegedAction<DynamicLibraryBundle>() {
+ @Override
+ public DynamicLibraryBundle run() {
+ return new DynamicLibraryBundle(new FFMPEGDynamicLibraryBundleInfo());
+ } } );
+ dl.toString();
+ for(int i=0; i<6; i++) {
+ loaded[i] = dl.isToolLibLoaded(i);
+ }
+ if( !loaded[LIB_IDX_UTI] || !loaded[LIB_IDX_FMT] || !loaded[LIB_IDX_COD] ) {
+ throw new RuntimeException("FFMPEG Tool library incomplete: [ avutil "+loaded[LIB_IDX_UTI]+", avformat "+loaded[LIB_IDX_FMT]+", avcodec "+loaded[LIB_IDX_COD]+"]");
}
if(symbolNames.length != symbolCount) {
throw new InternalError("XXX0 "+symbolNames.length+" != "+symbolCount);
}
- symbolAddr = new long[symbolCount];
-
+
// optional symbol name set
final Set<String> optionalSymbolNameSet = new HashSet<String>();
optionalSymbolNameSet.addAll(Arrays.asList(optionalSymbolNames));
-
- // alternate symbol name mapping to indexed array
- final Map<String, Integer> mAltSymbolNames = new HashMap<String, Integer>();
- final int[][] iAltSymbolNames = new int[altSymbolNames.length][];
- {
- final List<String> symbolNameList = Arrays.asList(symbolNames);
- for(int i=0; i<altSymbolNames.length; i++) {
- iAltSymbolNames[i] = new int[altSymbolNames[i].length];
- for(int j=0; j<altSymbolNames[i].length; j++) {
- mAltSymbolNames.put(altSymbolNames[i][j], new Integer(i));
- iAltSymbolNames[i][j] = symbolNameList.indexOf(altSymbolNames[i][j]);
- }
- }
- }
-
+
// lookup
- for(int i = 0; i<symbolCount; i++) {
- symbolAddr[i] = dl.dynamicLookupFunction(symbolNames[i]);
- }
-
+ AccessController.doPrivileged(new PrivilegedAction<Object>() {
+ @Override
+ public Object run() {
+ for(int i = 0; i<symbolCount; i++) {
+ symbolAddr[i] = dl.dynamicLookupFunction(symbolNames[i]);
+ }
+ return null;
+ } } );
+
// validate results
+ boolean res = true;
for(int i = 0; i<symbolCount; i++) {
if( 0 == symbolAddr[i] ) {
// no symbol, check optional and alternative symbols
final String symbol = symbolNames[i];
if ( !optionalSymbolNameSet.contains(symbol) ) {
- // check for API changed symbols
- boolean ok = false;
- final Integer cI = mAltSymbolNames.get(symbol);
- if ( null != cI ) {
- // check whether alternative symbol is available
- final int ci = cI.intValue();
- for(int j=0; !ok && j<iAltSymbolNames[ci].length; j++) {
- final int si = iAltSymbolNames[ci][j];
- ok = 0 != symbolAddr[si];
- if(ok && (true || DEBUG )) { // keep it verbose per default for now ..
- System.err.println("OK: Unresolved symbol <"+symbol+">, but has alternative <"+symbolNames[si]+">");
- }
- }
- }
- if(!ok) {
- System.err.println("Fail: Could not resolve symbol <"+symbolNames[i]+">: not optional, no alternatives.");
- return false;
- }
- } else if(true || DEBUG ) { // keep it verbose per default for now ..
+ System.err.println("Fail: Could not resolve symbol <"+symbolNames[i]+">: not optional, no alternatives.");
+ res = false;
+ } else if(DEBUG) {
System.err.println("OK: Unresolved optional symbol <"+symbolNames[i]+">");
}
}
}
- return initSymbols0(symbolAddr, symbolCount);
+ versions[0] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvVersion0(symbolAddr[0]));
+ versions[1] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvVersion0(symbolAddr[1]));
+ versions[2] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvVersion0(symbolAddr[2]));
+ versions[3] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvVersion0(symbolAddr[3]));
+ versions[4] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvVersion0(symbolAddr[4]));
+
+ return res;
}
-
+
protected FFMPEGDynamicLibraryBundleInfo() {
}
@Override
- public boolean shallLinkGlobal() { return true; }
+ public final boolean shallLinkGlobal() { return true; }
+ /**
+ * {@inheritDoc}
+ * <p>
+ * Returns <code>true</code>.
+ * </p>
+ */
@Override
- public boolean shallLookupGlobal() { return true; }
-
+ public final boolean shallLookupGlobal() {
+ return true;
+ }
+
@Override
public final List<String> getGlueLibNames() {
return glueLibNames;
}
@Override
- public List<List<String>> getToolLibNames() {
+ public final List<List<String>> getToolLibNames() {
List<List<String>> libsList = new ArrayList<List<String>>();
+ // 6: util, format, codec, device, avresample, swresample
+
final List<String> avutil = new ArrayList<String>();
avutil.add("avutil"); // default
- avutil.add("avutil-52"); // dummy future proof
+
+ avutil.add("libavutil.so.53"); // dummy future proof
+ avutil.add("libavutil.so.52"); // ffmpeg 1.2 + 2 / libav 9 + 10
+ avutil.add("libavutil.so.51"); // 0.8
+ avutil.add("libavutil.so.50"); // 0.7
+
+ avutil.add("avutil-53"); // dummy future proof
+ avutil.add("avutil-52"); // ffmpeg 1.2 + 2 / libav 9 + 10
avutil.add("avutil-51"); // 0.8
avutil.add("avutil-50"); // 0.7
libsList.add(avutil);
-
+
final List<String> avformat = new ArrayList<String>();
avformat.add("avformat"); // default
- avformat.add("avformat-55"); // dummy future proof
- avformat.add("avformat-54"); // 0.?
+
+ avformat.add("libavformat.so.56"); // dummy future proof
+ avformat.add("libavformat.so.55"); // ffmpeg 2 / libav 10
+ avformat.add("libavformat.so.54"); // ffmpeg 1.2 / libav 9
+ avformat.add("libavformat.so.53"); // 0.8
+ avformat.add("libavformat.so.52"); // 0.7
+
+ avformat.add("avformat-56"); // dummy future proof
+ avformat.add("avformat-55"); // ffmpeg 2 / libav 10
+ avformat.add("avformat-54"); // ffmpeg 1.2 / libav 9
avformat.add("avformat-53"); // 0.8
avformat.add("avformat-52"); // 0.7
libsList.add(avformat);
-
+
final List<String> avcodec = new ArrayList<String>();
avcodec.add("avcodec"); // default
- avcodec.add("avcodec-55"); // dummy future proof
- avcodec.add("avcodec-54"); // 0.?
+
+ avcodec.add("libavcodec.so.56"); // dummy future proof
+ avcodec.add("libavcodec.so.55"); // ffmpeg 2/ libav 10
+ avcodec.add("libavcodec.so.54"); // ffmpeg 1.2 / libav 9
+ avcodec.add("libavcodec.so.53"); // 0.8
+ avcodec.add("libavcodec.so.52"); // 0.7
+
+ avcodec.add("avcodec-56"); // dummy future proof
+ avcodec.add("avcodec-55"); // ffmpeg 2/ libav 10
+ avcodec.add("avcodec-54"); // ffmpeg 1.2 / libav 9
avcodec.add("avcodec-53"); // 0.8
avcodec.add("avcodec-52"); // 0.7
libsList.add(avcodec);
-
+
+ final List<String> avdevice = new ArrayList<String>();
+ avdevice.add("avdevice"); // default
+
+ avdevice.add("libavdevice.so.56"); // dummy future proof
+ avdevice.add("libavdevice.so.55"); // ffmpeg 2
+ avdevice.add("libavdevice.so.54"); // ffmpeg 1.2 / libav 10
+ avdevice.add("libavdevice.so.53"); // 0.8 && libav 9
+
+ avdevice.add("avdevice-56"); // dummy future proof
+ avdevice.add("avdevice-55"); // ffmpeg 2
+ avdevice.add("avdevice-54"); // ffmpeg 1.2 / libav 10
+ avdevice.add("avdevice-53"); // 0.8 && libav 9
+ libsList.add(avdevice);
+
+ final List<String> avresample = new ArrayList<String>();
+ avresample.add("avresample"); // default
+
+ avresample.add("libavresample.so.2"); // dummy future proof
+ avresample.add("libavresample.so.1"); // libav 9 + 10
+
+ avresample.add("avresample-2"); // dummy future proof
+ avresample.add("avresample-1"); // libav 9 + 10
+ libsList.add(avresample);
+
+ final List<String> swresample = new ArrayList<String>();
+ swresample.add("swresample"); // default
+
+ swresample.add("libswresample.so.1"); // dummy future proof
+ swresample.add("libswresample.so.0"); // ffmpeg 1.2 + 2.x
+
+ swresample.add("swresample-1"); // dummy future proof
+ swresample.add("swresample-0"); // ffmpeg 1.2 + 2.x
+ libsList.add(swresample);
+
return libsList;
}
@@ -257,14 +426,12 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
}
@Override
- public boolean useToolGetProcAdressFirst(String funcName) {
+ public final boolean useToolGetProcAdressFirst(String funcName) {
return false;
}
@Override
- public RunnableExecutor getLibLoaderExecutor() {
+ public final RunnableExecutor getLibLoaderExecutor() {
return DynamicLibraryBundle.getDefaultRunnableExecutor();
- }
-
- private static native boolean initSymbols0(long[] symbols, int count);
+ }
}
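The lookup and validation logic above resolves every symbol eagerly and tolerates gaps only for names on the optional list. A self-contained sketch of that pattern; lookup() is a hypothetical dlsym-like stub, not the GlueGen API:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    final class SymbolResolver {
        /** Resolve all names; fail only if a non-optional symbol is missing. */
        static boolean resolve(String[] names, String[] optional, long[] addr) {
            final Set<String> opt = new HashSet<String>(Arrays.asList(optional));
            boolean ok = true;
            for(int i = 0; i < names.length; i++) {
                addr[i] = lookup(names[i]);
                if( 0 == addr[i] && !opt.contains(names[i]) ) {
                    System.err.println("Fail: Could not resolve symbol <"+names[i]+">");
                    ok = false;
                }
            }
            return ok;
        }
        private static long lookup(String name) { return 0; } // hypothetical dlsym stub
    }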
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 7d10cff4d..344ba48ee 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -3,14 +3,14 @@
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
- *
+ *
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
- *
+ *
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
- *
+ *
* THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
@@ -20,7 +20,7 @@
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
+ *
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of JogAmp Community.
@@ -29,514 +29,797 @@
package jogamp.opengl.util.av.impl;
import java.io.IOException;
-import java.nio.Buffer;
import java.nio.ByteBuffer;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
import javax.media.opengl.GL;
import javax.media.opengl.GL2ES2;
import javax.media.opengl.GLException;
+import com.jogamp.common.os.Platform;
+import com.jogamp.common.util.IOUtil;
import com.jogamp.common.util.VersionNumber;
import com.jogamp.gluegen.runtime.ProcAddressTable;
+import com.jogamp.opengl.util.TimeFrameI;
import com.jogamp.opengl.util.GLPixelStorageModes;
+import com.jogamp.opengl.util.av.AudioSink;
+import com.jogamp.opengl.util.av.AudioSink.AudioFormat;
+import com.jogamp.opengl.util.av.AudioSinkFactory;
+import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
-import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.opengl.GLContextImpl;
-import jogamp.opengl.es1.GLES1ProcAddressTable;
-import jogamp.opengl.es2.GLES2ProcAddressTable;
-import jogamp.opengl.gl4.GL4bcProcAddressTable;
-import jogamp.opengl.util.av.EGLMediaPlayerImpl;
+import jogamp.opengl.util.av.GLMediaPlayerImpl;
+import jogamp.opengl.util.av.impl.FFMPEGNatives.PixelFormat;
+import jogamp.opengl.util.av.impl.FFMPEGNatives.SampleFormat;
/***
* Implementation utilizes <a href="http://libav.org/">Libav</a>
- * or <a href="http://ffmpeg.org/">FFmpeg</a> which is ubiquitous
- * available and usually pre-installed on Unix platforms. Due to legal
- * reasons we cannot deploy binaries of it, which contains patented codecs.
+ * or <a href="http://ffmpeg.org/">FFmpeg</a> which are ubiquitously
+ * available and usually pre-installed on Unix platforms.
+ * <p>
+ * Due to legal reasons we cannot deploy binaries of it, which contain patented codecs.
+ * </p>
+ * <p>
* Besides the default BSD/Linux/.. repositories and installations,
- * precompiled binaries can be found at the listed location below.
+ * precompiled binaries can be found at the
+ * <a href="#libavavail">listed location below</a>.
+ * </p>
+ *
+ * <a name="implspecifics"><h5>Implementation specifics</h5></a>
* <p>
- * Implements YUV420P to RGB fragment shader conversion
- * and the usual packed RGB formats.
- * The decoded video frame is written directly into an OpenGL texture
- * on the GPU in it's native format. A custom fragment shader converts
- * the native pixelformat to a usable RGB format if required.
- * Hence only 1 copy is required before bloating the picture
- * from YUV to RGB, for example.
- * </p>
+ * The decoded video frame is written directly into an OpenGL texture
+ * on the GPU in it's native format. A custom fragment shader converts
+ * the native pixelformat to a usable <i>RGB</i> format if required.
+ * Hence only one copy is required before expanding the picture
+ * from <i>YUV*</i> to <i>RGB</i>, for example.
+ * </p>
* <p>
- * Utilizes a slim dynamic and native binding to the Lib_av
+ * Implements pixel format conversion to <i>RGB</i> via
+ * fragment shader texture-lookup functions:
+ * <ul>
+ * <li>{@link PixelFormat#YUV420P}</li>
+ * <li>{@link PixelFormat#YUVJ420P}</li>
+ * <li>{@link PixelFormat#YUV422P}</li>
+ * <li>{@link PixelFormat#YUVJ422P}</li>
+ * <li>{@link PixelFormat#YUYV422}</li>
+ * <li>{@link PixelFormat#BGR24}</li>
+ * </ul>
+ * </p>
+ * <p>
+ *
+ * <a name="libavspecifics"><h5>Libav Specifics</h5></a>
+ * <p>
+ * Utilizes a slim dynamic and native binding to the Lib_av
* libraries:
* <ul>
- * <li>libavutil</li>
- * <li>libavformat</li>
* <li>libavcodec</li>
- * </ul>
+ * <li>libavformat</li>
+ * <li>libavutil</li>
+ * <li>libavresample (opt)</li>
+ * <li>libavdevice (opt)</li>
+ * </ul>
+ * </p>
+ *
+ * <a name="compatibility"><h5>LibAV Compatibility</h5></a>
+ * <p>
+ * Currently we are binary compatible w/:
+ * <table border="1">
+ * <tr><th>libav / ffmpeg</th><th>lavc</th><th>lavf</th><th>lavu</th><th>lavr</th> <th>FFMPEG* class</th></tr>
+ * <tr><td>0.8</td> <td>53</td> <td>53</td> <td>51</td> <td></td> <td>FFMPEGv08</td></tr>
+ * <tr><td>9.0 / 1.2</td> <td>54</td> <td>54</td> <td>52</td> <td>01/00</td> <td>FFMPEGv09</td></tr>
+ * <tr><td>10 / 2</td> <td>55</td> <td>55</td> <td>52</td> <td>01/00</td> <td>FFMPEGv10</td></tr>
+ * </table>
* </p>
* <p>
- * http://libav.org/
+ * See http://upstream-tracker.org/versions/libav.html
* </p>
- * <p>
- * Check tag 'FIXME: Add more planar formats !'
+ * <p>
+ * Check tag 'FIXME: Add more planar formats !'
* here and in the corresponding native code
- * <code>jogl/src/jogl/native/ffmpeg/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c</code>
+ * <code>jogl/src/jogl/native/libav/ffmpeg_impl_template.c</code>
* </p>
+ *
+ *
+ * <a name="todo"><h5>TODO:</h5></a>
* <p>
- * TODO:
* <ul>
- * <li>Audio Output</li>
- * <li>Off thread <i>next frame</i> processing using multiple target textures</li>
- * <li>better pts sync handling</li>
- * <li>fix seek</li>
- * </ul>
+ * <li>better audio synchronization handling? (video is synchronized)</li>
+ * </ul>
* </p>
- * Pre-compiled Libav / FFmpeg packages:
+ *
+ * <a name="libavavail"><h5>FFMPEG / LibAV Availability</h5></a>
+ * <p>
* <ul>
- * <li>Windows: http://ffmpeg.zeranoe.com/builds/</li>
- * <li>MacOSX: http://www.ffmpegx.com/</li>
+ * <li>GNU/Linux: ffmpeg or libav are deployed in most distributions.</li>
+ * <li>Windows:
+ * <ul>
+ * <li>http://ffmpeg.zeranoe.com/builds/ (ffmpeg) <i>recommended, works w/ dshow</i></li>
+ * <li>http://win32.libav.org/releases/ (libav)</li>
+ * </ul></li>
+ * <li>MacOSX: http://ffmpegmac.net/</li>
* <li>OpenIndiana/Solaris:<pre>
* pkg set-publisher -p http://pkg.openindiana.org/sfe-encumbered.
 * pkg install pkg:/video/ffmpeg
* </pre></li>
- * </ul>
+ * </ul>
+ * </p>
*/
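The shader-based conversion described in the class comment above boils down to the standard BT.601 full-range YCbCr to RGB transform per texel. A hedged Java rendition of that per-pixel math; this is illustrative only, not the shader code shipped with this patch:

    // BT.601 full-range YCbCr -> RGB, the math a texture-lookup shader applies.
    // y, u, v are 8-bit samples in [0..255]; results are clamped to [0..255].
    static int[] yuv420pToRgb(int y, int u, int v) {
        final float cb = u - 128f, cr = v - 128f;
        final int r = clamp(Math.round( y + 1.402f    * cr ));
        final int g = clamp(Math.round( y - 0.344136f * cb - 0.714136f * cr ));
        final int b = clamp(Math.round( y + 1.772f    * cb ));
        return new int[] { r, g, b };
    }
    static int clamp(int c) { return c < 0 ? 0 : ( c > 255 ? 255 : c ); }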
-public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
- public static final VersionNumber avUtilVersion;
- public static final VersionNumber avFormatVersion;
- public static final VersionNumber avCodecVersion;
- static final boolean available;
-
+public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
+
+ /** POSIX ENOSYS {@value}: Function not implemented. FIXME: Move to GlueGen ?!*/
+ private static final int ENOSYS = 38;
+
+ // Instance data
+ private static final FFMPEGNatives natives;
+ private static final int avUtilMajorVersionCC;
+ private static final int avFormatMajorVersionCC;
+ private static final int avCodecMajorVersionCC;
+ private static final int avResampleMajorVersionCC;
+ private static final int swResampleMajorVersionCC;
+ private static final boolean available;
+
static {
- if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) {
- avUtilVersion = getAVVersion(getAvUtilVersion0());
- avFormatVersion = getAVVersion(getAvFormatVersion0());
- avCodecVersion = getAVVersion(getAvCodecVersion0());
- System.err.println("LIB_AV Util : "+avUtilVersion);
- System.err.println("LIB_AV Format: "+avFormatVersion);
- System.err.println("LIB_AV Codec : "+avCodecVersion);
- available = initIDs0();
+ final boolean libAVGood = FFMPEGDynamicLibraryBundleInfo.initSingleton();
+ final boolean libAVVersionGood;
+ if( FFMPEGDynamicLibraryBundleInfo.libsLoaded() ) {
+ natives = FFMPEGDynamicLibraryBundleInfo.getNatives();
+ if( null != natives ) {
+ avCodecMajorVersionCC = natives.getAvCodecMajorVersionCC0();
+ avFormatMajorVersionCC = natives.getAvFormatMajorVersionCC0();
+ avUtilMajorVersionCC = natives.getAvUtilMajorVersionCC0();
+ avResampleMajorVersionCC = natives.getAvResampleMajorVersionCC0();
+ swResampleMajorVersionCC = natives.getSwResampleMajorVersionCC0();
+ } else {
+ avUtilMajorVersionCC = 0;
+ avFormatMajorVersionCC = 0;
+ avCodecMajorVersionCC = 0;
+ avResampleMajorVersionCC = 0;
+ swResampleMajorVersionCC = 0;
+ }
+ final VersionNumber avCodecVersion = FFMPEGDynamicLibraryBundleInfo.avCodecVersion;
+ final VersionNumber avFormatVersion = FFMPEGDynamicLibraryBundleInfo.avFormatVersion;
+ final VersionNumber avUtilVersion = FFMPEGDynamicLibraryBundleInfo.avUtilVersion;
+ final VersionNumber avResampleVersion = FFMPEGDynamicLibraryBundleInfo.avResampleVersion;
+ final boolean avResampleLoaded = FFMPEGDynamicLibraryBundleInfo.avResampleLoaded();
+ final VersionNumber swResampleVersion = FFMPEGDynamicLibraryBundleInfo.swResampleVersion;
+ final boolean swResampleLoaded = FFMPEGDynamicLibraryBundleInfo.swResampleLoaded();
+ if( DEBUG ) {
+ System.err.println("LIB_AV Codec : "+avCodecVersion+" [cc "+avCodecMajorVersionCC+"]");
+ System.err.println("LIB_AV Format : "+avFormatVersion+" [cc "+avFormatMajorVersionCC+"]");
+ System.err.println("LIB_AV Util : "+avUtilVersion+" [cc "+avUtilMajorVersionCC+"]");
+ System.err.println("LIB_AV Resample: "+avResampleVersion+" [cc "+avResampleMajorVersionCC+", loaded "+avResampleLoaded+"]");
+ System.err.println("LIB_SW Resample: "+swResampleVersion+" [cc "+swResampleMajorVersionCC+", loaded "+swResampleLoaded+"]");
+ System.err.println("LIB_AV Device : [loaded "+FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded()+"]");
+ System.err.println("LIB_AV Class : "+(null!= natives ? natives.getClass().getSimpleName() : "n/a"));
+ }
+ libAVVersionGood = avCodecMajorVersionCC == avCodecVersion.getMajor() &&
+ avFormatMajorVersionCC == avFormatVersion.getMajor() &&
+ avUtilMajorVersionCC == avUtilVersion.getMajor() &&
+ ( !avResampleLoaded || avResampleMajorVersionCC < 0 || avResampleMajorVersionCC == avResampleVersion.getMajor() ) &&
+ ( !swResampleLoaded || swResampleMajorVersionCC < 0 || swResampleMajorVersionCC == swResampleVersion.getMajor() ) ;
+ if( !libAVVersionGood ) {
+ System.err.println("LIB_AV Not Matching Compile-Time / Runtime Major-Version");
+ }
} else {
- avUtilVersion = null;
- avFormatVersion = null;
- avCodecVersion = null;
- available = false;
+ natives = null;
+ avUtilMajorVersionCC = 0;
+ avFormatMajorVersionCC = 0;
+ avCodecMajorVersionCC = 0;
+ avResampleMajorVersionCC = 0;
+ swResampleMajorVersionCC = 0;
+ libAVVersionGood = false;
}
+ available = libAVGood && libAVVersionGood && null != natives;
}
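
As an aside, the compile-time (CC) vs. runtime major-version gate performed in the static initializer above can be illustrated with a minimal standalone sketch; the class, helper name and version values below are hypothetical, not part of JOGL:

    // Hypothetical sketch of the major-version gate used above.
    public class VersionGateSketch {
        static boolean majorOK(int compiledMajor, int runtimeMajor, boolean loaded) {
            // An optional library that is not loaded (or reports no CC major) never vetoes.
            return !loaded || compiledMajor < 0 || compiledMajor == runtimeMajor;
        }
        public static void main(String[] args) {
            System.out.println(majorOK(54, 54, true));  // true  -> ABI assumed compatible
            System.out.println(majorOK(54, 55, true));  // false -> reject the binding
            System.out.println(majorOK(1, 0, false));   // true  -> optional lib not loaded
        }
    }
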
-
+
public static final boolean isAvailable() { return available; }
- private static VersionNumber getAVVersion(int vers) {
- return new VersionNumber( ( vers >> 16 ) & 0xFF,
- ( vers >> 8 ) & 0xFF,
- ( vers >> 0 ) & 0xFF );
- }
-
- protected long moviePtr = 0;
- protected long procAddrGLTexSubImage2D = 0;
- protected EGLMediaPlayerImpl.EGLTextureFrame lastTex = null;
- protected GLPixelStorageModes psm;
- protected PixelFormat vPixelFmt = null;
- protected int vPlanes = 0;
- protected int vBitsPerPixel = 0;
- protected int vBytesPerPixelPerPlane = 0;
- protected int[] vLinesize = { 0, 0, 0 }; // per plane
- protected int[] vTexWidth = { 0, 0, 0 }; // per plane
- protected int texWidth, texHeight; // overall (stuffing planes in one texture)
- protected ByteBuffer texCopy;
+ //
+ // General
+ //
+
+ private long moviePtr = 0;
+
+ //
+ // Video
+ //
+
+ private String texLookupFuncName = "ffmpegTexture2D";
+ private boolean usesTexLookupShader = false;
+ private PixelFormat vPixelFmt = null;
+ private int vPlanes = 0;
+ private int vBitsPerPixel = 0;
+ private int vBytesPerPixelPerPlane = 0;
+ private int texWidth, texHeight; // overall (stuffing planes in one texture)
+ private String singleTexComp = "r";
+ private final GLPixelStorageModes psm;
+
+ //
+ // Audio
+ //
+
+ private AudioSink.AudioFormat avChosenAudioFormat;
+ private int audioSamplesPerFrameAndChannel = 0;
public FFMPEGMediaPlayer() {
- super(TextureType.GL, false);
if(!available) {
throw new RuntimeException("FFMPEGMediaPlayer not available");
}
- setTextureCount(1);
- moviePtr = createInstance0(true);
+ moviePtr = natives.createInstance0(this, DEBUG_NATIVE);
if(0==moviePtr) {
throw new GLException("Couldn't create FFMPEGInstance");
}
psm = new GLPixelStorageModes();
+ audioSink = null;
}
-
+
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- if(TextureType.GL == texType) {
- final Texture texture = super.createTexImageImpl(gl, idx, tex, texWidth, texHeight, true);
- lastTex = new EGLTextureFrame(null, texture, 0, 0);
- } else {
- throw new InternalError("n/a");
+ protected final void destroyImpl(GL gl) {
+ if (moviePtr != 0) {
+ natives.destroyInstance0(moviePtr);
+ moviePtr = 0;
}
- return lastTex;
+ destroyAudioSink();
}
-
- @Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- lastTex = null;
- super.destroyTexImage(gl, imgTex);
+ private final void destroyAudioSink() {
+ final AudioSink _audioSink = audioSink;
+ if( null != _audioSink ) {
+ audioSink = null;
+ _audioSink.destroy();
+ }
}
-
+
+ public static final String dev_video_linux = "/dev/video";
+
@Override
- protected void destroyImpl(GL gl) {
- if (moviePtr != 0) {
- destroyInstance0(moviePtr);
- moviePtr = 0;
+ protected final void initStreamImpl(int vid, int aid) throws IOException {
+ if(0==moviePtr) {
+ throw new GLException("FFMPEG native instance null");
}
+ if(DEBUG) {
+ System.err.println("initStream: p1 "+this);
+ }
+
+ final String streamLocS = IOUtil.decodeURIIfFilePath(streamLoc);
+ destroyAudioSink();
+ if( GLMediaPlayer.STREAM_ID_NONE == aid ) {
+ audioSink = AudioSinkFactory.createNull();
+ } else {
+ audioSink = AudioSinkFactory.createDefault();
+ }
+ final AudioFormat preferredAudioFormat = audioSink.getPreferredFormat();
+ if(DEBUG) {
+ System.err.println("initStream: p2 preferred "+preferredAudioFormat+", "+this);
+ }
+
+ final boolean isCameraInput = null != cameraPath;
+ final String resStreamLocS;
+ // int rw=640, rh=480, rr=15;
+ int rw=-1, rh=-1, rr=-1;
+ String sizes = null;
+ if( isCameraInput ) {
+ switch(Platform.OS_TYPE) {
+ case ANDROID:
+ // ??
+ case FREEBSD:
+ case HPUX:
+ case LINUX:
+ case SUNOS:
+ resStreamLocS = dev_video_linux + cameraPath;
+ break;
+ case WINDOWS:
+ resStreamLocS = cameraPath;
+ break;
+ case MACOS:
+ case OPENKODE:
+ default:
+ resStreamLocS = streamLocS; // FIXME: ??
+ break;
+ }
+ if( null != cameraProps ) {
+ sizes = cameraProps.get(CameraPropSizeS);
+ int v = getPropIntVal(cameraProps, CameraPropWidth);
+ if( v > 0 ) { rw = v; }
+ v = getPropIntVal(cameraProps, CameraPropHeight);
+ if( v > 0 ) { rh = v; }
+ v = getPropIntVal(cameraProps, CameraPropRate);
+ if( v > 0 ) { rr = v; }
+ }
+ } else {
+ resStreamLocS = streamLocS;
+ }
+ final int aMaxChannelCount = audioSink.getMaxSupportedChannels();
+ final int aPrefSampleRate = preferredAudioFormat.sampleRate;
+ // setStream(..) issues updateAttributes*(..), and defines avChosenAudioFormat, vid, aid, .. etc
+ if(DEBUG) {
+ System.err.println("initStream: p3 cameraPath "+cameraPath+", isCameraInput "+isCameraInput);
+ System.err.println("initStream: p3 stream "+streamLoc+" -> "+streamLocS+" -> "+resStreamLocS);
+ System.err.println("initStream: p3 vid "+vid+", sizes "+sizes+", reqVideo "+rw+"x"+rh+"@"+rr+", aid "+aid+", aMaxChannelCount "+aMaxChannelCount+", aPrefSampleRate "+aPrefSampleRate);
+ }
+ natives.setStream0(moviePtr, resStreamLocS, isCameraInput, vid, sizes, rw, rh, rr, aid, aMaxChannelCount, aPrefSampleRate);
}
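
For camera input the stream location is rewritten per platform as shown above; a condensed sketch of that mapping, with a hypothetical helper name and simplified OS handling:

    // Condensed sketch (hypothetical helper) of the camera location mapping above.
    static String cameraLocation(boolean unixLike, String cameraPath, String streamLocS) {
        if (cameraPath == null) {
            return streamLocS;             // regular stream: use the decoded URI as-is
        }
        return unixLike
            ? "/dev/video" + cameraPath    // e.g. "0" -> "/dev/video0" (v4l2 device node)
            : cameraPath;                  // e.g. Windows/dshow device identifier as-is
    }
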
-
+
@Override
- protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
- final String urlS=urlConn.getURL().toExternalForm();
-
- System.out.println("setURL: p1 "+this);
- setStream0(moviePtr, urlS, -1, -1);
- System.out.println("setURL: p2 "+this);
- int tf;
- switch(vBytesPerPixelPerPlane) {
- case 1: tf = GL2ES2.GL_RED; break;
- case 3: tf = GL2ES2.GL_RGB; break;
- case 4: tf = GL2ES2.GL_RGBA; break;
- default: throw new RuntimeException("Unsupported bytes-per-pixel / plane "+vBytesPerPixelPerPlane);
- }
- setTextureFormat(tf);
- setTextureType(GL.GL_UNSIGNED_BYTE);
- GLContextImpl ctx = (GLContextImpl)gl.getContext();
- ProcAddressTable pt = ctx.getGLProcAddressTable();
- if(pt instanceof GLES2ProcAddressTable) {
- procAddrGLTexSubImage2D = ((GLES2ProcAddressTable)pt)._addressof_glTexSubImage2D;
- } else if(pt instanceof GLES1ProcAddressTable) {
- procAddrGLTexSubImage2D = ((GLES1ProcAddressTable)pt)._addressof_glTexSubImage2D;
- } else if(pt instanceof GL4bcProcAddressTable) {
- procAddrGLTexSubImage2D = ((GL4bcProcAddressTable)pt)._addressof_glTexSubImage2D;
+ if(null == audioSink) {
+ throw new GLException("AudioSink null");
+ }
+ final int audioQueueLimit;
+ if( null != gl && STREAM_ID_NONE != vid ) {
+ final GLContextImpl ctx = (GLContextImpl)gl.getContext();
+ AccessController.doPrivileged(new PrivilegedAction<Object>() {
+ @Override
+ public Object run() {
+ final ProcAddressTable pt = ctx.getGLProcAddressTable();
+ final long procAddrGLTexSubImage2D = pt.getAddressFor("glTexSubImage2D");
+ final long procAddrGLGetError = pt.getAddressFor("glGetError");
+ final long procAddrGLFlush = pt.getAddressFor("glFlush");
+ final long procAddrGLFinish = pt.getAddressFor("glFinish");
+ natives.setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError, procAddrGLFlush, procAddrGLFinish);
+ return null;
+ } } );
+ audioQueueLimit = AudioSink.DefaultQueueLimitWithVideo;
+ } else {
+ audioQueueLimit = AudioSink.DefaultQueueLimitAudioOnly;
+ }
+ if(DEBUG) {
+ System.err.println("initGL: p3 avChosen "+avChosenAudioFormat);
+ }
+
+ if( STREAM_ID_NONE == aid ) {
+ audioSink.destroy();
+ audioSink = AudioSinkFactory.createNull();
+ audioSink.init(AudioSink.DefaultFormat, 0, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
} else {
- throw new InternalError("Unknown ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getClass().getName());
+ final float frameDuration;
+ if( audioSamplesPerFrameAndChannel > 0 ) {
+ frameDuration= avChosenAudioFormat.getSamplesDuration(audioSamplesPerFrameAndChannel);
+ } else {
+ frameDuration = AudioSink.DefaultFrameDuration;
+ }
+ final boolean audioSinkOK = audioSink.init(avChosenAudioFormat, frameDuration, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
+ if( !audioSinkOK ) {
+ System.err.println("AudioSink "+audioSink.getClass().getName()+" does not support "+avChosenAudioFormat+", using Null");
+ audioSink.destroy();
+ audioSink = AudioSinkFactory.createNull();
+ audioSink.init(avChosenAudioFormat, 0, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
+ }
+ }
+ if(DEBUG) {
+ System.err.println("initGL: p4 chosen "+avChosenAudioFormat);
+ System.err.println("initGL: p4 chosen "+audioSink);
+ }
+
+ if( null != gl && STREAM_ID_NONE != vid ) {
+ int tf, tif=GL.GL_RGBA; // texture format and internal format
+ int tt = GL.GL_UNSIGNED_BYTE;
+ switch(vBytesPerPixelPerPlane) {
+ case 1:
+ if( gl.isGL3ES3() ) {
+ // RED is supported on ES3 and >= GL3 [core]; ALPHA is deprecated on core
+ tf = GL2ES2.GL_RED; tif=GL2ES2.GL_RED; singleTexComp = "r";
+ } else {
+ // ALPHA is supported on ES2 and GL2, i.e. <= GL3 [core] or compatibility
+ tf = GL2ES2.GL_ALPHA; tif=GL2ES2.GL_ALPHA; singleTexComp = "a";
+ }
+ break;
+
+ case 2: if( vPixelFmt == PixelFormat.YUYV422 ) {
+ // YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr
+ // Stuffed into RGBA half width texture
+ tf = GL2ES2.GL_RGBA; tif=GL2ES2.GL_RGBA; break;
+ } else {
+ tf = GL2ES2.GL_RG; tif=GL2ES2.GL_RG; break;
+ }
+ case 3: tf = GL2ES2.GL_RGB; tif=GL.GL_RGB; break;
+ case 4: if( vPixelFmt == PixelFormat.BGRA ) {
+ tf = GL2ES2.GL_BGRA; tif=GL.GL_RGBA; break;
+ } else {
+ tf = GL2ES2.GL_RGBA; tif=GL.GL_RGBA; break;
+ }
+ default: throw new RuntimeException("Unsupported bytes-per-pixel / plane "+vBytesPerPixelPerPlane);
+ }
+ setTextureFormat(tif, tf);
+ setTextureType(tt);
+ if(DEBUG) {
+ System.err.println("initGL: p5: video "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane+
+ ", tex "+texWidth+"x"+texHeight+", usesTexLookupShader "+usesTexLookupShader);
+ }
}
}
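
The single-channel branch above exists because GL_ALPHA textures are deprecated on core GL3+ profiles while GL_RED is unavailable on plain ES2/GL2; a condensed sketch of that decision (helper name and string return are hypothetical, for illustration only):

    // Sketch: pick the single-channel texture format plus the shader swizzle component.
    static String[] singleChannelFormat(boolean isGL3ES3) {
        return isGL3ES3
            ? new String[] { "GL_RED",   "r" }   // ES3 / >= GL3 core profile
            : new String[] { "GL_ALPHA", "a" };  // ES2 / GL2 compatibility profile
    }
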
- private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
- int lSz0, int lSz1, int lSz2,
- int tWd0, int tWd1, int tWd2) {
- vPixelFmt = PixelFormat.valueOf(pixFmt);
- vPlanes = planes;
- vBitsPerPixel = bitsPerPixel;
- vBytesPerPixelPerPlane = bytesPerPixelPerPlane;
- vLinesize[0] = lSz0; vLinesize[1] = lSz1; vLinesize[2] = lSz2;
- vTexWidth[0] = tWd0; vTexWidth[1] = tWd1; vTexWidth[2] = tWd2;
-
- switch(vPixelFmt) {
- case YUV420P:
- // YUV420P: Adding U+V on right side of fixed height texture,
- // since width is already aligned by decoder.
- // Y=w*h, Y=w/2*h/2, U=w/2*h/2
- // w*h + 2 ( w/2 * h/2 )
- // w*h + w*h/2
- // 2*w/2 * h
- texWidth = vTexWidth[0] + vTexWidth[1]; texHeight = height;
+ @Override
+ protected final TextureFrame createTexImage(GL gl, int texName) {
+ return new TextureFrame( createTexImageImpl(gl, texName, texWidth, texHeight) );
+ }
+
+ /**
+ * Native callback
+ * Converts the given libav/ffmpeg values to {@link AudioFormat} and returns {@link AudioSink#isSupported(AudioFormat)}.
+ * @param audioSampleFmt ffmpeg/libav audio-sample-format, see {@link SampleFormat}.
+ * @param audioSampleRate sample rate in Hz (1/s)
+ * @param audioChannels number of channels
+ */
+ final boolean isAudioFormatSupported(int audioSampleFmt, int audioSampleRate, int audioChannels) {
+ final SampleFormat avFmt = SampleFormat.valueOf(audioSampleFmt);
+ final AudioFormat audioFormat = avAudioFormat2Local(avFmt, audioSampleRate, audioChannels);
+ final boolean res = audioSink.isSupported(audioFormat);
+ if( DEBUG ) {
+ System.err.println("AudioSink.isSupported: "+res+": av[fmt "+avFmt+", rate "+audioSampleRate+", chan "+audioChannels+"] -> "+audioFormat);
+ }
+ return res;
+ }
+
+    /**
+     * Returns {@link AudioFormat} as converted from the given libav/ffmpeg values.
+     * <p>
+     * The returned format carries the sample size in bits, signedness, fixed- vs. floating-point
+     * encoding and planar vs. packed layout as derived from the sample format;
+     * little-endian byte order is assumed.
+     * </p>
+     * @param audioSampleFmt ffmpeg/libav audio-sample-format, see {@link SampleFormat}.
+     * @param audioSampleRate sample rate in Hz (1/s)
+     * @param audioChannels number of channels
+     */
+ private final AudioFormat avAudioFormat2Local(SampleFormat audioSampleFmt, int audioSampleRate, int audioChannels) {
+ final int sampleSize;
+ boolean planar = true;
+ boolean fixedP = true;
+ final boolean signed;
+ switch( audioSampleFmt ) {
+            case S32:
+                planar = false;
+                // fall through: packed S32 shares sample size and signedness with planar S32P
+            case S32P:
+                sampleSize = 32;
+                signed = true;
+                break;
+            case S16:
+                planar = false;
+                // fall through
+            case S16P:
+                sampleSize = 16;
+                signed = true;
+                break;
+            case U8:
+                planar = false;
+                // fall through
+            case U8P:
+                sampleSize = 8;
+                signed = false;
+                break;
+            case DBL:
+                planar = false;
+                // fall through
+            case DBLP:
+                sampleSize = 64;
+                signed = true;
+                fixedP = false;
break;
- // case PIX_FMT_YUYV422:
- case RGB24:
- case BGR24:
- case ARGB:
- case RGBA:
- case ABGR:
- case BGRA:
- texWidth = vTexWidth[0]; texHeight = height;
+            case FLT:
+                planar = false;
+                // fall through
+            case FLTP:
+                sampleSize = 32;
+                signed = true;
+                fixedP = false;
break;
- default: // FIXME: Add more planar formats !
- throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt);
+ default: // FIXME: Add more formats !
+ throw new IllegalArgumentException("Unsupported sampleformat: "+audioSampleFmt);
+ }
+ return new AudioFormat(audioSampleRate, sampleSize, audioChannels, signed, fixedP, planar, true /* littleEndian */);
+ }
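
The packed/planar pairs above differ only in buffer layout, not in sample size or signedness; a standalone sketch of the same mapping (enum and helper names are hypothetical, not the JOGL API):

    // Standalone sketch of the sample-format mapping above.
    enum Fmt { U8, S16, S32, FLT, DBL, U8P, S16P, S32P, FLTP, DBLP }
    static int sampleBits(Fmt f) {
        switch (f) {
            case U8:  case U8P:  return 8;
            case S16: case S16P: return 16;
            case S32: case S32P:
            case FLT: case FLTP: return 32;
            case DBL: case DBLP: return 64;
            default: throw new IllegalArgumentException(String.valueOf(f));
        }
    }
    // Planar variants carry each channel in its own buffer; packed variants interleave.
    static boolean isPlanar(Fmt f) { return f.ordinal() >= Fmt.U8P.ordinal(); }
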
+
+    /**
+     * Native callback.
+     * @param vid video stream id
+     * @param pixFmt video pixel format, see {@link PixelFormat}
+     * @param planes number of video planes
+     * @param bitsPerPixel video bits per pixel
+     * @param bytesPerPixelPerPlane video bytes per pixel and plane
+     * @param tWd0 texture width of plane 0
+     * @param tWd1 texture width of plane 1
+     * @param tWd2 texture width of plane 2
+     * @param vW video width
+     * @param vH video height
+     * @param aid audio stream id
+     * @param audioSampleFmt audio sample format, see {@link SampleFormat}
+     * @param audioSampleRate sample rate in Hz (1/s)
+     * @param audioChannels number of channels
+     * @param audioSamplesPerFrameAndChannel in audio samples per frame and channel
+     */
+ void setupFFAttributes(int vid, int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
+ int tWd0, int tWd1, int tWd2, int vW, int vH,
+ int aid, int audioSampleFmt, int audioSampleRate,
+ int audioChannels, int audioSamplesPerFrameAndChannel) {
+ // defaults ..
+ vPixelFmt = null;
+ vPlanes = 0;
+ vBitsPerPixel = 0;
+ vBytesPerPixelPerPlane = 0;
+ usesTexLookupShader = false;
+ texWidth = 0; texHeight = 0;
+
+ final int[] vTexWidth = { 0, 0, 0 }; // per plane
+
+ if( STREAM_ID_NONE != vid ) {
+ vPixelFmt = PixelFormat.valueOf(pixFmt);
+ vPlanes = planes;
+ vBitsPerPixel = bitsPerPixel;
+ vBytesPerPixelPerPlane = bytesPerPixelPerPlane;
+ vTexWidth[0] = tWd0; vTexWidth[1] = tWd1; vTexWidth[2] = tWd2;
+
+ switch(vPixelFmt) {
+ case YUVJ420P:
+ case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+ usesTexLookupShader = true;
+ // YUV420P: Adding U+V on right side of fixed height texture,
+ // since width is already aligned by decoder.
+                    // Splitting the texture into 4 quadrants:
+                    //   Y covers the left half (top and bottom quadrants),
+                    //   U the top-right quadrant,
+                    //   V the bottom-right quadrant.
+ // Y=w*h, U=w/2*h/2, V=w/2*h/2
+ // w*h + 2 ( w/2 * h/2 )
+ // w*h + w*h/2
+ texWidth = vTexWidth[0] + vTexWidth[1]; texHeight = vH;
+ break;
+ case YUVJ422P:
+ case YUV422P:
+ usesTexLookupShader = true;
+ // YUV422P: Adding U+V on right side of fixed height texture,
+ // since width is already aligned by decoder.
+                    // Splitting the texture into 4 columns:
+                    //   Y covers columns 1+2,
+                    //   U covers column 3,
+                    //   V covers column 4.
+ texWidth = vTexWidth[0] + vTexWidth[1] + vTexWidth[2]; texHeight = vH;
+ break;
+ case YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr - stuffed into RGBA half width texture
+ case BGR24:
+ usesTexLookupShader = true;
+ texWidth = vTexWidth[0]; texHeight = vH;
+ break;
+
+ case RGB24:
+ case ARGB:
+ case RGBA:
+ case ABGR:
+ case BGRA:
+ usesTexLookupShader = false;
+ texWidth = vTexWidth[0]; texHeight = vH;
+ break;
+ default: // FIXME: Add more formats !
+ throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt);
+ }
}
+
+ // defaults ..
+ final SampleFormat aSampleFmt;
+        avChosenAudioFormat = null;
+ this.audioSamplesPerFrameAndChannel = 0;
+
+ if( STREAM_ID_NONE != aid ) {
+ aSampleFmt = SampleFormat.valueOf(audioSampleFmt);
+ avChosenAudioFormat = avAudioFormat2Local(aSampleFmt, audioSampleRate, audioChannels);
+ this.audioSamplesPerFrameAndChannel = audioSamplesPerFrameAndChannel;
+ } else {
+ aSampleFmt = null;
+ }
+
if(DEBUG) {
- System.err.println("XXX0: fmt "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane);
+ System.err.println("audio: id "+aid+", fmt "+aSampleFmt+", "+avChosenAudioFormat+", aFrameSize/fc "+audioSamplesPerFrameAndChannel);
+ System.err.println("video: id "+vid+", fmt "+vW+"x"+vH+", "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane+", usesTexLookupShader "+usesTexLookupShader);
for(int i=0; i<3; i++) {
- System.err.println("XXX0 "+i+": "+vTexWidth[i]+"/"+vLinesize[i]);
+ System.err.println("video: p["+i+"]: "+vTexWidth[i]);
}
- System.err.println("XXX0 total tex "+texWidth+"x"+texHeight);
+ System.err.println("video: total tex "+texWidth+"x"+texHeight);
+ System.err.println(this.toString());
}
}
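
To make the plane-packing arithmetic above concrete, a worked example assuming a hypothetical 320x240 YUV420P frame whose planes are already width-aligned by the decoder:

    // Worked example (hypothetical 320x240 YUV420P frame):
    // Y plane 320x240; U and V planes 160x120 each.
    int w = 320, h = 240;
    int texWidth  = w + w / 2;   // 480: U/V are packed to the right of Y
    int texHeight = h;           // 240: U in the top-right, V in the bottom-right quadrant
    // 480 * 240 = 115200 texels == 1.5 * w * h, exactly the 12 bpp payload.
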
-
+
+    /**
+     * Native callback. Currently a no-op, i.e. mid-stream video attribute changes are not yet handled here.
+     * @param isInGLOrientation whether the video is delivered in GL (bottom-up) orientation
+     * @param pixFmt video pixel format, see {@link PixelFormat}
+     * @param planes number of video planes
+     * @param bitsPerPixel video bits per pixel
+     * @param bytesPerPixelPerPlane video bytes per pixel and plane
+     * @param tWd0 texture width of plane 0
+     * @param tWd1 texture width of plane 1
+     * @param tWd2 texture width of plane 2
+     * @param vW video width
+     * @param vH video height
+     */
+ void updateVidAttributes(boolean isInGLOrientation, int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
+ int tWd0, int tWd1, int tWd2, int vW, int vH) {
+ }
+
/**
* {@inheritDoc}
- *
+ *
* If this implementation generates a specialized shader,
* it allows the user to override the default function name <code>ffmpegTexture2D</code>.
 * Otherwise the call is delegated to its super class.
*/
@Override
- public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
+ public final String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
if(State.Uninitialized == state) {
throw new IllegalStateException("Instance not initialized: "+this);
}
- if(PixelFormat.YUV420P == vPixelFmt) {
+ if( usesTexLookupShader ) {
if(null != desiredFuncName && desiredFuncName.length()>0) {
- textureLookupFunctionName = desiredFuncName;
+ texLookupFuncName = desiredFuncName;
}
- return textureLookupFunctionName;
+ return texLookupFuncName;
}
- return super.getTextureLookupFunctionName(desiredFuncName);
+ return super.getTextureLookupFunctionName(desiredFuncName);
}
- private String textureLookupFunctionName = "ffmpegTexture2D";
-
+
/**
* {@inheritDoc}
- *
+ *
* Depending on the pixelformat, a specific conversion shader is being created,
- * e.g. YUV420P to RGB. Otherwise the call is delegated to it's super class.
- */
+     * e.g. YUV420P to RGB. Otherwise the call is delegated to its super class.
+ */
@Override
- public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
+ public final String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
if(State.Uninitialized == state) {
throw new IllegalStateException("Instance not initialized: "+this);
}
+ if( !usesTexLookupShader ) {
+ return super.getTextureLookupFragmentShaderImpl();
+ }
final float tc_w_1 = (float)getWidth() / (float)texWidth;
switch(vPixelFmt) {
- case YUV420P:
- return
- "vec4 "+textureLookupFunctionName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
+ case YUVJ420P:
+ case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+ return
+ "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" vec2 u_off = vec2("+tc_w_1+", 0.0);\n"+
" vec2 v_off = vec2("+tc_w_1+", 0.5);\n"+
" vec2 tc_half = texCoord*0.5;\n"+
" float y,u,v,r,g,b;\n"+
- " y = texture2D(image, texCoord).r;\n"+
- " u = texture2D(image, u_off+tc_half).r;\n"+
- " v = texture2D(image, v_off+tc_half).r;\n"+
+ " y = texture2D(image, texCoord)."+singleTexComp+";\n"+
+ " u = texture2D(image, u_off+tc_half)."+singleTexComp+";\n"+
+ " v = texture2D(image, v_off+tc_half)."+singleTexComp+";\n"+
" y = 1.1643*(y-0.0625);\n"+
" u = u-0.5;\n"+
" v = v-0.5;\n"+
" r = y+1.5958*v;\n"+
" g = y-0.39173*u-0.81290*v;\n"+
- " b = y+2.017*u;\n"+
+ " b = y+2.017*u;\n"+
" return vec4(r, g, b, 1);\n"+
"}\n"
- ;
- default: // FIXME: Add more planar formats !
- return super.getTextureLookupFragmentShaderImpl();
- }
- }
-
- @Override
- protected synchronized int getCurrentPositionImpl() {
- return 0!=moviePtr ? getVideoPTS0(moviePtr) : 0;
- }
+ ;
- @Override
- protected synchronized boolean setPlaySpeedImpl(float rate) {
- return true;
+ case YUVJ422P:
+ case YUV422P: ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
+ return
+ "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
+ " vec2 u_off = vec2("+tc_w_1+" , 0.0);\n"+
+ " vec2 v_off = vec2("+tc_w_1+" * 1.5, 0.0);\n"+
+ " vec2 tc_halfw = vec2(texCoord.x*0.5, texCoord.y);\n"+
+ " float y,u,v,r,g,b;\n"+
+ " y = texture2D(image, texCoord)."+singleTexComp+";\n"+
+ " u = texture2D(image, u_off+tc_halfw)."+singleTexComp+";\n"+
+ " v = texture2D(image, v_off+tc_halfw)."+singleTexComp+";\n"+
+ " y = 1.1643*(y-0.0625);\n"+
+ " u = u-0.5;\n"+
+ " v = v-0.5;\n"+
+ " r = y+1.5958*v;\n"+
+ " g = y-0.39173*u-0.81290*v;\n"+
+ " b = y+2.017*u;\n"+
+ " return vec4(r, g, b, 1);\n"+
+ "}\n"
+ ;
+
+ case YUYV422: // < packed YUV 4:2:2, 2 x 16bpp, [Y0 Cb] [Y1 Cr]
+ // Stuffed into RGBA half width texture
+ return
+ "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
+ " "+
+ " float y1,u,y2,v,y,r,g,b;\n"+
+ " vec2 tc_halfw = vec2(texCoord.x*0.5, texCoord.y);\n"+
+ " vec4 yuyv = texture2D(image, tc_halfw).rgba;\n"+
+ " y1 = yuyv.r;\n"+
+ " u = yuyv.g;\n"+
+ " y2 = yuyv.b;\n"+
+ " v = yuyv.a;\n"+
+                      "  y = mix( y1, y2, mod(gl_FragCoord.x, 2.0) ); /* avoid branching! */\n"+
+ " y = 1.1643*(y-0.0625);\n"+
+ " u = u-0.5;\n"+
+ " v = v-0.5;\n"+
+ " r = y+1.5958*v;\n"+
+ " g = y-0.39173*u-0.81290*v;\n"+
+ " b = y+2.017*u;\n"+
+ " return vec4(r, g, b, 1);\n"+
+ "}\n"
+ ;
+ case BGR24:
+ return
+ "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
+ " "+
+ " vec3 bgr = texture2D(image, texCoord).rgb;\n"+
+ " return vec4(bgr.b, bgr.g, bgr.r, 1);\n"+ /* just swizzle */
+ "}\n"
+ ;
+
+ default: // FIXME: Add more formats !
+ throw new InternalError("Add proper mapping of: vPixelFmt "+vPixelFmt+", usesTexLookupShader "+usesTexLookupShader);
+ }
}
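
All shader variants above embed the same BT.601 limited-range YUV-to-RGB math; for reference, the identical single-pixel conversion as a plain Java sketch (helper name is editorial):

    // CPU reference sketch of the shader conversion above (inputs normalized to [0,1]).
    static float[] yuv601ToRgb(float y, float u, float v) {
        y = 1.1643f * (y - 0.0625f);          // expand the 16..235 limited-range luma
        u -= 0.5f;                            // center the chroma components
        v -= 0.5f;
        return new float[] {
            y + 1.5958f * v,                  // R
            y - 0.39173f * u - 0.81290f * v,  // G
            y + 2.017f  * u                   // B
        };
    }
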
@Override
- public synchronized boolean startImpl() {
+ public final boolean playImpl() {
if(0==moviePtr) {
return false;
}
+ final int errno = natives.play0(moviePtr);
+ if( DEBUG_NATIVE && errno != 0 && errno != -ENOSYS) {
+ System.err.println("libav play err: "+errno);
+ }
return true;
}
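
The return-code convention assumed above (0 = success, -ENOSYS = entry point not implemented by this libav build, anything else = genuine error) can be summed up in a one-line predicate; this is an editorial sketch, not an official API contract:

    // Sketch: only non-zero codes other than -ENOSYS are worth reporting.
    static boolean isRealError(int errno) {
        final int ENOSYS = 38;                // POSIX "Function not implemented"
        return errno != 0 && errno != -ENOSYS;
    }
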
- /** @return time position after issuing the command */
@Override
- public synchronized boolean pauseImpl() {
+ public final boolean pauseImpl() {
if(0==moviePtr) {
return false;
}
+ final int errno = natives.pause0(moviePtr);
+ if( DEBUG_NATIVE && errno != 0 && errno != -ENOSYS) {
+ System.err.println("libav pause err: "+errno);
+ }
return true;
}
- /** @return time position after issuing the command */
@Override
- public synchronized boolean stopImpl() {
+ protected final synchronized int seekImpl(int msec) {
if(0==moviePtr) {
- return false;
+ throw new GLException("FFMPEG native instance null");
}
- return true;
+ return natives.seek0(moviePtr, msec);
}
- /** @return time position after issuing the command */
@Override
- protected synchronized int seekImpl(int msec) {
- if(0==moviePtr) {
- throw new GLException("FFMPEG native instance null");
- }
- int pts0 = getVideoPTS0(moviePtr);
- int pts1 = seek0(moviePtr, msec);
- System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1);
- return pts1;
+ protected void preNextTextureImpl(GL gl) {
+ psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
+ gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
}
@Override
- protected TextureSequence.TextureFrame getLastTextureImpl() {
- return lastTex;
+ protected void postNextTextureImpl(GL gl) {
+ psm.restore(gl);
}
-
- private long lastVideoTime = 0;
- private int lastVideoPTS = 0;
- private static final int dt_d = 9;
-
+
@Override
- protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
+ protected final int getNextTextureImpl(GL gl, TextureFrame nextFrame) {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
- }
- if(null != lastTex) {
- psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
- try {
- final Texture tex = lastTex.getTexture();
- gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
- tex.enable(gl);
- tex.bind(gl);
- readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
- } finally {
- psm.restore(gl);
- }
- final int pts = getVideoPTS0(moviePtr); // this frame
- if(blocking) {
- // poor mans video sync .. TODO: off thread 'readNextPackage0(..)' on shared GLContext and multi textures/unit!
- final long now = System.currentTimeMillis();
- final long now_d = now - lastVideoTime;
- final long pts_d = pts - lastVideoPTS;
- final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
- lastVideoTime = now;
- // System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
- if(dt>dt_d) {
- try {
- Thread.sleep(dt-dt_d);
- } catch (InterruptedException e) { }
- } /* else if(0>pts_d) {
- System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
- } */
- }
- lastVideoPTS = pts;
}
- return lastTex;
- }
-
- private void consumeAudio(int len) {
-
+ int vPTS = TimeFrameI.INVALID_PTS;
+ if( null != gl ) {
+ final Texture tex = nextFrame.getTexture();
+ tex.enable(gl);
+ tex.bind(gl);
+ }
+
+        // Try to decode up to 10 packets to find one containing video.
+ for(int i=0; TimeFrameI.INVALID_PTS == vPTS && 10 > i; i++) {
+ vPTS = natives.readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
+ }
+ if( null != nextFrame ) {
+ nextFrame.setPTS(vPTS);
+ }
+ return vPTS;
}
-
- private static native int getAvUtilVersion0();
- private static native int getAvFormatVersion0();
- private static native int getAvCodecVersion0();
- private static native boolean initIDs0();
- private native long createInstance0(boolean verbose);
- private native void destroyInstance0(long moviePtr);
-
- private native void setStream0(long moviePtr, String url, int vid, int aid);
-
- private native int getVideoPTS0(long moviePtr);
-
- private native int getAudioPTS0(long moviePtr);
- private native Buffer getAudioBuffer0(long moviePtr, int plane);
-
- private native int readNextPacket0(long moviePtr, long procAddrGLTexSubImage2D, int texTarget, int texFmt, int texType);
-
- private native int seek0(long moviePtr, int position);
-
- public static enum PixelFormat {
- // NONE= -1,
- YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
- YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
- RGB24, ///< packed RGB 8:8:8, 24bpp, RGBRGB...
- BGR24, ///< packed RGB 8:8:8, 24bpp, BGRBGR...
- YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
- YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
- YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
- YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
- GRAY8, ///< Y , 8bpp
- MONOWHITE, ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
- MONOBLACK, ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
- PAL8, ///< 8 bit with RGB32 palette
- YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of YUV420P and setting color_range
- YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of YUV422P and setting color_range
- YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of YUV444P and setting color_range
- XVMC_MPEG2_MC,///< XVideo Motion Acceleration via common packet passing
- XVMC_MPEG2_IDCT,
- UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
- UYYVYY411, ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
- BGR8, ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
- BGR4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
- BGR4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
- RGB8, ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
- RGB4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
- RGB4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
- NV12, ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
- NV21, ///< as above, but U and V bytes are swapped
-
- ARGB, ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
- RGBA, ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
- ABGR, ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
- BGRA, ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
-
- GRAY16BE, ///< Y , 16bpp, big-endian
- GRAY16LE, ///< Y , 16bpp, little-endian
- YUV440P, ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
- YUVJ440P, ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of YUV440P and setting color_range
- YUVA420P, ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
- VDPAU_H264,///< H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_MPEG1,///< MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_MPEG2,///< MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_WMV3,///< WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_VC1, ///< VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- RGB48BE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
- RGB48LE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
-
- RGB565BE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
- RGB565LE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
- RGB555BE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0
- RGB555LE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0
-
- BGR565BE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
- BGR565LE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
- BGR555BE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1
- BGR555LE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1
-
- VAAPI_MOCO, ///< HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
- VAAPI_IDCT, ///< HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
- VAAPI_VLD, ///< HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
-
- YUV420P16LE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- YUV420P16BE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- YUV422P16LE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- YUV422P16BE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- YUV444P16LE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- YUV444P16BE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- VDPAU_MPEG4, ///< MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- DXVA2_VLD, ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
-
- RGB444LE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0
- RGB444BE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0
- BGR444LE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1
- BGR444BE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1
- Y400A, ///< 8bit gray, 8bit alpha
- BGR48BE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
- BGR48LE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
- YUV420P9BE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- YUV420P9LE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- YUV420P10BE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- YUV420P10LE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- YUV422P10BE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- YUV422P10LE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- YUV444P9BE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- YUV444P9LE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- YUV444P10BE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- YUV444P10LE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- YUV422P9BE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- YUV422P9LE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- VDA_VLD, ///< hardware decoding through VDA
- GBRP, ///< planar GBR 4:4:4 24bpp
- GBRP9BE, ///< planar GBR 4:4:4 27bpp, big endian
- GBRP9LE, ///< planar GBR 4:4:4 27bpp, little endian
- GBRP10BE, ///< planar GBR 4:4:4 30bpp, big endian
- GBRP10LE, ///< planar GBR 4:4:4 30bpp, little endian
- GBRP16BE, ///< planar GBR 4:4:4 48bpp, big endian
- GBRP16LE, ///< planar GBR 4:4:4 48bpp, little endian
- COUNT ///< number of pixel formats in this list
- ;
- public static PixelFormat valueOf(int i) {
- for (PixelFormat fmt : PixelFormat.values()) {
- if(fmt.ordinal() == i) {
- return fmt;
- }
- }
- return null;
+
+ final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
+ setFirstAudioPTS2SCR( audio_pts );
+ if( 1.0f == playSpeed || audioSinkPlaySpeedSet ) {
+ audioSink.enqueueData( audio_pts, sampleData, data_size);
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
new file mode 100644
index 000000000..b4b887bc9
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
@@ -0,0 +1,281 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.av.impl;
+
+import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
+
+/* pp */ abstract class FFMPEGNatives {
+
+ private static final Object mutex_avcodec_openclose_jni = new Object();
+
+ final boolean initSymbols0(long[] symbols, int count) {
+ return initSymbols0(mutex_avcodec_openclose_jni, symbols, count);
+ }
+ abstract boolean initSymbols0(Object mutex_avcodec_openclose, long[] symbols, int count);
+ abstract int getAvUtilMajorVersionCC0();
+ abstract int getAvFormatMajorVersionCC0();
+ abstract int getAvCodecMajorVersionCC0();
+ abstract int getAvResampleMajorVersionCC0();
+ abstract int getSwResampleMajorVersionCC0();
+
+ abstract long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
+ abstract void destroyInstance0(long moviePtr);
+
+ /**
+ * Issues {@link #updateAttributes(int, int, int, int, int, int, int, float, int, int, String, String)}
+ * and {@link #updateAttributes2(int, int, int, int, int, int, int, int, int, int)}.
+ *
+ * @param moviePtr
+ * @param url
+ * @param vid
+ * @param sizes requested video size as string, i.e. 'hd720'. May be null to favor vWidth and vHeight.
+     * @param vWidth requested video width (for camera mode)
+     * @param vHeight requested video height (for camera mode)
+     * @param vRate requested video framerate (for camera mode)
+     * @param aid audio stream id
+     * @param aMaxChannelCount maximum number of audio channels supported by the audio sink
+     * @param aPrefSampleRate preferred audio sample rate in Hz (1/s)
+ */
+ abstract void setStream0(long moviePtr, String url, boolean isCameraInput,
+ int vid, String sizes, int vWidth, int vHeight,
+ int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+
+ abstract void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
+
+ abstract int getVideoPTS0(long moviePtr);
+
+ abstract int getAudioPTS0(long moviePtr);
+
+ /**
+ * @return resulting current video PTS, or {@link TextureFrame#INVALID_PTS}
+ */
+ abstract int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+
+ abstract int play0(long moviePtr);
+ abstract int pause0(long moviePtr);
+ abstract int seek0(long moviePtr, int position);
+
+ /** FFMPEG/libAV Audio Sample Format */
+ public static enum SampleFormat {
+ // NONE = -1,
+ U8, ///< unsigned 8 bits
+ S16, ///< signed 16 bits
+ S32, ///< signed 32 bits
+ FLT, ///< float
+ DBL, ///< double
+
+ U8P, ///< unsigned 8 bits, planar
+ S16P, ///< signed 16 bits, planar
+ S32P, ///< signed 32 bits, planar
+ FLTP, ///< float, planar
+ DBLP, ///< double, planar
+
+ COUNT; ///< Number of sample formats.
+
+ /**
+ * Returns the matching SampleFormat value corresponding to the given SampleFormat's integer ordinal.
+ * <pre>
+ * given:
+ * ordinal = enumValue.ordinal()
+ * reverse:
+ * enumValue = EnumClass.values()[ordinal]
+ * </pre>
+ * @throws IllegalArgumentException if the given ordinal is out of range, i.e. not within [ 0 .. SampleFormat.values().length-1 ]
+ */
+ public static SampleFormat valueOf(int ordinal) throws IllegalArgumentException {
+ final SampleFormat[] all = SampleFormat.values();
+ if( 0 <= ordinal && ordinal < all.length ) {
+ return all[ordinal];
+ }
+ throw new IllegalArgumentException("Ordinal "+ordinal+" out of range of SampleFormat.values()[0.."+(all.length-1)+"]");
+ }
+ };
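
A short usage sketch of the ordinal round-trip documented above:

    // Round-trip: ordinal -> enum -> ordinal.
    SampleFormat f = SampleFormat.valueOf(SampleFormat.FLTP.ordinal());
    assert f == SampleFormat.FLTP;
    // An out-of-range ordinal, e.g. from a mismatched native build, fails fast:
    // SampleFormat.valueOf(99) -> IllegalArgumentException
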
+
+ /** FFMPEG/libAV Pixel Format */
+ public static enum PixelFormat {
+ // NONE= -1,
+ /** planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) */
+ YUV420P,
+ /** packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr ( sharing Cb and Cr w/ 2 pixels )*/
+ YUYV422,
+ /** packed RGB 8:8:8, 24bpp, RGBRGB... */
+ RGB24,
+ /** packed RGB 8:8:8, 24bpp, BGRBGR... */
+ BGR24,
+ /** planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) */
+ YUV422P,
+ /** planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples) */
+ YUV444P,
+ /** planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples) */
+ YUV410P,
+ /** planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) */
+ YUV411P,
+ /** Y, 8bpp */
+ GRAY8,
+ /** Y, 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb */
+ MONOWHITE,
+ /** Y, 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb */
+ MONOBLACK,
+ /** 8 bit with RGB32 palette */
+ PAL8,
+ /** planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of YUV420P and setting color_range */
+ YUVJ420P,
+ /** planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of YUV422P and setting color_range */
+ YUVJ422P,
+ /** planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of YUV444P and setting color_range */
+ YUVJ444P,
+ /** XVideo Motion Acceleration via common packet passing */
+ XVMC_MPEG2_MC,
+        /** XVideo Motion Acceleration, IDCT entry-point */
+ XVMC_MPEG2_IDCT,
+ /** packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1 */
+ UYVY422,
+ /** packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3 */
+ UYYVYY411,
+ /** packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb) */
+ BGR8,
+ /** packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits */
+ BGR4,
+ /** packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb) */
+ BGR4_BYTE,
+ /** packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb) */
+ RGB8,
+ /** packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits */
+ RGB4,
+ /** packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb) */
+ RGB4_BYTE,
+ /** planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) */
+ NV12,
+ /** as above, but U and V bytes are swapped */
+ NV21,
+
+ /** packed ARGB 8:8:8:8, 32bpp, ARGBARGB... */
+ ARGB,
+ /** packed RGBA 8:8:8:8, 32bpp, RGBARGBA... */
+ RGBA,
+ /** packed ABGR 8:8:8:8, 32bpp, ABGRABGR... */
+ ABGR,
+ /** packed BGRA 8:8:8:8, 32bpp, BGRABGRA... */
+ BGRA,
+
+ /** Y, 16bpp, big-endian */
+ GRAY16BE,
+        /** Y, 16bpp, little-endian */
+ GRAY16LE,
+ /** planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples) */
+ YUV440P,
+ /** planar YUV 4:4:0 full scale (JPEG), deprecated in favor of YUV440P and setting color_range */
+ YUVJ440P,
+ /** planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples) */
+ YUVA420P,
+ /** H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+ VDPAU_H264,
+ /** MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+ VDPAU_MPEG1,
+ /** MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+ VDPAU_MPEG2,
+ /** WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+ VDPAU_WMV3,
+ /** VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+ VDPAU_VC1,
+ /** packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian */
+ RGB48BE,
+ /** packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian */
+ RGB48LE,
+
+        /** packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian */
+        RGB565BE,
+        /** packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian */
+        RGB565LE,
+        /** packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0 */
+        RGB555BE,
+        /** packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0 */
+        RGB555LE,
+
+        /** packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian */
+        BGR565BE,
+        /** packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian */
+        BGR565LE,
+        /** packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1 */
+        BGR555BE,
+        /** packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1 */
+        BGR555LE,
+
+        /** HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers */
+        VAAPI_MOCO,
+        /** HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers */
+        VAAPI_IDCT,
+        /** HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+        VAAPI_VLD,
+
+        /** planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian */
+        YUV420P16LE,
+        /** planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian */
+        YUV420P16BE,
+        /** planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian */
+        YUV422P16LE,
+        /** planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian */
+        YUV422P16BE,
+        /** planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian */
+        YUV444P16LE,
+        /** planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian */
+        YUV444P16BE,
+        /** MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+        VDPAU_MPEG4,
+        /** HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer */
+        DXVA2_VLD,
+
+        /** packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0 */
+        RGB444LE,
+        /** packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0 */
+        RGB444BE,
+        /** packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1 */
+        BGR444LE,
+        /** packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1 */
+        BGR444BE,
+        /** 8 bit gray, 8 bit alpha */
+        Y400A,
+        /** packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian */
+        BGR48BE,
+        /** packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian */
+        BGR48LE,
+        /** planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian */
+        YUV420P9BE,
+        /** planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian */
+        YUV420P9LE,
+        /** planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian */
+        YUV420P10BE,
+        /** planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian */
+        YUV420P10LE,
+        /** planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian */
+        YUV422P10BE,
+        /** planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian */
+        YUV422P10LE,
+        /** planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian */
+        YUV444P9BE,
+        /** planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian */
+        YUV444P9LE,
+        /** planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian */
+        YUV444P10BE,
+        /** planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian */
+        YUV444P10LE,
+        /** planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian */
+        YUV422P9BE,
+        /** planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian */
+        YUV422P9LE,
+        /** hardware decoding through VDA */
+        VDA_VLD,
+        /** planar GBR 4:4:4 24bpp */
+        GBRP,
+        /** planar GBR 4:4:4 27bpp, big-endian */
+        GBRP9BE,
+        /** planar GBR 4:4:4 27bpp, little-endian */
+        GBRP9LE,
+        /** planar GBR 4:4:4 30bpp, big-endian */
+        GBRP10BE,
+        /** planar GBR 4:4:4 30bpp, little-endian */
+        GBRP10LE,
+        /** planar GBR 4:4:4 48bpp, big-endian */
+        GBRP16BE,
+        /** planar GBR 4:4:4 48bpp, little-endian */
+        GBRP16LE,
+        /** number of pixel formats in this list */
+        COUNT
+        ;
+ /**
+ * Returns the matching PixelFormat value corresponding to the given PixelFormat's integer ordinal.
+ * <pre>
+ * given:
+ * ordinal = enumValue.ordinal()
+ * reverse:
+ * enumValue = EnumClass.values()[ordinal]
+ * </pre>
+ * @throws IllegalArgumentException if the given ordinal is out of range, i.e. not within [ 0 .. PixelFormat.values().length-1 ]
+ */
+ public static PixelFormat valueOf(int ordinal) throws IllegalArgumentException {
+ final PixelFormat[] all = PixelFormat.values();
+ if( 0 <= ordinal && ordinal < all.length ) {
+ return all[ordinal];
+ }
+ throw new IllegalArgumentException("Ordinal "+ordinal+" out of range of PixelFormat.values()[0.."+(all.length-1)+"]");
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java
new file mode 100644
index 000000000..22a045825
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.av.impl;
+
+import com.jogamp.common.util.VersionNumber;
+
+class FFMPEGStaticNatives {
+ static VersionNumber getAVVersion(int vers) {
+ return new VersionNumber( ( vers >> 16 ) & 0xFF,
+ ( vers >> 8 ) & 0xFF,
+ ( vers >> 0 ) & 0xFF );
+ }
+ static native boolean initIDs0();
+
+ static native int getAvVersion0(long func);
+}
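
libav packs its version triple as major<<16 | minor<<8 | micro; a worked example of the unpacking performed by getAVVersion above, using a hypothetical packed value:

    // Hypothetical packed version 54.35.1:
    int vers  = (54 << 16) | (35 << 8) | 1;   // == 0x362301
    int major = ( vers >> 16 ) & 0xFF;        // 54
    int minor = ( vers >>  8 ) & 0xFF;        // 35
    int micro = ( vers       ) & 0xFF;        // 1
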
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java
new file mode 100644
index 000000000..6bab23f25
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.av.impl;
+
+class FFMPEGv08Natives extends FFMPEGNatives {
+ @Override
+ native boolean initSymbols0(Object mutex_avcodec_openclose, long[] symbols, int count);
+
+ @Override
+ native int getAvUtilMajorVersionCC0();
+
+ @Override
+ native int getAvFormatMajorVersionCC0();
+
+ @Override
+ native int getAvCodecMajorVersionCC0();
+
+ @Override
+ native int getAvResampleMajorVersionCC0();
+
+ @Override
+ native int getSwResampleMajorVersionCC0();
+
+ @Override
+ native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
+
+ @Override
+ native void destroyInstance0(long moviePtr);
+
+ @Override
+ native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+
+ @Override
+ native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
+
+ @Override
+ native int getVideoPTS0(long moviePtr);
+
+ @Override
+ native int getAudioPTS0(long moviePtr);
+
+ @Override
+ native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+
+ @Override
+ native int play0(long moviePtr);
+
+ @Override
+ native int pause0(long moviePtr);
+
+ @Override
+ native int seek0(long moviePtr, int position);
+}
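FFMPEGv08Natives adds no logic of its own: the subclass exists because JNI resolves a native method to a symbol that embeds the declaring class name (e.g. Java_jogamp_opengl_util_av_impl_FFMPEGv08Natives_play0 versus ..._FFMPEGv09Natives_play0, following the standard JNI mangling scheme). Each FFMPEGvXXNatives class can therefore be backed by its own native object compiled against a different FFmpeg major version while sharing the abstract FFMPEGNatives API; the textually identical v09 and v10 classes below repeat the same body for exactly that reason, and a selection sketch follows them.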
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java
new file mode 100644
index 000000000..a48b5f21f
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.av.impl;
+
+class FFMPEGv09Natives extends FFMPEGNatives {
+ @Override
+ native boolean initSymbols0(Object mutex_avcodec_openclose, long[] symbols, int count);
+
+ @Override
+ native int getAvUtilMajorVersionCC0();
+
+ @Override
+ native int getAvFormatMajorVersionCC0();
+
+ @Override
+ native int getAvCodecMajorVersionCC0();
+
+ @Override
+ native int getAvResampleMajorVersionCC0();
+
+ @Override
+ native int getSwResampleMajorVersionCC0();
+
+ @Override
+ native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
+
+ @Override
+ native void destroyInstance0(long moviePtr);
+
+ @Override
+ native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+
+ @Override
+ native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
+
+ @Override
+ native int getVideoPTS0(long moviePtr);
+
+ @Override
+ native int getAudioPTS0(long moviePtr);
+
+ @Override
+ native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+
+ @Override
+ native int play0(long moviePtr);
+
+ @Override
+ native int pause0(long moviePtr);
+
+ @Override
+ native int seek0(long moviePtr, int position);
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv10Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv10Natives.java
new file mode 100644
index 000000000..f35fb29dc
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv10Natives.java
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.av.impl;
+
+class FFMPEGv10Natives extends FFMPEGNatives {
+ @Override
+ native boolean initSymbols0(Object mutex_avcodec_openclose, long[] symbols, int count);
+
+ @Override
+ native int getAvUtilMajorVersionCC0();
+
+ @Override
+ native int getAvFormatMajorVersionCC0();
+
+ @Override
+ native int getAvCodecMajorVersionCC0();
+
+ @Override
+ native int getAvResampleMajorVersionCC0();
+
+ @Override
+ native int getSwResampleMajorVersionCC0();
+
+ @Override
+ native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
+
+ @Override
+ native void destroyInstance0(long moviePtr);
+
+ @Override
+ native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+
+ @Override
+ native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
+
+ @Override
+ native int getVideoPTS0(long moviePtr);
+
+ @Override
+ native int getAudioPTS0(long moviePtr);
+
+ @Override
+ native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+
+ @Override
+ native int play0(long moviePtr);
+
+ @Override
+ native int pause0(long moviePtr);
+
+ @Override
+ native int seek0(long moviePtr, int position);
+}
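Since the three version classes differ only in their concrete type (and hence in the native symbol set they bind to), a loader can probe the installed library majors, e.g. via FFMPEGStaticNatives.getAvVersion0, and instantiate the matching binding. The sketch below is hypothetical; the real dispatch lives in FFMPEGDynamicLibraryBundleInfo/FFMPEGMediaPlayer, and the major-version-to-release mapping in the comments is an assumption:

    // Hypothetical selection of the version-matched binding; the libavformat
    // major numbers used here are assumptions, not values from this changeset.
    static FFMPEGNatives selectNatives(final int avformatMajor) {
        switch (avformatMajor) {
            case 53: return new FFMPEGv08Natives(); // FFmpeg 0.8 era
            case 54: return new FFMPEGv09Natives(); // FFmpeg 0.9/1.0 era
            case 55: return new FFMPEGv10Natives(); // FFmpeg 1.1+ era
            default: throw new UnsupportedOperationException(
                "no binding for libavformat major " + avformatMajor);
        }
    }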
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
index aef98fcde..05a94def8 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
@@ -3,14 +3,14 @@
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
- *
+ *
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
- *
+ *
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
- *
+ *
* THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
@@ -20,7 +20,7 @@
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
+ *
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of JogAmp Community.
@@ -29,11 +29,9 @@
package jogamp.opengl.util.av.impl;
import java.io.IOException;
-import java.net.URL;
import javax.media.opengl.GL;
import javax.media.opengl.GLException;
-import javax.media.opengl.GLProfile;
import com.jogamp.opengl.util.texture.TextureSequence;
@@ -47,18 +45,18 @@ import jogamp.opengl.util.av.EGLMediaPlayerImpl;
*/
public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
static final boolean available;
-
+
static {
- // OMX binding is included in jogl_desktop and jogl_mobile
+ available = false;
+ /** FIXME!
+ // OMX binding is included in jogl_desktop and jogl_mobile
GLProfile.initSingleton();
- available = initIDs0();
+ available = initIDs0(); */
}
-
+
public static final boolean isAvailable() { return available; }
-
+
protected long moviePtr = 0;
-
- protected TextureSequence.TextureFrame lastTex = null;
public OMXGLMediaPlayer() {
super(TextureType.KHRImage, true);
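With available hardcoded to false in the static block above, isAvailable() now reports the OMX path as unusable until the FIXME is resolved. A caller would typically guard construction along these lines (a usage sketch, not code from this changeset):

    // Hypothetical caller: prefer OMX when available, otherwise fall back to
    // another GLMediaPlayer implementation from this changeset.
    final GLMediaPlayer player = OMXGLMediaPlayer.isAvailable()
            ? new OMXGLMediaPlayer()
            : new NullGLMediaPlayer(); // or FFMPEGMediaPlayer, if usable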
@@ -72,51 +70,60 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
moviePtr = _createInstance();
if(0==moviePtr) {
throw new GLException("Couldn't create OMXInstance");
- }
+ }
}
-
+
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- final EGLTextureFrame eglTex = (EGLTextureFrame) super.createTexImage(gl, idx, tex);
- _setStreamEGLImageTexture2D(moviePtr, idx, tex[idx], eglTex.getImage(), eglTex.getSync());
- lastTex = eglTex;
+ protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+ final EGLTextureFrame eglTex = (EGLTextureFrame) super.createTexImage(gl, texName);
+ _setStreamEGLImageTexture2D(moviePtr, texName, eglTex.getImage(), eglTex.getSync());
return eglTex;
}
-
+
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- lastTex = null;
- super.destroyTexImage(gl, imgTex);
+ protected void destroyTexFrame(GL gl, TextureSequence.TextureFrame imgTex) {
+ super.destroyTexFrame(gl, imgTex);
}
-
+
@Override
protected void destroyImpl(GL gl) {
- _detachVideoRenderer(moviePtr);
if (moviePtr != 0) {
+ _stop(moviePtr);
+ _detachVideoRenderer(moviePtr);
_destroyInstance(moviePtr);
moviePtr = 0;
}
}
-
+
@Override
- protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+ protected void initStreamImpl(int vid, int aid) throws IOException {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
- final URL url = urlConn.getURL();
- if(!url.getProtocol().equals("file")) {
- throw new IOException("Only file URLs are allowed: "+url);
+ if(!streamLoc.getScheme().equals("file")) {
+ throw new IOException("Only file schemes are allowed: "+streamLoc);
+ }
+ final String path=streamLoc.getPath();
+ if(DEBUG) {
+ System.out.println("initGLStream: clean path "+path);
+ }
+
+ if(DEBUG) {
+ System.out.println("initGLStream: p1 "+this);
}
- final String path=url.getPath();
- System.out.println("setURL: clean path "+path);
-
- System.out.println("setURL: p1 "+this);
_setStream(moviePtr, textureCount, path);
- System.out.println("setURL: p2 "+this);
+ if(DEBUG) {
+ System.out.println("initGLStream: p2 "+this);
+ }
}
-
@Override
- protected int getCurrentPositionImpl() {
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
+ // NOP
+ isInGLOrientation = true;
+ }
+
+ @Override
+ protected int getAudioPTSImpl() {
return 0!=moviePtr ? _getCurrentPosition(moviePtr) : 0;
}
@@ -130,7 +137,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- public synchronized boolean startImpl() {
+ public synchronized boolean playImpl() {
if(0==moviePtr) {
return false;
}
@@ -150,16 +157,6 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
/** @return time position after issuing the command */
@Override
- public synchronized boolean stopImpl() {
- if(0==moviePtr) {
- return false;
- }
- _stop(moviePtr);
- return true;
- }
-
- /** @return time position after issuing the command */
- @Override
protected int seekImpl(int msec) {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
@@ -168,36 +165,33 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected TextureSequence.TextureFrame getLastTextureImpl() {
- return lastTex;
- }
-
- @Override
- protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
+ protected int getNextTextureImpl(GL gl, TextureFrame nextFrame) {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
- final int nextTex = _getNextTextureID(moviePtr, blocking);
+ final int nextTex = _getNextTextureID(moviePtr, true);
if(0 < nextTex) {
- final TextureSequence.TextureFrame eglImgTex = texFrameMap.get(new Integer(_getNextTextureID(moviePtr, blocking)));
+ // FIXME set pts !
+ /* FIXME
+ final TextureSequence.TextureFrame eglImgTex =
+ texFrameMap.get(new Integer(_getNextTextureID(moviePtr, blocking)));
if(null!=eglImgTex) {
lastTex = eglImgTex;
- }
+ } */
}
- return lastTex;
+ return 0; // FIXME: return pts
}
-
+
private String replaceAll(String orig, String search, String repl) {
- String dest=null;
+ StringBuilder dest = new StringBuilder();
// In case replaceAll / java.util.regex.* is not supported (-> CVM)
int i=0,j;
- dest = new String();
while((j=orig.indexOf(search, i))>=0) {
- dest=dest.concat(orig.substring(i, j));
- dest=dest.concat(repl);
+ dest.append(orig.substring(i, j));
+ dest.append(repl);
i=j+1;
}
- return dest.concat(orig.substring(i, orig.length()));
+ return dest.append(orig.substring(i, orig.length())).toString();
}
private void errorCheckEGL(String s) {
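Note that the StringBuilder rewrite of replaceAll() keeps the original scan logic, including i=j+1, which advances past only one character of the matched token; for a multi-character search the tail of the match leaks into the output (e.g. replacing "cd" with "X" in "abcdef" yields "abXdef"). If this helper were exercised with longer tokens, a corrected variant would advance by the full match length (a sketch, not part of this commit):

    // Corrected sketch: skip the whole matched token so none of it leaks through.
    private static String replaceAllFixed(final String orig, final String search, final String repl) {
        final StringBuilder dest = new StringBuilder();
        int i = 0, j;
        while ((j = orig.indexOf(search, i)) >= 0) {
            dest.append(orig, i, j).append(repl);
            i = j + search.length(); // advance past the full match, not just one char
        }
        return dest.append(orig.substring(i)).toString();
    }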
@@ -208,15 +202,15 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
private static native boolean initIDs0();
- private native long _createInstance();
+ private native long _createInstance();
private native void _destroyInstance(long moviePtr);
-
+
private native void _detachVideoRenderer(long moviePtr); // stop before
private native void _attachVideoRenderer(long moviePtr); // detach before
private native void _setStream(long moviePtr, int textureNum, String path);
private native void _activateStream(long moviePtr);
-
- private native void _setStreamEGLImageTexture2D(long moviePtr, int i, int tex, long image, long sync);
+
+ private native void _setStreamEGLImageTexture2D(long moviePtr, int tex, long image, long sync);
private native int _seek(long moviePtr, int position);
private native void _setPlaySpeed(long moviePtr, float rate);
private native void _play(long moviePtr);