From d0e01cb5c0ec3e48b8a9b9b79a7795b214c6e3ea Mon Sep 17 00:00:00 2001
From: Sven Gothel
Date: Sat, 24 Aug 2013 17:56:49 +0200
Subject: GLMediaPlayer Multithreaded Decoding: GLMediaPlayer* (Part-6) - DONE
Multithreaded decoding and the API should be considered stable by now;
minor changes may still apply if the Android/OMX implementation requires them.
We still need to resolve the TODOs listed below, copied from 474ce65081ecd452215bc07ab866666cb11ca8b1.
+++
- *TextureFrame OO changes:
- TextureFrame extends TimeFrameI
- GLMediaPlayerImpl*
- Adapt to Ringbuffer changes of GlueGen commit f9f881e59c78e3036cb3f956bc97cfc3197f620d
- Fix impl. method's API doc
- getNextTextureImpl(..) returns video PTS
- Fix audio-only playback
- frame dropping shall only happen if (see the render-side sketch after this list):
- previous frame has not been dropped
- frame is too late
- one decoded frame is already available
- Don't block for decoder anymore:
- nextFrame: videoFramesDecoded.getBlocking() -> videoFramesDecoded.get()
'No next decoded frame available' can only mean:
- slow decoding/hardware
- slow transport
hence we shall not block rendering (see the render-side sketch after this list).
- Add DEBUG output if using last frame
- Add integer property 'jogl.debug.GLMediaPlayer.StreamWorker.delay' in milliseconds
to simulate slow decoding, i.e. the delay is added in StreamWorker after decoding
and before pushing the new frame to the Ringbuffer (see the StreamWorker sketch after this list).
- FFMPEGMediaPlayer:
- audioFrameLimitWithVideo 128 -> 64
- audioFrameLimitAudioOnly 128 -> 32
- uses AudioSink's 'enqueueData(int pts, ByteBuffer bytes, int byteCount)'
- fixes for audio-only playback
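Two illustrative sketches of the above (not the actual GLMediaPlayerImpl code: apart from
'videoFramesDecoded' and the property name, all fields and helpers such as DEBUG, lastFrame,
previousFrameDropped and recycleFrame() are assumed, and the ringbuffer is assumed to be
GlueGen's com.jogamp.common.util.Ringbuffer, whose get() returns null when empty).
Render-side, non-blocking fetch with the frame-drop rule:

    private TextureSequence.TextureFrame nextFrameNonBlocking(final int now /* current video time in ms */) {
        // Non-blocking: get() instead of getBlocking(). An empty ringbuffer only
        // means slow decoding/hardware or slow transport, so rendering must not stall.
        TextureSequence.TextureFrame next = videoFramesDecoded.get();
        if (null == next) {
            if (DEBUG) {
                System.err.println("No new decoded frame available, re-using last frame");
            }
            return lastFrame;
        }
        // Drop only if the previous frame was not dropped, this frame is too late
        // and one more decoded frame is already available.
        boolean dropped = false;
        final boolean tooLate = next.getPTS() + next.getDuration() < now;
        if (!previousFrameDropped && tooLate) {
            final TextureSequence.TextureFrame following = videoFramesDecoded.get();
            if (null != following) {
                recycleFrame(next);   // assumed helper: hand the dropped frame back to the free ringbuffer
                next = following;
                dropped = true;
            }
        }
        previousFrameDropped = dropped;
        lastFrame = next;
        return next;
    }

StreamWorker-side, honoring the new debug delay before pushing a decoded frame
(plain Integer.getInteger() is used here instead of JOGL's Debug property helpers):

    private static final int STREAM_WORKER_DELAY_MS =
            Integer.getInteger("jogl.debug.GLMediaPlayer.StreamWorker.delay", 0);

    private void pushDecodedFrame(final TextureSequence.TextureFrame frame) {
        if (STREAM_WORKER_DELAY_MS > 0) {
            try {
                Thread.sleep(STREAM_WORKER_DELAY_MS);   // simulate slow decoding after decode ...
            } catch (final InterruptedException ie) {
                Thread.currentThread().interrupt();
            }
        }
        videoFramesDecoded.put(frame);                  // ... before pushing the new frame to the ringbuffer
    }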
+++
Working Tests: MovieSimple and MovieCube
TODO-1: Fix
- Android
- OMXGLMediaPlayer
TODO-2:
- Fix issue where asynchronous audio frames arrive much later than the 1st video frame, i.e. around 300 ms.
- Default TextureCount .. maybe 3?
- Add audio synchronization? (see the sketch below)
- Find the 'truth' about the correlation of audio and video PTS values;
currently we assume both to be unrelated.
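A sketch of what the questioned audio synchronization could look like (illustrative only;
it assumes audio and video PTS share one time base, which is exactly what the last TODO item
questions, and re-uses the ~22 ms sync threshold mentioned in the previous GLMediaPlayer javadoc):

    static final int AV_SYNC_THRESHOLD_MS = 22;

    /** <0: video lags audio, >0: video runs ahead, 0: considered in sync. */
    static int avSyncDelta(final int videoPTS, final int audioPTS) {
        final int d = videoPTS - audioPTS;
        return Math.abs(d) <= AV_SYNC_THRESHOLD_MS ? 0 : d;
    }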
---
.../com/jogamp/opengl/util/av/GLMediaPlayer.java | 12 ++++------
.../opengl/util/texture/TextureSequence.java | 28 +++++++---------------
2 files changed, 13 insertions(+), 27 deletions(-)
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index 726eddb01..02fbd721c 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -35,6 +35,7 @@ import javax.media.opengl.GLException;
import jogamp.opengl.Debug;
import com.jogamp.opengl.util.texture.TextureSequence;
+import com.jogamp.opengl.util.TimeFrameI;
/**
* GLMediaPlayer interface specifies a {@link TextureSequence} state machine
@@ -116,14 +117,11 @@ import com.jogamp.opengl.util.texture.TextureSequence;
* to be properly considered by {@link GLMediaPlayerFactory#create(ClassLoader, String)}
* and {@link GLMediaPlayerFactory#createDefault()}.
*
+ * Timestamp Accuracy
*
- * Variable type, value range and dimension has been chosen to suit embedded CPUs
- * and characteristics of audio and video streaming.
- * Milliseconds of type integer with a maximum value of {@link Integer#MAX_VALUE}
- * will allow tracking time up 2,147,483.647 seconds or
- * 24 days 20 hours 31 minutes and 23 seconds.
- * Milliseconds granularity is also more than enough to deal with A-V synchronization,
- * where the threshold usually lies within 22ms.
+ *
+ * Timestamp type and value range has been chosen to suit embedded CPUs
+ * and characteristics of audio and video streaming. See {@link TimeFrameI}.
*
*
* Audio and video synchronization
diff --git a/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java b/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java
index e13e5ff13..8b6cc1bf9 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java
@@ -29,6 +29,8 @@ package com.jogamp.opengl.util.texture;
import javax.media.opengl.GL;
+import com.jogamp.opengl.util.TimeFrameI;
+
/**
* Protocol for texture sequences, like animations, movies, etc.
*
@@ -109,35 +111,21 @@ public interface TextureSequence {
* Texture holder interface, maybe specialized by implementation
* to associated related data.
*/
- public static class TextureFrame {
- /** Constant marking an invalid PTS, i.e. Integer.MIN_VALUE == 0x80000000 == {@value}. Sync w/ native code. */
- public static final int INVALID_PTS = 0x80000000;
-
- /** Constant marking the end of the stream PTS, i.e. Integer.MIN_VALUE - 1 == 0x7FFFFFFF == {@value}. Sync w/ native code. */
- public static final int END_OF_STREAM_PTS = 0x7FFFFFFF;
-
+ public static class TextureFrame extends TimeFrameI {
+ public TextureFrame(Texture t, int pts, int duration) {
+ super(pts, duration);
+ texture = t;
+ }
public TextureFrame(Texture t) {
texture = t;
- pts = INVALID_PTS;
- duration = 0;
}
public final Texture getTexture() { return texture; }
- /** Get this frame's presentation timestamp (PTS) in milliseconds. */
- public final int getPTS() { return pts; }
- /** Set this frame's presentation timestamp (PTS) in milliseconds. */
- public final void setPTS(int pts) { this.pts = pts; }
- /** Get this frame's duration in milliseconds. */
- public final int getDuration() { return duration; }
- /** Set this frame's duration in milliseconds. */
- public final void setDuration(int duration) { this.duration = duration; }
public String toString() {
- return "TextureFrame[pts " + pts + " ms, l " + duration + " ms, texID "+ texture.getTextureObject() + "]";
+ return "TextureFrame[pts " + pts + " ms, l " + duration + " ms, texID "+ (null != texture ? texture.getTextureObject() : 0) + "]";
}
protected final Texture texture;
- protected int pts;
- protected int duration;
}
public interface TexSeqEventListener {
--
cgit v1.2.3
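For reference, a minimal usage sketch of the reworked TextureFrame (texture target, timestamps
and class name are illustrative; it assumes the INVALID_PTS/END_OF_STREAM_PTS constants removed
above now live in TimeFrameI):

    import javax.media.opengl.GL;
    import com.jogamp.opengl.util.TimeFrameI;
    import com.jogamp.opengl.util.texture.Texture;
    import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

    public class TextureFrameSketch {
        public static void main(final String[] args) {
            final Texture tex = new Texture(GL.GL_TEXTURE_2D);
            // New constructor carrying pts and duration in milliseconds via TimeFrameI.
            final TextureFrame frame = new TextureFrame(tex, 40 /* pts ms */, 40 /* duration ms */);
            System.out.println(frame);  // toString() now also tolerates a null texture
            System.out.println("invalid pts marker: " + TimeFrameI.INVALID_PTS);
        }
    }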