From 38e51e4a5f6f35c658df10f6d48a33e3ffaea2f3 Mon Sep 17 00:00:00 2001
From: Sven Gothel <sgothel@jausoft.com>
Date: Mon, 7 Jul 2014 23:46:19 +0200
Subject: Bug 1021: Add GenericStereoDevice* supporting custom configurations;
 hook in oculusvr-sdk Java distortion-mesh calculation if available

StereoDeviceFactory supports the new GenericStereoDeviceFactory with its GenericStereoDevice and GenericStereoDeviceRenderer.

GenericStereoDevice maintains different configurations, selected either by passing a GenericStereoDevice.Config
instance directly or via the device-index parameter:

  - 0: monoscopic device: No post-processing

  - 1: stereoscopic device SBS: No post-processing

  - 2: stereoscopic device SBS + Lenses: Distortion post-processing
       (only available w/ oculusvr-sdk sub-module)

A 'GenericStereoDevice.Config' instance is self-contained
and may be extended to support more device types such as top-bottom, interlaced, etc.

StereoDemo01 handles all use cases and may be used as a test bed
for adding and experimenting with stereoscopy, devices and settings.
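
For illustration, a minimal usage sketch (not part of this patch; it only relies
on the GenericStereoDeviceFactory.createDevice(..) signature added here, and
'myCustomConfig' stands for any hypothetical GenericStereoDevice.Config instance):

    // Select a built-in configuration via device index:
    //   0 -> config01Mono01, 1 -> config02StereoSBS01,
    //   2 -> config03StereoSBSLense01 (falls back to SBS w/o oculusvr-sdk)
    final GenericStereoDeviceFactory factory = new GenericStereoDeviceFactory();
    final StereoDevice sbsDevice = factory.createDevice(1, null, true /* verbose */);

    // Or pass a custom configuration instance directly:
    final StereoDevice customDevice = factory.createDevice(0, myCustomConfig, true);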
---
 .../jogamp/opengl/util/stereo/DistortionMesh.java  |  95 ++++
 .../opengl/util/stereo/GenericStereoDevice.java    | 457 ++++++++++++++++
 .../util/stereo/GenericStereoDeviceFactory.java    |  43 ++
 .../util/stereo/GenericStereoDeviceRenderer.java   | 605 +++++++++++++++++++++
 .../opengl/util/stereo/ScaleAndOffset2D.java       | 107 ++++
 .../opengl/util/stereo/shader/dist01_chroma.fp     |  26 +
 .../opengl/util/stereo/shader/dist01_chroma.vp     |  33 ++
 .../opengl/util/stereo/shader/dist01_plain.fp      |  22 +
 .../opengl/util/stereo/shader/dist01_plain.vp      |  27 +
 .../opengl/util/stereo/shader/dist01_timewarp.vp   |  44 ++
 .../util/stereo/shader/dist01_timewarp_chroma.vp   |  65 +++
 11 files changed, 1524 insertions(+)
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp
 create mode 100644 src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp

(limited to 'src/jogl/classes/jogamp/opengl/util/stereo')
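
Note (illustration only, not part of this patch): a rough sketch of constructing a
custom 'GenericStereoDevice.Config', mirroring the built-in mono configuration and
using the Dimension, EyeParameter, FovHVHalves and FloatUtil types already imported
by GenericStereoDevice.java; all numeric values below are placeholders, not
measured device data:

    final float d2r = FloatUtil.PI / 180.0f;
    final GenericStereoDevice.Config myCustomConfig = new GenericStereoDevice.Config(
            "Custom01Mono",                          // name
            GenericStereoDevice.ShutterType.Global,  // shutter type
            new Dimension(1920, 1080),               // surface size [pixel]
            new float[] { 0.310f, 0.174f },          // screen size [m] (placeholder)
            new Dimension(1920, 1080),               // eye texture size
            0.174f / 2f,                             // pupil center from screen top [m]
            0.0635f,                                 // IPD [m]
            new int[] { 0 },                         // eye render order
            new EyeParameter[] {
                new EyeParameter(0, new float[] { 0f, 0f, 3f } /* eye position offset */,
                                 FovHVHalves.byFovyRadianAndAspect(45f * d2r, 1920f / 1080f),
                                 0f /* distNoseToPupil */, 0f /* verticalDelta */, 0f /* eyeRelief */) },
            null,                                    // no distortion-mesh producer, no post-processing
            0, 0, 0);                                // supported/recommended/minimum distortion bits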

diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java b/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java
new file mode 100644
index 000000000..7a2483121
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java
@@ -0,0 +1,95 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ *    1. Redistributions of source code must retain the above copyright notice, this list of
+ *       conditions and the following disclaimer.
+ *
+ *    2. Redistributions in binary form must reproduce the above copyright notice, this list
+ *       of conditions and the following disclaimer in the documentation and/or other materials
+ *       provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import com.jogamp.opengl.util.stereo.EyeParameter;
+
+public class DistortionMesh {
+    public static interface Producer {
+        /** Initialize */
+        void init(final GenericStereoDevice.Config deviceConfig, final float[] eyeReliefInMeters);
+
+        /** Create a {@link DistortionMesh} for the given eye parameter and distortion bits. */
+        DistortionMesh create(final EyeParameter eyeParam, final int distortionBits);
+    }
+    public static class DistortionVertex {
+        /** {@value} */
+        public static final int def_pos_size = 2;
+        /** {@value} */
+        public static final int def_vignetteFactor_size = 1;
+        /** {@value} */
+        public static final int def_timewarpFactor_size = 1;
+        /** {@value} */
+        public static final int def_texR_size = 2;
+        /** {@value} */
+        public static final int def_texG_size = 2;
+        /** {@value} */
+        public static final int def_texB_size = 2;
+
+        /** {@value} */
+        public static final int def_total_size = def_pos_size + def_vignetteFactor_size + def_timewarpFactor_size +
+                                                 def_texR_size + def_texG_size + def_texB_size;
+
+        public DistortionVertex(final float[] data, final int pos_size,
+                                final int vignetteFactor_size, final int timewarpFactor_size, final int texR_size,
+                                final int texG_size, final int texB_size) {
+            this.data = data;
+            this.pos_size = pos_size;
+            this.vignetteFactor_size = vignetteFactor_size;
+            this.timewarpFactor_size = timewarpFactor_size;
+            this.texR_size = texR_size;
+            this.texG_size = texG_size;
+            this.texB_size = texB_size;
+        }
+        final float[] data;
+
+        /** Usually {@link #def_pos_size} */
+        final int pos_size;
+        /** Usually {@link #def_vignetteFactor_size} */
+        final int vignetteFactor_size;
+        /** Usually {@link #def_timewarpFactor_size} */
+        final int timewarpFactor_size;
+        /** Usually {@link #def_texR_size} */
+        final int texR_size;
+        /** Usually {@link #def_texG_size} */
+        final int texG_size;
+        /** Usually {@link #def_texB_size} */
+        final int texB_size;
+    }
+    public DistortionMesh(final DistortionMesh.DistortionVertex[] vertices, final int vertexCount,
+                          final short[] indices, final int indexCount) {
+        this.vertices = vertices;
+        this.vertexCount = vertexCount;
+        this.indices = indices;
+        this.indexCount = indexCount;
+    }
+    final DistortionMesh.DistortionVertex[] vertices;
+    final int vertexCount;
+    final short[] indices;
+    final int indexCount;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java
new file mode 100644
index 000000000..d7fb95d91
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java
@@ -0,0 +1,457 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ *    1. Redistributions of source code must retain the above copyright notice, this list of
+ *       conditions and the following disclaimer.
+ *
+ *    2. Redistributions in binary form must reproduce the above copyright notice, this list
+ *       of conditions and the following disclaimer in the documentation and/or other materials
+ *       provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import java.util.Arrays;
+
+import javax.media.nativewindow.util.Dimension;
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.Point;
+import javax.media.nativewindow.util.PointImmutable;
+import javax.media.nativewindow.util.Rectangle;
+import javax.media.nativewindow.util.RectangleImmutable;
+
+import com.jogamp.common.util.ReflectionUtil;
+import com.jogamp.opengl.math.FloatUtil;
+import com.jogamp.opengl.math.FovHVHalves;
+import com.jogamp.opengl.util.stereo.EyeParameter;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+import com.jogamp.opengl.util.stereo.StereoUtil;
+
+public class GenericStereoDevice implements StereoDevice {
+    public static enum ShutterType {
+        Global, RollingLeftToRight, RollingRightToLeft, RollingTopToBottom
+    }
+    public static class Config extends StereoDevice.Config {
+        public Config(final String name,
+                      final ShutterType shutterType,
+                      final DimensionImmutable surfaceSizeInPixels,
+                      final float[] screenSizeInMeters,
+                      final DimensionImmutable eyeTextureSize,
+                      final float pupilCenterFromScreenTopInMeters,
+                      final float interpupillaryDistanceInMeters,
+                      final int[] eyeRenderOrder,
+                      final EyeParameter[] defaultEyeParam,
+                      final DistortionMesh.Producer distortionMeshProducer,
+                      final int supportedDistortionBits,
+                      final int recommendedDistortionBits,
+                      final int minimumDistortionBits
+                      ) {
+            this.name = name;
+            this.shutterType = shutterType;
+            this.surfaceSizeInPixels = surfaceSizeInPixels;
+            this.screenSizeInMeters = screenSizeInMeters;
+            this.eyeTextureSize = eyeTextureSize;
+            this.pupilCenterFromScreenTopInMeters = pupilCenterFromScreenTopInMeters;
+            this.interpupillaryDistanceInMeters = interpupillaryDistanceInMeters;
+            this.eyeRenderOrder = eyeRenderOrder;
+            this.defaultEyeParam = defaultEyeParam;
+            this.distortionMeshProducer = distortionMeshProducer;
+            this.supportedDistortionBits = supportedDistortionBits;
+            this.recommendedDistortionBits = recommendedDistortionBits;
+            this.minimumDistortionBits = minimumDistortionBits;
+            this.pupilCenterFromTopLeft = new float[2][2];
+            calcPupilCenterFromTopLeft();
+        }
+        /** A variation w/ different surface/screen specs */
+        public Config(final Config source,
+                      final DimensionImmutable surfaceSizeInPixels,
+                      final float[] screenSizeInMeters,
+                      final DimensionImmutable eyeTextureSize) {
+            this.name = source.name;
+            this.shutterType = source.shutterType;
+            this.surfaceSizeInPixels = surfaceSizeInPixels;
+            this.screenSizeInMeters = screenSizeInMeters;
+            this.eyeTextureSize = eyeTextureSize;
+            this.pupilCenterFromScreenTopInMeters = source.pupilCenterFromScreenTopInMeters;
+            this.interpupillaryDistanceInMeters = source.interpupillaryDistanceInMeters;
+            this.eyeRenderOrder = source.eyeRenderOrder;
+            this.defaultEyeParam = source.defaultEyeParam;
+            this.distortionMeshProducer = source.distortionMeshProducer;
+            this.supportedDistortionBits = source.supportedDistortionBits;
+            this.recommendedDistortionBits = source.recommendedDistortionBits;
+            this.minimumDistortionBits = source.minimumDistortionBits;
+            this.pupilCenterFromTopLeft = new float[2][2];
+            calcPupilCenterFromTopLeft();
+        }
+        private void calcPupilCenterFromTopLeft() {
+            final float visibleWidthOfOneEye = 0.5f * screenSizeInMeters[0];
+            final float leftPupilCenterFromLeftInMeters = ( screenSizeInMeters[0] - interpupillaryDistanceInMeters ) * 0.5f;
+            final float rightPupilCenterFromMiddleInMeters = leftPupilCenterFromLeftInMeters + interpupillaryDistanceInMeters - visibleWidthOfOneEye;
+            pupilCenterFromTopLeft[0][0] = leftPupilCenterFromLeftInMeters / visibleWidthOfOneEye;
+            pupilCenterFromTopLeft[0][1] = pupilCenterFromScreenTopInMeters     / screenSizeInMeters[1];
+            pupilCenterFromTopLeft[1][0] = rightPupilCenterFromMiddleInMeters / visibleWidthOfOneEye;
+            pupilCenterFromTopLeft[1][1] =  pupilCenterFromTopLeft[0][1];
+        }
+
+        /**
+         * Return the vertical pupil center from the screen top in the range [0..1].
+         * @param screenHeightInMeters
+         * @param pupilCenterFromScreenTopInMeters
+         */
+        public static float getVertPupilCenterFromTop(final float screenHeightInMeters, final float pupilCenterFromScreenTopInMeters) {
+            return pupilCenterFromScreenTopInMeters / screenHeightInMeters;
+        }
+
+        /**
+         * Return the horizontal pupil center from the left side for both eyes in the range [0..1].
+         * <pre>
+            <-------------left eye------------->|                       |<-----------right eye-------------->
+            <------------------------------------screenSizeInMeters.Width----------------------------------->
+                                       <------interpupillaryDistanceInMeters------>
+            <--centerFromLeftInMeters->
+                                       ^
+                                 center of pupil
+         * </pre>
+         *
+         * @param screenWidthInMeters
+         * @param interpupillaryDistanceInMeters
+         */
+        public static float[] getHorizPupilCenterFromLeft(final float screenWidthInMeters, final float interpupillaryDistanceInMeters) {
+            final float visibleWidthOfOneEye = 0.5f * screenWidthInMeters;
+            final float leftPupilCenterFromLeftInMeters = ( screenWidthInMeters - interpupillaryDistanceInMeters ) * 0.5f;
+            final float rightPupilCenterFromMiddleInMeters = leftPupilCenterFromLeftInMeters + interpupillaryDistanceInMeters - visibleWidthOfOneEye;
+            return new float[] { leftPupilCenterFromLeftInMeters    / visibleWidthOfOneEye,
+                                 rightPupilCenterFromMiddleInMeters / visibleWidthOfOneEye };
+        }
+
+        private void init() {
+            final float[] eyeReliefInMeters = new float[defaultEyeParam.length];
+            if( 0 < defaultEyeParam.length ) {
+                eyeReliefInMeters[0] = defaultEyeParam[0].eyeReliefZ;
+            }
+            if( 1 < defaultEyeParam.length ) {
+                eyeReliefInMeters[1] = defaultEyeParam[1].eyeReliefZ;
+            }
+            if( null != distortionMeshProducer ) {
+                distortionMeshProducer.init(this, eyeReliefInMeters);
+            }
+        }
+
+        @Override
+        public String toString() { return "StereoConfig["+name+", shutter "+shutterType+", surfaceSize "+surfaceSizeInPixels+
+                                   ", screenSize "+screenSizeInMeters[0]+" x "+screenSizeInMeters[1]+
+                                   " [m], eyeTexSize "+eyeTextureSize+", IPD "+interpupillaryDistanceInMeters+
+                                   " [m], eyeParam "+Arrays.toString(defaultEyeParam)+
+                                   ", distortionBits[supported ["+StereoUtil.distortionBitsToString(supportedDistortionBits)+
+                                                  "], recommended ["+StereoUtil.distortionBitsToString(recommendedDistortionBits)+
+                                                  "], minimum ["+StereoUtil.distortionBitsToString(minimumDistortionBits)+"]]]";
+        }
+
+        /** Configuration Name */
+        public final String name;
+        public final ShutterType shutterType;
+
+        public final DimensionImmutable surfaceSizeInPixels;
+        public final float[] screenSizeInMeters;
+        /** Texture size per eye */
+        public final DimensionImmutable eyeTextureSize;
+
+        /** Vertical distance from pupil to screen-top in meters */
+        public final float pupilCenterFromScreenTopInMeters;
+        /** Horizontal interpupillary distance (IPD) in meters */
+        public final float interpupillaryDistanceInMeters;
+        /**
+         * Pupil center from top left per eye, in the range [0..1]; may be used to produce FovHVHalves,
+         * see {@link #getHorizPupilCenterFromLeft(float, float)} and {@link #getVertPupilCenterFromTop(float, float)}.
+         */
+        public final float[/*per-eye*/][/*xy*/] pupilCenterFromTopLeft;
+        public final int[] eyeRenderOrder;
+        public final EyeParameter[] defaultEyeParam;
+        public final DistortionMesh.Producer distortionMeshProducer;
+
+        public final int supportedDistortionBits;
+        public final int recommendedDistortionBits;
+        public final int minimumDistortionBits;
+    }
+
+    /** A mono view configuration, only one eye is supported */
+    public static final Config config01Mono01;
+
+    /** A default stereo SBS view configuration */
+    public static final Config config02StereoSBS01;
+
+    /** A default stereo SBS lens view configuration, using settings similar to the OculusVR DK1 */
+    public static final Config config03StereoSBSLense01;
+
+    private static final Config[] configs;
+
+    static {
+        final float[] DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES = { 0.0f, 1.6f, -5.0f };  // 1.6 up, 5 forward
+        final float[] DEFAULT_EYE_POSITION_OFFSET_STEREO        = { 0.0f, 0.3f,  3.0f };  // 0.3 up, 3 back
+        final float[] DEFAULT_EYE_POSITION_OFFSET_MONO          = { 0.0f, 0.0f,  3.0f };  //         3 back
+
+        final float d2r = FloatUtil.PI / 180.0f;
+        {
+            config01Mono01 = new Config(
+                            "Def01Mono01",
+                            ShutterType.RollingTopToBottom,
+                            new Dimension(1280, 800),          // resolution
+                            new float[] { 0.1498f, 0.0936f },  // screenSize [m]
+                            new Dimension(1280, 800),          // eye textureSize
+                            0.0936f/2f,                        // pupilCenterFromScreenTop [m]
+                            0.0635f,                           // IPD [m]
+                            new int[] { 0 },                   // eye order
+                            new EyeParameter[] {
+                                new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_MONO,
+                                                 // degrees: 45/2 l, 45/2 r, 45/2 * aspect t, 45/2 * aspect b
+                                                 FovHVHalves.byFovyRadianAndAspect(45f*d2r, 1280f / 800f),
+                                                 0f /* distNoseToPupil */, 0f /* verticalDelta */, 0f /* eyeReliefInMeters */) },
+                            null, // distortion mesh producer
+                            0,    // supported distortion bits
+                            0,    // recommended distortion bits
+                            0     // minimum distortion bits
+                            );
+        }
+
+        {
+            final DimensionImmutable surfaceSizeInPixel = new Dimension(1280, 800);
+            final float[] screenSizeInMeters = new float[] { 0.1498f, 0.0936f };
+            final float interpupillaryDistanceInMeters = 0.0635f;
+            final float pupilCenterFromScreenTopInMeters = screenSizeInMeters[1] / 2f;
+            final float[] horizPupilCenterFromLeft = Config.getHorizPupilCenterFromLeft(screenSizeInMeters[0], interpupillaryDistanceInMeters);
+            final float vertPupilCenterFromTop = Config.getVertPupilCenterFromTop(screenSizeInMeters[1], pupilCenterFromScreenTopInMeters);
+            final float fovy = 45f;
+            final float aspect = ( surfaceSizeInPixel.getWidth() / 2.0f ) / surfaceSizeInPixel.getHeight();
+            final FovHVHalves defaultSBSEyeFovLeft = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[0]);
+            final FovHVHalves defaultSBSEyeFovRight = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[1]);
+
+            config02StereoSBS01 = new Config(
+                            "Def02StereoSBS01",
+                            ShutterType.RollingTopToBottom,
+                            surfaceSizeInPixel,                // resolution
+                            screenSizeInMeters,                // screenSize [m]
+                            new Dimension(1280/2, 800),        // eye textureSize
+                            0.0936f/2f,                        // pupilCenterFromScreenTop [m]
+                            interpupillaryDistanceInMeters,    // IPD [m]
+                            new int[] { 0, 1 },                // eye order
+                            new EyeParameter[] {
+                                new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_STEREO, defaultSBSEyeFovLeft,
+                                              0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */),
+                                new EyeParameter(1, DEFAULT_EYE_POSITION_OFFSET_STEREO, defaultSBSEyeFovRight,
+                                             -0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */) },
+                            null,   // distortion mesh producer
+                            0,      // supported distortion bits
+                            0,      // recommended distortion bits
+                            0       // minimum distortion bits
+                            );
+        }
+
+        {
+            DistortionMesh.Producer lenseDistMeshProduce = null;
+            try {
+                lenseDistMeshProduce =
+                    (DistortionMesh.Producer)
+                    ReflectionUtil.createInstance("jogamp.opengl.oculusvr.stereo.lense.DistortionMeshProducer", GenericStereoDevice.class.getClassLoader());
+            } catch (final Throwable t) {
+                if(StereoDevice.DEBUG) { System.err.println("Caught: "+t.getMessage()); t.printStackTrace(); }
+            }
+
+            final DimensionImmutable surfaceSizeInPixel = new Dimension(1280, 800);
+            final float[] screenSizeInMeters = new float[] { 0.1498f, 0.0936f };
+            final DimensionImmutable eyeTextureSize = new Dimension(1122, 1553);
+            final float interpupillaryDistanceInMeters = 0.0635f;
+            final float pupilCenterFromScreenTopInMeters = screenSizeInMeters[1] / 2f;
+            final float[] horizPupilCenterFromLeft = Config.getHorizPupilCenterFromLeft(screenSizeInMeters[0], interpupillaryDistanceInMeters);
+            final float vertPupilCenterFromTop = Config.getVertPupilCenterFromTop(screenSizeInMeters[1], pupilCenterFromScreenTopInMeters);
+            final float fovy = 129f;
+            final float aspect = (float)eyeTextureSize.getWidth() / (float)eyeTextureSize.getHeight();
+            final FovHVHalves defaultSBSEyeFovLenseLeft = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[0]);
+            final FovHVHalves defaultSBSEyeFovLenseRight = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[1]);
+
+            config03StereoSBSLense01 = null == lenseDistMeshProduce ? null :
+                           new Config(
+                            "Def03StereoSBSLense01",
+                            ShutterType.RollingTopToBottom,
+                            surfaceSizeInPixel,                // resolution
+                            screenSizeInMeters,                // screenSize [m]
+                            eyeTextureSize,                    // eye textureSize
+                            pupilCenterFromScreenTopInMeters,  // pupilCenterFromScreenTop [m]
+                            interpupillaryDistanceInMeters,    // IPD [m]
+                            new int[] { 0, 1 },                // eye order
+                            new EyeParameter[] {
+                                new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES, defaultSBSEyeFovLenseLeft,
+                                              0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */),
+                                new EyeParameter(1, DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES, defaultSBSEyeFovLenseRight,
+                                             -0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */) },
+                            lenseDistMeshProduce,
+                            // supported distortion bits
+                            StereoDeviceRenderer.DISTORTION_BARREL | StereoDeviceRenderer.DISTORTION_CHROMATIC | StereoDeviceRenderer.DISTORTION_VIGNETTE,
+                            // recommended distortion bits
+                            StereoDeviceRenderer.DISTORTION_BARREL | StereoDeviceRenderer.DISTORTION_CHROMATIC | StereoDeviceRenderer.DISTORTION_VIGNETTE,
+                            // minimum distortion bits
+                            StereoDeviceRenderer.DISTORTION_BARREL
+                            );
+        }
+        configs = new Config[] { config01Mono01, config02StereoSBS01, config03StereoSBSLense01 };
+    }
+
+    public final int deviceIndex;
+    public final Config config;
+
+    public final Point surfacePos;
+    private final FovHVHalves[] defaultEyeFov;
+
+    private boolean sensorsStarted = false;
+
+    public GenericStereoDevice(final int deviceIndex, final StereoDevice.Config customConfig) {
+        this.deviceIndex = deviceIndex;
+
+        if( customConfig instanceof GenericStereoDevice.Config) {
+            this.config = (GenericStereoDevice.Config) customConfig;
+        } else {
+            final int cfgIdx = Math.min(deviceIndex % 10, configs.length-1);
+            this.config = null != configs[cfgIdx] ? configs[cfgIdx] : config02StereoSBS01;
+        }
+        config.init();
+
+        this.surfacePos = new Point(0, 0);
+
+        defaultEyeFov = new FovHVHalves[config.defaultEyeParam.length];
+        for(int i=0; i<defaultEyeFov.length; i++) {
+            defaultEyeFov[i] = config.defaultEyeParam[i].fovhv;
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "GenericStereoDevice["+config+", surfacePos "+surfacePos+"]";
+    }
+
+    public void setSurfacePosition(final int x, final int y) {
+        surfacePos.set(x, y);
+    }
+
+    @Override
+    public final void dispose() {
+        // NOP
+    }
+
+    @Override
+    public final PointImmutable getPosition() {
+        return surfacePos;
+    }
+
+    @Override
+    public final DimensionImmutable getSurfaceSize() {
+        return config.surfaceSizeInPixels;
+    }
+
+    @Override
+    public float[] getDefaultEyePositionOffset() {
+        return config.defaultEyeParam[0].positionOffset;
+    }
+
+    @Override
+    public final FovHVHalves[] getDefaultFOV() {
+        return defaultEyeFov;
+    }
+
+    @Override
+    public final boolean startSensors(final boolean start) {
+        if( start && !sensorsStarted ) {
+            if( startSensorsImpl(true) ) {
+                sensorsStarted = true;
+                return true;
+            } else {
+                sensorsStarted = false;
+                return false;
+            }
+        } else if( sensorsStarted ) {
+            if( startSensorsImpl(false) ) {
+                sensorsStarted = false;
+                return true;
+            } else {
+                sensorsStarted = true;
+                return false;
+            }
+        } else {
+            // No state change -> Success
+            return true;
+        }
+    }
+    private boolean startSensorsImpl(final boolean start) { return start; }
+
+    @Override
+    public boolean getSensorsStarted() { return sensorsStarted; }
+
+    @Override
+    public int[] getEyeRenderOrder() { return config.eyeRenderOrder; }
+
+    @Override
+    public int getSupportedDistortionBits() {
+        return config.supportedDistortionBits;
+    }
+
+    @Override
+    public int getRecommendedDistortionBits() {
+        return config.recommendedDistortionBits;
+    }
+
+    @Override
+    public int getMinimumDistortionBits() {
+        return config.minimumDistortionBits;
+    }
+
+    @Override
+    public final StereoDeviceRenderer createRenderer(final int distortionBits,
+                                                     final int textureCount, final float[] eyePositionOffset,
+                                                     final FovHVHalves[] eyeFov, final float pixelsPerDisplayPixel, final int textureUnit) {
+       final EyeParameter[] eyeParam = new EyeParameter[eyeFov.length];
+       for(int i=0; i<eyeParam.length; i++) {
+           final EyeParameter defaultEyeParam = config.defaultEyeParam[i];
+           eyeParam[i] = new EyeParameter(i, eyePositionOffset, eyeFov[i],
+                                          defaultEyeParam.distNoseToPupilX, defaultEyeParam.distMiddleToPupilY, defaultEyeParam.eyeReliefZ);
+       }
+
+       final RectangleImmutable[] eyeViewports = new RectangleImmutable[eyeParam.length];
+       final DimensionImmutable eyeTextureSize = config.eyeTextureSize;
+       final DimensionImmutable totalTextureSize;
+       if( 1 < eyeParam.length ) {
+           // Stereo SBS
+           totalTextureSize = new Dimension(eyeTextureSize.getWidth()*2, eyeTextureSize.getHeight());
+           if( 1 == textureCount ) { // validated in ctor below!
+               eyeViewports[0] = new Rectangle(0, 0,
+                       totalTextureSize.getWidth() / 2, totalTextureSize.getHeight());
+
+               eyeViewports[1] = new Rectangle((totalTextureSize.getWidth() + 1) / 2, 0,
+                       totalTextureSize.getWidth() / 2, totalTextureSize.getHeight());
+           } else {
+               eyeViewports[0] = new Rectangle(0, 0, eyeTextureSize.getWidth(), eyeTextureSize.getHeight());
+               eyeViewports[1] = eyeViewports[0];
+           }
+       } else {
+           // Mono
+           totalTextureSize = eyeTextureSize;
+           eyeViewports[0] = new Rectangle(0, 0, totalTextureSize.getWidth(), totalTextureSize.getHeight());
+       }
+       return new GenericStereoDeviceRenderer(this, distortionBits, textureCount, eyePositionOffset, eyeParam, pixelsPerDisplayPixel, textureUnit,
+                                              eyeTextureSize, totalTextureSize, eyeViewports);
+    }
+}
\ No newline at end of file
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java
new file mode 100644
index 000000000..a59e8d833
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java
@@ -0,0 +1,43 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ *    1. Redistributions of source code must retain the above copyright notice, this list of
+ *       conditions and the following disclaimer.
+ *
+ *    2. Redistributions in binary form must reproduce the above copyright notice, this list
+ *       of conditions and the following disclaimer in the documentation and/or other materials
+ *       provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
+
+public class GenericStereoDeviceFactory extends StereoDeviceFactory {
+
+    public static boolean isAvailable() {
+        return true;
+    }
+
+    @Override
+    public final StereoDevice createDevice(final int deviceIndex, final StereoDevice.Config config, final boolean verbose) {
+        return new GenericStereoDevice(deviceIndex, config);
+    }
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java
new file mode 100644
index 000000000..d957bd4e7
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java
@@ -0,0 +1,605 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ *    1. Redistributions of source code must retain the above copyright notice, this list of
+ *       conditions and the following disclaimer.
+ *
+ *    2. Redistributions in binary form must reproduce the above copyright notice, this list
+ *       of conditions and the following disclaimer in the documentation and/or other materials
+ *       provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import java.nio.FloatBuffer;
+import java.nio.ShortBuffer;
+
+import javax.media.nativewindow.util.Dimension;
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLArrayData;
+import javax.media.opengl.GLException;
+import javax.media.opengl.GLUniformData;
+
+import jogamp.common.os.PlatformPropsImpl;
+
+import com.jogamp.common.nio.Buffers;
+import com.jogamp.common.os.Platform;
+import com.jogamp.opengl.JoglVersion;
+import com.jogamp.opengl.util.GLArrayDataServer;
+import com.jogamp.opengl.util.glsl.ShaderCode;
+import com.jogamp.opengl.util.glsl.ShaderProgram;
+import com.jogamp.opengl.util.stereo.EyeParameter;
+import com.jogamp.opengl.util.stereo.EyePose;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+import com.jogamp.opengl.util.stereo.StereoUtil;
+
+/**
+ * Generic Stereo Device Distortion and OpenGL Renderer Utility
+ */
+public class GenericStereoDeviceRenderer implements StereoDeviceRenderer {
+    private static final String shaderPrefix01 = "dist01";
+    private static final String shaderTimewarpSuffix = "_timewarp";
+    private static final String shaderChromaSuffix = "_chroma";
+    private static final String shaderPlainSuffix = "_plain";
+
+    public static class GenericEye implements StereoDeviceRenderer.Eye {
+        private final int eyeName;
+        private final int distortionBits;
+        private final int vertexCount;
+        private final int indexCount;
+        private final RectangleImmutable viewport;
+
+        private final GLUniformData eyeToSourceUVScale;
+        private final GLUniformData eyeToSourceUVOffset;
+        private final GLUniformData eyeRotationStart;
+        private final GLUniformData eyeRotationEnd;
+
+        /** 2+2+2+2+2: { vec2 position, vec2 params (vignette, timewarp), vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+        private final GLArrayDataServer iVBO;
+        private final GLArrayData vboPos, vboParams, vboTexCoordsR, vboTexCoordsG, vboTexCoordsB;
+        private final GLArrayDataServer indices;
+
+        private final EyeParameter eyeParameter;
+
+        private final EyePose eyePose;
+
+        @Override
+        public final RectangleImmutable getViewport() { return viewport; }
+
+        @Override
+        public final EyeParameter getEyeParameter() { return eyeParameter; }
+
+        @Override
+        public final EyePose getLastEyePose() { return eyePose; }
+
+        private GenericEye(final GenericStereoDevice device, final int distortionBits,
+                         final float[] eyePositionOffset, final EyeParameter eyeParam,
+                         final DimensionImmutable textureSize, final RectangleImmutable eyeViewport) {
+            this.eyeName = eyeParam.number;
+            this.distortionBits = distortionBits;
+            this.viewport = eyeViewport;
+
+            final boolean usePP = null != device.config.distortionMeshProducer && 0 != distortionBits;
+
+            final boolean usesTimewarp = usePP && StereoUtil.usesTimewarpDistortion(distortionBits);
+            final FloatBuffer fstash = Buffers.newDirectFloatBuffer( 2 + 2 + ( usesTimewarp ? 16 + 16 : 0 ) ) ;
+
+            if( usePP ) {
+                eyeToSourceUVScale = new GLUniformData("svr_EyeToSourceUVScale", 2, Buffers.slice2Float(fstash, 0, 2));
+                eyeToSourceUVOffset = new GLUniformData("svr_EyeToSourceUVOffset", 2, Buffers.slice2Float(fstash, 2, 2));
+            } else {
+                eyeToSourceUVScale = null;
+                eyeToSourceUVOffset = null;
+            }
+
+            if( usesTimewarp ) {
+                eyeRotationStart = new GLUniformData("svr_EyeRotationStart", 4, 4, Buffers.slice2Float(fstash, 4, 16));
+                eyeRotationEnd = new GLUniformData("svr_EyeRotationEnd", 4, 4, Buffers.slice2Float(fstash, 20, 16));
+            } else {
+                eyeRotationStart = null;
+                eyeRotationEnd = null;
+            }
+
+            this.eyeParameter = eyeParam;
+
+            this.eyePose = new EyePose(eyeName);
+
+            updateEyePose(device); // 1st init
+
+            // Setup: eyeToSourceUVScale, eyeToSourceUVOffset
+            if( usePP ) {
+                final ScaleAndOffset2D textureScaleAndOffset = new ScaleAndOffset2D(eyeParam.fovhv, textureSize, eyeViewport);
+                if( StereoDevice.DEBUG ) {
+                    System.err.println("XXX."+eyeName+": eyeParam      "+eyeParam);
+                    System.err.println("XXX."+eyeName+": uvScaleOffset "+textureScaleAndOffset);
+                    System.err.println("XXX."+eyeName+": textureSize   "+textureSize);
+                    System.err.println("XXX."+eyeName+": viewport      "+eyeViewport);
+                }
+                final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue();
+                eyeToSourceUVScaleFB.put(0, textureScaleAndOffset.scale[0]);
+                eyeToSourceUVScaleFB.put(1, textureScaleAndOffset.scale[1]);
+                final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue();
+                eyeToSourceUVOffsetFB.put(0, textureScaleAndOffset.offset[0]);
+                eyeToSourceUVOffsetFB.put(1, textureScaleAndOffset.offset[1]);
+            } else {
+                vertexCount = 0;
+                indexCount = 0;
+                iVBO = null;
+                vboPos = null;
+                vboParams = null;
+                vboTexCoordsR = null;
+                vboTexCoordsG = null;
+                vboTexCoordsB = null;
+                indices = null;
+                if( StereoDevice.DEBUG ) {
+                    System.err.println("XXX."+eyeName+": "+this);
+                }
+                return;
+            }
+            final DistortionMesh meshData = device.config.distortionMeshProducer.create(eyeParam, distortionBits);
+            if( null == meshData ) {
+                throw new GLException("Failed to create meshData for eye "+eyeParam+", and "+StereoUtil.distortionBitsToString(distortionBits));
+            }
+
+            vertexCount = meshData.vertexCount;
+            indexCount = meshData.indexCount;
+
+            /** 2+2+2+2+2: { vec2 position, vec2 params (vignette, timewarp), vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+            final boolean useChromatic = StereoUtil.usesChromaticDistortion(distortionBits);
+            final boolean useVignette = StereoUtil.usesVignetteDistortion(distortionBits);
+
+            final int compsPerElement = 2+2+2+( useChromatic ? 2+2 /* texCoordG + texCoordB */: 0 );
+            iVBO = GLArrayDataServer.createGLSLInterleaved(compsPerElement, GL.GL_FLOAT, false, vertexCount, GL.GL_STATIC_DRAW);
+            vboPos = iVBO.addGLSLSubArray("svr_Position", 2, GL.GL_ARRAY_BUFFER);
+            vboParams = iVBO.addGLSLSubArray("svr_Params", 2, GL.GL_ARRAY_BUFFER);
+            vboTexCoordsR = iVBO.addGLSLSubArray("svr_TexCoordR", 2, GL.GL_ARRAY_BUFFER);
+            if( useChromatic ) {
+                vboTexCoordsG = iVBO.addGLSLSubArray("svr_TexCoordG", 2, GL.GL_ARRAY_BUFFER);
+                vboTexCoordsB = iVBO.addGLSLSubArray("svr_TexCoordB", 2, GL.GL_ARRAY_BUFFER);
+            } else {
+                vboTexCoordsG = null;
+                vboTexCoordsB = null;
+            }
+            indices = GLArrayDataServer.createData(1, GL.GL_SHORT, indexCount, GL.GL_STATIC_DRAW, GL.GL_ELEMENT_ARRAY_BUFFER);
+
+            /** 2+2+2+2+2: { vec2 position, vec2 params (vignette, timewarp), vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+            final FloatBuffer iVBOFB = (FloatBuffer)iVBO.getBuffer();
+
+            for ( int vertNum = 0; vertNum < vertexCount; vertNum++ ) {
+                final DistortionMesh.DistortionVertex v = meshData.vertices[vertNum];
+                int dataIdx = 0;
+
+                if( StereoDevice.DUMP_DATA ) {
+                    System.err.println("XXX."+eyeName+": START VERTEX "+vertNum+" / "+vertexCount);
+                }
+                // pos
+                if( v.pos_size >= 2 ) {
+                    if( StereoDevice.DUMP_DATA ) {
+                        System.err.println("XXX."+eyeName+": pos ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+                    }
+                    iVBOFB.put(v.data[dataIdx]);
+                    iVBOFB.put(v.data[dataIdx+1]);
+                } else {
+                    iVBOFB.put(0f);
+                    iVBOFB.put(0f);
+                }
+                dataIdx += v.pos_size;
+
+                // params
+                if( v.vignetteFactor_size >= 1 && useVignette ) {
+                    if( StereoDevice.DUMP_DATA ) {
+                        System.err.println("XXX."+eyeName+": vignette "+v.data[dataIdx]);
+                    }
+                    iVBOFB.put(v.data[dataIdx]);
+                } else {
+                    iVBOFB.put(1.0f);
+                }
+                dataIdx += v.vignetteFactor_size;
+
+                if( v.timewarpFactor_size >= 1 ) {
+                    if( StereoDevice.DUMP_DATA ) {
+                        System.err.println("XXX."+eyeName+": timewarp "+v.data[dataIdx]);
+                    }
+                    iVBOFB.put(v.data[dataIdx]);
+                } else {
+                    iVBOFB.put(1.0f);
+                }
+                dataIdx += v.timewarpFactor_size;
+
+                // texCoordR
+                if( v.texR_size >= 2 ) {
+                    if( StereoDevice.DUMP_DATA ) {
+                        System.err.println("XXX."+eyeName+": texR ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+                    }
+                    iVBOFB.put(v.data[dataIdx]);
+                    iVBOFB.put(v.data[dataIdx+1]);
+                } else {
+                    iVBOFB.put(1f);
+                    iVBOFB.put(1f);
+                }
+                dataIdx += v.texR_size;
+
+                if( useChromatic ) {
+                    // texCoordG
+                    if( v.texG_size >= 2 ) {
+                        if( StereoDevice.DUMP_DATA ) {
+                            System.err.println("XXX."+eyeName+": texG ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+                        }
+                        iVBOFB.put(v.data[dataIdx]);
+                        iVBOFB.put(v.data[dataIdx+1]);
+                    } else {
+                        iVBOFB.put(1f);
+                        iVBOFB.put(1f);
+                    }
+                    dataIdx += v.texG_size;
+
+                    // texCoordB
+                    if( v.texB_size >= 2 ) {
+                        if( StereoDevice.DUMP_DATA ) {
+                            System.err.println("XXX."+eyeName+": texB ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+                        }
+                        iVBOFB.put(v.data[dataIdx]);
+                        iVBOFB.put(v.data[dataIdx+1]);
+                    } else {
+                        iVBOFB.put(1f);
+                        iVBOFB.put(1f);
+                    }
+                    dataIdx += v.texB_size;
+                } else {
+                    dataIdx += v.texG_size;
+                    dataIdx += v.texB_size;
+                }
+            }
+            if( StereoDevice.DUMP_DATA ) {
+                System.err.println("XXX."+eyeName+": iVBO "+iVBO);
+            }
+            {
+                if( StereoDevice.DUMP_DATA ) {
+                    System.err.println("XXX."+eyeName+": idx "+indices+", count "+indexCount);
+                    for(int i=0; i< indexCount; i++) {
+                        if( 0 == i % 16 ) {
+                            System.err.printf("%n%5d: ", i);
+                        }
+                        System.err.printf("%5d, ", (int)meshData.indices[i]);
+                    }
+                    System.err.println();
+                }
+                final ShortBuffer out = (ShortBuffer) indices.getBuffer();
+                out.put(meshData.indices, 0, meshData.indexCount);
+            }
+            if( StereoDevice.DEBUG ) {
+                System.err.println("XXX."+eyeName+": "+this);
+            }
+        }
+
+        private void linkData(final GL2ES2 gl, final ShaderProgram sp) {
+            if( null == iVBO ) return;
+
+            if( 0 > vboPos.setLocation(gl, sp.program()) ) {
+                throw new GLException("Couldn't locate "+vboPos);
+            }
+            if( 0 > vboParams.setLocation(gl, sp.program()) ) {
+                throw new GLException("Couldn't locate "+vboParams);
+            }
+            if( 0 > vboTexCoordsR.setLocation(gl, sp.program()) ) {
+                throw new GLException("Couldn't locate "+vboTexCoordsR);
+            }
+            if( StereoUtil.usesChromaticDistortion(distortionBits) ) {
+                if( 0 > vboTexCoordsG.setLocation(gl, sp.program()) ) {
+                    throw new GLException("Couldn't locate "+vboTexCoordsG);
+                }
+                if( 0 > vboTexCoordsB.setLocation(gl, sp.program()) ) {
+                    throw new GLException("Couldn't locate "+vboTexCoordsB);
+                }
+            }
+            if( 0 > eyeToSourceUVScale.setLocation(gl, sp.program()) ) {
+                throw new GLException("Couldn't locate "+eyeToSourceUVScale);
+            }
+            if( 0 > eyeToSourceUVOffset.setLocation(gl, sp.program()) ) {
+                throw new GLException("Couldn't locate "+eyeToSourceUVOffset);
+            }
+            if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+                if( 0 > eyeRotationStart.setLocation(gl, sp.program()) ) {
+                    throw new GLException("Couldn't locate "+eyeRotationStart);
+                }
+                if( 0 > eyeRotationEnd.setLocation(gl, sp.program()) ) {
+                    throw new GLException("Couldn't locate "+eyeRotationEnd);
+                }
+            }
+            iVBO.seal(gl, true);
+            iVBO.enableBuffer(gl, false);
+            indices.seal(gl, true);
+            indices.enableBuffer(gl, false);
+        }
+
+        private void dispose(final GL2ES2 gl) {
+            if( null == iVBO ) return;
+            iVBO.destroy(gl);
+            indices.destroy(gl);
+        }
+        private void enableVBO(final GL2ES2 gl, final boolean enable) {
+            if( null == iVBO ) return;
+            iVBO.enableBuffer(gl, enable);
+            indices.bindBuffer(gl, enable); // keeps VBO binding if enable:=true
+        }
+
+        private void updateUniform(final GL2ES2 gl, final ShaderProgram sp) {
+            if( null == iVBO ) return;
+            gl.glUniform(eyeToSourceUVScale);
+            gl.glUniform(eyeToSourceUVOffset);
+            if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+                gl.glUniform(eyeRotationStart);
+                gl.glUniform(eyeRotationEnd);
+            }
+        }
+
+        /**
+         * Updates the {@link #eyePose} of this eye.
+         * The generic device provides no head tracking, hence the pose is returned unchanged.
+         * @param hmdCtx the generic stereo device (currently unused)
+         */
+        private EyePose updateEyePose(final GenericStereoDevice hmdCtx) {
+            return eyePose;
+        }
+
+        @Override
+        public String toString() {
+            final String ppTxt = null == iVBO ? ", no post-processing" :
+                        ", uvScale["+eyeToSourceUVScale.floatBufferValue().get(0)+", "+eyeToSourceUVScale.floatBufferValue().get(1)+
+                        "], uvOffset["+eyeToSourceUVOffset.floatBufferValue().get(0)+", "+eyeToSourceUVOffset.floatBufferValue().get(1)+"]";
+
+            return "Eye["+eyeName+", viewport "+viewport+
+                        ", "+eyeParameter+
+                        ", vertices "+vertexCount+", indices "+indexCount+
+                        ppTxt+
+                        ", "+eyePose+"]";
+        }
+    }
+
+    private final GenericStereoDevice device;
+    private final GenericEye[] eyes;
+    private final int distortionBits;
+    private final int textureCount;
+    private final DimensionImmutable singleTextureSize;
+    private final DimensionImmutable totalTextureSize;
+    /** if texUnit0 is null: no post-processing */
+    private final GLUniformData texUnit0;
+
+    private ShaderProgram sp;
+    private long frameStart = 0;
+
+    @Override
+    public String toString() {
+        return "GenericStereo[distortion["+StereoUtil.distortionBitsToString(distortionBits)+
+                             "], singleSize "+singleTextureSize+
+                             ", sbsSize "+totalTextureSize+
+                             ", texCount "+textureCount+", texUnit "+(null != texUnit0 ? texUnit0.intValue() : "n/a")+
+                             ", "+PlatformPropsImpl.NEWLINE+"  "+(0 < eyes.length ? eyes[0] : "none")+
+                             ", "+PlatformPropsImpl.NEWLINE+"  "+(1 < eyes.length ? eyes[1] : "none")+"]";
+    }
+
+
+    private static final DimensionImmutable zeroSize = new Dimension(0, 0);
+
+    /* pp */ GenericStereoDeviceRenderer(final GenericStereoDevice context, final int distortionBits,
+                                       final int textureCount, final float[] eyePositionOffset,
+                                       final EyeParameter[] eyeParam, final float pixelsPerDisplayPixel, final int textureUnit,
+                                       final DimensionImmutable singleTextureSize, final DimensionImmutable totalTextureSize,
+                                       final RectangleImmutable[] eyeViewports) {
+        this.device = context;
+        this.eyes = new GenericEye[eyeParam.length];
+        this.distortionBits = ( distortionBits | context.getMinimumDistortionBits() ) & context.getSupportedDistortionBits();
+        final boolean usePP = null != device.config.distortionMeshProducer && 0 != this.distortionBits;
+        final DimensionImmutable textureSize;
+
+        if( usePP ) {
+            if( 1 > textureCount || 2 < textureCount ) {
+                this.textureCount = 2;
+            } else {
+                this.textureCount = textureCount;
+            }
+            this.singleTextureSize = singleTextureSize;
+            this.totalTextureSize = totalTextureSize;
+            textureSize = 1 == textureCount ? totalTextureSize : singleTextureSize;
+            texUnit0 = new GLUniformData("svr_Texture0", textureUnit);
+        } else {
+            this.textureCount = 0;
+            this.singleTextureSize = zeroSize;
+            this.totalTextureSize = zeroSize;
+            textureSize = zeroSize;
+            texUnit0 = null;
+        }
+        for(int i=0; i<eyeParam.length; i++) {
+            eyes[i] = new GenericEye(context, this.distortionBits, eyePositionOffset, eyeParam[i], textureSize, eyeViewports[i]);
+        }
+        sp = null;
+    }
+
+    @Override
+    public StereoDevice getDevice() {  return device; }
+
+    @Override
+    public final int getDistortionBits() { return distortionBits; }
+
+    @Override
+    public final boolean usesSideBySideStereo() { return true; }
+
+    @Override
+    public final DimensionImmutable getSingleSurfaceSize() { return singleTextureSize; }
+
+    @Override
+    public final DimensionImmutable getTotalSurfaceSize() { return totalTextureSize; }
+
+    @Override
+    public final int getTextureCount() { return textureCount; }
+
+    @Override
+    public final int getTextureUnit() { return ppAvailable() ? texUnit0.intValue() : 0; }
+
+    @Override
+    public final boolean ppAvailable() { return null != texUnit0; }
+
+    @Override
+    public final void init(final GL gl) {
+        if( StereoDevice.DEBUG ) {
+            System.err.println(JoglVersion.getGLInfo(gl, null).toString());
+        }
+        if( null != sp ) {
+            throw new IllegalStateException("Already initialized");
+        }
+        if( !ppAvailable() ) {
+            return;
+        }
+        final GL2ES2 gl2es2 = gl.getGL2ES2();
+
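+        // Compose the shader basenames from the active distortion bits:
+        // the vertex shader may append the timewarp and/or chromatic suffix,
+        // the fragment shader only differs in plain vs. chromatic sampling.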
+        final String vertexShaderBasename;
+        final String fragmentShaderBasename;
+        {
+            final boolean usesTimewarp = StereoUtil.usesTimewarpDistortion(distortionBits);
+            final boolean usesChromatic = StereoUtil.usesChromaticDistortion(distortionBits);
+
+            final StringBuilder sb = new StringBuilder();
+            sb.append(shaderPrefix01);
+            if( !usesChromatic && !usesTimewarp ) {
+                sb.append(shaderPlainSuffix);
+            } else if( usesChromatic && !usesTimewarp ) {
+                sb.append(shaderChromaSuffix);
+            } else if( usesTimewarp ) {
+                sb.append(shaderTimewarpSuffix);
+                if( usesChromatic ) {
+                    sb.append(shaderChromaSuffix);
+                }
+            }
+            vertexShaderBasename = sb.toString();
+            sb.setLength(0);
+            sb.append(shaderPrefix01);
+            if( usesChromatic ) {
+                sb.append(shaderChromaSuffix);
+            } else {
+                sb.append(shaderPlainSuffix);
+            }
+            fragmentShaderBasename = sb.toString();
+        }
+        final ShaderCode vp0 = ShaderCode.create(gl2es2, GL2ES2.GL_VERTEX_SHADER, GenericStereoDeviceRenderer.class, "shader",
+                "shader/bin", vertexShaderBasename, true);
+        final ShaderCode fp0 = ShaderCode.create(gl2es2, GL2ES2.GL_FRAGMENT_SHADER, GenericStereoDeviceRenderer.class, "shader",
+                "shader/bin", fragmentShaderBasename, true);
+        vp0.defaultShaderCustomization(gl2es2, true, true);
+        fp0.defaultShaderCustomization(gl2es2, true, true);
+
+        sp = new ShaderProgram();
+        sp.add(gl2es2, vp0, System.err);
+        sp.add(gl2es2, fp0, System.err);
+        if(!sp.link(gl2es2, System.err)) {
+            throw new GLException("could not link program: "+sp);
+        }
+        sp.useProgram(gl2es2, true);
+        if( 0 > texUnit0.setLocation(gl2es2, sp.program()) ) {
+            throw new GLException("Couldn't locate "+texUnit0);
+        }
+        for(int i=0; i<eyes.length; i++) {
+            eyes[i].linkData(gl2es2, sp);
+        }
+        sp.useProgram(gl2es2, false);
+    }
+
+    @Override
+    public final void dispose(final GL gl) {
+        final GL2ES2 gl2es2 = gl.getGL2ES2();
+        if( null != sp ) {
+            sp.useProgram(gl2es2, false);
+        }
+        for(int i=0; i<eyes.length; i++) {
+            eyes[i].dispose(gl2es2);
+        }
+        if( null != sp ) {
+            sp.destroy(gl2es2);
+        }
+    }
+
+    @Override
+    public final Eye getEye(final int eyeNum) {
+        return eyes[eyeNum];
+    }
+
+    @Override
+    public final EyePose updateEyePose(final int eyeNum) {
+        return eyes[eyeNum].updateEyePose(device);
+    }
+
+    @Override
+    public final void beginFrame(final GL gl) {
+        frameStart = Platform.currentTimeMillis();
+    }
+
+    @Override
+    public final void endFrame(final GL gl) {
+        if( 0 == frameStart ) {
+            throw new IllegalStateException("beginFrame not called");
+        }
+        frameStart = 0;
+    }
+
+    @Override
+    public final void ppBegin(final GL gl) {
+        if( null == sp ) {
+            throw new IllegalStateException("Not initialized");
+        }
+        if( 0 == frameStart ) {
+            throw new IllegalStateException("beginFrame not called");
+        }
+        final GL2ES2 gl2es2 = gl.getGL2ES2();
+
+        gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+        gl.glClear(GL.GL_COLOR_BUFFER_BIT);
+        gl.glActiveTexture(GL.GL_TEXTURE0 + getTextureUnit());
+
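+        // Full-screen distortion-mesh pass: culling, depth test and blending are not required.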
+        gl2es2.glDisable(GL.GL_CULL_FACE);
+        gl2es2.glDisable(GL.GL_DEPTH_TEST);
+        gl2es2.glDisable(GL.GL_BLEND);
+
+        if( !gl2es2.isGLcore() ) {
+            gl2es2.glEnable(GL.GL_TEXTURE_2D);
+        }
+
+        sp.useProgram(gl2es2, true);
+
+        gl2es2.glUniform(texUnit0);
+    }
+
+    @Override
+    public final void ppOneEye(final GL gl, final int eyeNum) {
+        final GenericEye eye = eyes[eyeNum];
+        final GL2ES2 gl2es2 = gl.getGL2ES2();
+
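+        // Update this eye's uniforms and render its distortion mesh.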
+        eye.updateUniform(gl2es2, sp);
+        eye.enableVBO(gl2es2, true);
+        gl2es2.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0);
+        eye.enableVBO(gl2es2, false);
+    }
+
+    @Override
+    public final void ppEnd(final GL gl) {
+        sp.useProgram(gl.getGL2ES2(), false);
+    }
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java b/src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java
new file mode 100644
index 000000000..ce154e03e
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java
@@ -0,0 +1,107 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ *    1. Redistributions of source code must retain the above copyright notice, this list of
+ *       conditions and the following disclaimer.
+ *
+ *    2. Redistributions in binary form must reproduce the above copyright notice, this list
+ *       of conditions and the following disclaimer in the documentation and/or other materials
+ *       provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
+
+import com.jogamp.opengl.math.FovHVHalves;
+import com.jogamp.opengl.math.VectorUtil;
+
+/**
+ * 2D scale and offset in <i>Normalized Device Coordinates</i> (NDC),
+ * providing the conversion from {@link FovHVHalves} tangent space to NDC space.
+ * <p>
+ * See <a href="https://www.opengl.org/wiki/Compute_eye_space_from_window_space">OpenGL.org: Compute eye space from window space</a>
+ * </p>
+ */
+public final class ScaleAndOffset2D {
+    /** Scale for x- and y-component. */
+    final float[] scale;
+    /** Offset for x- and y-component. */
+    final float[] offset;
+
+    private static final float[] vec2Half = new float[] { 0.5f, 0.5f };
+
+    @Override
+    public String toString() {
+        return "[offset "+offset[0]+" / "+offset[1]+", scale "+scale[0]+" x "+scale[1]+"]";
+    }
+
+    public ScaleAndOffset2D(final float[] scale, final float[] offset) {
+        this.scale = scale;
+        this.offset = offset;
+    }
+
+    /**
+     * Create the <i>Normalized Device Coordinate Space</i> (NDC) [-1,+1] instance
+     * from the given <code>fovHVHalves</code>.
+     */
+    public ScaleAndOffset2D(final FovHVHalves fovHVHalves) {
+        final FovHVHalves tanFovHVHalves = fovHVHalves.toTangents();
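+        // Map the asymmetric tangent half-angles to NDC [-1, +1]:
+        // scale by 2/(left+right) resp. 2/(top+bottom), offset by the FOV asymmetry.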
+        final float projXScale = 2.0f / ( tanFovHVHalves.left + tanFovHVHalves.right );
+        final float projYScale = 2.0f / ( tanFovHVHalves.top + tanFovHVHalves.bottom );
+        final float projXOffset = ( tanFovHVHalves.left - tanFovHVHalves.right ) * projXScale * 0.5f;
+        final float projYOffset = ( tanFovHVHalves.top - tanFovHVHalves.bottom ) * projYScale * 0.5f;
+
+        this.scale = new float[] { projXScale, projYScale };
+        this.offset = new float[] { projXOffset, projYOffset };
+    }
+
+    /**
+     * Create the <i>Normalized Device Coordinate Space</i> (NDC) [-1,+1] instance
+     * from the given <code>fovHVHalves</code>, restricted to the <code>renderViewport</code> subsection of the <code>rendertargetSize</code>.
+     */
+    public ScaleAndOffset2D(final FovHVHalves fovHVHalves, final DimensionImmutable rendertargetSize, final RectangleImmutable renderViewport) {
+        final ScaleAndOffset2D eyeToSourceNDC = new ScaleAndOffset2D(fovHVHalves);
+        final float[] vec2Tmp1 = new float[2];
+        final float[] vec2Tmp2 = new float[2];
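+        // Map the eye-to-source NDC range [-1, +1] to the UV range [0, 1].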
+        final float[] scale  = VectorUtil.scaleVec2(vec2Tmp1, eyeToSourceNDC.scale, 0.5f);
+        final float[] offset = VectorUtil.addVec2(vec2Tmp2, VectorUtil.scaleVec2(vec2Tmp2, eyeToSourceNDC.offset, 0.5f), vec2Half);
+
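+        // Restrict the UV range to the render-viewport's fraction of the render target.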
+        final float[] scale2 = new float[] { (float)renderViewport.getWidth() / (float)rendertargetSize.getWidth(),
+                                             (float)renderViewport.getHeight() / (float)rendertargetSize.getHeight() };
+
+        final float[] offset2 = new float[] { (float)renderViewport.getX() / (float)rendertargetSize.getWidth(),
+                                              (float)renderViewport.getY() / (float)rendertargetSize.getHeight() };
+
+        VectorUtil.scaleVec2(scale, scale, scale2);
+        VectorUtil.addVec2(offset, VectorUtil.scaleVec2(offset, offset, scale2), offset2);
+
+        this.scale = scale;
+        this.offset = offset;
+    }
+
+    /**
+     * Convert the given <code>rendertargetNDC</code> coordinate into <i>tangent FOV space</i>,
+     * i.e. apply the inverse of this <i>eye to source NDC</i> scale and offset.
+     */
+    public final float[] convertToTanFovSpace(final float[] rendertargetNDC) {
+        final float[] vec2Tmp1 = new float[2];
+        return VectorUtil.divVec2(vec2Tmp1, VectorUtil.subVec2(vec2Tmp1, rendertargetNDC, this.offset), this.scale);
+    }
+
+}
\ No newline at end of file
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp
new file mode 100644
index 000000000..4ac404729
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp
@@ -0,0 +1,26 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+  #define varying in
+  out vec4 svr_FragColor;
+  #define texture2D texture
+#else
+  #define svr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D  svr_Texture0;
+
+varying vec3    svv_Fade;
+varying vec2    svv_TexCoordR;
+varying vec2    svv_TexCoordG;
+varying vec2    svv_TexCoordB;
+
+void main (void)
+{
+  // 3 samples for fixing chromatic aberrations
+  vec3 color = vec3(texture2D(svr_Texture0, svv_TexCoordR).r,
+                    texture2D(svr_Texture0, svv_TexCoordG).g,
+                    texture2D(svr_Texture0, svv_TexCoordB).b);
+  svr_FragColor = vec4(svv_Fade * color, 1.0);  // include vignetteFade
+}
+
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp
new file mode 100644
index 000000000..d4ab585d5
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp
@@ -0,0 +1,33 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+  #define attribute in
+  #define varying out
+#endif
+
+uniform vec2    svr_EyeToSourceUVScale;
+uniform vec2    svr_EyeToSourceUVOffset;
+
+attribute vec2  svr_Position;
+attribute vec2  svr_Params;
+attribute vec2  svr_TexCoordR;
+attribute vec2  svr_TexCoordG;
+attribute vec2  svr_TexCoordB;
+
+varying vec3    svv_Fade;
+varying vec2    svv_TexCoordR;
+varying vec2    svv_TexCoordG;
+varying vec2    svv_TexCoordB;
+
+void main(void)
+{
+    gl_Position = vec4(svr_Position.xy, 0.5, 1.0);
+    svv_Fade = vec3(svr_Params.r); // vignetteFade
+    
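+    // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+    // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)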
+    svv_TexCoordR = svr_TexCoordR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+    svv_TexCoordG = svr_TexCoordG * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordG.y = 1.0-svv_TexCoordG.y;
+    svv_TexCoordB = svr_TexCoordB * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordB.y = 1.0-svv_TexCoordB.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp
new file mode 100644
index 000000000..2df890648
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp
@@ -0,0 +1,22 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+  #define varying in
+  out vec4 svr_FragColor;
+  #define texture2D texture
+#else
+  #define svr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D  svr_Texture0;
+
+varying vec3    svv_Fade;
+varying vec2    svv_TexCoordR;
+
+void main (void)
+{
+  // single sample, no chromatic aberration correction
+  vec3 color = texture2D(svr_Texture0, svv_TexCoordR).rgb;
+  svr_FragColor = vec4(svv_Fade * color, 1.0);  // include vignetteFade
+}
+
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp
new file mode 100644
index 000000000..335d3f0f6
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp
@@ -0,0 +1,27 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+  #define attribute in
+  #define varying out
+#endif
+
+uniform vec2    svr_EyeToSourceUVScale;
+uniform vec2    svr_EyeToSourceUVOffset;
+
+attribute vec2  svr_Position;
+attribute vec2  svr_Params;
+attribute vec2  svr_TexCoordR;
+
+varying vec3    svv_Fade;
+varying vec2    svv_TexCoordR;
+
+void main(void)
+{
+    gl_Position = vec4(svr_Position.xy, 0.5, 1.0);
+    svv_Fade = vec3(svr_Params.r); // vignetteFade
+    
+    // Vertex input is in TanEyeAngle space (i.e. after distortion).
+    // Scale it into the correct [0-1],[0-1] UV lookup space (depending on eye).
+    svv_TexCoordR = svr_TexCoordR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp
new file mode 100644
index 000000000..c4461ec3e
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp
@@ -0,0 +1,44 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+  #define attribute in
+  #define varying out
+#endif
+
+uniform vec2    svr_EyeToSourceUVScale;
+uniform vec2    svr_EyeToSourceUVOffset;
+uniform mat4    svr_EyeRotationStart;
+uniform mat4    svr_EyeRotationEnd;
+
+attribute vec2  svr_Position;
+attribute vec2  svr_Params;
+attribute vec2  svr_TexCoordR;
+
+varying vec3    svv_Fade;
+varying vec2    svv_TexCoordR;
+
+void main(void)
+{
+    gl_Position = vec4(svr_Position.xy, 0.0, 1.0);
+    svv_Fade = vec3(svr_Params.r); // vignetteFade
+    
+    // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+    // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+    vec3 TanEyeAngle = vec3 ( svr_TexCoordR, 1.0 );
+    
+    // Accurate time warp lerp vs. faster
+    // Apply the two 3x3 timewarp rotations to these vectors.
+    vec3 TransformedStart = (svr_EyeRotationStart * vec4(TanEyeAngle, 0)).xyz;
+    vec3 TransformedEnd   = (svr_EyeRotationEnd * vec4(TanEyeAngle, 0)).xyz;
+    // And blend between them.
+    vec3 Transformed = mix ( TransformedStart, TransformedEnd, svr_Params.g /* timewarpLerpFactor */ );
+    
+    // Project them back onto the Z=1 plane of the rendered images.
+    float RecipZ = 1.0 / Transformed.z;
+    vec2 Flattened = vec2 ( Transformed.x * RecipZ, Transformed.y * RecipZ );
+    
+    // These are now still in TanEyeAngle space.
+    // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+    svv_TexCoordR = Flattened * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp
new file mode 100644
index 000000000..c08ed3113
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp
@@ -0,0 +1,65 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+  #define attribute in
+  #define varying out
+#endif
+
+uniform vec2    svr_EyeToSourceUVScale;
+uniform vec2    svr_EyeToSourceUVOffset;
+uniform mat4    svr_EyeRotationStart;
+uniform mat4    svr_EyeRotationEnd;
+
+attribute vec2  svr_Position;
+attribute vec2  svr_Params;
+attribute vec2  svr_TexCoordR;
+attribute vec2  svr_TexCoordG;
+attribute vec2  svr_TexCoordB;
+
+varying vec3    svv_Fade;
+varying vec2    svv_TexCoordR;
+varying vec2    svv_TexCoordG;
+varying vec2    svv_TexCoordB;
+
+void main(void)
+{
+    gl_Position = vec4(svr_Position.xy, 0.0, 1.0);
+    svv_Fade = vec3(svr_Params.r); // vignetteFade
+    
+    // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+    // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+    vec3 TanEyeAngleR = vec3 ( svr_TexCoordR, 1.0 );
+    vec3 TanEyeAngleG = vec3 ( svr_TexCoordG, 1.0 );
+    vec3 TanEyeAngleB = vec3 ( svr_TexCoordB, 1.0 );
+    
+    // Accurate time warp lerp vs. faster
+    // Apply the two 3x3 timewarp rotations to these vectors.
+    vec3 TransformedRStart = (svr_EyeRotationStart * vec4(TanEyeAngleR, 0)).xyz;
+    vec3 TransformedGStart = (svr_EyeRotationStart * vec4(TanEyeAngleG, 0)).xyz;
+    vec3 TransformedBStart = (svr_EyeRotationStart * vec4(TanEyeAngleB, 0)).xyz;
+    vec3 TransformedREnd   = (svr_EyeRotationEnd * vec4(TanEyeAngleR, 0)).xyz;
+    vec3 TransformedGEnd   = (svr_EyeRotationEnd * vec4(TanEyeAngleG, 0)).xyz;
+    vec3 TransformedBEnd   = (svr_EyeRotationEnd * vec4(TanEyeAngleB, 0)).xyz;
+
+    // And blend between them.
+    vec3 TransformedR = mix ( TransformedRStart, TransformedREnd, svr_Params.g /* timewarpLerpFactor */ );
+    vec3 TransformedG = mix ( TransformedGStart, TransformedGEnd, svr_Params.g /* timewarpLerpFactor */ );
+    vec3 TransformedB = mix ( TransformedBStart, TransformedBEnd, svr_Params.g /* timewarpLerpFactor */ );
+    
+    // Project them back onto the Z=1 plane of the rendered images.
+    float RecipZR = 1.0 / TransformedR.z;
+    float RecipZG = 1.0 / TransformedG.z;
+    float RecipZB = 1.0 / TransformedB.z;
+    vec2 FlattenedR = vec2 ( TransformedR.x * RecipZR, TransformedR.y * RecipZR );
+    vec2 FlattenedG = vec2 ( TransformedG.x * RecipZG, TransformedG.y * RecipZG );
+    vec2 FlattenedB = vec2 ( TransformedB.x * RecipZB, TransformedB.y * RecipZB );
+    
+    // These are now still in TanEyeAngle space.
+    // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+    svv_TexCoordR = FlattenedR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+    svv_TexCoordG = FlattenedG * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordG.y = 1.0-svv_TexCoordG.y;
+    svv_TexCoordB = FlattenedB * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+    svv_TexCoordB.y = 1.0-svv_TexCoordB.y;
+}
-- 
cgit v1.2.3