From 38e51e4a5f6f35c658df10f6d48a33e3ffaea2f3 Mon Sep 17 00:00:00 2001
From: Sven Gothel
* If this instance is {@link #inTangents} already, method returns this instance,
* otherwise a newly created instance w/ converted values to tangent.
*
- final float halfHorizFovTan = (float)Math.tan(horizontalFov/2f);
- final float halfVertFovTan = (float)Math.tan(verticalFov/2f);
+ halfHorizFovTan = tan( horizontalFov / 2f );
+ halfVertFovTan = tan( verticalFov / 2f );
*
* @param horizontalFov whole horizontal FOV in radians
* @param verticalFov whole vertical FOV in radians
*/
- public static FovHVHalves createByRadians(final float horizontalFov, final float verticalFov) {
+ public static FovHVHalves byRadians(final float horizontalFov, final float verticalFov) {
final float halfHorizFovTan = FloatUtil.tan(horizontalFov/2f);
final float halfVertFovTan = FloatUtil.tan(verticalFov/2f);
return new FovHVHalves(halfHorizFovTan, halfHorizFovTan, halfVertFovTan, halfVertFovTan, true);
}
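A minimal usage sketch of the factory above, assuming the com.jogamp.opengl.math classes touched by this patch (values are illustrative):

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.FovHVHalves;

    final class FovByRadiansSketch {
        static FovHVHalves symmetric60x40() {
            final float d2r = FloatUtil.PI / 180f;
            // Whole FOVs in radians; each stored half becomes tan(fov/2).
            final FovHVHalves fov = FovHVHalves.byRadians(60f * d2r, 40f * d2r);
            // fov.left == fov.right == tan(30 deg) ~ 0.577f; fov.top == fov.bottom == tan(20 deg) ~ 0.364f
            return fov;
        }
    }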
/**
- * Returns this instance values in tangent values.
+ * Returns a symmetrical centered {@link FovHVHalves} instance in tangents, using:
+ *
+ top = bottom = tan( verticalFov / 2f );
+ left = right = aspect * top;
+ *
+ *
+ * @param verticalFov vertical FOV in radians
+ * @param aspect aspect ratio width / height
+ */
+ public static FovHVHalves byFovyRadianAndAspect(final float verticalFov, final float aspect) {
+ final float halfVertFovTan = FloatUtil.tan(verticalFov/2f);
+ final float halfHorizFovTan = aspect * halfVertFovTan;
+ return new FovHVHalves(halfHorizFovTan, halfHorizFovTan,
+ halfVertFovTan, halfVertFovTan, true);
+ }
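As an illustration, the aspect-driven variant can simply be fed the render-surface aspect ratio, mirroring the generic mono configuration further below in this patch:

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.FovHVHalves;

    final class FovByAspectSketch {
        static FovHVHalves fov45(final int widthPx, final int heightPx) {
            final float d2r = FloatUtil.PI / 180f;
            final float aspect = (float) widthPx / (float) heightPx; // e.g. 1280f / 800f
            // top == bottom == tan(22.5 deg), left == right == aspect * top
            return FovHVHalves.byFovyRadianAndAspect(45f * d2r, aspect);
        }
    }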
+
+ /**
+ * Returns a custom symmetry {@link FovHVHalves} instance in tangents, using:
+ *
+ left = tan( horizontalFov * horizCenterFromLeft )
+ right = tan( horizontalFov * ( 1f - horizCenterFromLeft ) )
+ top = tan( verticalFov * vertCenterFromTop )
+ bottom = tan( verticalFov * (1f - vertCenterFromTop ) )
+ *
+ * @param horizontalFov whole horizontal FOV in radians
+ * @param horizCenterFromLeft horizontal center from left in [0..1]
+ * @param verticalFov whole vertical FOV in radians
+ * @param vertCenterFromTop vertical center from top in [0..1]
+ */
+ public static FovHVHalves byRadians(final float horizontalFov, final float horizCenterFromLeft,
+ final float verticalFov, final float vertCenterFromTop) {
+ return new FovHVHalves(FloatUtil.tan(horizontalFov * horizCenterFromLeft),
+ FloatUtil.tan(horizontalFov * ( 1f - horizCenterFromLeft )),
+ FloatUtil.tan(verticalFov * vertCenterFromTop),
+ FloatUtil.tan(verticalFov * (1f - vertCenterFromTop )),
+ true);
+ }
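A sketch of an asymmetric setup, e.g. a horizontal center shifted towards the nose (the chosen angles and center fractions are illustrative only):

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.FovHVHalves;

    final class AsymmetricFovSketch {
        static FovHVHalves noseShiftedLeftEye() {
            final float d2r = FloatUtil.PI / 180f;
            // 100 deg horizontal FOV with its center at 45% from the left, 90 deg vertical FOV centered.
            final FovHVHalves fov = FovHVHalves.byRadians(100f * d2r, 0.45f, 90f * d2r, 0.5f);
            // fov.left == tan(45 deg), fov.right == tan(55 deg), fov.top == fov.bottom == tan(45 deg)
            return fov;
        }
    }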
+
+ /**
+ * Returns a custom symmetry {@link FovHVHalves} instance in tangents,
+ * via computing the horizontalFov using:
+ *
+ halfVertFovTan = tan( verticalFov / 2f );
+ halfHorizFovTan = aspect * halfVertFovTan;
+ horizontalFov = atan( halfHorizFovTan ) * 2f;
+ return {@link #byRadians(float, float, float, float) byRadians}(horizontalFov, horizCenterFromLeft, verticalFov, vertCenterFromTop)
+ *
+ * @param verticalFov whole vertical FOV in radians
+ * @param vertCenterFromTop vertical center from top in [0..1]
+ * @param aspect aspect ratio width / height
+ * @param horizCenterFromLeft horizontal center from left in [0..1]
+ */
+ public static FovHVHalves byFovyRadianAndAspect(final float verticalFov, final float vertCenterFromTop,
+ final float aspect, final float horizCenterFromLeft) {
+ final float halfVertFovTan = FloatUtil.tan(verticalFov/2f);
+ final float halfHorizFovTan = aspect * halfVertFovTan;
+ final float horizontalFov = FloatUtil.atan(halfHorizFovTan) * 2f;
+ return byRadians(horizontalFov, horizCenterFromLeft, verticalFov, vertCenterFromTop);
+ }
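This is the variant the generic side-by-side device configurations below rely on; a condensed per-eye sketch (the pupil-center fractions here are illustrative, not measured device data):

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.FovHVHalves;

    final class PerEyeFovSketch {
        static FovHVHalves[] sideBySideFovs() {
            final float d2r = FloatUtil.PI / 180f;
            final float aspect = (1280f / 2f) / 800f;             // one half of a 1280x800 SBS surface
            final float vertCenterFromTop = 0.5f;                 // pupil vertically centered
            final float[] horizCenterFromLeft = { 0.45f, 0.55f }; // illustrative per-eye pupil centers
            return new FovHVHalves[] {
                FovHVHalves.byFovyRadianAndAspect(45f * d2r, vertCenterFromTop, aspect, horizCenterFromLeft[0]),
                FovHVHalves.byFovyRadianAndAspect(45f * d2r, vertCenterFromTop, aspect, horizCenterFromLeft[1])
            };
        }
    }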
+
+ /**
+ * Returns this instance in tangent values.
*
+ * Result is an array of float values for
+ *
+ * Monoscopic devices return an array length of one, without the value for the right-eye!
+ *
 */
 public FovHVHalves[] getDefaultFOV();
@@ -76,13 +92,61 @@ public interface StereoDevice {
 /** Return true if sensors have been started, false otherwise */
 public boolean getSensorsStarted();
+ /**
+ * Returns an array of the preferred eye rendering order.
+ * The array length reflects the supported eye count.
+ *
+ * Monoscopic devices only support one eye, whereas stereoscopic devices support two eyes.
+ *
+ */
+ public int[] getEyeRenderOrder();
+
+ /**
+ * Returns the supported distortion compensation by the {@link StereoDeviceRenderer},
+ * e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, {@link StereoDeviceRenderer#DISTORTION_CHROMATIC}, etc.
+ * @see StereoDeviceRenderer#getDistortionBits()
+ * @see #createRenderer(int, int, float[], FovHVHalves[], float, int)
+ * @see #getRecommendedDistortionBits()
+ * @see #getMinimumDistortionBits()
+ */
+ public int getSupportedDistortionBits();
+
+ /**
+ * Returns the recommended distortion compensation bits for the {@link StereoDeviceRenderer},
+ * e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, {@link StereoDeviceRenderer#DISTORTION_CHROMATIC}
+ * {@link StereoDeviceRenderer#DISTORTION_VIGNETTE}.
+ *
+ * Users shall use the recommended distortion compensation to achieve a distortion-free view.
+ *
+ * @see StereoDeviceRenderer#getDistortionBits()
+ * @see #createRenderer(int, int, float[], FovHVHalves[], float, int)
+ * @see #getSupportedDistortionBits()
+ * @see #getMinimumDistortionBits()
+ */
+ public int getRecommendedDistortionBits();
+
+ /**
+ * Returns the minimum distortion compensation bits as required by the {@link StereoDeviceRenderer},
+ * e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL} in case the stereoscopic display uses [a]spherical lenses.
+ *
+ * Minimum distortion compensation bits are enforced by the {@link StereoDeviceRenderer}.
+ *
+ * @see #getSupportedDistortionBits()
+ * @see #getRecommendedDistortionBits()
+ * @see StereoDeviceRenderer#getDistortionBits()
+ * @see #createRenderer(int, int, float[], FovHVHalves[], float, int)
+ */
+ public int getMinimumDistortionBits();
+
 /**
 * Create a new {@link StereoDeviceRenderer} instance.
 *
- * @param distortionBits {@link StereoDeviceRenderer} distortion bits, e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, etc.
- * @param textureCount desired texture count for post-processing, see {@link StereoDeviceRenderer#getTextureCount()} and {@link StereoDeviceRenderer#ppRequired()}
- * @param eyePositionOffset eye position offset, e.g. {@link #DEFAULT_EYE_POSITION_OFFSET}.
- * @param eyeFov FovHVHalves[2] field-of-view per eye
+ * @param distortionBits {@link StereoDeviceRenderer} distortion bits, e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, etc,
+ * see {@link #getRecommendedDistortionBits()}.
+ * @param textureCount desired texture count for post-processing, see {@link StereoDeviceRenderer#getTextureCount()} and {@link StereoDeviceRenderer#ppAvailable()}
+ * @param eyePositionOffset eye position offset, e.g. {@link #getDefaultEyePositionOffset()}.
+ * @param eyeFov FovHVHalves[] field-of-view per eye, e.g. {@link #getDefaultFOV()}. May contain only one value for monoscopic devices,
+ * see {@link #getEyeRenderOrder()}.
 * @param pixelsPerDisplayPixel
 * @param textureUnit
 * @return
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
index d9054ce28..46ce82f03 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
@@ -31,26 +31,43 @@ import com.jogamp.common.util.ReflectionUtil;
 /**
 * Platform agnostic {@link StereoDevice} factory.
+ *
+ * To implement a new {@link StereoDevice}, the following interfaces/classes must be implemented:
+ *
- * Distortion requires {@link #ppRequired() post-processing}.
+ * Distortion requires {@link #ppAvailable() post-processing}.
 *
 */
 public int getDistortionBits();
@@ -133,7 +133,7 @@ public interface StereoDeviceRenderer {
 *
 * Either the renderer presents the images side-by-side according to the {@link Eye#getViewport() eye's viewport},
- * or {@link #ppRequired() post-processing} is utilized to merge {@link #getTextureCount() textures}
+ * or {@link #ppAvailable() post-processing} is utilized to merge {@link #getTextureCount() textures}
 * to a side-by-side configuration.
 *
 */
@@ -156,7 +156,7 @@ public interface StereoDeviceRenderer {
 public DimensionImmutable getTotalSurfaceSize();

 /**
- * Returns the used texture-image count for post-processing, see {@link #ppRequired()}.
+ * Returns the used texture-image count for post-processing, see {@link #ppAvailable()}.
 *
* In case the renderer does not support multiple textures for post-processing,
* or no post-processing at all, method returns zero despite the request
@@ -165,7 +165,7 @@ public interface StereoDeviceRenderer {
*/
public int getTextureCount();
- /** Returns the desired texture-image unit for post-processing, see {@link #ppRequired()}. */
+ /** Returns the desired texture-image unit for post-processing, see {@link #ppAvailable()}. */
public int getTextureUnit();
/** Initialize OpenGL related resources */
@@ -181,13 +181,12 @@ public interface StereoDeviceRenderer {
public void endFrame(final GL gl);
/**
- * Returns true if stereoscopic post-processing is required,
+ * Returns true if stereoscopic post-processing is required and available,
 * otherwise false.
 *
- * Stereoscopic post-processing is usually required if:
+ * Stereoscopic post-processing is available if:
 *
@@ -195,15 +194,15 @@ public interface StereoDeviceRenderer {
 * the following post-processing methods must be called before {@link #endFrame()}:
 *
* {@link #updateEyePose(int)} for both eyes must be called upfront * when rendering upstream {@link StereoGLEventListener}. @@ -214,20 +213,14 @@ public interface StereoDeviceRenderer { public void ppBegin(final GL gl); /** - * Performs stereoscopic post-processing for both eyes, see {@link #ppRequired()}. - * @param gl - */ - public void ppBothEyes(final GL gl); - - /** - * Performs stereoscopic post-processing for one eye, see {@link #ppRequired()}. + * Performs stereoscopic post-processing for one eye, see {@link #ppAvailable()}. * @param gl * @param eyeNum */ public void ppOneEye(final GL gl, final int eyeNum); /** - * End stereoscopic post-processing, see {@link #ppRequired()}. + * End stereoscopic post-processing, see {@link #ppAvailable()}. * @param gl */ public void ppEnd(final GL gl); diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java index 280d99233..3031013b8 100644 --- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java +++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java @@ -50,7 +50,7 @@ public class StereoUtil { final StringBuilder sb = new StringBuilder(); if( usesBarrelDistortion(distortionBits) ) { if( appendComma ) { sb.append(", "); }; - sb.append("barrell"); appendComma=true; + sb.append("barrel"); appendComma=true; } if( usesVignetteDistortion(distortionBits) ) { if( appendComma ) { sb.append(", "); }; diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java b/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java new file mode 100644 index 000000000..7a2483121 --- /dev/null +++ b/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java @@ -0,0 +1,95 @@ +/** + * Copyright 2014 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. 
+ */ +package jogamp.opengl.util.stereo; + +import com.jogamp.opengl.util.stereo.EyeParameter; + +public class DistortionMesh { + public static interface Producer { + /** Initialize */ + void init(final GenericStereoDevice.Config deviceConfig, final float[] eyeReliefInMeters); + + /** Distortion Mesh Producer */ + DistortionMesh create(final EyeParameter eyeParam, final int distortionBits); + } + public static class DistortionVertex { + /** {@value} */ + public static final int def_pos_size = 2; + /** {@value} */ + public static final int def_vignetteFactor_size = 1; + /** {@value} */ + public static final int def_timewarpFactor_size = 1; + /** {@value} */ + public static final int def_texR_size = 2; + /** {@value} */ + public static final int def_texG_size = 2; + /** {@value} */ + public static final int def_texB_size = 2; + + /** {@value} */ + public static final int def_total_size = def_pos_size + def_vignetteFactor_size + def_timewarpFactor_size + + def_texR_size + def_texG_size + def_texB_size; + + public DistortionVertex(final float[] data, final int pos_size, + final int vignetteFactor_size, final int timewarpFactor_size, final int texR_size, + final int texG_size, final int texB_size) { + this.data = data; + this.pos_size = pos_size; + this.vignetteFactor_size = vignetteFactor_size; + this.timewarpFactor_size = timewarpFactor_size; + this.texR_size = texR_size; + this.texG_size = texG_size; + this.texB_size = texB_size; + } + final float[] data; + + /** Usually {@link #def_pos_size} */ + final int pos_size; + /** Usually {@link #def_vignetteFactor_size} */ + final int vignetteFactor_size; + /** Usually {@link #def_timewarpFactor_size} */ + final int timewarpFactor_size; + /** Usually {@link #def_texR_size} */ + final int texR_size; + /** Usually {@link #def_texG_size} */ + final int texG_size; + /** Usually {@link #def_texB_size} */ + final int texB_size; + } + public DistortionMesh(final DistortionMesh.DistortionVertex[] vertices, final int vertexCount, + final short[] indices, final int indexCount) { + this.vertices = vertices; + this.vertexCount = vertexCount; + this.indices = indices; + this.indexCount = indexCount; + } + final DistortionMesh.DistortionVertex[] vertices; + final int vertexCount; + final short[] indices; + final int indexCount; +} diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java new file mode 100644 index 000000000..d7fb95d91 --- /dev/null +++ b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java @@ -0,0 +1,457 @@ +/** + * Copyright 2014 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ +package jogamp.opengl.util.stereo; + +import java.util.Arrays; + +import javax.media.nativewindow.util.Dimension; +import javax.media.nativewindow.util.DimensionImmutable; +import javax.media.nativewindow.util.Point; +import javax.media.nativewindow.util.PointImmutable; +import javax.media.nativewindow.util.Rectangle; +import javax.media.nativewindow.util.RectangleImmutable; + +import com.jogamp.common.util.ReflectionUtil; +import com.jogamp.opengl.math.FloatUtil; +import com.jogamp.opengl.math.FovHVHalves; +import com.jogamp.opengl.util.stereo.EyeParameter; +import com.jogamp.opengl.util.stereo.StereoDevice; +import com.jogamp.opengl.util.stereo.StereoDeviceRenderer; +import com.jogamp.opengl.util.stereo.StereoUtil; + +public class GenericStereoDevice implements StereoDevice { + public static enum ShutterType { + Global, RollingLeftToRight, RollingRightToLeft, RollingTopToBottom + } + public static class Config extends StereoDevice.Config { + public Config(final String name, + final ShutterType shutterType, + final DimensionImmutable surfaceSizeInPixels, + final float[] screenSizeInMeters, + final DimensionImmutable eyeTextureSize, + final float pupilCenterFromScreenTopInMeters, + final float interpupillaryDistanceInMeters, + final int[] eyeRenderOrder, + final EyeParameter[] defaultEyeParam, + final DistortionMesh.Producer distortionMeshProducer, + final int supportedDistortionBits, + final int recommendedDistortionBits, + final int minimumDistortionBits + ) { + this.name = name; + this.shutterType = shutterType; + this.surfaceSizeInPixels = surfaceSizeInPixels; + this.screenSizeInMeters = screenSizeInMeters; + this.eyeTextureSize = eyeTextureSize; + this.pupilCenterFromScreenTopInMeters = pupilCenterFromScreenTopInMeters; + this.interpupillaryDistanceInMeters = interpupillaryDistanceInMeters; + this.eyeRenderOrder = eyeRenderOrder; + this.defaultEyeParam = defaultEyeParam; + this.distortionMeshProducer = distortionMeshProducer; + this.supportedDistortionBits = supportedDistortionBits; + this.recommendedDistortionBits = recommendedDistortionBits; + this.minimumDistortionBits = minimumDistortionBits; + this.pupilCenterFromTopLeft = new float[2][2]; + calcPupilCenterFromTopLeft(); + } + /** A variation w/ different surface/screen specs */ + public Config(final Config source, + final DimensionImmutable surfaceSizeInPixels, + final float[] screenSizeInMeters, + final DimensionImmutable eyeTextureSize) { + this.name = source.name; + this.shutterType = source.shutterType; + this.surfaceSizeInPixels = surfaceSizeInPixels; + this.screenSizeInMeters = screenSizeInMeters; + this.eyeTextureSize = eyeTextureSize; + this.pupilCenterFromScreenTopInMeters = source.pupilCenterFromScreenTopInMeters; + this.interpupillaryDistanceInMeters = 
source.interpupillaryDistanceInMeters; + this.eyeRenderOrder = source.eyeRenderOrder; + this.defaultEyeParam = source.defaultEyeParam; + this.distortionMeshProducer = source.distortionMeshProducer; + this.supportedDistortionBits = source.supportedDistortionBits; + this.recommendedDistortionBits = source.recommendedDistortionBits; + this.minimumDistortionBits = source.minimumDistortionBits; + this.pupilCenterFromTopLeft = new float[2][2]; + calcPupilCenterFromTopLeft(); + } + private void calcPupilCenterFromTopLeft() { + final float visibleWidthOfOneEye = 0.5f * screenSizeInMeters[0]; + final float leftPupilCenterFromLeftInMeters = ( screenSizeInMeters[0] - interpupillaryDistanceInMeters ) * 0.5f; + final float rightPupilCenterFromMiddleInMeters = leftPupilCenterFromLeftInMeters + interpupillaryDistanceInMeters - visibleWidthOfOneEye; + pupilCenterFromTopLeft[0][0] = leftPupilCenterFromLeftInMeters / visibleWidthOfOneEye; + pupilCenterFromTopLeft[0][1] = pupilCenterFromScreenTopInMeters / screenSizeInMeters[1]; + pupilCenterFromTopLeft[1][0] = rightPupilCenterFromMiddleInMeters / visibleWidthOfOneEye; + pupilCenterFromTopLeft[1][1] = pupilCenterFromTopLeft[0][1]; + } + + /** + * Return the vertical pupil center from the screen top in the range [0..1]. + * @param screenHeightInMeters + * @param pupilCenterFromScreenTopInMeters + */ + public static float getVertPupilCenterFromTop(final float screenHeightInMeters, final float pupilCenterFromScreenTopInMeters) { + return pupilCenterFromScreenTopInMeters / screenHeightInMeters; + } + + /** + * Return the horizontal pupil center from the left side for both eyes in the range [0..1]. + *
+ <-------------left eye------------->| |<-----------right eye--------------> + <------------------------------------screenSizeInMeters.Width-----------------------------------> + <------interpupillaryDistanceInMeters------> + <--centerFromLeftInMeters-> + ^ + center of pupil + *+ * + * @param screenWidthInMeters + * @param interpupillaryDistanceInMeters + */ + public static float[] getHorizPupilCenterFromLeft(final float screenWidthInMeters, final float interpupillaryDistanceInMeters) { + final float visibleWidthOfOneEye = 0.5f * screenWidthInMeters; + final float leftPupilCenterFromLeftInMeters = ( screenWidthInMeters - interpupillaryDistanceInMeters ) * 0.5f; + final float rightPupilCenterFromMiddleInMeters = leftPupilCenterFromLeftInMeters + interpupillaryDistanceInMeters - visibleWidthOfOneEye; + return new float[] { leftPupilCenterFromLeftInMeters / visibleWidthOfOneEye, + rightPupilCenterFromMiddleInMeters / visibleWidthOfOneEye }; + } + + private void init() { + final float[] eyeReliefInMeters = new float[defaultEyeParam.length]; + if( 0 < defaultEyeParam.length ) { + eyeReliefInMeters[0] = defaultEyeParam[0].eyeReliefZ; + } + if( 1 < defaultEyeParam.length ) { + eyeReliefInMeters[1] = defaultEyeParam[1].eyeReliefZ; + } + if( null != distortionMeshProducer ) { + distortionMeshProducer.init(this, eyeReliefInMeters); + } + } + + @Override + public String toString() { return "StereoConfig["+name+", shutter "+shutterType+", surfaceSize "+surfaceSizeInPixels+ + ", screenSize "+screenSizeInMeters[0]+" x "+screenSizeInMeters[0]+ + " [m], eyeTexSize "+eyeTextureSize+", IPD "+interpupillaryDistanceInMeters+ + " [m], eyeParam "+Arrays.toString(defaultEyeParam)+ + ", distortionBits[supported ["+StereoUtil.distortionBitsToString(supportedDistortionBits)+ + "], recommended ["+StereoUtil.distortionBitsToString(recommendedDistortionBits)+ + "], minimum ["+StereoUtil.distortionBitsToString(minimumDistortionBits)+"]]]"; + } + + /** Configuration Name */ + public final String name; + public final ShutterType shutterType; + + public final DimensionImmutable surfaceSizeInPixels; + public final float[] screenSizeInMeters; + /** Texture size per eye */ + public final DimensionImmutable eyeTextureSize; + + /** Vertical distance from pupil to screen-top in meters */ + public final float pupilCenterFromScreenTopInMeters; + /** Horizontal interpupillary distance (IPD) in meters */ + public final float interpupillaryDistanceInMeters; + /** + * Pupil center from top left per eye, ranging from [0..1], maybe used to produce FovHVHalves, + * see {@link #getHorizPupilCenterFromLeft(float, float)} and {@link #getVertPupilCenterFromTop(float, float)}. 
+ */ + public final float[/*per-eye*/][/*xy*/] pupilCenterFromTopLeft; + public final int[] eyeRenderOrder; + public final EyeParameter[] defaultEyeParam; + public final DistortionMesh.Producer distortionMeshProducer; + + public final int supportedDistortionBits; + public final int recommendedDistortionBits; + public final int minimumDistortionBits; + } + + /** A mono view configuration, only one eye is supported */ + public static final Config config01Mono01; + + /** A default stereo SBS view configuration */ + public static final Config config02StereoSBS01; + + /** A default stereo SBS lense view configuration, utilizing similar settings as OculusVR DK1 */ + public static final Config config03StereoSBSLense01; + + private static final Config[] configs; + + static { + final float[] DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES = { 0.0f, 1.6f, -5.0f }; // 1.6 up, 5 forward + final float[] DEFAULT_EYE_POSITION_OFFSET_STEREO = { 0.0f, 0.3f, 3.0f }; // 0.3 up, 3 back + final float[] DEFAULT_EYE_POSITION_OFFSET_MONO = { 0.0f, 0.0f, 3.0f }; // 3 back + + final float d2r = FloatUtil.PI / 180.0f; + { + config01Mono01 = new Config( + "Def01Mono01", + ShutterType.RollingTopToBottom, + new Dimension(1280, 800), // resolution + new float[] { 0.1498f, 0.0936f }, // screenSize [m] + new Dimension(1280, 800), // eye textureSize + 0.0936f/2f, // pupilCenterFromScreenTop [m] + 0.0635f, // IPD [m] + new int[] { 0 }, // eye order + new EyeParameter[] { + new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_MONO, + // degrees: 45/2 l, 45/2 r, 45/2 * aspect t, 45/2 * aspect b + FovHVHalves.byFovyRadianAndAspect(45f*d2r, 1280f / 800f), + 0f /* distNoseToPupil */, 0f /* verticalDelta */, 0f /* eyeReliefInMeters */) }, + null, // mash producer distortion bits + 0, // supported distortion bits + 0, // recommended distortion bits + 0 // minimum distortion bits + ); + } + + { + final DimensionImmutable surfaceSizeInPixel = new Dimension(1280, 800); + final float[] screenSizeInMeters = new float[] { 0.1498f, 0.0936f }; + final float interpupillaryDistanceInMeters = 0.0635f; + final float pupilCenterFromScreenTopInMeters = screenSizeInMeters[1] / 2f; + final float[] horizPupilCenterFromLeft = Config.getHorizPupilCenterFromLeft(screenSizeInMeters[0], interpupillaryDistanceInMeters); + final float vertPupilCenterFromTop = Config.getVertPupilCenterFromTop(screenSizeInMeters[1], pupilCenterFromScreenTopInMeters); + final float fovy = 45f; + final float aspect = ( surfaceSizeInPixel.getWidth() / 2.0f ) / surfaceSizeInPixel.getHeight(); + final FovHVHalves defaultSBSEyeFovLeft = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[0]); + final FovHVHalves defaultSBSEyeFovRight = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[1]); + + config02StereoSBS01 = new Config( + "Def02StereoSBS01", + ShutterType.RollingTopToBottom, + surfaceSizeInPixel, // resolution + screenSizeInMeters, // screenSize [m] + new Dimension(1280/2, 800), // eye textureSize + 0.0936f/2f, // pupilCenterFromScreenTop [m] + interpupillaryDistanceInMeters, // IPD [m] + new int[] { 0, 1 }, // eye order + new EyeParameter[] { + new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_STEREO, defaultSBSEyeFovLeft, + 0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */), + new EyeParameter(1, DEFAULT_EYE_POSITION_OFFSET_STEREO, defaultSBSEyeFovRight, + -0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */) }, + 
null, // mash producer distortion bits + 0, // supported distortion bits + 0, // recommended distortion bits + 0 // minimum distortion bits + ); + } + + { + DistortionMesh.Producer lenseDistMeshProduce = null; + try { + lenseDistMeshProduce = + (DistortionMesh.Producer) + ReflectionUtil.createInstance("jogamp.opengl.oculusvr.stereo.lense.DistortionMeshProducer", GenericStereoDevice.class.getClassLoader()); + } catch (final Throwable t) { + if(StereoDevice.DEBUG) { System.err.println("Caught: "+t.getMessage()); t.printStackTrace(); } + } + + final DimensionImmutable surfaceSizeInPixel = new Dimension(1280, 800); + final float[] screenSizeInMeters = new float[] { 0.1498f, 0.0936f }; + final DimensionImmutable eyeTextureSize = new Dimension(1122, 1553); + final float interpupillaryDistanceInMeters = 0.0635f; + final float pupilCenterFromScreenTopInMeters = screenSizeInMeters[1] / 2f; + final float[] horizPupilCenterFromLeft = Config.getHorizPupilCenterFromLeft(screenSizeInMeters[0], interpupillaryDistanceInMeters); + final float vertPupilCenterFromTop = Config.getVertPupilCenterFromTop(screenSizeInMeters[1], pupilCenterFromScreenTopInMeters); + final float fovy = 129f; + final float aspect = eyeTextureSize.getWidth() / eyeTextureSize.getHeight(); + final FovHVHalves defaultSBSEyeFovLenseLeft = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[0]); + final FovHVHalves defaultSBSEyeFovLenseRight = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[1]); + + config03StereoSBSLense01 = null == lenseDistMeshProduce ? null : + new Config( + "Def03StereoSBSLense01", + ShutterType.RollingTopToBottom, + surfaceSizeInPixel, // resolution + screenSizeInMeters, // screenSize [m] + eyeTextureSize, // eye textureSize + pupilCenterFromScreenTopInMeters, // pupilCenterFromScreenTop [m] + interpupillaryDistanceInMeters, // IPD [m] + new int[] { 0, 1 }, // eye order + new EyeParameter[] { + new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES, defaultSBSEyeFovLenseLeft, + 0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */), + new EyeParameter(1, DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES, defaultSBSEyeFovLenseRight, + -0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */) }, + lenseDistMeshProduce, + // supported distortion bits + StereoDeviceRenderer.DISTORTION_BARREL | StereoDeviceRenderer.DISTORTION_CHROMATIC | StereoDeviceRenderer.DISTORTION_VIGNETTE, + // recommended distortion bits + StereoDeviceRenderer.DISTORTION_BARREL | StereoDeviceRenderer.DISTORTION_CHROMATIC | StereoDeviceRenderer.DISTORTION_VIGNETTE, + // minimum distortion bits + StereoDeviceRenderer.DISTORTION_BARREL + ); + } + configs = new Config[] { config01Mono01, config02StereoSBS01, config03StereoSBSLense01 }; + } + + public final int deviceIndex; + public final Config config; + + public final Point surfacePos; + private final FovHVHalves[] defaultEyeFov; + + private boolean sensorsStarted = false; + + public GenericStereoDevice(final int deviceIndex, final StereoDevice.Config customConfig) { + this.deviceIndex = deviceIndex; + + if( customConfig instanceof GenericStereoDevice.Config) { + this.config = (GenericStereoDevice.Config) customConfig; + } else { + final int cfgIdx = Math.min(deviceIndex % 10, configs.length-1); + this.config = null != configs[cfgIdx] ? 
configs[cfgIdx] : config02StereoSBS01; + } + config.init(); + + this.surfacePos = new Point(0, 0); + + defaultEyeFov = new FovHVHalves[config.defaultEyeParam.length]; + for(int i=0; i
fovHVHalves
.
+ */
+ public ScaleAndOffset2D(final FovHVHalves fovHVHalves) {
+ final FovHVHalves tanFovHVHalves = fovHVHalves.toTangents();
+ final float projXScale = 2.0f / ( tanFovHVHalves.left + tanFovHVHalves.right );
+ final float projYScale = 2.0f / ( tanFovHVHalves.top + tanFovHVHalves.bottom );
+ final float projXOffset = ( tanFovHVHalves.left - tanFovHVHalves.right ) * projXScale * 0.5f;
+ final float projYOffset = ( tanFovHVHalves.top - tanFovHVHalves.bottom ) * projYScale * 0.5f;
+
+ this.scale = new float[] { projXScale, projYScale };
+ this.offset = new float[] { projXOffset, projYOffset };
+ }
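A worked sketch of the scale/offset computation above for a symmetric FOV; the ScaleAndOffset2D package is assumed to be jogamp.opengl.util.stereo, matching the neighbouring files of this patch:

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.FovHVHalves;
    import jogamp.opengl.util.stereo.ScaleAndOffset2D; // package assumed

    final class EyeToSourceNdcSketch {
        static ScaleAndOffset2D symmetricNdc() {
            final float d2r = FloatUtil.PI / 180f;
            final FovHVHalves fov = FovHVHalves.byFovyRadianAndAspect(45f * d2r, 1.6f);
            // Symmetric FOV: offset ends up as {0, 0}, scale as { 1/left, 1/top } ~ { 1.51f, 2.41f }
            return new ScaleAndOffset2D(fov);
        }
    }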
+
+ /**
+ * Create the Normalized Device Coordinate Space (NDC) [-1,+1] instance
+ * from the given fovHVHalves, for the subsection of the render-viewport within the rendertarget-size.
+ */
+ public ScaleAndOffset2D(final FovHVHalves fovHVHalves, final DimensionImmutable rendertargetSize, final RectangleImmutable renderViewport) {
+ final ScaleAndOffset2D eyeToSourceNDC = new ScaleAndOffset2D(fovHVHalves);
+ final float[] vec2Tmp1 = new float[2];
+ final float[] vec2Tmp2 = new float[2];
+ final float[] scale = VectorUtil.scaleVec2(vec2Tmp1, eyeToSourceNDC.scale, 0.5f);
+ final float[] offset = VectorUtil.addVec2(vec2Tmp2, VectorUtil.scaleVec2(vec2Tmp2, eyeToSourceNDC.offset, 0.5f), vec2Half);
+
+ final float[] scale2 = new float[] { (float)renderViewport.getWidth() / (float)rendertargetSize.getWidth(),
+ (float)renderViewport.getHeight() / (float)rendertargetSize.getHeight() };
+
+ final float[] offset2 = new float[] { (float)renderViewport.getX() / (float)rendertargetSize.getWidth(),
+ (float)renderViewport.getY() / (float)rendertargetSize.getHeight() };
+
+ VectorUtil.scaleVec2(scale, scale, scale2);
+ VectorUtil.addVec2(offset, VectorUtil.scaleVec2(offset, offset, scale2), offset2);
+
+ this.scale = scale;
+ this.offset = offset;
+ }
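A sketch of the viewport-relative form, e.g. the left half of a shared 1280x800 render target; Dimension and Rectangle are the javax.media.nativewindow.util types imported elsewhere in this patch, the ScaleAndOffset2D package again assumed:

    import javax.media.nativewindow.util.Dimension;
    import javax.media.nativewindow.util.Rectangle;

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.FovHVHalves;
    import jogamp.opengl.util.stereo.ScaleAndOffset2D; // package assumed

    final class ViewportNdcSketch {
        static ScaleAndOffset2D leftEyeNdc() {
            final float d2r = FloatUtil.PI / 180f;
            final FovHVHalves fov = FovHVHalves.byFovyRadianAndAspect(45f * d2r, (1280f / 2f) / 800f);
            // Left eye renders into the left half of the shared render target.
            return new ScaleAndOffset2D(fov, new Dimension(1280, 800), new Rectangle(0, 0, 640, 800));
        }
    }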
+
+ /**
+ * Converts the given rendertarget NDC coordinates back into the tangent FOV space of this eye-to-source NDC instance.
+ */
+ public final float[] convertToTanFovSpace(final float[] rendertargetNDC) {
+ final float[] vec2Tmp1 = new float[2];
+ return VectorUtil.divVec2(vec2Tmp1, VectorUtil.subVec2(vec2Tmp1, rendertargetNDC, this.offset), this.scale);
+ }
+
+}
\ No newline at end of file
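Usage-wise, convertToTanFovSpace() inverts the scale/offset mapping above; a small sketch mapping the render-target NDC center back into tangent space (ScaleAndOffset2D package assumed as before):

    import jogamp.opengl.util.stereo.ScaleAndOffset2D; // package assumed

    final class TanFovSpaceSketch {
        static float[] centerInTanFovSpace(final ScaleAndOffset2D eyeToSourceNDC) {
            // The NDC center {0, 0} maps to -offset/scale in tangent FOV space.
            return eyeToSourceNDC.convertToTanFovSpace(new float[] { 0f, 0f });
        }
    }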
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp
new file mode 100644
index 000000000..4ac404729
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp
@@ -0,0 +1,26 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define varying in
+ out vec4 svr_FragColor;
+ #define texture2D texture
+#else
+ #define svr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D svr_Texture0;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+varying vec2 svv_TexCoordG;
+varying vec2 svv_TexCoordB;
+
+void main (void)
+{
+ // 3 samples for fixing chromatic aberrations
+ vec3 color = vec3(texture2D(svr_Texture0, svv_TexCoordR).r,
+ texture2D(svr_Texture0, svv_TexCoordG).g,
+ texture2D(svr_Texture0, svv_TexCoordB).b);
+ svr_FragColor = vec4(svv_Fade * color, 1.0); // include vignetteFade
+}
+
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp
new file mode 100644
index 000000000..d4ab585d5
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp
@@ -0,0 +1,33 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+attribute vec2 svr_TexCoordG;
+attribute vec2 svr_TexCoordB;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+varying vec2 svv_TexCoordG;
+varying vec2 svv_TexCoordB;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.5, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ svv_TexCoordR = svr_TexCoordR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+ svv_TexCoordG = svr_TexCoordG * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordG.y = 1.0-svv_TexCoordG.y;
+ svv_TexCoordB = svr_TexCoordB * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordB.y = 1.0-svv_TexCoordB.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp
new file mode 100644
index 000000000..2df890648
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp
@@ -0,0 +1,22 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define varying in
+ out vec4 svr_FragColor;
+ #define texture2D texture
+#else
+ #define svr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D svr_Texture0;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+
+void main (void)
+{
+ // 3 samples for fixing chromatic aberrations
+ vec3 color = texture2D(svr_Texture0, svv_TexCoordR).rgb;
+ svr_FragColor = vec4(svv_Fade * color, 1.0); // include vignetteFade
+}
+
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp
new file mode 100644
index 000000000..335d3f0f6
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp
@@ -0,0 +1,27 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.5, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ svv_TexCoordR = svr_TexCoordR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp
new file mode 100644
index 000000000..c4461ec3e
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp
@@ -0,0 +1,44 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+uniform mat4 svr_EyeRotationStart;
+uniform mat4 svr_EyeRotationEnd;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.0, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+ vec3 TanEyeAngle = vec3 ( svr_TexCoordR, 1.0 );
+
+ // Accurate time warp lerp vs. faster
+ // Apply the two 3x3 timewarp rotations to these vectors.
+ vec3 TransformedStart = (svr_EyeRotationStart * vec4(TanEyeAngle, 0)).xyz;
+ vec3 TransformedEnd = (svr_EyeRotationEnd * vec4(TanEyeAngle, 0)).xyz;
+ // And blend between them.
+ vec3 Transformed = mix ( TransformedStart, TransformedEnd, svr_Params.g /* timewarpLerpFactor */ );
+
+ // Project them back onto the Z=1 plane of the rendered images.
+ float RecipZ = 1.0 / Transformed.z;
+ vec2 Flattened = vec2 ( Transformed.x * RecipZ, Transformed.y * RecipZ );
+
+ // These are now still in TanEyeAngle space.
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ svv_TexCoordR = Flattened * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp
new file mode 100644
index 000000000..c08ed3113
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp
@@ -0,0 +1,65 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+uniform mat4 svr_EyeRotationStart;
+uniform mat4 svr_EyeRotationEnd;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+attribute vec2 svr_TexCoordG;
+attribute vec2 svr_TexCoordB;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+varying vec2 svv_TexCoordG;
+varying vec2 svv_TexCoordB;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.0, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+ vec3 TanEyeAngleR = vec3 ( svr_TexCoordR, 1.0 );
+ vec3 TanEyeAngleG = vec3 ( svr_TexCoordG, 1.0 );
+ vec3 TanEyeAngleB = vec3 ( svr_TexCoordB, 1.0 );
+
+ // Accurate time warp lerp vs. faster
+ // Apply the two 3x3 timewarp rotations to these vectors.
+ vec3 TransformedRStart = (svr_EyeRotationStart * vec4(TanEyeAngleR, 0)).xyz;
+ vec3 TransformedGStart = (svr_EyeRotationStart * vec4(TanEyeAngleG, 0)).xyz;
+ vec3 TransformedBStart = (svr_EyeRotationStart * vec4(TanEyeAngleB, 0)).xyz;
+ vec3 TransformedREnd = (svr_EyeRotationEnd * vec4(TanEyeAngleR, 0)).xyz;
+ vec3 TransformedGEnd = (svr_EyeRotationEnd * vec4(TanEyeAngleG, 0)).xyz;
+ vec3 TransformedBEnd = (svr_EyeRotationEnd * vec4(TanEyeAngleB, 0)).xyz;
+
+ // And blend between them.
+ vec3 TransformedR = mix ( TransformedRStart, TransformedREnd, svr_Params.g /* timewarpLerpFactor */ );
+ vec3 TransformedG = mix ( TransformedGStart, TransformedGEnd, svr_Params.g /* timewarpLerpFactor */ );
+ vec3 TransformedB = mix ( TransformedBStart, TransformedBEnd, svr_Params.g /* timewarpLerpFactor */ );
+
+ // Project them back onto the Z=1 plane of the rendered images.
+ float RecipZR = 1.0 / TransformedR.z;
+ float RecipZG = 1.0 / TransformedG.z;
+ float RecipZB = 1.0 / TransformedB.z;
+ vec2 FlattenedR = vec2 ( TransformedR.x * RecipZR, TransformedR.y * RecipZR );
+ vec2 FlattenedG = vec2 ( TransformedG.x * RecipZG, TransformedG.y * RecipZG );
+ vec2 FlattenedB = vec2 ( TransformedB.x * RecipZB, TransformedB.y * RecipZB );
+
+ // These are now still in TanEyeAngle space.
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ svv_TexCoordR = FlattenedR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+ svv_TexCoordG = FlattenedG * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordG.y = 1.0-svv_TexCoordG.y;
+ svv_TexCoordB = FlattenedB * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordB.y = 1.0-svv_TexCoordB.y;
+}
--
cgit v1.2.3