path: root/src/classes/share/com/sun/j3d/audioengines
author     Julien Gouesse <[email protected]>  2015-11-28 17:08:51 +0100
committer  Julien Gouesse <[email protected]>  2015-11-28 17:08:51 +0100
commit     6aa301b9466351538d779b98e3b756ac5bd34236 (patch)
tree       2ca0bfca4b6dc4c18ab8cf04e8b02caf02171f54 /src/classes/share/com/sun/j3d/audioengines
parent     eb8eab5c70f5390bac752f56bb331b7f55169001 (diff)
Relocate package prefix to org.jogamp.java3d
Diffstat (limited to 'src/classes/share/com/sun/j3d/audioengines')
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/AudioEngine.java                         210
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/AudioEngine3D.java                       502
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/AudioEngine3DL2.java                     309
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/AudioEngineThread.java                   270
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/AuralParameters.java                     190
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/Sample.java                              480
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSAuralParameters.java          72
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSChannel.java                 430
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSClip.java                    348
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSDirectionalSample.java       737
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSMidi.java                     62
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSPositionalSample.java       1344
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSSample.java                  363
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSStream.java                   62
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JSThread.java                  854
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/JavaSoundMixer.java            984
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/javasound/package.html                    11
-rw-r--r--  src/classes/share/com/sun/j3d/audioengines/package.html                              11
18 files changed, 0 insertions, 7239 deletions
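
This diff only removes the sources under the old com.sun.j3d prefix; per the commit message, the package prefix moves to org.jogamp.java3d. For application code the practical effect is an import change. A minimal sketch, assuming the classes are re-added unchanged under the new prefix (not shown in this diff) and that JavaSoundMixer keeps its PhysicalEnvironment constructor:

// Before this commit (old Sun package prefixes):
import javax.media.j3d.PhysicalEnvironment;
import com.sun.j3d.audioengines.javasound.JavaSoundMixer;

public class MixerSetup {
    public static void main(String[] args) {
        PhysicalEnvironment env = new PhysicalEnvironment();
        JavaSoundMixer mixer = new JavaSoundMixer(env); // registers itself with env
        mixer.initialize();
    }
}

// After the relocation, the same classes would be imported from the new prefix,
// assuming they reappear under org.jogamp.java3d in the relocated tree:
//   import org.jogamp.java3d.PhysicalEnvironment;
//   import org.jogamp.java3d.audioengines.javasound.JavaSoundMixer;
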
diff --git a/src/classes/share/com/sun/j3d/audioengines/AudioEngine.java b/src/classes/share/com/sun/j3d/audioengines/AudioEngine.java
deleted file mode 100644
index f81fe4f..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/AudioEngine.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines;
-
-import javax.media.j3d.AudioDevice;
-import javax.media.j3d.PhysicalEnvironment;
-import javax.media.j3d.Sound;
-
-/**
- * The AudioEngine Class defines an audio output device that generates
- * sound 'image' from the scene graph.
- * An AudioEngine object encapsulates the AudioDevice's basic information.
- *
- * <p>
- * NOTE: AudioEngine developers should not subclass this class directly.
- * Subclass AudioEngine3DL2 instead.
- */
-public abstract class AudioEngine implements AudioDevice {
-
- /*
- * This device's UNIX file descriptor
- */
- int fileDescriptor;
-
- /*
- * Type of audio output device J3D sound is played over:
- * HEADPHONES, MONO_SPEAKER, STEREO_SPEAKERS
- */
- int audioPlaybackType = HEADPHONES;
-
- /*
- * Distance from center ear (midpoint between ears) to physical speaker.
- * Default reflects distance for headphones.
- * For two speakers it is assumed that the speakers are the same
- * distance from the listener and that
- */
- float distanceToSpeaker = 0.0f;
-
- /*
- * Angle between the vector from center ear parallel to head coordinate
- * Z axis and the vector from the center ear to the speaker.
- * For two speakers it is assumed that the speakers are placed at the
- * same angular offset from the listener.
- */
- float angleOffsetToSpeaker = 0.0f;
-
- /*
- * Channels currently available
- */
- int channelsAvailable = 8;
-
- /*
- * Total number of Channels ever available
- */
- int totalChannels = 8;
-
- /**
- * Construct a new AudioEngine with the specified PhysicalEnvironment.
- * @param physicalEnvironment the physical environment object where we
- * want access to this device.
- */
- public AudioEngine(PhysicalEnvironment physicalEnvironment ) {
- physicalEnvironment.setAudioDevice(this);
- }
-
- /**
- * Code to initialize the device
- * @return flag: true if initialized successfully, false if error
- */
- @Override
- public abstract boolean initialize();
-
- /**
- * Code to close the device
- * @return flag: true if closed successfully, false if error
- */
- @Override
- public abstract boolean close();
-
- /*
- * Audio Playback Methods
- */
- /**
- * Set Type of Audio Playback physical transducer(s) sound is output to.
- * Valid types are HEADPHONES, MONO_SPEAKER, STEREO_SPEAKERS
- * @param type of audio output device
- */
- @Override
- public void setAudioPlaybackType(int type) {
- audioPlaybackType = type;
- }
-
- /**
- * Get Type of Audio Playback Output Device
- * returns audio playback type to which sound is currently output
- */
- @Override
- public int getAudioPlaybackType() {
- return audioPlaybackType;
- }
-
- /**
- * Set Distance from the Center Ear to a Speaker
- * @param distance from the center ear and to the speaker
- */
- @Override
- public void setCenterEarToSpeaker(float distance) {
- distanceToSpeaker = distance;
- }
-
- /**
- * Get Distance from Ear to Speaker
- * returns value set as distance from listener's ear to speaker
- */
- @Override
- public float getCenterEarToSpeaker() {
- return distanceToSpeaker;
- }
-
- /**
- * Set Angle Offset To Speaker
- * @param angle in radians between head coordinate Z axis and vector to speaker */
- @Override
- public void setAngleOffsetToSpeaker(float angle) {
- angleOffsetToSpeaker = angle;
- }
-
- /**
- * Get Angle Offset To Speaker
- * returns value set as angle between vector to speaker and Z head axis
- */
- @Override
- public float getAngleOffsetToSpeaker() {
- return angleOffsetToSpeaker;
- }
-
- /**
- * Query total number of channels available for sound rendering
- * for this audio device.
- * returns number of maximum sound channels you can run with this
- * library/device-driver.
- */
- @Override
- public int getTotalChannels() {
- // this method should be overridden by a device specific implementation
- return (totalChannels);
- }
-
- /**
- * Query number of channels currently available for use by this
- * audio device.
- * returns number of sound channels currently available (the number
- * not being used by active sounds).
- */
- @Override
- public int getChannelsAvailable() {
- return (channelsAvailable);
- }
-
- /**
- * Query number of channels that would be used to render a particular
- * sound node.
- * @param sound reference to the sound node on which the query is performed
- * returns number of sound channels used by a specific Sound node
- * @deprecated This method is now part of the Sound class
- */
- @Override
- public int getChannelsUsedForSound(Sound sound) {
- if (sound != null)
- return sound.getNumberOfChannelsUsed();
- else
- return -1;
- }
-}
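
AudioEngine itself only stores the playback configuration described by the setters above. A minimal sketch of how an application might set the speaker geometry through the AudioDevice interface; JavaSoundMixer is assumed here only as a convenient concrete engine and the values are illustrative:

import javax.media.j3d.AudioDevice;
import javax.media.j3d.PhysicalEnvironment;
import com.sun.j3d.audioengines.javasound.JavaSoundMixer;

public class SpeakerSetup {
    public static void main(String[] args) {
        PhysicalEnvironment env = new PhysicalEnvironment();
        AudioDevice device = new JavaSoundMixer(env);
        device.initialize();

        // Play over two speakers instead of the default headphones.
        device.setAudioPlaybackType(AudioDevice.STEREO_SPEAKERS);
        // Both speakers are assumed equidistant from the center ear...
        device.setCenterEarToSpeaker(0.5f);
        // ...and symmetrically offset from the head Z axis (angle in radians).
        device.setAngleOffsetToSpeaker((float) Math.toRadians(30.0));
    }
}
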
diff --git a/src/classes/share/com/sun/j3d/audioengines/AudioEngine3D.java b/src/classes/share/com/sun/j3d/audioengines/AudioEngine3D.java
deleted file mode 100644
index dcf334a..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/AudioEngine3D.java
+++ /dev/null
@@ -1,502 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines;
-
-import java.util.ArrayList;
-
-import javax.media.j3d.AudioDevice3D;
-import javax.media.j3d.MediaContainer;
-import javax.media.j3d.PhysicalEnvironment;
-import javax.media.j3d.Transform3D;
-import javax.media.j3d.View;
-import javax.vecmath.Point3d;
-import javax.vecmath.Vector3d;
-
-
-/**
- * The AudioEngine3D Class defines an audio output device that generates
- * sound 'image' from high-level sound parameters passed to it during
- * scene graph rendering.
- *
- * <P>
- * The methods in this class are meant to be optionally overridden by an
- * extended class. This extended class would provide device specific code.
- *
- * <P>
- * Error checking on all parameters passed to these methods is already
- * explicitly being done by the Java 3D core code that calls these methods.
- *
- * <p>
- * NOTE: AudioEngine developers should not subclass this class directly.
- * Subclass AudioEngine3DL2 instead.
- */
-
-public abstract class AudioEngine3D extends AudioEngine implements AudioDevice3D
-{
- /*
- * Identifiers of sample associated with sound source
- * This array grows as the AudioDevice3D implementation requires it larger.
- */
- protected ArrayList samples = new ArrayList(64);
-
- /**
- * Current View sound is being rendered
- */
- protected View currentView = (View)null;
-
- /*
- * current Aural attribute Parameters
- */
- protected AuralParameters attribs = new AuralParameters();
-
- /**
- * Construct a new AudioEngine with the specified PhysicalEnvironment.
- * @param physicalEnvironment the physical environment object where we
- * want access to this device.
- */
- public AudioEngine3D(PhysicalEnvironment physicalEnvironment ) {
- super(physicalEnvironment);
- }
-
- /*
- *
- * Methods that affect AudioEngine3D fields that are NOT associated
- * with a specific sound sample
- *
- */
-
- /**
- * Save a reference to the current View object.
- * @param reference to current view object
- */
- @Override
- public void setView(View reference) {
- currentView = reference;
- return;
- }
- /**
- * Get reference to the current View object.
- * @return reference to current view object
- */
- public View getView() {
- return (currentView);
- }
-
- /*
- *
- * Methods explicitly affect sound rendering and that require
- * audio device specific methods that override this class.
- *
- */
-
- /**
- * Prepare Sound in device.
- * Makes sound accessible to device - in this case attempts to load sound
- * Stores sound type and data.
- * @param soundType denotes type of sound: Background, Point or Cone
- * @param soundData description of sound source data
- * @return index into sample vector of Sample object for sound
- */
- @Override
- public int prepareSound(int soundType, MediaContainer soundData) {
- // This method must be overridden by device specific implementation
- return Sample.NULL_SAMPLE;
- }
-
- /**
- * Clear Sound.
- * Removes/clears associated sound data with this sound source node
- * @param index device specific reference number to device driver sample
- */
- @Override
- public abstract void clearSound(int index);
-
- /**
- * Set the transform for local to virtual world coordinate space
- * @param index device specific reference number to device driver sample
- * @param trans is a reference to virtual world composite transform
- */
- @Override
- public void setVworldXfrm(int index, Transform3D trans) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.vworldXfrm.set(trans);
- return;
- }
- /**
- * Start sample playing on audio device
- * @param index device specific reference number to device driver sample
- * @return status: < 0 denotes an error
- */
- @Override
- public abstract int startSample(int index);
-
- /**
- * Stop sample playing on audio device
- * @param index device specific reference number to device driver sample
- * @return status: < 0 denotes an error
- */
- @Override
- public abstract int stopSample(int index);
-
- /**
- * Update sample.
- * Implies that some parameters affecting rendering have been modified.
- * @param index device specific reference number to device driver sample
- */
- // TODO: The update method exists on a TEMPORARY basis.
- @Override
- public abstract void updateSample(int index);
-
- /**
- * Mute sample.
- * @param index device specific reference number to device driver sample
- */
- @Override
- public abstract void muteSample(int index);
-
- /**
- * Unmute sample.
- * @param index device specific reference number to device driver sample
- */
- @Override
- public abstract void unmuteSample(int index);
-
- /**
- * Pause sample.
- * @param index device specific reference number to device driver sample
- */
- @Override
- public abstract void pauseSample(int index);
-
- /**
- * Unpause sample.
- * @param index device specific reference number to device driver sample
- */
- @Override
- public abstract void unpauseSample(int index);
-
- /*
- *
- * Methods that affect fields associated with the sound sample
- * and that may cause implicit rendering.
- *
- */
- /**
- * Set gain scale factor applied to sample.
- * @param index device specific reference number to device driver sample
- * @param scaleFactor floating point multiplier applied to sample amplitude
- */
- @Override
- public void setSampleGain(int index, float scaleFactor) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setGain(scaleFactor);
- return;
- }
-
- /**
- * Set number of times sample is looped.
- * @param index device specific reference number to device driver sample
- * @param count number of times sample is repeated
- */
- @Override
- public void setLoop(int index, int count) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setLoopCount(count);
- return;
- }
-
- /**
- * Set location of sample.
- * @param index device specific reference number to device driver sample
- * @param position point location in virtual world coordinate of sample
- */
- @Override
- public void setPosition(int index, Point3d position) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setPosition(position);
- return;
- }
-
- /* Set elliptical distance attenuation arrays applied to sample amplitude.
- * @param index device specific reference number to device driver sample
- * @param frontDistance defines an array of distance along the position axis
- * thru which ellipses pass
- * @param frontAttenuationScaleFactor gain scale factors
- * @param backDistance defines an array of distance along the negative axis
- * thru which ellipses pass
- * @param backAttenuationScaleFactor gain scale factors
- */
- @Override
- public void setDistanceGain(int index,
- double[] frontDistance, float[] frontAttenuationScaleFactor,
- double[] backDistance, float[] backAttenuationScaleFactor) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setDistanceGain(frontDistance, frontAttenuationScaleFactor,
- backDistance, backAttenuationScaleFactor);
- return;
- }
-
- /**
- * Set direction vector of sample.
- * @param index device specific reference number to device driver sample
- * @param direction vector in virtual world coordinate.
- */
- @Override
- public void setDirection(int index, Vector3d direction) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setDirection(direction);
- return;
- }
-
- /**
- * Set angular attenuation arrays affecting angular amplitude attenuation
- * and angular distance filtering.
- * @param index device specific reference number to device driver sample
- * @param filterType denotes type of filtering (or no filtering) applied
- * to sample.
- * @param angle array containing angular distances from sound axis
- * @param attenuationScaleFactor array containing gain scale factor
- * @param filterCutoff array containing filter cutoff frequencies.
- * The filter values for each tuples can be set to Sound.NO_FILTER.
- */
- @Override
- public void setAngularAttenuation(int index, int filterType,
- double[] angle, float[] attenuationScaleFactor, float[] filterCutoff) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setAngularAttenuation(filterType, angle,
- attenuationScaleFactor, filterCutoff);
- return;
- }
-
- /**
- * Set rolloff value for current aural attribute applied to all samples.
- * @param rolloff scale factor applied to standard speed of sound.
- */
- @Override
- public void setRolloff(float rolloff) {
- attribs.rolloff = rolloff;
- return;
- }
-
- /**
- * Set reverberation surface reflection coefficient value for current aural
- * attribute applied to all samples.
- * @param coefficient applied to amplitude of reverberation added at each
- * iteration of reverb processing.
- */
- @Override
- public void setReflectionCoefficient(float coefficient) {
- attribs.reflectionCoefficient = coefficient;
- return;
- }
-
- /**
- * Set reverberation delay time for current aural attribute applied to
- * all samples.
- * @param reverbDelay amount of time in milliseconds between each
- * iteration of reverb processing.
- */
- @Override
- public void setReverbDelay(float reverbDelay) {
- attribs.reverbDelay = reverbDelay;
- return;
- }
-
- /**
- * Set reverberation order for current aural attribute applied to all
- * samples.
- * @param reverbOrder number of times reverb process loop is iterated.
- */
- @Override
- public void setReverbOrder(int reverbOrder) {
- attribs.reverbOrder = reverbOrder;
- return;
- }
-
- /**
- * Set distance filter for current aural attribute applied to all samples.
- * @param filterType denotes type of filtering (or no filtering) applied
- * to all samples based on distance between listener and sound.
- * @param dist is an attenuation array of distance and low-pass filter values.
- */
- @Override
- public void setDistanceFilter(int filterType,
- double[] dist, float[] filterCutoff) {
- attribs.setDistanceFilter(filterType, dist, filterCutoff);
- return;
- }
-
- /**
- * Set frequency scale factor for current aural attribute applied to all
- * samples.
- * @param scaleFactor frequency scale factor applied to samples normal
- * playback rate.
- */
- @Override
- public void setFrequencyScaleFactor(float scaleFactor) {
- attribs.frequencyScaleFactor = scaleFactor;
- return;
- }
- /**
- * Set velocity scale factor for current aural attribute applied to all
- * samples when Doppler is calculated.
- * @param scaleFactor scale factor applied to positional samples'
- * listener-to-soundSource velocity.
- */
- @Override
- public void setVelocityScaleFactor(float scaleFactor) {
- attribs.velocityScaleFactor = scaleFactor;
- return;
- }
-
- /**
- * Get number of channels used by a particular sample on the audio device.
- * @param index device specific reference number to device driver sample
- * @return number of channels currently being used by this sample.
- */
- @Override
- public int getNumberOfChannelsUsed(int index) {
- // This method must be overridden by device specific implementation
- Sample sample = getSample(index);
- if (sample != null)
- return (sample.getNumberOfChannelsUsed());
- else
- return 0;
- }
-
- /**
- * Get number of channels that would be used by a particular sample on
- * the audio device given the mute flag passed in as a parameter.
- * @param index device specific reference number to device driver sample
- * @param muteFlag denotes the mute state to assume while executing this
- * query. This mute value does not have to match the current mute state
- * of the sample.
- * @return number of channels that would be used by this sample if it
- * were playing.
- */
- @Override
- public int getNumberOfChannelsUsed(int index, boolean muteFlag) {
- // This method must be overridden by device specific implementation
- Sample sample = getSample(index);
- if (sample != null)
- return (sample.getNumberOfChannelsUsed());
- else
- return 0;
- }
-
- /**
- * Get length of time a sample would play if allowed to play to completion.
- * @param index device specific reference number to device driver sample
- * @return length of sample in milliseconds
- */
- @Override
- public long getSampleDuration(int index) {
- Sample sample = getSample(index);
- if (sample != null)
- return (sample.getDuration());
- else
- return 0L;
- }
-
- /**
- * Get time this sample began playing on the audio device.
- * @param index device specific reference number to device driver sample
- * @return system clock time sample started
- */
- @Override
- public long getStartTime(int index) {
- Sample sample = getSample(index);
- if (sample != null)
- return (sample.getStartTime());
- else
- return 0L;
- }
-
- /**
- * Get reference to the array list of samples
- * @return reference to samples list
- * @deprecated unsafe to get reference to samples list with this method.
- * It's better to directly reference samples list within a synchronized
- * block which also contains calls to .getSample(index).
- */
- protected ArrayList getSampleList() {
- return (samples);
- }
-
- public int getSampleListSize() {
- return (samples.size());
- }
-
- /**
- * Get specific sample from indexed sample list
- * Checks for valid index before attempting to get sample from list.
- * @param index device specific reference number to device driver sample
- * @return reference to sample; returns null if index out of range.
- *
- * @since Java 3D 1.2.1
- */
- public Sample getSample(int index) {
- synchronized(samples) {
- if ((index >= 0) && (index < samples.size())) {
- Sample sample = (Sample)samples.get(index);
- return (sample);
- }
- else
- return null;
- }
- }
-
- /*
- * Get reference to current aural attribute parameters associated with
- * this audio device.
- * @return reference to current aural attribute parameters
- */
- public AuralParameters getAuralParameters() {
- return (attribs);
- }
-}
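
The sample-index methods above form a small life cycle: prepareSound returns an index, the per-sample setters configure it, startSample/stopSample drive playback, and clearSound releases it. A minimal sketch under those assumptions; the engine instance and the sound file URL are placeholders:

import javax.media.j3d.AudioDevice3D;
import javax.media.j3d.MediaContainer;
import javax.vecmath.Point3d;
import com.sun.j3d.audioengines.AudioEngine3D;
import com.sun.j3d.audioengines.Sample;

public class SampleLifeCycle {
    static void playOnce(AudioEngine3D engine) {
        MediaContainer clip = new MediaContainer("file:sounds/ping.wav"); // placeholder URL
        int index = engine.prepareSound(AudioDevice3D.POINT_SOUND, clip);
        if (index == Sample.NULL_SAMPLE) {
            return; // the device could not load the sound
        }
        engine.setPosition(index, new Point3d(1.0, 0.0, -2.0)); // virtual world coordinates
        engine.setSampleGain(index, 0.8f);
        engine.setLoop(index, 0);                               // play once, no repeats
        if (engine.startSample(index) < 0) {
            System.err.println("startSample failed for index " + index);
        }
        // ... later, when the sound is no longer needed ...
        engine.stopSample(index);
        engine.clearSound(index);
    }
}
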
diff --git a/src/classes/share/com/sun/j3d/audioengines/AudioEngine3DL2.java b/src/classes/share/com/sun/j3d/audioengines/AudioEngine3DL2.java
deleted file mode 100644
index 1bffd52..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/AudioEngine3DL2.java
+++ /dev/null
@@ -1,309 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines;
-
-import javax.media.j3d.AudioDevice3DL2;
-import javax.media.j3d.AuralAttributes;
-import javax.media.j3d.PhysicalEnvironment;
-import javax.media.j3d.Sound;
-
-
-/**
- * The AudioEngine3DL2 Class defines an audio output device that generates
- * sound 'image' from high-level sound parameters passed to it during
- * scene graph rendering.
- *
- * <P>
- * The methods in this class are meant to be optionally overridden by an
- * extended class. This extended class would provide device specific code.
- *
- * <P>
- * Error checking on all parameters passed to these methods is already
- * explicitly being done by the Java 3D core code that calls these methods.
- *
- * <P>
- * These methods should NOT be called by any application if the audio engine
- * is associated with a Physical Environment used by Java3D Core.
- *
- * @since Java 3D 1.3
- */
-public abstract class AudioEngine3DL2 extends AudioEngine3D implements AudioDevice3DL2 {
- /**
- * Construct a new AudioEngine3DL2 with the specified PhysicalEnvironment.
- * @param physicalEnvironment the physical environment object where we
- * want access to this device.
- */
- public AudioEngine3DL2(PhysicalEnvironment physicalEnvironment ) {
- super(physicalEnvironment);
- }
-
- /*
- *
- * Methods that affect AudioEngine3DL2 fields that are NOT associated
- * with a specific sound sample
- *
- */
-
- /**
- * Pauses audio device engine without closing the device and associated
- * threads.
- * Causes all cached sounds to be paused and all streaming sounds to be
- * stopped.
- */
- @Override
- public abstract void pause();
-
- /**
- * Resumes audio device engine (if previously paused) without
- * reinitializing the device.
- * Causes all paused cached sounds to be resumed and all streaming
- * sounds restarted.
- */
- @Override
- public abstract void resume();
-
- /**
- * Set overall gain control of all sounds playing on the audio device.
- * @param scaleFactor scale factor applied to calculated amplitudes for
- * all sounds playing on this device
- */
- @Override
- public abstract void setGain(float scaleFactor);
-
- /*
- *
- * Methods explicitly affect a particular sound rendering and that
- * require audio device specific methods that override this class.
- *
- */
-
- /**
- * Set scale factor applied to sample playback rate for a particular sound
- * associated with the audio device.
- * Changing the device sample rate affects both the pitch and speed.
- * This scale factor is applied to ALL sound types.
- * Changes (scales) the playback rate of a sound independent of
- * Doppler rate changes.
- * @param index device specific reference to device driver sample
- * @param scaleFactor non-negative factor applied to the sample's
- * playback rate
- * @see Sound#setRateScaleFactor
- */
- @Override
- public void setRateScaleFactor(int index, float scaleFactor) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setRateScaleFactor(scaleFactor);
- return;
- }
-
-
- /*
- *
- * Methods explicitly affect aural attributes of the listening space
- * used to calculated reverberation during sound rendering.
- * These require audio device specific methods that override this class.
- *
- */
-
- /**
- * Set late reflection (referred to as 'reverb') attenuation.
- * This scale factor is applied to iterative, indistinguishable
- * late reflections that constitute the tail of reverberated sound in
- * the aural environment.
- * This parameter, along with the early reflection coefficient, defines
- * the reflective/absorptive characteristic of the surfaces in the
- * current listening region.
- * @param coefficient late reflection attenuation factor
- * @see AuralAttributes#setReverbCoefficient
- */
- @Override
- public void setReverbCoefficient(float coefficient) {
- attribs.reverbCoefficient = coefficient;
- return;
- }
-
-
- /**
- * Sets the early reflection delay time.
- * In this form, the parameter specifies the delay time between each order
- * of reflection (while reverberation is being rendered) explicitly given
- * in milliseconds.
- * @param reflectionDelay time between each order of early reflection
- * @see AuralAttributes#setReflectionDelay
- */
- @Override
- public void setReflectionDelay(float reflectionDelay) {
- attribs.reflectionDelay = reflectionDelay;
- return;
- }
-
- /**
- * Set reverb decay time.
- * Defines the reverberation decay curve.
- * @param time decay time in milliseconds
- * @see AuralAttributes#setDecayTime
- */
- @Override
- public void setDecayTime(float time) {
- attribs.decayTime = time;
- return;
- }
-
- /**
- * Set reverb decay filter.
- * This provides for frequencies above the given cutoff frequency to be
- * attenuated during reverb decay at a different rate than frequencies
- * below this value. Thus, defining a different reverb decay curve for
- * frequencies above the cutoff value.
- * @param frequencyCutoff value of frequencies in Hertz above which a
- * low-pass filter is applied.
- * @see AuralAttributes#setDecayFilter
- */
- @Override
- public void setDecayFilter(float frequencyCutoff) {
- attribs.decayFrequencyCutoff = frequencyCutoff;
- return;
- }
-
- /**
- * Set reverb diffusion.
- * This defines the echo dispersion (also referred to as 'echo density').
- * The value of this reverb parameter is expressed as a percent of the
- * audio device's minimum-to-maximum values.
- * @param diffusion percentage expressed within the range of 0.0 and 1.0
- * @see AuralAttributes#setDiffusion
- */
- @Override
- public void setDiffusion(float diffusion) {
- attribs.diffusion = diffusion;
- return;
- }
-
- /**
- * Set reverb density.
- * This defines the modal density (also referred to as 'spectral
- * coloration').
- * The value of this parameter is expressed as a percent of the audio
- * device's minimum-to-maximum values for this reverb parameter.
- * @param density reverb density expressed as a percentage,
- * within the range of 0.0 and 1.0
- * @see AuralAttributes#setDensity
- */
- @Override
- public void setDensity(float density) {
- attribs.density = density;
- return;
- }
-
-
- /**
- * Set the obstruction gain control. This method allows for attenuating
- * sound waves traveling between the sound source and the listener
- * obstructed by objects. Direct sound signals/waves for obstructed sound
- * source are attenuated but not indirect (reflected) waves.
- * There is no corresponding Core AuralAttributes method at this time.
- * @param index device specific reference to device driver sample
- * @param scaleFactor non-negative factor applied to direct sound gain
- */
- @Override
- public void setObstructionGain(int index, float scaleFactor) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setObstructionGain(scaleFactor);
- return;
- }
-
- /**
- * Set the obstruction filter control.
- * This provides for frequencies above the given cutoff frequency
- * to be attenuated, while the gain of an obstruction signal
- * is being calculated, at a different rate than frequencies
- * below this value.
- * There is no corresponding Core AuralAttributes method at this time.
- * @param index device specific reference to device driver sample
- * @param frequencyCutoff value of frequencies in Hertz above which a
- * low-pass filter is applied.
- */
-
- @Override
- public void setObstructionFilter(int index, float frequencyCutoff) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setObstructionFilter(frequencyCutoff);
- return;
- }
-
- /**
- * Set the occlusion gain control. This method allows for attenuating
- * sound waves traveling between the sound source and the listener
- * occluded by objects. Both direct and indirect sound signals/waves
- * for occluded sound sources are attenuated.
- * There is no corresponding Core AuralAttributes method at this time.
- * @param index device specific reference to device driver sample
- * @param scaleFactor non-negative factor applied to direct sound gain
- */
- @Override
- public void setOcclusionGain(int index, float scaleFactor) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setObstructionGain(scaleFactor);
- return;
- }
-
- /**
- * Set the occlusion filter control.
- * This provides for frequencies above the given cutoff frequency
- * to be attenuated, while the gain of an occluded signal
- * is being calculated, at a different rate than frequencies below
- * this value.
- * There is no corresponding Core AuralAttributes method at this time.
- * @param index device specific reference to device driver sample
- * @param frequencyCutoff value of frequencies in Hertz above which a
- * low-pass filter is applied.
- */
- @Override
- public void setOcclusionFilter(int index, float frequencyCutoff) {
- Sample sample = getSample(index);
- if (sample != null)
- sample.setObstructionFilter(frequencyCutoff);
- return;
- }
-}
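
Per the note in AudioEngine that device developers should subclass AudioEngine3DL2 rather than AudioEngine, a driver only has to fill in the abstract methods collected across the three classes above. A minimal, do-nothing skeleton; the class name is hypothetical, and real bodies would call into the device's native audio library:

import javax.media.j3d.PhysicalEnvironment;
import com.sun.j3d.audioengines.AudioEngine3DL2;

public class NullAudioEngine extends AudioEngine3DL2 {

    public NullAudioEngine(PhysicalEnvironment env) {
        super(env); // AudioEngine registers this device with the environment
    }

    // From AudioEngine
    @Override public boolean initialize() { return true; }
    @Override public boolean close()      { return true; }

    // From AudioEngine3D
    @Override public void clearSound(int index)    { }
    @Override public int  startSample(int index)   { return 0; }
    @Override public int  stopSample(int index)    { return 0; }
    @Override public void updateSample(int index)  { }
    @Override public void muteSample(int index)    { }
    @Override public void unmuteSample(int index)  { }
    @Override public void pauseSample(int index)   { }
    @Override public void unpauseSample(int index) { }

    // From AudioEngine3DL2
    @Override public void pause()  { }
    @Override public void resume() { }
    @Override public void setGain(float scaleFactor) { }
}
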
diff --git a/src/classes/share/com/sun/j3d/audioengines/AudioEngineThread.java b/src/classes/share/com/sun/j3d/audioengines/AudioEngineThread.java
deleted file mode 100644
index be16a6e..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/AudioEngineThread.java
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines;
-
-/*
- * Audio Engine Thread
- */
-
-
-/**
- * The Thread Class extended for Audio Device engines that must process
- * calls dynamically, in "real-time", to asynchronously change engine
- * parameters.
- *
- * <p>
- * NOTE: this class is probably not needed for those Audio Device implementations
- * that handle all dynamic parameters in the low-level audio library.
- */
-public class AudioEngineThread extends Thread {
-
- // Debug print flag
- static final protected boolean debugFlag = false;
-
-
- protected void debugPrint(String message) {
- if (debugFlag)
- System.out.println(message);
- }
-
- /**
- * The classification types.
- */
- protected static final int WORK_THREAD = 0x01;
- protected static final int UPDATE_THREAD = 0x02;
-
- /**
- * This runMonitor action puts the thread into an initial wait state
- */
- protected static final int WAIT = 0;
-
- /**
- * This runMonitor action notifies MasterControl that this thread
- * has completed and waits.
- */
- protected static final int NOTIFY_AND_WAIT = 1;
-
- /**
- * This runMonitor action tells the thread to run N number of
- * iterations.
- */
- protected static final int RUN = 2;
-
- /**
- * This runMonitor action tells the thread to stop running
- */
- protected static final int STOP = 3;
-
- /**
- * This indicates that this thread has been activated by MC
- */
- protected boolean active = false;
-
- /**
- * This indicates that this thread is alive and running
- */
- protected boolean running = true;
-
-
- /**
- * This indicates that this thread is ready
- */
- protected boolean started = false;
-
- /**
- * The time values passed into this thread
- */
- protected long referenceTime;
-
- /**
- * Use to assign threadOpts WAIT_ALL_THREADS
- */
- protected long lastWaitTimestamp = 0;
-
- /**
- * The type of this thread. It is one of the above constants.
- */
- protected int type;
-
- /**
- * The classification of this thread. It is one of the above constants.
- */
- protected int classification = WORK_THREAD;
-
- /**
- * The arguments passed in for this thread
- */
- protected Object[] args = null;
-
- /**
- * Flag to indicate that the user initiated a thread stop
- */
- protected boolean userStop = false;
-
- /**
- * Flag to indicate that this thread is waiting to be notified
- */
- protected boolean waiting = false;
-
- /**
- * Some variables used to name threads correctly
- */
- protected static int numInstances = 0;
- protected int instanceNum = -1;
-
- /**
- * This constructor simply assigns the given thread group and name.
- */
- public AudioEngineThread(ThreadGroup t, String threadName) {
- super(t, threadName);
- if (debugFlag)
- debugPrint("AudioEngineThread.constructor("+threadName +")");
- }
-
- synchronized int newInstanceNum() {
- return (++numInstances);
- }
-
- int getInstanceNum() {
- if (instanceNum == -1)
- instanceNum = newInstanceNum();
- return instanceNum;
- }
-
- /**
- * This method is defined by all slave threads to implement
- * one iteration of work.
- */
- synchronized public void doWork() {
- if (debugFlag)
- debugPrint("AudioEngineThread.doWork()");
- }
-
- /**
- * This initializes this thread. Once this method returns, the thread is
- * ready to do work.
- */
- public void initialize() {
- if (debugFlag)
- debugPrint("AudioEngineThread.initialize()");
- this.start();
- while (!started) {
- try {
- Thread.currentThread().sleep(1, 0);
- } catch (InterruptedException e) {
- }
- }
- }
-
- /**
- * This causes the threads run method to exit.
- */
- public void finish() {
- while (!waiting) {
- try {
- Thread.sleep(10);
- } catch (InterruptedException e) {}
- }
- runMonitor(STOP, 0,null);
- }
-
- /*
- * This thread's run loop: wait until released, perform one unit of work,
- * and repeat until the thread is stopped.
- */
- @Override
- public void run() {
- if (debugFlag)
- debugPrint("AudioEngineThread.run");
- runMonitor(WAIT, 0, null);
- while (running) {
- doWork();
- runMonitor(WAIT, 0, null);
- }
- // resource clean up
- shutdown();
- }
-
- synchronized public void runMonitor(int action, long referenceTime, Object[] args){
- switch (action) {
- case WAIT:
- if (debugFlag)
- debugPrint("AudioEngineThread.runMonitor(WAIT)");
- try {
- started = true;
- waiting = true;
- wait();
- } catch (InterruptedException e) {
- System.err.println(e);
- }
- waiting = false;
- break;
- case RUN:
- if (debugFlag)
- debugPrint("AudioEngineThread.runMonitor(RUN)");
- this.referenceTime = referenceTime;
- this.args = args;
- notify();
- break;
- case STOP:
- if (debugFlag)
- debugPrint("AudioEngineThread.runMonitor(STOP)");
- running = false;
- notify();
- break;
- }
- }
-
- public void shutdown() {
- }
-
- // default resource clean up method
- public void cleanup() {
- active = false;
- running = true;
- started = true;
- lastWaitTimestamp = 0;
- classification = WORK_THREAD;
- args = null;
- userStop = false;
- referenceTime = 0;
-
- }
-}
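
The runMonitor protocol above is the whole contract: the thread parks in WAIT, a RUN request performs one doWork() pass, and STOP ends the run loop. A minimal sketch of a work thread built on it; the subclass name and its kick() helper are hypothetical, the helper existing only because the action constants are protected:

import com.sun.j3d.audioengines.AudioEngineThread;

public class RenderWorkThread extends AudioEngineThread {

    public RenderWorkThread(ThreadGroup group) {
        super(group, "RenderWorkThread");
    }

    @Override
    public synchronized void doWork() {
        // One iteration of device-specific work, e.g. mixing the next audio buffer.
    }

    /** Release the parked thread for exactly one doWork() pass. */
    public void kick() {
        runMonitor(RUN, System.currentTimeMillis(), null);
    }

    public static void main(String[] args) {
        RenderWorkThread worker =
                new RenderWorkThread(Thread.currentThread().getThreadGroup());
        worker.initialize(); // starts the thread and waits until it parks in WAIT
        worker.kick();       // one unit of work
        worker.finish();     // waits for the WAIT state, then issues STOP
    }
}
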
diff --git a/src/classes/share/com/sun/j3d/audioengines/AuralParameters.java b/src/classes/share/com/sun/j3d/audioengines/AuralParameters.java
deleted file mode 100644
index f2d4627..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/AuralParameters.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines;
-
-
-/**
- * The AuralParameters Class defines a set of fields that define the
- * Aural listening environment. Many of the parameters correspond to
- * AuralAttribute fields.
- *
- * <p>
- * Error checking on all parameters passed to these methods is already
- * explicitly being done by the Java 3D core code that calls these methods.
- */
-
-public class AuralParameters
-{
- // Speed of Sound in meters/milliseconds
- public static final float SPEED_OF_SOUND = 0.344f;
- public static final int NO_FILTERING = -1;
-
- public float rolloff = 1.0f;
- public float reflectionCoefficient = 0.0f;
- public float reverbDelay = 40.0f;
- public int reverbOrder = 0;
- public float frequencyScaleFactor = 1.0f;
- public float velocityScaleFactor = 0.0f;
- int filterType = NO_FILTERING;
- double[] filterDistance = null;
- float[] filterCutoff = null;
-
- /*
- * @since Java 3D 1.3
- */
- public float reverbCoefficient = 1.0f;
- public float reflectionDelay = 20.0f;
- public float decayTime = 1000.0f;
- public float decayFrequencyCutoff = 5000.0f;
- public float diffusion = 1.0f; // 100%
- public float density = 1.0f; // 100%
-
- /**
- * Construct a new AuralParameters object
- */
- public AuralParameters() {
- frequencyScaleFactor = 1.0f;
- velocityScaleFactor = 0.0f;
- rolloff = 1.0f;
- reflectionCoefficient = 0.0f;
- reflectionDelay = 20.0f;
- reverbCoefficient = 1.0f;
- reverbDelay = 40.0f;
- reverbOrder = 0;
- filterType = NO_FILTERING;
- filterDistance = new double[2]; // start out with array of two
- filterCutoff = new float[2]; // start out with array of two
- decayTime = 1000.0f;
- decayFrequencyCutoff = 5000.0f;
- diffusion = 1.0f; // 100%
- density = 1.0f; // 100%
- }
-
- public void setDistanceFilter(int filterType, double[] distance,
- float[] filterCutoff) {
- boolean error = false;
- boolean allocate = false;
- int attenuationLength = 0;
- if (distance == null || filterCutoff == null) {
- error = true;
- }
- else {
- attenuationLength = distance.length;
- if (attenuationLength == 0 || filterType == NO_FILTERING) {
- error = true;
- }
- }
- if (error) {
- this.filterType = NO_FILTERING;
- this.filterDistance = null;
- this.filterCutoff = null;
- if (debugFlag)
- debugPrint("setDistanceFilter NO_FILTERING");
- return;
- }
- this.filterType = filterType;
- if (debugFlag)
- debugPrint("setDistanceFilter type = " + filterType);
- if ((filterDistance == null) || (filterCutoff == null)) {
- allocate = true;
- }
- else if (attenuationLength > filterDistance.length) {
- allocate = true;
- }
- if (allocate) {
- if (debugFlag)
- debugPrint("setDistanceFilter length = " + attenuationLength);
- this.filterDistance = new double[attenuationLength];
- this.filterCutoff = new float[attenuationLength];
- }
- System.arraycopy(distance, 0, this.filterDistance, 0,
- attenuationLength);
- System.arraycopy(filterCutoff, 0, this.filterCutoff, 0,
- attenuationLength);
-
- if (debugFlag) {
- debugPrint("setDistanceFilter arrays = ");
- for (int i=0; i<attenuationLength; i++)
- debugPrint(this.filterDistance[i] + "," + this.filterCutoff[i]);
- debugPrint("setDistanceFilter passed in = ");
- for (int i=0; i<attenuationLength; i++)
- debugPrint((float)(filterDistance[i]) + "," + filterCutoff[i]);
- }
- return;
- }
- public int getDistanceFilterLength() {
- if (filterDistance != null)
- return filterDistance.length;
- return 0;
- }
-
- public int getDistanceFilterType() {
- return filterType;
- }
-
- public void getDistanceFilter(double[] distance, float[] filterCutoff) {
- if (distance == null || filterCutoff == null)
- return;
- int attenuationLength = distance.length;
- if (attenuationLength == 0 ||
- (filterDistance==null) || (filterCutoff==null))
- return;
- if (attenuationLength > filterDistance.length)
- attenuationLength = filterDistance.length;
- System.arraycopy(this.filterDistance, 0, distance, 0,
- attenuationLength);
- System.arraycopy(this.filterCutoff, 0, filterCutoff, 0,
- attenuationLength);
- return;
- }
-
- // Debug print flags
- static final boolean debugFlag = false;
- static final boolean internalErrors = false;
-
- /**
- * Debug print method for Sound nodes
- */
- protected void debugPrint(String message) {
- if (debugFlag)
- System.out.println(message);
- }
-
-}
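
setDistanceFilter copies the two parallel arrays, growing its internal storage only when a longer pair arrives, and getDistanceFilter copies them back into caller-supplied arrays. A minimal sketch with illustrative values; Sample.LOW_PASS is used as the filter type since AuralParameters itself only defines NO_FILTERING:

import com.sun.j3d.audioengines.AuralParameters;
import com.sun.j3d.audioengines.Sample;

public class DistanceFilterDemo {
    public static void main(String[] args) {
        AuralParameters params = new AuralParameters();

        double[] distances = { 2.0, 10.0, 30.0 };            // distances from the listener
        float[]  cutoffs   = { 20000.0f, 8000.0f, 2000.0f }; // low-pass cutoffs in Hz
        params.setDistanceFilter(Sample.LOW_PASS, distances, cutoffs);

        int n = params.getDistanceFilterLength();            // 3
        double[] distOut   = new double[n];
        float[]  cutoffOut = new float[n];
        params.getDistanceFilter(distOut, cutoffOut);
        System.out.println(n + " filter knots, first cutoff " + cutoffOut[0] + " Hz");
    }
}
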
diff --git a/src/classes/share/com/sun/j3d/audioengines/Sample.java b/src/classes/share/com/sun/j3d/audioengines/Sample.java
deleted file mode 100644
index 3506dc8..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/Sample.java
+++ /dev/null
@@ -1,480 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines;
-
-import javax.media.j3d.MediaContainer;
-import javax.media.j3d.Sound;
-import javax.media.j3d.Transform3D;
-import javax.media.j3d.View;
-import javax.vecmath.Point3d;
-import javax.vecmath.Point3f;
-import javax.vecmath.Vector3d;
-import javax.vecmath.Vector3f;
-
-/**
- * The Sample class defines the data and methods associated with a sound
- * sample played through the AudioDevice.
- * This contains all the data fields for non-spatialized and spatialized
- * (positional and directional) sound samples.
- */
-public class Sample {
-
- // Debug print flags and methods
- static final protected boolean debugFlag = false;
- static final protected boolean internalErrors = false;
-
- protected void debugPrint(String message) {
- if (debugFlag)
- System.out.println(message);
- }
-
- protected void debugPrintln(String message) {
- if (debugFlag)
- System.out.println(message);
- }
-
- /**
- * Null Sound identifier denotes sound is not created or initialized
- */
- public static final int NULL_SAMPLE = -1;
-
- /**
- * sound data associated with sound source
- */
- protected MediaContainer soundData = null;
-
- /**
- * type of sound associated with sound source (Background, Point, or Cone)
- */
- protected int soundType = -1;
-
- /**
- * Overall Scale Factor applied to sound gain.
- */
- protected float gain = 1.0f; // Valid values are >= 0.0.
-
- /**
- * Overall Scale Factor applied to sound.
- * @since Java 3D 1.3
- */
- protected float rateScaleFactor = 1.0f; // Valid values are >= 0.0.
-
- /**
- * Number of times sound is looped/repeated during play
- */
- protected int loopCount = 0; // Range from 0 to POSITIVE_INFINITY(-1)
-
-
- /*
- * Duration of sample
- * This should match the Sound node constant of same name
- */
- public static final int DURATION_UNKNOWN = -1;
- protected long duration = DURATION_UNKNOWN;
-
- protected int numberOfChannels = 0;
- protected boolean mute = false; // denotes if sample is muted
- // (playing with zero gain)
-
- /*
- *
- * Fields associated with positional sound samples
- *
- */
- /*
- * Local to Vworld transform
- */
- protected Transform3D vworldXfrm = new Transform3D();
- protected boolean vwXfrmFlag = false;
-
- /*
- * Origin of Sound source in Listener's space.
- */
- protected Point3f position = new Point3f(0.0f, 0.0f, 0.0f);
-
- /*
- * Pairs of distances and gain scale factors that define piecewise linear
- * gain attenuation between each pair.
- */
- protected double[] attenuationDistance = null;
- protected float[] attenuationGain = null;
-
- /**
- * dirty flags denoting what has changed since last rendering
- */
- protected int dirtyFlags = 0xFFFF;
-
- /*
- *
- * Direction sample fields
- *
- */
- /**
- * The Cone Sound's direction vector. This is the cone axis.
- */
- protected Vector3f direction = new Vector3f(0.0f, 0.0f, 1.0f);
-
- /**
- * Pairs of distances and gain scale factors that define piecewise linear
- * gain BACK attenuation between each pair.
- * These are used for defining elliptical attenuation regions.
- */
- protected double[] backAttenuationDistance = null;
- protected float[] backAttenuationGain = null;
-
- /**
- * Directional Sound's gain can be attenuated based on the listener's
- * location off-angle from the sound source direction.
- * This can be set by three parameters:
- * angular distance in radians
- * gain scale factor
- * filtering (currently the only filtering supported is lowpass)
- */
- protected double[] angularDistance = {0.0, (Math.PI * 0.5)};
- protected float[] angularGain = {1.0f, 0.0f};
-
- /**
- * Distance Filter
- * Each sound source is attenuated by a filter based on its distance
- * from the listener.
- * For now the only supported filterType will be LOW_PASS frequency
- * cutoff.
- * At some time full FIR filtering will be supported.
- */
- public static final int NO_FILTERING = -1;
- public static final int LOW_PASS = 1;
-
- protected int angularFilterType = NO_FILTERING;
- protected float[] angularFilterCutoff = {Sound.NO_FILTER, Sound.NO_FILTER};
-
- /*
- * Obstruction and Occlusion parameters
- * For now the only type of filtering supported is a low-pass filter
- * defined by a frequency cutoff value.
- * @since Java 3D 1.3
- */
- protected float obstructionGain = 1.0f; // scale factor
- protected int obstructionFilterType = NO_FILTERING;
- protected float obstructionFilterCutoff = Sound.NO_FILTER;
- protected float occlusionGain = 1.0f; // scale factor
- protected int occlusionFilterType = NO_FILTERING;
- protected float occlusionFilterCutoff = Sound.NO_FILTER;
-
- /*
- * Construct a new audio device Sample object
- */
- public Sample() {
- if (debugFlag)
- debugPrintln("Sample constructor");
- }
-
- public long getDuration() {
- return 0;
- }
-
- public long getStartTime() {
- return 0;
- }
-
- public int getNumberOfChannelsUsed() {
- return 0;
- }
-
- public void setDirtyFlags(int flags) {
- dirtyFlags = flags;
- }
-
- public int getDirtyFlags() {
- return dirtyFlags;
- }
-
- public void setSoundType(int type) {
- soundType = type;
- }
-
- public int getSoundType() {
- return soundType;
- }
-
- public void setSoundData(MediaContainer ref) {
- soundData = ref;
- }
-
- public MediaContainer getSoundData() {
- return soundData;
- }
-
- public void setMuteFlag(boolean flag) {
- mute = flag;
- }
-
- public boolean getMuteFlag() {
- return mute;
- }
-
- public void setVWrldXfrmFlag(boolean flag) {
- // this flag is ONLY true if the VirtualWorld Transform is ever set
- vwXfrmFlag = flag;
- }
-
- public boolean getVWrldXfrmFlag() {
- return vwXfrmFlag;
- }
-
- public void setGain(float scaleFactor) {
- gain = scaleFactor;
- }
-
- public float getGain() {
- return gain;
- }
-
- public void setLoopCount(int count) {
- loopCount = count;
- }
-
- public int getLoopCount() {
- return loopCount;
- }
-
-
- public void setPosition(Point3d position) {
- this.position.set(position);
- return;
- }
-
- // TODO: no get method for Position
-
-
- public void setDistanceGain(
- double[] frontDistance, float[] frontAttenuationScaleFactor,
- double[] backDistance, float[] backAttenuationScaleFactor) {
- if (frontDistance != null) {
- int size = frontDistance.length;
- attenuationDistance = new double[size];
- attenuationGain = new float[size];
- for (int i=0; i<size; i++) {
- attenuationDistance[i] = frontDistance[i];
- attenuationGain[i] = frontAttenuationScaleFactor[i];
- }
- }
- else {
- attenuationDistance = null;
- attenuationGain = null;
- }
- if (backDistance != null && frontDistance != null) {
- int size = backDistance.length;
- backAttenuationDistance = new double[size];
- backAttenuationGain = new float[size];
- for (int i=0; i<size; i++) {
- backAttenuationDistance[i] = backDistance[i];
- backAttenuationGain[i] = backAttenuationScaleFactor[i];
- }
- }
- else {
- backAttenuationDistance = null;
- backAttenuationGain = null;
- }
- return;
- }
-
- // TODO: no get method for Back Attenuation
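The (distance, gain) pairs stored by setDistanceGain() drive piecewise linear attenuation at render time. A minimal sketch of that lookup, independent of the engine's own findFactor() implementation; the helper name is illustrative only, and both arrays are assumed to be the same length:

    // Illustrative only: linear interpolation over (distance, gain) pairs,
    // analogous to the engine's own findFactor() lookup at render time.
    static float interpolateGain(double distance, double[] distances, float[] gains) {
        if (distances == null || gains == null || distances.length == 0)
            return 1.0f;                       // no attenuation defined
        if (distance <= distances[0])
            return gains[0];                   // nearer than the first pair
        int last = distances.length - 1;
        if (distance >= distances[last])
            return gains[last];                // beyond the last pair
        int i = 1;
        while (distance > distances[i])        // find the enclosing pair
            i++;
        double ratio = (distance - distances[i - 1])
                     / (distances[i] - distances[i - 1]);
        return (float) (gains[i - 1] + ratio * (gains[i] - gains[i - 1]));
    }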
-
-
- public void setDirection(Vector3d direction) {
- this.direction.set(direction);
- return;
- }
-
- // TODO: no get method for Direction
-
-
- public void setAngularAttenuation(int filterType, double[] angle,
- float[] attenuationScaleFactor, float[] filterCutoff) {
- if (angle != null) {
- int size = angle.length;
- angularDistance = new double[size];
- angularGain = new float[size];
- if (filterType != NO_FILTERING && filterCutoff != null)
- angularFilterCutoff = new float[size];
- else
- angularFilterCutoff = null;
- for (int i=0; i<size; i++) {
- angularDistance[i] = angle[i];
- angularGain[i] = attenuationScaleFactor[i];
- if (angularFilterCutoff != null)
- angularFilterCutoff[i] = filterCutoff[i];
- }
- angularFilterType = filterType;
- }
- else {
- angularDistance = null;
- angularGain = null;
- angularFilterCutoff = null;
- angularFilterType = NO_FILTERING;
- }
- }
-
- // TODO: no get method for Angular Attenuation
-
-
- /*
- * Set Rate ScaleFactor
- * @since Java 3D 1.3
- */
- public void setRateScaleFactor(float scaleFactor) {
- rateScaleFactor = scaleFactor;
- }
-
- /*
- * Get Rate ScaleFactor
- * @since Java 3D 1.3
- */
- public float getRateScaleFactor() {
- return rateScaleFactor;
- }
-
-
- /*
- * Set Obstruction Gain
- * @since Java 3D 1.3
- */
- public void setObstructionGain(float scaleFactor) {
- obstructionGain = scaleFactor;
- }
-
- /*
- * Get Obstruction Gain
- * @since Java 3D 1.3
- */
- public float getObstructionGain() {
- return obstructionGain;
- }
-
- /*
- * Set Obstruction Filter Cutoff Frequency
- * @since Java 3D 1.3
- */
- public void setObstructionFilter(float cutoffFrequency) {
- obstructionFilterType = LOW_PASS;
- obstructionFilterCutoff = cutoffFrequency;
- }
-
- // TODO: no get method for Obstruction Filtering
-
-
- /*
- * Set Occlusion Gain
- * @since Java 3D 1.3
- */
- public void setOcclusionGain(float scaleFactor) {
- occlusionGain = scaleFactor;
- }
-
- /*
- * Get Occlusion Gain
- * @since Java 3D 1.3
- */
- public float getOcclusionGain() {
- return occlusionGain;
- }
-
- /*
- * Set Occlusion Filter Cutoff Frequency
- * @since Java 3D 1.3
- */
- public void setOcclusionFilter(float cutoffFrequency) {
- occlusionFilterType = LOW_PASS;
- occlusionFilterCutoff = cutoffFrequency;
- }
-
- // TODO: no get method for Occlusion Filtering
-
-
- /**
- * Clears/re-initializes fields associated with sample data
- * for this sound,
- * and frees any device-specific data associated with this sample.
- */
- public void clear() {
- if (debugFlag)
- debugPrintln("Sample.clear() entered");
- soundData = (MediaContainer)null;
- soundType = NULL_SAMPLE;
- gain = 1.0f;
- loopCount = 0;
- duration = DURATION_UNKNOWN;
- numberOfChannels = 0;
- vworldXfrm.setIdentity();
- vwXfrmFlag = false;
- position.set(0.0f, 0.0f, 0.0f);
- attenuationDistance = null;
- attenuationGain = null;
- direction.set(0.0f, 0.0f, 1.0f);
- backAttenuationDistance = null;
- backAttenuationGain = null;
- if (angularDistance != null) {
- angularDistance[0] = 0.0;
- angularDistance[1] = Math.PI * 0.5;
- }
- if (angularGain != null) {
- angularGain[0] = 1.0f;
- angularGain[1] = 0.0f;
- }
- angularFilterType = NO_FILTERING;
- if (angularFilterCutoff != null) {
- angularFilterCutoff[0] = Sound.NO_FILTER;
- angularFilterCutoff[1] = Sound.NO_FILTER;
- }
- obstructionGain = 1.0f;
- obstructionFilterType = NO_FILTERING;
- obstructionFilterCutoff = Sound.NO_FILTER;
- occlusionGain = 1.0f;
- occlusionFilterType = NO_FILTERING;
- occlusionFilterCutoff = Sound.NO_FILTER;
- }
-
- /*
- * Render
- */
- public void render(int dirtyFlags, View view, AuralParameters attribs) {
- // meant to be overridden
- }
-}
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSAuralParameters.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSAuralParameters.java
deleted file mode 100644
index 1f1240c..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSAuralParameters.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-
-/**
- * The AudioDevice dependent sound node and aural attribute node parameters.
- * These are explicitly maintained for HaeSoundMixer
- */
-
-public class JSAuralParameters extends com.sun.j3d.audioengines.AuralParameters {
-
- /**
- * Reverb Parameters
- *
- * dirty flag checked and cleared by render()
- */
- static int REFLECTION_COEFF_CHANGED = 1;
- static int REVERB_DELAY_CHANGED = 2;
- static int REVERB_ORDER_CHANGED = 4;
-
- int reverbDirty = 0xFFFF;
- int lastReverbSpeed = 0; // TODO: NOT used yet
- boolean reverbFlag = false; // previously referred to as reverbOn
- int reverbType = 1; // Reverb type 1 equals NONE in JavaSound engine
-
-
- JSAuralParameters() {
- super();
- reverbDirty = 0xFFFF;
- lastReverbSpeed = 0;
- reverbType = 1;
- reverbFlag = false;
- }
-}
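The reverbDirty bits above are meant to be checked and then cleared by the renderer. A hedged sketch of that check-and-clear pattern; "consumeReverbChanges" is a hypothetical helper and the empty branches stand in for the actual JavaSound reverb updates done elsewhere in the engine:

    static void consumeReverbChanges(JSAuralParameters jsap) {
        if ((jsap.reverbDirty & JSAuralParameters.REFLECTION_COEFF_CHANGED) != 0) {
            // push the new reflection coefficient to the reverb here
        }
        if ((jsap.reverbDirty & JSAuralParameters.REVERB_DELAY_CHANGED) != 0) {
            // update the reverb delay here
        }
        if ((jsap.reverbDirty & JSAuralParameters.REVERB_ORDER_CHANGED) != 0) {
            // update the reverb order (number of reflections) here
        }
        jsap.reverbDirty = 0;   // nothing pending until the next attribute change
    }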
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSChannel.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSChannel.java
deleted file mode 100644
index 06457df..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSChannel.java
+++ /dev/null
@@ -1,430 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-/*
- * IMPLEMENTATION NOTE: The JavaSoundMixer is incomplete and really needs
- * to be rewritten.
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-// import java.applet.*;
-import java.io.InputStream;
-import java.net.URL;
-
-import javax.sound.sampled.AudioFormat;
-import javax.sound.sampled.AudioInputStream;
-import javax.sound.sampled.AudioSystem;
-import javax.sound.sampled.DataLine;
-
-import com.sun.j3d.audioengines.Sample;
-
-/**
- * The JSChannel class defines audio output methods that call JavaSound
- * API methods common to all data line types: stream, clip and MIDI lines.
- */
-
-class JSChannel {
-
- AudioInputStream ais = null;
- long startTime = 0;
- URL url = null;
- InputStream inputStream = null;
- AudioFormat audioFormat = null;
- // WORKAROUND for (possibly old) bug in JavaSound
- // JavaSound has left and right flipped
- // TODO: verify whether this is still true
- static double panLeft = 1.0;
- static double panRight = -1.0;
- float rateInHz = 0.0f;
-
- /**
- * Debug print mechanism for Sound nodes
- */
- static final boolean debugFlag = false;
-
- static void debugPrint(String message) {
- if (debugFlag)
- System.out.print(message);
- }
-
- static void debugPrintln(String message) {
- if (debugFlag)
- System.out.println(message);
- }
-
-
- /**
- * Code to initialize the device
- * @return true if initialized successfully, false if an error occurred
- */
- boolean initialize() {
- // for now do nothing
- return true;
- }
-
- /**
- * @return reference to newly created AudioInputStream
- */
- AudioInputStream initAudioInputStream(InputStream inputStream, boolean cacheFlag) {
- ais = null;
- if (inputStream == null) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error initAudioInputStream ");
- debugPrintln("input stream given is null");
- }
- this.inputStream = null;
- return null;
- }
- try {
- if (debugFlag)
- debugPrintln("JSChannel: initAudioInputStream - try getting stream ");
- // open the sound data as an 'audio input stream'
- // and read the header information at the start of the data.
- ais = AudioSystem.getAudioInputStream(inputStream);
- // add this new stream to vector list of streams
- }
- catch (Exception e) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error initAudioInputStream ");
- debugPrintln("get stream failed");
- }
- e.printStackTrace();
- this.inputStream = null;
- return null;
- }
- // success, so save new inputStream and nullify url field
- this.inputStream = inputStream;
- url = null;
-/******
-// QUESTION: HOW do I figure out the data type of the file/url/inputStream????
- if (ais instanceof AudioMidiInputStream ||
- ais instanceof AudioRmfInputStream )
- // QUESTION: can non-cached MIDI files ever be supported ?
-*******/
- return ais;
- } // initAudioInputStream
-
-
- /**
- * @return reference to newly created AudioInputStream
- */
- AudioInputStream initAudioInputStream(URL path, boolean cacheFlag) {
- ais = null;
- if (path == null) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error initAudioInputStream ");
- debugPrintln("URL given is null");
- }
- this.url = null;
- return null;
- }
- try {
- if (debugFlag)
- debugPrintln("JSChannel: initAudioInputStream - try getting stream ");
- ais = AudioSystem.getAudioInputStream(path.openStream());
- }
- catch (Exception e) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error initAudioInputStream ");
- debugPrintln("get stream failed");
- }
- e.printStackTrace();
- this.url = null;
- return null;
- }
- // success, so save new url path and nullify input stream field
- this.url = path;
- inputStream = null;
- return ais;
- } // initAudioInputStream
-
-
- AudioInputStream reinitAudioInputStream(URL path) {
-/*****
- if (path == null) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error reinitAudioInputStream ");
- debugPrintln("URL given is null");
- }
- return null;
- }
- try {
- if (debugFlag)
- debugPrintln("JSChannel: reinitAudioInputStream - try getting stream ");
- ais = AudioSystem.getAudioInputStream(path.openStream());
- }
- catch (Exception e) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error reinitAudioInputStream ");
- debugPrintln("get stream failed");
- }
- e.printStackTrace();
- return null;
- }
- // Parameters stay the same except for the start time, which is changed later
- return ais;
-******/
- return null; // TODO: implement this
-
- } // reinitAudioInputStream
-
- AudioInputStream reinitAudioInputStream(InputStream inputStream) {
-/******
- AudioInputStream ais;
- if (inputStream == null) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error reinitAudioInputStream ");
- debugPrintln("InputStream given is null");
- }
- return null;
- }
- try {
-// Couldn't get this method to work!!!
- if (debugFlag)
- debugPrintln("JSChannel: reintAudioContainer - try closing stream ");
- inputStream.close();
-
- if (debugFlag)
- debugPrintln("JSChannel: reinitAudioInputStream - try getting stream ");
- ais = AudioSystem.getAudioInputStream(inputStream);
- }
- catch (Exception e) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error reinitAudioInputStream ");
- debugPrintln("get stream failed");
- }
- e.printStackTrace();
- return null;
- }
- // Parameters stay the same except for the start time, which is changed later
- return ais; // >=0 if everythings OK
-**************/
- return null; // TODO: implement this
-
- } // reinitAudioInputStream
-
-
- DataLine initDataLine(AudioInputStream ais) {
- if (debugFlag) {
- debugPrintln("JSChannel: initDataLine(" + ais + ")");
- debugPrintln(" must be overridden");
- }
- return null;
- }
-
- long getDuration() {
- // TODO: how should this really be done??
- if (debugFlag)
- debugPrintln("JSChannel:getDuration");
-
- if (ais == null || audioFormat == null ) {
- if (debugFlag)
- debugPrintln("JSChannel: Internal Error getDuration");
- return (long)Sample.DURATION_UNKNOWN;
- }
- // Otherwise we'll assume that we can calculate this duration
-
- // get "duration" of audio stream (wave file)
- // TODO: For True STREAMing audio the size is unknown...
- long numFrames = ais.getFrameLength();
- if (debugFlag)
- debugPrintln(" frame length = " + numFrames);
- if (numFrames <= 0)
- return (long)Sample.DURATION_UNKNOWN;
-
- float rateInFrames = audioFormat.getFrameRate();
- rateInHz = audioFormat.getSampleRate();
- if (debugFlag)
- debugPrintln(" rate in Frames = " + rateInFrames);
- if (rateInFrames <= 0)
- return (long)Sample.DURATION_UNKNOWN;
- long duration = (long)((float)numFrames/rateInFrames);
- if (debugFlag)
- debugPrintln(" duration(based on ais) = " + duration);
- return duration;
- }
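The calculation above divides the frame count by the frame rate, which yields seconds; the TODO leaves open what unit the caller actually expects. A small sketch assuming milliseconds are wanted (the helper name is illustrative, and -1 stands in for "duration unknown"):

    import javax.sound.sampled.AudioInputStream;

    static long durationMillis(AudioInputStream stream) {
        long frames = stream.getFrameLength();                // -1 for true streams
        float frameRate = stream.getFormat().getFrameRate();  // frames per second
        if (frames <= 0 || frameRate <= 0.0f)
            return -1;
        return (long) ((frames / frameRate) * 1000.0f);
    }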
-
- /**
- * Start TWO Samples
- */
- boolean startSamples(int loopCount, float leftGain, float rightGain,
- int leftDelay, int rightDelay) {
- if (debugFlag)
- debugPrint("JSChannel: startSamples must be overridden");
- return false;
- } // end of start Samples
-
- /*
- * Starts a Sample
- */
- boolean startSample(int loopCount, float gain, int delay) {
- if (debugFlag)
- debugPrint("JSChannel: startSample must be overridden");
- return false;
- } // end of start (single) Sample
-
- int stopSample() {
-// This will tell thread to stop reading and writing
- // reload with old URL
- // reloadSample
- if (debugFlag)
- debugPrint("JSChannel: stopSample must be overridden");
- startTime = 0;
- return 0;
- }
-
- int stopSamples() {
-// This will tell thread to stop reading and writing
- // TODO: For muting, stop sound but don't clear startTime...
- // QUESTION: what does it mean for replaying that .stop "frees memory"
- if (debugFlag)
- debugPrint("JSChannel: stopSample must be overridden");
-// reloadSample
-
- startTime = 0;
- return 0;
- }
-
- void setSampleGain(float gain) {
-// TODO: Must be done in thread
- if (debugFlag)
- debugPrint("JSChannel: setSampleGain must be overridden");
- }
-
- void setSampleDelay(int delay) {
- if (debugFlag)
- debugPrint("JSChannel: setSampleDelay must be overridden");
- /*
- * null method
- */
- // dynamic changes to sample delay while playing is not implemented
- }
-
- void setSampleReverb(int type, boolean on) {
- if (debugFlag)
- debugPrint("JSChannel: setSampleReverb must be overridden");
- }
-
- void setSampleRate() {
- if (debugFlag)
- debugPrint("JSChannel: setSampleRate must be overridden");
- }
- void scaleSampleRate(float scaleFactor) {
- /**
- * Change rate for Doppler effect or pitch shifting.
- * Engine maximum sample rate is 48kHz so clamp to that
- * max value.
- */
- if (debugFlag)
- debugPrintln("JSChannel: scaleSampleRate");
- if (ais == null) {
- if (debugFlag) {
- debugPrint("JSChannel: Internal Error scaleSampleRate: ");
- debugPrintln("ais is null");
- }
- return;
- }
-
- AudioFormat audioFormat = ais.getFormat();
- float rate = audioFormat.getSampleRate();
-
- double newRate = rate * scaleFactor;
- if (newRate > 48000.0) // clamp to 48K max
- newRate = 48000.0;
-/****
-// NOTE: This doesn't work...
-/// audioStream.setSampleRate(newRate);
-
-// need to set FloatControl.Type(SAMPLE_RATE) to new value somehow...
-
- if (debugFlag) {
- debugPrintln("JSChannel: scaleSampleRate: new rate = " +
- rate * scaleFactor);
- debugPrintln(" >>>>>>>>>>>>>>> using scaleFactor = " +
- scaleFactor);
- }
-****/
- }
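The commented-out block above hints at setting a FloatControl of type SAMPLE_RATE. A sketch of how a clamped rate could be applied, assuming the mixer exposes that control (many do not); "applySampleRate" is an illustrative helper name, not part of the original class:

    import javax.sound.sampled.DataLine;
    import javax.sound.sampled.FloatControl;

    static void applySampleRate(DataLine line, float newRate) {
        if (line == null ||
                !line.isControlSupported(FloatControl.Type.SAMPLE_RATE))
            return;
        FloatControl rate =
                (FloatControl) line.getControl(FloatControl.Type.SAMPLE_RATE);
        // Clamp to the control's own range rather than a hard-coded 48 kHz.
        float clamped = Math.min(Math.max(newRate, rate.getMinimum()),
                                 rate.getMaximum());
        rate.setValue(clamped);
    }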
-
- int pauseSamples() {
- /**
- * Pause playing samples
- */
-// TODO: Notify thread
- return 0;
- }
-
- int pauseSample() {
- /**
- * Pause playing a sample
- */
-// TODO: Notify thread
- return 0;
- }
-
- int unpauseSamples() {
- /**
- * Resume playing samples
- */
-// TODO: Notify thread
- return 0;
- }
-
- int unpauseSample() {
- /**
- * Resume playing a sample
- */
-// TODO: Notify thread
- return 0;
- }
-
- void setSampleFiltering(boolean filterFlag, float cutoffFreq) {
- /**
- * Set or clear low-pass filtering
- */
-/****
-// QUESTION: how will this be done if data is written out one channel/sample at
- a time??
-****/
- // QUESTION: should filtering of Midi data be performed?
-// ais.setFiltering(filterFlag, cutoffFreq);
- }
-
-}
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSClip.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSClip.java
deleted file mode 100644
index ba988c0..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSClip.java
+++ /dev/null
@@ -1,348 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-/*
- * IMPLEMENTATION NOTE: The JavaSoundMixer is incomplete and really needs
- * to be rewritten.
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-import javax.sound.sampled.AudioFormat;
-import javax.sound.sampled.AudioInputStream;
-import javax.sound.sampled.AudioSystem;
-import javax.sound.sampled.Clip;
-import javax.sound.sampled.DataLine;
-import javax.sound.sampled.LineEvent;
-
-/**
- * The JSClip class defines audio output methods that call JavaSound
- * Hae mixer methods.
- */
-
-class JSClip extends JSChannel {
-
- Clip line;
-
-// TODO: separate left and right channel required until I write into
-// stereo buffer!
- Clip otherChannel = null;
-
-// TODO: Reverb channel that is centered and not delayed is maintained separately
-// until a way to set stereo reverb send (panned and attenuated to give
- // the same effect) is implemented
- Clip reverbChannel = null;
-
-
- /**
- * Create a data line for outputting the audio input stream
- * (for a stream that is a source data line).
- * @return true if successful in initializing the DataLine
- */
- @Override
- DataLine initDataLine(AudioInputStream ais) {
- if (debugFlag)
- debugPrintln("JSClip: initDataLine(" + ais + ")");
-
- try {
- if (debugFlag)
- debugPrintln("JSClip: loadSample - try getting new line ");
- /*
- * From the AudioInputStream fetch information about the format
- * of the audio data - including sampling frequency, number of
- * channels, size of samples,...
- */
- audioFormat = ais.getFormat();
-
- /*
- * we can't yet open the device for ALAW/ULAW playback,
- * convert ALAW/ULAW to PCM
- */
- if ((audioFormat.getEncoding() == AudioFormat.Encoding.ULAW) ||
- (audioFormat.getEncoding() == AudioFormat.Encoding.ALAW)) {
-
- AudioFormat tmp =
- new AudioFormat(
- AudioFormat.Encoding.PCM_SIGNED,
- audioFormat.getSampleRate(),
- audioFormat.getSampleSizeInBits() * 2,
- audioFormat.getChannels(),
- audioFormat.getFrameSize() * 2,
- audioFormat.getFrameRate(),
- true);
- ais = AudioSystem.getAudioInputStream(tmp, ais);
- audioFormat = tmp;
- }
-
- /*
- * Ask JavaSound for an output line with a format suitable for our
- * AudioInputStream. In order to ask for a line, an Info object
- * with the desired properties must be constructed.
- * Clip is used for outputting buffered data.
- * We have to pass the line the AudioFormat object so it knows
- * what the format will be.
- *
- * TODO: we could give JavaSound a hint about how big the
- * internal buffer for the line should be, rather than use the
- * default.
- */
- DataLine.Info info = new DataLine.Info(Clip.class,
- audioFormat);
- line = (Clip)AudioSystem.getLine(info);
-/*****
-// TODO: JSClip can't be a listener (do we need to do this in the thread?)
- if (debugFlag)
- debugPrintln("JSClip: addLineListener for clip");
- line.addLineListener(this);
-******/
-
- if (debugFlag)
- debugPrintln("JSClip: open sound Clip");
-
- // Make line ready to receive data.
- line.open(ais);
-
- // Line can now receive data but still needs to be
- // activated (opened) so it will pass data on to the
- // audio device. This is done at "startSample" time.
- }
- catch (Exception e) {
- if (debugFlag) {
- debugPrint("JSClip: Internal Error loadSample ");
- debugPrintln("get stream failed");
- }
- e.printStackTrace();
- // TODO: clean up vector elements that were set up for
- // failed sample
- return null;
- }
- return (DataLine)line;
- } // initDataLine
-
- /**
- * Start TWO Samples
- *
- * Used when two samples are associated with a single Point or Cone
- * sound. This method handles starting both samples, rather than
- * forcing the caller to make two calls to startSample, so that the
- * actual JavaSound start methods are called as close together
- * (with as little delay between them) as possible.
- */
- @Override
- boolean startSamples(int loopCount, float leftGain, float rightGain,
- int leftDelay, int rightDelay) {
- // loop count is ignored for Stream and MIDI
- // TODO: loop count isn't implemented for MIDI yet
-
- // left and rightDelay parameters are in terms of Samples
- if (debugFlag) {
- debugPrint("JSClip: startSamples ");
- debugPrintln("start stream for Left called with ");
- debugPrintln(" gain = " + leftGain +
- " delay = " + leftDelay);
- debugPrintln("start stream for Right called with ");
- debugPrintln(" gain = " + rightGain +
- " delay = " + rightDelay);
- }
-
- // This is called assuming that the Stream is allocated for a
- // Positional sample, but if it is not then fall back to
- // starting the single sample associated with this Stream
- if (otherChannel == null || reverbChannel == null)
- return startSample(loopCount, leftGain, leftDelay);
-
- /*
- * ais for Left and Right streams should be same so just get ais
- * left stream
- */
- if (ais == null) {
- if (debugFlag) {
- debugPrint("JSClip: Internal Error startSamples: ");
- debugPrintln("either left or right ais is null");
- }
- return false;
- }
- Clip leftLine;
- Clip rightLine;
- leftLine = line;
- rightLine = otherChannel;
-// left line only for background sounds...
-// TODO:
-/***********
-for now just care about the left
- if (leftLine == null || rightLine == null) {
- if (debugFlag) {
- debugPrint("JSClip: startSamples Internal Error: ");
- debugPrintln("either left or right line null");
- }
- return false;
- }
-************/
-
- // we know that we're processing TWO channels
- double ZERO_EPS = 0.0039; // approx 1/256 - twice MIDI precision
- double leftVolume = (double)leftGain;
- double rightVolume = (double)rightGain;
-
-// TODO: if not reading/writing done for Clips then I can't do
-// stereo trick (reading mono file and write to stereo buffer)
- // Save time sound started, only in left
- startTime = System.currentTimeMillis();
- if (debugFlag)
- debugPrintln("*****start Stream with new start time " +
- startTime);
- try {
- // QUESTION: Offset clip is done how???
-/*******
-// TODO:
-offset delayed sound
-set volume
-set pan??
-set reverb
- boolean reverbLeft = false; // off; reverb has it own channel
- boolean reverbRight = reverbLeft;
-
- if (leftDelay < rightDelay) {
-XXXX audioLeftStream.start(leftVolume, panLeft, reverbLeft);
-XXXX audioRightStream.start(rightVolume, panRight, reverbRight);
- }
- else {
-XXXX audioRightStream.start(rightVolume, panRight, reverbRight);
-XXXX audioLeftStream.start(leftVolume, panLeft, reverbLeft);
- }
-******/
- line.setLoopPoints(0, -1); // Loop the entire sound sample
- line.loop(loopCount); // plays clip loopCount + 1 times
- line.start(); // start the sound
- }
- catch (Exception e) {
- if (debugFlag) {
- debugPrint("JSClip: startSamples ");
- debugPrintln("audioInputStream.read failed");
- }
- e.printStackTrace();
- startTime = 0;
- return false;
- }
-
- if (debugFlag)
- debugPrintln("JSClip: startSamples returns");
- return true;
- } // end of startSamples
-
-
- /*
- * This method is called specifically for BackgroundSounds.
- * There is exactly ONE sample (mono or stereo) associated with
- * this type of sound. Consequently, delay is not applicable.
- * Since the sound has no auralAttributes applied to it reverb
- * is not applied to the sample.
- */
- @Override
- boolean startSample(int loopCount, float gain, int delay) {
- /*
- if (debugFlag) {
- debugPrint("JSClip: startSample ");
- debugPrintln("start stream called with ");
- debugPrintln(" gain = " + gain + ", delay is zero");
- }
-
- // Since only one sample is processed in startSample, just call
- // this more general method passing duplicate information
- // We don't really want to do this in the long term.
- return startSamples(loopCount, gain, gain, 0, 0);
- */
-
- // TODO: The following is temporary until we have fully
- // functional startSample and startSamples methods
- if (debugFlag)
- debugPrintln("JSClip.startSample(): starting sound Clip");
- line.setFramePosition(0); // Start playing from the beginning
- line.setLoopPoints(0, -1); // Loop the entire sound sample
- line.loop(loopCount);
- line.start();
- return true;
- } // end of start (single) Sample
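As a usage note for the loop() calls above: Clip.loop(n) starts playback itself and plays the clip n + 1 times, and Clip.LOOP_CONTINUOUSLY (-1) loops forever, which lines up with Java 3D's use of -1 for "loop indefinitely". A small sketch of that mapping (the helper name is illustrative):

    import javax.sound.sampled.Clip;

    static void startClip(Clip clip, int loopCount) {
        clip.setFramePosition(0);       // rewind to the first frame
        clip.setLoopPoints(0, -1);      // loop over the entire sample
        clip.loop(loopCount < 0 ? Clip.LOOP_CONTINUOUSLY : loopCount);
    }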
-
- @Override
- int stopSample() {
- // This will tell thread to stop reading and writing
- // reload with old URL - reloadSample()???
-
- if (debugFlag)
- debugPrintln("JSClip.stopSample(): stopping sound Clip");
- line.stop();
-
- startTime = 0;
- return 0;
- }
-
- @Override
- int stopSamples() {
- // This will tell thread to stop reading and writing
- // TODO: For muting, stop sound but don't clear startTime...
- // QUESTION: what does it mean for replaying that .stop "frees memory"
-
- // reloadSample
- // QUESTION: set stop state WHERE??!!
-
- if (debugFlag)
- debugPrintln("JSClip.stopSample(): stopping sound Clip");
- line.stop();
-
- startTime = 0;
- return 0;
- }
-
- /*
- * called by LineListener class
- */
- public void update(LineEvent event) {
- if (event.getType().equals(LineEvent.Type.STOP)) {
- line.close(); // really a stop??
- }
- else if (event.getType().equals(LineEvent.Type.CLOSE)) {
- // this forces a system exit in example code
- // TODO: what should be done to close line
- if (debugFlag)
- debugPrint("JSClip.update(CLOSE) entered ");
- }
- }
-
-}
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSDirectionalSample.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSDirectionalSample.java
deleted file mode 100644
index 05796c5..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSDirectionalSample.java
+++ /dev/null
@@ -1,737 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-/*
- * DirectionalSample object
- *
- * IMPLEMENTATION NOTE: The JavaSoundMixer is incomplete and really needs
- * to be rewritten.
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-import javax.vecmath.Point3f;
-import javax.vecmath.Vector3f;
-
-import com.sun.j3d.audioengines.AuralParameters;
-
-/**
- * The JSDirectionalSample class defines the data and methods associated with
- * a ConeSound sample played through the AudioDevice.
- */
-
-class JSDirectionalSample extends JSPositionalSample
-{
- // The transformed direction of this sound
- Vector3f xformDirection = new Vector3f(0.0f, 0.0f, 1.0f);
-
- public JSDirectionalSample() {
- super();
- if (debugFlag)
- debugPrintln("JSDirectionalSample constructor");
- }
-
- void setXformedDirection() {
- if (debugFlag)
- debugPrint("*** setXformedDirection");
- if (!getVWrldXfrmFlag()) {
- if (debugFlag)
- debugPrint(" Transform NOT set yet, so dir => xformDir");
- xformDirection.set(direction);
- }
- else {
- if (debugFlag)
- debugPrint(" Transform dir => xformDir");
- vworldXfrm.transform(direction, xformDirection);
- }
- if (debugFlag)
- debugPrint(" xform(sound)Direction <= "+xformDirection.x+
- ", " + xformDirection.y + ", " + xformDirection.z);
- }
-
-
- /* ***********************************
- *
- * Intersect ray to head with Ellipse
- *
- * ***********************************/
- /*
- * An ellipse is defined using:
- * (1) the ConeSound's direction vector as the major axis of the ellipse;
- * (2) the max parameter (a front distance attenuation value) along the
- * cone's positive axis; and
- * (3) the min parameter (a back distance attenuation value) along the
- * cone's negative axis
- * This method calculates the distance from the sound source to the
- * Intersection of the Ellipse with the ray from the sound source to the
- * listener's head.
- * This method returns the resulting distance.
- * If an error occurs, -1.0 is returned.
- *
- * All calculations are done in 'Cone' space:
- * The origin is defined as being the sound source position.
- * The ConeSound source axis is the X-axis of this Cone's space.
- * Since this ConeSound source defines a prolate spheroid (obtained
- * by revolving an ellipse about its major axis) we can define the
- * Y-axis of this Cone space as being in the same plane as the X-axis
- * and the vector from the origin to the head.
- * All calculations in Cone space can be generalized in this two-
- * dimensional space without loss of precision.
- * Location of the head, H, in Cone space can then be defined as:
- * H'(x,y) = (cos @, sin @) * | H |
- * where @ is the angle between the X-axis and the ray to H.
- * Using the equation of the line through the origin and H', and the
- * equation of the ellipse defined with min and max, find the
- * intersection by solving for x and then y.
- *
- * (I) The equation of the line through the origin and H':
- * | H'(y) - S(y) |
- * y - S(y) = | ----------- | * [x - S(x)]
- * | H'(x) - S(x) |
- * and since S(x,y) is the origin of ConeSpace:
- * | H'(y) |
- * y = | ----- | x
- * | H'(x) |
- *
- * (II) The equation of ellipse:
- * x**2 y**2
- * ---- + ---- = 1
- * a**2 b**2
- * given a is length from origin to ellipse along major, X-axis, and
- * b is length from origin to ellipse along minor, Y-axis;
- * where a**2 = [(max+min)/2]**2 , since 2a = min+max;
- * where b**2 = min*max , since the triangle abc is defined by the
- * points: S(x,y), the ellipse center, and (0,b),
- * thus b**2 = a**2 - (a-min)**2 = 2a*min - min**2
- * b**2 = ((min+max)*min) - min**2 = min*max.
- * so the equation of the ellipse becomes:
- * x**2 y**2
- * ---------------- + ------- = 1
- * [(max+min)/2]**2 min*max
- *
- * Substituting for y from Eq.(I) into Eq.(II) gives
- * x**2 [(H'(y)/H'(x))*x]**2
- * ---------------- + -------------------- = 1
- * [(max+min)/2]**2 min*max
- *
- * isolating x**2 gives
- * | 1 [H'(y)/H'(x)]**2 |
- * x**2 | ---------------- + ---------------- | = 1
- * | [(max+min)/2]**2 min*max |
- *
- *
- * | 4 [(sin @ * |H|)/(cos @ * |H|)]**2 |
- * x**2 | -------------- + -------------------------------- | = 1
- * | [(max+min)]**2 min*max |
- *
- * | |
- * | 1 |
- * | |
- * x**2 = | --------------------------------------- |
- * | | 4 [sin @/cos @]**2 | |
- * | | -------------- + ---------------- | |
- * | | [(max+min)]**2 min*max | |
- *
- * substitute tan @ for [sin @/cos @], and take the square root and you have
- * the equation for x as calculated below.
- *
- * Then solve for y by plugging x into Eq.(I).
- *
- * Return the distance from the origin in Cone space to this intersection
- * point: square_root(x**2 + y**2).
- *
- */
- double intersectEllipse(double max, double min ) {
-
- if (debugFlag)
- debugPrint(" intersectEllipse entered with min/max = " + min + "/" + max);
- /*
- * First find angle '@' between the X-axis ('A') and the ray to Head ('H').
- * In local coordinates, use Dot Product of those two vectors to get cos @:
- * A(u)*H(u) + A(v)*H(v) + A(w)*H(w)
- * cos @ = --------------------------------
- * |A|*|H|
- * then since domain of @ is { 0 <= @ <= PI }, arccos can be used to get @.
- */
- Vector3f xAxis = this.direction; // axis is sound direction vector
- // Get the already calculated vector from sound source position to head
- Vector3f sourceToHead = this.sourceToCenterEar;
- // error check vectors not empty
- if (xAxis == null || sourceToHead == null) {
- if (debugFlag)
- debugPrint( " one or both of the vectors are null" );
- return (-1.0f); // denotes an error occurred
- }
-
- // Dot Product
- double dotProduct = (double)( (sourceToHead.dot(xAxis)) /
- (sourceToHead.length() * xAxis.length()));
- if (debugFlag)
- debugPrint( " dot product = " + dotProduct );
- // since theta angle is in the range between 0 and PI, arccos can be used
- double theta = Math.acos(dotProduct);
- if (debugFlag)
- debugPrint( " theta = " + theta );
-
- /*
- * Solve for X using Eq.s (I) and (II) from above.
- */
- double minPlusMax = (double)(min + max);
- double tangent = Math.tan(theta);
- double xSquared = 1.0 /
- ( ( 4.0 / (minPlusMax * minPlusMax) ) +
- ( (tangent * tangent) / (min * max) ) );
- double x = Math.sqrt(xSquared);
- if (debugFlag)
- debugPrint( " X = " + x );
- /*
- * Solve for y, given the result for x:
- * | H'(y) | | sin @ |
- * y = | ----- | x = | ----- | x
- * | H'(x) | | cos @ |
- */
- double y = tangent * x;
- if (debugFlag)
- debugPrint( " Y = " + y );
- double ySquared = y * y;
-
- /*
- * Now return distance from origin to intersection point (x,y)
- */
- float distance = (float)(Math.sqrt(xSquared + ySquared));
- if (debugFlag)
- debugPrint( " distance to intersection = " + distance );
- return (distance);
- }
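The derivation in the comment block condenses to a short closed form. A standalone sketch that mirrors the method above (the helper name is illustrative; no error handling):

    // Distance from the sound source to the ellipse along a ray at angle theta
    // from the cone axis, where 2a = max + min and b*b = max * min.
    static double ellipseIntersectionDistance(double max, double min, double theta) {
        double minPlusMax = min + max;
        double tangent = Math.tan(theta);
        double xSquared = 1.0 / ((4.0 / (minPlusMax * minPlusMax)) +
                                 ((tangent * tangent) / (min * max)));
        double x = Math.sqrt(xSquared);
        double y = tangent * x;
        return Math.sqrt(x * x + y * y);
    }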
-
- /* *****************
- *
- * Find Factor
- *
- * *****************/
- /*
- * Interpolates the correct attenuation scale factor given a 'distance'
- * value. This version uses both front and back attenuation distance
- * and scale factor arrays (if non-null) in its calculation of
- * the distance attenuation.
- * If the back attenuation arrays are null then this executes the
- * PointSoundRetained version of this method.
- * This method finds the intersection of the ray from the sound source
- * to the center-ear, with the ellipses defined by the two sets (front
- * and back) of distance attenuation arrays.
- * This method looks at pairs of intersection distance values to find
- * which pair the input distance argument is between:
- * [intersectionDistance[index] and intersectionDistance[index+1]
- * The index is used to get factorArray[index] and factorArray[index+1].
- * Then the ratio of the 'distance' between this pair of intersection
- * values is used to scale the two found factorArray values proportionally.
- */
- float findFactor(double distanceToHead,
- double[] maxDistanceArray, float[] maxFactorArray,
- double[] minDistanceArray, float[] minFactorArray) {
- int index, lowIndex, highIndex, indexMid;
- double returnValue;
-
- if (debugFlag) {
- debugPrint("JSDirectionalSample.findFactor entered:");
- debugPrint(" distance to head = " + distanceToHead);
- }
-
- if (minDistanceArray == null || minFactorArray == null) {
- /*
- * Execute the PointSoundRetained version of this method.
- * Assume it will check for other error conditions.
- */
- return ( this.findFactor(distanceToHead,
- maxDistanceArray, maxFactorArray) );
- }
-
- /*
- * Error checking
- */
- if (maxDistanceArray == null || maxFactorArray == null) {
- if (debugFlag)
- debugPrint(" findFactor: arrays null");
- return -1.0f;
- }
- // Assuming length > 1 already tested in set attenuation arrays methods
- int arrayLength = maxDistanceArray.length;
- if (arrayLength < 2) {
- if (debugFlag)
- debugPrint(" findFactor: arrays length < 2");
- return -1.0f;
- }
- int largestIndex = arrayLength - 1;
- /*
- * Calculate distanceGain scale factor
- */
- /*
- * distanceToHead is larger than greatest distance in maxDistanceArray
- * so head is beyond the outer-most ellipse.
- */
- if (distanceToHead >= maxDistanceArray[largestIndex]) {
- if (debugFlag)
- debugPrint(" findFactor: distance > " +
- maxDistanceArray[largestIndex]);
- if (debugFlag)
- debugPrint(" maxDistanceArray length = " +
- maxDistanceArray.length);
- if (debugFlag)
- debugPrint(" findFactor returns ****** " +
- maxFactorArray[largestIndex] + " ******");
- return maxFactorArray[largestIndex];
- }
-
- /*
- * distanceToHead is smaller than least distance in minDistanceArray
- * so head is inside the inner-most ellipse.
- */
- if (distanceToHead <= minDistanceArray[0]) {
- if (debugFlag)
- debugPrint(" findFactor: distance < " +
- minDistanceArray[0]);
- if (debugFlag)
- debugPrint(" findFactor returns ****** " +
- minFactorArray[0] + " ******");
- return minFactorArray[0];
- }
-
- /*
- * distanceToHead is between points within attenuation arrays.
- * Use binary halving of distance attenuation arrays.
- */
- {
- double[] distanceArray = new double[arrayLength];
- float[] factorArray = new float[arrayLength];
- boolean[] intersectionCalculated = new boolean[arrayLength];
- // initialize intersection calculated array flags to false
- for (int i=0; i<arrayLength; i++)
- intersectionCalculated[i] = false;
- boolean intersectionOnEllipse = false;
- int factorIndex = -1;
-
- /*
- * Using binary halving to find the two index values in the
- * front and back distance arrays that the distanceToHead
- * parameter (from sound source position to head) falls between.
- * Using the current low and high index values,
- * calculate the intersection of ellipses (defined by these
- * min/max distance values) with the ray (sound source to
- * head). Put the resulting value into the distanceArray.
- */
- /*
- * initialize the lowIndex to first index of distance arrays.
- * initialize the highIndex to last index of distance arrays.
- */
- lowIndex = 0;
- highIndex = largestIndex;
-
- if (debugFlag)
- debugPrint(" while loop to find index that's closest: ");
- while (lowIndex < (highIndex-1)) {
- if (debugFlag)
- debugPrint(" lowIndex " + lowIndex +
- ", highIndex " + highIndex);
- /*
- * Calculate the Intersection of Ellipses (defined by this
- * min/max values) with the ray from the sound source to the
- * head. Put the resulting value into the distanceArray.
- */
- if (!intersectionCalculated[lowIndex]) {
- distanceArray[lowIndex] = this.intersectEllipse(
- maxDistanceArray[lowIndex], minDistanceArray[lowIndex]);
- // If return intersection distance is < 0 an error occurred.
- if (distanceArray[lowIndex] >= 0.0)
- intersectionCalculated[lowIndex] = true;
- else {
- /*
- * Error in ellipse intersection calculation. Use
- * average of max/min difference for intersection value.
- */
- distanceArray[lowIndex] = (minDistanceArray[lowIndex] +
- maxDistanceArray[lowIndex])*0.5;
- if (internalErrors)
- debugPrint(
- "Internal Error in intersectEllipse; use " +
- distanceArray[lowIndex] +
- " for intersection value " );
- // Rather than aborting, just use average and go on...
- intersectionCalculated[lowIndex] = true;
- }
- } // end of if intersection w/ lowIndex not already calculated
-
- if (!intersectionCalculated[highIndex]) {
- distanceArray[highIndex] = this.intersectEllipse(
- maxDistanceArray[highIndex],minDistanceArray[highIndex]);
- // If return intersection distance is < 0 an error occurred.
- if (distanceArray[highIndex] >= 0.0f)
- intersectionCalculated[highIndex] = true;
- else {
- /*
- * Error in ellipse intersection calculation. Use
- * average of max/min difference for intersection value.
- */
- distanceArray[highIndex] = (minDistanceArray[highIndex]+
- maxDistanceArray[highIndex])*0.5f;
- if (internalErrors)
- debugPrint(
- "Internal Error in intersectEllipse; use " +
- distanceArray[highIndex] +
- " for intersection value " );
- // Rather than aborting, just use average and go on...
- intersectionCalculated[highIndex] = true;
- }
- } // end of if intersection w/ highIndex not already calculated
-
- /*
- * Test for intersection points being the same as head position
- * distanceArray[lowIndex] and distanceArray[highIndex], if so
- * return factor value directly from array
- */
- if (distanceArray[lowIndex] >= distanceToHead) {
- if ((lowIndex != 0) &&
- (distanceToHead < distanceArray[lowIndex])) {
- if (internalErrors)
- debugPrint(
- "Internal Error: binary halving in " +
- "findFactor failed; distance < low " +
- "index value");
- }
- if (debugFlag) {
- debugPrint(" distanceArray[lowIndex] >= " +
- "distanceToHead" );
- debugPrint( " factorIndex = " + lowIndex);
- }
- intersectionOnEllipse = true;
- factorIndex = lowIndex;
- break;
- }
- else if (distanceArray[highIndex] <= distanceToHead) {
- if ((highIndex != largestIndex) &&
- (distanceToHead > distanceArray[highIndex])) {
- if (internalErrors)
- debugPrint(
- "Internal Error: binary halving in " +
- "findFactor failed; distance > high " +
- "index value");
- }
- if (debugFlag) {
- debugPrint(" distanceArray[highIndex] >= " +
- "distanceToHead" );
- debugPrint( " factorIndex = " + highIndex);
- }
- intersectionOnEllipse = true;
- factorIndex = highIndex;
- break;
- }
-
- if (distanceToHead > distanceArray[lowIndex] &&
- distanceToHead < distanceArray[highIndex] ) {
- indexMid = lowIndex + ((highIndex - lowIndex) / 2);
- if (distanceToHead <= distanceArray[indexMid])
- // value of distance in lower "half" of list
- highIndex = indexMid;
- else // value if distance in upper "half" of list
- lowIndex = indexMid;
- }
- } /* of while */
-
- /*
- * First check to see if distanceToHead is beyond min or max
- * ellipses, or on an ellipse.
- * If so, factor is calculated using the distance Ratio
- * (distanceToHead - min) / (max-min)
- * where max = maxDistanceArray[factorIndex], and
- * min = minDistanceArray[factorIndex]
- */
- if (intersectionOnEllipse && factorIndex >= 0) {
- if (debugFlag) {
- debugPrint( " ratio calculated using factorIndex " +
- factorIndex);
- debugPrint( " d.A. max pair for factorIndex " +
- maxDistanceArray[factorIndex] + ", " +
- maxFactorArray[factorIndex]);
- debugPrint( " d.A. min pair for lowIndex " +
- minDistanceArray[factorIndex] + ", " +
- minFactorArray[factorIndex]);
- }
- returnValue = (
- ( (distanceArray[factorIndex] -
- minDistanceArray[factorIndex]) /
- (maxDistanceArray[factorIndex] -
- minDistanceArray[factorIndex]) ) *
- (maxFactorArray[factorIndex] -
- minFactorArray[factorIndex]) ) +
- minFactorArray[factorIndex] ;
- if (debugFlag)
- debugPrint(" findFactor returns ****** " +
- returnValue + " ******");
- return (float)returnValue;
- }
-
- /* Otherwise, for distanceToHead between distance intersection
- * values, we need to calculate two factors - one for the
- * ellipse defined by lowIndex min/max factor arrays, and
- * the other by highIndex min/max factor arrays. Then the
- * distance Ratio (defined above) is applied, using these
- * two factor values, to get the final return value.
- */
- double highFactorValue = 1.0;
- double lowFactorValue = 0.0;
- highFactorValue =
- ( ((distanceArray[highIndex] - minDistanceArray[highIndex]) /
- (maxDistanceArray[highIndex]-minDistanceArray[highIndex])) *
- (maxFactorArray[highIndex] - minFactorArray[highIndex]) ) +
- minFactorArray[highIndex] ;
- if (debugFlag) {
- debugPrint( " highFactorValue calculated w/ highIndex " +
- highIndex);
- debugPrint( " d.A. max pair for highIndex " +
- maxDistanceArray[highIndex] + ", " +
- maxFactorArray[highIndex]);
- debugPrint( " d.A. min pair for lowIndex " +
- minDistanceArray[highIndex] + ", " +
- minFactorArray[highIndex]);
- debugPrint( " highFactorValue " + highFactorValue);
- }
- lowFactorValue =
- ( ((distanceArray[lowIndex] - minDistanceArray[lowIndex]) /
- (maxDistanceArray[lowIndex] - minDistanceArray[lowIndex])) *
- (maxFactorArray[lowIndex] - minFactorArray[lowIndex]) ) +
- minFactorArray[lowIndex] ;
- if (debugFlag) {
- debugPrint( " lowFactorValue calculated w/ lowIndex " +
- lowIndex);
- debugPrint( " d.A. max pair for lowIndex " +
- maxDistanceArray[lowIndex] + ", " +
- maxFactorArray[lowIndex]);
- debugPrint( " d.A. min pair for lowIndex " +
- minDistanceArray[lowIndex] + ", " +
- minFactorArray[lowIndex]);
- debugPrint( " lowFactorValue " + lowFactorValue);
- }
- /*
- * calculate gain scale factor based on the ratio distance
- * between ellipses the distanceToHead lies between.
- */
- /*
- * ratio: distance from listener to sound source
- * between lowIndex and highIndex times
- * attenuation value between lowIndex and highIndex
- * gives linearly interpolationed attenuation value
- */
- if (debugFlag) {
- debugPrint( " ratio calculated using distanceArray" +
- lowIndex + ", highIndex " + highIndex);
- debugPrint( " calculated pair for lowIndex " +
- distanceArray[lowIndex]+", "+ lowFactorValue);
- debugPrint( " calculated pair for highIndex " +
- distanceArray[highIndex]+", "+ highFactorValue );
- }
-
- returnValue =
- ( ( (distanceToHead - distanceArray[lowIndex]) /
- (distanceArray[highIndex] - distanceArray[lowIndex]) ) *
- (highFactorValue - lowFactorValue) ) +
- lowFactorValue ;
- if (debugFlag)
- debugPrint(" findFactor returns ******" +
- returnValue + " ******");
- return (float)returnValue;
- }
-
- }
-
- /**
- * CalculateDistanceAttenuation
- *
- * Simply calls ConeSound specific 'findFactor()' with
- * both front and back attenuation linear distance and gain scale factor
- * arrays.
- */
- @Override
- float calculateDistanceAttenuation(float distance) {
- float factor = findFactor(distance, this.attenuationDistance,
- this.attenuationGain, this.backAttenuationDistance,
- this.backAttenuationGain);
- if (factor < 0.0f)
- return 1.0f;
- else
- return factor;
- }
- /**
- * CalculateAngularGain
- *
- * Simply calls generic (for PointSound) 'findFactor()' with
- * a single set of angular attenuation distance and gain scalefactor arrays.
- */
- @Override
- float calculateAngularGain() {
- float angle = findAngularOffset();
- float factor = findFactor(angle, this.angularDistance, this.angularGain);
- if (factor < 0.0f)
- return 1.0f;
- else
- return factor;
- }
-
- /* *****************
- *
- * Find Angular Offset
- *
- * *****************/
- /*
- * Calculates the angle between the sound's direction axis and the ray from
- * the sound origin to the listener's center ear.
- * For Cone Sounds this value is the arc cosine of the dot product between
- * the sound direction vector and the vector (sound position, centerEar),
- * all in Virtual World coordinate space.
- * Center ear position is in Virtual World coordinates.
- * Assumes that calculation done in VWorld Space...
- * Assumes that xformPosition is already calculated...
- */
- float findAngularOffset() {
- Vector3f unitToEar = new Vector3f();
- Vector3f unitDirection = new Vector3f();
- Point3f xformPosition = positions[currentIndex];
- Point3f xformCenterEar = centerEars[currentIndex];
- float dotProduct;
- float angle;
- /*
- * TODO: (Question) is assumption that xformed values available O.K.
- * TODO: (Performance) save this angular offset and only recalculate
- * if centerEar or sound position have changed.
- */
- unitToEar.x = xformCenterEar.x - xformPosition.x;
- unitToEar.y = xformCenterEar.y - xformPosition.y;
- unitToEar.z = xformCenterEar.z - xformPosition.z;
- unitToEar.normalize();
- unitDirection.normalize(this.direction);
- dotProduct = unitToEar.dot(unitDirection);
- angle = (float)(Math.acos((double)dotProduct));
- if (debugFlag)
- debugPrint(" angle from cone direction = " + angle);
- return(angle);
- }
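A compact, self-contained restatement of the step above: normalize both vectors, take the dot product, and acos gives the offset angle in radians. "angularOffset" is an illustrative helper, not part of the original class:

    import javax.vecmath.Vector3f;

    static float angularOffset(Vector3f direction, Vector3f sourceToEar) {
        Vector3f a = new Vector3f(direction);
        Vector3f b = new Vector3f(sourceToEar);
        a.normalize();
        b.normalize();
        return (float) Math.acos(b.dot(a));
    }
    // e.g. angularOffset(new Vector3f(1,0,0), new Vector3f(1,1,0)) ~= 0.785 (PI/4)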
-
- /************
- *
- * Calculate Filter
- *
- * *****************/
- @Override
- /*
- * Calculates the low-pass cutoff frequency filter value applied to
- * a sound based on both:
- * Distance Filter (from Aural Attributes) based on distance
- * between the sound and the listeners position
- * Angular Filter (for Directional Sounds) based on the angle
- * between a sound's projected direction and the
- * vector between the sounds position and center ear.
- * The lower of these two filter values is used.
- * This filter value is stored into the sample's filterFreq field.
- */
- void calculateFilter(float distance, AuralParameters attribs) {
- // setting filter cutoff freq to 44.1kHz which, in this
- // implementation, is the same as not performing filtering
- float distanceFilter = 44100.0f;
- float angularFilter = 44100.0f;
- int arrayLength = attribs.getDistanceFilterLength();
- int filterType = attribs.getDistanceFilterType();
-
- boolean distanceFilterFound = false;
- boolean angularFilterFound = false;
- if ((filterType != AuralParameters.NO_FILTERING) && arrayLength > 0) {
- double[] distanceArray = new double[arrayLength];
- float[] cutoffArray = new float[arrayLength];
- attribs.getDistanceFilter(distanceArray, cutoffArray);
-
- if (debugFlag) {
- debugPrint("distanceArray cutoffArray");
- for (int i=0; i<arrayLength; i++)
- debugPrint((float)distanceArray[i] + ", " + cutoffArray[i]);
- }
-
- // Interpolate the distance filter cutoff from the distance
- // between the sound source and the listener
- distanceFilter = findFactor((double)distance,
- distanceArray, cutoffArray);
- if (distanceFilter < 0.0f)
- distanceFilterFound = false;
- else
- distanceFilterFound = true;
- }
- else {
- distanceFilterFound = false;
- distanceFilter = -1.0f;
- }
-
- if (debugFlag)
- debugPrint(" calculateFilter arrayLength = " + arrayLength);
-
- // Angular filter of directional sound sources.
- arrayLength = angularDistance.length;
- filterType = angularFilterType;
- if ((filterType != AuralParameters.NO_FILTERING) && arrayLength > 0) {
- float angle = findAngularOffset();
- angularFilter = findFactor((double)angle,
- angularDistance, angularFilterCutoff);
- if (angularFilter < 0.0f)
- angularFilterFound = false;
- else
- angularFilterFound = true;
- }
- else {
- angularFilterFound = false;
- angularFilter = -1.0f;
- }
-
- filterFlag = distanceFilterFound || angularFilterFound;
- if (distanceFilter < 0.0f)
- filterFreq = angularFilter;
- else if (angularFilter < 0.0f)
- filterFreq = distanceFilter;
- else // both filter frequencies are > 0
- filterFreq = Math.min(distanceFilter, angularFilter);
-
- if (debugFlag)
- debugPrint(" calculateFilter flag,freq = " + filterFlag +
- "," + filterFreq );
- }
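The final selection above reduces to "the lower valid cutoff wins", with -1.0f standing for "no filter found" just as in the method body. A tiny sketch of that rule in isolation (the helper name is illustrative):

    static float combineCutoffs(float distanceCutoff, float angularCutoff) {
        if (distanceCutoff < 0.0f)
            return angularCutoff;
        if (angularCutoff < 0.0f)
            return distanceCutoff;
        return Math.min(distanceCutoff, angularCutoff);
    }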
-
-}
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSMidi.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSMidi.java
deleted file mode 100644
index 1e99e89..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSMidi.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-/**
- * The JSMidi class defines audio output methods that call the JavaSound
- * API methods for MIDI sounds.
- *
- * <p>
- * NOTE: This class is not yet implemented.
- */
-
-class JSMidi extends JSChannel {
- private static boolean warningReported = false;
-
- JSMidi() {
- // Report a "not implemented" warning message
- if (!warningReported) {
- System.err.println("***");
- System.err.println("*** WARNING: JavaSoundMixer: MIDI sound not implemented");
- System.err.println("***");
- warningReported = true;
- }
- }
-}
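
JSMidi is only a placeholder. For orientation, a future implementation would most likely build on the standard javax.sound.midi API; a minimal, self-contained playback sketch (not code from this package; the URL argument is a placeholder) could look like this:

    import java.net.URL;
    import javax.sound.midi.MidiSystem;
    import javax.sound.midi.Sequence;
    import javax.sound.midi.Sequencer;

    // Minimal sketch: play a MIDI file through the default sequencer.
    // Shown only to indicate the API a future JSMidi would likely use.
    public class MidiPlaybackSketch {
        public static void main(String[] args) throws Exception {
            URL url = new URL(args[0]);               // e.g. a file: or http: URL
            Sequence sequence = MidiSystem.getSequence(url);
            Sequencer sequencer = MidiSystem.getSequencer();
            sequencer.open();
            sequencer.setSequence(sequence);
            sequencer.start();                        // plays asynchronously
            Thread.sleep(sequence.getMicrosecondLength() / 1000 + 500);
            sequencer.close();
        }
    }
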
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSPositionalSample.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSPositionalSample.java
deleted file mode 100644
index 70519cc..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSPositionalSample.java
+++ /dev/null
@@ -1,1344 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-/*
- * Java Sound PositionalSample object
- *
- * IMPLEMENTATION NOTE: The JavaSoundMixer is incomplete and really needs
- * to be rewritten.
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-import javax.media.j3d.PhysicalBody;
-import javax.media.j3d.Transform3D;
-import javax.media.j3d.View;
-import javax.vecmath.Point3d;
-import javax.vecmath.Point3f;
-import javax.vecmath.Vector3f;
-
-import com.sun.j3d.audioengines.AuralParameters;
-
-/**
- * The PositionalSample class defines the data and methods associated with a
- * PointSound sample played through the AudioDevice.
- */
-
-class JSPositionalSample extends JSSample
-{
-
- // maintain fields for stereo channel rendering
- float leftGain = 1.0f; // scale factor
- float rightGain = 1.0f; // scale factor
- int leftDelay = 0; // left InterauralTimeDifference in millisec
- int rightDelay = 0; // right ITD in millisec
- // fields for reverb channel
-
- // debug flag for the verbose Doppler calculation methods
- static final
- protected boolean dopplerFlag = true;
-
- /**
-     * For positional and directional sounds, TWO HAE streams or clips
- * are allocated, one each for the left and right channels, played at
- * a different (delayed) time and with a different gain value.
- */
- int secondIndex = NULL_SAMPLE;
- /**
-     * A third sample for control of reverb of the stream/clip is opened
-     * and maintained for all directional/positional sounds.
-     * For now, even if no aural attributes (using reverb) are active,
-     * a reverb channel is always started with the other two. A sound could
-     * be started without reverb and reverb added later, but since there
-     * is no way to offset properly into all sounds (considering non-cached
-     * data and inconsistent rate changes during playing) this third sound is
-     * always allocated and started.
- */
- int reverbIndex = NULL_SAMPLE;
-
- /**
-     * Save ear positions transformed into VirtualWorld coords from Head coords.
-     * These default positions are used when the real values cannot be queried.
- */
- Point3f xformLeftEar = new Point3f(-0.09f, -0.03f, 0.095f);
- Point3f xformRightEar = new Point3f(0.09f, -0.03f, 0.095f);
- // Z axis in head space - looking into the screen
- Vector3f xformHeadZAxis = new Vector3f(0.0f, 0.0f, -1.0f); // Va
-
- /**
-     * Save vectors from the sound source position to the transformed ear positions
- */
- Vector3f sourceToCenterEar = new Vector3f(); // Vh
- Vector3f sourceToRightEar = new Vector3f(); // Vf or Vc
- Vector3f sourceToLeftEar = new Vector3f(); // Vf or Vc
-
- boolean averageDistances = false;
- long deltaTime = 0;
- double sourcePositionChange = -1.0;
- double headPositionChange = -1.0;
-
- /*
- * Maintain the last locations of sound and head as well as time the
- * sound was last processed.
- * Process delta distance and time as part of Doppler calculations.
- */
- static int MAX_DISTANCES = 4;
- int numDistances = 0;
-// TODO: time is based on changes to position only!!!
-// TODO: must snapshot when either Position OR ear changes!!!
-// TODO: must grab all changes to VIEW parameters (could change ear)!!!
-//       not just when pointer to View changes!!
- long[] times = new long[MAX_DISTANCES];
- Point3f[] positions = new Point3f[MAX_DISTANCES]; // xformed sound source positions
- Point3f[] centerEars = new Point3f[MAX_DISTANCES]; // xformed center ear positions
- /*
- * a set of indices (first, last, and current) are maintained to point
- * into the above arrays
- */
- int firstIndex = 0;
- int lastIndex = 0;
- int currentIndex = 0;
-
- /*
-     * Allow changes in Doppler rate only in small incremental values, otherwise
-     * skips in the pitch of a sound are heard during playback.
- * When playback is faster, allow delta changes:
- * (diff in Factor for octave (1.0))/(12 1/2-steps))*(1/4) of half-step
- * When playback is slower, allow delta changes:
- * (diff in Factor for octave (0.5))/(12 1/2-steps))*(1/4) of half-step
- */
- double lastRequestedDopplerRateRatio = -1.0f;
- double lastActualDopplerRateRatio = -1.0f;
- static double maxRatio = 256.0f; // 8 times higher/lower
- /*
- * denotes movement of sound away or towards listener
- */
- static int TOWARDS = 1;
- static int NO_CHANGE = 0;
- static int AWAY = -1;
-
- /*
- * Process request for Filtering fields
- */
- boolean filterFlag = false;
- float filterFreq = -1.0f;
-
- /*
- * Construct a new audio device Sample object
- */
- public JSPositionalSample() {
- super();
- if (debugFlag)
- debugPrint("JSPositionalSample constructor");
-        // initialize circular buffer for averaging distance values
- for (int i=0; i<MAX_DISTANCES; i++) {
- positions[i] = new Point3f();
- centerEars[i] = new Point3f(0.09f, -0.03f, 0.095f);
- }
- clear();
- }
-
- // TODO: get/set secondChannel to JSStream/Clip/MIDI
- // TODO: get/set reverbChannel to JSStream/Clip/MIDI
- /*
- * Process request for Filtering fields
- */
- @Override
- boolean getFilterFlag() {
- return filterFlag;
- }
- @Override
- float getFilterFreq() {
- return filterFreq;
- }
-
-
- /**
- * Clears the fields associated with sample data for this sound, and
- * frees any device specific data associated with this sample.
- */
- @Override
- public void clear() {
- if (debugFlag)
- debugPrint("JSPositionalSample.clear() enter");
- super.clear();
- leftGain = 1.0f;
- rightGain = 1.0f;
- leftDelay = 0;
- rightDelay = 0;
- xformLeftEar.set(-0.09f, -0.03f, 0.095f);
- xformRightEar.set(0.09f, -0.03f, 0.095f);
- // Z axis in head space - looking into the screen
- xformHeadZAxis.set(0.0f, 0.0f, -1.0f); // Va
- sourceToCenterEar.set(0.0f, 0.0f, 0.0f); // Vh
- sourceToRightEar.set(0.0f, 0.0f, 0.0f); // Vf or Vc
- sourceToLeftEar.set(0.0f, 0.0f, 0.0f); // Vf or Vc
- reset();
- if (debugFlag)
- debugPrint("JSPositionalSample.clear() exit");
- }
-
- /**
- * Reset time and count based fields associated with sample data
- * for this sound
- */
- @Override
- void reset() {
- if (debugFlag)
- debugPrint("JSPositionalSample.reset() enter");
- super.reset();
- averageDistances = false; // denotes not previously processed
- deltaTime = 0;
- sourcePositionChange = -1.0;
- headPositionChange = -1.0;
- rateRatio = 1.0f;
- numDistances = 0;
- averageDistances = false;
- if (debugFlag)
- debugPrint("JSPositionalSample.reset() exit");
- }
- // increments index counters and bumps index numbers if the end of
- // the circular buffer is reached
- void incrementIndices() {
- int maxIndex = MAX_DISTANCES - 1;
- if (numDistances < maxIndex) {
- averageDistances = false;
- currentIndex = numDistances;
- lastIndex = currentIndex - 1;
- firstIndex = 0;
- numDistances++;
- }
- else if (numDistances == maxIndex) {
- // we filled the data buffers completely and are ready to
- // calculate averages
- averageDistances = true;
- currentIndex = maxIndex;
- lastIndex = currentIndex - 1;
- firstIndex = 0;
- numDistances++;
- }
- else if (numDistances > maxIndex) {
- // increment each counter and loop around
- averageDistances = true;
- currentIndex++;
- lastIndex++;
- firstIndex++;
- currentIndex %= MAX_DISTANCES;
- lastIndex %= MAX_DISTANCES;
- firstIndex %= MAX_DISTANCES;
- }
- }
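
incrementIndices and setXformedPosition together maintain a small ring buffer of recent positions and times so that velocities can be averaged. A compact illustration of the same ring-buffer bookkeeping, reduced to one scalar value per entry (the class and names here are illustrative, not fields of JSPositionalSample):

    // Illustrative ring buffer: keep the last N values and, once full, average
    // the magnitude of consecutive deltas (the class above does this for
    // Point3f sound positions and center-ear positions).
    class PositionHistory {
        static final int N = 4;
        private final float[] history = new float[N];
        private int count = 0;
        private int current = -1;

        void push(float value) {
            current = (current + 1) % N;
            history[current] = value;
            if (count < N) count++;
        }

        float averageDelta() {
            if (count < N) return 0.0f;            // not enough samples yet
            float sum = 0.0f;
            for (int i = 0; i < N - 1; i++) {
                int a = (current + 1 + i) % N;     // walk oldest -> newest
                int b = (a + 1) % N;
                sum += Math.abs(history[b] - history[a]);
            }
            return sum / (N - 1);
        }
    }
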
-
-    // Not only is the position transformed, but delta time is calculated and
-    // the old transformed position is saved.
-    // Average the last MAX_DISTANCES delta times and changes in position using
-    // an array for each, circularly storing the time and distance values
-    // into these arrays.
-    // The current transformed position and time are stored at the current
-    // index of their respective arrays.
- void setXformedPosition() {
- Point3f newPosition = new Point3f();
- if (debugFlag)
- debugPrint("*** setXformedPosition");
- // xform Position
- if (getVWrldXfrmFlag()) {
- if (debugFlag)
- debugPrint(" Transform set so transform pos");
- vworldXfrm.transform(position, newPosition);
- }
- else {
- if (debugFlag)
- debugPrint(" Transform NOT set so pos => xformPos");
- newPosition.set(position);
- }
-        // store position and increment indices ONLY if there's an actual change
- if (newPosition.x == positions[currentIndex].x &&
- newPosition.y == positions[currentIndex].y &&
- newPosition.z == positions[currentIndex].z ) {
- if (debugFlag)
- debugPrint(" No change in pos, so don't reset");
- return;
- }
-
- incrementIndices();
- // store new transformed position
- times[currentIndex] = System.currentTimeMillis();
- positions[currentIndex].set(newPosition);
- if (debugFlag)
- debugPrint(" xform(sound)Position -" +
- " positions[" + currentIndex + "] = (" +
- positions[currentIndex].x + ", " +
- positions[currentIndex].y + ", " +
- positions[currentIndex].z + ")");
-
- // since this is a change to the sound position and not the
- // head save the last head position into the current element
- if (numDistances > 1)
- centerEars[currentIndex].set(centerEars[lastIndex]);
-
- }
-
- /**
- * Set Doppler effect Rate
- *
- * Calculate the rate of change in for the head and sound
- * between the two time stamps (last two times position or
- * VirtualWorld transform was updated).
- * First determine if the head and sound source are moving
- * towards each other (distance between them is decreasing),
- * moving away from each other (distance between them is
- * increasing), or no change (distance is the same, not moving
- * or moving the same speed/direction).
- * The following equation is used for determining the change in frequency -
- * If there has been a change in the distance between the head and sound:
- *
- * f' = f * frequencyScaleFactor * velocityRatio
- *
-     * For no change in the distance between head and sound, velocityRatio is 1:
- *
- * f' = f
- *
- * For head and sound moving towards each other, velocityRatio (> 1.0) is:
- *
- * | speedOfSound*rollOff + velocityOfHead*velocityScaleFactor |
- * | ------------------------------------------------------------- |
- * | speedOfSound*rollOff - velocityOfSource*velocityScaleFactor |
- *
- * For head and sound moving away from each other, velocityRatio (< 1.0) is:
- *
- * | speedOfSound*rollOff - velocityOfHead*velocityScaleFactor |
- * | ------------------------------------------------------------- |
- * | speedOfSound*rollOff + velocityOfSource*velocityScaleFactor |
- *
- * where frequencyScaleFactor, rollOff, velocityScaleFactor all come from
- * the active AuralAttributes parameters.
-     * The following special cases must be tested for AuralAttribute parameters:
- * rolloff
- * Value MUST be > zero for any sound to be heard!
- * If value is zero, all sounds affected by AuralAttribute region are silent.
- * velocityScaleFactor
- * Value MUST be > zero for any sound to be heard!
- * If value is zero, all sounds affected by AuralAttribute region are paused.
- * frequencyScaleFactor
- * Value of zero disables Doppler calculations:
- * Sfreq' = Sfreq * frequencyScaleFactor
- *
-     * This rate is passed to the device driver as a change to the playback
-     * sample rate; in this case the frequency need not be known.
- *
- * Return value of zero denotes no change
- * Return value of -1 denotes ERROR
- */
- float calculateDoppler(AuralParameters attribs) {
- double sampleRateRatio = 1.0;
- double headVelocity = 0.0; // in milliseconds
- double soundVelocity = 0.0; // in milliseconds
- double distanceSourceToHead = 0.0; // in meters
- double lastDistanceSourceToHead = 0.0; // in meters
- float speedOfSound = attribs.SPEED_OF_SOUND;
- double numerator = 1.0;
- double denominator = 1.0;
- int direction = NO_CHANGE; // sound movement away or towards listener
-
- Point3f lastXformPosition;
- Point3f lastXformCenterEar;
- Point3f xformPosition;
- Point3f xformCenterEar;
- float averagedSoundDistances = 0.0f;
- float averagedEarsDistances = 0.0f;
-
- /*
- * Average the differences between the last MAX_DISTANCE
- * sound positions and head positions
- */
- if (!averageDistances) {
-            // TODO: Use some epsilon to do 'equals' test against
- if (dopplerFlag)
- debugPrint("JSPositionalSample.calculateDoppler - " +
- "not enough distance data collected, " +
- "dopplerRatio set to zero");
- // can't calculate change in direction
- return 0.0f; // sample rate ratio is zero
- }
-
- lastXformPosition = positions[lastIndex];
- lastXformCenterEar = centerEars[lastIndex];
- xformPosition = positions[currentIndex];
- xformCenterEar = centerEars[currentIndex];
- distanceSourceToHead = xformPosition.distance(xformCenterEar);
- lastDistanceSourceToHead = lastXformPosition.distance(lastXformCenterEar);
- if (dopplerFlag) {
- debugPrint("JSPositionalSample.calculateDoppler - distances: " +
- "current,last = " + distanceSourceToHead + ", " +
- lastDistanceSourceToHead );
- debugPrint(" " +
- "current position = " +
- xformPosition.x + ", " + xformPosition.y +
- ", " + xformPosition.z);
- debugPrint(" " +
- "current ear = " +
- xformCenterEar.x + ", " + xformCenterEar.y +
- ", " + xformCenterEar.z);
- debugPrint(" " +
- "last position = " +
- lastXformPosition.x + ", " + lastXformPosition.y +
- ", " + lastXformPosition.z);
- debugPrint(" " +
- "last ear = " +
- lastXformCenterEar.x + ", " + lastXformCenterEar.y +
- ", " + lastXformCenterEar.z);
- }
- if (distanceSourceToHead == lastDistanceSourceToHead) {
-            // TODO: Use some epsilon to do 'equals' test against
- if (dopplerFlag)
- debugPrint("JSPositionalSample.calculateDoppler - " +
- "distance diff = 0, dopplerRatio set to zero");
- // can't calculate change in direction
- return 0.0f; // sample rate ratio is zero
- }
-
- deltaTime = times[currentIndex] - times[firstIndex];
- for (int i=0; i<(MAX_DISTANCES-1); i++) {
- averagedSoundDistances += positions[i+1].distance(positions[i]);
- averagedEarsDistances += centerEars[i+1].distance(centerEars[i]);
- }
- averagedSoundDistances /= (MAX_DISTANCES-1);
- averagedEarsDistances /= (MAX_DISTANCES-1);
- soundVelocity = averagedSoundDistances/deltaTime;
- headVelocity = averagedEarsDistances/deltaTime;
- if (dopplerFlag) {
- debugPrint(" " +
- "delta time = " + deltaTime );
- debugPrint(" " +
- "soundPosition delta = " +
- xformPosition.distance(lastXformPosition));
- debugPrint(" " +
- "soundVelocity = " + soundVelocity);
- debugPrint(" " +
- "headPosition delta = " +
- xformCenterEar.distance(lastXformCenterEar));
- debugPrint(" " +
- "headVelocity = " + headVelocity);
- }
- if (attribs != null) {
-
- float rolloff = attribs.rolloff;
- float velocityScaleFactor = attribs.velocityScaleFactor;
- if (rolloff != 1.0f) {
- speedOfSound *= rolloff;
- if (dopplerFlag)
- debugPrint(" " +
- "attrib rollof = " + rolloff);
- }
- if (velocityScaleFactor != 1.0f) {
- soundVelocity *= velocityScaleFactor;
- headVelocity *= velocityScaleFactor;
- if (dopplerFlag) {
- debugPrint(" " +
- "attrib velocity scale factor = " +
- velocityScaleFactor );
- debugPrint(" " +
- "new soundVelocity = " + soundVelocity);
- debugPrint(" " +
- "new headVelocity = " + headVelocity);
- }
- }
- }
- if (distanceSourceToHead < lastDistanceSourceToHead) {
- // sound and head moving towards each other
- if (dopplerFlag)
- debugPrint(" " +
- "moving towards...");
- direction = TOWARDS;
- numerator = speedOfSound + headVelocity;
- denominator = speedOfSound - soundVelocity;
- }
- else {
- // sound and head moving away from each other
- // note: no change in distance case covered above
- if (dopplerFlag)
- debugPrint(" " +
- "moving away...");
- direction = AWAY;
- numerator = speedOfSound - headVelocity;
- denominator = speedOfSound + soundVelocity;
- }
- if (numerator <= 0.0) {
- if (dopplerFlag)
- debugPrint("JSPositionalSample.calculateDoppler: " +
- "BOOM!! - velocity of head > speed of sound");
- return -1.0f;
- }
- else if (denominator <= 0.0) {
- if (dopplerFlag)
- debugPrint("JSPositionalSample.calculateDoppler: " +
- "BOOM!! - velocity of sound source negative");
- return -1.0f;
- }
- else {
- if (dopplerFlag)
- debugPrint("JSPositionalSample.calculateDoppler: " +
- "numerator = " + numerator +
- ", denominator = " + denominator );
- sampleRateRatio = numerator / denominator;
- }
-
-/********
- IF direction WERE important to calling method...
- * Return value greater than 0 denotes direction of sound source is
- * towards the listener
- * Return value less than 0 denotes direction of sound source is
- * away from the listener
- if (direction == AWAY)
- return -((float)sampleRateRatio);
- else
- return (float)sampleRateRatio;
-*********/
- return (float)sampleRateRatio;
- }
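
The ratio computed by calculateDoppler follows the equations in the method comment above. As a self-contained illustration of the core arithmetic (a sketch under the same sign conventions, not a drop-in replacement for the method):

    // Doppler-style rate ratio: > 1 when source and head approach each other,
    // < 1 when they move apart, -1 on the same error conditions used above
    // (a non-positive numerator or denominator).
    static float dopplerRatio(float speedOfSound, float rolloff,
                              float headVelocity, float sourceVelocity,
                              float velocityScaleFactor, boolean approaching) {
        float c  = speedOfSound * rolloff;
        float vh = headVelocity * velocityScaleFactor;
        float vs = sourceVelocity * velocityScaleFactor;
        float numerator   = approaching ? c + vh : c - vh;
        float denominator = approaching ? c - vs : c + vs;
        if (numerator <= 0.0f || denominator <= 0.0f)
            return -1.0f;
        return numerator / denominator;
    }
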
-
- void updateEar(int dirtyFlags, View view) {
- if (debugFlag)
- debugPrint("*** updateEar fields");
- // xform Ear
- Point3f xformCenterEar = new Point3f();
- if (!calculateNewEar(dirtyFlags, view, xformCenterEar)) {
- if (debugFlag)
- debugPrint("calculateNewEar returned false");
- return;
- }
- // store ear and increment indices ONLY if there is an actual change
- if (xformCenterEar.x == centerEars[currentIndex].x &&
- xformCenterEar.y == centerEars[currentIndex].y &&
- xformCenterEar.z == centerEars[currentIndex].z ) {
- if (debugFlag)
- debugPrint(" No change in ear, so don't reset");
- return;
- }
- // store xform Ear
- incrementIndices();
- times[currentIndex] = System.currentTimeMillis();
- centerEars[currentIndex].set(xformCenterEar);
- // since this is a change to the head position and not the sound
- // position save the last sound position into the current element
- if (numDistances > 1)
- positions[currentIndex].set(positions[lastIndex]);
- }
-
- boolean calculateNewEar(int dirtyFlags, View view, Point3f xformCenterEar) {
- /*
- * Transform ear position (from Head) into Virtual World Coord space
- */
- Point3d earPosition = new Point3d(); // temporary double Point
-
- // TODO: check dirty flags coming in
- // For now, recalculate ear positions by forcing earsXformed false
- boolean earsXformed = false;
- if (!earsXformed) {
- if (view != null) {
- PhysicalBody body = view.getPhysicalBody();
- if (body != null) {
-
- // Get Head Coord. to Virtual World transform
- // TODO: re-enable this when userHeadToVworld is
- // implemented correctly!!!
- Transform3D headToVwrld = new Transform3D();
- view.getUserHeadToVworld(headToVwrld);
- if (debugFlag) {
- debugPrint("user head to Vwrld colum-major:");
- double[] matrix = new double[16];
- headToVwrld.get(matrix);
- debugPrint("JSPosSample " + matrix[0]+", " +
- matrix[1]+", "+matrix[2]+", "+matrix[3]);
- debugPrint("JSPosSample " + matrix[4]+", " +
- matrix[5]+", "+matrix[6]+", "+matrix[7]);
- debugPrint("JSPosSample " + matrix[8]+", " +
- matrix[9]+", "+matrix[10]+", "+matrix[11]);
- debugPrint("JSPosSample " + matrix[12]+", " +
- matrix[13]+", "+matrix[14]+", "+matrix[15]);
- }
-
- // Get left and right ear positions in Head Coord.s
- // Transforms left and right ears to Virtual World coord.s
- body.getLeftEarPosition(earPosition);
- xformLeftEar.x = (float)earPosition.x;
- xformLeftEar.y = (float)earPosition.y;
- xformLeftEar.z = (float)earPosition.z;
- body.getRightEarPosition(earPosition);
- xformRightEar.x = (float)earPosition.x;
- xformRightEar.y = (float)earPosition.y;
- xformRightEar.z = (float)earPosition.z;
- headToVwrld.transform(xformRightEar);
- headToVwrld.transform(xformLeftEar);
- // Transform head viewing (Z) axis to Virtual World coord.s
- xformHeadZAxis.set(0.0f, 0.0f, -1.0f); // Va
- headToVwrld.transform(xformHeadZAxis);
-
- // calculate the new (current) mid-point between the ears
- // find the mid point between left and right ear positions
- xformCenterEar.x = xformLeftEar.x +
- ((xformRightEar.x - xformLeftEar.x)*0.5f);
- xformCenterEar.y = xformLeftEar.y +
- ((xformRightEar.y - xformLeftEar.y)*0.5f);
- xformCenterEar.z = xformLeftEar.z +
- ((xformRightEar.z - xformLeftEar.z)*0.5f);
- // TODO: when head changes earDirty should be set!
- // earDirty = false;
- if (debugFlag) {
- debugPrint(" earXformed CALCULATED");
- debugPrint(" xformCenterEar = " +
- xformCenterEar.x + " " +
- xformCenterEar.y + " " +
- xformCenterEar.z );
- }
- earsXformed = true;
- } // end of body NOT null
- } // end of view NOT null
- } // end of earsDirty
- else {
- // TODO: use existing transformed ear positions
- }
-
- if (!earsXformed) {
- // uses the default head position of (0.0, -0.03, 0.095)
- if (debugFlag)
- debugPrint(" earXformed NOT calculated");
- }
- return earsXformed;
- }
-
- /**
- * Render this sample
- *
- * Calculate the audiodevice parameters necessary to spatially play this
- * sound.
- */
- @Override
- public void render(int dirtyFlags, View view, AuralParameters attribs) {
- if (debugFlag)
- debugPrint("JSPositionalSample.render");
- updateEar(dirtyFlags, view);
-
- /*
- * Time to check velocities and change the playback rate if necessary...
- *
- * Rolloff value MUST be > zero for any sound to be heard!
- * If rolloff is zero, all sounds affected by AuralAttribute region
- * are silent.
- * FrequencyScaleFactor value MUST be > zero for any sound to be heard!
- * since Sfreq' = Sfreq * frequencyScaleFactor.
- * If FrequencyScaleFactor is zero, all sounds affected by
- * AuralAttribute region are paused.
- * VelocityScaleFactor value of zero disables Doppler calculations.
- *
- * Scale 'Doppler' rate (or lack of Doppler) by frequencyScaleFactor.
- */
- float dopplerRatio = 1.0f;
- if (attribs != null) {
- float rolloff = attribs.rolloff;
- float frequencyScaleFactor = attribs.frequencyScaleFactor;
- float velocityScaleFactor = attribs.velocityScaleFactor;
- if (debugFlag || dopplerFlag)
- debugPrint("JSPositionalSample: attribs NOT null");
- if (rolloff <= 0.0f) {
- if (debugFlag)
- debugPrint(" rolloff = " + rolloff + " <= 0.0" );
- // TODO: Make sound silent
- // return ???
- }
- else if (frequencyScaleFactor <= 0.0f) {
- if (debugFlag)
- debugPrint(" freqScaleFactor = " + frequencyScaleFactor +
- " <= 0.0" );
- // TODO: Pause sound silent
- // return ???
- }
- else if (velocityScaleFactor > 0.0f) {
- if (debugFlag || dopplerFlag)
- debugPrint(" velocityScaleFactor = " +
- velocityScaleFactor);
-/*******
- if (deltaTime > 0) {
-*******/
- // Doppler can be calculated after the second time
- // updateXformParams() is executed
- dopplerRatio = calculateDoppler(attribs);
-
- if (dopplerRatio == 0.0f) {
-                    // dopplerRatio of zero denotes no change
- // TODO: But what if frequencyScaleFactor has changed
- if (debugFlag) {
- debugPrint("JSPositionalSample: render: " +
- "dopplerRatio returned zero; no change");
- }
- }
- else if (dopplerRatio == -1.0f) {
- // error returned by calculateDoppler
- if (debugFlag) {
- debugPrint("JSPositionalSample: render: " +
- "dopplerRatio returned = " +
- dopplerRatio + "< 0");
- }
- // TODO: Make sound silent
- // return ???
- }
- else if (dopplerRatio > 0.0f) {
- // rate could be changed
- rateRatio = dopplerRatio * frequencyScaleFactor *
- getRateScaleFactor();
- if (debugFlag) {
- debugPrint(" scaled by frequencyScaleFactor = " +
- frequencyScaleFactor );
- }
- }
-/******
- }
- else {
- if (debugFlag)
- debugPrint("deltaTime <= 0 - skip Doppler calc");
- }
-******/
- }
- else { // auralAttributes not null but velocityFactor <= 0
- // Doppler is disabled
- rateRatio = frequencyScaleFactor * getRateScaleFactor();
- }
- }
- /*
- * since aural attributes undefined, default values are used,
- * thus no Doppler calculated
- */
- else {
- if (debugFlag || dopplerFlag)
- debugPrint("JSPositionalSample: attribs null");
- rateRatio = 1.0f;
- }
-
- this.panSample(attribs);
- }
-
- /* *****************
- *
- * Calculate Angular Gain
- *
- * *****************/
- /*
- * Calculates the Gain scale factor applied to the overall gain for
- * a sound based on angle between a sound's projected direction and the
- * vector between the sounds position and center ear.
- *
- * For Point Sounds this value is always 1.0f.
- */
- float calculateAngularGain() {
- return(1.0f);
- }
-
- /* *****************
- *
- * Calculate Filter
- *
- * *****************/
- /*
-     * Calculates the low-pass cutoff frequency filter value applied to
-     * a sound based on both:
-     *     Distance Filter (from Aural Attributes) based on the distance
-     *         between the sound and the listener's position
-     *     Angular Filter (for Directional Sounds) based on the angle
-     *         between a sound's projected direction and the
-     *         vector between the sound's position and the center ear.
-     * The lower of these two filter values is used.
- * This filter value is stored into the sample's filterFreq field.
- */
- void calculateFilter(float distance, AuralParameters attribs) {
- // setting filter cutoff freq to 44.1kHz which, in this
- // implementation, is the same as not performing filtering
- float distanceFilter = 44100.0f;
- float angularFilter = 44100.0f;
- int arrayLength = attribs.getDistanceFilterLength();
- int filterType = attribs.getDistanceFilterType();
- boolean distanceFilterFound = false;
- boolean angularFilterFound = false;
- if ((filterType != AuralParameters.NO_FILTERING) && arrayLength > 0) {
- double[] distanceArray = new double[arrayLength];
- float[] cutoffArray = new float[arrayLength];
- attribs.getDistanceFilter(distanceArray, cutoffArray);
-
- if (debugFlag) {
- debugPrint("distanceArray cutoffArray");
- for (int i=0; i<arrayLength; i++)
- debugPrint((float)(distanceArray[i]) + ", " + cutoffArray[i]);
- }
- distanceFilter = findFactor((double)distance,
- distanceArray, cutoffArray);
- if (distanceFilter < 0.0f)
- distanceFilterFound = false;
- else
- distanceFilterFound = true;
- }
- else {
- distanceFilterFound = false;
- distanceFilter = -1.0f;
- }
-
- if (debugFlag)
- debugPrint(" calculateFilter arrayLength = " + arrayLength);
-
- // Angular filter only applies to directional sound sources.
- angularFilterFound = false;
- angularFilter = -1.0f;
-
- filterFlag = distanceFilterFound || angularFilterFound;
- filterFreq = distanceFilter;
- if (debugFlag)
- debugPrint(" calculateFilter flag,freq = " + filterFlag +
- "," + filterFreq );
- }
-
- /* *****************
- *
- * Find Factor
- *
- * *****************/
- /*
- * Interpolates the correct output factor given a 'distance' value
- * and references to the distance array and factor array used in
- * the calculation. These array parameters could be either linear or
- * angular distance arrays, or filter arrays.
- * The values in the distance array are monotonically increasing.
- * This method looks at pairs of distance array values to find which
- * pair the input distance argument is between distanceArray[index] and
- * distanceArray[index+1].
- * The index is used to get factorArray[index] and factorArray[index+1].
- * Then the ratio of the 'distance' between this pair of distanceArray
- * values is used to scale the two found factorArray values proportionally.
- * The resulting factor is returned, unless there is an error, then -1.0
- * is returned.
- */
- float findFactor(double distance,
- double[] distanceArray, float[] factorArray) {
- int index, lowIndex, highIndex, indexMid;
-
- if (debugFlag)
- debugPrint("JSPositionalSample.findFactor entered");
-
- /*
- * Error checking
- */
- if (distanceArray == null || factorArray == null) {
- if (debugFlag)
- debugPrint(" findFactor: arrays null");
- return -1.0f; // no value
- }
- int arrayLength = distanceArray.length;
- if (arrayLength < 2) {
- if (debugFlag)
- debugPrint(" findFactor: arrays length < 2");
- return -1.0f; // no value
- }
- int largestIndex = arrayLength - 1;
-
- /*
- * Calculate distanceGain scale factor
- */
- if (distance >= distanceArray[largestIndex]) {
- if (debugFlag) {
- debugPrint(" findFactor: distance > " +
- distanceArray[largestIndex]);
- debugPrint(" distanceArray length = "+ arrayLength);
- }
- return factorArray[largestIndex];
- }
- else if (distance <= distanceArray[0]) {
- if (debugFlag)
- debugPrint(" findFactor: distance < " +
- distanceArray[0]);
- return factorArray[0];
- }
- /*
- * Distance between points within attenuation array.
-         * Use binary halving of distance array
- */
- else {
- lowIndex = 0;
- highIndex = largestIndex;
- if (debugFlag)
- debugPrint(" while loop to find index: ");
- while (lowIndex < (highIndex-1)) {
- if (debugFlag) {
- debugPrint(" lowIndex " + lowIndex +
- ", highIndex " + highIndex);
- debugPrint(" d.A. pair for lowIndex " +
- distanceArray[lowIndex] + ", " + factorArray[lowIndex] );
- debugPrint(" d.A. pair for highIndex " +
- distanceArray[highIndex] + ", " + factorArray[highIndex] );
- }
- /*
-                 * we can assume distance is between distance attenuation values
- * distanceArray[lowIndex] and distanceArray[highIndex]
- * calculate gain scale factor based on distance
- */
- if (distanceArray[lowIndex] >= distance) {
- if (distance < distanceArray[lowIndex]) {
- if (internalErrors)
- debugPrint("Internal Error: binary halving in " +
- " findFactor failed; distance < index value");
- }
- if (debugFlag) {
- debugPrint( " index == distanceGain " +
- lowIndex);
- debugPrint(" findFactor returns [LOW=" +
- lowIndex + "] " + factorArray[lowIndex]);
- }
- // take value of scale factor directly from factorArray
- return factorArray[lowIndex];
- }
- else if (distanceArray[highIndex] <= distance) {
- if (distance > distanceArray[highIndex]) {
- if (internalErrors)
- debugPrint("Internal Error: binary halving in " +
- " findFactor failed; distance > index value");
- }
- if (debugFlag) {
- debugPrint( " index == distanceGain " +
- highIndex);
- debugPrint(" findFactor returns [HIGH=" +
- highIndex + "] " + factorArray[highIndex]);
- }
- // take value of scale factor directly from factorArray
- return factorArray[highIndex];
- }
- if (distance > distanceArray[lowIndex] &&
- distance < distanceArray[highIndex] ) {
- indexMid = lowIndex + ((highIndex - lowIndex) / 2);
- if (distance <= distanceArray[indexMid])
- // value of distance in lower "half" of list
- highIndex = indexMid;
- else // value if distance in upper "half" of list
- lowIndex = indexMid;
- }
- } /* of while */
-
- /*
-             * ratio of how far 'distance' lies between distanceArray[lowIndex]
-             * and distanceArray[highIndex], applied to the difference between
-             * factorArray[lowIndex] and factorArray[highIndex],
-             * gives the linearly interpolated attenuation value
- */
- if (debugFlag) {
- debugPrint( " ratio calculated using lowIndex " +
- lowIndex + ", highIndex " + highIndex);
- debugPrint( " d.A. pair for lowIndex " +
- distanceArray[lowIndex]+", "+factorArray[lowIndex] );
- debugPrint( " d.A. pair for highIndex " +
- distanceArray[highIndex]+", "+factorArray[highIndex] );
- }
-
- float outputFactor =
- ((float)(((distance - distanceArray[lowIndex])/
- (distanceArray[highIndex] - distanceArray[lowIndex]) ) ) *
- (factorArray[highIndex] - factorArray[lowIndex]) ) +
- factorArray[lowIndex] ;
- if (debugFlag)
- debugPrint(" findFactor returns " + outputFactor);
- return outputFactor;
- }
- }
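
findFactor is a piecewise-linear lookup over a monotonically increasing distance array, using binary search to bracket the query value. The same idea, reduced to its essentials (an illustrative sketch, not a replacement for the method above):

    // Compact illustrative version of the piecewise-linear lookup above.
    // distances must be monotonically increasing, same length as factors.
    static float interpolateFactor(double d, double[] distances, float[] factors) {
        if (distances == null || factors == null || distances.length < 2)
            return -1.0f;                              // error convention as above
        int last = distances.length - 1;
        if (d <= distances[0])    return factors[0];
        if (d >= distances[last]) return factors[last];
        int lo = 0, hi = last;
        while (lo < hi - 1) {                          // binary search for bracket
            int mid = lo + (hi - lo) / 2;
            if (d <= distances[mid]) hi = mid; else lo = mid;
        }
        float t = (float)((d - distances[lo]) / (distances[hi] - distances[lo]));
        return factors[lo] + t * (factors[hi] - factors[lo]);
    }
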
-
- /**
- * CalculateDistanceAttenuation
- *
- * Simply calls generic (for PointSound) 'findFactor()' with
- * a single set of attenuation distance and gain scale factor arrays.
- */
- float calculateDistanceAttenuation(float distance) {
- float factor = 1.0f;
- factor = findFactor((double)distance, this.attenuationDistance,
- this.attenuationGain);
- if (factor >= 0.0)
- return (factor);
- else
- return (1.0f);
- }
-
- /* ******************
- *
- * Pan Sample
- *
- * ******************/
- /*
- * Sets pan and delay for a single sample associated with this Sound.
- * Front and Back quadrants are treated the same.
- */
- void panSample(AuralParameters attribs) {
- int quadrant = 1;
- float intensityHigh = 1.0f;
- float intensityLow = 0.125f;
- float intensityDifference = intensityHigh - intensityLow;
-
- //TODO: time around "average" default head
- // int delayHigh = 32; // 32.15 samples = .731 ms
- // int delayLow = 0;
-
- float intensityOffset; // 0.0 -> 1.0 then 1.0 -> 0.0 for full rotation
- float halfX;
- int id;
- int err;
-
- float nearZero = 0.000001f;
- float nearOne = 0.999999f;
- float nearNegativeOne = -nearOne;
- float halfPi = (float)Math.PI * 0.5f;
- /*
- * Parameters used for IID and ITD equations.
- * Name of parameters (as used in Guide, E.3) are denoted in comments.
- */
- float distanceSourceToCenterEar = 0.0f; // Dh
- float lastDistanceSourceToCenterEar = 0.0f;
- float distanceSourceToRightEar = 0.0f; // Ef or Ec
- float distanceSourceToLeftEar = 0.0f; // Ef or Ec
- float distanceBetweenEars = 0.18f; // De
- float radiusOfHead = 0.0f; // De/2
- float radiusOverDistanceToSource = 0.0f; // De/2 * 1/Dh
-
- float alpha = 0.0f; // 'alpha'
- float sinAlpha = 0.0f; // sin(alpha);
- float gamma = 0.0f; // 'gamma'
-
-        // Speed of Sound (unaffected by rolloff) in meters per millisecond
- float speedOfSound = attribs.SPEED_OF_SOUND;
- float invSpeedOfSound = 1.0f / attribs.SPEED_OF_SOUND;
-
- float sampleRate = 44.1f; // 44 samples/millisec
-
- boolean rightEarClosest = false;
- boolean soundFromBehind = false;
-
- float distanceGain = 1.0f;
- float allGains = this.gain; // product of gain scale factors
-
- Point3f workingPosition = new Point3f();
- Point3f workingCenterEar = new Point3f();
-
-        // Assumes that head and ear positions can be retrieved from the universe
-
- Vector3f mixScale = new Vector3f(); // for mix*Samples code
-
- // Use transformed position of this sound
- workingPosition.set(positions[currentIndex]);
- workingCenterEar.set(centerEars[currentIndex]);
- if (debugFlag) {
- debugPrint("panSample:workingPosition from" +
- " positions["+currentIndex+"] -> " +
- workingPosition.x + ", " + workingPosition.y + ", " +
- workingPosition.z + " for pointSound " + this);
- debugPrint("panSample:workingCenterEar " +
- workingCenterEar.x + " " + workingCenterEar.y + " " +
- workingCenterEar.z);
- debugPrint("panSample:xformLeftEar " +
- xformLeftEar.x + " " + xformLeftEar.y + " " +
- xformLeftEar.z);
- debugPrint("panSample:xformRightEar " +
- xformRightEar.x + " " + xformRightEar.y + " " +
- xformRightEar.z);
- }
-
- // Create the vectors from the sound source to head positions
- sourceToCenterEar.x = workingCenterEar.x - workingPosition.x;
- sourceToCenterEar.y = workingCenterEar.y - workingPosition.y;
- sourceToCenterEar.z = workingCenterEar.z - workingPosition.z;
- sourceToRightEar.x = xformRightEar.x - workingPosition.x;
- sourceToRightEar.y = xformRightEar.y - workingPosition.y;
- sourceToRightEar.z = xformRightEar.z - workingPosition.z;
- sourceToLeftEar.x = xformLeftEar.x - workingPosition.x;
- sourceToLeftEar.y = xformLeftEar.y - workingPosition.y;
- sourceToLeftEar.z = xformLeftEar.z - workingPosition.z;
-
- /*
- * get distances from SoundSource to
- * (i) head origin
- * (ii) right ear
- * (iii) left ear
- */
- distanceSourceToCenterEar = workingPosition.distance(workingCenterEar);
- distanceSourceToRightEar = workingPosition.distance(xformRightEar);
- distanceSourceToLeftEar = workingPosition.distance(xformLeftEar);
- distanceBetweenEars = xformRightEar.distance(xformLeftEar);
- if (debugFlag)
- debugPrint(" distance from left,right ears to source: = (" +
- distanceSourceToLeftEar + ", " + distanceSourceToRightEar + ")");
-
- radiusOfHead = distanceBetweenEars * 0.5f;
- if (debugFlag)
- debugPrint(" radius of head = " + radiusOfHead );
- radiusOverDistanceToSource = // De/2 * 1/Dh
- radiusOfHead/distanceSourceToCenterEar;
- if (debugFlag)
- debugPrint(" radius over distance = " + radiusOverDistanceToSource );
- if (debugFlag) {
- debugPrint("panSample:source to center ear " +
- sourceToCenterEar.x + " " + sourceToCenterEar.y + " " +
- sourceToCenterEar.z );
- debugPrint("panSample:xform'd Head ZAxis " +
- xformHeadZAxis.x + " " + xformHeadZAxis.y + " " +
- xformHeadZAxis.z );
- debugPrint("panSample:length of sourceToCenterEar " +
- sourceToCenterEar.length());
- debugPrint("panSample:length of xformHeadZAxis " +
- xformHeadZAxis.length());
- }
-
- // Dot Product
- double dotProduct = (double)(
- (sourceToCenterEar.dot(xformHeadZAxis))/
- (sourceToCenterEar.length() * xformHeadZAxis.length()));
- if (debugFlag)
- debugPrint( " dot product = " + dotProduct );
- alpha = (float)(Math.acos(dotProduct));
- if (debugFlag)
- debugPrint( " alpha = " + alpha );
-
- if (alpha > halfPi) {
- if (debugFlag)
- debugPrint(" sound from behind");
- soundFromBehind = true;
- alpha = (float)Math.PI - alpha;
- if (debugFlag)
- debugPrint( " PI minus alpha =>" + alpha );
- }
- else {
- soundFromBehind = false;
- if (debugFlag)
- debugPrint(" sound from in front");
- }
-
- gamma = (float)(Math.acos(radiusOverDistanceToSource));
- if (debugFlag)
- debugPrint( " gamma " + gamma );
-
- rightEarClosest =
- (distanceSourceToRightEar>distanceSourceToLeftEar) ? false : true ;
- /*
- * Determine the quadrant sound is in
- */
- if (rightEarClosest) {
- if (debugFlag)
- debugPrint( " right ear closest");
- if (soundFromBehind)
- quadrant = 4;
- else
- quadrant = 1;
- }
- else {
- if (debugFlag)
- debugPrint( " left ear closest");
- if (soundFromBehind)
- quadrant = 3;
- else
- quadrant = 2;
- }
- sinAlpha = (float)(Math.sin((double)alpha));
- if (sinAlpha < 0.0) sinAlpha = -sinAlpha;
- if (debugFlag)
- debugPrint( " sin(alpha) " + sinAlpha );
-
- /*
- * The path from sound source to the farthest ear is always indirect
- * (it wraps around part of the head).
- * Calculate distance wrapped around the head for farthest ear
- */
- float DISTANCE = (float)Math.sqrt((double)
- distanceSourceToCenterEar * distanceSourceToCenterEar +
- radiusOfHead * radiusOfHead);
- if (debugFlag)
- debugPrint( " partial distance from edge of head to source = "
- + distanceSourceToCenterEar);
- if (rightEarClosest) {
- distanceSourceToLeftEar =
- DISTANCE + radiusOfHead * (halfPi+alpha-gamma);
- if (debugFlag)
- debugPrint(" new distance from left ear to source = "
- + distanceSourceToLeftEar);
- }
- else {
- distanceSourceToRightEar =
- DISTANCE + radiusOfHead * (halfPi+alpha-gamma);
- if (debugFlag)
- debugPrint(" new distance from right ear to source = "
- + distanceSourceToRightEar);
- }
- /*
-         * The path from the sound source to the closest ear could either
- * be direct or indirect (wraps around part of the head).
- * if sinAlpha >= radiusOverDistance path of sound to closest ear
- * is direct, otherwise it is indirect
- */
- if (sinAlpha < radiusOverDistanceToSource) {
- if (debugFlag)
- debugPrint(" closest path is also indirect ");
- // Path of sound to closest ear is indirect
-
- if (rightEarClosest) {
- distanceSourceToRightEar =
- DISTANCE + radiusOfHead * (halfPi-alpha-gamma);
- if (debugFlag)
- debugPrint(" new distance from right ear to source = "
- + distanceSourceToRightEar);
- }
- else {
- distanceSourceToLeftEar =
- DISTANCE + radiusOfHead * (halfPi-alpha-gamma);
- if (debugFlag)
- debugPrint(" new distance from left ear to source = "
- + distanceSourceToLeftEar);
- }
- }
- else {
- if (debugFlag)
- debugPrint(" closest path is direct ");
- if (rightEarClosest) {
- if (debugFlag)
- debugPrint(" direct distance from right ear to source = "
- + distanceSourceToRightEar);
- }
- else {
- if (debugFlag)
- debugPrint(" direct distance from left ear to source = "
- + distanceSourceToLeftEar);
- }
- }
-
- /**
-         * Short-cut taken. Rather than using actual delays from the source
-         * (where the overall distances would be taken into account in
-         * determining delay) only the difference between the left and right
-         * delays is applied.
-         * This approach will be perceptibly wrong for sound sources that
-         * are very far away from the listener, where both ears would have
-         * large delays.
- */
- sampleRate = channel.rateInHz * (0.001f); // rate in milliseconds
- if (rightEarClosest) {
- rightDelay = 0;
- leftDelay = (int)((distanceSourceToLeftEar - distanceSourceToRightEar) *
- invSpeedOfSound * sampleRate);
- }
- else {
- leftDelay = 0;
- rightDelay = (int)((distanceSourceToRightEar - distanceSourceToLeftEar) *
- invSpeedOfSound * sampleRate);
- }
-
- if (debugFlag) {
- debugPrint(" using inverted SoS = " + invSpeedOfSound);
- debugPrint(" and sample rate = " + sampleRate);
- debugPrint(" left and right delay = ("
- + leftDelay + ", " + rightDelay + ")");
- }
-
- // What should the gain be for the different ears???
- // TODO: now using a hack that sets gain based on a unit circle!!!
- workingPosition.sub(workingCenterEar); // offset sound pos. by head origin
- // normalize; put Sound on unit sphere around head origin
- workingPosition.scale(1.0f/distanceSourceToCenterEar);
- if (debugFlag)
- debugPrint(" workingPosition after unitization " +
- workingPosition.x+" "+workingPosition.y+" "+workingPosition.z );
-
- /*
- * Get the correct distance gain scale factor from attenuation arrays.
- * This requires that sourceToCenterEar vector has been calculated.
- */
- // TODO: now using distance from center ear to source
- // Using distances from each ear to source would be more accurate
- distanceGain = calculateDistanceAttenuation(distanceSourceToCenterEar);
-
- allGains *= distanceGain;
-
- /*
- * Add angular gain (for Cone sound)
- */
- if (debugFlag)
- debugPrint(" all Gains (without angular gain) " + allGains);
-        // assume that the transformed Position is already calculated
- allGains *= this.calculateAngularGain();
- if (debugFlag)
- debugPrint(" (incl. angular gain) " + allGains);
-
- halfX = workingPosition.x/2.0f;
- if (halfX >= 0)
- intensityOffset = (intensityDifference * (0.5f - halfX));
- else
- intensityOffset = (intensityDifference * (0.5f + halfX));
-
- /*
- * For now have delay constant for front back sound for now
- */
- if (debugFlag)
- debugPrint("panSample: quadrant " + quadrant);
- switch (quadrant) {
- case 1:
- // Sound from front, right of center of head
- case 4:
- // Sound from back, right of center of head
- rightGain = allGains * (intensityHigh - intensityOffset);
- leftGain = allGains * (intensityLow + intensityOffset);
- break;
-
- case 2:
- // Sound from front, left of center of head
- case 3:
-              // Sound from back, left of center of head
- leftGain = allGains * (intensityHigh - intensityOffset);
- rightGain = allGains * (intensityLow + intensityOffset);
- break;
- } /* switch */
- if (debugFlag)
- debugPrint("panSample: left/rightGain " + leftGain +
- ", " + rightGain);
-
- // Combines distance and angular filter to set this sample's current
- // frequency cutoff value
- calculateFilter(distanceSourceToCenterEar, attribs);
-
- } /* panSample() */
-
-// NOTE: setGain in audioengines.Sample is used to set/get the user-supplied factor;
-// this class uses this single gain value to calculate the left and
-// right gain values
-}
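
The interaural time difference applied in panSample reduces to this: the closer ear gets no delay, and the farther ear is delayed by the extra path length divided by the speed of sound, expressed in samples. A self-contained sketch of that conversion (parameter units are stated in comments; the names are illustrative, not taken from the class):

    // ITD (interaural time difference) expressed in whole samples.
    // speedOfSound is in meters per millisecond (e.g. ~0.344), sampleRateHz
    // is the playback rate in samples per second.
    static int[] itdDelays(float distToLeftEar, float distToRightEar,
                           float speedOfSound, float sampleRateHz) {
        float samplesPerMs = sampleRateHz * 0.001f;
        float extraPath = Math.abs(distToLeftEar - distToRightEar); // meters
        int delay = (int)(extraPath / speedOfSound * samplesPerMs); // samples
        return (distToRightEar <= distToLeftEar)
                ? new int[] { delay, 0 }   // {leftDelay, rightDelay}: right ear closer
                : new int[] { 0, delay };  // left ear closer
    }
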
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSSample.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSSample.java
deleted file mode 100644
index c904b8c..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSSample.java
+++ /dev/null
@@ -1,363 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-/*
- * Java Sound Sample object
- *
- * IMPLEMENTATION NOTE: The JavaSoundMixer is incomplete and really needs
- * to be rewritten.
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-import java.io.InputStream;
-import java.net.URL;
-
-import javax.media.j3d.MediaContainer;
-import javax.media.j3d.View;
-import javax.sound.sampled.AudioInputStream;
-import javax.sound.sampled.DataLine;
-
-import com.sun.j3d.audioengines.AuralParameters;
-
-/**
- * The Sample Class extended for Java Sound Mixer specific audio device.
- */
-
-class JSSample extends com.sun.j3d.audioengines.Sample
-{
- /*
- * NOTE: for this device type there is exactly one sample associated
- * with each sound.
- */
-
- /**
- * Sound Data Types
- *
- * Samples can be processed as streaming or buffered data.
- * Fully spatializing sound sources may require data to be buffered.
- *
- * Sound data specified as Streaming is not copied by the AudioDevice
-     * driver implementation. It is up to the application to ensure that
-     * this data is continuously accessible during sound rendering.
-     * Furthermore, full sound spatialization may not be possible for
-     * all AudioDevice implementations on unbuffered sound data.
- */
- static final int STREAMING_AUDIO_DATA = 1;
- /**
- * Sound data specified as Buffered is copied by the AudioDevice
- * driver implementation.
- */
- static final int BUFFERED_AUDIO_DATA = 2;
- /**
- * MIDI data
- * TODO: differentiate between STREAMING and BUFFERED MIDI data
- * right now all MIDI data is buffered
- */
- static final int STREAMING_MIDI_DATA = 3;
- static final int BUFFERED_MIDI_DATA = 3;
- static final int UNSUPPORTED_DATA_TYPE = -1;
-
- static final int NULL_SAMPLE = -1;
-
- /**
- * sound data types: BUFFERED (cached) or STREAMING (non-cached)
- */
- int dataType = BUFFERED_AUDIO_DATA;
-
- JSChannel channel = null;
-
- /**
- * Offset pointer within currently playing sample data
- */
- long dataOffset = 0;
-
- /*
- * Maintain continuously playing silent sound sources.
- */
- long timeDeactivated = 0;
- long positionDeactivated = 0;
-
- long sampleLength = 0;
- long loopStartOffset = 0; // for most this will be 0
- long loopLength = 0; // for most this is end sample - sampleLength
- long attackLength = 0; // portion of sample before loop section
- long releaseLength = 0; // portion of sample after loop section
-
- float rateRatio = 1.0f;
-    float currentRateRatio = -1.0f; // last actual rate ratio sent to device
- float targetRateRatio = -1.0f;
- boolean rampRateFlag = false;
-
- public JSSample() {
- super();
- if (debugFlag)
- debugPrintln("JSSample constructor");
- }
-
- // the only public methods are those declared in the audioengines
- // package as public
-
- /*
-     * This executes the code necessary to set fields to their correct current
-     * values before JavaSoundMixer either starts or updates the
-     * sample through calls to JSThread.
- */
- @Override
- public void render(int dirtyFlags, View view, AuralParameters attribs) {
- if (debugFlag)
- debugPrint("JSSample.render ");
- // if this is starting set gain, delay (for Pos), freq rate ...
- // TODO: NOT SURE - leaving this in for now
-        if (attribs != null) {
-            // dereference attribs only after the null check
-            float freqScaleFactor = attribs.frequencyScaleFactor;
-            if (freqScaleFactor <= 0.0f) {
-                // TODO: Pause Sample
-            }
-            else
-                rateRatio = currentRateRatio * freqScaleFactor;
-        }
-        else
-            rateRatio = currentRateRatio;
- }
-
- /**
- * Clears/re-initialize fields associated with sample data for
- * this sound,
- * and frees any device specific data associated with this sample.
- */
- @Override
- public void clear() {
- super.clear();
- if (debugFlag)
- debugPrintln("JSSample.clear() entered");
- // TODO: unload sound data at device
-// null out samples element that points to this?
-// would this cause samples list size to shrink?
-// if sample elements are never freed then does this mean
-// a have a memory leak?
- dataType = UNSUPPORTED_DATA_TYPE;
- dataOffset = 0;
- timeDeactivated = 0;
- positionDeactivated = 0;
- sampleLength = 0;
- loopStartOffset = 0;
- loopLength = 0;
- attackLength = 0;
- releaseLength = 0;
- rateRatio = 1.0f;
- channel = null;
- if (debugFlag)
- debugPrintln("JSSample.clear() exited");
- }
-
- // @return error true if error occurred
- boolean load(MediaContainer soundData) {
- /**
- * Get the AudioInputStream first.
-         * The MediaContainer passed to this method is assumed to be a clone of
-         * the application node with the query capability bits set on.
- */
- String path = soundData.getURLString();
- URL url = soundData.getURLObject();
- InputStream inputStream = soundData.getInputStream();
- boolean cacheFlag = soundData.getCacheEnable();
- AudioInputStream ais = null;
- DataLine dataLine = null;
-
- // TODO: How do we determine if the file is a MIDI file???
- // for now set dataType to BUFFERED_ or STREAMING_AUDIO_DATA
- // used to test for ais instanceof AudioMidiInputStream ||
- // ais instanceof AudioRmfInputStream )
- // then set dataType = JSSample.BUFFERED_MIDI_DATA;
- // QUESTION: can non-cached MIDI files ever be supported ?
- /****************
- // TODO: when we have a way to determine data type use code below
- if (dataType==UNSUPPORTED_DATA_TYPE OR error_occurred)
- clearSound(index);
- if (debugFlag)
- debugPrintln("JavaSoundMixer.prepareSound get dataType failed");
- return true;
- }
- *****************/
- // ...for now just check cacheFlag
- if (cacheFlag)
- dataType = BUFFERED_AUDIO_DATA;
- else
- dataType = STREAMING_AUDIO_DATA;
-
- if ((url == null) && (inputStream == null) && (path == null)) {
- if (debugFlag)
- debugPrint("JavaSoundMixer.loadSound null data - return error");
- return true;
- }
-
- // get ais
- if (path != null) {
- // generate url from string, and pass url to driver
- if (debugFlag) {
- debugPrint("JavaSoundMixer.loadSound with path = " + path);
- }
- try {
- url = new URL(path);
- }
- catch (Exception e) {
- // do not throw an exception while rendering
- return true;
- }
- }
-
- // get DataLine channel based on data type
- if (dataType == BUFFERED_AUDIO_DATA) {
- if (debugFlag)
- debugPrintln("JSSample.load dataType = BUFFERED ");
- channel = new JSClip();
- if (debugFlag)
- debugPrintln(" calls JSClip.initAudioInputStream");
- if (url != null)
- ais = channel.initAudioInputStream(url, cacheFlag);
- else if (inputStream != null)
- ais = channel.initAudioInputStream(inputStream, cacheFlag);
- if (ais == null) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer.prepareSound " +
- "initAudioInputStream() failed");
- return true;
- }
- if (debugFlag)
- debugPrintln(" calls JSClip.initDataLine");
- dataLine = channel.initDataLine(ais);
- }
- else if (dataType == STREAMING_AUDIO_DATA) {
- if (debugFlag)
- debugPrintln("JSSample.load dataType = STREAMING ");
- channel = new JSStream();
- if (debugFlag)
- debugPrintln(" calls JSStream.initAudioInputStream");
- if (url != null)
- ais = channel.initAudioInputStream(url, cacheFlag);
- else if (inputStream != null)
- ais = channel.initAudioInputStream(inputStream, cacheFlag);
- if (ais == null) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer.prepareSound " +
- "initAudioInputStream() failed");
- return true;
- }
- if (debugFlag)
- debugPrintln(" calls JSStream.initDataLine");
- dataLine = channel.initDataLine(ais);
- }
- else {
- if (debugFlag)
- debugPrintln("JSSample.load doesn't support MIDI yet");
- }
- if (dataLine == null) {
- if (debugFlag)
- debugPrint("JSSample.load initDataLine failed ");
- channel = null;
- return true;
- }
- duration = channel.getDuration();
- if (debugFlag)
- debugPrint("JSSample.load channel duration = " + duration);
- /*
- * Since no error occurred while loading, save all the characteristics
- * for the sound in the sample.
- */
- setDirtyFlags(0xFFFF);
- setSoundType(soundType);
- setSoundData(soundData);
-
- if (debugFlag)
- debugPrintln("JSSample.load returned without error");
- return false;
- }
-
- void reset() {
- if (debugFlag)
- debugPrint("JSSample.reset() exit");
- rateRatio = 1.0f;
- }
-
-// TODO: NEED methods for any field accessed by both JSThread and
-// JavaSoundMixer so that we can make these MT safe??
- /*
- * Process request for Filtering fields
- */
- boolean getFilterFlag() {
- return false;
- }
- float getFilterFreq() {
- return -1.0f;
- }
-
- void setCurrentRateRatio(float ratio) {
- currentRateRatio = ratio;
- }
-
- float getCurrentRateRatio() {
- return currentRateRatio;
- }
-
- void setTargetRateRatio(float ratio) {
- targetRateRatio = ratio;
- }
-
- float getTargetRateRatio() {
- return targetRateRatio;
- }
-
- void setRampRateFlag(boolean flag) {
- rampRateFlag = flag;
- }
-
- boolean getRampRateFlag() {
- return rampRateFlag;
- }
-
- void setDataType(int type) {
- dataType = type;
- }
-
- int getDataType() {
- return dataType;
- }
-
-}
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSStream.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSStream.java
deleted file mode 100644
index 22a87bc..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSStream.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-/**
- * The JSStream class defines audio output methods that call the JavaSound
- * API methods for streams.
- *
- * <p>
- * NOTE: This class is not yet implemented.
- */
-
-class JSStream extends JSChannel {
- private static boolean warningReported = false;
-
- JSStream() {
- // Report a "not implemented" warning message
- if (!warningReported) {
- System.err.println("***");
- System.err.println("*** WARNING: JavaSoundMixer: Streaming (uncached) audio not implemented");
- System.err.println("***");
- warningReported = true;
- }
- }
-}
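JSStream above is only a warning stub. For context, uncached playback under
Java Sound is normally driven by feeding an AudioInputStream to a
SourceDataLine in small chunks instead of loading it into a Clip. The
following is a generic sketch of that pattern; the class and method names are
invented here and this is not code recovered from the removed file.

import java.io.IOException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

class StreamingPlaybackSketch {
    // Plays an already-opened AudioInputStream without caching it in memory.
    static void play(AudioInputStream ais)
            throws IOException, LineUnavailableException {
        AudioFormat format = ais.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        byte[] buffer = new byte[4096];
        int n;
        while ((n = ais.read(buffer, 0, buffer.length)) != -1) {
            line.write(buffer, 0, n);   // feed the mixer in small chunks
        }
        line.drain();                   // wait for queued audio to finish
        line.close();
        ais.close();
    }
}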
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JSThread.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JSThread.java
deleted file mode 100644
index 0d5a090..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JSThread.java
+++ /dev/null
@@ -1,854 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-/*
- * JavaSound engine Thread
- *
- * IMPLEMENTATION NOTE: The JavaSoundMixer is incomplete and really needs
- * to be rewritten. When this is done, we may or may not need this class.
- */
-
-import com.sun.j3d.audioengines.AudioEngine3D;
-import com.sun.j3d.audioengines.AudioEngine3DL2;
-
-/**
- * The Thread Class extended for JavaSound Mixer specific audio device
- * calls that dynamically, in "real-time", change engine parameters
- * such as volume/gain and sample-rate/frequency (pitch).
- */
-
-class JSThread extends com.sun.j3d.audioengines.AudioEngineThread {
-
- /**
- * The thread data for this thread
- */
- int totalChannels = 0;
- /**
- * flags denoting if dynamic gain or rate interpolation is to be performed
- */
- boolean rampGain = false;
-
- // global thread flag rampRate is set true only when setTargetRate is called
- // for any sample. It is cleared only by doWork when no sample
- // has a need for the rate to be ramped any further.
- boolean rampRate = false;
-
-/*** TODO:
- *
- * scalefactors applied to current sample rate to determine delta changes
- * in rate (in Hz)
- *
- float currentGain = 1.0f;
- float targetGain = 1.0f;
-***********/
-
- // reference to engine that created this thread
- AudioEngine3D audioEngine = null;
-
- /**
- * This constructor creates the thread and keeps a reference to the engine.
- */
- JSThread(ThreadGroup t, AudioEngine3DL2 engine) {
- super(t, "J3D-JavaSoundThread");
- audioEngine = engine;
- // TODO: really get total JavaSound channels
- totalChannels = 32;
- if (debugFlag)
- debugPrint("JSThread.constructor("+t+")");
- }
-
-
-
- /**
- * This method performs one iteration of the pending work to do.
- *
- * Wildly "garbled" sounds were caused by unequal changes in delta
- * time versus delta distances (resulting in jumps in the rate factors
- * calculated for Doppler). This work thread is meant to smoothly
- * increment/decrement changes in rate (and other future parameters)
- * until the target value is reached.
- */
- @Override
- synchronized public void doWork() {
- if (debugFlag)
- debugPrint("JSThread.doWork()");
-/*******
- while (rampRate || rampGain) {
-*********/
-/****** DESIGN
-// Loop while sound is playing, reget attributes and gains/reverb,... params
-// update lowlevel params then read modify then copy to line(s)
-
-can keep my own loop count for streams??? not really
-
-*******/
- // QUESTION: will size ever get smaller after get performed???
- int numSamples = audioEngine.getSampleListSize();
- JSSample sample = null;
- int numRateRamps = 0;
- for (int index = 0; index < numSamples; index++) {
- // loop thru samples looking for ones needing rate incremented
- sample = (JSSample)audioEngine.getSample(index);
- if (sample == null)
- continue;
- if (sample.getRampRateFlag()) {
- if (debugFlag)
- debugPrint(" rampRate true");
- boolean endOfRampReached = adjustRate(sample);
- sample.setRampRateFlag(!endOfRampReached);
- if (!endOfRampReached)
- numRateRamps++;
- }
- // TODO: support changes in gain this way as well
- }
- if (numRateRamps > 0) {
- rampRate = true;
- runMonitor(RUN, 0, null);
- }
- else
- rampRate = false;
-/*********
- try {
- Thread.sleep(4);
- } catch (InterruptedException e){}
-*********/
-/********
- } // while
-*********/
- // otherwise do nothing
- }
-
- int getTotalChannels() {
- return (totalChannels);
- }
-
- /**
- * Gradually change rate scale factor
- *
- * If the rate changes too much at once, it sounds like a
- * jump, so we need to change it gradually over time.
- * Since an octave delta change up is 2.0 but down is 0.5, the forced
- * "max" rate of change is different for each direction.
- * @return true if target rate value was reached
- */
- boolean adjustRate(JSSample sample) {
- // QUESTION: what should max delta rate changes be
- // Using 1/32 of a half-step (1/12 of an octave)???
- double maxRateChangeDown = 0.00130213;
- double maxRateChangeUp = 0.00260417;
-
- double lastActualRateRatio = sample.getCurrentRateRatio();
- double requestedRateRatio = sample.getTargetRateRatio();
- boolean endOfRamp = false; // flag denotes if target rate reached
- if ( lastActualRateRatio > 0 ) {
- double sampleRateRatio = requestedRateRatio; // in case diff = 0
- double diff = 0.0;
- if (debugFlag) {
- debugPrint("JSThread.adjustRate: between " +
- lastActualRateRatio + " & " + requestedRateRatio);
- }
- diff = requestedRateRatio - lastActualRateRatio;
- if (diff > 0.0) { // direction of movement is towards listener
- // inch up towards the requested target rateRatio
- if (diff >= maxRateChangeUp) {
- sampleRateRatio = lastActualRateRatio + maxRateChangeUp;
- if (debugFlag) {
- debugPrint(" adjustRate: " +
- "diff >= maxRateChangeUp so ");
- debugPrint(" adjustRate: " +
- " sampleRateRatio incremented up by max");
- }
- endOfRamp = false; // target value not reached
- }
- /*
- * otherwise delta change is within tolerance
- * so use requested RateRatio as calculated w/out change
- */
- else {
- sampleRateRatio = requestedRateRatio;
- if (debugFlag) {
- debugPrint(" adjustRate: " +
- " requestedRateRatio reached");
- }
- endOfRamp = true; // reached
- }
- }
- else if (diff < 0.0) { // movement is away from listener
- // inch down towards the requested target rateRatio
- if ((-diff) >= maxRateChangeDown) {
- sampleRateRatio = lastActualRateRatio - maxRateChangeDown;
- if (debugFlag) {
- debugPrint(" adjustRate: " +
- "-(diff) >= maxRateChangeDown so ");
- debugPrint(" adjustRate: " +
- " sampleRateRatio incremented down by max ");
- }
- endOfRamp = false; // target value not reached
- }
- /*
- * otherwise negative delta change is within tolerance so
- * use sampleRateRatio as calculated w/out change
- */
- else {
- sampleRateRatio = requestedRateRatio;
- if (debugFlag) {
- debugPrint(" adjustRate: " +
- " requestedRateRatio reached");
- }
- endOfRamp = true; // reached
- }
- }
- else // there is no difference between last set and requested rates
- return true;
-
- this.setSampleRate(sample, (float)sampleRateRatio);
- }
- else {
- // this is the first time thru with a rate change
- if (debugFlag) {
- debugPrint(" adjustRate: " +
- "last requested rateRatio not set yet " +
- "so sampleRateRatio left unchanged");
- }
- this.setSampleRate(sample, (float)requestedRateRatio);
- endOfRamp = false; // target value not reached
- }
- return endOfRamp;
- } // adjustRate
-
- void setSampleRate(JSSample sample, JSAuralParameters attribs) {
-// TODO:
- }
-
- // gain set at start sample time as well
- void setSampleGain(JSSample sample, JSAuralParameters attribs) {
-/*******
- // take fields as already set in sample and updates gain
- // called after sample.render performed
- if (debugFlag)
- debugPrint("JSThread.setSampleGain()");
-leftGain, rightGain
- if (debugFlag) {
- debugPrint(" " +
- "StereoGain during update " + leftGain +
- ", " + rightGain);
- debugPrint(" " +
- "StereoDelay during update " + leftDelay +
- ", " + rightDelay);
- }
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
- boolean muted = sample.getMuteFlag();
-
- if (debugFlag)
- debugPrint("setStereoGain for sample "+sample+" " + leftGain +
- ", " + rightGain);
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA ||
- dataType == JSAuralParameters.BUFFERED_AUDIO_DATA ) {
- thread.setSampleGain(sample, leftGain);
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- thread.setSampleGain(
- ((JSPositionalSample)sample).getSecondIndex(), rightGain); thread.setSampleGain(
- ((JSPositionalSample)sample).getReverbIndex(), reverbGain);
- }
- }
- // TODO: JavaSound does not support MIDI song panning yet
- else if (dataType == JSAuralParameters.STREAMING_MIDI_DATA ||
-
- dataType == JSAuralParameters.BUFFERED_MIDI_DATA) {
- // Stereo samples not used for Midi Song playback
- thread.setSampleGain(sample, (leftGain+rightGain) );
- ******
- // -1.0 far left, 0.0 center, 1.0 far right
- position = (leftGain - rightGain) / (leftGain + rightGain);
- JSMidi.setSamplePan(sample, position);
-
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- JSMidi.setSampleGain(
- ((JSPositionalSample)sample).getSecondIndex(), rightGain); JSMidi.setSampleGain(
- ((JSPositionalSample)sample).getReverbIndex(), reverbGain);
- }
- ******
- }
- else {
- if (debugFlag)
- debugPrint( "JSThread: Internal Error setSampleGain dataType " +
- dataType + " invalid");
- return;
- }
- *****
- // force specific gain
- // go ahead and set gain immediately
- this.setSampleGain(sample, scaleFactor);
- rampGain = false; // disable ramping of gain
-******/
- }
-
- void setSampleDelay(JSSample sample, JSAuralParameters attribs) {
-/******
- // take fields as already set in sample and updates delay
- // called after sample.render performed
- // adjust by attrib rolloff
- float delayTime = attribs.reverbDelay * attribs.rolloff;
-
- leftDelay = (int)(sample.leftDelay * attribs.rolloff);
- rightDelay = (int)(sample.rightDelay * attribs.rolloff);
-leftDelay, rightDelay
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
- if (debugFlag)
- debugPrint("setStereoDelay for sample "+sample+" " + leftDelay +
- ", " + rightDelay);
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- JSStream.setSampleDelay(
- sample, leftDelay);
- JSStream.setSampleDelay(
- ((JSPositionalSample)sample).getSecondIndex(), rightDelay);
- }
- else
- JSStream.setSampleDelay(sample, 0);
- }
- else if (dataType == JSAuralParameters.BUFFERED_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- JSClip.setSampleDelay(
- sample, leftDelay);
- JSClip.setSampleDelay(
- ((JSPositionalSample)sample).getSecondIndex(), rightDelay);
- }
- else
- JSClip.setSampleDelay(sample, 0);
- }
- else if (dataType == JSAuralParameters.STREAMING_MIDI_DATA ||
-
- dataType == JSAuralParameters.BUFFERED_MIDI_DATA) {
- ********
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- JSMidi.setSampleDelay(
- sample, leftDelay);
- JSMidi.setSampleDelay(
- ((JSPositionalSample)sample).getSecondIndex(), rightDelay);
- }
- else
- ********
- JSMidi.setSampleDelay(sample, 0);
- }
- else {
- if (debugFlag)
- debugPrint( "JSThread: Internal Error setSampleDelay dataType " +
- dataType + " invalid");
- return;
- }
-******/
- }
-
- void setTargetGain(JSSample sample, float scaleFactor) {
-/**********
-// TODO: implement this
- // current gain is used as starting scalefactor for ramp
-// TEMPORARY: for now just set gain
- this.setSampleGain(sample, scaleFactor);
- rampGain = false;
- rampGain = true;
- targetGain = scaleFactor;
- runMonitor(RUN, 0, null);
-**********/
- }
-
- void setRate(JSSample sample, float rateScaleFactor) {
- // force specific rate
- // go ahead and set rate immediately
- // take fields as already set in sample and updates rate
- // called after sample.render performed
- this.setSampleRate(sample, rateScaleFactor);
- // disables rate from being gradually increased or decreased
- // don't set the global thread flag rampRate false just because
- // one sample's rate is set to a specific value.
- sample.setRampRateFlag(false);
- }
-
- void setTargetRate(JSSample sample, float rateScaleFactor) {
- // make gradual change in rate factors up or down to target rate
- sample.setRampRateFlag(true);
- sample.setTargetRateRatio(rateScaleFactor);
- rampRate = true;
- runMonitor(RUN, 0, null);
- }
-
-// TODO: should have methods for delay and pan as well
-
- void setSampleGain(JSSample sample, float gain) {
-/***********
-// QUESTION: What needs to be synchronized???
- if (debugFlag)
- debugPrint("JSThread.setSampleGain for sample "+sample+" " + gain );
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
- boolean muted = sample.getMuteFlag();
-// TODO:
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA)
-{
- com.sun.j3d.audio.J3DHaeStream.setSampleGain(index, gain);
- }
- else if (dataType == JSAuralParameters.BUFFERED_AUDIO_DATA) {
- com.sun.j3d.audio.J3DHaeClip.setSampleGain(index, gain);
- }
- else {
- // dataType==JSAuralParameters.STREAMING_MIDI_DATA
- // dataType==JSAuralParameters.BUFFERED_MIDI_DATA
- com.sun.j3d.audio.J3DHaeMidi.setSampleGain(index, gain);
- }
-***************/
- }
-
- void setSampleRate(JSSample sample, float scaleFactor) {
-/*********
-// QUESTION: What needs to be synchronized???
- // TODO: use sample.rateRatio??
- if (debugFlag)
- debugPrint("JSThread.setSampleRate sample " +
- sample + ", scale factor = " + scaleFactor);
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
-
-// TODO:
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA) {
- com.sun.j3d.audio.J3DHaeStream.scaleSampleRate(index, scaleFactor);
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- com.sun.j3d.audio.J3DHaeStream.scaleSampleRate(
- ((JSPositionalSample)sample).getSecondIndex(),
- scaleFactor);
- com.sun.j3d.audio.J3DHaeStream.scaleSampleRate(
- ((JSPositionalSample)sample).getReverbIndex(),
- scaleFactor);
- }
- }
- else if (dataType == JSAuralParameters.BUFFERED_AUDIO_DATA) {
- com.sun.j3d.audio.J3DHaeClip.scaleSampleRate(index, scaleFactor);
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- com.sun.j3d.audio.J3DHaeClip.scaleSampleRate(
- ((JSPositionalSample)sample).getSecondIndex(),
- scaleFactor);
- com.sun.j3d.audio.J3DHaeClip.scaleSampleRate(
- ((JSPositionalSample)sample).getReverbIndex(),
- scaleFactor);
- }
- }
- else if (dataType == JSAuralParameters.STREAMING_MIDI_DATA ||
- dataType == JSAuralParameters.BUFFERED_MIDI_DATA) {
- com.sun.j3d.audio.J3DHaeMidi.scaleSampleRate(index, scaleFactor);
- // TODO: MIDI only supported for Background sounds
- }
-***********/
- sample.setCurrentRateRatio(scaleFactor);
- }
-
- boolean startSample(JSSample sample) {
-/**********
-// QUESTION: should this have a return values - error - or not??
-
- int returnValue = 0;
- AuralParameters attribs = audioEngine.getAuralParameters();
- int soundType = sample.getSoundType();
- boolean muted = sample.getMuteFlag();
- int dataType = sample.getDataType();
- int loopCount = sample.getLoopCount();
- float leftGain = sample.leftGain;
- float rightGain = sample.rightGain;
- int leftDelay = (int)(sample.leftDelay * attribs.rolloff);
- int rightDelay = (int)(sample.rightDelay * attribs.rolloff);
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA) {
- if (soundType == AudioDevice3D.BACKGROUND_SOUND) {
- returnValue = JSStream.startSample(sample,
- loopCount, leftGain);
- if (debugFlag)
- debugPrint("JSThread " +
- "start stream backgroundSound with gain " + leftGain);
- }
- else { // soundType is POINT_SOUND or CONE_SOUND
- // start up main left and right channels for spatial rendered sound
- returnValue = JSStream.startSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex(),
- loopCount, leftGain, rightGain, leftDelay, rightDelay);
- //
- // start up reverb channel w/out delay even if reverb not on now //
- float reverbGain = 0.0f;
- if (!muted && auralParams.reverbFlag) {
- reverbGain = sample.getGain() *
- attribs.reflectionCoefficient;
- }
- int reverbRtrnVal = JSStream.startSample(
- ((JSPositionalSample)sample).getReverbIndex(), loopCount, reverbGain);
- if (debugFlag)
- debugPrint("JSThread " +
- "start stream positionalSound with gain "+ leftGain +
- ", " + rightGain);
- }
- }
-
- else if (dataType == JSAuralParameters.BUFFERED_AUDIO_DATA) {
- if (soundType == AudioDevice3D.BACKGROUND_SOUND) {
- returnValue = JSClip.startSample(sample,
- loopCount, leftGain );
- if (debugFlag)
- debugPrint("JSThread " +
- "start buffer backgroundSound with gain " + leftGain);
- }
- else { // soundType is POINT_SOUND or CONE_SOUND
- // start up main left and right channels for spatial rendered sound
- returnValue = JSClip.startSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex(),
- loopCount, leftGain, rightGain, leftDelay, rightDelay);
- //
- // start up reverb channel w/out delay even if reverb not on now //
- float reverbGain = 0.0f;
- if (!muted && auralParams.reverbFlag) {
- reverbGain = sample.getGain() *
- attribs.reflectionCoefficient;
- }
- int reverbRtrnVal = JSClip.startSample(
- ((JSPositionalSample)sample).getReverbIndex(),
- loopCount, reverbGain);
-
- if (debugFlag)
- debugPrint("JSThread " +
- "start stream positionalSound with gain " + leftGain
- + ", " + rightGain);
- }
- }
- else if (dataType == JSAuralParameters.STREAMING_MIDI_DATA ||
- dataType == JSAuralParameters.BUFFERED_MIDI_DATA) {
- if (soundType == AudioDevice3D.BACKGROUND_SOUND) {
- returnValue = JSMidi.startSample(sample,
- loopCount, leftGain);
- if (debugFlag)
- debugPrint("JSThread " +
- "start Midi backgroundSound with gain " + leftGain);
- }
- else { // soundType is POINT_SOUND or CONE_SOUND
- // start up main left and right channels for spatial rendered sound
- returnValue = JSMidi.startSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex(),
- loopCount, leftGain, rightGain, leftDelay, rightDelay);
- *******
- // TODO: positional MIDI sounds not supported.
- // The above startSamples really just start on sample
- // Don't bother with reverb channel for now.
-
- //
- // start up reverb channel w/out delay even if reverb not on now //
- float reverbGain = 0.0f;
- if (!muted && auralParams.reverbFlag) {
- reverbGain = sample.getGain() *
- attribs.reflectionCoefficient;
- }
- int reverbRtrnVal = JSMidi.startSample(
- ((JSPositionalSample)sample).getReverbIndex(), loopCount, reverbGain);
- *******
- if (debugFlag)
- debugPrint("JSThread " +
- "start Midi positionalSound with gain "+ leftGain +
- ", " + rightGain);
- }
- }
-
- else {
- if (debugFlag)
- debugPrint(
- "JSThread: Internal Error startSample dataType " +
- dataType + " invalid");
- return false;
- }
- // TODO: have to look at return values and conditionally return 'success'
-**********/
- return true;
- }
-
- boolean stopSample(JSSample sample) {
-/***********
-// QUESTION: should this have a return values - error - or not??
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
-
- int returnValue = 0;
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSStream.stopSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSStream.stopSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- returnValue = JSStream.stopSample(sample);
- }
- else if (dataType == JSAuralParameters.BUFFERED_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSClip.stopSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSClip.stopSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- returnValue = JSClip.stopSample(sample);
- }
- else if (dataType == JSAuralParameters.STREAMING_MIDI_DATA ||
- dataType == JSAuralParameters.BUFFERED_MIDI_DATA) {
-
- *****
- // TODO: positional sounds NOT supported yet
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSMidi.stopSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSMidi.stopSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- *****
- returnValue = JSMidi.stopSample(sample);
- }
- else {
- if (debugFlag)
- debugPrint( "JSThread: Internal Error stopSample dataType " +
- dataType + " invalid");
- return -1;
- }
-
-************/
- return true;
- }
-
-
- void pauseSample(JSSample sample) {
-/**********
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
- int returnValue = 0;
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSStream.pauseSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSStream.pauseSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- returnValue = JSStream.pauseSample(sample);
- }
- else if (dataType == JSAuralParameters.BUFFERED_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSClip.pauseSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSClip.pauseSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- returnValue = JSClip.pauseSample(sample);
- }
- else if (dataType == JSAuralParameters.STREAMING_MIDI_DATA ||
-
- dataType == JSAuralParameters.BUFFERED_MIDI_DATA) {
- *******
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSMidi.pauseSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSMidi.pauseSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- *****
- returnValue = JSMidi.pauseSample(sample);
- }
- else {
- if (debugFlag)
- debugPrint(
- "JSThread: Internal Error pauseSample dataType " +
- dataType + " invalid");
- }
- if (returnValue < 0) {
- if (debugFlag)
- debugPrint( "JSThread: Internal Error pauseSample " +
- "for sample " + sample + " failed");
- }
-// QUESTION: return value or not???
- return;
-*************/
- }
-
- void unpauseSample(JSSample sample) {
-/**************
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
- int returnValue = 0;
- if (dataType == JSAuralParameters.STREAMING_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSStream.unpauseSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSStream.unpauseSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- returnValue = JSStream.unpauseSample(sample);
- }
- else if (dataType == JSAuralParameters.BUFFERED_AUDIO_DATA) {
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSClip.unpauseSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSClip.unpauseSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- returnValue = JSClip.unpauseSample(sample);
- }
- else if (dataType == JSAuralParameters.STREAMING_MIDI_DATA ||
-
- dataType == JSAuralParameters.BUFFERED_MIDI_DATA) {
- *********
- // TODO: positional Midi sounds
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- returnValue = JSMidi.unpauseSamples(sample,
- ((JSPositionalSample)sample).getSecondIndex());
- returnValue = JSMidi.unpauseSample(
- ((JSPositionalSample)sample).getReverbIndex());
- }
- else
- *********
- returnValue = JSMidi.unpauseSample(sample);
- }
- else {
- if (debugFlag)
- debugPrint(
- "JSThread: Internal Error unpauseSample dataType " + dataType + " invalid");
- }
- if (returnValue < 0) {
- if (debugFlag)
- debugPrint( "JSThread: Internal Error unpauseSample " +
- "for sample " + sample + " failed");
-
- }
-// QUESTION: return value or not???
- return;
-*************/
- }
-
-// TODO:
- void muteSample(JSSample sample) {
- // is this sample already muted? If so, don't do anything
-
- // This determines if mute is done as a zero gain or
- // as a stop, advance restart...
- }
-
-// TODO:
- void unmuteSample(JSSample sample) {
- if (debugFlag)
- debugPrint( "JSThread.unmuteSample not implemented");
- }
-
- int startStreams() {
-// QUESTION: return value or not???
- return 0;
- }
- int startStream() {
-// QUESTION: return value or not???
- return 0;
- }
- int startClips() {
-// QUESTION: return value or not???
- return 0;
- }
- int startClip() {
-// QUESTION: return value or not???
- return 0;
- }
-
- /**
- * This initializes this thread. Once this method returns, the thread is
- * ready to do work.
- */
- @Override
- public void initialize() {
- super.initialize();
- // this.setPriority(Thread.MAX_PRIORITY);
- // TODO: init values of fields???
- if (debugFlag)
- debugPrint("JSThread.initialize()");
- // TODO: doesn't do anything yet
- }
-
- /**
- * Code to close the device
- * @return flag: true if closed successfully, false if error
- */
- boolean close() {
- // TODO: for now do nothing
- return false;
- }
-
- @Override
- public void shutdown() {
- }
-
-
-
-
- // default resource clean up method
- @Override
- public void cleanup() {
- super.cleanup();
- if (debugFlag)
- debugPrint("JSThread.cleanup()");
- }
-}
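The doWork/adjustRate pair above ramps each sample's rate ratio toward its
target with a per-iteration cap so that Doppler-induced changes never jump
audibly. A condensed, self-contained sketch of that clamp follows; the
constants mirror the ones above, while the class name and driver loop are
illustrative only.

class RateRampSketch {
    // Per-iteration caps, roughly 1/32 of a half-step, as in JSThread.adjustRate.
    static final double MAX_RATE_CHANGE_DOWN = 0.00130213;
    static final double MAX_RATE_CHANGE_UP   = 0.00260417;

    // Moves current toward target by at most the cap; snaps once within tolerance.
    static double step(double current, double target) {
        double diff = target - current;
        if (diff > MAX_RATE_CHANGE_UP) {
            return current + MAX_RATE_CHANGE_UP;
        }
        if (diff < -MAX_RATE_CHANGE_DOWN) {
            return current - MAX_RATE_CHANGE_DOWN;
        }
        return target;
    }

    public static void main(String[] args) {
        double current = 1.0;
        double target = 1.05;
        int iterations = 0;
        // Each doWork() pass would apply one such step per ramping sample.
        while (current != target) {
            current = step(current, target);
            iterations++;
        }
        System.out.println("target reached after " + iterations + " iterations");
    }
}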
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/JavaSoundMixer.java b/src/classes/share/com/sun/j3d/audioengines/javasound/JavaSoundMixer.java
deleted file mode 100644
index 1e96fee..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/JavaSoundMixer.java
+++ /dev/null
@@ -1,984 +0,0 @@
-/*
- * Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistribution of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- *
- * - Redistribution in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * Neither the name of Sun Microsystems, Inc. or the names of
- * contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * This software is provided "AS IS," without a warranty of any
- * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
- * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
- * EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
- * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
- * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
- * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
- * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
- * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
- * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
- * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGES.
- *
- * You acknowledge that this software is not designed, licensed or
- * intended for use in the design, construction, operation or
- * maintenance of any nuclear facility.
- *
- */
-
-/*
- * Audio device driver using Java Sound Mixer Engine.
- *
- * IMPLEMENTATION NOTE: The JavaSoundMixer is incomplete and really needs
- * to be rewritten.
- */
-
-package com.sun.j3d.audioengines.javasound;
-
-import javax.media.j3d.AudioDevice3D;
-import javax.media.j3d.MediaContainer;
-import javax.media.j3d.PhysicalEnvironment;
-import javax.media.j3d.Sound;
-import javax.media.j3d.Transform3D;
-import javax.vecmath.Point3d;
-import javax.vecmath.Vector3d;
-
-import com.sun.j3d.audioengines.AudioEngine3DL2;
-import com.sun.j3d.audioengines.Sample;
-
-/**
- * The JavaSoundMixer Class defines an audio output device that accesses
- * JavaSound functionality to stream data.
- */
-public class JavaSoundMixer extends AudioEngine3DL2 {
-
- // Debug print flags and methods
- static final boolean debugFlag = false;
- static final boolean internalErrors = false;
-
- void debugPrint(String message) {
- if (debugFlag)
- System.out.println(message);
- }
-
- void debugPrintln(String message) {
- if (debugFlag)
- System.out.println(message);
- }
-
- // Determines which method to call for adding or setting a sound into the ArrayList
- static final int ADD_TO_LIST = 1;
- static final int SET_INTO_LIST = 2;
-
- // current Aural Parameters = Aural Attributes from core + JavaSound
- // specific fields, including reverberation parameters.
- JSAuralParameters auralParams = null;
-
- // thread for dynamically changing audio parameters such as volume
- // and sample rate.
- JSThread thread = null;
-
- /*
- * new fields in extended class
- */
- protected float deviceGain = 1.0f;
-
- protected static final int NOT_PAUSED = 0;
- protected static final int PAUSE_PENDING = 1;
- protected static final int PAUSED = 2;
- protected static final int RESUME_PENDING = 3;
- protected int pause = NOT_PAUSED;
-
- /*
- * Construct a new JavaSoundMixer with the specified PhysicalEnvironment.
- * @param physicalEnvironment the physical environment object where we
- * want access to this device.
- */
- public JavaSoundMixer(PhysicalEnvironment physicalEnvironment ) {
- super(physicalEnvironment);
- thread = new JSThread(Thread.currentThread().getThreadGroup(), this);
- }
-
- /**
- * Query total number of channels available for sound rendering
- * for this audio device.
- * Overridden method from AudioEngine.
- * @return maximum number of voices that can play simultaneously on the JavaSound Mixer.
- */
- @Override
- public int getTotalChannels() {
- if (thread != null)
- return thread.getTotalChannels();
- else
- return 32;
- }
-
- /**
- * Code to initialize the device
- * New interface to mixer/engine specific methods
- * @return flag: true if initialized successfully, false if error
- */
- @Override
- public boolean initialize() {
- if (thread == null) {
- return false;
- }
- // init JavaSound dynamic thread
- thread.initialize();
- auralParams = new JSAuralParameters();
- if (debugFlag)
- debugPrintln("JavaSoundMixer: JSStream.initialize returned true");
- return true;
- }
-
- /**
- * Code to close the device.
- * New interface to mixer/engine specific methods
- * @return flag: true if closed successfully, false if error
- */
- @Override
- public boolean close() {
- if (thread == null)
- return false;
- if (thread.close()) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: JSStream.close returned true");
- return true;
- }
- else {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: JSStream.close returned false");
- return false;
- }
- }
-
-
- /**
- * Code to load sound data into a channel of device mixer.
- * Load sound as one or more samples into the Java Sound Mixer:
- * a) as either a STREAM or CLIP based on whether caching is enabled
- * b) positional and directional sounds use three samples per
- * sound
- * Overridden method from AudioEngine3D.
- *
- * Sound type determines if this is a Background, Point or Cone
- * sound source and thus the JSXxxxSample object type
- * Call JSXxxxxSample.loadSample()
- * If no error
- * Get the next free index in the samples list.
- * Store a reference to JSXxxxSample object in samples list.
- * @return index to the sample in samples list.
- */
- @Override
- public int prepareSound(int soundType, MediaContainer soundData) {
- int index = JSSample.NULL_SAMPLE;
- int methodType = ADD_TO_LIST;
- if (soundData == null)
- return JSSample.NULL_SAMPLE;
- synchronized(samples) {
- // for now force to just add to end of samples list
- int samplesSize = samples.size();
- index = samplesSize;
- samples.ensureCapacity(index+1);
- boolean error = false;
-
- if (soundType == AudioDevice3D.CONE_SOUND) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer.prepareSound type=CONE");
- JSDirectionalSample dirSample = new JSDirectionalSample();
- error = dirSample.load(soundData);
- if (error)
- return JSSample.NULL_SAMPLE;
- if (methodType == SET_INTO_LIST)
- samples.set(index, dirSample);
- else
- samples.add(index, dirSample);
- /*
- * Since no error occurred while loading, save all the
- * characteristics for the sound in the sample.
- */
- dirSample.setDirtyFlags(0xFFFF);
- dirSample.setSoundType(soundType);
- dirSample.setSoundData(soundData);
-
- }
- else if (soundType == AudioDevice3D.POINT_SOUND) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer.prepareSound type=POINT");
- JSPositionalSample posSample = new JSPositionalSample();
- error = posSample.load(soundData);
- if (error)
- return JSSample.NULL_SAMPLE;
- if (methodType == SET_INTO_LIST)
- samples.set(index, posSample);
- else
- samples.add(index, posSample);
- posSample.setDirtyFlags(0xFFFF);
- posSample.setSoundType(soundType);
- posSample.setSoundData(soundData);
- }
- else { // soundType == AudioDevice3D.BACKGROUND_SOUND
- if (debugFlag)
- debugPrintln("JavaSoundMixer.prepareSound type=BACKGROUND");
- JSSample sample = null;
- sample = new JSSample();
- error = sample.load(soundData);
- if (error)
- return JSSample.NULL_SAMPLE;
- if (methodType == SET_INTO_LIST)
- samples.set(index, sample);
- else
- samples.add(index, sample);
- sample.setDirtyFlags(0xFFFF);
- sample.setSoundType(soundType);
- sample.setSoundData(soundData);
- }
- }
-
- if (debugFlag) {
- debugPrint(" prepareSound type = "+soundType);
- debugPrintln("JavaSoundMixer.prepareSound returned "+index);
- }
- return index;
- }
-
- /**
- * Clears the fields associated with sample data for this sound.
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void clearSound(int index) {
- // TODO: call JSXXXX clear method
- JSSample sample = null;
- if ( (sample = (JSSample)getSample(index)) == null)
- return;
- sample.clear();
- synchronized(samples) {
- samples.set(index, null);
- }
- }
-
- /**
- * Save a reference to the local to virtual world coordinate space
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void setVworldXfrm(int index, Transform3D trans) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: setVworldXfrm for index " + index);
- super.setVworldXfrm(index, trans);
- if (debugFlag) {
- double[] matrix = new double[16];
- trans.get(matrix);
- debugPrintln("JavaSoundMixer column-major transform ");
- debugPrintln("JavaSoundMixer " + matrix[0]+", "+matrix[1]+
- ", "+matrix[2]+", "+matrix[3]);
- debugPrintln("JavaSoundMixer " + matrix[4]+", "+matrix[5]+
- ", "+matrix[6]+", "+matrix[7]);
- debugPrintln("JavaSoundMixer " + matrix[8]+", "+matrix[9]+
- ", "+matrix[10]+", "+matrix[11]);
- debugPrintln("JavaSoundMixer " + matrix[12]+", "+matrix[13]+
- ", "+matrix[14]+", "+matrix[15]);
- }
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return;
- int soundType = sample.getSoundType();
-
- if (soundType == AudioDevice3D.CONE_SOUND) {
- JSDirectionalSample dirSample = null;
- if ((dirSample = (JSDirectionalSample)getSample(index)) == null)
- return;
- dirSample.setXformedDirection();
- dirSample.setXformedPosition();
- // flag that VirtualWorld transform set
- dirSample.setVWrldXfrmFlag(true);
- }
- else if (soundType == AudioDevice3D.POINT_SOUND) {
- JSPositionalSample posSample = null;
- if ((posSample = (JSPositionalSample)getSample(index)) == null)
- return;
- posSample.setXformedPosition();
- // flag that VirtualWorld transform set
- posSample.setVWrldXfrmFlag(true);
- }
- return;
- }
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void setPosition(int index, Point3d position) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: setPosition for index " + index);
- super.setPosition(index, position);
- JSPositionalSample posSample = null;
- if ((posSample = (JSPositionalSample)getSample(index)) == null)
- return;
- int soundType = posSample.getSoundType();
- if ( (soundType == AudioDevice3D.POINT_SOUND) ||
- (soundType == AudioDevice3D.CONE_SOUND) ) {
- posSample.setXformedPosition();
- }
- return;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void setDirection(int index, Vector3d direction) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: setDirection for index " + index);
- super.setDirection(index, direction);
- JSDirectionalSample dirSample = null;
- if ((dirSample = (JSDirectionalSample)getSample(index)) == null)
- return;
- int soundType = dirSample.getSoundType();
- if (soundType == AudioDevice3D.CONE_SOUND) {
- dirSample.setXformedDirection();
- }
- return;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void setReflectionCoefficient(float coefficient) {
- super.setReflectionCoefficient(coefficient);
- auralParams.reverbDirty |= JSAuralParameters.REFLECTION_COEFF_CHANGED;
- return;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void setReverbDelay(float reverbDelay) {
- super.setReverbDelay(reverbDelay);
- auralParams.reverbDirty |= JSAuralParameters.REVERB_DELAY_CHANGED;
- return;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void setReverbOrder(int reverbOrder) {
- super.setReverbOrder(reverbOrder);
- auralParams.reverbDirty |= JSAuralParameters.REVERB_ORDER_CHANGED;
- return;
- }
-
- /*
- * QUESTION: if this is used, for now, exclusively to start a Background
- * sound or any other single-sample Sound, why are there if-else cases to
- * handle Point and Cone sounds??
- *
- * For now background sounds are not reverberated
- *
- * Overridden method from AudioEngine3D.
- */
- @Override
- public int startSample(int index) {
- // TODO: Rewrite this function
-
- if (debugFlag)
- debugPrintln("JavaSoundMixer: STARTSample for index " + index);
-
- JSSample sample = null;
- if ( ( (sample = (JSSample)getSample(index)) == null) ||
- thread == null )
- return JSSample.NULL_SAMPLE;
-
- int soundType = sample.getSoundType();
- boolean muted = sample.getMuteFlag();
- if (muted) {
- if (debugFlag)
- debugPrintln(" MUTEd start");
- thread.muteSample(sample);
- if (soundType != AudioDevice3D.BACKGROUND_SOUND)
- setFilter(index, false, Sound.NO_FILTER);
- }
- else {
- sample.render(sample.getDirtyFlags(), getView(), auralParams);
- this.scaleSampleRate(index, sample.rateRatio);
- // filtering
- if (soundType != AudioDevice3D.BACKGROUND_SOUND)
- setFilter(index, sample.getFilterFlag(), sample.getFilterFreq());
- }
-
- boolean startSuccessful;
- startSuccessful = thread.startSample(sample);
-
- sample.channel.startSample(sample.getLoopCount(), sample.getGain(), 0);
-
- if (!startSuccessful) {
- if (internalErrors)
- debugPrintln(
- "JavaSoundMixer: Internal Error startSample for index " +
- index + " failed");
- return JSSample.NULL_SAMPLE;
- }
- else {
- if (debugFlag)
- debugPrintln(" startSample worked, " +
- "returning " + startSuccessful);
- // NOTE: Set AuralParameters AFTER sound started
- // Setting AuralParameters before you start sound doesn't work
- if (!muted) {
- if (auralParams.reverbDirty > 0) {
- if (debugFlag) {
- debugPrintln("startSample: reverb settings are:");
- debugPrintln(" coeff = "+
- auralParams.reflectionCoefficient +
- ", delay = " + auralParams.reverbDelay +
- ", order = " + auralParams.reverbOrder);
- }
- float delayTime = auralParams.reverbDelay * auralParams.rolloff;
- calcReverb(sample);
- }
- // NOTE: it appears that reverb has to be reset in the
- // JavaSound engine when a sound is re-started??
- // force reset of reverb parameters when sound is started
- setReverb(sample);
- }
- return index;
- }
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public int stopSample(int index) {
- // TODO: Rewrite this function
-
- if (debugFlag)
- debugPrintln("JavaSoundMixer: STOPSample for index " + index);
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return -1;
-
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
-
- boolean stopSuccessful = true;
- stopSuccessful = thread.stopSample(sample);
-
- sample.channel.stopSample();
-
- if (!stopSuccessful) {
- if (internalErrors)
- debugPrintln( "JavaSoundMixer: Internal Error stopSample(s) for index " +
- index + " failed");
- return -1;
- }
- else {
- // set fields in sample to reset for future start
- sample.reset();
- if (debugFlag)
- debugPrintln("JavaSoundMixer: stopSample for index " +
- index + " worked, returning " + stopSuccessful);
- return 0;
- }
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void pauseSample(int index) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: PAUSESample for index " + index);
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return;
- // check thread != null
- thread.pauseSample(sample);
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void unpauseSample(int index) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: UNPAUSESample for index " + index);
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return;
- thread.unpauseSample(sample);
- }
-
- /*
- * Force thread to update sample.
- * Overridden method from AudioEngine3D.
- */
-
- @Override
- public void updateSample(int index) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: UPDATESample for index " + index);
- JSSample sample = null;
- if ( ( (sample = (JSSample)getSample(index)) == null) ||
- thread == null )
- return;
-
- int soundType = sample.getSoundType();
- boolean muted = sample.getMuteFlag();
-
- if (muted) {
- if (soundType != AudioDevice3D.BACKGROUND_SOUND)
- setFilter(index, false, Sound.NO_FILTER);
- thread.muteSample(sample);
- if (debugFlag)
- debugPrintln(" Mute during update");
- }
- else {
- // If reverb parameters changed resend to audio device
- if (auralParams.reverbDirty > 0) {
- if (debugFlag) {
- debugPrintln("updateSample: reverb settings are:");
- debugPrintln(" coeff = " + auralParams.reflectionCoefficient+
- ", delay = " + auralParams.reverbDelay +
- ", order = " + auralParams.reverbOrder);
- }
- float delayTime = auralParams.reverbDelay * auralParams.rolloff;
- calcReverb(sample);
- }
- // TODO: Only re-set reverb if values different
- // For now force reset to ensure that reverb is currently correct
- setReverb(sample); // ensure reverb is current/correct
-
- // TODO: For now sum left & rightGains for reverb gain
- float reverbGain = 0.0f;
- if (!muted && auralParams.reverbFlag) {
- reverbGain = sample.getGain() * auralParams.reflectionCoefficient;
- }
-
- sample.render(sample.getDirtyFlags(), getView(), auralParams);
-
- // filtering
- if (soundType != AudioDevice3D.BACKGROUND_SOUND)
- setFilter(index, sample.getFilterFlag(), sample.getFilterFreq());
- thread.setSampleGain(sample, auralParams);
- thread.setSampleRate(sample, auralParams);
- thread.setSampleDelay(sample, auralParams);
- }
- return;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void muteSample(int index) {
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return;
-
- if (debugFlag)
- debugPrintln("JavaSoundMixer: muteSample");
- sample.setMuteFlag(true);
- thread.muteSample(sample);
- return;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public void unmuteSample(int index) {
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return;
-
- if (debugFlag)
- debugPrintln("JavaSoundMixer: unmuteSample");
- sample.setMuteFlag(false);
-
- // since while muted the reverb type and state were not updated...
- // Reverb has to be recalculated when the sound is unmuted.
- auralParams.reverbDirty = 0xFFFF; // force an update of reverb params
- sample.setDirtyFlags(0xFFFF); // heavy weight forcing of gain/delay update
-
- // TODO: force an update of ALL parameters that could have changed
- // while the sound was muted...
-
- thread.unmuteSample(sample);
- return;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public long getSampleDuration(int index) {
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return Sample.DURATION_UNKNOWN;
- long duration;
-
- if (sample != null)
- duration = sample.getDuration();
- else
- duration = Sample.DURATION_UNKNOWN;
- if (debugFlag)
- debugPrintln(" return duration " + duration);
- return duration;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public int getNumberOfChannelsUsed(int index) {
- /*
- * Calls same method with different signature containing the
- * sample's mute flag passed as the 2nd parameter.
- */
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return 0;
- else
- return getNumberOfChannelsUsed(index, sample.getMuteFlag());
- }
-
- /**
- * Overridden method from AudioEngine3D.
- */
- @Override
- public int getNumberOfChannelsUsed(int index, boolean muted) {
- /*
- * The JavaSoundMixer implementation uses THREE channels to render
- * the stereo image of each Point and Cone Sound: two for rendering
- * the right and left portions of the spatialized sound image - panned
- * hard right and left respectively - and one for the reverb channel.
- * This implementation uses one channel to render Background sounds
- * whether the sample is mono or stereo.
- *
- * TODO: When muted is implemented, that flag should be checked
- * so that zero is returned.
- */
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return 0;
-
- int soundType = sample.getSoundType();
- int dataType = sample.getDataType();
-
- // TODO: for now a positional Midi sound uses only 1 sample
- if (dataType == JSSample.STREAMING_MIDI_DATA ||
- dataType == JSSample.BUFFERED_MIDI_DATA)
- return 1;
-
- if (soundType == BACKGROUND_SOUND)
- return 1;
- else // for Point and Cone sounds
- return 3;
- }
-
- /*
- * Overridden method from AudioEngine3D.
- */
- @Override
- public long getStartTime(int index) {
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return 0L;
- if (sample.channel == null)
- return 0L;
- return (long)sample.channel.startTime;
- }
-
- /*
- * Methods called during rendering
- */
- void scaleSampleRate(int index, float scaleFactor) {
- if (debugFlag)
- debugPrintln("JavaSoundMixer: scaleSampleRate index " +
- index + ", scale factor = " + scaleFactor);
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null ||
- thread == null)
- return;
- int dataType = sample.getDataType();
- if (debugFlag)
- debugPrintln(" scaleSampleRate.dataType = " + dataType +
- "using sample " + sample + " from samples[" +
- index +"]");
- int soundType = sample.getSoundType();
-
- if (dataType == JSSample.STREAMING_AUDIO_DATA ||
- dataType == JSSample.BUFFERED_AUDIO_DATA) {
- thread.setSampleRate(sample, scaleFactor);
- /**********
- // TODO:
- if (soundType != AudioDevice3D.BACKGROUND_SOUND) {
- thread.setSampleRate( ((JSPositionalSample)sample).getSecondIndex(),
- scaleFactor);
- thread.setSampleRate(((JSPositionalSample)sample).getReverbIndex(),
- scaleFactor);
- }
- **********/
- }
- else if (dataType == JSSample.STREAMING_MIDI_DATA ||
- dataType == JSSample.BUFFERED_MIDI_DATA) {
- thread.setSampleRate(sample, scaleFactor);
- /**********
- if (soundType != AudioDevice3D.BACKGROUND_SOUND) {
- thread.setSampleRate(((JSPositionalSample)sample).getSecondIndex(),
- scaleFactor);
- thread.setSampleRate(((JSPositionalSample)sample).getReverbIndex(),
- scaleFactor);
- }
- **********/
- }
- else {
- if (internalErrors)
- debugPrintln(
- "JavaSoundMixer: Internal Error scaleSampleRate dataType " +
- dataType + " invalid");
- }
- }
-
- /*
- * Methods called during rendering
- */
- void calcReverb(JSSample sample) {
- /*
- * Java Sound reverb parameters are a subset of Java 3D parameters
- */
- int dataType = sample.getDataType();
- int soundType = sample.getSoundType();
- float decay = auralParams.decayTime;
- float delay = auralParams.reverbDelay * auralParams.rolloff;
- float reflection = auralParams.reflectionCoefficient;
- int order = auralParams.reverbOrder;
- /*
- * Remember: a Coeff change is chosen over an Order change if BOTH are made;
- * otherwise the last one changed takes precedence.
- */
- if (auralParams.reflectionCoefficient == 0.0f ||
- auralParams.reverbCoefficient == 0.0f)
- auralParams.reverbFlag = false;
- else {
- auralParams.reverbFlag = true;
- if (order > 0) {
- // clamp reverb decay time to order*delay
- float clampedTime = order * delay;
- if ( clampedTime < decay)
- decay = clampedTime;
- }
- if (delay < 100.0f) {
- // "small" reverberant space
- if (decay <= 1500.0f)
- auralParams.reverbType = 2;
- else
- auralParams.reverbType = 4;
- }
- else if (delay < 500.0f) {
- // "medium" reverberant space
- if (decay <= 1500.0f)
- auralParams.reverbType = 3;
- else
- auralParams.reverbType = 6;
- }
- else { // delay >= 500.0f
- // "large" reverberant space
- if (decay <= 1500.0f)
- auralParams.reverbType = 6;
- else
- auralParams.reverbType = 5;
- }
- }
-
- if (debugFlag)
- debugPrintln("JavaSoundMixer: setReverb for " +
- sample + ", type = " + auralParams.reverbType + ", flag = " + auralParams.reverbFlag);
-
- auralParams.reverbDirty = 0; // clear the attribute reverb dirty flags
- }
-
- /*
- * Internal method for setting reverb parameters called during rendering.
- * This is not called by the SoundScheduler.
- */
- void setReverb(JSSample sample) {
- /*
- * Only third sample of multisample sounds has reverb parameters set.
- * For now, only positional and directional sounds are reverberated.
- */
- int soundType = sample.getSoundType();
- int dataType = sample.getDataType();
-
- // QUESTION: Should reverb be applied to background sounds?
- if ( (soundType == AudioDevice3D.CONE_SOUND) ||
- (soundType == AudioDevice3D.POINT_SOUND) ) {
- if (debugFlag)
- debugPrintln("setReverb called with type, on = " +
- auralParams.reverbType + ", " + auralParams.reverbFlag);
- if (sample == null)
- return;
- JSPositionalSample posSample = (JSPositionalSample)sample;
- if (posSample.channel == null)
- return;
-
- /**********
- // NOTE: no support for reverb channel yet...
- int reverbIndex = posSample.getReverbIndex();
- **********/
- if (dataType == JSSample.STREAMING_AUDIO_DATA) {
- JSStream stream = (JSStream)posSample.channel;
- stream.setSampleReverb(auralParams.reverbType, auralParams.reverbFlag);
- }
- else if (dataType == JSSample.BUFFERED_AUDIO_DATA) {
- JSClip clip = (JSClip)posSample.channel;
- clip.setSampleReverb(auralParams.reverbType, auralParams.reverbFlag);
- }
- /**********
- // TODO:
- else if (dataType == JSSample.STREAMING_MIDI_DATA ||
- dataType == JSSample.BUFFERED_MIDI_DATA) {
- JSMidi.setSampleReverb(reverbIndex,
- auralParams.reverbType, auralParams.reverbFlag);
- }
- **********/
- else {
- if (internalErrors)
- debugPrintln( "JavaSoundMixer: Internal Error setReverb " +
- "dataType " + dataType + " invalid");
- }
- }
- }
-
- // TEMPORARY: Override of method due to bug in Java Sound
- @Override
- public void setLoop(int index, int count) {
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return;
- int dataType = sample.getDataType();
-
- // WORKAROUND:
- // The Java Sound engine hangs when an INFINITE_LOOPS count is used
- // for Audio Wave data. Leave count unchanged for Midi data.
- if (dataType==JSSample.STREAMING_AUDIO_DATA ||
- dataType==JSSample.BUFFERED_AUDIO_DATA) {
- if (count == Sound.INFINITE_LOOPS) {
- // LoopCount of 'loop Infinitely' forced to a large positive int
- count = 0x7FFFFFF;
- }
- }
- super.setLoop(index, count);
- return;
- }
-
- // Perform device specific filtering
- // Assumes that this is called for positional and directional sounds,
- // not background sounds, so there are at least two samples assigned
- // per sound.
- // TODO: remove assumption from method
- void setFilter(int index, boolean filterFlag, float filterFreq) {
- JSPositionalSample posSample = null;
- if ((posSample = (JSPositionalSample)getSample(index)) == null)
- return;
- if (posSample.channel == null)
- return;
- int dataType = posSample.getDataType();
-
- // Filtering can NOT be performed on MIDI Songs
- if (dataType == JSSample.STREAMING_MIDI_DATA ||
- dataType == JSSample.BUFFERED_MIDI_DATA) {
- return;
- }
-
- /****
- // TODO: multiple clips per channel
- int secondIndex = posSample.getSecondIndex();
- *****/
- if (dataType == JSSample.BUFFERED_AUDIO_DATA) {
- JSClip clip = (JSClip)posSample.channel;
- clip.setSampleFiltering(filterFlag,filterFreq);
- /*****
- JSClip.setSampleFiltering(secondIndex, filterFlag, filterFreq);
- ******/
- }
- else { // dataType == JSSample.STREAMING_AUDIO_DATA
- JSStream stream = (JSStream)posSample.channel;
- stream.setSampleFiltering(filterFlag,filterFreq);
- /*****
- JSStream.setSampleFiltering(secondIndex, filterFlag, filterFreq);
- ******/
- }
- // QUESTION: should reverb channel be filtered???
-
- if (debugFlag) {
- debugPrintln("JavaSoundMixer:setFilter " +
- "of non-backgroundSound by (" +
- filterFlag + ", " + filterFreq + ")");
- }
- }
- //
- // Set overall gain for device
- // @since Java 3D 1.3
- //
- @Override
- public void setGain(float scaleFactor) {
- float oldDeviceGain = deviceGain;
- float gainFactor = scaleFactor/oldDeviceGain;
- // TODO: for each sample, change gain by gainFactor
- deviceGain = scaleFactor; // set given scalefactor as new device gain
- return;
- }
-
- /*
- * Set sample-specific sample rate scale factor
- * @since Java 3D 1.3
- */
- @Override
- public void setRateScaleFactor(int index, float rateScaleFactor) {
- JSSample sample = null;
- if ((sample = (JSSample)getSample(index)) == null)
- return;
- sample.setRateScaleFactor(rateScaleFactor);
- this.scaleSampleRate(index, rateScaleFactor);
- }
-
- /**
- * Pauses audio device engine without closing the device and associated
- * threads.
- * Causes all cached sounds to be paused and all streaming sounds to be
- * stopped.
- */
- @Override
- public void pause() {
- pause = PAUSE_PENDING;
- // TODO: pause all sounds
- return;
- }
- /**
- * Resumes audio device engine (if previously paused) without reinitializing the device.
- * Causes all paused cached sounds to be resumed and all streaming sounds
- * restarted.
- */
- @Override
- public void resume() {
- pause = RESUME_PENDING;
- // TODO: unpause all sounds
- return;
- }
-}
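For context, an application installs this mixer on its PhysicalEnvironment
before any sounds are scheduled. A minimal setup sketch is shown below; it
assumes a stand-alone PhysicalEnvironment, whereas a real application would
normally obtain one from its Viewer.

import javax.media.j3d.PhysicalEnvironment;

import com.sun.j3d.audioengines.javasound.JavaSoundMixer;

class AudioDeviceSetupSketch {
    public static void main(String[] args) {
        PhysicalEnvironment physicalEnv = new PhysicalEnvironment();
        JavaSoundMixer mixer = new JavaSoundMixer(physicalEnv);
        if (mixer.initialize()) {
            // Register the device so the Java 3D sound scheduler can drive it.
            physicalEnv.setAudioDevice(mixer);
        }
    }
}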
diff --git a/src/classes/share/com/sun/j3d/audioengines/javasound/package.html b/src/classes/share/com/sun/j3d/audioengines/javasound/package.html
deleted file mode 100644
index 5897bc9..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/javasound/package.html
+++ /dev/null
@@ -1,11 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-<html>
-<head>
- <meta content="text/html; charset=ISO-8859-1"
- http-equiv="content-type">
- <title>com.sun.j3d.audioengines.javasound</title>
-</head>
-<body>
-<p>Provides a JavaSound-based implementation of a Java 3D audio device.</p>
-</body>
-</html>
diff --git a/src/classes/share/com/sun/j3d/audioengines/package.html b/src/classes/share/com/sun/j3d/audioengines/package.html
deleted file mode 100644
index 59b80b4..0000000
--- a/src/classes/share/com/sun/j3d/audioengines/package.html
+++ /dev/null
@@ -1,11 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-<html>
-<head>
- <meta content="text/html; charset=ISO-8859-1"
- http-equiv="content-type">
- <title>com.sun.j3d.audioengines</title>
-</head>
-<body>
-<p>Provides abstract classes for creating Java 3D audio devices.</p>
-</body>
-</html>