diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..26b4508 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/bin/ +*.jar diff --git a/README.md b/README.md index 32858fd..680d1d7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,19 @@ -# processing-sound -Audio library for Processing built with JSyn +## Processing Sound library -This library replaces the prior Processing Sound library (). The API is 100% compatible, so all code written with the prior code base will work with the new version of the library. +The new Sound library for Processing 3 provides a simple way to work with audio. It can play, analyze, and synthesize sound. The library comes with a collection of oscillators for basic wave forms, a variety of noise generators, and effects and filters to alter sound files and other generated sounds. The syntax is minimal to make it easy for beginners who want a straightforward way to add some sound to their Processing sketches! + +### How to use + +The easiest way to install the Sound library is through Processing's Contribution Manager. The library comes with many example sketches, the full online reference can be found [here](https://www.processing.org/reference/libraries/sound/). Please report bugs [https://github.com/processing/processing-sound/issues](here). + +### How to build + +1. `git clone git@github.com:processing/processing-sound.git` +2. into the `library/` folder copy (or soft-link) your Processsing's `core.jar` (and, optionally, also your Android SDK's `android.jar`, API level 26 or higher). Other dependencies (in particular Phil Burk's [JSyn](http://www.softsynth.com/jsyn/) engine on which this library is based) are downloaded automatically. +3. `ant dist` (or, alternatively, run build.xml from within Eclipse) + +The resulting `processing-sound.zip` can be extracted into your Processing installation's `libraries/` folder. 
+ +### License + +LGPL v2.1 diff --git a/build.xml b/build.xml new file mode 100644 index 0000000..119eebe --- /dev/null +++ b/build.xml @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/Analysis/FFTSpectrum/FFTSpectrum.pde b/examples/Analysis/FFTSpectrum/FFTSpectrum.pde new file mode 100644 index 0000000..c51b04f --- /dev/null +++ b/examples/Analysis/FFTSpectrum/FFTSpectrum.pde @@ -0,0 +1,64 @@ +/** + * This sketch shows how to use the FFT class to analyze a stream + * of sound. Change the number of bands to get more spectral bands + * (at the expense of more coarse-grained time resolution of the spectrum). + */ + +import processing.sound.*; + +// Declare the sound source and FFT analyzer variables +SoundFile sample; +FFT fft; + +// Define how many FFT bands to use (this needs to be a power of two) +int bands = 128; + +// Define a smoothing factor which determines how much the spectrums of consecutive +// points in time should be combined to create a smoother visualisation of the spectrum. +// A smoothing factor of 1.0 means no smoothing (only the data from the newest analysis +// is rendered), decrease the factor down towards 0.0 to have the visualisation update +// more slowly, which is easier on the eye. +float smoothingFactor = 0.2; + +// Create a vector to store the smoothed spectrum data in +float[] sum = new float[bands]; + +// Variables for drawing the spectrum: +// Declare a scaling factor for adjusting the height of the rectangles +int scale = 5; +// Declare a drawing variable for calculating the width of the +float barWidth; + +public void setup() { + size(640, 360); + background(255); + + // Calculate the width of the rects depending on how many bands we have + barWidth = width/float(bands); + + // Load and play a soundfile and loop it. 
+ sample = new SoundFile(this, "beat.aiff"); + sample.loop(); + + // Create the FFT analyzer and connect the playing soundfile to it. + fft = new FFT(this, bands); + fft.input(sample); +} + +public void draw() { + // Set background color, noStroke and fill color + background(125, 255, 125); + fill(255, 0, 150); + noStroke(); + + // Perform the analysis + fft.analyze(); + + for (int i = 0; i < bands; i++) { + // Smooth the FFT spectrum data by smoothing factor + sum[i] += (fft.spectrum[i] - sum[i]) * smoothingFactor; + + // Draw the rectangles, adjust their height using the scale factor + rect(i*barWidth, height, barWidth, -sum[i]*height*scale); + } +} diff --git a/examples/Analysis/FFTSpectrum/data/beat.aiff b/examples/Analysis/FFTSpectrum/data/beat.aiff new file mode 100644 index 0000000..017b7ce Binary files /dev/null and b/examples/Analysis/FFTSpectrum/data/beat.aiff differ diff --git a/examples/Analysis/PeakAmplitude/PeakAmplitude.pde b/examples/Analysis/PeakAmplitude/PeakAmplitude.pde new file mode 100644 index 0000000..3485b14 --- /dev/null +++ b/examples/Analysis/PeakAmplitude/PeakAmplitude.pde @@ -0,0 +1,49 @@ +/** + * This sketch shows how to use the Amplitude class to analyze the changing + * "loudness" of a stream of sound. In this case an audio sample is analyzed. + */ + +import processing.sound.*; + +// Declare the processing sound variables +SoundFile sample; +Amplitude rms; + +// Declare a smooth factor to smooth out sudden changes in amplitude. +// With a smooth factor of 1, only the last measured amplitude is used for the +// visualisation, which can lead to very abrupt changes. 
As you decrease the +// smooth factor towards 0, the measured amplitudes are averaged across frames, +// leading to more pleasant gradual changes +float smoothingFactor = 0.25; + +// Used for storing the smoothed amplitude value +float sum; + +public void setup() { + size(640, 360); + + //Load and play a soundfile and loop it + sample = new SoundFile(this, "beat.aiff"); + sample.loop(); + + // Create and patch the rms tracker + rms = new Amplitude(this); + rms.input(sample); +} + +public void draw() { + // Set background color, noStroke and fill color + background(125, 255, 125); + noStroke(); + fill(255, 0, 150); + + // smooth the rms data by smoothing factor + sum += (rms.analyze() - sum) * smoothingFactor; + + // rms.analyze() return a value between 0 and 1. It's + // scaled to height/2 and then multiplied by a fixed scale factor + float rms_scaled = sum * (height/2) * 5; + + // We draw a circle whose size is coupled to the audio analysis + ellipse(width/2, height/2, rms_scaled, rms_scaled); +} diff --git a/examples/Analysis/PeakAmplitude/data/beat.aiff b/examples/Analysis/PeakAmplitude/data/beat.aiff new file mode 100644 index 0000000..017b7ce Binary files /dev/null and b/examples/Analysis/PeakAmplitude/data/beat.aiff differ diff --git a/examples/Effects/BandPassFilter/BandPassFilter.pde b/examples/Effects/BandPassFilter/BandPassFilter.pde new file mode 100644 index 0000000..ff32dfc --- /dev/null +++ b/examples/Effects/BandPassFilter/BandPassFilter.pde @@ -0,0 +1,40 @@ +/** + * In this example, a WhiteNoise generator (equal amount of noise at all frequencies) is + * passed through a BandPass filter. You can control both the central frequency + * (left/right) as well as the bandwidth of the filter (up/down) with the mouse. The + * position and size of the circle indicates how much of the noise's spectrum passes + * through the filter, and at what frequency range. 
+ */ + +import processing.sound.*; + +WhiteNoise noise; +BandPass filter; + +void setup() { + size(640, 360); + + // Create the noise generator + Filter + noise = new WhiteNoise(this); + filter = new BandPass(this); + + noise.play(0.5); + filter.process(noise); +} + +void draw() { + // Map the left/right mouse position to a cutoff frequency between 20 and 10000 Hz + float frequency = map(mouseX, 0, width, 20, 10000); + // And the vertical mouse position to the width of the band to be passed through + float bandwidth = map(mouseY, 0, height, 1000, 100); + + filter.freq(frequency); + filter.bw(bandwidth); + + // Draw a circle indicating the position + width of the frequency window + // that is allowed to pass through + background(125, 255, 125); + noStroke(); + fill(255, 0, 150); + ellipse(mouseX, height, 2*(height - mouseY), 2*(height - mouseY)); +} diff --git a/examples/Effects/HighPassFilter/HighPassFilter.pde b/examples/Effects/HighPassFilter/HighPassFilter.pde new file mode 100644 index 0000000..9dfd34c --- /dev/null +++ b/examples/Effects/HighPassFilter/HighPassFilter.pde @@ -0,0 +1,33 @@ +/** + * This is a simple WhiteNoise generator, run through a HighPass filter which only lets + * the higher frequency components of the noise through. The cutoff frequency of the + * filter can be controlled through the left/right position of the mouse. 
+ */ + +import processing.sound.*; + +WhiteNoise noise; +HighPass highPass; + +void setup() { + size(640, 360); + + // Create the noise generator + filter + noise = new WhiteNoise(this); + highPass = new HighPass(this); + + noise.play(0.5); + highPass.process(noise); +} + +void draw() { + // Map the left/right mouse position to a cutoff frequency between 10 and 15000 Hz + float cutoff = map(mouseX, 0, width, 10, 15000); + highPass.freq(cutoff); + + // Draw a circle indicating the position + width of the frequencies passed through + background(125, 255, 125); + noStroke(); + fill(255, 0, 150); + ellipse(width, height, 2*(width - mouseX), 2*(width - mouseX)); +} diff --git a/examples/Effects/LowPassFilter/LowPassFilter.pde b/examples/Effects/LowPassFilter/LowPassFilter.pde new file mode 100644 index 0000000..6235610 --- /dev/null +++ b/examples/Effects/LowPassFilter/LowPassFilter.pde @@ -0,0 +1,33 @@ +/** + * This is a simple WhiteNoise generator, run through a LowPass filter which only lets + * the lower frequency components of the noise through. The cutoff frequency of the + * filter can be controlled through the left/right position of the mouse. 
+ */ + +import processing.sound.*; + +WhiteNoise noise; +LowPass lowPass; + +void setup() { + size(640, 360); + + // Create the noise generator + filter + noise = new WhiteNoise(this); + lowPass = new LowPass(this); + + noise.play(0.5); + lowPass.process(noise); +} + +void draw() { + // Map the left/right mouse position to a cutoff frequency between 20 and 10000 Hz + float cutoff = map(mouseX, 0, width, 20, 10000); + lowPass.freq(cutoff); + + // Draw a circle indicating the position + width of the frequencies passed through + background(125, 255, 125); + noStroke(); + fill(255, 0, 150); + ellipse(0, height, 2*mouseX, 2*mouseX); +} diff --git a/examples/Effects/Reverberation/Reverberation.pde b/examples/Effects/Reverberation/Reverberation.pde new file mode 100644 index 0000000..1d9a530 --- /dev/null +++ b/examples/Effects/Reverberation/Reverberation.pde @@ -0,0 +1,46 @@ +/** + * Play a sound sample and apply a reverb filter to it, changing the effect + * parameters based on the mouse position. + * + * With the mouse pointer at the top of the sketch you'll only hear the "dry" + * (unprocessed) signal, move the mouse downwards to add more of the "wet" + * reverb signal to the mix. The left-right position of the mouse controls the + * "room size" and damping of the effect, with a smaller room (and more refraction) + * at the left, and a bigger (but more dampened) room towards the right. 
+ */ + +import processing.sound.*; + +SoundFile soundfile; +Reverb reverb; + +void setup() { + size(640, 360); + background(255); + + // Load a soundfile + soundfile = new SoundFile(this, "vibraphon.aiff"); + + // Create the effect object + reverb = new Reverb(this); + + // Play the file in a loop + soundfile.loop(); + + // Set soundfile as input to the reverb + reverb.process(soundfile); +} + +void draw() { + // Change the roomsize of the reverb + float roomSize = map(mouseX, 0, width, 0, 1.0); + reverb.room(roomSize); + + // Change the high frequency dampening parameter + float damping = map(mouseX, 0, width, 0, 1.0); + reverb.damp(damping); + + // Change the wet/dry relation of the effect + float effectStrength = map(mouseY, 0, height, 0, 1.0); + reverb.wet(effectStrength); +} diff --git a/examples/Effects/Reverberation/data/vibraphon.aiff b/examples/Effects/Reverberation/data/vibraphon.aiff new file mode 100644 index 0000000..b4e95a3 Binary files /dev/null and b/examples/Effects/Reverberation/data/vibraphon.aiff differ diff --git a/examples/Effects/VariableDelay/VariableDelay.pde b/examples/Effects/VariableDelay/VariableDelay.pde new file mode 100644 index 0000000..ac022c2 --- /dev/null +++ b/examples/Effects/VariableDelay/VariableDelay.pde @@ -0,0 +1,41 @@ +/** + * Play a sound sample and pass it through a tape delay, changing the delay + * parameters based on the mouse position. 
+ */ + +import processing.sound.*; + +SoundFile soundfile; +Delay delay; + +void setup() { + size(640, 360); + background(255); + + // Load a soundfile + soundfile = new SoundFile(this, "vibraphon.aiff"); + + // Create the delay effect + delay = new Delay(this); + + // Play the file in a loop + soundfile.loop(); + + // Connect the soundfile to the delay unit, which is initiated with a + // five second "tape" + delay.process(soundfile, 5.0); +} + +void draw() { + // Map mouseX from -1.0 to 1.0 for left to right panning + float position = map(mouseX, 0, width, -1.0, 1.0); + soundfile.pan(position); + + // Map mouseX from 0 to 0.8 for the amount of delay feedback + float fb = map(mouseX, 0, width, 0.0, 0.8); + delay.feedback(fb); + + // Map mouseY from 0.001 to 2.0 seconds for the length of the delay + float delayTime = map(mouseY, 0, height, 0.001, 2.0); + delay.time(delayTime); +} diff --git a/examples/Effects/VariableDelay/data/vibraphon.aiff b/examples/Effects/VariableDelay/data/vibraphon.aiff new file mode 100644 index 0000000..b4e95a3 Binary files /dev/null and b/examples/Effects/VariableDelay/data/vibraphon.aiff differ diff --git a/examples/Env/Envelopes/Envelopes.pde b/examples/Env/Envelopes/Envelopes.pde new file mode 100644 index 0000000..a6e48f3 --- /dev/null +++ b/examples/Env/Envelopes/Envelopes.pde @@ -0,0 +1,79 @@ +/** + * This sketch shows how to use envelopes and oscillators. Envelopes are pre-defined + * amplitude distributions over time. The sound library provides an ASR envelope which + * stands for attack, sustain, release. The amplitude first rises, then sustains at the + * maximum level and decays slowly, all depending on the pre-defined length of the three + * time segments. + * + * .________ + * . --- + * . --- + * . 
--- + * A S R + */ + +import processing.sound.*; + +TriOsc triOsc; +Env env; + +// Times and levels for the ASR envelope +float attackTime = 0.001; +float sustainTime = 0.004; +float sustainLevel = 0.3; +float releaseTime = 0.2; + +// This is an octave in MIDI notes. +int[] midiSequence = { 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72 }; + +// Play a new note every 200ms +int duration = 200; + +// This variable stores the point in time when the next note should be triggered +int trigger = millis(); + +// An index to count up the notes +int note = 0; + +void setup() { + size(640, 360); + background(255); + + // Create triangle wave and start it + triOsc = new TriOsc(this); + + // Create the envelope + env = new Env(this); +} + +void draw() { + + // If the determined trigger moment in time matches up with the computer clock and + // the sequence of notes hasn't been finished yet, the next note gets played. + if ((millis() > trigger) && (note trigger) { + // Redraw the background every time to erase old rects + background(255); + + // By iterating through the playSound array we check for 1 or 0, 1 plays a sound and draws a rect, + // for 0 nothing happens. + + for (int i = 0; i < numsounds; i++) { + // Check which indexes are 1 and 0. + if (playSound[i] == 1) { + float rate; + // Choose a random color and get set to noStroke() + fill(int(random(255)), int(random(255)), int(random(255))); + noStroke(); + // Draw the rect in the positions we defined earlier in posx + rect(posx[i], 50, 128, 260); + // Choose a random index of the octave array + rate = octave[int(random(0, 5))]; + // Play the soundfile from the array with the respective rate and loop set to false + file[i].play(rate, 1.0); + } + + // Renew the indexes of playSound so that at the next event the order is different and randomized. 
+ playSound[i] = int(random(0, 2)); + } + + // Create a new triggertime in the future, with a random offset between 200 and 1000 milliseconds + trigger = millis() + int(random(200, 1000)); + } +} diff --git a/examples/Soundfile/Sampler/data/1.aif b/examples/Soundfile/Sampler/data/1.aif new file mode 100644 index 0000000..fd65b31 Binary files /dev/null and b/examples/Soundfile/Sampler/data/1.aif differ diff --git a/examples/Soundfile/Sampler/data/2.aif b/examples/Soundfile/Sampler/data/2.aif new file mode 100644 index 0000000..742301e Binary files /dev/null and b/examples/Soundfile/Sampler/data/2.aif differ diff --git a/examples/Soundfile/Sampler/data/3.aif b/examples/Soundfile/Sampler/data/3.aif new file mode 100644 index 0000000..7b8da50 Binary files /dev/null and b/examples/Soundfile/Sampler/data/3.aif differ diff --git a/examples/Soundfile/Sampler/data/4.aif b/examples/Soundfile/Sampler/data/4.aif new file mode 100644 index 0000000..7884a34 Binary files /dev/null and b/examples/Soundfile/Sampler/data/4.aif differ diff --git a/examples/Soundfile/Sampler/data/5.aif b/examples/Soundfile/Sampler/data/5.aif new file mode 100644 index 0000000..5664dad Binary files /dev/null and b/examples/Soundfile/Sampler/data/5.aif differ diff --git a/examples/Soundfile/SimplePlayback/SimplePlayback.pde b/examples/Soundfile/SimplePlayback/SimplePlayback.pde new file mode 100644 index 0000000..1266f32 --- /dev/null +++ b/examples/Soundfile/SimplePlayback/SimplePlayback.pde @@ -0,0 +1,41 @@ +/** + * This is a simple sound file player. Use the mouse position to control playback + * speed, amplitude and stereo panning. 
+ */ + +import processing.sound.*; + +SoundFile soundfile; + +void setup() { + size(640, 360); + background(255); + + // Load a soundfile + soundfile = new SoundFile(this, "vibraphon.aiff"); + + // These methods return useful infos about the file + println("SFSampleRate= " + soundfile.sampleRate() + " Hz"); + println("SFSamples= " + soundfile.frames() + " samples"); + println("SFDuration= " + soundfile.duration() + " seconds"); + + // Play the file in a loop + soundfile.loop(); +} + + +void draw() { + // Map mouseX from 0.25 to 4.0 for playback rate. 1 equals original playback speed, + // 2 is twice the speed and will sound an octave higher, 0.5 is half the speed and + // will make the file sound one ocative lower. + float playbackSpeed = map(mouseX, 0, width, 0.25, 4.0); + soundfile.rate(playbackSpeed); + + // Map mouseY from 0.2 to 1.0 for amplitude + float amplitude = map(mouseY, 0, width, 0.2, 1.0); + soundfile.amp(amplitude); + + // Map mouseY from -1.0 to 1.0 for left to right panning + float panning = map(mouseY, 0, height, -1.0, 1.0); + soundfile.pan(panning); +} diff --git a/examples/Soundfile/SimplePlayback/data/vibraphon.aiff b/examples/Soundfile/SimplePlayback/data/vibraphon.aiff new file mode 100644 index 0000000..b4e95a3 Binary files /dev/null and b/examples/Soundfile/SimplePlayback/data/vibraphon.aiff differ diff --git a/library.properties b/library.properties new file mode 100644 index 0000000..e736705 --- /dev/null +++ b/library.properties @@ -0,0 +1,11 @@ +name=Sound +category=Sound +authors=The Processing Foundation +url=https://processing.org/reference/libraries/sound/index.html +sentence=Provides a simple way to work with audio. 
+paragraph= +version=9 +prettyVersion=2.0.0 +lastUpdated=0 +minRevision=228 +maxRevision=0 diff --git a/library/.gitignore b/library/.gitignore new file mode 100644 index 0000000..5e7d273 --- /dev/null +++ b/library/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +!.gitignore diff --git a/src/processing/sound/Amplitude.java b/src/processing/sound/Amplitude.java new file mode 100644 index 0000000..53cc6e2 --- /dev/null +++ b/src/processing/sound/Amplitude.java @@ -0,0 +1,65 @@ +package processing.sound; + +import com.jsyn.ports.UnitOutputPort; +import com.jsyn.unitgen.PeakFollower; + +import processing.core.PApplet; + +/** + * This is a volume analyzer. It calculates the root mean square of the + * amplitude of each audio block and returns that value. + * + * @webref sound + */ +public class Amplitude extends Analyzer { + + private PeakFollower follower; + + /** + * @param parent typically use "this" + * @webref sound + */ + public Amplitude(PApplet parent) { + super(parent); + this.follower = new PeakFollower(); + this.follower.halfLife.set(0.1); + } + + protected void removeInput() { + this.follower.input.disconnectAll(); + this.input = null; + } + + protected void setInput(UnitOutputPort input) { + Engine.getEngine().add(this.follower); + this.follower.start(); + this.follower.input.connect(input); + } + + /** + * Queries a value from the analyzer and returns a float between 0. and 1. + * + * @webref sound + * @return amp An amplitude value between 0-1. + **/ + public float analyze() { + // TODO check if input exists, print warning if not + return (float) this.follower.current.getValue(); + } + + + // Below are just duplicated methods from superclasses which are required + // for the online reference to build the corresponding pages. + + /** + * Define the audio input for the analyzer. + * + * @param input + * the input sound source. Can be an oscillator, noise generator, + * SoundFile or AudioIn. 
+ * @webref sound + **/ + public void input(SoundObject input) { + super.input(input); + } +} diff --git a/src/processing/sound/Analyzer.java b/src/processing/sound/Analyzer.java new file mode 100644 index 0000000..2e0b2ce --- /dev/null +++ b/src/processing/sound/Analyzer.java @@ -0,0 +1,47 @@ +package processing.sound; + +import com.jsyn.ports.UnitOutputPort; + +import processing.core.PApplet; + +abstract class Analyzer { + + protected SoundObject input; + + protected Analyzer(PApplet parent) { + Engine.getEngine(parent); + } + + /** + * Define the audio input for the analyzer. + * + * @param input The input sound source + **/ + public void input(SoundObject input) { + if (this.input == input) { + Engine.printWarning("This input was already connected to the analyzer"); + } else { + if (this.input != null) { + if (!this.input.isPlaying()) { + // unit was only analyzed but not playing out loud - remove from synth + Engine.getEngine().remove(this.input.circuit); + } + + this.removeInput(); + } + + this.input = input; + if (!this.input.isPlaying()) { + Engine.getEngine().add(input.circuit); + } + + this.setInput(input.circuit.output.output); + } + } + + // remove the current input + protected abstract void removeInput(); + + // connect sound source in subclass AND add analyser unit to Engine + protected abstract void setInput(UnitOutputPort input); +} diff --git a/src/processing/sound/AudioDevice.java b/src/processing/sound/AudioDevice.java new file mode 100644 index 0000000..74f9447 --- /dev/null +++ b/src/processing/sound/AudioDevice.java @@ -0,0 +1,16 @@ +package processing.sound; + +import processing.core.PApplet; + +/** + * Old Sound library configuration class, deprecated. Have a look at the Sound class instead. + * @deprecated + * @see Sound + */ +public class AudioDevice { + public AudioDevice(PApplet theParent, int sampleRate, int bufferSize) { + Engine.printWarning("the AudioDevice class is deprecated and will be removed in future versions of the library. 
For configuration, please have a look at the new Sound class instead."); + // bufferSize is ignored - the parameter was necessary for the original library's FFT to work + new Sound(theParent).sampleRate(sampleRate); + } +} diff --git a/src/processing/sound/AudioIn.java b/src/processing/sound/AudioIn.java new file mode 100644 index 0000000..69872e2 --- /dev/null +++ b/src/processing/sound/AudioIn.java @@ -0,0 +1,128 @@ +package processing.sound; + +import com.jsyn.unitgen.ChannelIn; +import com.jsyn.unitgen.Multiply; + +import processing.core.PApplet; + +/** + * AudioIn lets you grab the audio input from your sound card. + * + * @webref sound + **/ +public class AudioIn extends SoundObject { + + // ChannelIn for mono, LineIn for stereo + private ChannelIn input; + + // unlike the oscillator classes, ChannelIn does not have an amplitude + // port, so we need to control the amplitude via an extra multiplier unit + private Multiply multiplier; + + public AudioIn(PApplet parent) { + this(parent, 0); + } + + /** + * @param parent + * typically use "this" + * @param in + * input channel number (optional, default 0) + */ + public AudioIn(PApplet parent, int in) { + super(parent); + this.input = new ChannelIn(in); + this.multiplier = new Multiply(); + this.multiplier.inputA.connect(this.input.output); + this.amplitude = this.multiplier.inputB; + // set default amplitude + this.multiplier.inputB.set(1.0); + + this.circuit = new JSynCircuit(this.multiplier.output); + this.circuit.add(this.input); + } + + public void play() { + super.play(); + } + + public void play(float amp) { + this.amp(amp); + this.play(); + } + + public void play(float amp, float add) { + this.add(add); + this.play(amp); + } + + /** + * Start capturing the input stream and route it to the audio output + * + * @param amp + * the volume to grab the input at as a value from 0.0 (complete + * silence) to 1.0 (full volume) + * @param add + * offset the audio input by the given value + * @param pos + * pan the 
audio input in a stereo panorama. Allowed values are + * between -1.0 (left) and 1.0 (right) + * @webref sound + **/ + public void play(float amp, float add, float pos) { + this.set(amp, add, pos); + this.play(); + } + + public void start() { + Engine.getEngine().add(this.circuit); + } + + public void start(float amp) { + this.amp(amp); + this.start(); + } + + public void start(float amp, float add) { + this.add(add); + this.start(amp); + } + + /** + * Start the input stream without routing it to the audio output. This is useful + * if you only want to perform audio analysis based on the microphone input. + * + * @param amp + * the volume to grab the input at as a value from 0.0 (complete + * silence) to 1.0 (full volume) + * @param add + * offset the audio input by the given value + * @param pos + * pan the audio input in a stereo panorama. Allowed values are + * between -1.0 (left) and 1.0 (right) + * @webref sound + */ + public void start(float amp, float add, float pos) { + this.set(amp, add, pos); + this.start(); + } + + /** + * Sets amplitude, add and pan position with one method. + * + * @webref sound + * @param amp + * the volume to grab the input at as a value from 0.0 (complete + * silence) to 1.0 (full volume) + * @param add + * offset the audio input by the given value + * @param pos + * pan the audio input in a stereo panorama. 
Allowed values are + * between -1.0 (left) and 1.0 (right) + **/ + public void set(float amp, float add, float pos) { + this.amp(amp); + this.add(add); + this.pan(pos); + } +} diff --git a/src/processing/sound/AudioSample.java b/src/processing/sound/AudioSample.java new file mode 100644 index 0000000..5b09d2e --- /dev/null +++ b/src/processing/sound/AudioSample.java @@ -0,0 +1,626 @@ +package processing.sound; + +import com.jsyn.data.FloatSample; +import com.jsyn.unitgen.VariableRateDataReader; +import com.jsyn.unitgen.VariableRateMonoReader; +import com.jsyn.unitgen.VariableRateStereoReader; + +import processing.core.PApplet; + +/** + * This class allows you low-level access to an audio buffer to create, access, + * manipulate and play back sound samples. + * + * If you want to pre-load your audio sample with an audio file from disk you + * can do so using the SoundFile subclass. + * + * @see SoundFile + * @webref sound + */ +public class AudioSample extends SoundObject { + + protected FloatSample sample; + protected VariableRateDataReader player; + + protected int startFrame = 0; + + public AudioSample(PApplet parent, int frames) { + this(parent, frames, false); + } + + public AudioSample(PApplet parent, int frames, boolean stereo) { + this(parent, frames, stereo, 44100); // read current framerate from Engine instead? + } + + /** + * Allocate a new audiosample buffer with the given number of frames. + * + * @param parent + * typically use "this" + * @param frames + * the desired number of frames for this audiosample + * @param frameRate + * the underlying frame rate of the sample (default: 44100) + * @param stereo + * whether to treat the audiosample as 2-channel (stereo) or not + * (default: false) + * @webref sound + */ + public AudioSample(PApplet parent, int frames, boolean stereo, int frameRate) { + super(parent); + this.sample = new FloatSample(frames, stereo ? 
2 : 1); + this.sample.setFrameRate(frameRate); + this.initiatePlayer(); + } + + // TODO add another set of constructors: AudioSample(PApplet parent, float + // duration)? + // risk of accidental overloading through int/float, but could be interesting.. + + /** + * @param data + * an array of float values to be used as this audiosample's sound + * data. The audiosample will consequently have as many frames as the + * length of the given array. + * @webref sound + */ + public AudioSample(PApplet parent, float[] data) { + this(parent, data, false); + } + + public AudioSample(PApplet parent, float[] data, boolean stereo) { + this(parent, data, stereo, 44100); // read current framerate from Engine instead? + } + + public AudioSample(PApplet parent, float[] data, int frameRate) { + this(parent, data, false, frameRate); + } + + public AudioSample(PApplet parent, float[] data, boolean stereo, int frameRate) { + super(parent); + this.sample = new FloatSample(data, stereo ? 2 : 1); + this.sample.setFrameRate(frameRate); + this.initiatePlayer(); + } + + // called by subclasses who initialise their own sample object + protected AudioSample(PApplet parent) { + super(parent); + } + + // private constructor for cloning (see getUnusedPlayer() method below) + protected AudioSample(AudioSample original) { + super(null); + this.sample = original.sample; + this.initiatePlayer(); + this.player.amplitude.set(original.player.amplitude.get()); + this.player.rate.set(original.player.rate.get()); + this.startFrame = original.startFrame; + } + + // should be called by the constructor after the sample object has been set + protected void initiatePlayer() { + if (this.channels() == 2) { + this.player = new VariableRateStereoReader(); + } else { + this.player = new VariableRateMonoReader(); + } + + // needs to be set explicitly + this.player.rate.set(this.sampleRate()); + this.circuit = new JSynCircuit(this.player.output); + this.amplitude = this.player.amplitude; + + // unlike the Oscillator 
and Noise classes, the sample player units can + // always stay connected to the JSyn synths, since they make no noise + // as long as their dataQueue is empty + super.play(); // doesn't actually start playback, just adds the (silent) units + } + + /** + * Change the amplitude/volume of this audiosample. + * + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. + * @webref sound + **/ + public void amp(float amp) { + if (Engine.checkAmp(amp)) { + this.player.amplitude.set(amp); + } + } + + /** + * Returns the number of channels in the audiosample. + * + * @return the number of channels in the audiosample (1 for mono, 2 for stereo) + * @webref sound + **/ + public int channels() { + return this.sample.getChannelsPerFrame(); + } + + /** + * Cues the playhead to a fixed position in the audiosample. + * + * @param time + * position in the audiosample that the next playback should start + * from, in seconds. + * @webref sound + **/ + public void cue(float time) { + this.setStartTime(time); + } + + /** + * Cues the playhead to a fixed position in the audiosample. + * + * @webref sound + * @param frameNumber + * frame number to start playback from. + **/ + public void cueFrame(int frameNumber) { + this.setStartFrame(frameNumber); + } + + /** + * Returns the duration of the audiosample in seconds. + * + * @webref sound + * @return The duration of the audiosample in seconds. + **/ + public float duration() { + return (float) (this.frames() / this.sample.getFrameRate()); + } + + /** + * Returns the number of frames of the audiosample. + * + * @webref sound + * @return The number of frames of the audiosample. 
+ * @see duration() + **/ + public int frames() { + return this.sample.getNumFrames(); + } + + public void resize(int frames) { + this.resize(frames, false); + } + + private void setStartFrame(int frameNumber) { + if (this.checkStartFrame(frameNumber)) { + this.startFrame = frameNumber; + } + } + + private boolean setStartTime(float time) { + if (time < 0) { + Engine.printError("absolute position can't be < 0"); + return false; + } + int startFrame = Math.round(this.sampleRate() * time); + if (startFrame >= this.frames()) { + Engine.printError("can't cue past of end of sample (total duration is " + this.duration() + "s)"); + return false; + } + this.startFrame = startFrame; + return true; + } + + /** + * Jump to a specific position in the audiosample while continuing to play. + * + * @param time + * position to jump to, in seconds. + * @see cue + * @see play + * @webref sound + **/ + public void jump(float time) { + if (this.setStartTime(time)) { + this.stop(); + this.play(); // if the file wasn't playing when jump() was called, just start playing it + } + } + + // helper function: when called on a soundfile already running, the original + // library triggered a second (concurrent) playback. 
with JSyn, every data + // reader can only do one playback at a time, so if the present player + // is busy we need to create a new one with the exact same settings and + // trigger it instead (see JSyn's VoiceAllocator class) + protected AudioSample getUnusedPlayer() { + // TODO could implement a more intelligent player allocation pool method here to + // limit the total number of playback voices + if (this.isPlaying()) { + // use private constructor which copies the sample as well as all playback + // settings over + return new AudioSample(this); + } else { + return this; + } + } + + public void loop() { + AudioSample source = this.getUnusedPlayer(); + source.player.dataQueue.queueLoop(source.sample, 0, source.frames() - source.startFrame); + // for improved handling by the user, could return a reference to whichever + // sound file is the source of the newly triggered playback + // return source; + } + + public void loop(float rate) { + this.rate(rate); + this.loop(); + } + + public void loop(float rate, float amp) { + this.rate(rate); + this.amp(amp); + this.loop(); + } + + public void loop(float rate, float pos, float amp) { + this.pan(pos); + this.loop(rate, amp); + } + + /** + * Starts playback which will loop at the end of the sample. + * + * @param rate + * relative playback rate to use. 1 is the original speed. 0.5 is + * half speed and one octave down. 2 is double the speed and one + * octave up. + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). Only works for mono audiosamples! 
+ * @param amp + * the desired playback amplitude of the audiosample as a value from + * 0.0 (complete silence) to 1.0 (full volume) + * @param add + * offset the output of the generator by the given value + * @webref sound + */ + public void loop(float rate, float pos, float amp, float add) { + this.add(add); + this.loop(rate, pos, amp); + } + + /* + * FIXME cueing a position for loops has to be handled differently than for + * simple playback, because passing a startFrame to dataQueue.queueLoop() causes + * repetitions of the loop to only ever be played from that position, instead of + * jumping back to the very beginning of the sample after reaching the end + * + * @param cue position in the audiosample that the loop should start from, in + * seconds. public void loop(float rate, float pos, float amp, float add, float + * cue) { this.cue(cue); this.loop(rate, pos, amp, add); } + */ + + public void play() { + AudioSample source = this.getUnusedPlayer(); + source.player.dataQueue.queue(source.sample, source.startFrame, source.frames() - source.startFrame); + // for improved handling by the user, could return a reference to + // whichever audiosample object is the actual source (i.e. JSyn + // container) of the newly triggered playback + // return source; + } + + public void play(float rate) { + this.rate(rate); + this.play(); + } + + public void play(float rate, float amp) { + this.amp(amp); + this.play(rate); + } + + public void play(float rate, float pos, float amp) { + this.pan(pos); + this.play(rate, amp); + } + + public void play(float rate, float pos, float amp, float add) { + this.add(add); + this.play(rate, pos, amp); + } + + /** + * Starts the playback of the audiosample. Only plays to the end of the + * audiosample once. + * + * @param rate + * relative playback rate to use. 1 is the original speed. 0.5 is + * half speed and one octave down. 2 is double the speed and one + * octave up. 
+ * @param amp + * the desired playback amplitude of the audiosample as a value from + * 0.0 (complete silence) to 1.0 (full volume) + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). Only works for mono audiosamples! + * @param cue + * position in the audiosample that playback should start from, in + * seconds. + * @param add + * offset the output of the generator by the given value + * @webref sound + **/ + public void play(float rate, float pos, float amp, float add, float cue) { + this.cue(cue); + this.play(rate, pos, amp, add); + } + + /** + * Set the playback rate of the audiosample. + * + * @param rate + * Relative playback rate to use. 1 is the original speed. 0.5 is + * half speed and one octave down. 2 is double the speed and one + * octave up. + * @webref sound + **/ + public void rate(float rate) { + if (rate <= 0) { + Engine.printError("rate needs to be positive"); + } else { + // 1.0 = original + this.player.rate.set(this.sampleRate() * rate); + } + } + + /** + * Resizes the underlying buffer of the audiosample to the given number of + * frames. + * + * @param frames + * the desired number of frames for this audiosample + * @param stereo + * whether to treat the audiosample as 2-channel (stereo) or not + * (default: false) + * @webref sound + */ + public void resize(int frames, boolean stereo) { + this.stop(); + this.sample.allocate(frames, stereo ? 2 : 1); + this.initiatePlayer(); // TODO re-initiation might be redundant + } + + /** + * Returns the underlying sample rate of the audiosample. + * + * @webref sound + * @return Returns the underlying sample rate of the audiosample as an int. + **/ + public int sampleRate() { + return (int) Math.round(this.sample.getFrameRate()); + } + + /** + * Move the sound in a stereo panorama. Only works for mono audiosamples! + * + * @webref sound + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). 
+ **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Set multiple parameters at once + * + * @webref sound + * @param rate + * Relative playback rate to use. 1 is the original speed. 0.5 is + * half speed and one octave down. 2 is double the speed and one + * octave up. + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). + * @param amp + * the desired playback amplitude of the audiosample as a value from + * 0.0 (complete silence) to 1.0 (full volume) + * @param add + * offset the output of the generator by the given value + **/ + public void set(float rate, float pos, float amp, float add) { + this.rate(rate); + this.pan(pos); + this.amp(amp); + this.add(add); + } + + /** + * Stops the playback. + * + * @webref sound + **/ + public void stop() { + this.player.dataQueue.clear(); + } + + // new methods go here + + /** + * Get current sound file playback position in seconds. + * + * @return The current position of the sound file playback in seconds + * @webref sound + */ + public float position() { + // TODO progress in sample seconds or current-rate-playback seconds?? + // TODO might have to offset getFrameCount by this.startFrame? + return (this.player.dataQueue.getFrameCount() % this.frames()) / (float) this.sampleRate(); + } + + /** + * Get current sound file playback position in percent. + * + * @return The current position of the sound file playback in percent (a value + * between 0 and 100). + * @webref sound + */ + public float percent() { + // TODO might have to offset getFrameCount by this.startFrame? + return 100f * (this.player.dataQueue.getFrameCount() % this.frames()) / (float) this.frames(); + } + + /** + * Check whether this audiosample is currently playing. + * + * @return `true` if the audiosample is currently playing, `false` if it is not. 
+ * @webref sound + */ + public boolean isPlaying() { + // overrides the SoundObject's default implementation + return this.player.dataQueue.hasMore(); + } + + /** + * Stop the playback of the sample, but cue it to the current position. + * + * @see cue + * @webref sound + */ + public void pause() { + if (this.isPlaying()) { + this.startFrame = (int) this.player.dataQueue.getFrameCount() % this.frames(); + this.stop(); + } else { + Engine.printWarning("audio sample is not currently playing"); + } + } + + protected boolean checkStartFrame(int startFrame) { + return this.checkStartFrame(startFrame, true); + } + + protected boolean checkStartFrame(int startFrame, boolean verbose) { + if (startFrame < 0 || startFrame >= this.frames()) { + if (verbose) { + Engine.printError(Integer.toString(startFrame) + " is not a valid start frame number (has to be in [0," + + Integer.toString(this.frames() - 1) + "]"); + } + return false; + } else { + return true; + } + } + + /** + * Get the current sample data and write it into the given array. + * + * Get the current sample data and write it into the given array. The array has + * to be able to store as many floats as there are frames in this sample. + * + * @param data + * the target array that the read data is written to + */ + public void read(float[] data) { + if (data.length != this.frames()) { + Engine.printWarning( + "the length of the given array does not match the number of frames of this audio sample"); + } + // TODO catch exception and print understandable error message + this.sample.read(data); + } + + /** + * Read some frames of this audio sample into an array. 
+ * + * @param startFrame + * the index of the first frame of the audiosample that should be + * read + * @param startIndex + * the position in the array where the first read frame should be + * written to (typically 0) + * @param numFrames + * the number of frames that should be read (can't be greater than + * data.length - startIndex) + * @webref sound + */ + public void read(int startFrame, float[] data, int startIndex, int numFrames) { + if (this.checkStartFrame(startFrame)) { + if (startFrame + numFrames < this.frames()) { + this.sample.read(startFrame, data, startIndex, numFrames); + } else { + // overflow at end of sample, need to do two partial copies + int firstReadLength = this.frames() - startFrame; + this.sample.read(startFrame, data, startIndex, firstReadLength); + this.sample.read(0, data, startIndex + firstReadLength, numFrames - firstReadLength); + } + } + } + + /** + * @param index + * the index of the single frame of the audiosample that should be + * read and returned as a float + * @return the value of the audio sample at the given frame + */ + public float read(int index) { + // TODO catch exception and print understandable error message + return (float) this.sample.readDouble(index); + } + + /** + * Overwrite the sample with the data from the given array. The array can + * contain up to as many floats as there are frames in this sample. + * + * @param data + * the array from which the sample data should be taken + */ + public void write(float[] data) { + if (data.length != this.frames()) { + Engine.printWarning( + "the length of the given array does not match the number of frames of this audio sample"); + } + this.sample.write(data); + } + + /** + * Write some frames of this audio sample. 
+ * + * @param startFrame + * the index of the first frame of the audiosample that should be + * written to + * @param startIndex + * the position in the array that the first value to write should be + * taken from (typically 0) + * @param numFrames + * the number of frames that should be written (can't be greater than + * data.length - startIndex) + * @webref sound + */ + public void write(int startFrame, float[] data, int startIndex, int numFrames) { + if (this.checkStartFrame(startFrame)) { + if (startFrame + numFrames < this.frames()) { + this.sample.write(startFrame, data, startIndex, numFrames); + } else { + // overflow at end of sample, need to do two partial copies + int firstReadLength = this.frames() - startFrame; + this.sample.write(startFrame, data, startIndex, firstReadLength); + this.sample.write(0, data, startIndex + firstReadLength, numFrames - firstReadLength); + } + + } + } + + /** + * @param index + * the index of the single frame of the audiosample that should be + * set to the given value + * @param value + * the float value that the given audio frame should be set to + */ + public void write(int index, float value) { + if (this.checkStartFrame(startFrame)) { + this.sample.writeDouble(index, value); + } + } +} diff --git a/src/processing/sound/BandPass.java b/src/processing/sound/BandPass.java new file mode 100644 index 0000000..dd0f5a9 --- /dev/null +++ b/src/processing/sound/BandPass.java @@ -0,0 +1,65 @@ +package processing.sound; + +import com.jsyn.unitgen.FilterBandPass; + +import processing.core.PApplet; + +/** + * This is a band pass filter. + * @webref sound + * @param parent PApplet: typically use "this" + **/ +public class BandPass extends Effect { + + public BandPass(PApplet parent) { + super(parent); + } + + @Override + protected FilterBandPass newInstance() { + return new FilterBandPass(); + } + + /** + * Set the bandwidth for the filter. 
+ * @webref sound + * @param freq Bandwidth in Hz + **/ + public void bw(float bw) { + // TODO check filter quality + this.left.Q.set(this.left.frequency.get() / bw); + this.right.Q.set(this.right.frequency.get() / bw); + } + + /** + * Set the cutoff frequency for the filter + * @webref sound + * @param freq Cutoff frequency between 0 and 20000 + **/ + public void freq(float freq) { + this.left.frequency.set(freq); + this.right.frequency.set(freq); + } + + public void process(SoundObject input, float freq) { + this.freq(freq); + this.process(input); + } + + public void process(SoundObject input, float freq, float bw) { + this.freq(freq); + this.bw(bw); + this.process(input); + } + + /** + * Sets frequency and bandwidth of the filter with one method. + * @webref sound + * @param freq Set the frequency + * @param bw Set the bandwidth + **/ + public void set(float freq, float bw) { + this.freq(freq); + this.bw(bw); + } +} diff --git a/src/processing/sound/BrownNoise.java b/src/processing/sound/BrownNoise.java new file mode 100644 index 0000000..4ad0cfa --- /dev/null +++ b/src/processing/sound/BrownNoise.java @@ -0,0 +1,98 @@ +package processing.sound; + +import processing.core.PApplet; + +/** + * This is a brown noise generator. Brown noise has a decrease of 6db per octave. + * @webref sound + * @param parent typically use "this" + **/ +public class BrownNoise extends Noise { + + /** + * @param parent typically use "this" + * @webref sound + */ + public BrownNoise(PApplet parent) { + super(parent, new com.jsyn.unitgen.BrownNoise()); + this.amplitude = this.noise.amplitude; + // explicitly set amplitude to override default (see amp() below) + this.amp(1.0f); + } + + // Below are just duplicated methods from the Noise and SoundObject superclass which + // are required for the reference to build the corresponding pages. 
+ + public void play() { + super.play(); + } + + public void play(float amp) { + super.play(amp); + } + + public void play(float amp, float pos) { + super.play(amp, pos); + } + + /** + * Start the generator + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @param add offset the output of the noise by given value + * @param pos pan the generator in stereo panorama. Allowed values are between -1.0 and 1.0. + * @webref sound + **/ + public void play(float amp, float add, float pos) { + super.play(amp, add, pos); + } + + /** + * Set multiple parameters at once. + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @param add offset the output of the noise by given value + * @param pos pan the generator in stereo panorama. Allowed values are between -1.0 and 1.0. + * @webref sound + **/ + public void set(float amp, float add, float pos) { + super.set(amp, add, pos); + } + + /** + * Change the amplitude/volume of this sound. + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @webref sound + **/ + public void amp(float amp) { + // the JSyn Brownian noise generator can drift to exceed one, so tone down the volume a bit + super.amp(amp / 4); + } + + /** + * Offset the output of this generator by a fixed value + * @param add offset the output of the generator by the given value + * @webref sound + **/ + public void add(float add) { + super.add(add); + } + + /** + * Move the sound in a stereo panorama. + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). 
+ * @webref sound + **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Stop the generator + * @webref sound + **/ + public void stop() { + super.stop(); + } + +} diff --git a/src/processing/sound/Delay.java b/src/processing/sound/Delay.java new file mode 100644 index 0000000..277f3b2 --- /dev/null +++ b/src/processing/sound/Delay.java @@ -0,0 +1,83 @@ +package processing.sound; + +import processing.core.PApplet; + +/** + * This is a simple delay effect. + * + * @webref sound + * @param parent + * PApplet: typically use "this" + **/ +public class Delay extends Effect { + + public Delay(PApplet parent) { + super(parent); + } + + @Override + protected JSynDelay newInstance() { + return new JSynDelay(); + } + + /** + * Start the delay effect + * + * @webref sound + * @param input + * Input audio source + * @param maxDelayTime Maximum delay time in seconds. + * @param delayTime Delay time to use when starting to process, in seconds. + **/ + public void process(SoundObject input, float maxDelayTime, float delayTime) { + this.left.setMaxDelayTime(maxDelayTime); + this.right.setMaxDelayTime(maxDelayTime); + this.time(delayTime); + // connect input in superclass method + super.process(input); + } + + public void process(SoundObject input, float maxDelayTime) { + // set delayTime to maximum + this.process(input, maxDelayTime, maxDelayTime); + } + + /** + * Set delay time and feedback values at once + * + * @webref sound + * @param delayTime + * Maximum delay time in seconds. + * @param feedback + * Feedback amount as a float + **/ + public void set(float delayTime, float feedback) { + this.time(delayTime); + this.feedback(feedback); + } + + /** + * Changes the delay time of the effect. + * + * @webref sound + * @param delayTime + * Delay time in seconds. 
+ **/ + public void time(float delayTime) { + // TODO check that delayTime is not greater than effect buffer + this.left.setDelayTime(delayTime); + this.right.setDelayTime(delayTime); + } + + /** + * Change the feedback of the delay effect. + * + * @webref sound + * @param feedback + * Feedback amount as a float. + **/ + public void feedback(float feedback) { + this.left.setFeedback(feedback); + this.right.setFeedback(feedback); + } +} diff --git a/src/processing/sound/Effect.java b/src/processing/sound/Effect.java new file mode 100644 index 0000000..311e9f3 --- /dev/null +++ b/src/processing/sound/Effect.java @@ -0,0 +1,82 @@ +package processing.sound; + +import java.util.HashSet; +import java.util.Set; + +import com.jsyn.ports.UnitOutputPort; +import com.jsyn.unitgen.TwoInDualOut; +import com.jsyn.unitgen.UnitFilter; + +import processing.core.PApplet; + +/** + * For advanced users: common superclass of all effect types + */ +// helper class for applying the same effect (with the same parameters) on two channels. +// a basic design question is what to do if the same effect is applied to several different +// input sources -- do we consider them all to feed into the same effect +// unit(s), or should we instantiate new units every time process() is called? +// presently all input sources get connected to the same two left/right effect +// units, where their input signals are automatically added together. calling +// stop() on the effect also disconnects all input sources before removing the +// effect from the synthesis. 
+public abstract class Effect { + + // store references to all input sources + protected Set inputs = new HashSet(); + + protected EffectType left; + protected EffectType right; + protected UnitOutputPort output; + + // invoked by subclasses + protected Effect(PApplet parent) { + Engine.getEngine(parent); + this.left = this.newInstance(); + this.right = this.newInstance(); + TwoInDualOut merge = new TwoInDualOut(); + merge.inputA.connect(this.left.output); + merge.inputB.connect(this.right.output); + this.output = merge.output; + } + + protected abstract EffectType newInstance(); + + /** + * Get information on whether this effect is currently active. + * @return true if this effect is currently processing at least one sound source + */ + public boolean isProcessing() { + return ! this.inputs.isEmpty(); + } + + /** + * Start the Filter + * @webref sound + * @param input Input sound source + **/ + public void process(SoundObject input) { + if (this.inputs.add(input)) { + // attach effect to circuit until removed with effect.stop() + input.setEffect(this); + } else { + Engine.printWarning("the effect is already processing this sound source"); + } + } + + /** + * Stop the effect. 
+ */ + public void stop() { + if (this.inputs.isEmpty()) { + Engine.printWarning("this effect is not currently processing any signals."); + } else { + for (SoundObject o : this.inputs) { + o.removeEffect(this); + } + this.inputs.clear(); + Engine.getEngine().remove(this.left); + Engine.getEngine().remove(this.right); + } + } +} diff --git a/src/processing/sound/Engine.java b/src/processing/sound/Engine.java new file mode 100644 index 0000000..3ef747b --- /dev/null +++ b/src/processing/sound/Engine.java @@ -0,0 +1,199 @@ +package processing.sound; + +import com.jsyn.JSyn; +import com.jsyn.Synthesizer; +import com.jsyn.devices.AudioDeviceFactory; +import com.jsyn.devices.AudioDeviceManager; +import com.jsyn.unitgen.LineOut; +import com.jsyn.unitgen.Multiply; +import com.jsyn.unitgen.UnitGenerator; +import com.jsyn.unitgen.UnitSource; + +import processing.core.PApplet; + +class Engine { + + private static AudioDeviceManager audioManager; + private static Engine singleton; + + protected Synthesizer synth; + // the stereo lineout + private LineOut lineOut; + // two multipliers for controlling the global output volume + private Multiply leftOut; + private Multiply rightOut; + + private int sampleRate = 44100; + + // set in constructor + private int inputDevice; + private int outputDevice; + + protected static Engine getEngine(PApplet parent) { + if (Engine.singleton == null) { + Engine.singleton = new Engine(parent); + } + return Engine.singleton; + } + + protected static Engine getEngine() { + return Engine.singleton; + } + + private Engine(PApplet theParent) { + // only call initalisation steps if not already initialised + if (Engine.singleton != null) { + return; + } + + // create and start the synthesizer, and set this object as the singleton. 
+ this.synth = JSyn.createSynthesizer(Engine.getAudioManager()); + this.inputDevice = Engine.getAudioManager().getDefaultInputDeviceID(); + this.outputDevice = Engine.getAudioManager().getDefaultOutputDeviceID(); + + this.lineOut = new LineOut(); // stereo lineout by default + this.synth.add(lineOut); + this.lineOut.start(); + + this.leftOut = new Multiply(); + this.rightOut = new Multiply(); + this.setVolume(1.0f); + this.leftOut.output.connect(0, this.lineOut.input, 0); + this.rightOut.output.connect(0, this.lineOut.input, 1); + this.synth.add(this.leftOut); + this.synth.add(this.rightOut); + + this.startSynth(); + Engine.singleton = this; + + // register Processing library callback methods + Object callback = new Callback(); + theParent.registerMethod("dispose", callback); + // Android only + theParent.registerMethod("pause", callback); + theParent.registerMethod("resume", callback); + } + + protected void startSynth() { + if (this.synth.isRunning()) { + this.synth.stop(); + } + + // TODO do some more user-friendly checks based on getMaxInput/OutputChannels + this.synth.start(this.sampleRate, + this.inputDevice, Engine.getAudioManager().getMaxInputChannels(this.inputDevice), + // TODO limit number of output channels to 2? 
+ this.outputDevice, Engine.getAudioManager().getMaxOutputChannels(this.outputDevice)); + } + + protected static AudioDeviceManager getAudioManager() { + if (Engine.audioManager == null) { + try { + Class.forName("javax.sound.sampled.AudioSystem"); + Engine.audioManager = AudioDeviceFactory.createAudioDeviceManager(); + } catch (ClassNotFoundException e) { + Engine.audioManager = new JSynAndroidAudioDeviceManager(); + } + } + return Engine.audioManager; + } + + protected void setSampleRate(int sampleRate) { + Engine.singleton.sampleRate = sampleRate; + Engine.singleton.startSynth(); + } + + protected void selectInputDevice(int deviceId) { + Engine.singleton.inputDevice = deviceId; + Engine.singleton.startSynth(); + } + + protected void selectOutputDevice(int deviceId) { + Engine.singleton.outputDevice = deviceId; + Engine.singleton.startSynth(); + } + + protected void setVolume(double volume) { + if (Engine.checkRange(volume, "volume")) { + this.leftOut.inputB.set(volume); + this.rightOut.inputB.set(volume); + } + } + + protected int getSampleRate() { + return this.synth.getFrameRate(); + } + + protected void add(UnitGenerator generator) { + if (generator.getSynthesisEngine() == null) { + this.synth.add(generator); + } + } + + protected void remove(UnitGenerator generator) { + this.synth.remove(generator); + } + + protected void play(UnitSource source) { + // TODO check if unit is already connected + source.getOutput().connect(0, this.leftOut.inputA, 0); + source.getOutput().connect(1, this.rightOut.inputA, 0); + } + + protected void stop(UnitSource source) { + source.getOutput().disconnect(0, this.leftOut.inputA, 0); + source.getOutput().disconnect(1, this.rightOut.inputA, 0); + } + + protected static boolean checkAmp(float amp) { + if (amp < -1 || amp > 1) { + Engine.printError("amplitude has to be in [-1,1]"); + return false; + } else if (amp == 0.0) { + Engine.printWarning("an amplitude of 0 means this sound is not audible now"); + } + return true; + } + + 
protected static boolean checkPan(float pan) { + if (pan < -1 || pan > 1) { + Engine.printError("pan has to be in [-1,1]"); + return false; + } + return true; + } + + protected static boolean checkRange(double value, String name) { + if (value < 0 || value > 1) { + Engine.printError(name + " parameter has to be between 0 and 1 (inclusive)"); + return false; + } + return true; + } + + protected static void printWarning(String message) { + PApplet.println("Sound library warning: " + message); + } + + protected static void printError(String message) { + PApplet.println("Sound library error: " + message); + } + + /** + * Internal helper class for Processing library callbacks + */ + public class Callback { + public void dispose() { + lineOut.stop(); + synth.stop(); + } + + public void pause() { + // TODO + } + + public void resume() { + // TODO + } + } +} diff --git a/src/processing/sound/Env.java b/src/processing/sound/Env.java new file mode 100644 index 0000000..9e3f913 --- /dev/null +++ b/src/processing/sound/Env.java @@ -0,0 +1,55 @@ +package processing.sound; + +import com.jsyn.data.SegmentedEnvelope; +import com.jsyn.unitgen.VariableRateMonoReader; +import com.softsynth.shared.time.TimeStamp; + +import processing.core.PApplet; + +/** +* This is an ASR (Attack Sustain Release) Envelope Generator +* @webref sound +* @param parent PApplet: typically use "this" +**/ +public class Env { + + public Env(PApplet parent) { + Engine.getEngine(parent); + } + + /** + * Triggers the envelope + * @webref sound + * @param input Input sound source + * @param attackTime Attack time value as a float. + * @param sustainTime Sustain time value as a float. + * @param sustain Sustain level value as a float. + * @param releaseTime Release time value as a float. 
+ **/ + public void play(SoundObject input, float attackTime, float sustainTime, float sustainLevel, float releaseTime) { + SegmentedEnvelope env = new SegmentedEnvelope(new double[] { + attackTime, 1.0, // attack + // gradual decay towards sustain level across entire sustain period + sustainTime, sustainLevel, // sustain + releaseTime, 0.0 }); + + // TODO re-use player from fixed or dynamic pool + VariableRateMonoReader player = new VariableRateMonoReader(); + + // this would make sense to me but breaks the envelope for some reason +// input.amplitude.disconnectAll(); + player.output.connect(input.amplitude); + Engine.getEngine().add(player); + + player.dataQueue.queue(env); + if (!input.isPlaying()) { + input.play(); + } + + // disconnect player from amplitude port after finished and set amplitude to 0 + TimeStamp envFinished = Engine.getEngine().synth.createTimeStamp().makeRelative(attackTime + sustainTime + releaseTime); + player.output.disconnect(0, input.amplitude, 0, envFinished); + // TODO better: trigger unit stop() so that isPlaying() is set to false as well? + input.amplitude.set(0, envFinished); + } +} diff --git a/src/processing/sound/FFT.java b/src/processing/sound/FFT.java new file mode 100644 index 0000000..7ae7700 --- /dev/null +++ b/src/processing/sound/FFT.java @@ -0,0 +1,100 @@ +package processing.sound; + +import com.jsyn.ports.UnitOutputPort; + +import processing.core.PApplet; + +/** + * This is a Fast Fourier Transform (FFT) analyzer. It calculates the normalized + * power spectrum of an audio stream the moment it is queried with the analyze() + * method. + * + * @webref sound + **/ +public class FFT extends Analyzer { + + public float[] spectrum; + + private JSynFFT fft; + + public FFT(PApplet parent) { + this(parent, 512); + } + + /** + * @param parent + * typically use "this" + * @param bands + * number of frequency bands for the FFT as an integer (default 512). + * This parameter needs to be a power of 2 (e.g. 16, 32, 64, 128, + * ...). 
+ * @webref sound + */ + public FFT(PApplet parent, int bands) { + super(parent); + if (bands < 0 || Integer.bitCount(bands) != 1) { + // TODO throw RuntimeException? + Engine.printError("number of FFT bands needs to be a power of 2"); + } else { + // FFT buffer size is twice the number of frequency bands + this.fft = new JSynFFT(2 * bands); + this.spectrum = new float[bands]; + } + } + + protected void removeInput() { + this.fft.input.disconnectAll(); + this.input = null; + } + + protected void setInput(UnitOutputPort input) { + // superclass makes sure that input unit is actually playing, just connect it + Engine.getEngine().add(this.fft); + this.fft.input.connect(input); + this.fft.start(); + } + + /** + * Calculates the current frequency spectrum from the input source, writes it + * into this FFT's `spectrum` array, and returns it. + * + * @return the current frequency spectrum of the input source. The array has as + * many elements as this FFT analyzer's number of frequency bands + */ + public float[] analyze() { + return this.analyze(this.spectrum); + } + + /** + * Calculates the current frequency spectrum from the input source. + * + * @param value + * an array with as many elements as this FFT analyzer's number of + * frequency bands + * @return The current frequency spectrum of the input source. The array has as + * many elements as this FFT analyzer's number of frequency bands. + * @webref sound + **/ + public float[] analyze(float[] value) { + if (this.input == null) { + Engine.printWarning("this FFT has no sound source connected to it, nothing to analyze"); + } + this.fft.calculateMagnitudes(value); + return value; + } + + // Below are just duplicated methods from superclasses which are required + // for the online reference to build the corresponding pages. + + /** + * Define the audio input for the analyzer. + * + * @param input + * the input sound source. Can be an oscillator, noise generator, + * SoundFile or AudioIn. 
+ * @webref sound + **/ + public void input(SoundObject input) { + super.input(input); + } +} diff --git a/src/processing/sound/HighPass.java b/src/processing/sound/HighPass.java new file mode 100644 index 0000000..1905219 --- /dev/null +++ b/src/processing/sound/HighPass.java @@ -0,0 +1,37 @@ +package processing.sound; + +import com.jsyn.unitgen.FilterHighPass; + +import processing.core.PApplet; + +/** + * This is a high pass filter + * @sound webref + * @param parent PApplet: typically use "this" + **/ +public class HighPass extends Effect { + + public HighPass(PApplet parent) { + super(parent); + } + + @Override + protected FilterHighPass newInstance() { + return new FilterHighPass(); + } + + /** + * Set the cut off frequency for the filter + * @webref sound + * @param freq the cutoff frequency in Hertz + */ + public void freq(float freq) { + this.left.frequency.set(freq); + this.right.frequency.set(freq); + } + + public void process(SoundObject input, float freq) { + this.freq(freq); + this.process(input); + } +} diff --git a/src/processing/sound/JSynAllPass.java b/src/processing/sound/JSynAllPass.java new file mode 100644 index 0000000..1103e09 --- /dev/null +++ b/src/processing/sound/JSynAllPass.java @@ -0,0 +1,36 @@ +package processing.sound; + +import com.jsyn.ports.UnitInputPort; +import com.jsyn.ports.UnitOutputPort; +import com.jsyn.unitgen.Circuit; +import com.jsyn.unitgen.Delay; +import com.jsyn.unitgen.MultiplyAdd; + +// https://ccrma.stanford.edu/~jos/pasp/Allpass_Two_Combs.html +// y = b0*x + x(-N) - b0*y(-N) +class JSynAllPass extends Circuit { + + protected UnitInputPort input; + protected UnitOutputPort output; + + public JSynAllPass(double g, int N) { + MultiplyAdd pre = new MultiplyAdd(); + this.add(pre); + this.input = pre.inputC; + + Delay delay = new Delay(); + delay.allocate(N); + this.add(delay); + delay.input.connect(pre.output); + pre.inputA.set(-g); + pre.inputB.connect(delay.output); + + MultiplyAdd post = new MultiplyAdd(); + 
post.inputA.set(g); + post.inputB.connect(pre.output); + post.inputC.connect(delay.output); + + this.output = post.output; + } + +} diff --git a/src/processing/sound/JSynAndroidAudioDeviceManager.java b/src/processing/sound/JSynAndroidAudioDeviceManager.java new file mode 100644 index 0000000..a2987d0 --- /dev/null +++ b/src/processing/sound/JSynAndroidAudioDeviceManager.java @@ -0,0 +1,220 @@ +package processing.sound; + +import java.util.ArrayList; + +import com.jsyn.devices.AudioDeviceInputStream; +import com.jsyn.devices.AudioDeviceManager; +import com.jsyn.devices.AudioDeviceOutputStream; + +import android.media.AudioAttributes; +import android.media.AudioFormat; +import android.media.AudioTrack; + +class JSynAndroidAudioDeviceManager implements AudioDeviceManager { + + ArrayList deviceRecords; + private double suggestedOutputLatency = 0.1; + private double suggestedInputLatency = 0.1; + private int defaultInputDeviceID = 0; + private int defaultOutputDeviceID = 0; + + public JSynAndroidAudioDeviceManager() { + this.deviceRecords = new ArrayList(); + DeviceInfo deviceInfo = new DeviceInfo(); + + deviceInfo.name = "Android Audio"; + deviceInfo.maxInputs = 0; + deviceInfo.maxOutputs = 2; + this.deviceRecords.add(deviceInfo); + } + + public String getName() { + return "JSyn Android Audio for Processing"; + } + + class DeviceInfo { + String name; + int maxInputs; + int maxOutputs; + + public String toString() { + return "AudioDevice: " + name + ", max in = " + maxInputs + ", max out = " + maxOutputs; + } + } + + private class AndroidAudioStream { + short[] shortBuffer; + int frameRate; + int samplesPerFrame; + AudioTrack audioTrack; + int minBufferSize; + int bufferSize; + + public AndroidAudioStream(int deviceID, int frameRate, int samplesPerFrame) { + this.frameRate = frameRate; + this.samplesPerFrame = samplesPerFrame; + } + + public double getLatency() { + int numFrames = this.bufferSize / this.samplesPerFrame; + return ((double) numFrames) / this.frameRate; + 
} + + } + + private class AndroidAudioOutputStream extends AndroidAudioStream implements AudioDeviceOutputStream { + public AndroidAudioOutputStream(int deviceID, int frameRate, int samplesPerFrame) { + super(deviceID, frameRate, samplesPerFrame); + } + + public void start() { + this.minBufferSize = AudioTrack.getMinBufferSize(this.frameRate, AudioFormat.CHANNEL_OUT_STEREO, + AudioFormat.ENCODING_PCM_16BIT); + this.bufferSize = (3 * (this.minBufferSize / 2)) & ~3; + this.audioTrack = new AudioTrack.Builder() + .setAudioAttributes(new AudioAttributes.Builder() + .setUsage(AudioAttributes.USAGE_MEDIA) + .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC) + .build()) + .setAudioFormat(new AudioFormat.Builder() + .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO) + .setEncoding(AudioFormat.ENCODING_PCM_16BIT) + .setSampleRate(this.frameRate) + .build()) + .setBufferSizeInBytes(this.bufferSize) + .setTransferMode(AudioTrack.MODE_STREAM) + .build(); + this.audioTrack.play(); + } + + public void write(double value) { + double[] buffer = new double[1]; + buffer[0] = value; + this.write(buffer, 0, 1); + } + + public void write(double[] buffer) { + this.write(buffer, 0, buffer.length); + } + + public void write(double[] buffer, int start, int count) { + if ((this.shortBuffer == null) || (this.shortBuffer.length < count)) { + this.shortBuffer = new short[count]; + } + + for (int i = 0; i < count; i++) { + int sample = (int) (32767.0 * buffer[i + start]); + if (sample > Short.MAX_VALUE) { + sample = Short.MAX_VALUE; + } else if (sample < Short.MIN_VALUE) { + sample = Short.MIN_VALUE; + } + this.shortBuffer[i] = (short) sample; + } + + this.audioTrack.write(this.shortBuffer, 0, count); + } + + public void stop() { + this.audioTrack.stop(); + this.audioTrack.release(); + } + + public void close() { + } + + } + + private class AndroidAudioInputStream extends AndroidAudioStream implements AudioDeviceInputStream { + + public AndroidAudioInputStream(int deviceID, int frameRate, int 
samplesPerFrame) { + super(deviceID, frameRate, samplesPerFrame); + } + + public void start() { + } + + public double read() { + double[] buffer = new double[1]; + this.read(buffer, 0, 1); + return buffer[0]; + } + + public int read(double[] buffer) { + return this.read(buffer, 0, buffer.length); + } + + public int read(double[] buffer, int start, int count) { + return 0; + } + + public void stop() { + } + + public int available() { + return 0; + } + + public void close() { + } + } + + public AudioDeviceOutputStream createOutputStream(int deviceID, int frameRate, int samplesPerFrame) { + return new AndroidAudioOutputStream(deviceID, frameRate, samplesPerFrame); + } + + public AudioDeviceInputStream createInputStream(int deviceID, int frameRate, int samplesPerFrame) { + return new AndroidAudioInputStream(deviceID, frameRate, samplesPerFrame); + } + + public double getDefaultHighInputLatency(int deviceID) { + return 0.3; + } + + public double getDefaultHighOutputLatency(int deviceID) { + return 0.3; + } + + public int getDefaultInputDeviceID() { + return this.defaultInputDeviceID; + } + + public int getDefaultOutputDeviceID() { + return this.defaultOutputDeviceID; + } + + public double getDefaultLowInputLatency(int deviceID) { + return 0.1; + } + + public double getDefaultLowOutputLatency(int deviceID) { + return 0.1; + } + + public int getDeviceCount() { + return this.deviceRecords.size(); + } + + public String getDeviceName(int deviceID) { + return this.deviceRecords.get(deviceID).name; + } + + public int getMaxInputChannels(int deviceID) { + return this.deviceRecords.get(deviceID).maxInputs; + } + + public int getMaxOutputChannels(int deviceID) { + return this.deviceRecords.get(deviceID).maxOutputs; + } + + public int setSuggestedOutputLatency(double latency) { + this.suggestedOutputLatency = latency; + return 0; + } + + public int setSuggestedInputLatency(double latency) { + this.suggestedInputLatency = latency; + return 0; + } + +} diff --git 
a/src/processing/sound/JSynCircuit.java b/src/processing/sound/JSynCircuit.java new file mode 100644 index 0000000..d2eee98 --- /dev/null +++ b/src/processing/sound/JSynCircuit.java @@ -0,0 +1,72 @@ +package processing.sound; + +import com.jsyn.ports.UnitOutputPort; +import com.jsyn.unitgen.Circuit; +import com.jsyn.unitgen.TwoInDualOut; +import com.jsyn.unitgen.UnitFilter; +import com.jsyn.unitgen.UnitGenerator; +import com.jsyn.unitgen.UnitSource; + +/** + * Helper class wrapping a source unit generator, add/pan processor and effect into one circuit. + */ +class JSynCircuit extends Circuit implements UnitSource { + + private UnitGenerator source; + protected JSynProcessor processor; + protected UnitOutputPort preEffect; + protected Effect effect; + protected TwoInDualOut output; + + public JSynCircuit(UnitOutputPort input) { + this.output = new TwoInDualOut(); + this.add(this.output); + + this.source = input.getUnitGenerator(); + this.add(this.source); + + if (input.getNumParts() == 2) { + // stereo source - no need for pan, so bypass processor + this.preEffect = input; + } else { + this.processor = new JSynProcessor(); + this.add(this.processor); + this.processor.input.connect(input); + this.preEffect = this.processor.output; + } + this.wireBypass(); + } + + protected void wireBypass() { + this.preEffect.connect(0, this.output.inputA, 0); + this.preEffect.connect(1, this.output.inputB, 0); + } + + protected void removeEffect() { + if (this.effect != null) { + this.wireBypass(); + this.effect.left.output.disconnect(this.output.inputA); + this.effect.right.output.disconnect(this.output.inputB); + this.effect.left.input.disconnect(0, this.preEffect, 0); + this.effect.right.input.disconnect(0, this.preEffect, 1); + this.effect = null; + } + } + + protected void setEffect(Effect effect) { + this.effect = effect; + this.preEffect.connect(0, this.effect.left.input, 0); + this.preEffect.connect(1, this.effect.right.input, 0); + + 
this.effect.left.output.connect(this.output.inputA); + this.preEffect.disconnect(0, this.output.inputA, 0); + + this.effect.right.output.connect(this.output.inputB); + this.preEffect.disconnect(1, this.output.inputB, 0); + } + + @Override + public UnitOutputPort getOutput() { + return this.output.output; + } +} diff --git a/src/processing/sound/JSynDelay.java b/src/processing/sound/JSynDelay.java new file mode 100644 index 0000000..a671b1a --- /dev/null +++ b/src/processing/sound/JSynDelay.java @@ -0,0 +1,60 @@ +package processing.sound; + +import com.jsyn.engine.SynthesisEngine; +import com.jsyn.unitgen.Circuit; +import com.jsyn.unitgen.InterpolatingDelay; +import com.jsyn.unitgen.MultiplyAdd; +import com.jsyn.unitgen.UnitFilter; + +/** + * A custom JSyn delay circuit with feedback. + */ +class JSynDelay extends UnitFilter { + + private Circuit feedbackCircuit; + + private InterpolatingDelay delay = new InterpolatingDelay(); + private MultiplyAdd feedback = new MultiplyAdd(); + + public JSynDelay() { + super(); + this.feedbackCircuit = new Circuit(); + this.feedbackCircuit.add(this.delay); + this.feedbackCircuit.add(this.feedback); + + // put the feedback multiplier unit before the delay -- this way + // the original signal is not played back immediately, but playback + // will be delayed for the length of the delay time + // TODO could add 'mix' parameter which allows direct passthrough of + // the original signal? 
+ this.input = this.feedback.inputC; + this.feedback.inputA.set(0.0); + + this.feedback.inputB.connect(this.delay.output); + this.feedback.output.connect(this.delay.input); + this.output = this.delay.output; + } + + @Override + public void setSynthesisEngine(SynthesisEngine synthesisEngine) { + this.feedbackCircuit.setSynthesisEngine(synthesisEngine); + } + + public void generate(int start, int limit) { + // not called + } + + protected void setDelayTime(float delayTime) { + this.delay.delay.set(delayTime); + } + + protected void setFeedback(float feedback) { + // TODO check range + this.feedback.inputA.set(feedback); + } + + protected void setMaxDelayTime(float maxDelayTime) { + int maxSamples = (int) (Engine.getEngine().getSampleRate() * maxDelayTime); + this.delay.allocate(maxSamples); + } +} diff --git a/src/processing/sound/JSynFFT.java b/src/processing/sound/JSynFFT.java new file mode 100644 index 0000000..8ccdbc4 --- /dev/null +++ b/src/processing/sound/JSynFFT.java @@ -0,0 +1,46 @@ +package processing.sound; + +import java.util.Arrays; + +import com.jsyn.data.FloatSample; +import com.jsyn.unitgen.FixedRateMonoWriter; +import com.softsynth.math.FourierMath; + +/** + * This class copies all input to an audio buffer of the given size and performs + * an FFT on it when required. 
+ * @author kevin + */ +class JSynFFT extends FixedRateMonoWriter { + + private FloatSample buffer; + private double[] real; + private double[] imaginary; + private double[] magnitude; + + protected JSynFFT(int bufferSize) { + super(); + this.buffer = new FloatSample(bufferSize); + this.real = new double[bufferSize]; + this.imaginary = new double[bufferSize]; + this.magnitude = new double[bufferSize / 2]; + + // write any connected input into the output buffer ad infinitum + this.dataQueue.queueLoop(this.buffer); + } + + protected void calculateMagnitudes(float[] target) { + // get position currently being written to + int pos = (int) this.dataQueue.getFrameCount() % this.buffer.getNumFrames(); + for (int i = 0; i < this.buffer.getNumFrames(); i++) { + // TODO could apply window? + this.real[i] = this.buffer.readDouble((pos + i) % this.buffer.getNumFrames()); + } + Arrays.fill(this.imaginary, 0); + FourierMath.fft(this.real.length, this.real, this.imaginary); + FourierMath.calculateMagnitudes(this.real, this.imaginary, this.magnitude); + for (int i = 0; i < target.length; i++) { + target[i] = (float) (2 * this.magnitude[i]); + } + } +} diff --git a/src/processing/sound/JSynLBCF.java b/src/processing/sound/JSynLBCF.java new file mode 100644 index 0000000..4b5361c --- /dev/null +++ b/src/processing/sound/JSynLBCF.java @@ -0,0 +1,51 @@ +package processing.sound; + +import com.jsyn.ports.UnitInputPort; +import com.jsyn.ports.UnitOutputPort; +import com.jsyn.unitgen.Circuit; +import com.jsyn.unitgen.Delay; +import com.jsyn.unitgen.FilterOnePole; +import com.jsyn.unitgen.MultiplyAdd; +import com.jsyn.unitgen.PassThrough; + +// see https://ccrma.stanford.edu/~jos/pasp/Lowpass_Feedback_Comb_Filter.html +class JSynLBCF extends Circuit { + + protected UnitInputPort input; + protected UnitOutputPort output; + + private MultiplyAdd mixer; + private Delay delay; + private FilterOnePole filter; + + public JSynLBCF(double f, double d, int N) { + PassThrough in = new 
PassThrough(); + this.add(in); + + this.add(this.mixer = new MultiplyAdd()); + this.setF(f); + + this.add(this.filter = new FilterOnePole()); + this.setD(d); + + this.add(this.delay = new Delay()); + this.delay.allocate(N); + + this.input = in.input; +// in.output.connect(this.mixer.inputC); + in.output.connect(this.delay.input); + this.delay.output.connect(this.filter.input); + this.filter.output.connect(this.mixer.inputB); + this.output = this.mixer.output; + } + + // see https://ccrma.stanford.edu/~jos/fp/One_Pole.html + protected void setD(double d) { + this.filter.a0.setValue(1 - d); + this.filter.b1.setValue(-d); + } + + protected void setF(double f) { + this.mixer.inputA.set(f); + } +} diff --git a/src/processing/sound/JSynProcessor.java b/src/processing/sound/JSynProcessor.java new file mode 100644 index 0000000..23fa657 --- /dev/null +++ b/src/processing/sound/JSynProcessor.java @@ -0,0 +1,42 @@ +package processing.sound; + +import com.jsyn.ports.UnitInputPort; +import com.jsyn.ports.UnitOutputPort; +import com.jsyn.unitgen.UnitFilter; + +/** + * A custom JSyn unit generator that takes care of adding and panning + */ +class JSynProcessor extends UnitFilter { + + private float add; + private float pan; + + public JSynProcessor() { + this.input = new UnitInputPort("Input"); + this.output = new UnitOutputPort(2, "Output"); + this.addPort(this.input); + this.addPort(this.output); + } + + @Override + public void generate(int start, int limit) { + double[] input = this.input.getValues(); + double right = 0.5 + this.pan * 0.5; + double left = 1 - right; + double[] outleft = output.getValues(0); + double[] outright = output.getValues(1); + for (int i = start; i < limit; i++) { + outleft[i] = ( input[i] + this.add ) * left; + outright[i] = ( input[i] + this.add ) * right; + } + } + + public void add(float add) { + this.add = add; + } + + public void pan(float pos) { + this.pan = pos; + } +} diff --git a/src/processing/sound/JSynReverb.java 
b/src/processing/sound/JSynReverb.java new file mode 100644 index 0000000..cb11223 --- /dev/null +++ b/src/processing/sound/JSynReverb.java @@ -0,0 +1,87 @@ +package processing.sound; + +import com.jsyn.engine.SynthesisEngine; +import com.jsyn.unitgen.Circuit; +import com.jsyn.unitgen.MixerMono; +import com.jsyn.unitgen.PassThrough; +import com.jsyn.unitgen.UnitFilter; + +/** + * A JSyn implementation of the classic Freeverb design. + * @seealso https://ccrma.stanford.edu/~jos/pasp/Freeverb.html + */ +class JSynReverb extends UnitFilter { + + private Circuit reverbCircuit; + + // see https://ccrma.stanford.edu/~jos/pasp/Freeverb.html + private static int[] Ns = new int[] { 1557, 1617, 1491, 1422, 1277, 1356, 1188, 1116 }; + private JSynLBCF[] lbcfs = new JSynLBCF[JSynReverb.Ns.length]; + + private static int[] As = new int[] { 225, 556, 441, 341 }; + + private MixerMono mixer; + + public JSynReverb() { + this.reverbCircuit = new Circuit(); + PassThrough in = new PassThrough(); + this.reverbCircuit.add(in); + this.input = in.input; + + JSynAllPass first = new JSynAllPass(0.5, JSynReverb.As[0]); + this.reverbCircuit.add(first); + JSynAllPass ap = first; + for (int i = 1; i < JSynReverb.As.length; i++) { + JSynAllPass next = new JSynAllPass(0.5, JSynReverb.As[i]); + ap.output.connect(next.input); + ap = next; + this.reverbCircuit.add(ap); + } + + for (int i = 0; i < JSynReverb.Ns.length; i++) { + this.lbcfs[i] = new JSynLBCF(0.84, 0.2, JSynReverb.Ns[i]); + this.reverbCircuit.add(this.lbcfs[i]); + this.lbcfs[i].input.connect(in.output); + + // multiple connected inputs to first AllPass are summed automatically + this.lbcfs[i].output.connect(first.input); + } + + this.mixer = new MixerMono(2); + this.mixer.amplitude.set(1.0); + this.setWet(0.5f); + + in.output.connect(0, this.mixer.input, 0); + ap.output.connect(0, this.mixer.input, 1); + this.output = this.mixer.output; + } + + @Override + public void setSynthesisEngine(SynthesisEngine synthesisEngine) { + 
this.reverbCircuit.setSynthesisEngine(synthesisEngine); + } + + @Override + public void generate(int start, int limit) { + // not called + } + + protected void setDamp(float damp) { + // damp = initialdamp * 0.4 + for (JSynLBCF lbcf : this.lbcfs) { + lbcf.setD(damp * 0.4); + } + } + + protected void setRoom(float room) { + // roomsize = initialroom * 0.28 + 0.7 + for (JSynLBCF lbcf : this.lbcfs) { + lbcf.setF(room * 0.28 + 0.7); + } + } + + protected void setWet(float wet) { + this.mixer.gain.set(0, 1 - wet); + this.mixer.gain.set(1, wet); + } +} diff --git a/src/processing/sound/LowPass.java b/src/processing/sound/LowPass.java new file mode 100644 index 0000000..4f2e6ee --- /dev/null +++ b/src/processing/sound/LowPass.java @@ -0,0 +1,37 @@ +package processing.sound; + +import com.jsyn.unitgen.FilterLowPass; + +import processing.core.PApplet; + +/** + * This is a low pass filter + * @sound webref + * @param parent PApplet: typically use "this" + **/ +public class LowPass extends Effect { + + public LowPass(PApplet parent) { + super(parent); + } + + @Override + protected FilterLowPass newInstance() { + return new FilterLowPass(); + } + + /** + * Set the cut off frequency for the filter + * @webref sound + * @param freq the cutoff frequency in Hertz + */ + public void freq(float freq) { + this.left.frequency.set(freq); + this.right.frequency.set(freq); + } + + public void process(SoundObject input, float freq) { + this.freq(freq); + this.process(input); + } +} diff --git a/src/processing/sound/Noise.java b/src/processing/sound/Noise.java new file mode 100644 index 0000000..4861333 --- /dev/null +++ b/src/processing/sound/Noise.java @@ -0,0 +1,41 @@ +package processing.sound; + +import com.jsyn.unitgen.UnitGenerator; +import com.jsyn.unitgen.UnitSource; + +import processing.core.PApplet; + +/** + * For advanced users: common superclass of all noise generators + */ +public abstract class Noise extends SoundObject { + + protected JSynNoise noise; + + protected 
Noise(PApplet theParent, JSynNoise noise) { + super(theParent); + this.noise = noise; + this.circuit = new JSynCircuit(((UnitSource) this.noise).getOutput()); + } + + public void play(float amp) { + this.amp(amp); + this.play(); + } + + public void play(float amp, float pos) { + this.pan(pos); + this.play(amp); + } + + public void play(float amp, float add, float pos) { + this.set(amp, add, pos); + this.play(); + } + + public void set(float amp, float add, float pos) { + this.amp(amp); + this.add(add); + this.pan(pos); + } +} diff --git a/src/processing/sound/Oscillator.java b/src/processing/sound/Oscillator.java new file mode 100644 index 0000000..ffa83d6 --- /dev/null +++ b/src/processing/sound/Oscillator.java @@ -0,0 +1,69 @@ +package processing.sound; + +import com.jsyn.unitgen.UnitOscillator; + +import processing.core.PApplet; + +/** + * For advanced users: common superclass of all oscillator sound sources + */ +public abstract class Oscillator extends SoundObject { + + protected JSynOscillator oscillator; + + protected Oscillator(PApplet theParent, JSynOscillator oscillator) { + super(theParent); + this.oscillator = oscillator; + this.circuit = new JSynCircuit(this.oscillator.getOutput()); + this.amplitude = this.oscillator.amplitude; + } + + /** + * Set the frequency of the oscillator in Hz. + * @webref sound + * @param freq A floating point value of the oscillator in Hz. + **/ + public void freq(float freq) { + // TODO check positive? 
+ this.oscillator.frequency.set(freq); + } + + /** + * Starts the oscillator + * @webref sound + **/ + public void play() { + super.play(); + } + + public void play(float freq, float amp) { + this.freq(freq); + this.amp(amp); + this.play(); + } + + public void play(float freq, float amp, float add) { + this.add(add); + this.play(freq, amp); + } + + public void play(float freq, float amp, float add, float pos) { + this.set(freq, amp, add, pos); + this.play(); + } + + /** + * Set multiple parameters at once + * @webref sound + * @param freq The frequency value of the oscillator in Hz. + * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add A value for modulating other audio signals. + * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0. + **/ + public void set(float freq, float amp, float add, float pos) { + this.freq(freq); + this.amp(amp); + this.add(add); + this.pan(pos); + } +} diff --git a/src/processing/sound/PinkNoise.java b/src/processing/sound/PinkNoise.java new file mode 100644 index 0000000..c2dfb18 --- /dev/null +++ b/src/processing/sound/PinkNoise.java @@ -0,0 +1,94 @@ +package processing.sound; + +import processing.core.PApplet; + +/** +* This is a pink noise generator. Pink Noise has a decrease of 3dB per octave. +* @webref sound +**/ +public class PinkNoise extends Noise { + + /** + * @param parent typically use "this" + * @webref sound + */ + public PinkNoise(PApplet parent) { + super(parent, new com.jsyn.unitgen.PinkNoise()); + this.amplitude = this.noise.amplitude; + } + + // Below are just duplicated methods from the Noise and SoundObject superclass which + // are required for the reference to build the corresponding pages. 
+ + public void play() { + super.play(); + } + + public void play(float amp) { + super.play(amp); + } + + public void play(float amp, float pos) { + super.play(amp, pos); + } + + /** + * Start the generator + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @param add offset the output of the noise by given value + * @param pos pan the generator in stereo panorama. Allowed values are between -1.0 and 1.0. + * @webref sound + **/ + public void play(float amp, float add, float pos) { + super.play(amp, add, pos); + } + + /** + * Set multiple parameters at once. + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @param add offset the output of the noise by given value + * @param pos pan the generator in stereo panorama. Allowed values are between -1.0 and 1.0. + * @webref sound + **/ + public void set(float amp, float add, float pos) { + super.set(amp, add, pos); + } + + /** + * Change the amplitude/volume of this sound. + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @webref sound + **/ + public void amp(float amp) { + // the JSyn Brownian noise generator can drift to exceed one, so tone down the volume a bit + super.amp(amp / 4); + } + + /** + * Offset the output of this generator by a fixed value + * @param add offset the output of the generator by the given value + * @webref sound + **/ + public void add(float add) { + super.add(add); + } + + /** + * Move the sound in a stereo panorama. + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). 
+ * @webref sound + **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Stop the generator + * @webref sound + **/ + public void stop() { + super.stop(); + } +} diff --git a/src/processing/sound/Pulse.java b/src/processing/sound/Pulse.java new file mode 100644 index 0000000..16fdf49 --- /dev/null +++ b/src/processing/sound/Pulse.java @@ -0,0 +1,135 @@ +package processing.sound; + +import com.jsyn.unitgen.PulseOscillator; + +import processing.core.PApplet; + +/** + * This is a simple Pulse oscillator. + * @webref sound + **/ +public class Pulse extends Oscillator { + + /** + * @webref sound + * @param parent typically use "this" + */ + public Pulse(PApplet parent) { + super(parent, new PulseOscillator()); + } + + /** + * Set the pulse width of the oscillator. + * + * @webref sound + * @param width + * The relative pulse width of the oscillator as a float value + * between 0.0 and 1.0 (exclusive) + **/ + public void width(float width) { + this.oscillator.width.set(width); + } + + /** + * Set multiple parameters at once + * + * @webref sound + * @param freq + * The frequency value of the oscillator in Hz. + * @param width + * The pulse width of the oscillator as a value between 0.0 and 1.0. + * @param amp + * The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add Offset the output of the oscillator by given value + * @param pos + * The panoramic position of the oscillator as a float from -1.0 to + * 1.0. + **/ + public void set(float freq, float width, float amp, float add, float pos) { + this.width(width); + this.set(freq, amp, add, pos); + } + + // Below are just duplicated methods from superclasses which are required + // for the online reference to build the corresponding pages. 
+ + public void play() { + super.play(); + } + + public void play(float freq, float amp) { + super.play(freq, amp); + } + + public void play(float freq, float amp, float add) { + super.play(freq, amp, add); + } + + /** + * Starts the oscillator + * @param freq The frequency value of the oscillator in Hz. + * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add Offset the output of the oscillator by given value + * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0. + * @webref sound + **/ + public void play(float freq, float amp, float add, float pos) { + super.play(freq, amp, add, pos); + } + + public void set(float freq, float amp, float add, float pos) { + super.set(freq, amp, add, pos); + } + + /** + * Set the frequency of the oscillator in Hz. + * @webref sound + * @param freq A floating point value of the oscillator in Hz. + **/ + public void freq(float freq) { + super.freq(freq); + } + + /** + * Change the amplitude/volume of this sound. + * + * @webref sound + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. + **/ + public void amp(float amp) { + super.amp(amp); + } + + /** + * Offset the output of this generator by given value + * + * @webref sound + * @param add Offset the output of the oscillator by given value + **/ + public void add(float add) { + super.add(add); + } + + /** + * Move the sound in a stereo panorama. + * + * @webref sound + * @param pos + * The panoramic position of this sound unit as a float from -1.0 + * (left) to 1.0 (right). + **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Stop the oscillator. 
+ * + * @webref sound + **/ + public void stop() { + super.stop(); + } +} diff --git a/src/processing/sound/Reverb.java b/src/processing/sound/Reverb.java new file mode 100644 index 0000000..091d299 --- /dev/null +++ b/src/processing/sound/Reverb.java @@ -0,0 +1,84 @@ +package processing.sound; + +import processing.core.PApplet; + +/** + * This is a simple reverb effect. + * + * @webref sound + * @param parent + * PApplet: typically use "this" + **/ +public class Reverb extends Effect { + + public Reverb(PApplet parent) { + super(parent); + } + + @Override + protected JSynReverb newInstance() { + return new JSynReverb(); + } + + +// public void process(SoundObject input, float room, float damp, float wet) { + /** + * Change the damping of the reverb effect + * + * @webref sound + * @param damp + * A float value controlling the damping factor of the reverb + **/ + public void damp(float damp) { + if (Engine.checkRange(damp, "damp")) { + this.left.setDamp(damp); + this.right.setDamp(damp); + } + } + + /** + * Change the room size of the reverb effect. + * + * @webref sound + * @param room + * A float value controlling the room size of the effect. + **/ + public void room(float room) { + if (Engine.checkRange(room, "room")) { + this.left.setRoom(room); + this.right.setRoom(room); + } + } + + /** + * Set multiple parameters at once + * + * @webref sound + * @param room + * A value controlling the room size of the effect + * @param damp + * A value controlling the damping factor of the reverb + * @param wet + * A value controlling the wet/dry ratio of the reverb. + **/ + public void set(float room, float damp, float wet) { + this.room(room); + this.damp(damp); + this.wet(wet); + } + + /** + * Change the dry/wet ratio of the delay effect + * + * @webref sound + * @param wet + * A float value controlling the wet/dry ratio of the reverb. 
TODO + * document + **/ + public void wet(float wet) { + if (Engine.checkRange(wet, "wet")) { + this.left.setWet(wet); + this.right.setWet(wet); + } + } +} diff --git a/src/processing/sound/SawOsc.java b/src/processing/sound/SawOsc.java new file mode 100644 index 0000000..9b518ee --- /dev/null +++ b/src/processing/sound/SawOsc.java @@ -0,0 +1,111 @@ +package processing.sound; + +import com.jsyn.unitgen.SawtoothOscillator; + +import processing.core.PApplet; + +/** + * This is a simple Saw Wave Oscillator + * @webref sound + **/ +public class SawOsc extends Oscillator { + + /** + * @webref sound + * @param parent typically use "this" + */ + public SawOsc(PApplet parent) { + super(parent, new SawtoothOscillator()); + } + + // Below are just duplicated methods from superclasses which are required + // for the online reference to build the corresponding pages. + + public void play() { + super.play(); + } + + public void play(float freq, float amp) { + super.play(freq, amp); + } + + public void play(float freq, float amp, float add) { + super.play(freq, amp, add); + } + + /** + * Starts the oscillator + * @param freq The frequency value of the oscillator in Hz. + * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add Offset the output of the oscillator by given value + * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0. + * @webref sound + **/ + public void play(float freq, float amp, float add, float pos) { + super.play(freq, amp, add, pos); + } + + /** + * Set multiple parameters at once + * @webref sound + * @param freq The frequency value of the oscillator in Hz. + * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add Offset the output of the oscillator by given value + * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0. 
+ **/ + public void set(float freq, float amp, float add, float pos) { + super.set(freq, amp, add, pos); + } + + /** + * Set the frequency of the oscillator in Hz. + * @webref sound + * @param freq A floating point value of the oscillator in Hz. + **/ + public void freq(float freq) { + super.freq(freq); + } + + /** + * Change the amplitude/volume of this sound. + * + * @webref sound + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. + **/ + public void amp(float amp) { + super.amp(amp); + } + + /** + * Offset the output of this generator by given value + * + * @webref sound + * @param add Offset the output of the oscillator by given value + **/ + public void add(float add) { + super.add(add); + } + + /** + * Move the sound in a stereo panorama. + * + * @webref sound + * @param pos + * The panoramic position of this sound unit as a float from -1.0 + * (left) to 1.0 (right). + **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Stop the oscillator. + * + * @webref sound + **/ + public void stop() { + super.stop(); + } +} diff --git a/src/processing/sound/SinOsc.java b/src/processing/sound/SinOsc.java new file mode 100644 index 0000000..1c1dd30 --- /dev/null +++ b/src/processing/sound/SinOsc.java @@ -0,0 +1,128 @@ +package processing.sound; + +import com.jsyn.unitgen.SineOscillator; + +import processing.core.PApplet; + +/** + * This is a simple Sine Wave Oscillator + * + * @webref sound + **/ +public class SinOsc extends Oscillator { + + /** + * @webref sound + * @param parent + * typically use "this" + */ + public SinOsc(PApplet parent) { + super(parent, new SineOscillator()); + } + + // Below are just duplicated methods from superclasses which are required + // for the online reference to build the corresponding pages. 
+ + public void play() { + super.play(); + } + + public void play(float freq, float amp) { + super.play(freq, amp); + } + + public void play(float freq, float amp, float add) { + super.play(freq, amp, add); + } + + /** + * Starts the oscillator + * + * @param freq + * The frequency value of the oscillator in Hz. + * @param amp + * The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add + * Offset the output of the oscillator by given value + * @param pos + * The panoramic position of the oscillator as a float from -1.0 to + * 1.0. + * @webref sound + **/ + public void play(float freq, float amp, float add, float pos) { + super.play(freq, amp, add, pos); + } + + /** + * Set multiple parameters at once + * + * @webref sound + * @param freq + * The frequency value of the oscillator in Hz. + * @param amp + * The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add + * Offset the output of the oscillator by given value + * @param pos + * The panoramic position of the oscillator as a float from -1.0 to + * 1.0. + **/ + public void set(float freq, float amp, float add, float pos) { + super.set(freq, amp, add, pos); + } + + /** + * Set the frequency of the oscillator in Hz. + * + * @webref sound + * @param freq + * A floating point value of the oscillator in Hz. + **/ + public void freq(float freq) { + super.freq(freq); + } + + /** + * Change the amplitude/volume of this sound. + * + * @webref sound + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. + **/ + public void amp(float amp) { + super.amp(amp); + } + + /** + * Offset the output of this generator by given value + * + * @webref sound + * @param add + * Offset the output of the oscillator by given value + **/ + public void add(float add) { + super.add(add); + } + + /** + * Move the sound in a stereo panorama. 
+ * + * @webref sound + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). + **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Stop the oscillator. + * + * @webref sound + **/ + public void stop() { + super.stop(); + } +} diff --git a/src/processing/sound/Sound.java b/src/processing/sound/Sound.java new file mode 100644 index 0000000..9d52a01 --- /dev/null +++ b/src/processing/sound/Sound.java @@ -0,0 +1,133 @@ +package processing.sound; + +import com.jsyn.devices.AudioDeviceManager; + +import processing.core.PApplet; + +/** + * This class can be used for configuring the Processing Sound library. + * + * The Sound class allows for configuring global properties of the sound + * library's audio synthesis and playback, such as the output device, sample + * rate or global output volume. + * + * Information on available input and output devices can be obtained by calling + * Sound.list() + * + * @webref sound + */ +public class Sound { + + // could make this static as well, Engine class guarantees it's a singleton + // anyway + private Engine engine; + + public Sound(PApplet parent) { + this.engine = Engine.getEngine(parent); + } + + /** + * + * @param parent + * typically use "this" + * @param sampleRate + * the sample rate to be used by the synthesis engine (default 44100) + * @param outputDevice + * the device id of the sound card that sound should be played on + * @param inputDevice + * the device id of the sound card from which sound should be + * captured + * @param volume + * the overall output volume of the library (default 1.0) + * @webref sound + */ + public Sound(PApplet parent, int sampleRate, int outputDevice, int inputDevice, float volume) { + this(parent); + this.sampleRate(sampleRate); + this.inputDevice(inputDevice); + this.outputDevice(outputDevice); + this.volume(volume); + } + + /** + * Print and return information on available audio devices and their number of + * input/output channels. 
+ *
+ * @return an array giving the names of all audio devices available on this
+ *         computer
+ * @webref sound
+ */
+	public static String[] list() {
+		AudioDeviceManager audioManager = Engine.getAudioManager();
+		int numDevices = audioManager.getDeviceCount();
+		String[] devices = new String[numDevices];
+		for (int i = 0; i < numDevices; i++) {
+			String deviceName = audioManager.getDeviceName(i);
+			devices[i] = deviceName; // reuse the name fetched above instead of querying the device a second time
+			int maxInputs = audioManager.getMaxInputChannels(i);
+			int maxOutputs = audioManager.getMaxOutputChannels(i);
+			boolean isDefaultInput = (i == audioManager.getDefaultInputDeviceID());
+			boolean isDefaultOutput = (i == audioManager.getDefaultOutputDeviceID());
+			System.out.println("deviceId" + i + ": " + deviceName);
+			System.out.println(" max inputs : " + maxInputs + (isDefaultInput ? " (default)" : ""));
+			System.out.println(" max outputs: " + maxOutputs + (isDefaultOutput ? " (default)" : ""));
+		}
+		return devices;
+	}
+
+	public int sampleRate() {
+		return this.engine.getSampleRate();
+	}
+
+	/**
+	 * Get or set the internal sample rate of the synthesis engine.
+	 *
+	 * @param sampleRate
+	 *            the sample rate to be used by the synthesis engine (default 44100)
+	 * @return the internal sample rate used by the synthesis engine
+	 * @webref sound
+	 */
+	public int sampleRate(int sampleRate) {
+		this.engine.setSampleRate(sampleRate);
+		return this.sampleRate();
+	}
+
+	/**
+	 * Choose the device (sound card) which should be used for grabbing audio input
+	 * using AudioIn.
+	 *
+	 * @param deviceId
+	 *            the device id obtained from Sound.list()
+	 * @seealso Sound.list()
+	 * @webref sound
+	 */
+	public void inputDevice(int deviceId) {
+		this.engine.selectInputDevice(deviceId);
+	}
+
+	/**
+	 * Choose the device (sound card) which the Sound library's audio output should
+	 * be sent to.
+ * + * @param deviceId + * the device id obtained from list() + * @seealso list() + * @webref sound + */ + public void outputDevice(int deviceId) { + this.engine.selectOutputDevice(deviceId); + } + + /** + * Set the overall output volume of the Processing sound library. + * + * @param volume + * the desired output volume, normally between 0.0 and 1.0 (default + * is 1.0) + * @webref sound + */ + public void volume(float volume) { + this.engine.setVolume(volume); + } + +} diff --git a/src/processing/sound/SoundFile.java b/src/processing/sound/SoundFile.java new file mode 100644 index 0000000..5e25553 --- /dev/null +++ b/src/processing/sound/SoundFile.java @@ -0,0 +1,303 @@ +package processing.sound; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; + +import com.jsyn.data.FloatSample; +import com.jsyn.util.SampleLoader; + +import fr.delthas.javamp3.Sound; +import processing.core.PApplet; + +// calls to amp(), pan() etc affect both the LAST initiated and still running sample, AND all subsequently started ones +/** + * This is a Soundfile player which allows to play back and manipulate sound + * files. Supported formats are: WAV, AIF/AIFF, and MP3. + * + * @webref sound + **/ +public class SoundFile extends AudioSample { + + private static Map SAMPLECACHE = new HashMap(); + + // the original library only printed an error if the file wasn't found, + // but then later threw a NullPointerException when trying to play() the file. + // it might be a better idea to throw an exception to the user in some cases, + // e.g. when the file can't be found? 
+ /** + * @param parent + * typically use "this" + * @param path + * filename of the sound file to be loaded + * @webref sound + */ + public SoundFile(PApplet parent, String path) { + super(parent); + + this.sample = SoundFile.SAMPLECACHE.get(path); + + if (this.sample == null) { + InputStream fin = parent.createInput(path); + + // if PApplet.createInput() can't find the file or URL, it prints + // an error message and fin returns null. In this case we can just + // return this dysfunctional SoundFile object without initialising further + if (fin == null) { + Engine.printError("unable to find file " + path); + return; + } + + try { + // load WAV or AIF using JSyn + this.sample = SampleLoader.loadFloatSample(fin); + } catch (IOException e) { + // try parsing as mp3 + try { + Sound mp3 = new Sound(fin); + try { + ByteArrayOutputStream os = new ByteArrayOutputStream(); + // TODO make decoding asynchronous with a FutureTask + // this call is expensive + mp3.decodeFullyInto(os); + float data[] = new float[os.size() / 2]; + SampleLoader.decodeLittleI16ToF32(os.toByteArray(), 0, os.size(), data, 0); + this.sample = new FloatSample(data, mp3.isStereo() ? 2 : 1); + } catch (IOException ee) { + throw ee; + } finally { + mp3.close(); + } + } catch (IOException ee) { + Engine.printError("unable to decode sound file " + path); + // return dysfunctional SoundFile object + return; + } + } + SoundFile.SAMPLECACHE.put(path, this.sample); + } + this.initiatePlayer(); + } + + // Below are just duplicated methods from the AudioSample superclass which + // are required for the reference to build the corresponding pages. + + /** + * Returns the number of channels of the soundfile. + * + * @return Returns the number of channels of the soundfile (1 for mono, 2 for + * stereo) + * @webref sound + **/ + public int channels() { + return super.channels(); + } + + /** + * Cues the playhead to a fixed position in the soundfile. 
+ * + * @param time + * position in the soundfile that the next playback should start + * from, in seconds. + * @webref sound + **/ + public void cue(float time) { + super.cue(time); + } + + /** + * Returns the duration of the soundfile in seconds. + * + * @webref sound + * @return The duration of the soundfile in seconds. + **/ + public float duration() { + return super.duration(); + } + + /** + * Returns the number of frames of this soundfile. + * + * @webref sound + * @return The number of frames of this soundfile. + * @see duration() + **/ + public int frames() { + return super.frames(); + } + + public void play() { + super.play(); + } + + public void play(float rate) { + super.play(rate); + } + + public void play(float rate, float amp) { + super.play(rate, amp); + } + + public void play(float rate, float pos, float amp) { + super.play(rate, pos, amp); + } + + public void play(float rate, float pos, float amp, float add) { + super.play(rate, pos, amp, add); + } + + /** + * Starts the playback of the soundfile. Only plays to the end of the + * audiosample once. + * + * @param rate + * relative playback rate to use. 1 is the original speed. 0.5 is + * half speed and one octave down. 2 is double the speed and one + * octave up. + * @param amp + * the desired playback amplitude of the audiosample as a value from + * 0.0 (complete silence) to 1.0 (full volume) + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). Only works for mono soundfiles! + * @param cue + * position in the audiosample that playback should start from, in + * seconds. + * @param add + * offset the output of the generator by the given value + * @webref sound + **/ + public void play(float rate, float pos, float amp, float add, float cue) { + super.play(rate, pos, amp, add, cue); + } + + + /** + * Jump to a specific position in the soundfile while continuing to play. + * + * @webref sound + * @param time + * position to jump to, in seconds. 
+ **/ + public void jump(float time) { + super.jump(time); + } + + /** + * Stop the playback of the file, but cue it to the current position so that the + * next call to play() will continue playing where it left off. + * + * @see stop + * @webref sound + */ + public void pause() { + super.pause(); + } + + /** + * Check whether this soundfile is currently playing. + * + * @return `true` if the soundfile is currently playing, `false` if it is not. + * @webref sound + */ + public boolean isPlaying() { + return super.isPlaying(); + } + + public void loop() { + super.loop(); + } + + public void loop(float rate) { + super.loop(rate); + } + + public void loop(float rate, float amp) { + super.loop(rate, amp); + } + + public void loop(float rate, float pos, float amp) { + super.loop(rate, pos, amp); + } + + /** + * Starts playback which will loop at the end of the soundfile. + * + * @param rate + * relative playback rate to use. 1 is the original speed. 0.5 is + * half speed and one octave down. 2 is double the speed and one + * octave up. + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). Only works for mono soundfiles! + * @param amp + * the desired playback amplitude of the audiosample as a value from + * 0.0 (complete silence) to 1.0 (full volume) + * @param add + * offset the output of the generator by the given value + * @webref sound + */ + public void loop(float rate, float pos, float amp, float add) { + super.loop(rate, pos, amp, add); + } + + /** + * FIXME see comment in AudioSample class + * + * @param cue + * position in the audiosample that the next playback or loop should + * start from, in seconds. public void loop(float rate, float pos, + * float amp, float add, float cue) { super.loop(rate, pos, amp, add, + * cue); } + */ + + /** + * Change the amplitude/volume of this audiosample. 
+ * + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. + * @webref sound + **/ + public void amp(float amp) { + super.amp(amp); + } + + /** + * Move the sound in a stereo panorama. Only works for mono soundfiles! + * + * @param pos + * the panoramic position of this sound unit from -1.0 (left) to 1.0 + * (right). + * @webref sound + **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Set the playback rate of the soundfile. + * + * @param rate + * Relative playback rate to use. 1 is the original speed. 0.5 is + * half speed and one octave down. 2 is double the speed and one + * octave up. + * @webref sound + **/ + public void rate(float rate) { + super.rate(rate); + } + + /** + * Stops the playback. + * + * @see pause + * @webref sound + **/ + public void stop() { + super.stop(); + } + +} diff --git a/src/processing/sound/SoundObject.java b/src/processing/sound/SoundObject.java new file mode 100644 index 0000000..04f47cb --- /dev/null +++ b/src/processing/sound/SoundObject.java @@ -0,0 +1,148 @@ +package processing.sound; + +import com.jsyn.ports.UnitInputPort; +import com.jsyn.unitgen.UnitFilter; + +import processing.core.PApplet; + +/** + * For advanced users: common superclass of all sound sources (oscillators, + * noise, audio samples and even AudioIn). + */ +// Subclasses need to assign the 'amplitude' port, and also initiate a +// JSynCircuit (which effects can be plugged into) with an appropriate +// JSynProcessor if they want to support pan/add. 
+public abstract class SoundObject { + + // subclasses need to initialise this circuit + protected JSynCircuit circuit; + + // all subclasses need to set this amplitude port -- either to the amplitude + // port of the circuit, or directly to an amplitude port of their sound unit + protected UnitInputPort amplitude; + + protected float amp = 1.0f; + private boolean isPlaying = false; + + protected SoundObject(PApplet parent) { + Engine.getEngine(parent); + } + + private void setAmplitude() { + this.amplitude.set(this.amp); + } + + /** + * Offset the output of this generator by given value + * + * @webref sound + * @param add + * A value for offsetting the audio signal. + **/ + public void add(float add) { + if (this.circuit.processor == null) { + Engine.printError("stereo sound sources do not support adding"); + } else { + this.circuit.processor.add(add); + } + } + + /** + * Change the amplitude/volume of this sound. + * + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. + * @webref sound + **/ + public void amp(float amp) { + if (Engine.checkAmp(amp)) { + this.amp = amp; + if (this.isPlaying()) { + this.setAmplitude(); + } + } + } + + /** + * Check if this sound object is currently playing. + * + * @webref sound + * @return `true` if this sound object is currently playing, `false` if it is + * not. + */ + public boolean isPlaying() { + return this.isPlaying; + } + + /** + * Move the sound in a stereo panorama. + * + * @webref sound + * @param pos + * The panoramic position of this sound unit as a float from -1.0 + * (left) to 1.0 (right). 
+ **/ + public void pan(float pos) { + if (this.circuit.processor == null) { + Engine.printError("stereo sound sources do not support panning"); + } else if (Engine.checkPan(pos)) { + this.circuit.processor.pan(pos); + } + } + + /** + * Start the generator + * + * @webref sound + **/ + public void play() { + // TODO print info message if it's already playing? + if (!this.isPlaying) { + Engine.getEngine().add(this.circuit); + Engine.getEngine().play(this.circuit); + this.setAmplitude(); + this.isPlaying = true; + // TODO rewire effect if one was set previously (before stopping)? + } + } + + /** + * Stop the generator + * + * @webref sound + **/ + public void stop() { + this.isPlaying = false; + this.amplitude.set(0); + if (this.circuit.effect != null) { + this.removeEffect(this.circuit.effect); + } + Engine.getEngine().stop(this.circuit); + Engine.getEngine().remove(this.circuit); + } + + protected void setEffect(Effect effect) { + if (this.circuit.effect == effect) { + Engine.printWarning("this effect is already processing the given sound source"); + } else { + if (this.circuit.effect != null) { + this.removeEffect(this.circuit.effect); + } + + Engine.getEngine().add(effect.left); + Engine.getEngine().add(effect.right); + this.circuit.setEffect(effect); + } + } + + protected void removeEffect(Effect effect) { + if (this.circuit.effect != effect) { + // possibly a previous effect that's being stopped here, ignore call + Engine.printError("this effect is not currently processing any signals."); + + } else { + this.circuit.removeEffect(); + } + } +} diff --git a/src/processing/sound/SqrOsc.java b/src/processing/sound/SqrOsc.java new file mode 100644 index 0000000..ff639c2 --- /dev/null +++ b/src/processing/sound/SqrOsc.java @@ -0,0 +1,111 @@ +package processing.sound; + +import com.jsyn.unitgen.SquareOscillator; + +import processing.core.PApplet; + +/** + * This is a simple Square Wave Oscillator + * @webref sound + **/ +public class SqrOsc extends Oscillator { + + 
/** + * @webref sound + * @param parent typically use "this" + */ + public SqrOsc(PApplet parent) { + super(parent, new SquareOscillator()); + } + + // Below are just duplicated methods from superclasses which are required + // for the online reference to build the corresponding pages. + + public void play() { + super.play(); + } + + public void play(float freq, float amp) { + super.play(freq, amp); + } + + public void play(float freq, float amp, float add) { + super.play(freq, amp, add); + } + + /** + * Starts the oscillator + * @param freq The frequency value of the oscillator in Hz. + * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add Offset the output of the oscillator by given value + * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0. + * @webref sound + **/ + public void play(float freq, float amp, float add, float pos) { + super.play(freq, amp, add, pos); + } + + /** + * Set multiple parameters at once + * @webref sound + * @param freq The frequency value of the oscillator in Hz. + * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add Offset the output of the oscillator by given value + * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0. + **/ + public void set(float freq, float amp, float add, float pos) { + super.set(freq, amp, add, pos); + } + + /** + * Set the frequency of the oscillator in Hz. + * @webref sound + * @param freq A floating point value of the oscillator in Hz. + **/ + public void freq(float freq) { + super.freq(freq); + } + + /** + * Change the amplitude/volume of this sound. + * + * @webref sound + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. 
+ **/
+	public void amp(float amp) {
+		super.amp(amp);
+	}
+
+	/**
+	 * Offset the output of this generator by given value
+	 *
+	 * @webref sound
+	 * @param add Offset the output of the oscillator by given value
+	 **/
+	public void add(float add) {
+		super.add(add);
+	}
+
+	/**
+	 * Move the sound in a stereo panorama.
+	 *
+	 * @webref sound
+	 * @param pos
+	 *            The panoramic position of this sound unit as a float from -1.0
+	 *            (left) to 1.0 (right).
+	 **/
+	public void pan(float pos) {
+		super.pan(pos);
+	}
+
+	/**
+	 * Stop the oscillator.
+	 *
+	 * @webref sound
+	 **/
+	public void stop() {
+		super.stop();
+	}
+}
diff --git a/src/processing/sound/TriOsc.java b/src/processing/sound/TriOsc.java
new file mode 100644
index 0000000..bf194ff
--- /dev/null
+++ b/src/processing/sound/TriOsc.java
@@ -0,0 +1,111 @@
+package processing.sound;
+
+import com.jsyn.unitgen.TriangleOscillator;
+
+import processing.core.PApplet;
+
+/**
+ * This is a simple triangle wave oscillator (distinct from the sawtooth wave provided by SawOsc)
+ * @webref sound
+ **/
+public class TriOsc extends Oscillator {
+
+	/**
+	 * @webref sound
+	 * @param parent typically use "this"
+	 */
+	public TriOsc(PApplet parent) {
+		super(parent, new TriangleOscillator());
+	}
+
+	// Below are just duplicated methods from superclasses which are required
+	// for the online reference to build the corresponding pages.
+
+	public void play() {
+		super.play();
+	}
+
+	public void play(float freq, float amp) {
+		super.play(freq, amp);
+	}
+
+	public void play(float freq, float amp, float add) {
+		super.play(freq, amp, add);
+	}
+
+	/**
+	 * Starts the oscillator
+	 * @param freq The frequency value of the oscillator in Hz.
+	 * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0.
+	 * @param add Offset the output of the oscillator by given value
+	 * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0.
+ * @webref sound + **/ + public void play(float freq, float amp, float add, float pos) { + super.play(freq, amp, add, pos); + } + + /** + * Set multiple parameters at once + * @webref sound + * @param freq The frequency value of the oscillator in Hz. + * @param amp The amplitude of the oscillator as a value between 0.0 and 1.0. + * @param add Offset the output of the oscillator by given value + * @param pos The panoramic position of the oscillator as a float from -1.0 to 1.0. + **/ + public void set(float freq, float amp, float add, float pos) { + super.set(freq, amp, add, pos); + } + + /** + * Set the frequency of the oscillator in Hz. + * @webref sound + * @param freq A floating point value of the oscillator in Hz. + **/ + public void freq(float freq) { + super.freq(freq); + } + + /** + * Change the amplitude/volume of this sound. + * + * @webref sound + * @param amp + * A float value between 0.0 (complete silence) and 1.0 (full volume) + * controlling the amplitude/volume of this sound. + **/ + public void amp(float amp) { + super.amp(amp); + } + + /** + * Offset the output of this generator by given value + * + * @webref sound + * @param add Offset the output of the oscillator by given value + **/ + public void add(float add) { + super.add(add); + } + + /** + * Move the sound in a stereo panorama. + * + * @webref sound + * @param pos + * The panoramic position of this sound unit as a float from -1.0 + * (left) to 1.0 (right). + **/ + public void pan(float pos) { + super.pan(pos); + } + + /** + * Stop the oscillator. + * + * @webref sound + **/ + public void stop() { + super.stop(); + } +} diff --git a/src/processing/sound/WhiteNoise.java b/src/processing/sound/WhiteNoise.java new file mode 100644 index 0000000..d8d8577 --- /dev/null +++ b/src/processing/sound/WhiteNoise.java @@ -0,0 +1,94 @@ +package processing.sound; + +import processing.core.PApplet; + +/** + * This is a White Noise Generator. White Noise has a flat spectrum. 
+ * @webref sound + **/ +public class WhiteNoise extends Noise { + + /** + * @param parent typically use "this" + * @webref sound + */ + public WhiteNoise(PApplet parent) { + super(parent, new com.jsyn.unitgen.WhiteNoise()); + this.amplitude = this.noise.amplitude; + } + + // Below are just duplicated methods from the Noise and SoundObject superclass which + // are required for the reference to build the corresponding pages. + + public void play() { + super.play(); + } + + public void play(float amp) { + super.play(amp); + } + + public void play(float amp, float pos) { + super.play(amp, pos); + } + + /** + * Start the generator + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @param add offset the output of the noise by given value + * @param pos pan the generator in stereo panorama. Allowed values are between -1.0 and 1.0. + * @webref sound + **/ + public void play(float amp, float add, float pos) { + super.play(amp, add, pos); + } + + /** + * Set multiple parameters at once. + * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume) + * @param add offset the output of the noise by given value + * @param pos pan the generator in stereo panorama. Allowed values are between -1.0 and 1.0. + * @webref sound + **/ + public void set(float amp, float add, float pos) { + super.set(amp, add, pos); + } + + /** + * Change the amplitude/volume of this sound. 
+ * @param amp the amplitude of the noise as a value from 0.0 (complete silence) to 1.0 (full volume)
+ * @webref sound
+ **/
+	public void amp(float amp) {
+		// white noise stays within [-1, 1], so pass the amplitude through unscaled (the "/ 4" was copy-pasted from BrownNoise)
+		super.amp(amp);
+	}
+
+	/**
+	 * Offset the output of this generator by a fixed value
+	 * @param add offset the output of the generator by the given value
+	 * @webref sound
+	 **/
+	public void add(float add) {
+		super.add(add);
+	}
+
+	/**
+	 * Move the sound in a stereo panorama.
+	 * @param pos
+	 *            the panoramic position of this sound unit from -1.0 (left) to 1.0
+	 *            (right).
+	 * @webref sound
+	 **/
+	public void pan(float pos) {
+		super.pan(pos);
+	}
+
+	/**
+	 * Stop the generator
+	 * @webref sound
+	 **/
+	public void stop() {
+		super.stop();
+	}
+}