/*! p5.sound.js v0.3.2 2016-11-01 */
(function (root, factory) {
if (typeof define === 'function' && define.amd)
define('p5.sound', ['p5'], function (p5) { (factory(p5));});
else if (typeof exports === 'object')
factory(require('../p5'));
else
factory(root['p5']);
}(this, function (p5) {
/**
* p5.sound extends p5 with Web Audio functionality including audio input,
* playback, analysis and synthesis.
*
* p5.SoundFile: Load and play sound files.
* p5.Amplitude: Get the current volume of a sound.
* p5.AudioIn: Get sound from an input source, typically
* a computer microphone.
* p5.FFT: Analyze the frequency of sound. Returns
* results from the frequency spectrum or time domain (waveform).
* p5.Oscillator: Generate Sine,
* Triangle, Square and Sawtooth waveforms. Base class of
* p5.Noise and p5.Pulse.
*
* p5.Env: An Envelope is a series
* of fades over time. Often used to control an object's
* output gain level as an "ADSR Envelope" (Attack, Decay,
* Sustain, Release). Can also modulate other parameters.
* p5.Delay: A delay effect with
* parameters for feedback, delayTime, and lowpass filter.
* p5.Filter: Filter the frequency range of a
* sound.
*
* p5.Reverb: Add reverb to a sound by specifying
* duration and decay.
* p5.Convolver: Extends
* p5.Reverb to simulate the sound of real
* physical spaces through convolution.
 * p5.SoundRecorder: Record sound for playback,
 * or save it as a .wav file.
* p5.Phrase, p5.Part and
* p5.Score: Compose musical sequences.
*
 * p5.sound is on GitHub:
 * http://github.com/therewasaguy/p5.sound
 * Download the latest version there.
*
* @module p5.sound
* @submodule p5.sound
* @for p5.sound
* @main
*/
/**
 * p5.sound was developed by Jason Sigal for the Processing Foundation, Google Summer of Code 2014. The MIT License (MIT).
*
* http://github.com/therewasaguy/p5.sound
*
* Some of the many audio libraries & resources that inspire p5.sound:
* - TONE.js (c) Yotam Mann, 2014. Licensed under The MIT License (MIT). https://github.com/TONEnoTONE/Tone.js
* - buzz.js (c) Jay Salvat, 2013. Licensed under The MIT License (MIT). http://buzz.jaysalvat.com/
* - Boris Smus Web Audio API book, 2013. Licensed under the Apache License http://www.apache.org/licenses/LICENSE-2.0
* - wavesurfer.js https://github.com/katspaugh/wavesurfer.js
* - Web Audio Components by Jordan Santell https://github.com/web-audio-components
* - Wilm Thoben's Sound library for Processing https://github.com/processing/processing/tree/master/java/libraries/sound
*
* Web Audio API: http://w3.org/TR/webaudio/
*/
var sndcore;
sndcore = function () {
'use strict';
/* AudioContext Monkeypatch
Copyright 2013 Chris Wilson
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
(function (global, exports, perf) {
'use strict';
exports = exports || {};
function fixSetTarget(param) {
if (!param)
// if NYI, just return
return;
if (!param.setTargetAtTime)
param.setTargetAtTime = param.setTargetValueAtTime;
}
if (window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext')) {
window.AudioContext = webkitAudioContext;
if (typeof AudioContext.prototype.createGain !== 'function')
AudioContext.prototype.createGain = AudioContext.prototype.createGainNode;
if (typeof AudioContext.prototype.createDelay !== 'function')
AudioContext.prototype.createDelay = AudioContext.prototype.createDelayNode;
if (typeof AudioContext.prototype.createScriptProcessor !== 'function')
AudioContext.prototype.createScriptProcessor = AudioContext.prototype.createJavaScriptNode;
if (typeof AudioContext.prototype.createPeriodicWave !== 'function')
AudioContext.prototype.createPeriodicWave = AudioContext.prototype.createWaveTable;
AudioContext.prototype.internal_createGain = AudioContext.prototype.createGain;
AudioContext.prototype.createGain = function () {
var node = this.internal_createGain();
fixSetTarget(node.gain);
return node;
};
AudioContext.prototype.internal_createDelay = AudioContext.prototype.createDelay;
AudioContext.prototype.createDelay = function (maxDelayTime) {
var node = maxDelayTime ? this.internal_createDelay(maxDelayTime) : this.internal_createDelay();
fixSetTarget(node.delayTime);
return node;
};
AudioContext.prototype.internal_createBufferSource = AudioContext.prototype.createBufferSource;
AudioContext.prototype.createBufferSource = function () {
var node = this.internal_createBufferSource();
if (!node.start) {
node.start = function (when, offset, duration) {
if (offset || duration)
this.noteGrainOn(when || 0, offset, duration);
else
this.noteOn(when || 0);
};
} else {
node.internal_start = node.start;
node.start = function (when, offset, duration) {
if (typeof duration !== 'undefined')
node.internal_start(when || 0, offset, duration);
else
node.internal_start(when || 0, offset || 0);
};
}
if (!node.stop) {
node.stop = function (when) {
this.noteOff(when || 0);
};
} else {
node.internal_stop = node.stop;
node.stop = function (when) {
node.internal_stop(when || 0);
};
}
fixSetTarget(node.playbackRate);
return node;
};
AudioContext.prototype.internal_createDynamicsCompressor = AudioContext.prototype.createDynamicsCompressor;
AudioContext.prototype.createDynamicsCompressor = function () {
var node = this.internal_createDynamicsCompressor();
fixSetTarget(node.threshold);
fixSetTarget(node.knee);
fixSetTarget(node.ratio);
fixSetTarget(node.reduction);
fixSetTarget(node.attack);
fixSetTarget(node.release);
return node;
};
AudioContext.prototype.internal_createBiquadFilter = AudioContext.prototype.createBiquadFilter;
AudioContext.prototype.createBiquadFilter = function () {
var node = this.internal_createBiquadFilter();
fixSetTarget(node.frequency);
fixSetTarget(node.detune);
fixSetTarget(node.Q);
fixSetTarget(node.gain);
return node;
};
// only wrap createOscillator if it actually exists
if (typeof AudioContext.prototype.createOscillator === 'function') {
AudioContext.prototype.internal_createOscillator = AudioContext.prototype.createOscillator;
AudioContext.prototype.createOscillator = function () {
var node = this.internal_createOscillator();
if (!node.start) {
node.start = function (when) {
this.noteOn(when || 0);
};
} else {
node.internal_start = node.start;
node.start = function (when) {
node.internal_start(when || 0);
};
}
if (!node.stop) {
node.stop = function (when) {
this.noteOff(when || 0);
};
} else {
node.internal_stop = node.stop;
node.stop = function (when) {
node.internal_stop(when || 0);
};
}
if (!node.setPeriodicWave)
node.setPeriodicWave = node.setWaveTable;
fixSetTarget(node.frequency);
fixSetTarget(node.detune);
return node;
};
}
}
if (window.hasOwnProperty('webkitOfflineAudioContext') && !window.hasOwnProperty('OfflineAudioContext')) {
window.OfflineAudioContext = webkitOfflineAudioContext;
}
return exports;
}(window));
// <-- end MonkeyPatch.
// Create the Audio Context
var audiocontext = new window.AudioContext();
/**
 *
 * Returns the Audio Context for this sketch. Useful for users
 * who would like to dig deeper into the Web Audio API.
 *
* @method getAudioContext
* @return {Object} AudioContext for this sketch
*/
p5.prototype.getAudioContext = function () {
return audiocontext;
};
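// A minimal usage sketch: the returned context is a standard Web Audio
// AudioContext, so its native properties and methods are available.
//
//   function setup() {
//     var ctx = getAudioContext();
//     console.log('sample rate: ' + ctx.sampleRate + ', time: ' + ctx.currentTime);
//   }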
// Polyfill for AudioIn, also handled by p5.dom createCapture
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
/**
* Determine which filetypes are supported (inspired by buzz.js)
* The audio element (el) will only be used to test browser support for various audio formats
*/
var el = document.createElement('audio');
p5.prototype.isSupported = function () {
return !!el.canPlayType;
};
var isOGGSupported = function () {
return !!el.canPlayType && el.canPlayType('audio/ogg; codecs="vorbis"');
};
var isMP3Supported = function () {
return !!el.canPlayType && el.canPlayType('audio/mpeg;');
};
var isWAVSupported = function () {
return !!el.canPlayType && el.canPlayType('audio/wav; codecs="1"');
};
var isAACSupported = function () {
return !!el.canPlayType && (el.canPlayType('audio/x-m4a;') || el.canPlayType('audio/aac;'));
};
var isAIFSupported = function () {
return !!el.canPlayType && el.canPlayType('audio/x-aiff;');
};
p5.prototype.isFileSupported = function (extension) {
switch (extension.toLowerCase()) {
case 'mp3':
return isMP3Supported();
case 'wav':
return isWAVSupported();
case 'ogg':
return isOGGSupported();
case 'aac':
case 'm4a':
case 'mp4':
return isAACSupported();
case 'aif':
case 'aiff':
return isAIFSupported();
default:
return false;
}
};
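// A minimal sketch of isFileSupported in use; the asset path is hypothetical:
//
//   function preload() {
//     var ext = isFileSupported('ogg') ? 'ogg' : 'mp3';
//     mySound = loadSound('assets/beat.' + ext);
//   }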
// if it is iOS, we have to have a user interaction to start Web Audio
// http://paulbakaus.com/tutorials/html5/web-audio-on-ios/
var iOS = navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false;
if (iOS) {
var iosStarted = false;
var startIOS = function () {
if (iosStarted)
return;
// create empty buffer
var buffer = audiocontext.createBuffer(1, 1, 22050);
var source = audiocontext.createBufferSource();
source.buffer = buffer;
// connect to output (your speakers)
source.connect(audiocontext.destination);
// play the file
source.start(0);
console.log('start ios!');
if (audiocontext.state === 'running') {
iosStarted = true;
}
};
document.addEventListener('touchend', startIOS, false);
document.addEventListener('touchstart', startIOS, false);
}
}();
var master;
master = function () {
'use strict';
/**
* Master contains AudioContext and the master sound output.
*/
var Master = function () {
var audiocontext = p5.prototype.getAudioContext();
this.input = audiocontext.createGain();
this.output = audiocontext.createGain();
//put a hard limiter on the output
this.limiter = audiocontext.createDynamicsCompressor();
this.limiter.threshold.value = 0;
this.limiter.ratio.value = 20;
this.audiocontext = audiocontext;
this.output.disconnect();
// an array of input sources
this.inputSources = [];
// connect input to limiter
this.input.connect(this.limiter);
// connect limiter to output
this.limiter.connect(this.output);
// meter is just for global Amplitude / FFT analysis
this.meter = audiocontext.createGain();
this.fftMeter = audiocontext.createGain();
this.output.connect(this.meter);
this.output.connect(this.fftMeter);
// connect output to destination
this.output.connect(this.audiocontext.destination);
// an array of all sounds in the sketch
this.soundArray = [];
// an array of all musical parts in the sketch
this.parts = [];
// file extensions to search for
this.extensions = [];
};
// create a single instance of the p5Sound / master output for use within this sketch
var p5sound = new Master();
/**
* Returns a number representing the master amplitude (volume) for sound
* in this sketch.
*
* @method getMasterVolume
* @return {Number} Master amplitude (volume) for sound in this sketch.
* Should be between 0.0 (silence) and 1.0.
*/
p5.prototype.getMasterVolume = function () {
return p5sound.output.gain.value;
};
/**
 * Scale the output of all sound in this sketch.
 * Scaled between 0.0 (silence) and 1.0 (full volume).
 * 1.0 is the maximum amplitude of a digital sound, so multiplying
 * by greater than 1.0 may cause digital distortion. To
 * fade, provide a rampTime parameter. For more
 * complex fades, see the Env class.
*
* Alternately, you can pass in a signal source such as an
* oscillator to modulate the amplitude with an audio signal.
*
* How This Works: When you load the p5.sound module, it
* creates a single instance of p5sound. All sound objects in this
* module output to p5sound before reaching your computer's output.
* So if you change the amplitude of p5sound, it impacts all of the
* sound in this module.
*
* If no value is provided, returns a Web Audio API Gain Node
*
* @method masterVolume
* @param {Number|Object} volume Volume (amplitude) between 0.0
* and 1.0 or modulating signal/oscillator
* @param {Number} [rampTime] Fade for t seconds
* @param {Number} [timeFromNow] Schedule this event to happen at
* t seconds in the future
*/
p5.prototype.masterVolume = function (vol, rampTime, tFromNow) {
if (typeof vol === 'number') {
var rampTime = rampTime || 0;
var tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = p5sound.output.gain.value;
p5sound.output.gain.cancelScheduledValues(now + tFromNow);
p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
} else if (vol) {
vol.connect(p5sound.output.gain);
} else {
// return the Gain Node
return p5sound.output.gain;
}
};
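// For example, to fade every sound in the sketch to silence over two
// seconds, starting half a second from now:
//
//   masterVolume(0, 2, 0.5);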
/**
 * `p5.soundOut` is the p5.sound master output. It sends output to
 * the destination of this window's web audio context. It contains
 * Web Audio API nodes including a dynamicsCompressor (.limiter),
 * and Gain Nodes for .input and .output.
*
* @property soundOut
* @type {Object}
*/
p5.prototype.soundOut = p5.soundOut = p5sound;
/**
 * A silent connection to the DestinationNode
* which will ensure that anything connected to it
* will not be garbage collected
*
* @private
*/
p5.soundOut._silentNode = p5sound.audiocontext.createGain();
p5.soundOut._silentNode.gain.value = 0;
p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
return p5sound;
}(sndcore);
var helpers;
helpers = function () {
'use strict';
var p5sound = master;
/**
 * Returns a number representing the sample rate, in samples per second,
 * of all sound objects in this audio context. It is determined by the
 * sampling rate of your operating system's sound card, and it is not
 * currently possible to change.
 * It is often 44100, or twice the range of human hearing.
*
* @method sampleRate
* @return {Number} samplerate samples per second
*/
p5.prototype.sampleRate = function () {
return p5sound.audiocontext.sampleRate;
};
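// e.g. sampleRate() commonly returns 44100 or 48000, depending on the
// operating system's audio hardware settings.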
/**
 * Returns the closest MIDI note value for
 * a given frequency.
 *
 * @method freqToMidi
 * @param {Number} frequency A frequency, for example, the "A"
 *                           above middle C is 440Hz
* @return {Number} MIDI note value
*/
p5.prototype.freqToMidi = function (f) {
var mathlog2 = Math.log(f / 440) / Math.log(2);
// 69 is the MIDI note value of A4 (440Hz), matching midiToFreq below
var m = Math.round(12 * mathlog2) + 69;
return m;
};
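// Sanity check: freqToMidi is the inverse of midiToFreq below, so
// freqToMidi(440) returns 69, and freqToMidi(midiToFreq(60)) returns 60.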
/**
* Returns the frequency value of a MIDI note value.
* General MIDI treats notes as integers where middle C
* is 60, C# is 61, D is 62 etc. Useful for generating
* musical frequencies with oscillators.
*
* @method midiToFreq
* @param {Number} midiNote The number of a MIDI note
* @return {Number} Frequency value of the given MIDI note
* @example
*
* var notes = [60, 64, 67, 72];
* var i = 0;
*
* function setup() {
* osc = new p5.Oscillator('Triangle');
* osc.start();
* frameRate(1);
* }
*
* function draw() {
* var freq = midiToFreq(notes[i]);
* osc.freq(freq);
* i++;
* if (i >= notes.length){
* i = 0;
* }
* }
*
*/
p5.prototype.midiToFreq = function (m) {
return 440 * Math.pow(2, (m - 69) / 12);
};
/**
 * List the SoundFile formats that you will include. LoadSound
 * will search your directory for these extensions, and will pick
 * a format that is compatible with the client's web browser.
 * Free online file converters can generate each of these formats
 * from a source file.
*
* @method soundFormats
 * @param {String|Strings} formats e.g. 'mp3', 'wav', 'ogg'
* @example
*
* function preload() {
* // set the global sound formats
* soundFormats('mp3', 'ogg');
*
* // load either beatbox.mp3, or .ogg, depending on browser
* mySound = loadSound('../sounds/beatbox.mp3');
* }
*
* function setup() {
* mySound.play();
* }
*
*/
p5.prototype.soundFormats = function () {
// reset extensions array
p5sound.extensions = [];
// add extensions
for (var i = 0; i < arguments.length; i++) {
arguments[i] = arguments[i].toLowerCase();
if ([
'mp3',
'wav',
'ogg',
'm4a',
'aac'
].indexOf(arguments[i]) > -1) {
p5sound.extensions.push(arguments[i]);
} else {
throw arguments[i] + ' is not a valid sound format!';
}
}
};
p5.prototype.disposeSound = function () {
for (var i = 0; i < p5sound.soundArray.length; i++) {
p5sound.soundArray[i].dispose();
}
};
// register removeSound to dispose of p5sound SoundFiles, Convolvers,
// Oscillators etc when sketch ends
p5.prototype.registerMethod('remove', p5.prototype.disposeSound);
p5.prototype._checkFileFormats = function (paths) {
var path;
// if path is a single string, check to see if extension is provided
if (typeof paths === 'string') {
path = paths;
// see if extension is provided
var extTest = path.split('.').pop();
// if an extension is provided...
if ([
'mp3',
'wav',
'ogg',
'm4a',
'aac'
].indexOf(extTest) > -1) {
var supported = p5.prototype.isFileSupported(extTest);
if (!supported) {
  // the provided extension is not supported by this browser,
  // so swap in the first supported extension from soundFormats()
  var pathSplit = path.split('.');
  var pathCore = pathSplit[0];
  for (var j = 1; j <= pathSplit.length - 2; j++) {
    pathCore += '.' + pathSplit[j];
  }
  for (var i = 0; i < p5sound.extensions.length; i++) {
    var extension = p5sound.extensions[i];
    if (p5.prototype.isFileSupported(extension)) {
      path = pathCore + '.' + extension;
      break;
    }
  }
}
} else {
for (var i = 0; i < p5sound.extensions.length; i++) {
var extension = p5sound.extensions[i];
var supported = p5.prototype.isFileSupported(extension);
if (supported) {
path = path + '.' + extension;
break;
}
}
}
} else if (typeof paths === 'object') {
for (var i = 0; i < paths.length; i++) {
var extension = paths[i].split('.').pop();
var supported = p5.prototype.isFileSupported(extension);
if (supported) {
// console.log('.'+extension + ' is ' + supported +
// ' supported by your browser.');
path = paths[i];
break;
}
}
}
return path;
};
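// For example, with soundFormats('ogg', 'mp3') in effect:
//   _checkFileFormats(['beat.ogg', 'beat.mp3']) returns the first path whose
//   extension this browser can play, and _checkFileFormats('beat') appends
//   the first supported extension, e.g. 'beat.ogg'.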
/**
* Used by Osc and Env to chain signal math
*/
p5.prototype._mathChain = function (o, math, thisChain, nextChain, type) {
// if this type of math already exists in the chain, replace it
for (var i in o.mathOps) {
  if (o.mathOps[i] instanceof type) {
    o.mathOps[i].dispose();
    // for...in keys are strings, so convert before doing index math
    thisChain = Number(i);
    if (thisChain < o.mathOps.length - 1) {
      nextChain = o.mathOps[thisChain + 1];
    }
  }
}
o.mathOps[thisChain - 1].disconnect();
o.mathOps[thisChain - 1].connect(math);
math.connect(nextChain);
o.mathOps[thisChain] = math;
return o;
};
}(master);
var errorHandler;
errorHandler = function () {
'use strict';
/**
* Helper function to generate an error
* with a custom stack trace that points to the sketch
* and removes other parts of the stack trace.
*
* @private
*
* @param {String} name custom error name
* @param {String} errorTrace custom error trace
* @param {String} failedPath path to the file that failed to load
* @property {String} name custom error name
* @property {String} message custom error message
* @property {String} stack trace the error back to a line in the user's sketch.
* Note: this edits out stack trace within p5.js and p5.sound.
* @property {String} originalStack unedited, original stack trace
* @property {String} failedPath path to the file that failed to load
* @return {Error} returns a custom Error object
*/
var CustomError = function (name, errorTrace, failedPath) {
var err = new Error();
var tempStack;
err.name = name;
err.originalStack = err.stack + errorTrace;
tempStack = err.stack + errorTrace;
err.failedPath = failedPath;
// only print the part of the stack trace that refers to the user code:
var splitStack = tempStack.split('\n');
splitStack = splitStack.filter(function (ln) {
return !ln.match(/(p5.|native code|globalInit)/g);
});
err.stack = splitStack.join('\n');
return err;
};
return CustomError;
}();
var panner;
panner = function () {
'use strict';
var p5sound = master;
var ac = p5sound.audiocontext;
// Stereo panner
// if there is a stereo panner node use it
if (typeof ac.createStereoPanner !== 'undefined') {
p5.Panner = function (input, output, numInputChannels) {
this.stereoPanner = this.input = ac.createStereoPanner();
input.connect(this.stereoPanner);
this.stereoPanner.connect(output);
};
p5.Panner.prototype.pan = function (val, tFromNow) {
var time = tFromNow || 0;
var t = ac.currentTime + time;
this.stereoPanner.pan.linearRampToValueAtTime(val, t);
};
p5.Panner.prototype.inputChannels = function (numChannels) {
  // no-op: the native StereoPannerNode handles mono and stereo input
};
p5.Panner.prototype.connect = function (obj) {
this.stereoPanner.connect(obj);
};
p5.Panner.prototype.disconnect = function (obj) {
this.stereoPanner.disconnect();
};
} else {
// if there is no createStereoPanner object
// such as in safari 7.1.7 at the time of writing this
// use this method to create the effect
p5.Panner = function (input, output, numInputChannels) {
this.input = ac.createGain();
input.connect(this.input);
this.left = ac.createGain();
this.right = ac.createGain();
this.left.channelInterpretation = 'discrete';
this.right.channelInterpretation = 'discrete';
// if input is stereo
if (numInputChannels > 1) {
this.splitter = ac.createChannelSplitter(2);
this.input.connect(this.splitter);
this.splitter.connect(this.left, 1);
this.splitter.connect(this.right, 0);
} else {
this.input.connect(this.left);
this.input.connect(this.right);
}
this.output = ac.createChannelMerger(2);
// note: this.left and this.right end up cross-wired through the
// splitter and merger; the sin/cos gains in pan() below are matched
// to this wiring, so the net left/right image comes out correct
this.left.connect(this.output, 0, 1);
this.right.connect(this.output, 0, 0);
this.output.connect(output);
};
// -1 is left, +1 is right
p5.Panner.prototype.pan = function (val, tFromNow) {
var time = tFromNow || 0;
var t = ac.currentTime + time;
var v = (val + 1) / 2;
var rightVal = Math.cos(v * Math.PI / 2);
var leftVal = Math.sin(v * Math.PI / 2);
this.left.gain.linearRampToValueAtTime(leftVal, t);
this.right.gain.linearRampToValueAtTime(rightVal, t);
};
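// A note on the math above: this is an equal-power pan. At val = 0
// (center), v = 0.5 and both gains are cos(PI/4) ~= 0.707, so
// leftVal^2 + rightVal^2 === 1 at every pan position and the overall
// loudness stays constant as the sound moves across the stereo field.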
p5.Panner.prototype.inputChannels = function (numChannels) {
if (numChannels === 1) {
this.input.disconnect();
this.input.connect(this.left);
this.input.connect(this.right);
} else if (numChannels === 2) {
if (typeof this.splitter === 'undefined') {
this.splitter = ac.createChannelSplitter(2);
}
this.input.disconnect();
this.input.connect(this.splitter);
this.splitter.connect(this.left, 1);
this.splitter.connect(this.right, 0);
}
};
p5.Panner.prototype.connect = function (obj) {
this.output.connect(obj);
};
p5.Panner.prototype.disconnect = function (obj) {
this.output.disconnect();
};
}
// 3D panner
p5.Panner3D = function (input, output) {
var panner3D = ac.createPanner();
panner3D.panningModel = 'HRTF';
panner3D.distanceModel = 'linear';
panner3D.setPosition(0, 0, 0);
input.connect(panner3D);
panner3D.connect(output);
panner3D.pan = function (xVal, yVal, zVal) {
panner3D.setPosition(xVal, yVal, zVal);
};
return panner3D;
};
}(master);
var soundfile;
soundfile = function () {
'use strict';
var CustomError = errorHandler;
var p5sound = master;
var ac = p5sound.audiocontext;
/**
* SoundFile object with a path to a file.
*
* The p5.SoundFile may not be available immediately because
* it loads the file information asynchronously.
*
* To do something with the sound as soon as it loads
* pass the name of a function as the second parameter.
*
 * Only one file path is required. However, audio file formats
 * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all
 * web browsers. If you want to ensure compatibility, instead of a single
 * file path, you may include an Array of filepaths, and the browser will
 * choose a format that works.
*
* @class p5.SoundFile
* @constructor
* @param {String/Array} path path to a sound file (String). Optionally,
* you may include multiple file formats in
* an array. Alternately, accepts an object
* from the HTML5 File API, or a p5.File.
* @param {Function} [successCallback] Name of a function to call once file loads
* @param {Function} [errorCallback] Name of a function to call if file fails to
* load. This function will receive an error or
* XMLHttpRequest object with information
* about what went wrong.
* @param {Function} [whileLoadingCallback] Name of a function to call while file
* is loading. That function will
* receive progress of the request to
* load the sound file
* (between 0 and 1) as its first
* parameter. This progress
* does not account for the additional
* time needed to decode the audio data.
*
* @return {Object} p5.SoundFile Object
* @example
*
*
* function preload() {
* mySound = loadSound('assets/doorbell.mp3');
* }
*
* function setup() {
* mySound.setVolume(0.1);
* mySound.play();
* }
*
*
*/
p5.SoundFile = function (paths, onload, onerror, whileLoading) {
if (typeof paths !== 'undefined') {
if (typeof paths == 'string' || typeof paths[0] == 'string') {
var path = p5.prototype._checkFileFormats(paths);
this.url = path;
} else if (typeof paths == 'object') {
if (!(window.File && window.FileReader && window.FileList && window.Blob)) {
// The File API isn't supported in this browser
throw 'Unable to load file because the File API is not supported';
}
}
// if type is a p5.File...get the actual file
if (paths.file) {
paths = paths.file;
}
this.file = paths;
}
// private _onended callback, set by the method: onended(callback)
this._onended = function () {
};
this._looping = false;
this._playing = false;
this._paused = false;
this._pauseTime = 0;
// cues for scheduling events with addCue() removeCue()
this._cues = [];
// position of the most recently played sample
this._lastPos = 0;
this._counterNode = null;
this._scopeNode = null;
// array of sources so that they can all be stopped!
this.bufferSourceNodes = [];
// current source
this.bufferSourceNode = null;
this.buffer = null;
this.playbackRate = 1;
this.gain = 1;
this.input = p5sound.audiocontext.createGain();
this.output = p5sound.audiocontext.createGain();
this.reversed = false;
// start and end of playback / loop
this.startTime = 0;
this.endTime = null;
this.pauseTime = 0;
// "restart" would stop playback before retriggering
this.mode = 'sustain';
// time that playback was started, in millis
this.startMillis = null;
// stereo panning
this.panPosition = 0;
this.panner = new p5.Panner(this.output, p5sound.input, 2);
// it is possible to instantiate a soundfile with no path
if (this.url || this.file) {
this.load(onload, onerror);
}
// add this p5.SoundFile to the soundArray
p5sound.soundArray.push(this);
if (typeof whileLoading === 'function') {
this._whileLoading = whileLoading;
} else {
this._whileLoading = function () {
};
}
};
// register preload handling of loadSound
p5.prototype.registerPreloadMethod('loadSound', p5.prototype);
/**
* loadSound() returns a new p5.SoundFile from a specified
* path. If called during preload(), the p5.SoundFile will be ready
* to play in time for setup() and draw(). If called outside of
* preload, the p5.SoundFile will not be ready immediately, so
 * loadSound accepts a callback as the second parameter. Using a
 * local server is recommended when loading external files.
*
* @method loadSound
* @param {String/Array} path Path to the sound file, or an array with
* paths to soundfiles in multiple formats
* i.e. ['sound.ogg', 'sound.mp3'].
* Alternately, accepts an object: either
* from the HTML5 File API, or a p5.File.
* @param {Function} [successCallback] Name of a function to call once file loads
* @param {Function} [errorCallback] Name of a function to call if there is
* an error loading the file.
* @param {Function} [whileLoading] Name of a function to call while file is loading.
* This function will receive the percentage loaded
* so far, from 0.0 to 1.0.
* @return {SoundFile} Returns a p5.SoundFile
* @example
*
* function preload() {
* mySound = loadSound('assets/doorbell.mp3');
* }
*
* function setup() {
* mySound.setVolume(0.1);
* mySound.play();
* }
*
*/
p5.prototype.loadSound = function (path, callback, onerror, whileLoading) {
// if loading locally without a server
if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
var s = new p5.SoundFile(path, callback, onerror, whileLoading);
return s;
};
/**
* This is a helper function that the p5.SoundFile calls to load
* itself. Accepts a callback (the name of another function)
* as an optional parameter.
*
* @private
* @param {Function} [successCallback] Name of a function to call once file loads
* @param {Function} [errorCallback] Name of a function to call if there is an error
*/
p5.SoundFile.prototype.load = function (callback, errorCallback) {
var loggedError = false;
var self = this;
var errorTrace = new Error().stack;
if (this.url != undefined && this.url != '') {
var request = new XMLHttpRequest();
request.addEventListener('progress', function (evt) {
self._updateProgress(evt);
}, false);
request.open('GET', this.url, true);
request.responseType = 'arraybuffer';
request.onload = function () {
if (request.status == 200) {
// on success loading file:
ac.decodeAudioData(request.response, // success decoding buffer:
function (buff) {
self.buffer = buff;
self.panner.inputChannels(buff.numberOfChannels);
if (callback) {
callback(self);
}
}, // error decoding buffer. "e" is undefined in Chrome 11/22/2015
function (e) {
var err = new CustomError('decodeAudioData', errorTrace, self.url);
var msg = 'AudioContext error at decodeAudioData for ' + self.url;
if (errorCallback) {
err.msg = msg;
errorCallback(err);
} else {
console.error(msg + '\n The error stack trace includes: \n' + err.stack);
}
});
} else {
var err = new CustomError('loadSound', errorTrace, self.url);
var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
if (errorCallback) {
err.message = msg;
errorCallback(err);
} else {
console.error(msg + '\n The error stack trace includes: \n' + err.stack);
}
}
};
// if there is another error, aside from 404...
request.onerror = function (e) {
var err = new CustomError('loadSound', errorTrace, self.url);
var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
if (errorCallback) {
err.message = msg;
errorCallback(err);
} else {
console.error(msg + '\n The error stack trace includes: \n' + err.stack);
}
};
request.send();
} else if (this.file != undefined) {
var reader = new FileReader();
var self = this;
reader.onload = function () {
ac.decodeAudioData(reader.result, function (buff) {
self.buffer = buff;
self.panner.inputChannels(buff.numberOfChannels);
if (callback) {
callback(self);
}
});
};
reader.onerror = function (e) {
  if (errorCallback)
    errorCallback(e);
};
reader.readAsArrayBuffer(this.file);
}
};
// TO DO: use this method to create a loading bar that shows progress during file upload/decode.
p5.SoundFile.prototype._updateProgress = function (evt) {
if (evt.lengthComputable) {
var percentComplete = evt.loaded / evt.total * 0.99;
this._whileLoading(percentComplete, evt);
} else {
// Unable to compute progress information since the total size is unknown
this._whileLoading('size unknown');
}
};
/**
* Returns true if the sound file finished loading successfully.
*
* @method isLoaded
* @return {Boolean}
*/
p5.SoundFile.prototype.isLoaded = function () {
if (this.buffer) {
return true;
} else {
return false;
}
};
/**
* Play the p5.SoundFile
*
* @method play
* @param {Number} [startTime] (optional) schedule playback to start (in seconds from now).
* @param {Number} [rate] (optional) playback rate
* @param {Number} [amp] (optional) amplitude (volume)
* of playback
* @param {Number} [cueStart] (optional) cue start time in seconds
* @param {Number} [duration] (optional) duration of playback in seconds
*/
p5.SoundFile.prototype.play = function (time, rate, amp, _cueStart, duration) {
var self = this;
var now = p5sound.audiocontext.currentTime;
var cueStart, cueEnd;
var time = time || 0;
if (time < 0) {
time = 0;
}
time = time + now;
// TO DO: if already playing, create array of buffers for easy stop()
if (this.buffer) {
// reset the pause time (if it was paused)
this._pauseTime = 0;
// handle restart playmode
if (this.mode === 'restart' && this.buffer && this.bufferSourceNode) {
var now = p5sound.audiocontext.currentTime;
this.bufferSourceNode.stop(time);
this._counterNode.stop(time);
}
// set playback rate
if (rate)
this.playbackRate = rate;
// make a new source and counter. They are automatically assigned playbackRate and buffer
this.bufferSourceNode = this._initSourceNode();
// garbage collect counterNode and create a new one
if (this._counterNode)
this._counterNode = undefined;
this._counterNode = this._initCounterNode();
if (_cueStart) {
if (_cueStart >= 0 && _cueStart < this.buffer.duration) {
// this.startTime = cueStart;
cueStart = _cueStart;
} else {
throw 'start time out of range';
}
} else {
cueStart = 0;
}
if (duration) {
// if duration is greater than buffer.duration, just play entire file anyway rather than throw an error
duration = duration <= this.buffer.duration - cueStart ? duration : this.buffer.duration;
} else {
duration = this.buffer.duration - cueStart;
}
// TO DO: Fix this. It broke in Safari
//
// method of controlling gain for individual bufferSourceNodes, without resetting overall soundfile volume
// if (typeof(this.bufferSourceNode.gain === 'undefined' ) ) {
// this.bufferSourceNode.gain = p5sound.audiocontext.createGain();
// }
// this.bufferSourceNode.connect(this.bufferSourceNode.gain);
// set local amp if provided, otherwise 1
var a = amp || 1;
// this.bufferSourceNode.gain.gain.setValueAtTime(a, p5sound.audiocontext.currentTime);
// this.bufferSourceNode.gain.connect(this.output);
this.bufferSourceNode.connect(this.output);
this.output.gain.value = a;
// if it was paused, play at the pause position
if (this._paused) {
this.bufferSourceNode.start(time, this.pauseTime, duration);
this._counterNode.start(time, this.pauseTime, duration);
} else {
this.bufferSourceNode.start(time, cueStart, duration);
this._counterNode.start(time, cueStart, duration);
}
this._playing = true;
this._paused = false;
// add source to sources array, which is used in stopAll()
this.bufferSourceNodes.push(this.bufferSourceNode);
this.bufferSourceNode._arrayIndex = this.bufferSourceNodes.length - 1;
// delete this.bufferSourceNode from the sources array when it is done playing:
var clearOnEnd = function (e) {
this._playing = false;
this.removeEventListener('ended', clearOnEnd, false);
// call the onended callback
self._onended(self);
self.bufferSourceNodes.forEach(function (n, i) {
if (n._playing === false) {
self.bufferSourceNodes.splice(i, 1);
}
});
if (self.bufferSourceNodes.length === 0) {
self._playing = false;
}
};
this.bufferSourceNode.onended = clearOnEnd;
} else {
throw 'not ready to play file, buffer has yet to load. Try preload()';
}
// if looping, will restart at original time
this.bufferSourceNode.loop = this._looping;
this._counterNode.loop = this._looping;
if (this._looping === true) {
cueEnd = cueStart + duration;
this.bufferSourceNode.loopStart = cueStart;
this.bufferSourceNode.loopEnd = cueEnd;
this._counterNode.loopStart = cueStart;
this._counterNode.loopEnd = cueEnd;
}
};
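// A usage sketch (assumes mySound was loaded in preload()): play the
// last two seconds of the file at half speed, starting 0.5s from now.
//
//   mySound.play(0.5, 0.5, 1, mySound.duration() - 2, 2);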
/**
 * p5.SoundFile has two play modes: restart and
 * sustain. Play Mode determines what happens to a
 * p5.SoundFile if it is triggered while in the middle of playback.
 * In sustain mode, playback will continue simultaneous to the
 * new playback. In restart mode, play() will stop playback
 * and start over. Sustain is the default mode.
*
* @method playMode
* @param {String} str 'restart' or 'sustain'
* @example
*
* function setup(){
* mySound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
* function mouseClicked() {
* mySound.playMode('sustain');
* mySound.play();
* }
* function keyPressed() {
* mySound.playMode('restart');
* mySound.play();
* }
*
*
*/
p5.SoundFile.prototype.playMode = function (str) {
var s = str.toLowerCase();
// if restart, stop all other sounds from playing
if (s === 'restart' && this.buffer && this.bufferSourceNode) {
for (var i = 0; i < this.bufferSourceNodes.length - 1; i++) {
var now = p5sound.audiocontext.currentTime;
this.bufferSourceNodes[i].stop(now);
}
}
// set play mode to affect future playback
if (s === 'restart' || s === 'sustain') {
this.mode = s;
} else {
throw 'Invalid play mode. Must be either "restart" or "sustain"';
}
};
/**
* Pauses a file that is currently playing. If the file is not
* playing, then nothing will happen.
*
* After pausing, .play() will resume from the paused
* position.
* If p5.SoundFile had been set to loop before it was paused,
* it will continue to loop after it is unpaused with .play().
*
* @method pause
* @param {Number} [startTime] (optional) schedule event to occur
* seconds from now
* @example
*
* var soundFile;
*
* function preload() {
* soundFormats('ogg', 'mp3');
* soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
* }
* function setup() {
* background(0, 255, 0);
* soundFile.setVolume(0.1);
* soundFile.loop();
* }
* function keyTyped() {
* if (key == 'p') {
* soundFile.pause();
* background(255, 0, 0);
* }
* }
*
* function keyReleased() {
* if (key == 'p') {
* soundFile.play();
* background(0, 255, 0);
* }
* }
*
*
*/
p5.SoundFile.prototype.pause = function (time) {
var now = p5sound.audiocontext.currentTime;
var time = time || 0;
var pTime = time + now;
if (this.isPlaying() && this.buffer && this.bufferSourceNode) {
this.pauseTime = this.currentTime();
this.bufferSourceNode.stop(pTime);
this._counterNode.stop(pTime);
this._paused = true;
this._playing = false;
this._pauseTime = this.currentTime();
} else {
this._pauseTime = 0;
}
};
/**
* Loop the p5.SoundFile. Accepts optional parameters to set the
* playback rate, playback volume, loopStart, loopEnd.
*
* @method loop
* @param {Number} [startTime] (optional) schedule event to occur
* seconds from now
* @param {Number} [rate] (optional) playback rate
* @param {Number} [amp] (optional) playback volume
 * @param {Number} [cueLoopStart] (optional) startTime in seconds
* @param {Number} [duration] (optional) loop duration in seconds
*/
p5.SoundFile.prototype.loop = function (startTime, rate, amp, loopStart, duration) {
this._looping = true;
this.play(startTime, rate, amp, loopStart, duration);
};
/**
* Set a p5.SoundFile's looping flag to true or false. If the sound
* is currently playing, this change will take effect when it
* reaches the end of the current playback.
*
 * @method setLoop
 * @param {Boolean} Boolean   set looping to true or false
*/
p5.SoundFile.prototype.setLoop = function (bool) {
if (bool === true) {
this._looping = true;
} else if (bool === false) {
this._looping = false;
} else {
throw 'Error: setLoop accepts either true or false';
}
if (this.bufferSourceNode) {
this.bufferSourceNode.loop = this._looping;
this._counterNode.loop = this._looping;
}
};
/**
* Returns 'true' if a p5.SoundFile is currently looping and playing, 'false' if not.
 *
 * @method isLooping
 * @return {Boolean}
*/
p5.SoundFile.prototype.isLooping = function () {
if (!this.bufferSourceNode) {
return false;
}
if (this._looping === true && this.isPlaying() === true) {
return true;
}
return false;
};
/**
* Returns true if a p5.SoundFile is playing, false if not (i.e.
* paused or stopped).
*
* @method isPlaying
* @return {Boolean}
*/
p5.SoundFile.prototype.isPlaying = function () {
return this._playing;
};
/**
* Returns true if a p5.SoundFile is paused, false if not (i.e.
* playing or stopped).
*
* @method isPaused
* @return {Boolean}
*/
p5.SoundFile.prototype.isPaused = function () {
return this._paused;
};
/**
* Stop soundfile playback.
*
* @method stop
* @param {Number} [startTime] (optional) schedule event to occur
* in seconds from now
*/
p5.SoundFile.prototype.stop = function (timeFromNow) {
var time = timeFromNow || 0;
if (this.mode == 'sustain') {
this.stopAll(time);
this._playing = false;
this.pauseTime = 0;
this._paused = false;
} else if (this.buffer && this.bufferSourceNode) {
var now = p5sound.audiocontext.currentTime;
var t = time || 0;
this.pauseTime = 0;
this.bufferSourceNode.stop(now + t);
this._counterNode.stop(now + t);
this._playing = false;
this._paused = false;
}
};
/**
* Stop playback on all of this soundfile's sources.
* @private
*/
p5.SoundFile.prototype.stopAll = function (_time) {
var now = p5sound.audiocontext.currentTime;
var time = _time || 0;
if (this.buffer && this.bufferSourceNode) {
for (var i = 0; i < this.bufferSourceNodes.length; i++) {
if (typeof this.bufferSourceNodes[i] !== 'undefined') {
try {
this.bufferSourceNodes[i].onended = function () {
};
this.bufferSourceNodes[i].stop(now + time);
} catch (e) {
}
}
}
this._counterNode.stop(now + time);
this._onended(this);
}
};
/**
* Multiply the output volume (amplitude) of a sound file
* between 0.0 (silence) and 1.0 (full volume).
* 1.0 is the maximum amplitude of a digital sound, so multiplying
* by greater than 1.0 may cause digital distortion. To
 * fade, provide a rampTime parameter. For more
 * complex fades, see the Env class.
*
* Alternately, you can pass in a signal source such as an
* oscillator to modulate the amplitude with an audio signal.
*
* @method setVolume
* @param {Number|Object} volume Volume (amplitude) between 0.0
* and 1.0 or modulating signal/oscillator
* @param {Number} [rampTime] Fade for t seconds
* @param {Number} [timeFromNow] Schedule this event to happen at
* t seconds in the future
*/
p5.SoundFile.prototype.setVolume = function (vol, rampTime, tFromNow) {
if (typeof vol === 'number') {
var rampTime = rampTime || 0;
var tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(now + tFromNow);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
} else if (vol) {
vol.connect(this.output.gain);
} else {
// return the Gain Node
return this.output.gain;
}
};
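// e.g. fade this file to half volume over one second, starting 0.2s from now:
//
//   mySound.setVolume(0.5, 1, 0.2);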
// same as setVolume, to match Processing Sound
p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume;
// these are the same thing
p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume;
p5.SoundFile.prototype.getVolume = function () {
return this.output.gain.value;
};
/**
* Set the stereo panning of a p5.sound object to
* a floating point number between -1.0 (left) and 1.0 (right).
* Default is 0.0 (center).
*
* @method pan
* @param {Number} [panValue] Set the stereo panner
* @param {Number} timeFromNow schedule this event to happen
* seconds from now
* @example
*
*
* var ball = {};
* var soundFile;
*
* function setup() {
* soundFormats('ogg', 'mp3');
* soundFile = loadSound('assets/beatbox.mp3');
* }
*
* function draw() {
* background(0);
* ball.x = constrain(mouseX, 0, width);
* ellipse(ball.x, height/2, 20, 20)
* }
*
* function mousePressed(){
* // map the ball's x location to a panning degree
* // between -1.0 (left) and 1.0 (right)
* var panning = map(ball.x, 0., width,-1.0, 1.0);
* soundFile.pan(panning);
* soundFile.play();
* }
*
*/
p5.SoundFile.prototype.pan = function (pval, tFromNow) {
this.panPosition = pval;
this.panner.pan(pval, tFromNow);
};
/**
* Returns the current stereo pan position (-1.0 to 1.0)
 *
 * @method getPan
 * @return {Number} Returns the stereo pan setting of this p5.SoundFile
* as a number between -1.0 (left) and 1.0 (right).
* 0.0 is center and default.
*/
p5.SoundFile.prototype.getPan = function () {
return this.panPosition;
};
/**
* Set the playback rate of a sound file. Will change the speed and the pitch.
* Values less than zero will reverse the audio buffer.
*
* @method rate
* @param {Number} [playbackRate] Set the playback rate. 1.0 is normal,
* .5 is half-speed, 2.0 is twice as fast.
* Values less than zero play backwards.
* @example
*
* var song;
*
* function preload() {
* song = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
* function setup() {
* song.loop();
* }
*
* function draw() {
* background(200);
*
 * // Map mouseY to a rate between 0 and 2, constrained below
 * // between 0.01 and 4.
* // Changing the rate also alters the pitch
* var speed = map(mouseY, 0.1, height, 0, 2);
* speed = constrain(speed, 0.01, 4);
* song.rate(speed);
*
* // Draw a circle to show what is going on
* stroke(0);
* fill(51, 100);
* ellipse(mouseX, 100, 48, 48);
* }
*
*
*
*
*/
p5.SoundFile.prototype.rate = function (playbackRate) {
if (this.playbackRate === playbackRate && this.bufferSourceNode) {
if (this.bufferSourceNode.playbackRate.value === playbackRate) {
return;
}
}
this.playbackRate = playbackRate;
var rate = playbackRate;
if (this.playbackRate === 0 && this._playing) {
this.pause();
}
if (this.playbackRate < 0 && !this.reversed) {
var cPos = this.currentTime();
var cRate = this.bufferSourceNode.playbackRate.value;
// this.pause();
this.reverseBuffer();
rate = Math.abs(playbackRate);
var newPos = (cPos - this.duration()) / rate;
this.pauseTime = newPos;
} else if (this.playbackRate > 0 && this.reversed) {
this.reverseBuffer();
}
if (this.bufferSourceNode) {
var now = p5sound.audiocontext.currentTime;
this.bufferSourceNode.playbackRate.cancelScheduledValues(now);
this.bufferSourceNode.playbackRate.linearRampToValueAtTime(Math.abs(rate), now);
this._counterNode.playbackRate.cancelScheduledValues(now);
this._counterNode.playbackRate.linearRampToValueAtTime(Math.abs(rate), now);
}
};
/**
 * Set the playback rate relative to a MIDI note value, where
 * middle C (60) is normal speed. For example, setPitch(72) doubles
 * the playback rate, raising the pitch by one octave.
 *
 * @param {Number} num MIDI note value
 */
p5.SoundFile.prototype.setPitch = function (num) {
  var newPlaybackRate = midiToFreq(num) / midiToFreq(60);
  this.rate(newPlaybackRate);
};
p5.SoundFile.prototype.getPlaybackRate = function () {
return this.playbackRate;
};
/**
* Returns the duration of a sound file in seconds.
*
* @method duration
* @return {Number} The duration of the soundFile in seconds.
*/
p5.SoundFile.prototype.duration = function () {
// Return Duration
if (this.buffer) {
return this.buffer.duration;
} else {
return 0;
}
};
/**
* Return the current position of the p5.SoundFile playhead, in seconds.
* Note that if you change the playbackRate while the p5.SoundFile is
* playing, the results may not be accurate.
*
* @method currentTime
* @return {Number} currentTime of the soundFile in seconds.
*/
p5.SoundFile.prototype.currentTime = function () {
// TO DO --> make reverse() flip these values appropriately
if (this._pauseTime > 0) {
return this._pauseTime;
} else {
return this._lastPos / ac.sampleRate;
}
};
/**
 * Move the playhead of the song to a position, in seconds,
 * and optionally play for a given duration from that point.
 * If no duration is given, it will play from the cue time to
 * the end of the file.
*
* @method jump
* @param {Number} cueTime cueTime of the soundFile in seconds.
* @param {Number} duration duration in seconds.
*/
p5.SoundFile.prototype.jump = function (cueTime, duration) {
if (cueTime < 0 || cueTime > this.buffer.duration) {
throw 'jump time out of range';
}
if (duration > this.buffer.duration - cueTime) {
throw 'end time out of range';
}
var cTime = cueTime || 0;
var eTime = duration || this.buffer.duration - cueTime;
if (this.isPlaying()) {
this.stop();
}
this.play(0, this.playbackRate, this.output.gain.value, cTime, eTime);
};
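// For example (assuming mySound is loaded):
//
//   mySound.jump(2);    // move the playhead to 2s and play to the end
//   mySound.jump(1, 3); // play the three seconds from 1s to 4s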
/**
* Return the number of channels in a sound file.
* For example, Mono = 1, Stereo = 2.
*
* @method channels
* @return {Number} [channels]
*/
p5.SoundFile.prototype.channels = function () {
return this.buffer.numberOfChannels;
};
/**
* Return the sample rate of the sound file.
*
* @method sampleRate
* @return {Number} [sampleRate]
*/
p5.SoundFile.prototype.sampleRate = function () {
return this.buffer.sampleRate;
};
/**
* Return the number of samples in a sound file.
* Equal to sampleRate * duration.
*
* @method frames
* @return {Number} [sampleCount]
*/
p5.SoundFile.prototype.frames = function () {
return this.buffer.length;
};
/**
* Returns an array of amplitude peaks in a p5.SoundFile that can be
* used to draw a static waveform. Scans through the p5.SoundFile's
* audio buffer to find the greatest amplitudes. Accepts one
* parameter, 'length', which determines size of the array.
* Larger arrays result in more precise waveform visualizations.
*
* Inspired by Wavesurfer.js.
*
* @method getPeaks
 * @param  {Number} [length] length is the size of the returned array.
 *                           Larger length results in more precision.
 *                           Defaults to 5*width of the browser window.
 * @return {Float32Array} Array of peaks.
*/
p5.SoundFile.prototype.getPeaks = function (length) {
if (this.buffer) {
// set length to 5x the window's width if no length is provided
if (!length) {
  length = window.width * 5;
}
var buffer = this.buffer;
var sampleSize = buffer.length / length;
var sampleStep = ~~(sampleSize / 10) || 1;
var channels = buffer.numberOfChannels;
var peaks = new Float32Array(Math.round(length));
for (var c = 0; c < channels; c++) {
var chan = buffer.getChannelData(c);
for (var i = 0; i < length; i++) {
var start = ~~(i * sampleSize);
var end = ~~(start + sampleSize);
var max = 0;
for (var j = start; j < end; j += sampleStep) {
var value = chan[j];
if (value > max) {
max = value;
} else if (-value > max) {
max = -value;
}
}
if (c === 0 || Math.abs(max) > peaks[i]) {
peaks[i] = max;
}
}
}
return peaks;
} else {
throw 'Cannot load peaks yet, buffer is not loaded';
}
};
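// A sketch of getPeaks in use: draw one vertical line per peak to render
// a static waveform (assumes soundFile was loaded in preload()):
//
//   var peaks = soundFile.getPeaks(width);
//   for (var i = 0; i < peaks.length; i++) {
//     line(i, height / 2, i, height / 2 - peaks[i] * height / 2);
//   }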
/**
* Reverses the p5.SoundFile's buffer source.
* Playback must be handled separately (see example).
*
* @method reverseBuffer
* @example
*
* var drum;
*
* function preload() {
* drum = loadSound('assets/drum.mp3');
* }
*
* function setup() {
* drum.reverseBuffer();
* drum.play();
* }
*
*
*
*/
p5.SoundFile.prototype.reverseBuffer = function () {
var curVol = this.getVolume();
this.setVolume(0, 0.01, 0);
this.pause();
if (this.buffer) {
for (var i = 0; i < this.buffer.numberOfChannels; i++) {
Array.prototype.reverse.call(this.buffer.getChannelData(i));
}
// set reversed flag
this.reversed = !this.reversed;
} else {
throw 'SoundFile is not done loading';
}
this.setVolume(curVol, 0.01, 0.0101);
this.play();
};
/**
* Schedule an event to be called when the soundfile
* reaches the end of a buffer. If the soundfile is
* playing through once, this will be called when it
* ends. If it is looping, it will be called when
* stop is called.
*
* @method onended
* @param {Function} callback function to call when the
* soundfile has ended.
*/
p5.SoundFile.prototype.onended = function (callback) {
this._onended = callback;
return this;
};
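// e.g. log a message when playback finishes:
//
//   mySound.onended(function (sf) {
//     console.log('done playing');
//   });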
p5.SoundFile.prototype.add = function () {
};
p5.SoundFile.prototype.dispose = function () {
var now = p5sound.audiocontext.currentTime;
// remove reference to soundfile
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.stop(now);
if (this.buffer && this.bufferSourceNode) {
for (var i = 0; i < this.bufferSourceNodes.length; i++) {
if (this.bufferSourceNodes[i] !== null) {
this.bufferSourceNodes[i].disconnect();
try {
this.bufferSourceNodes[i].stop(now);
} catch (e) {
}
this.bufferSourceNodes[i] = null;
}
}
if (this.isPlaying()) {
try {
this._counterNode.stop(now);
} catch (e) {
console.log(e);
}
this._counterNode = null;
}
}
if (this.output) {
this.output.disconnect();
this.output = null;
}
if (this.panner) {
this.panner.disconnect();
this.panner = null;
}
};
/**
* Connects the output of a p5sound object to input of another
* p5.sound object. For example, you may connect a p5.SoundFile to an
* FFT or an Effect. If no parameter is given, it will connect to
* the master output. Most p5sound objects connect to the master
* output when they are created.
*
* @method connect
* @param {Object} [object] Audio object that accepts an input
*/
p5.SoundFile.prototype.connect = function (unit) {
if (!unit) {
this.panner.connect(p5sound.input);
} else {
if (unit.hasOwnProperty('input')) {
this.panner.connect(unit.input);
} else {
this.panner.connect(unit);
}
}
};
/**
* Disconnects the output of this p5sound object.
*
* @method disconnect
*/
p5.SoundFile.prototype.disconnect = function () {
this.panner.disconnect();
};
/**
 * @deprecated p5.SoundFile.getLevel has been removed from the
 * library. Use p5.Amplitude instead.
 */
p5.SoundFile.prototype.getLevel = function (smoothing) {
  console.warn('p5.SoundFile.getLevel has been removed from the library. Use p5.Amplitude instead');
};
/**
* Reset the source for this SoundFile to a
* new path (URL).
*
* @method setPath
* @param {String} path path to audio file
* @param {Function} callback Callback
*/
p5.SoundFile.prototype.setPath = function (p, callback) {
var path = p5.prototype._checkFileFormats(p);
this.url = path;
this.load(callback);
};
/**
* Replace the current Audio Buffer with a new Buffer.
*
* @param {Array} buf Array of Float32 Array(s). 2 Float32 Arrays
* will create a stereo source. 1 will create
* a mono source.
*/
p5.SoundFile.prototype.setBuffer = function (buf) {
var numChannels = buf.length;
var size = buf[0].length;
var newBuffer = ac.createBuffer(numChannels, size, ac.sampleRate);
if (!(buf[0] instanceof Float32Array)) {
buf[0] = new Float32Array(buf[0]);
}
for (var channelNum = 0; channelNum < numChannels; channelNum++) {
var channel = newBuffer.getChannelData(channelNum);
channel.set(buf[channelNum]);
}
this.buffer = newBuffer;
// set numbers of channels on input to the panner
this.panner.inputChannels(numChannels);
};
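// A sketch of setBuffer in use: fill a mono buffer with one second of a
// 440Hz sine wave (mySound is assumed to be an existing p5.SoundFile):
//
//   var sr = sampleRate();
//   var data = new Float32Array(sr);
//   for (var i = 0; i < sr; i++) {
//     data[i] = Math.sin(2 * Math.PI * 440 * i / sr);
//   }
//   mySound.setBuffer([data]);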
//////////////////////////////////////////////////
// script processor node with an empty buffer to help
// keep a sample-accurate position in playback buffer.
// Inspired by Chinmay Pendharkar's technique for Sonoport --> http://bit.ly/1HwdCsV
// Copyright [2015] [Sonoport (Asia) Pte. Ltd.],
// Licensed under the Apache License http://apache.org/licenses/LICENSE-2.0
////////////////////////////////////////////////////////////////////////////////////
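// In other words: the counter buffer stores each sample's own index as its
// value (e.g. [0, 1, 2, 3] for a 4-sample buffer), so whatever value the
// ScriptProcessorNode reads at a given moment *is* the playhead position,
// in samples, within the source buffer.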
// initialize counterNode, set its initial buffer and playbackRate
p5.SoundFile.prototype._initCounterNode = function () {
var self = this;
var now = ac.currentTime;
var cNode = ac.createBufferSource();
// dispose of scope node if it already exists
if (self._scopeNode) {
self._scopeNode.disconnect();
self._scopeNode.onaudioprocess = undefined;
self._scopeNode = null;
}
self._scopeNode = ac.createScriptProcessor(256, 1, 1);
// create counter buffer of the same length as self.buffer
cNode.buffer = _createCounterBuffer(self.buffer);
cNode.playbackRate.setValueAtTime(self.playbackRate, now);
cNode.connect(self._scopeNode);
self._scopeNode.connect(p5.soundOut._silentNode);
self._scopeNode.onaudioprocess = function (processEvent) {
var inputBuffer = processEvent.inputBuffer.getChannelData(0);
// update the lastPos
self._lastPos = inputBuffer[inputBuffer.length - 1] || 0;
// do any callbacks that have been scheduled
self._onTimeUpdate(self._lastPos);
};
return cNode;
};
// initialize sourceNode, set its initial buffer and playbackRate
p5.SoundFile.prototype._initSourceNode = function () {
var self = this;
var now = ac.currentTime;
var bufferSourceNode = ac.createBufferSource();
bufferSourceNode.buffer = self.buffer;
bufferSourceNode.playbackRate.value = self.playbackRate;
return bufferSourceNode;
};
var _createCounterBuffer = function (buffer) {
var array = new Float32Array(buffer.length);
// use the source buffer's own sample rate (rather than a hardcoded
// 44100) so the counter buffer stays aligned with the source
var audioBuf = ac.createBuffer(1, buffer.length, buffer.sampleRate);
for (var index = 0; index < buffer.length; index++) {
array[index] = index;
}
audioBuf.getChannelData(0).set(array);
return audioBuf;
};
/**
* processPeaks returns an array of timestamps where it thinks there is a beat.
*
* This is an asynchronous function that processes the soundfile in an offline audio context,
* and sends the results to your callback function.
*
 * The process involves running the soundfile through a lowpass filter, and finding all of the
 * peaks above the initial threshold. If the total number of peaks is below the minimum number of peaks,
 * it decreases the threshold and re-runs the analysis until either minPeaks or minThreshold is reached.
*
* @method processPeaks
* @param {Function} callback a function to call once this data is returned
* @param {Number} [initThreshold] initial threshold defaults to 0.9
* @param {Number} [minThreshold] minimum threshold defaults to 0.22
* @param {Number} [minPeaks] minimum number of peaks defaults to 200
* @return {Array} Array of timestamped peaks
*/
p5.SoundFile.prototype.processPeaks = function (callback, _initThreshold, _minThreshold, _minPeaks) {
var self = this;
var bufLen = this.buffer.length;
var sampleRate = this.buffer.sampleRate;
var buffer = this.buffer;
var initialThreshold = _initThreshold || 0.9, threshold = initialThreshold, minThreshold = _minThreshold || 0.22, minPeaks = _minPeaks || 200;
// Create offline context
var offlineContext = new OfflineAudioContext(1, bufLen, sampleRate);
// create buffer source
var source = offlineContext.createBufferSource();
source.buffer = buffer;
// Create filter. TO DO: allow custom setting of filter
var filter = offlineContext.createBiquadFilter();
filter.type = 'lowpass';
source.connect(filter);
filter.connect(offlineContext.destination);
// start playing at time:0
source.start(0);
offlineContext.startRendering();
// Render the song
// act on the result
offlineContext.oncomplete = function (e) {
var data = {};
var filteredBuffer = e.renderedBuffer;
var bufferData = filteredBuffer.getChannelData(0);
// step 1:
// create Peak instances, add them to array, with strength and sampleIndex
do {
allPeaks = getPeaksAtThreshold(bufferData, threshold);
threshold -= 0.005;
} while (Object.keys(allPeaks).length < minPeaks && threshold >= minThreshold);
// step 2:
// find intervals for each peak in the sampleIndex, add tempos array
var intervalCounts = countIntervalsBetweenNearbyPeaks(allPeaks);
// step 3: find top tempos
var groups = groupNeighborsByTempo(intervalCounts, filteredBuffer.sampleRate);
// sort top intervals
var topTempos = groups.sort(function (intA, intB) {
return intB.count - intA.count;
}).splice(0, 5);
// set this SoundFile's tempo to the top tempo
// (inside oncomplete, `this` is the OfflineAudioContext, so use `self`)
self.tempo = topTempos[0].tempo;
// step 4:
// new array of peaks at top tempo within a bpmVariance
var bpmVariance = 5;
var tempoPeaks = getPeaksAtTopTempo(allPeaks, topTempos[0].tempo, filteredBuffer.sampleRate, bpmVariance);
callback(tempoPeaks);
};
};
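/* A minimal usage sketch, assuming a p5 sketch with a preloaded
* p5.SoundFile named `song` (a hypothetical name):
*
* function setup() {
* song.processPeaks(function (peaks) {
* // `peaks` is an array of timestamps (in seconds) at the detected tempo
* console.log('found ' + peaks.length + ' peaks at ~' + song.tempo + ' bpm');
* });
* }
*/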
// process peaks
var Peak = function (amp, i) {
this.sampleIndex = i;
this.amplitude = amp;
this.tempos = [];
this.intervals = [];
};
var allPeaks = [];
// 1. for processPeaks(): identify peaks above a threshold
// returns an object of Peak instances, keyed by sampleIndex (frames of the original soundfile)
function getPeaksAtThreshold(data, threshold) {
var peaksObj = {};
var length = data.length;
for (var i = 0; i < length; i++) {
if (data[i] > threshold) {
var amp = data[i];
var peak = new Peak(amp, i);
peaksObj[i] = peak;
// Skip forward ~ 1/8s to get past this peak.
i += 6000;
}
// advance an extra sample each pass: only every other sample is scanned, for speed
i++;
}
return peaksObj;
}
// 2. for processPeaks()
function countIntervalsBetweenNearbyPeaks(peaksObj) {
var intervalCounts = [];
// sort keys numerically (the default sort is lexicographic, which misorders sample indexes)
var peaksArray = Object.keys(peaksObj).sort(function (a, b) {
return a - b;
});
for (var index = 0; index < peaksArray.length; index++) {
// find intervals in comparison to nearby peaks
for (var i = 0; i < 10; i++) {
var startPeak = peaksObj[peaksArray[index]];
var endPeak = peaksObj[peaksArray[index + i]];
if (startPeak && endPeak) {
var startPos = startPeak.sampleIndex;
var endPos = endPeak.sampleIndex;
var interval = endPos - startPos;
// add a sample interval to the startPeak
if (interval > 0) {
startPeak.intervals.push(interval);
}
// tally the intervals and note whether this one has been seen before
var foundInterval = intervalCounts.some(function (intervalCount) {
if (intervalCount.interval === interval) {
intervalCount.count++;
return true;
}
});
// if this interval has not been counted yet, store a new entry for it
if (!foundInterval) {
intervalCounts.push({
interval: interval,
count: 1
});
}
}
}
}
return intervalCounts;
}
// 3. for processPeaks --> find tempo
function groupNeighborsByTempo(intervalCounts, sampleRate) {
var tempoCounts = [];
intervalCounts.forEach(function (intervalCount) {
// Convert an interval to tempo
var theoreticalTempo = Math.abs(60 / (intervalCount.interval / sampleRate));
theoreticalTempo = mapTempo(theoreticalTempo);
var foundTempo = tempoCounts.some(function (tempoCount) {
if (tempoCount.tempo === theoreticalTempo) {
tempoCount.count += intervalCount.count;
return true;
}
});
if (!foundTempo) {
if (isNaN(theoreticalTempo)) {
return;
}
tempoCounts.push({
tempo: Math.round(theoreticalTempo),
count: intervalCount.count
});
}
});
return tempoCounts;
}
// 4. for processPeaks - get peaks at top tempo
function getPeaksAtTopTempo(peaksObj, tempo, sampleRate, bpmVariance) {
var peaksAtTopTempo = [];
// sort keys numerically, as in countIntervalsBetweenNearbyPeaks
var peaksArray = Object.keys(peaksObj).sort(function (a, b) {
return a - b;
});
// TO DO: filter out peaks that have the tempo and return
for (var i = 0; i < peaksArray.length; i++) {
var key = peaksArray[i];
var peak = peaksObj[key];
for (var j = 0; j < peak.intervals.length; j++) {
var intervalBPM = Math.round(Math.abs(60 / (peak.intervals[j] / sampleRate)));
intervalBPM = mapTempo(intervalBPM);
if (Math.abs(intervalBPM - tempo) < bpmVariance) {
// convert sampleIndex to seconds
peaksAtTopTempo.push(peak.sampleIndex / sampleRate);
}
}
}
// filter out peaks that are very close to each other
peaksAtTopTempo = peaksAtTopTempo.filter(function (peakTime, index, arr) {
// keep a peak unless another peak follows within 10ms
var nextPeakTime = arr[index + 1];
return nextPeakTime === undefined || nextPeakTime - peakTime > 0.01;
});
return peaksAtTopTempo;
}
// helper function for processPeaks
function mapTempo(theoreticalTempo) {
// guard against values that would send the while loops below into infinite loops
if (!isFinite(theoreticalTempo) || theoreticalTempo === 0) {
return;
}
// Adjust the tempo to fit within the 90-180 BPM range
while (theoreticalTempo < 90)
theoreticalTempo *= 2;
while (theoreticalTempo > 180)
theoreticalTempo /= 2;
return theoreticalTempo;
}
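// Worked examples of the folding above: mapTempo(60) doubles to 120,
// mapTempo(400) halves twice to 100, and mapTempo(Infinity) returns
// undefined so callers can skip the value.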
/*** SCHEDULE EVENTS ***/
/**
* Schedule events to trigger every time this p5.SoundFile
* reaches a playback cue point.
*
* Accepts a callback function, a time (in seconds) at which to trigger
* the callback, and an optional parameter for the callback.
*
* The optional value, if provided, will be passed to the
* callback function when the cue fires.
*
*
* @method addCue
* @param {Number} time Time in seconds, relative to this sound
* file's playback. For example, to trigger
* an event every time playback reaches two
* seconds, pass in the number 2.
* @param {Function} callback The function that will be
* called at the given time. The callback
* receives the optional value (if any)
* as its parameter.
* @param {Object} [value] An object to be passed to the
* callback function when the cue fires.
* @return {Number} id ID of this cue,
* useful for removeCue(id)
* @example
*
* function setup() {
* background(0);
* noStroke();
* fill(255);
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* mySound = loadSound('assets/beat.mp3');
*
* // schedule calls to changeText
* mySound.addCue(0.50, changeText, "hello" );
* mySound.addCue(1.00, changeText, "p5" );
* mySound.addCue(1.50, changeText, "what" );
* mySound.addCue(2.00, changeText, "do" );
* mySound.addCue(2.50, changeText, "you" );
* mySound.addCue(3.00, changeText, "want" );
* mySound.addCue(4.00, changeText, "to" );
* mySound.addCue(5.00, changeText, "make" );
* mySound.addCue(6.00, changeText, "?" );
* }
*
* function changeText(val) {
* background(0);
* text(val, width/2, height/2);
* }
*
* function mouseClicked() {
* if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
* if (mySound.isPlaying() ) {
* mySound.stop();
* } else {
* mySound.play();
* }
* }
* }
*
*/
p5.SoundFile.prototype.addCue = function (time, callback, val) {
var id = this._cueIDCounter++;
var cue = new Cue(callback, time, id, val);
this._cues.push(cue);
return id;
};
/**
* Remove a callback based on its ID. The ID is returned by the
* addCue method.
*
* @method removeCue
* @param {Number} id ID of the cue, as returned by addCue
*/
p5.SoundFile.prototype.removeCue = function (id) {
var cueLength = this._cues.length;
for (var i = 0; i < cueLength; i++) {
var cue = this._cues[i];
if (cue.id === id) {
this._cues.splice(i, 1);
break;
}
}
};
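/* A minimal sketch of cue management, assuming a preloaded p5.SoundFile
* named `mySound` (as in the addCue example above):
*
* var cueId = mySound.addCue(1.5, function (val) {
* console.log(val); // logs 'ding' when playback passes 1.5 seconds
* }, 'ding');
* mySound.removeCue(cueId); // cancels the cue by its id
*/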
/**
* Remove all of the callbacks that had originally been scheduled
* via the addCue method.
*
* @method clearCues
*/
p5.SoundFile.prototype.clearCues = function () {
this._cues = [];
};
// private method that checks for cues to be fired if events
// have been scheduled using addCue(callback, time).
p5.SoundFile.prototype._onTimeUpdate = function (position) {
var playbackTime = position / this.buffer.sampleRate;
var cueLength = this._cues.length;
for (var i = 0; i < cueLength; i++) {
var cue = this._cues[i];
var callbackTime = cue.time;
var val = cue.val;
if (this._prevTime < callbackTime && callbackTime <= playbackTime) {
// pass the scheduled callbackTime as parameter to the callback
cue.callback(val);
}
}
this._prevTime = playbackTime;
};
// Cue inspired by JavaScript setTimeout, and the
// Tone.js Transport Timeline Event, MIT License Yotam Mann 2015 tonejs.org
var Cue = function (callback, time, id, val) {
this.callback = callback;
this.time = time;
this.id = id;
this.val = val;
};
}(sndcore, errorHandler, master);
var amplitude;
amplitude = function () {
'use strict';
var p5sound = master;
/**
* Amplitude measures volume between 0.0 and 1.0.
* Listens to all p5sound by default, or use setInput()
* to listen to a specific sound source. Accepts an optional
* smoothing value, which defaults to 0.
*
* @class p5.Amplitude
* @constructor
* @param {Number} [smoothing] between 0.0 and .999 to smooth
* amplitude readings (defaults to 0)
* @return {Object} Amplitude Object
* @example
*
* var sound, amplitude, cnv;
*
* function preload(){
* sound = loadSound('assets/beat.mp3');
* }
* function setup() {
* cnv = createCanvas(100,100);
* amplitude = new p5.Amplitude();
*
* // start / stop the sound when canvas is clicked
* cnv.mouseClicked(function() {
* if (sound.isPlaying() ){
* sound.stop();
* } else {
* sound.play();
* }
* });
* }
* function draw() {
* background(0);
* fill(255);
* var level = amplitude.getLevel();
* var size = map(level, 0, 1, 0, 200);
* ellipse(width/2, height/2, size, size);
* }
*
*
*/
p5.Amplitude = function (smoothing) {
// Set to 2048 for now. In future iterations, this should be inherited or parsed from p5sound's default
this.bufferSize = 2048;
// set audio context
this.audiocontext = p5sound.audiocontext;
this.processor = this.audiocontext.createScriptProcessor(this.bufferSize, 2, 1);
// for connections
this.input = this.processor;
this.output = this.audiocontext.createGain();
// smoothing defaults to 0
this.smoothing = smoothing || 0;
// the variables to return
this.volume = 0;
this.average = 0;
this.stereoVol = [
0,
0
];
this.stereoAvg = [
0,
0
];
this.stereoVolNorm = [
0,
0
];
this.volMax = 0.001;
this.normalize = false;
this.processor.onaudioprocess = this._audioProcess.bind(this);
this.processor.connect(this.output);
this.output.gain.value = 0;
// this may only be necessary because of a Chrome bug
this.output.connect(this.audiocontext.destination);
// connect to p5sound master output by default, unless set by input()
p5sound.meter.connect(this.processor);
// register this p5.Amplitude in the soundArray
p5sound.soundArray.push(this);
};
/**
* Connects to the p5sound instance (master output) by default.
* Optionally, you can pass in a specific source (i.e. a soundfile).
*
* @method setInput
* @param {soundObject|undefined} [snd] set the sound source
* (optional, defaults to
* master output)
* @param {Number|undefined} [smoothing] a range between 0.0 and 1.0
* to smooth amplitude readings
* @example
*
* function preload(){
* sound1 = loadSound('assets/beat.mp3');
* sound2 = loadSound('assets/drum.mp3');
* }
* function setup(){
* amplitude = new p5.Amplitude();
* sound1.play();
* sound2.play();
* amplitude.setInput(sound2);
* }
* function draw() {
* background(0);
* fill(255);
* var level = amplitude.getLevel();
* var size = map(level, 0, 1, 0, 200);
* ellipse(width/2, height/2, size, size);
* }
* function mouseClicked(){
* sound1.stop();
* sound2.stop();
* }
*
*/
p5.Amplitude.prototype.setInput = function (source, smoothing) {
p5sound.meter.disconnect();
if (smoothing) {
this.smoothing = smoothing;
}
// connect to the master output of the p5sound instance if no source is provided
if (source == null) {
console.log('Amplitude input source is not ready! Connecting to master output instead');
p5sound.meter.connect(this.processor);
} else if (source instanceof p5.Signal) {
source.output.connect(this.processor);
} else if (source) {
source.connect(this.processor);
this.processor.disconnect();
this.processor.connect(this.output);
} else {
p5sound.meter.connect(this.processor);
}
};
p5.Amplitude.prototype.connect = function (unit) {
if (unit) {
if (unit.hasOwnProperty('input')) {
this.output.connect(unit.input);
} else {
this.output.connect(unit);
}
} else {
// p5.Amplitude has no panner; connect straight to the master input
this.output.connect(p5sound.input);
}
};
p5.Amplitude.prototype.disconnect = function (unit) {
this.output.disconnect();
};
// TO DO make this stereo / dependent on # of audio channels
p5.Amplitude.prototype._audioProcess = function (event) {
for (var channel = 0; channel < event.inputBuffer.numberOfChannels; channel++) {
var inputBuffer = event.inputBuffer.getChannelData(channel);
var bufLength = inputBuffer.length;
var total = 0;
var sum = 0;
var x;
for (var i = 0; i < bufLength; i++) {
x = inputBuffer[i];
if (this.normalize) {
// clamp the normalized sample once, then reuse it
var xNorm = Math.max(Math.min(x / this.volMax, 1), -1);
total += xNorm;
sum += xNorm * xNorm;
} else {
total += x;
sum += x * x;
}
}
var average = total / bufLength;
// root mean square: the square root of the mean of the squared samples
var rms = Math.sqrt(sum / bufLength);
this.stereoVol[channel] = Math.max(rms, this.stereoVol[channel] * this.smoothing);
// smooth the average against its own previous value
this.stereoAvg[channel] = Math.max(average, this.stereoAvg[channel] * this.smoothing);
this.volMax = Math.max(this.stereoVol[channel], this.volMax);
}
// add volume from all channels together
var self = this;
var volSum = this.stereoVol.reduce(function (previousValue, currentValue, index) {
self.stereoVolNorm[index - 1] = Math.max(Math.min(self.stereoVol[index - 1] / self.volMax, 1), 0);
self.stereoVolNorm[index] = Math.max(Math.min(self.stereoVol[index] / self.volMax, 1), 0);
return previousValue + currentValue;
});
// volume is average of channels
this.volume = volSum / this.stereoVol.length;
// normalized value
this.volNorm = Math.max(Math.min(this.volume / this.volMax, 1), 0);
};
/**
* Returns a single Amplitude reading at the moment it is called.
* For continuous readings, run in the draw loop.
*
* @method getLevel
* @param {Number} [channel] Optionally return only channel 0 (left) or 1 (right)
* @return {Number} Amplitude as a number between 0.0 and 1.0
* @example
*
* function preload(){
* sound = loadSound('assets/beat.mp3');
* }
* function setup() {
* amplitude = new p5.Amplitude();
* sound.play();
* }
* function draw() {
* background(0);
* fill(255);
* var level = amplitude.getLevel();
* var size = map(level, 0, 1, 0, 200);
* ellipse(width/2, height/2, size, size);
* }
* function mouseClicked(){
* sound.stop();
* }
*
*/
p5.Amplitude.prototype.getLevel = function (channel) {
if (typeof channel !== 'undefined') {
if (this.normalize) {
return this.stereoVolNorm[channel];
} else {
return this.stereoVol[channel];
}
} else if (this.normalize) {
return this.volNorm;
} else {
return this.volume;
}
};
/**
* Determines whether the results of Amplitude.process() will be
* normalized. To normalize, Amplitude divides each reading by the
* loudest reading it has processed so far, so that results
* reliably map between 0.0 and 1.0. However, if a louder moment
* occurs, the scaling factor will change. Accepts an optional
* boolean parameter (true or false); when called with no argument,
* the current setting is toggled. Normalizing is off by default.
*
* @method toggleNormalize
* @param {boolean} [boolean] set normalize to true or false;
* omit to toggle the current setting
*/
p5.Amplitude.prototype.toggleNormalize = function (bool) {
if (typeof bool === 'boolean') {
this.normalize = bool;
} else {
this.normalize = !this.normalize;
}
};
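/* A minimal sketch of normalized readings, assuming an `amplitude`
* created as in the examples above:
*
* amplitude.toggleNormalize(true);
* var level = amplitude.getLevel(); // scaled against the loudest reading so far
*/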
/**
* Smooth Amplitude analysis by averaging with the last analysis
* frame. Off by default.
*
* @method smooth
* @param {Number} s smoothing value, between 0.0 (none) and 1.0 (exclusive)
*/
p5.Amplitude.prototype.smooth = function (s) {
if (s >= 0 && s < 1) {
this.smoothing = s;
} else {
console.log('Error: smoothing must be between 0 and 1');
}
};
p5.Amplitude.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.input.disconnect();
this.output.disconnect();
this.input = this.processor = undefined;
this.output = undefined;
};
}(master);
var fft;
fft = function () {
'use strict';
var p5sound = master;
/**
* FFT (Fast Fourier Transform) is an analysis algorithm that
* isolates individual audio frequencies within a waveform.
*
* Once instantiated, a p5.FFT object can return an array based on
* two types of analyses:
* • FFT.waveform() computes
* amplitude values along the time domain. The array indices correspond
* to samples across a brief moment in time. Each value represents
* amplitude of the waveform at that sample of time.
* • FFT.analyze() computes amplitude values along the
* frequency domain. The array indices correspond to frequencies (i.e.
* pitches), from the lowest to the highest that humans can hear. Each
* value represents amplitude at that slice of the frequency spectrum.
* Use with getEnergy() to measure amplitude at specific
* frequencies, or within a range of frequencies.
*
* FFT analyzes a very short snapshot of sound called a sample
* buffer. It returns an array of amplitude measurements, referred
* to as bins. The array is 1024 bins long by default.
* You can change the bin array length, but it must be a power of 2
* between 16 and 1024 in order for the FFT algorithm to function
* correctly. The actual size of the FFT buffer is twice the
* number of bins, so given a standard sample rate, the buffer is
* 2048/44100 (roughly 0.046) seconds long.
*
*
* @class p5.FFT
* @constructor
* @param {Number} [smoothing] Smooth results of Freq Spectrum.
* 0.0 < smoothing < 1.0.
* Defaults to 0.8.
* @param {Number} [bins] Length of resulting array.
* Must be a power of two between
* 16 and 1024. Defaults to 1024.
* @return {Object} FFT Object
* @example
*
* function preload(){
* sound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
* function setup(){
* var cnv = createCanvas(100,100);
* cnv.mouseClicked(togglePlay);
* fft = new p5.FFT();
* sound.amp(0.2);
* }
*
* function draw(){
* background(0);
*
* var spectrum = fft.analyze();
* noStroke();
* fill(0,255,0); // spectrum is green
* for (var i = 0; i< spectrum.length; i++){
* var x = map(i, 0, spectrum.length, 0, width);
* var h = -height + map(spectrum[i], 0, 255, height, 0);
* rect(x, height, width / spectrum.length, h )
* }
*
* var waveform = fft.waveform();
* noFill();
* beginShape();
* stroke(255,0,0); // waveform is red
* strokeWeight(1);
* for (var i = 0; i< waveform.length; i++){
* var x = map(i, 0, waveform.length, 0, width);
* var y = map( waveform[i], -1, 1, 0, height);
* vertex(x,y);
* }
* endShape();
*
* text('click to play/pause', 4, 10);
* }
*
* // fade sound if mouse is over canvas
* function togglePlay() {
* if (sound.isPlaying()) {
* sound.pause();
* } else {
* sound.loop();
* }
* }
*
*/
p5.FFT = function (smoothing, bins) {
this.smoothing = smoothing || 0.8;
this.bins = bins || 1024;
var FFT_SIZE = this.bins * 2;
this.input = this.analyser = p5sound.audiocontext.createAnalyser();
// default connections to p5sound fftMeter
p5sound.fftMeter.connect(this.analyser);
this.analyser.smoothingTimeConstant = this.smoothing;
this.analyser.fftSize = FFT_SIZE;
this.freqDomain = new Uint8Array(this.analyser.frequencyBinCount);
this.timeDomain = new Uint8Array(this.analyser.frequencyBinCount);
// predefined frequency ranges; these will be tweakable
this.bass = [
20,
140
];
this.lowMid = [
140,
400
];
this.mid = [
400,
2600
];
this.highMid = [
2600,
5200
];
this.treble = [
5200,
14000
];
// register this p5.FFT in the soundArray
p5sound.soundArray.push(this);
};
/**
* Set the input source for the FFT analysis. If no source is
* provided, FFT will analyze all sound in the sketch.
*
* @method setInput
* @param {Object} [source] p5.sound object (or web audio API source node)
*/
p5.FFT.prototype.setInput = function (source) {
if (!source) {
p5sound.fftMeter.connect(this.analyser);
} else {
if (source.output) {
source.output.connect(this.analyser);
} else if (source.connect) {
source.connect(this.analyser);
}
p5sound.fftMeter.disconnect();
}
};
/**
* Returns an array of amplitude values (between -1.0 and +1.0) that represent
* a snapshot of amplitude readings in a single buffer. Length will be
* equal to bins (defaults to 1024). Can be used to draw the waveform
* of a sound.
*
* @method waveform
* @param {Number} [bins] Must be a power of two between
* 16 and 1024. Defaults to 1024.
* @param {String} [precision] If any value is provided, will return results
* in a Float32 Array which is more precise
* than a regular array.
* @return {Array} Array Array of amplitude values (-1 to 1)
* over time. Array length = bins.
*
*/
p5.FFT.prototype.waveform = function () {
var bins, mode, normalArray;
for (var i = 0; i < arguments.length; i++) {
if (typeof arguments[i] === 'number') {
bins = arguments[i];
this.analyser.fftSize = bins * 2;
}
if (typeof arguments[i] === 'string') {
mode = arguments[i];
}
}
// getFloatTimeDomainData does not work in Safari as of 5/2015
if (mode && !p5.prototype._isSafari()) {
timeToFloat(this, this.timeDomain);
this.analyser.getFloatTimeDomainData(this.timeDomain);
return this.timeDomain;
} else {
timeToInt(this, this.timeDomain);
this.analyser.getByteTimeDomainData(this.timeDomain);
normalArray = [];
for (var i = 0; i < this.timeDomain.length; i++) {
var scaled = p5.prototype.map(this.timeDomain[i], 0, 255, -1, 1);
normalArray.push(scaled);
}
return normalArray;
}
};
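/* A minimal sketch of the two return paths above, assuming an `fft`
* created as in the class example:
*
* var wave = fft.waveform(); // regular Array of values between -1 and 1
* var waveF32 = fft.waveform(1024, 'float'); // Float32Array on supporting browsers
*/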
/**
* Returns an array of amplitude values (between 0 and 255)
* across the frequency spectrum. Length is equal to FFT bins
* (1024 by default). The array indices correspond to frequencies
* (i.e. pitches), from the lowest to the highest that humans can
* hear. Each value represents amplitude at that slice of the
* frequency spectrum. Must be called prior to using
* getEnergy().
*
* @method analyze
* @param {Number} [bins] Must be a power of two between
* 16 and 1024. Defaults to 1024.
* @param {Number} [scale] If "dB," returns decibel
* float measurements between
* -140 and 0 (max).
* Otherwise returns integers from 0-255.
* @return {Array} spectrum Array of energy (amplitude/volume)
* values across the frequency spectrum.
* Lowest energy (silence) = 0, highest
* possible is 255.
* @example
*
* var osc;
* var fft;
*
* function setup(){
* createCanvas(100,100);
* osc = new p5.Oscillator();
* osc.amp(0);
* osc.start();
* fft = new p5.FFT();
* }
*
* function draw(){
* background(0);
*
* var freq = map(mouseX, 0, 800, 20, 15000);
* freq = constrain(freq, 1, 20000);
* osc.freq(freq);
*
* var spectrum = fft.analyze();
* noStroke();
* fill(0,255,0); // spectrum is green
* for (var i = 0; i< spectrum.length; i++){
* var x = map(i, 0, spectrum.length, 0, width);
* var h = -height + map(spectrum[i], 0, 255, height, 0);
* rect(x, height, width / spectrum.length, h );
* }
*
* stroke(255);
* text('Freq: ' + round(freq)+'Hz', 10, 10);
*
* isMouseOverCanvas();
* }
*
* // only play sound when mouse is over canvas
* function isMouseOverCanvas() {
* var mX = mouseX, mY = mouseY;
* if (mX > 0 && mX < width && mY < height && mY > 0) {
* osc.amp(0.5, 0.2);
* } else {
* osc.amp(0, 0.2);
* }
* }
*
*
*
*/
p5.FFT.prototype.analyze = function () {
var bins, mode;
for (var i = 0; i < arguments.length; i++) {
if (typeof arguments[i] === 'number') {
bins = this.bins = arguments[i];
this.analyser.fftSize = this.bins * 2;
}
if (typeof arguments[i] === 'string') {
mode = arguments[i];
}
}
if (mode && mode.toLowerCase() === 'db') {
freqToFloat(this);
this.analyser.getFloatFrequencyData(this.freqDomain);
return this.freqDomain;
} else {
freqToInt(this, this.freqDomain);
this.analyser.getByteFrequencyData(this.freqDomain);
var normalArray = Array.apply([], this.freqDomain);
return normalArray;
}
};
/**
* Returns the amount of energy (volume) at a specific
* frequency, or the average amount of energy between two
* frequencies. Accepts Number(s) corresponding
* to frequency (in Hz), or a String corresponding to predefined
* frequency ranges ("bass", "lowMid", "mid", "highMid", "treble").
* Returns a range between 0 (no energy/volume at that frequency) and
* 255 (maximum energy).
* NOTE: analyze() must be called prior to getEnergy(). analyze()
* tells the FFT to analyze frequency data, and getEnergy() uses
* the results to determine the value at a specific frequency or
* range of frequencies.
*
* @method getEnergy
* @param {Number|String} frequency1 Will return a value representing
* energy at this frequency. Alternately,
* the strings "bass", "lowMid" "mid",
* "highMid", and "treble" will return
* predefined frequency ranges.
* @param {Number} [frequency2] If a second frequency is given,
* will return average amount of
* energy that exists between the
* two frequencies.
* @return {Number} Energy Energy (volume/amplitude) between
* 0 and 255.
*
*/
p5.FFT.prototype.getEnergy = function (frequency1, frequency2) {
var nyquist = p5sound.audiocontext.sampleRate / 2;
if (frequency1 === 'bass') {
frequency1 = this.bass[0];
frequency2 = this.bass[1];
} else if (frequency1 === 'lowMid') {
frequency1 = this.lowMid[0];
frequency2 = this.lowMid[1];
} else if (frequency1 === 'mid') {
frequency1 = this.mid[0];
frequency2 = this.mid[1];
} else if (frequency1 === 'highMid') {
frequency1 = this.highMid[0];
frequency2 = this.highMid[1];
} else if (frequency1 === 'treble') {
frequency1 = this.treble[0];
frequency2 = this.treble[1];
}
if (typeof frequency1 !== 'number') {
throw 'invalid input for getEnergy()';
} else if (!frequency2) {
var index = Math.round(frequency1 / nyquist * this.freqDomain.length);
return this.freqDomain[index];
} else if (frequency1 && frequency2) {
// if second is higher than first
if (frequency1 > frequency2) {
var swap = frequency2;
frequency2 = frequency1;
frequency1 = swap;
}
var lowIndex = Math.round(frequency1 / nyquist * this.freqDomain.length);
var highIndex = Math.round(frequency2 / nyquist * this.freqDomain.length);
var total = 0;
var numFrequencies = 0;
// add up all of the values for the frequencies
for (var i = lowIndex; i <= highIndex; i++) {
total += this.freqDomain[i];
numFrequencies += 1;
}
// divide by total number of frequencies
var toReturn = total / numFrequencies;
return toReturn;
} else {
throw 'invalid input for getEnergy()';
}
};
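/* A minimal sketch, assuming an `fft` created as in the class example;
* analyze() must run first so the frequency data is current:
*
* fft.analyze();
* var bass = fft.getEnergy('bass'); // predefined range (20-140 Hz)
* var mids = fft.getEnergy(400, 2600); // average energy between two frequencies
* var a440 = fft.getEnergy(440); // energy at the bin nearest 440 Hz
*/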
// compatibility with v.012; changed to getEnergy in v.0121. Will be deprecated...
p5.FFT.prototype.getFreq = function (freq1, freq2) {
console.log('getFreq() is deprecated. Please use getEnergy() instead.');
var x = this.getEnergy(freq1, freq2);
return x;
};
/**
* Returns the spectral centroid of the input signal.
* NOTE: analyze() must be called prior to getCentroid(). analyze()
* tells the FFT to analyze frequency data, and getCentroid() uses
* the results to determine the spectral centroid.
*
* @method getCentroid
* @return {Number} centroidFrequency Frequency of the spectral centroid in Hz.
*
*
* @example
*
*
*
*function setup(){
* cnv = createCanvas(800,400);
* sound = new p5.AudioIn();
* sound.start();
* fft = new p5.FFT();
* sound.connect(fft);
*}
*
*
*function draw(){
*
* var centroidplot = 0.0;
* var spectralCentroid = 0;
*
*
* background(0);
* stroke(0,255,0);
* var spectrum = fft.analyze();
* fill(0,255,0); // spectrum is green
*
* //draw the spectrum
*
* for (var i = 0; i< spectrum.length; i++){
* var x = map(log(i), 0, log(spectrum.length), 0, width);
* var h = map(spectrum[i], 0, 255, 0, height);
* var rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
* rect(x, height, rectangle_width, -h )
* }
* var nyquist = 22050;
*
* // get the centroid
* spectralCentroid = fft.getCentroid();
*
* // the mean_freq_index calculation is for the display.
* var mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
*
* centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
*
*
* stroke(255,0,0); // the line showing where the centroid is will be red
*
* rect(centroidplot, 0, width / spectrum.length, height)
* noStroke();
* fill(255,255,255); // text is white
* textSize(40);
* text("centroid: "+round(spectralCentroid)+" Hz", 10, 40);
*}
*
*/
p5.FFT.prototype.getCentroid = function () {
var nyquist = p5sound.audiocontext.sampleRate / 2;
var cumulative_sum = 0;
var centroid_normalization = 0;
for (var i = 0; i < this.freqDomain.length; i++) {
cumulative_sum += i * this.freqDomain[i];
centroid_normalization += this.freqDomain[i];
}
var mean_freq_index = 0;
if (centroid_normalization !== 0) {
mean_freq_index = cumulative_sum / centroid_normalization;
}
var spec_centroid_freq = mean_freq_index * (nyquist / this.freqDomain.length);
return spec_centroid_freq;
};
/**
* Smooth FFT analysis by averaging with the last analysis frame.
*
* @method smooth
* @param {Number} smoothing 0.0 < smoothing < 1.0.
* Defaults to 0.8.
*/
p5.FFT.prototype.smooth = function (s) {
if (s) {
this.smoothing = s;
}
// apply the stored value, so calling with no argument does not set it to undefined
this.analyser.smoothingTimeConstant = this.smoothing;
};
p5.FFT.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.analyser.disconnect();
this.analyser = undefined;
};
/**
* Returns an array of average amplitude values for a given number
* of frequency bands split equally. N defaults to 16.
* NOTE: analyze() must be called prior to linAverages(). Analyze()
* tells the FFT to analyze frequency data, and linAverages() uses
* the results to group them into a smaller set of averages.
*
* @method linAverages
* @param {Number} N Number of returned frequency groups
* @return {Array} linearAverages Array of average amplitude values for each group
*/
p5.FFT.prototype.linAverages = function (N) {
var N = N || 16;
// This prevents undefined, null or 0 values of N
var spectrum = this.freqDomain;
var spectrumLength = spectrum.length;
var spectrumStep = Math.floor(spectrumLength / N);
var linearAverages = new Array(N);
// Keep a second index for the current average group and place the values accordingly
// with only one loop in the spectrum data
var groupIndex = 0;
for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
linearAverages[groupIndex] = linearAverages[groupIndex] !== undefined ? (linearAverages[groupIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex];
// Increase the group index when the last element of the group is processed
if (specIndex % spectrumStep === spectrumStep - 1) {
groupIndex++;
}
}
return linearAverages;
};
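/* A minimal sketch, assuming an `fft` created as in the class example:
*
* fft.analyze();
* var groups = fft.linAverages(16); // 16 equal-width frequency groups
*/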
/**
* Returns an array of average amplitude values of the spectrum, for a given
* set of Octave Bands.
* NOTE: analyze() must be called prior to logAverages(). Analyze()
* tells the FFT to analyze frequency data, and logAverages() uses
* the results to group them into a smaller set of averages.
*
* @method logAverages
* @param {Array} octaveBands Array of Octave Bands objects for grouping
* @return {Array} logAverages Array of average amplitude values for each group
*/
p5.FFT.prototype.logAverages = function (octaveBands) {
var nyquist = p5sound.audiocontext.sampleRate / 2;
var spectrum = this.freqDomain;
var spectrumLength = spectrum.length;
var logAverages = new Array(octaveBands.length);
// Keep a second index for the current average group and place the values accordingly
// With only one loop in the spectrum data
var octaveIndex = 0;
for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
var specIndexFrequency = Math.round(specIndex * nyquist / this.freqDomain.length);
// Increase the group index if the current frequency exceeds the limits of the band
if (specIndexFrequency > octaveBands[octaveIndex].hi) {
octaveIndex++;
}
logAverages[octaveIndex] = logAverages[octaveIndex] !== undefined ? (logAverages[octaveIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex];
}
return logAverages;
};
/**
* Calculates and returns the 1/N
* Octave Bands.
* N defaults to 3 and minimum central frequency to 15.625Hz.
* (1/3 Octave Bands ~= 31 Frequency Bands)
* Setting fCtr0 to the central value of a higher octave will ignore the lower bands
* and produce fewer frequency groups.
*
* @method getOctaveBands
* @param {Number} N Specifies the 1/N type of generated octave bands
* @param {Number} fCtr0 Minimum central frequency for the lowest band
* @return {Array} octaveBands Array of octave band objects with their bounds
*/
p5.FFT.prototype.getOctaveBands = function (N, fCtr0) {
var N = N || 3;
// Default to 1/3 Octave Bands
var fCtr0 = fCtr0 || 15.625;
// Minimum central frequency, defaults to 15.625Hz
var octaveBands = [];
var lastFrequencyBand = {
lo: fCtr0 / Math.pow(2, 1 / (2 * N)),
ctr: fCtr0,
hi: fCtr0 * Math.pow(2, 1 / (2 * N))
};
octaveBands.push(lastFrequencyBand);
var nyquist = p5sound.audiocontext.sampleRate / 2;
while (lastFrequencyBand.hi < nyquist) {
var newFrequencyBand = {};
newFrequencyBand.lo = lastFrequencyBand.hi;
newFrequencyBand.ctr = lastFrequencyBand.ctr * Math.pow(2, 1 / N);
newFrequencyBand.hi = newFrequencyBand.ctr * Math.pow(2, 1 / (2 * N));
octaveBands.push(newFrequencyBand);
lastFrequencyBand = newFrequencyBand;
}
return octaveBands;
};
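/* A minimal sketch combining getOctaveBands() with logAverages() above,
* assuming an `fft` created as in the class example:
*
* var octaveBands = fft.getOctaveBands(3); // roughly 31 bands of 1/3 octave each
* fft.analyze();
* var averages = fft.logAverages(octaveBands); // one average per band
*/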
// helper methods to convert the analysis arrays between float (dB) and int (0-255) formats
var freqToFloat = function (fft) {
if (fft.freqDomain instanceof Float32Array === false) {
fft.freqDomain = new Float32Array(fft.analyser.frequencyBinCount);
}
};
var freqToInt = function (fft) {
if (fft.freqDomain instanceof Uint8Array === false) {
fft.freqDomain = new Uint8Array(fft.analyser.frequencyBinCount);
}
};
var timeToFloat = function (fft) {
if (fft.timeDomain instanceof Float32Array === false) {
fft.timeDomain = new Float32Array(fft.analyser.frequencyBinCount);
}
};
var timeToInt = function (fft) {
if (fft.timeDomain instanceof Uint8Array === false) {
fft.timeDomain = new Uint8Array(fft.analyser.frequencyBinCount);
}
};
}(master);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_core_Tone;
Tone_core_Tone = function () {
'use strict';
function isUndef(val) {
return val === void 0;
}
function isFunction(val) {
return typeof val === 'function';
}
var audioContext;
if (isUndef(window.AudioContext)) {
window.AudioContext = window.webkitAudioContext;
}
if (isUndef(window.OfflineAudioContext)) {
window.OfflineAudioContext = window.webkitOfflineAudioContext;
}
if (!isUndef(AudioContext)) {
audioContext = new AudioContext();
} else {
throw new Error('Web Audio is not supported in this browser');
}
if (!isFunction(AudioContext.prototype.createGain)) {
AudioContext.prototype.createGain = AudioContext.prototype.createGainNode;
}
if (!isFunction(AudioContext.prototype.createDelay)) {
AudioContext.prototype.createDelay = AudioContext.prototype.createDelayNode;
}
if (!isFunction(AudioContext.prototype.createPeriodicWave)) {
AudioContext.prototype.createPeriodicWave = AudioContext.prototype.createWaveTable;
}
if (!isFunction(AudioBufferSourceNode.prototype.start)) {
AudioBufferSourceNode.prototype.start = AudioBufferSourceNode.prototype.noteGrainOn;
}
if (!isFunction(AudioBufferSourceNode.prototype.stop)) {
AudioBufferSourceNode.prototype.stop = AudioBufferSourceNode.prototype.noteOff;
}
if (!isFunction(OscillatorNode.prototype.start)) {
OscillatorNode.prototype.start = OscillatorNode.prototype.noteOn;
}
if (!isFunction(OscillatorNode.prototype.stop)) {
OscillatorNode.prototype.stop = OscillatorNode.prototype.noteOff;
}
if (!isFunction(OscillatorNode.prototype.setPeriodicWave)) {
OscillatorNode.prototype.setPeriodicWave = OscillatorNode.prototype.setWaveTable;
}
AudioNode.prototype._nativeConnect = AudioNode.prototype.connect;
AudioNode.prototype.connect = function (B, outNum, inNum) {
if (B.input) {
if (Array.isArray(B.input)) {
if (isUndef(inNum)) {
inNum = 0;
}
this.connect(B.input[inNum]);
} else {
this.connect(B.input, outNum, inNum);
}
} else {
try {
if (B instanceof AudioNode) {
this._nativeConnect(B, outNum, inNum);
} else {
this._nativeConnect(B, outNum);
}
} catch (e) {
throw new Error('error connecting to node: ' + B);
}
}
};
var Tone = function (inputs, outputs) {
if (isUndef(inputs) || inputs === 1) {
this.input = this.context.createGain();
} else if (inputs > 1) {
this.input = new Array(inputs);
}
if (isUndef(outputs) || outputs === 1) {
this.output = this.context.createGain();
} else if (outputs > 1) {
this.output = new Array(outputs);
}
};
Tone.prototype.set = function (params, value, rampTime) {
if (this.isObject(params)) {
rampTime = value;
} else if (this.isString(params)) {
var tmpObj = {};
tmpObj[params] = value;
params = tmpObj;
}
for (var attr in params) {
value = params[attr];
var parent = this;
if (attr.indexOf('.') !== -1) {
var attrSplit = attr.split('.');
for (var i = 0; i < attrSplit.length - 1; i++) {
parent = parent[attrSplit[i]];
}
attr = attrSplit[attrSplit.length - 1];
}
var param = parent[attr];
if (isUndef(param)) {
continue;
}
if (Tone.Signal && param instanceof Tone.Signal || Tone.Param && param instanceof Tone.Param) {
if (param.value !== value) {
if (isUndef(rampTime)) {
param.value = value;
} else {
param.rampTo(value, rampTime);
}
}
} else if (param instanceof AudioParam) {
if (param.value !== value) {
param.value = value;
}
} else if (param instanceof Tone) {
param.set(value);
} else if (param !== value) {
parent[attr] = value;
}
}
return this;
};
Tone.prototype.get = function (params) {
if (isUndef(params)) {
params = this._collectDefaults(this.constructor);
} else if (this.isString(params)) {
params = [params];
}
var ret = {};
for (var i = 0; i < params.length; i++) {
var attr = params[i];
var parent = this;
var subRet = ret;
if (attr.indexOf('.') !== -1) {
var attrSplit = attr.split('.');
for (var j = 0; j < attrSplit.length - 1; j++) {
var subAttr = attrSplit[j];
subRet[subAttr] = subRet[subAttr] || {};
subRet = subRet[subAttr];
parent = parent[subAttr];
}
attr = attrSplit[attrSplit.length - 1];
}
var param = parent[attr];
if (this.isObject(params[attr])) {
subRet[attr] = param.get();
} else if (Tone.Signal && param instanceof Tone.Signal) {
subRet[attr] = param.value;
} else if (Tone.Param && param instanceof Tone.Param) {
subRet[attr] = param.value;
} else if (param instanceof AudioParam) {
subRet[attr] = param.value;
} else if (param instanceof Tone) {
subRet[attr] = param.get();
} else if (!isFunction(param) && !isUndef(param)) {
subRet[attr] = param;
}
}
return ret;
};
Tone.prototype._collectDefaults = function (constr) {
var ret = [];
if (!isUndef(constr.defaults)) {
ret = Object.keys(constr.defaults);
}
if (!isUndef(constr._super)) {
var superDefs = this._collectDefaults(constr._super);
for (var i = 0; i < superDefs.length; i++) {
if (ret.indexOf(superDefs[i]) === -1) {
ret.push(superDefs[i]);
}
}
}
return ret;
};
Tone.prototype.toString = function () {
for (var className in Tone) {
var isLetter = className[0].match(/^[A-Z]$/);
var sameConstructor = Tone[className] === this.constructor;
if (isFunction(Tone[className]) && isLetter && sameConstructor) {
return className;
}
}
return 'Tone';
};
Tone.context = audioContext;
Tone.prototype.context = Tone.context;
Tone.prototype.bufferSize = 2048;
Tone.prototype.blockTime = 128 / Tone.context.sampleRate;
Tone.prototype.dispose = function () {
if (!this.isUndef(this.input)) {
if (this.input instanceof AudioNode) {
this.input.disconnect();
}
this.input = null;
}
if (!this.isUndef(this.output)) {
if (this.output instanceof AudioNode) {
this.output.disconnect();
}
this.output = null;
}
return this;
};
var _silentNode = null;
Tone.prototype.noGC = function () {
this.output.connect(_silentNode);
return this;
};
AudioNode.prototype.noGC = function () {
this.connect(_silentNode);
return this;
};
Tone.prototype.connect = function (unit, outputNum, inputNum) {
if (Array.isArray(this.output)) {
outputNum = this.defaultArg(outputNum, 0);
this.output[outputNum].connect(unit, 0, inputNum);
} else {
this.output.connect(unit, outputNum, inputNum);
}
return this;
};
Tone.prototype.disconnect = function (outputNum) {
if (Array.isArray(this.output)) {
outputNum = this.defaultArg(outputNum, 0);
this.output[outputNum].disconnect();
} else {
this.output.disconnect();
}
return this;
};
Tone.prototype.connectSeries = function () {
if (arguments.length > 1) {
var currentUnit = arguments[0];
for (var i = 1; i < arguments.length; i++) {
var toUnit = arguments[i];
currentUnit.connect(toUnit);
currentUnit = toUnit;
}
}
return this;
};
Tone.prototype.connectParallel = function () {
var connectFrom = arguments[0];
if (arguments.length > 1) {
for (var i = 1; i < arguments.length; i++) {
var connectTo = arguments[i];
connectFrom.connect(connectTo);
}
}
return this;
};
Tone.prototype.chain = function () {
if (arguments.length > 0) {
var currentUnit = this;
for (var i = 0; i < arguments.length; i++) {
var toUnit = arguments[i];
currentUnit.connect(toUnit);
currentUnit = toUnit;
}
}
return this;
};
Tone.prototype.fan = function () {
if (arguments.length > 0) {
for (var i = 0; i < arguments.length; i++) {
this.connect(arguments[i]);
}
}
return this;
};
AudioNode.prototype.chain = Tone.prototype.chain;
AudioNode.prototype.fan = Tone.prototype.fan;
Tone.prototype.defaultArg = function (given, fallback) {
if (this.isObject(given) && this.isObject(fallback)) {
var ret = {};
for (var givenProp in given) {
ret[givenProp] = this.defaultArg(fallback[givenProp], given[givenProp]);
}
for (var fallbackProp in fallback) {
ret[fallbackProp] = this.defaultArg(given[fallbackProp], fallback[fallbackProp]);
}
return ret;
} else {
return isUndef(given) ? fallback : given;
}
};
Tone.prototype.optionsObject = function (values, keys, defaults) {
var options = {};
if (values.length === 1 && this.isObject(values[0])) {
options = values[0];
} else {
for (var i = 0; i < keys.length; i++) {
options[keys[i]] = values[i];
}
}
if (!this.isUndef(defaults)) {
return this.defaultArg(options, defaults);
} else {
return options;
}
};
Tone.prototype.isUndef = isUndef;
Tone.prototype.isFunction = isFunction;
Tone.prototype.isNumber = function (arg) {
return typeof arg === 'number';
};
Tone.prototype.isObject = function (arg) {
return Object.prototype.toString.call(arg) === '[object Object]' && arg.constructor === Object;
};
Tone.prototype.isBoolean = function (arg) {
return typeof arg === 'boolean';
};
Tone.prototype.isArray = function (arg) {
return Array.isArray(arg);
};
Tone.prototype.isString = function (arg) {
return typeof arg === 'string';
};
Tone.noOp = function () {
};
Tone.prototype._readOnly = function (property) {
if (Array.isArray(property)) {
for (var i = 0; i < property.length; i++) {
this._readOnly(property[i]);
}
} else {
Object.defineProperty(this, property, {
writable: false,
enumerable: true
});
}
};
Tone.prototype._writable = function (property) {
if (Array.isArray(property)) {
for (var i = 0; i < property.length; i++) {
this._writable(property[i]);
}
} else {
Object.defineProperty(this, property, { writable: true });
}
};
Tone.State = {
Started: 'started',
Stopped: 'stopped',
Paused: 'paused'
};
Tone.prototype.equalPowerScale = function (percent) {
var piFactor = 0.5 * Math.PI;
return Math.sin(percent * piFactor);
};
Tone.prototype.dbToGain = function (db) {
return Math.pow(2, db / 6);
};
Tone.prototype.gainToDb = function (gain) {
return 20 * (Math.log(gain) / Math.LN10);
};
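// Worked examples: dbToGain(-6) === 0.5 exactly (this module approximates
// with 6 dB per doubling), while gainToDb(0.5) ~= -6.02 via 20 * log10.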
Tone.prototype.now = function () {
return this.context.currentTime;
};
Tone.extend = function (child, parent) {
if (isUndef(parent)) {
parent = Tone;
}
function TempConstructor() {
}
TempConstructor.prototype = parent.prototype;
child.prototype = new TempConstructor();
child.prototype.constructor = child;
child._super = parent;
};
var newContextCallbacks = [];
Tone._initAudioContext = function (callback) {
callback(Tone.context);
newContextCallbacks.push(callback);
};
Tone.setContext = function (ctx) {
Tone.prototype.context = ctx;
Tone.context = ctx;
for (var i = 0; i < newContextCallbacks.length; i++) {
newContextCallbacks[i](ctx);
}
};
Tone.startMobile = function () {
var osc = Tone.context.createOscillator();
var silent = Tone.context.createGain();
silent.gain.value = 0;
osc.connect(silent);
silent.connect(Tone.context.destination);
var now = Tone.context.currentTime;
osc.start(now);
osc.stop(now + 1);
};
Tone._initAudioContext(function (audioContext) {
Tone.prototype.blockTime = 128 / audioContext.sampleRate;
_silentNode = audioContext.createGain();
_silentNode.gain.value = 0;
_silentNode.connect(audioContext.destination);
});
Tone.version = 'r7-dev';
return Tone;
}();
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_signal_SignalBase;
Tone_signal_SignalBase = function (Tone) {
'use strict';
Tone.SignalBase = function () {
};
Tone.extend(Tone.SignalBase);
Tone.SignalBase.prototype.connect = function (node, outputNumber, inputNumber) {
if (Tone.Signal && Tone.Signal === node.constructor || Tone.Param && Tone.Param === node.constructor || Tone.TimelineSignal && Tone.TimelineSignal === node.constructor) {
node._param.cancelScheduledValues(0);
node._param.value = 0;
node.overridden = true;
} else if (node instanceof AudioParam) {
node.cancelScheduledValues(0);
node.value = 0;
}
Tone.prototype.connect.call(this, node, outputNumber, inputNumber);
return this;
};
return Tone.SignalBase;
}(Tone_core_Tone);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_signal_WaveShaper;
Tone_signal_WaveShaper = function (Tone) {
'use strict';
Tone.WaveShaper = function (mapping, bufferLen) {
this._shaper = this.input = this.output = this.context.createWaveShaper();
this._curve = null;
if (Array.isArray(mapping)) {
this.curve = mapping;
} else if (isFinite(mapping) || this.isUndef(mapping)) {
this._curve = new Float32Array(this.defaultArg(mapping, 1024));
} else if (this.isFunction(mapping)) {
this._curve = new Float32Array(this.defaultArg(bufferLen, 1024));
this.setMap(mapping);
}
};
Tone.extend(Tone.WaveShaper, Tone.SignalBase);
Tone.WaveShaper.prototype.setMap = function (mapping) {
for (var i = 0, len = this._curve.length; i < len; i++) {
var normalized = i / len * 2 - 1;
this._curve[i] = mapping(normalized, i);
}
this._shaper.curve = this._curve;
return this;
};
Object.defineProperty(Tone.WaveShaper.prototype, 'curve', {
get: function () {
return this._shaper.curve;
},
set: function (mapping) {
this._curve = new Float32Array(mapping);
this._shaper.curve = this._curve;
}
});
Object.defineProperty(Tone.WaveShaper.prototype, 'oversample', {
get: function () {
return this._shaper.oversample;
},
set: function (oversampling) {
if ([
'none',
'2x',
'4x'
].indexOf(oversampling) !== -1) {
this._shaper.oversample = oversampling;
} else {
throw new Error('invalid oversampling: ' + oversampling);
}
}
});
Tone.WaveShaper.prototype.dispose = function () {
Tone.prototype.dispose.call(this);
this._shaper.disconnect();
this._shaper = null;
this._curve = null;
return this;
};
return Tone.WaveShaper;
}(Tone_core_Tone);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_core_Type;
Tone_core_Type = function (Tone) {
'use strict';
Tone.Type = {
Default: 'number',
Time: 'time',
Frequency: 'frequency',
NormalRange: 'normalRange',
AudioRange: 'audioRange',
Decibels: 'db',
Interval: 'interval',
BPM: 'bpm',
Positive: 'positive',
Cents: 'cents',
Degrees: 'degrees',
MIDI: 'midi',
TransportTime: 'transportTime',
Ticks: 'tick',
Note: 'note',
Milliseconds: 'milliseconds',
Notation: 'notation'
};
Tone.prototype.isNowRelative = function () {
var nowRelative = new RegExp(/^\s*\+(.)+/i);
return function (note) {
return nowRelative.test(note);
};
}();
Tone.prototype.isTicks = function () {
var tickFormat = new RegExp(/^\d+i$/i);
return function (note) {
return tickFormat.test(note);
};
}();
Tone.prototype.isNotation = function () {
var notationFormat = new RegExp(/^[0-9]+[mnt]$/i);
return function (note) {
return notationFormat.test(note);
};
}();
Tone.prototype.isTransportTime = function () {
var transportTimeFormat = new RegExp(/^(\d+(\.\d+)?\:){1,2}(\d+(\.\d+)?)?$/i);
return function (transportTime) {
return transportTimeFormat.test(transportTime);
};
}();
Tone.prototype.isNote = function () {
var noteFormat = new RegExp(/^[a-g]{1}(b|#|x|bb)?-?[0-9]+$/i);
return function (note) {
return noteFormat.test(note);
};
}();
Tone.prototype.isFrequency = function () {
var freqFormat = new RegExp(/^\d*\.?\d+hz$/i);
return function (freq) {
return freqFormat.test(freq);
};
}();
function getTransportBpm() {
if (Tone.Transport && Tone.Transport.bpm) {
return Tone.Transport.bpm.value;
} else {
return 120;
}
}
function getTransportTimeSignature() {
if (Tone.Transport && Tone.Transport.timeSignature) {
return Tone.Transport.timeSignature;
} else {
return 4;
}
}
Tone.prototype.notationToSeconds = function (notation, bpm, timeSignature) {
bpm = this.defaultArg(bpm, getTransportBpm());
timeSignature = this.defaultArg(timeSignature, getTransportTimeSignature());
var beatTime = 60 / bpm;
if (notation === '1n') {
notation = '1m';
}
var subdivision = parseInt(notation, 10);
var beats = 0;
var lastLetter = notation.slice(-1);
if (lastLetter === 't') {
beats = 4 / subdivision * 2 / 3;
} else if (lastLetter === 'n') {
beats = 4 / subdivision;
} else if (lastLetter === 'm') {
beats = subdivision * timeSignature;
} else {
beats = 0;
}
return beatTime * beats;
};
Tone.prototype.transportTimeToSeconds = function (transportTime, bpm, timeSignature) {
bpm = this.defaultArg(bpm, getTransportBpm());
timeSignature = this.defaultArg(timeSignature, getTransportTimeSignature());
var measures = 0;
var quarters = 0;
var sixteenths = 0;
var split = transportTime.split(':');
if (split.length === 2) {
measures = parseFloat(split[0]);
quarters = parseFloat(split[1]);
} else if (split.length === 1) {
quarters = parseFloat(split[0]);
} else if (split.length === 3) {
measures = parseFloat(split[0]);
quarters = parseFloat(split[1]);
sixteenths = parseFloat(split[2]);
}
var beats = measures * timeSignature + quarters + sixteenths / 4;
return beats * (60 / bpm);
};
Tone.prototype.ticksToSeconds = function (ticks, bpm) {
if (this.isUndef(Tone.Transport)) {
return 0;
}
ticks = parseFloat(ticks);
bpm = this.defaultArg(bpm, getTransportBpm());
var tickTime = 60 / bpm / Tone.Transport.PPQ;
return tickTime * ticks;
};
Tone.prototype.frequencyToSeconds = function (freq) {
return 1 / parseFloat(freq);
};
Tone.prototype.samplesToSeconds = function (samples) {
return samples / this.context.sampleRate;
};
Tone.prototype.secondsToSamples = function (seconds) {
return seconds * this.context.sampleRate;
};
Tone.prototype.secondsToTransportTime = function (seconds, bpm, timeSignature) {
bpm = this.defaultArg(bpm, getTransportBpm());
timeSignature = this.defaultArg(timeSignature, getTransportTimeSignature());
var quarterTime = 60 / bpm;
var quarters = seconds / quarterTime;
var measures = Math.floor(quarters / timeSignature);
var sixteenths = quarters % 1 * 4;
quarters = Math.floor(quarters) % timeSignature;
var progress = [
measures,
quarters,
sixteenths
];
return progress.join(':');
};
Tone.prototype.secondsToFrequency = function (seconds) {
return 1 / seconds;
};
Tone.prototype.toTransportTime = function (time, bpm, timeSignature) {
var seconds = this.toSeconds(time);
return this.secondsToTransportTime(seconds, bpm, timeSignature);
};
Tone.prototype.toFrequency = function (freq, now) {
if (this.isFrequency(freq)) {
return parseFloat(freq);
} else if (this.isNotation(freq) || this.isTransportTime(freq)) {
return this.secondsToFrequency(this.toSeconds(freq, now));
} else if (this.isNote(freq)) {
return this.noteToFrequency(freq);
} else {
return freq;
}
};
Tone.prototype.toTicks = function (time) {
if (this.isUndef(Tone.Transport)) {
return 0;
}
var bpm = Tone.Transport.bpm.value;
var plusNow = 0;
if (this.isNowRelative(time)) {
time = time.replace('+', '');
plusNow = Tone.Transport.ticks;
} else if (this.isUndef(time)) {
return Tone.Transport.ticks;
}
var seconds = this.toSeconds(time);
var quarter = 60 / bpm;
var quarters = seconds / quarter;
var tickNum = quarters * Tone.Transport.PPQ;
return Math.round(tickNum + plusNow);
};
Tone.prototype.toSamples = function (time) {
var seconds = this.toSeconds(time);
return Math.round(seconds * this.context.sampleRate);
};
Tone.prototype.toSeconds = function (time, now) {
now = this.defaultArg(now, this.now());
if (this.isNumber(time)) {
return time;
} else if (this.isString(time)) {
var plusTime = 0;
if (this.isNowRelative(time)) {
time = time.replace('+', '');
plusTime = now;
}
var betweenParens = time.match(/\(([^)(]+)\)/g);
if (betweenParens) {
for (var j = 0; j < betweenParens.length; j++) {
var symbol = betweenParens[j].replace(/[\(\)]/g, '');
var symbolVal = this.toSeconds(symbol);
time = time.replace(betweenParens[j], symbolVal);
}
}
if (time.indexOf('@') !== -1) {
var quantizationSplit = time.split('@');
if (!this.isUndef(Tone.Transport)) {
var toQuantize = quantizationSplit[0].trim();
if (toQuantize === '') {
toQuantize = undefined;
}
if (plusTime > 0) {
toQuantize = '+' + toQuantize;
plusTime = 0;
}
var subdivision = quantizationSplit[1].trim();
time = Tone.Transport.quantize(toQuantize, subdivision);
} else {
throw new Error('quantization requires Tone.Transport');
}
} else {
var components = time.split(/[\(\)\-\+\/\*]/);
if (components.length > 1) {
var originalTime = time;
for (var i = 0; i < components.length; i++) {
var symb = components[i].trim();
if (symb !== '') {
var val = this.toSeconds(symb);
time = time.replace(symb, val);
}
}
try {
time = eval(time);
} catch (e) {
throw new EvalError('cannot evaluate Time: ' + originalTime);
}
} else if (this.isNotation(time)) {
time = this.notationToSeconds(time);
} else if (this.isTransportTime(time)) {
time = this.transportTimeToSeconds(time);
} else if (this.isFrequency(time)) {
time = this.frequencyToSeconds(time);
} else if (this.isTicks(time)) {
time = this.ticksToSeconds(time);
} else {
time = parseFloat(time);
}
}
return time + plusTime;
} else {
return now;
}
};
Tone.prototype.toNotation = function (time, bpm, timeSignature) {
var testNotations = [
'1m',
'2n',
'4n',
'8n',
'16n',
'32n',
'64n',
'128n'
];
var retNotation = toNotationHelper.call(this, time, bpm, timeSignature, testNotations);
var testTripletNotations = [
'1m',
'2n',
'2t',
'4n',
'4t',
'8n',
'8t',
'16n',
'16t',
'32n',
'32t',
'64n',
'64t',
'128n'
];
var retTripletNotation = toNotationHelper.call(this, time, bpm, timeSignature, testTripletNotations);
if (retTripletNotation.split('+').length < retNotation.split('+').length) {
return retTripletNotation;
} else {
return retNotation;
}
};
function toNotationHelper(time, bpm, timeSignature, testNotations) {
var seconds = this.toSeconds(time);
var threshold = this.notationToSeconds(testNotations[testNotations.length - 1], bpm, timeSignature);
var retNotation = '';
for (var i = 0; i < testNotations.length; i++) {
var notationTime = this.notationToSeconds(testNotations[i], bpm, timeSignature);
var multiple = seconds / notationTime;
var floatingPointError = 0.000001;
if (1 - multiple % 1 < floatingPointError) {
multiple += floatingPointError;
}
multiple = Math.floor(multiple);
if (multiple > 0) {
if (multiple === 1) {
retNotation += testNotations[i];
} else {
retNotation += multiple.toString() + '*' + testNotations[i];
}
seconds -= multiple * notationTime;
if (seconds < threshold) {
break;
} else {
retNotation += ' + ';
}
}
}
if (retNotation === '') {
retNotation = '0';
}
return retNotation;
}
Tone.prototype.fromUnits = function (val, units) {
if (this.convert || this.isUndef(this.convert)) {
switch (units) {
case Tone.Type.Time:
return this.toSeconds(val);
case Tone.Type.Frequency:
return this.toFrequency(val);
case Tone.Type.Decibels:
return this.dbToGain(val);
case Tone.Type.NormalRange:
return Math.min(Math.max(val, 0), 1);
case Tone.Type.AudioRange:
return Math.min(Math.max(val, -1), 1);
case Tone.Type.Positive:
return Math.max(val, 0);
default:
return val;
}
} else {
return val;
}
};
Tone.prototype.toUnits = function (val, units) {
if (this.convert || this.isUndef(this.convert)) {
switch (units) {
case Tone.Type.Decibels:
return this.gainToDb(val);
default:
return val;
}
} else {
return val;
}
};
var noteToScaleIndex = {
'cbb': -2,
'cb': -1,
'c': 0,
'c#': 1,
'cx': 2,
'dbb': 0,
'db': 1,
'd': 2,
'd#': 3,
'dx': 4,
'ebb': 2,
'eb': 3,
'e': 4,
'e#': 5,
'ex': 6,
'fbb': 3,
'fb': 4,
'f': 5,
'f#': 6,
'fx': 7,
'gbb': 5,
'gb': 6,
'g': 7,
'g#': 8,
'gx': 9,
'abb': 7,
'ab': 8,
'a': 9,
'a#': 10,
'ax': 11,
'bbb': 9,
'bb': 10,
'b': 11,
'b#': 12,
'bx': 13
};
var scaleIndexToNote = [
'C',
'C#',
'D',
'D#',
'E',
'F',
'F#',
'G',
'G#',
'A',
'A#',
'B'
];
Tone.A4 = 440;
Tone.prototype.noteToFrequency = function (note) {
var parts = note.split(/(-?\d+)/);
if (parts.length === 3) {
var index = noteToScaleIndex[parts[0].toLowerCase()];
var octave = parts[1];
var noteNumber = index + (parseInt(octave, 10) + 1) * 12;
return this.midiToFrequency(noteNumber);
} else {
return 0;
}
};
Tone.prototype.frequencyToNote = function (freq) {
var log = Math.log(freq / Tone.A4) / Math.LN2;
var noteNumber = Math.round(12 * log) + 57;
var octave = Math.floor(noteNumber / 12);
if (octave < 0) {
noteNumber += -12 * octave;
}
var noteName = scaleIndexToNote[noteNumber % 12];
return noteName + octave.toString();
};
Tone.prototype.intervalToFrequencyRatio = function (interval) {
return Math.pow(2, interval / 12);
};
Tone.prototype.midiToNote = function (midiNumber) {
var octave = Math.floor(midiNumber / 12) - 1;
var note = midiNumber % 12;
return scaleIndexToNote[note] + octave;
};
Tone.prototype.noteToMidi = function (note) {
var parts = note.split(/(\d+)/);
if (parts.length === 3) {
var index = noteToScaleIndex[parts[0].toLowerCase()];
var octave = parts[1];
return index + (parseInt(octave, 10) + 1) * 12;
} else {
return 0;
}
};
Tone.prototype.midiToFrequency = function (midi) {
return Tone.A4 * Math.pow(2, (midi - 69) / 12);
};
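// Worked examples for the pitch conversions above:
//   this.midiToFrequency(69);          // -> 440 (A4 is MIDI note 69)
//   this.noteToFrequency('A4');        // -> 440
//   this.frequencyToNote(440);         // -> 'A4'
//   this.intervalToFrequencyRatio(12); // -> 2 (an octave doubles frequency)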
return Tone;
}(Tone_core_Tone);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_core_Param;
Tone_core_Param = function (Tone) {
'use strict';
Tone.Param = function () {
var options = this.optionsObject(arguments, [
'param',
'units',
'convert'
], Tone.Param.defaults);
this._param = this.input = options.param;
this.units = options.units;
this.convert = options.convert;
this.overridden = false;
if (!this.isUndef(options.value)) {
this.value = options.value;
}
};
Tone.extend(Tone.Param);
Tone.Param.defaults = {
'units': Tone.Type.Default,
'convert': true,
'param': undefined
};
Object.defineProperty(Tone.Param.prototype, 'value', {
get: function () {
return this._toUnits(this._param.value);
},
set: function (value) {
var convertedVal = this._fromUnits(value);
this._param.value = convertedVal;
}
});
Tone.Param.prototype._fromUnits = function (val) {
if (this.convert || this.isUndef(this.convert)) {
switch (this.units) {
case Tone.Type.Time:
return this.toSeconds(val);
case Tone.Type.Frequency:
return this.toFrequency(val);
case Tone.Type.Decibels:
return this.dbToGain(val);
case Tone.Type.NormalRange:
return Math.min(Math.max(val, 0), 1);
case Tone.Type.AudioRange:
return Math.min(Math.max(val, -1), 1);
case Tone.Type.Positive:
return Math.max(val, 0);
default:
return val;
}
} else {
return val;
}
};
Tone.Param.prototype._toUnits = function (val) {
if (this.convert || this.isUndef(this.convert)) {
switch (this.units) {
case Tone.Type.Decibels:
return this.gainToDb(val);
default:
return val;
}
} else {
return val;
}
};
Tone.Param.prototype._minOutput = 0.00001;
Tone.Param.prototype.setValueAtTime = function (value, time) {
value = this._fromUnits(value);
this._param.setValueAtTime(value, this.toSeconds(time));
return this;
};
Tone.Param.prototype.setRampPoint = function (now) {
now = this.defaultArg(now, this.now());
var currentVal = this._param.value;
this._param.setValueAtTime(currentVal, now);
return this;
};
Tone.Param.prototype.linearRampToValueAtTime = function (value, endTime) {
value = this._fromUnits(value);
this._param.linearRampToValueAtTime(value, this.toSeconds(endTime));
return this;
};
Tone.Param.prototype.exponentialRampToValueAtTime = function (value, endTime) {
value = this._fromUnits(value);
value = Math.max(this._minOutput, value);
this._param.exponentialRampToValueAtTime(value, this.toSeconds(endTime));
return this;
};
Tone.Param.prototype.exponentialRampToValue = function (value, rampTime) {
var now = this.now();
var currentVal = this.value;
this.setValueAtTime(Math.max(currentVal, this._minOutput), now);
this.exponentialRampToValueAtTime(value, now + this.toSeconds(rampTime));
return this;
};
Tone.Param.prototype.linearRampToValue = function (value, rampTime) {
var now = this.now();
this.setRampPoint(now);
this.linearRampToValueAtTime(value, now + this.toSeconds(rampTime));
return this;
};
Tone.Param.prototype.setTargetAtTime = function (value, startTime, timeConstant) {
value = this._fromUnits(value);
value = Math.max(this._minOutput, value);
timeConstant = Math.max(this._minOutput, timeConstant);
this._param.setTargetAtTime(value, this.toSeconds(startTime), timeConstant);
return this;
};
Tone.Param.prototype.setValueCurveAtTime = function (values, startTime, duration) {
for (var i = 0; i < values.length; i++) {
values[i] = this._fromUnits(values[i]);
}
this._param.setValueCurveAtTime(values, this.toSeconds(startTime), this.toSeconds(duration));
return this;
};
Tone.Param.prototype.cancelScheduledValues = function (startTime) {
this._param.cancelScheduledValues(this.toSeconds(startTime));
return this;
};
Tone.Param.prototype.rampTo = function (value, rampTime) {
rampTime = this.defaultArg(rampTime, 0);
if (this.units === Tone.Type.Frequency || this.units === Tone.Type.BPM) {
this.exponentialRampToValue(value, rampTime);
} else {
this.linearRampToValue(value, rampTime);
}
return this;
};
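// Usage sketch (freqParam and gainParam are hypothetical Tone.Params, not
// part of this file): rampTo picks the curve from the param's units, so a
// Frequency param glides exponentially while a default-unit param ramps
// linearly:
//   freqParam.rampTo(880, 0.5);  // exponential ramp over 0.5 seconds
//   gainParam.rampTo(0.25, 0.5); // linear ramp over 0.5 seconds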
Tone.Param.prototype.dispose = function () {
Tone.prototype.dispose.call(this);
this._param = null;
return this;
};
return Tone.Param;
}(Tone_core_Tone);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_core_Gain;
Tone_core_Gain = function (Tone) {
'use strict';
Tone.Gain = function () {
var options = this.optionsObject(arguments, [
'gain',
'units'
], Tone.Gain.defaults);
this.input = this.output = this._gainNode = this.context.createGain();
this.gain = new Tone.Param({
'param': this._gainNode.gain,
'units': options.units,
'value': options.gain,
'convert': options.convert
});
this._readOnly('gain');
};
Tone.extend(Tone.Gain);
Tone.Gain.defaults = {
'gain': 1,
'convert': true
};
Tone.Gain.prototype.dispose = function () {
Tone.Param.prototype.dispose.call(this);
this._gainNode.disconnect();
this._gainNode = null;
this._writable('gain');
this.gain.dispose();
this.gain = null;
};
return Tone.Gain;
}(Tone_core_Tone, Tone_core_Param);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_signal_Signal;
Tone_signal_Signal = function (Tone) {
'use strict';
Tone.Signal = function () {
var options = this.optionsObject(arguments, [
'value',
'units'
], Tone.Signal.defaults);
this.output = this._gain = this.context.createGain();
options.param = this._gain.gain;
Tone.Param.call(this, options);
this.input = this._param = this._gain.gain;
Tone.Signal._constant.chain(this._gain);
};
Tone.extend(Tone.Signal, Tone.Param);
Tone.Signal.defaults = {
'value': 0,
'units': Tone.Type.Default,
'convert': true
};
Tone.Signal.prototype.connect = Tone.SignalBase.prototype.connect;
Tone.Signal.prototype.dispose = function () {
Tone.Param.prototype.dispose.call(this);
this._param = null;
this._gain.disconnect();
this._gain = null;
return this;
};
Tone.Signal._constant = null;
Tone._initAudioContext(function (audioContext) {
var buffer = audioContext.createBuffer(1, 128, audioContext.sampleRate);
var arr = buffer.getChannelData(0);
for (var i = 0; i < arr.length; i++) {
arr[i] = 1;
}
Tone.Signal._constant = audioContext.createBufferSource();
Tone.Signal._constant.channelCount = 1;
Tone.Signal._constant.channelCountMode = 'explicit';
Tone.Signal._constant.buffer = buffer;
Tone.Signal._constant.loop = true;
Tone.Signal._constant.start(0);
Tone.Signal._constant.noGC();
});
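// The block above builds Tone.Signal's constant source: a looping
// 128-sample buffer of 1s drives each signal's gain node, so that node's
// gain AudioParam becomes the signal value. A minimal sketch of the same
// technique (assuming ctx is a Web Audio AudioContext):
//   var buf = ctx.createBuffer(1, 128, ctx.sampleRate);
//   for (var n = 0; n < 128; n++) { buf.getChannelData(0)[n] = 1; }
//   var src = ctx.createBufferSource();
//   src.buffer = buf;
//   src.loop = true;
//   src.start(0);
//   var sig = ctx.createGain();
//   src.connect(sig); // sig.gain.value now sets the constant output level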
return Tone.Signal;
}(Tone_core_Tone, Tone_signal_WaveShaper, Tone_core_Type, Tone_core_Param);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_signal_Add;
Tone_signal_Add = function (Tone) {
'use strict';
Tone.Add = function (value) {
Tone.call(this, 2, 0);
this._sum = this.input[0] = this.input[1] = this.output = this.context.createGain();
this._param = this.input[1] = new Tone.Signal(value);
this._param.connect(this._sum);
};
Tone.extend(Tone.Add, Tone.Signal);
Tone.Add.prototype.dispose = function () {
Tone.prototype.dispose.call(this);
this._sum.disconnect();
this._sum = null;
this._param.dispose();
this._param = null;
return this;
};
return Tone.Add;
}(Tone_core_Tone);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_signal_Multiply;
Tone_signal_Multiply = function (Tone) {
'use strict';
Tone.Multiply = function (value) {
Tone.call(this, 2, 0);
this._mult = this.input[0] = this.output = this.context.createGain();
this._param = this.input[1] = this.output.gain;
this._param.value = this.defaultArg(value, 0);
};
Tone.extend(Tone.Multiply, Tone.Signal);
Tone.Multiply.prototype.dispose = function () {
Tone.prototype.dispose.call(this);
this._mult.disconnect();
this._mult = null;
this._param = null;
return this;
};
return Tone.Multiply;
}(Tone_core_Tone);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_signal_Scale;
Tone_signal_Scale = function (Tone) {
'use strict';
Tone.Scale = function (outputMin, outputMax) {
this._outputMin = this.defaultArg(outputMin, 0);
this._outputMax = this.defaultArg(outputMax, 1);
this._scale = this.input = new Tone.Multiply(1);
this._add = this.output = new Tone.Add(0);
this._scale.connect(this._add);
this._setRange();
};
Tone.extend(Tone.Scale, Tone.SignalBase);
Object.defineProperty(Tone.Scale.prototype, 'min', {
get: function () {
return this._outputMin;
},
set: function (min) {
this._outputMin = min;
this._setRange();
}
});
Object.defineProperty(Tone.Scale.prototype, 'max', {
get: function () {
return this._outputMax;
},
set: function (max) {
this._outputMax = max;
this._setRange();
}
});
Tone.Scale.prototype._setRange = function () {
this._add.value = this._outputMin;
this._scale.value = this._outputMax - this._outputMin;
};
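// _setRange wires up out = in * (max - min) + min, so with min = -1 and
// max = 1, an input of 0 maps to -1, 0.5 maps to 0, and 1 maps to 1.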
Tone.Scale.prototype.dispose = function () {
Tone.prototype.dispose.call(this);
this._add.dispose();
this._add = null;
this._scale.dispose();
this._scale = null;
return this;
};
return Tone.Scale;
}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Multiply);
var signal;
signal = function () {
'use strict';
// Signal is built with the Tone.js signal by Yotam Mann
// https://github.com/TONEnoTONE/Tone.js/
var Signal = Tone_signal_Signal;
var Add = Tone_signal_Add;
var Mult = Tone_signal_Multiply;
var Scale = Tone_signal_Scale;
var Tone = Tone_core_Tone;
var p5sound = master;
Tone.setContext(p5sound.audiocontext);
/**
* p5.Signal is a constant audio-rate signal used by p5.Oscillator
* and p5.Env for modulation math.
*
* This is necessary because Web Audio is processed on a separate clock.
* For example, the p5 draw loop runs about 60 times per second. But
* the audio clock must process samples 44100 times per second. If we
* want to add a value to each of those samples, we can't do it in the
* draw loop, but we can do it by adding a constant-rate audio signal.
* This class mostly functions behind the scenes in p5.sound, and returns
* a Tone.Signal from the Tone.js library by Yotam Mann.
* If you want to work directly with audio signals for modular
* synthesis, check out tone.js.
*
* @class p5.Signal
* @constructor
* @return {Tone.Signal} A Signal object from the Tone.js library
* @example
*
* function setup() {
* carrier = new p5.Oscillator('sine');
* carrier.amp(1); // set amplitude
* carrier.freq(220); // set frequency
* carrier.start(); // start oscillating
*
* modulator = new p5.Oscillator('sawtooth');
* modulator.disconnect();
* modulator.amp(1);
* modulator.freq(4);
* modulator.start();
*
* // Modulator's default amplitude range is -1 to 1.
* // Multiply it by -200, so the range is -200 to 200
* // then add 220 so the range is 20 to 420
* carrier.freq( modulator.mult(-200).add(220) );
* }
*
*/
p5.Signal = function (value) {
var s = new Signal(value);
// p5sound.soundArray.push(s);
return s;
};
/**
* Fade to value, for smooth transitions
*
* @method fade
* @param {Number} value Value to set this signal
* @param {Number} [secondsFromNow] Length of fade, in seconds from now
*/
Signal.prototype.fade = Signal.prototype.linearRampToValueAtTime;
Mult.prototype.fade = Signal.prototype.fade;
Add.prototype.fade = Signal.prototype.fade;
Scale.prototype.fade = Signal.prototype.fade;
/**
* Connect a p5.sound object or Web Audio node to this
* p5.Signal so that its amplitude values can be scaled.
*
* @param {Object} input
*/
Signal.prototype.setInput = function (_input) {
_input.connect(this);
};
Mult.prototype.setInput = Signal.prototype.setInput;
Add.prototype.setInput = Signal.prototype.setInput;
Scale.prototype.setInput = Signal.prototype.setInput;
// signals can add / mult / scale themselves
/**
* Add a constant value to this audio signal,
* and return the resulting audio signal. Does
* not change the value of the original signal,
* instead it returns a new p5.SignalAdd.
*
* @method add
* @param {Number} number
* @return {Tone.Add} object
*/
Signal.prototype.add = function (num) {
var add = new Add(num);
// add.setInput(this);
this.connect(add);
return add;
};
Mult.prototype.add = Signal.prototype.add;
Add.prototype.add = Signal.prototype.add;
Scale.prototype.add = Signal.prototype.add;
/**
* Multiply this signal by a constant value,
* and return the resulting audio signal. Does
* not change the value of the original signal,
* instead it returns a new p5.SignalMult.
*
* @method mult
* @param {Number} number to multiply
* @return {Tone.Multiply} object
*/
Signal.prototype.mult = function (num) {
var mult = new Mult(num);
// mult.setInput(this);
this.connect(mult);
return mult;
};
Mult.prototype.mult = Signal.prototype.mult;
Add.prototype.mult = Signal.prototype.mult;
Scale.prototype.mult = Signal.prototype.mult;
/**
* Scale this signal value to a given range,
* and return the result as an audio signal. Does
* not change the value of the original signal,
* instead it returns a new p5.SignalScale.
*
* @method scale
* @param {Number} inMin input range minimum
* @param {Number} inMax input range maximum
* @param {Number} outMin output range minimum
* @param {Number} outMax output range maximum
* @return {Tone.Scale} object
*/
Signal.prototype.scale = function (inMin, inMax, outMin, outMax) {
var mapOutMin, mapOutMax;
if (arguments.length === 4) {
mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5;
mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5;
} else {
mapOutMin = arguments[0];
mapOutMax = arguments[1];
}
var scale = new Scale(mapOutMin, mapOutMax);
this.connect(scale);
return scale;
};
Mult.prototype.scale = Signal.prototype.scale;
Add.prototype.scale = Signal.prototype.scale;
Scale.prototype.scale = Signal.prototype.scale;
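// Usage sketch (illustrative, mirrors the class example above): the math
// ops return new Tone objects that can themselves be chained:
//   var sig = new p5.Signal(1);            // constant signal of 1
//   var offset = sig.mult(-200).add(220);  // Tone.Multiply -> Tone.Add
//   // offset can now be connected to an AudioParam, e.g. a frequency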
}(Tone_signal_Signal, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale, Tone_core_Tone, master);
var oscillator;
oscillator = function () {
'use strict';
var p5sound = master;
var Signal = Tone_signal_Signal;
var Add = Tone_signal_Add;
var Mult = Tone_signal_Multiply;
var Scale = Tone_signal_Scale;
/**
* Creates a signal that oscillates between -1.0 and 1.0.
* By default, the oscillation takes the form of a sinusoidal
* shape ('sine'). Additional types include 'triangle',
* 'sawtooth' and 'square'. The frequency defaults to
* 440 oscillations per second (440Hz, equal to the pitch of an
* 'A' note).
*
* Set the type of oscillation with setType(), or by creating a
* specific oscillator. For example:
* new p5.SinOsc(freq)
* new p5.TriOsc(freq)
* new p5.SqrOsc(freq)
* new p5.SawOsc(freq)
*
*
* @class p5.Oscillator
* @constructor
* @param {Number} [freq] frequency defaults to 440Hz
* @param {String} [type] type of oscillator. Options:
* 'sine' (default), 'triangle',
* 'sawtooth', 'square'
* @return {Object} Oscillator object
* @example
*
* var osc;
* var playing = false;
*
* function setup() {
* backgroundColor = color(255,0,255);
* textAlign(CENTER);
*
* osc = new p5.Oscillator();
* osc.setType('sine');
* osc.freq(240);
* osc.amp(0);
* osc.start();
* }
*
* function draw() {
* background(backgroundColor)
* text('click to play', width/2, height/2);
* }
*
* function mouseClicked() {
* if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {
* if (!playing) {
* // ramp amplitude to 0.5 over 0.05 seconds
* osc.amp(0.5, 0.05);
* playing = true;
* backgroundColor = color(0,255,255);
* } else {
* // ramp amplitude to 0 over 0.5 seconds
* osc.amp(0, 0.5);
* playing = false;
* backgroundColor = color(255,0,255);
* }
* }
* }
*
*/
p5.Oscillator = function (freq, type) {
// allow arguments in either order: (freq, type) or (type, freq)
if (typeof freq === 'string') {
var f = type;
type = freq;
freq = f;
}
if (typeof type === 'number') {
var t = type;
type = freq;
freq = t;
}
this.started = false;
// components
this.phaseAmount = undefined;
this.oscillator = p5sound.audiocontext.createOscillator();
this.f = freq || 440;
// frequency
this.oscillator.type = type || 'sine';
this.oscillator.frequency.setValueAtTime(this.f, p5sound.audiocontext.currentTime);
var o = this.oscillator;
// connections
this.output = p5sound.audiocontext.createGain();
this._freqMods = [];
// modulators connected to this oscillator's frequency
// set default output gain to 0.5
this.output.gain.value = 0.5;
this.output.gain.setValueAtTime(0.5, p5sound.audiocontext.currentTime);
this.oscillator.connect(this.output);
// stereo panning
this.panPosition = 0;
this.connection = p5sound.input;
// connect to p5sound by default
this.panner = new p5.Panner(this.output, this.connection, 1);
//array of math operation signal chaining
this.mathOps = [this.output];
// add to the soundArray so we can dispose of the osc later
p5sound.soundArray.push(this);
};
/**
* Start an oscillator. Accepts an optional parameter to
* determine how long (in seconds from now) until the
* oscillator starts.
*
* @method start
* @param {Number} [time] startTime in seconds from now.
* @param {Number} [frequency] frequency in Hz.
*/
p5.Oscillator.prototype.start = function (time, f) {
if (this.started) {
var now = p5sound.audiocontext.currentTime;
this.stop(now);
}
if (!this.started) {
var freq = f || this.f;
var type = this.oscillator.type;
// set old osc free to be garbage collected (memory)
if (this.oscillator) {
this.oscillator.disconnect();
this.oscillator = undefined;
}
// var detune = this.oscillator.frequency.value;
this.oscillator = p5sound.audiocontext.createOscillator();
this.oscillator.frequency.exponentialRampToValueAtTime(Math.abs(freq), p5sound.audiocontext.currentTime);
this.oscillator.type = type;
// this.oscillator.detune.value = detune;
this.oscillator.connect(this.output);
time = time || 0;
this.oscillator.start(time + p5sound.audiocontext.currentTime);
this.freqNode = this.oscillator.frequency;
// if other oscillators are already connected to this osc's freq
for (var i in this._freqMods) {
if (typeof this._freqMods[i].connect !== 'undefined') {
this._freqMods[i].connect(this.oscillator.frequency);
}
}
this.started = true;
}
};
/**
* Stop an oscillator. Accepts an optional parameter
* to determine how long (in seconds from now) until the
* oscillator stops.
*
* @method stop
* @param {Number} secondsFromNow Time, in seconds from now.
*/
p5.Oscillator.prototype.stop = function (time) {
if (this.started) {
var t = time || 0;
var now = p5sound.audiocontext.currentTime;
this.oscillator.stop(t + now);
this.started = false;
}
};
/**
* Set the amplitude between 0 and 1.0. Or, pass in an object
* such as an oscillator to modulate amplitude with an audio signal.
*
* @method amp
* @param {Number|Object} vol between 0 and 1.0
* or a modulating signal/oscillator
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
* @return {AudioParam} gain If no value is provided,
* returns the Web Audio API
* AudioParam that controls
* this oscillator's
* gain/amplitude/volume
*/
p5.Oscillator.prototype.amp = function (vol, rampTime, tFromNow) {
var self = this;
if (typeof vol === 'number') {
rampTime = rampTime || 0;
tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(now);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
} else if (vol) {
vol.connect(self.output.gain);
} else {
// return the Gain Node
return this.output.gain;
}
};
// these are now the same thing
p5.Oscillator.prototype.fade = p5.Oscillator.prototype.amp;
p5.Oscillator.prototype.getAmp = function () {
return this.output.gain.value;
};
/**
* Set frequency of an oscillator to a value. Or, pass in an object
* such as an oscillator to modulate the frequency with an audio signal.
*
* @method freq
* @param {Number|Object} val Frequency in Hz
* or modulating signal/oscillator
* @param {Number} [rampTime] Ramp time (in seconds)
* @param {Number} [timeFromNow] Schedule this event to happen
* at x seconds from now
* @return {AudioParam} Frequency If no value is provided,
* returns the Web Audio API
* AudioParam that controls
* this oscillator's frequency
* @example
*
* var osc = new p5.Oscillator(300);
* osc.start();
* osc.freq(40, 10);
*
*/
p5.Oscillator.prototype.freq = function (val, rampTime, tFromNow) {
if (typeof val === 'number' && !isNaN(val)) {
this.f = val;
var now = p5sound.audiocontext.currentTime;
rampTime = rampTime || 0;
tFromNow = tFromNow || 0;
// var currentFreq = this.oscillator.frequency.value;
// this.oscillator.frequency.cancelScheduledValues(now);
if (rampTime === 0) {
this.oscillator.frequency.cancelScheduledValues(now);
this.oscillator.frequency.setValueAtTime(val, tFromNow + now);
} else {
if (val > 0) {
this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
} else {
this.oscillator.frequency.linearRampToValueAtTime(val, tFromNow + rampTime + now);
}
}
// reset phase if oscillator has a phase
if (this.phaseAmount) {
this.phase(this.phaseAmount);
}
} else if (val) {
if (val.output) {
val = val.output;
}
val.connect(this.oscillator.frequency);
// keep track of what is modulating this param
// so it can be re-connected when the oscillator restarts
this._freqMods.push(val);
} else {
// return the Frequency Node
return this.oscillator.frequency;
}
};
p5.Oscillator.prototype.getFreq = function () {
return this.oscillator.frequency.value;
};
/**
* Set type to 'sine', 'triangle', 'sawtooth' or 'square'.
*
* @method setType
* @param {String} type 'sine', 'triangle', 'sawtooth' or 'square'.
*/
p5.Oscillator.prototype.setType = function (type) {
this.oscillator.type = type;
};
p5.Oscillator.prototype.getType = function () {
return this.oscillator.type;
};
/**
* Connect to a p5.sound / Web Audio object.
*
* @method connect
* @param {Object} unit A p5.sound or Web Audio object
*/
p5.Oscillator.prototype.connect = function (unit) {
if (!unit) {
this.panner.connect(p5sound.input);
} else if (unit.hasOwnProperty('input')) {
this.panner.connect(unit.input);
this.connection = unit.input;
} else {
this.panner.connect(unit);
this.connection = unit;
}
};
/**
* Disconnect all outputs
*
* @method disconnect
*/
p5.Oscillator.prototype.disconnect = function (unit) {
this.output.disconnect();
this.panner.disconnect();
this.output.connect(this.panner);
this.oscMods = [];
};
/**
* Pan between Left (-1) and Right (1)
*
* @method pan
* @param {Number} panning Number between -1 and 1
* @param {Number} timeFromNow schedule this event to happen
* seconds from now
*/
p5.Oscillator.prototype.pan = function (pval, tFromNow) {
this.panPosition = pval;
this.panner.pan(pval, tFromNow);
};
p5.Oscillator.prototype.getPan = function () {
return this.panPosition;
};
// get rid of the oscillator
p5.Oscillator.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
if (this.oscillator) {
var now = p5sound.audiocontext.currentTime;
this.stop(now);
this.disconnect();
this.panner = null;
this.oscillator = null;
}
// if it is a Pulse
if (this.osc2) {
this.osc2.dispose();
}
};
/**
* Set the phase of an oscillator between 0.0 and 1.0.
* In this implementation, phase is a delay time
* based on the oscillator's current frequency.
*
* @method phase
* @param {Number} phase float between 0.0 and 1.0
*/
p5.Oscillator.prototype.phase = function (p) {
var delayAmt = p5.prototype.map(p, 0, 1, 0, 1 / this.f);
var now = p5sound.audiocontext.currentTime;
this.phaseAmount = p;
if (!this.dNode) {
// create a delay node
this.dNode = p5sound.audiocontext.createDelay();
// put the delay node in between output and panner
this.oscillator.disconnect();
this.oscillator.connect(this.dNode);
this.dNode.connect(this.output);
}
// set delay time to match phase:
this.dNode.delayTime.setValueAtTime(delayAmt, now);
};
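// Worked example: phase is implemented as a delay of (p / f) seconds, so
// for a 440 Hz oscillator, osc.phase(0.5) delays the output by half a
// period: map(0.5, 0, 1, 0, 1/440) -> ~0.00114 seconds.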
// ========================== //
// SIGNAL MATH FOR MODULATION //
// ========================== //
// Insert a math op (Add, Mult or Scale) into this oscillator's signal
// chain, replacing any existing op of the same type.
var sigChain = function (o, mathObj, thisChain, nextChain, type) {
var chainSource = o.oscillator;
// if this type of math already exists in the chain, replace it
for (var i = 0; i < o.mathOps.length; i++) {
if (o.mathOps[i] instanceof type) {
chainSource.disconnect();
o.mathOps[i].dispose();
thisChain = i;
// assume nextChain is the output gain node
// unless more math ops follow the replaced one
if (thisChain < o.mathOps.length - 2) {
nextChain = o.mathOps[i + 1];
}
}
}
if (thisChain === o.mathOps.length - 1) {
o.mathOps.push(nextChain);
}
// the source is the oscillator unless the op sits later in the chain
if (thisChain > 0) {
chainSource = o.mathOps[thisChain - 1];
}
chainSource.disconnect();
chainSource.connect(mathObj);
mathObj.connect(nextChain);
o.mathOps[thisChain] = mathObj;
return o;
};
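// Usage sketch (osc and osc2 are p5.Oscillators): the chain keeps at most
// one op of each type, so repeated calls replace rather than stack:
//   osc.mult(2);   // oscillator -> Mult(2) -> output
//   osc.mult(0.5); // oscillator -> Mult(0.5) -> output (replaces Mult(2))
// while different op types chain in the order they were added:
//   osc2.add(1).mult(3); // oscillator -> Add(1) -> Mult(3) -> output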
/**
* Add a value to the p5.Oscillator's output amplitude,
* and return the oscillator. Calling this method again
* will override the initial add() with a new value.
*
* @method add
* @param {Number} number Constant number to add
* @return {p5.Oscillator} Oscillator Returns this oscillator
* with added output
*
*/
p5.Oscillator.prototype.add = function (num) {
var add = new Add(num);
var thisChain = this.mathOps.length - 1;
var nextChain = this.output;
return sigChain(this, add, thisChain, nextChain, Add);
};
/**
* Multiply the p5.Oscillator's output amplitude
* by a fixed value (i.e. turn it up!). Calling this method
* again will override the initial mult() with a new value.
*
* @method mult
* @param {Number} number Constant number to multiply
* @return {p5.Oscillator} Oscillator Returns this oscillator
* with multiplied output
*/
p5.Oscillator.prototype.mult = function (num) {
var mult = new Mult(num);
var thisChain = this.mathOps.length - 1;
var nextChain = this.output;
return sigChain(this, mult, thisChain, nextChain, Mult);
};
/**
* Scale this oscillator's amplitude values to a given
* range, and return the oscillator. Calling this method
* again will override the initial scale() with new values.
*
* @method scale
* @param {Number} inMin input range minimum
* @param {Number} inMax input range maximum
* @param {Number} outMin output range minimum
* @param {Number} outMax output range maximum
* @return {p5.Oscillator} Oscillator Returns this oscillator
* with scaled output
*/
p5.Oscillator.prototype.scale = function (inMin, inMax, outMin, outMax) {
var mapOutMin, mapOutMax;
if (arguments.length === 4) {
mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5;
mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5;
} else {
mapOutMin = arguments[0];
mapOutMax = arguments[1];
}
var scale = new Scale(mapOutMin, mapOutMax);
var thisChain = this.mathOps.length - 1;
var nextChain = this.output;
return sigChain(this, scale, thisChain, nextChain, Scale);
};
// ============================== //
// SinOsc, TriOsc, SqrOsc, SawOsc //
// ============================== //
/**
* Constructor: new p5.SinOsc().
* This creates a Sine Wave Oscillator and is
* equivalent to new p5.Oscillator('sine'), or creating
* a p5.Oscillator and then calling its method setType('sine').
* See p5.Oscillator for methods.
*
* @method p5.SinOsc
* @param {Number} [freq] Set the frequency
*/
p5.SinOsc = function (freq) {
p5.Oscillator.call(this, freq, 'sine');
};
p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);
/**
* Constructor: new p5.TriOsc().
* This creates a Triangle Wave Oscillator and is
* equivalent to new p5.Oscillator('triangle'), or creating
* a p5.Oscillator and then calling its method setType('triangle').
* See p5.Oscillator for methods.
*
* @method p5.TriOsc
* @param {Number} [freq] Set the frequency
*/
p5.TriOsc = function (freq) {
p5.Oscillator.call(this, freq, 'triangle');
};
p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);
/**
* Constructor: new p5.SawOsc().
* This creates a SawTooth Wave Oscillator and is
* equivalent to new p5.Oscillator('sawtooth'), or creating
* a p5.Oscillator and then calling its method setType('sawtooth').
* See p5.Oscillator for methods.
*
* @method p5.SawOsc
* @param {Number} [freq] Set the frequency
*/
p5.SawOsc = function (freq) {
p5.Oscillator.call(this, freq, 'sawtooth');
};
p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype);
/**
* Constructor: new p5.SqrOsc().
* This creates a Square Wave Oscillator and is
* equivalent to new p5.Oscillator('square'), or creating
* a p5.Oscillator and then calling its method setType('square').
* See p5.Oscillator for methods.
*
* @method p5.SqrOsc
* @param {Number} [freq] Set the frequency
*/
p5.SqrOsc = function (freq) {
p5.Oscillator.call(this, freq, 'square');
};
p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype);
}(master, Tone_signal_Signal, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_core_Timeline;
Tone_core_Timeline = function (Tone) {
'use strict';
Tone.Timeline = function () {
var options = this.optionsObject(arguments, ['memory'], Tone.Timeline.defaults);
this._timeline = [];
this._toRemove = [];
this._iterating = false;
this.memory = options.memory;
};
Tone.extend(Tone.Timeline);
Tone.Timeline.defaults = { 'memory': Infinity };
Object.defineProperty(Tone.Timeline.prototype, 'length', {
get: function () {
return this._timeline.length;
}
});
Tone.Timeline.prototype.addEvent = function (event) {
if (this.isUndef(event.time)) {
throw new Error('events must have a time attribute');
}
event.time = this.toSeconds(event.time);
if (this._timeline.length) {
var index = this._search(event.time);
this._timeline.splice(index + 1, 0, event);
} else {
this._timeline.push(event);
}
if (this.length > this.memory) {
var diff = this.length - this.memory;
this._timeline.splice(0, diff);
}
return this;
};
Tone.Timeline.prototype.removeEvent = function (event) {
if (this._iterating) {
this._toRemove.push(event);
} else {
var index = this._timeline.indexOf(event);
if (index !== -1) {
this._timeline.splice(index, 1);
}
}
return this;
};
Tone.Timeline.prototype.getEvent = function (time) {
time = this.toSeconds(time);
var index = this._search(time);
if (index !== -1) {
return this._timeline[index];
} else {
return null;
}
};
Tone.Timeline.prototype.getEventAfter = function (time) {
time = this.toSeconds(time);
var index = this._search(time);
if (index + 1 < this._timeline.length) {
return this._timeline[index + 1];
} else {
return null;
}
};
Tone.Timeline.prototype.getEventBefore = function (time) {
time = this.toSeconds(time);
var index = this._search(time);
if (index - 1 >= 0) {
return this._timeline[index - 1];
} else {
return null;
}
};
Tone.Timeline.prototype.cancel = function (after) {
// convert once so the single-event branch below compares seconds too
after = this.toSeconds(after);
if (this._timeline.length > 1) {
var index = this._search(after);
if (index >= 0) {
this._timeline = this._timeline.slice(0, index);
} else {
this._timeline = [];
}
} else if (this._timeline.length === 1) {
if (this._timeline[0].time >= after) {
this._timeline = [];
}
}
return this;
};
Tone.Timeline.prototype.cancelBefore = function (time) {
if (this._timeline.length) {
time = this.toSeconds(time);
var index = this._search(time);
if (index >= 0) {
this._timeline = this._timeline.slice(index + 1);
}
}
return this;
};
Tone.Timeline.prototype._search = function (time) {
var beginning = 0;
var len = this._timeline.length;
var end = len;
while (beginning <= end && beginning < len) {
var midPoint = Math.floor(beginning + (end - beginning) / 2);
var event = this._timeline[midPoint];
if (event.time === time) {
for (var i = midPoint; i < this._timeline.length; i++) {
var testEvent = this._timeline[i];
if (testEvent.time === time) {
midPoint = i;
}
}
return midPoint;
} else if (event.time > time) {
end = midPoint - 1;
} else if (event.time < time) {
beginning = midPoint + 1;
}
}
return beginning - 1;
};
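// Worked example: _search returns the index of the event at or just
// before the given time, or -1 if the time precedes every event. With
// events at times [0, 1, 2]:
//   _search(1.5) -> 1;  _search(2) -> 2;  _search(-1) -> -1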
Tone.Timeline.prototype._iterate = function (callback, lowerBound, upperBound) {
this._iterating = true;
lowerBound = this.defaultArg(lowerBound, 0);
upperBound = this.defaultArg(upperBound, this._timeline.length - 1);
for (var i = lowerBound; i <= upperBound; i++) {
callback(this._timeline[i]);
}
this._iterating = false;
if (this._toRemove.length > 0) {
for (var j = 0; j < this._toRemove.length; j++) {
var index = this._timeline.indexOf(this._toRemove[j]);
if (index !== -1) {
this._timeline.splice(index, 1);
}
}
this._toRemove = [];
}
};
Tone.Timeline.prototype.forEach = function (callback) {
this._iterate(callback);
return this;
};
Tone.Timeline.prototype.forEachBefore = function (time, callback) {
time = this.toSeconds(time);
var upperBound = this._search(time);
if (upperBound !== -1) {
this._iterate(callback, 0, upperBound);
}
return this;
};
Tone.Timeline.prototype.forEachAfter = function (time, callback) {
time = this.toSeconds(time);
var lowerBound = this._search(time);
this._iterate(callback, lowerBound + 1);
return this;
};
Tone.Timeline.prototype.forEachFrom = function (time, callback) {
time = this.toSeconds(time);
var lowerBound = this._search(time);
while (lowerBound >= 0 && this._timeline[lowerBound].time >= time) {
lowerBound--;
}
this._iterate(callback, lowerBound + 1);
return this;
};
Tone.Timeline.prototype.forEachAtTime = function (time, callback) {
time = this.toSeconds(time);
var upperBound = this._search(time);
if (upperBound !== -1) {
this._iterate(function (event) {
if (event.time === time) {
callback(event);
}
}, 0, upperBound);
}
return this;
};
Tone.Timeline.prototype.dispose = function () {
Tone.prototype.dispose.call(this);
this._timeline = null;
this._toRemove = null;
};
return Tone.Timeline;
}(Tone_core_Tone);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_signal_TimelineSignal;
Tone_signal_TimelineSignal = function (Tone) {
'use strict';
Tone.TimelineSignal = function () {
var options = this.optionsObject(arguments, [
'value',
'units'
], Tone.Signal.defaults);
Tone.Signal.apply(this, options);
options.param = this._param;
Tone.Param.call(this, options);
this._events = new Tone.Timeline(10);
this._initial = this._fromUnits(this._param.value);
};
Tone.extend(Tone.TimelineSignal, Tone.Param);
Tone.TimelineSignal.Type = {
Linear: 'linear',
Exponential: 'exponential',
Target: 'target',
Set: 'set'
};
Object.defineProperty(Tone.TimelineSignal.prototype, 'value', {
get: function () {
return this._toUnits(this._param.value);
},
set: function (value) {
var convertedVal = this._fromUnits(value);
this._initial = convertedVal;
this._param.value = convertedVal;
}
});
Tone.TimelineSignal.prototype.setValueAtTime = function (value, startTime) {
value = this._fromUnits(value);
startTime = this.toSeconds(startTime);
this._events.addEvent({
'type': Tone.TimelineSignal.Type.Set,
'value': value,
'time': startTime
});
this._param.setValueAtTime(value, startTime);
return this;
};
Tone.TimelineSignal.prototype.linearRampToValueAtTime = function (value, endTime) {
value = this._fromUnits(value);
endTime = this.toSeconds(endTime);
this._events.addEvent({
'type': Tone.TimelineSignal.Type.Linear,
'value': value,
'time': endTime
});
this._param.linearRampToValueAtTime(value, endTime);
return this;
};
Tone.TimelineSignal.prototype.exponentialRampToValueAtTime = function (value, endTime) {
value = this._fromUnits(value);
value = Math.max(this._minOutput, value);
endTime = this.toSeconds(endTime);
this._events.addEvent({
'type': Tone.TimelineSignal.Type.Exponential,
'value': value,
'time': endTime
});
this._param.exponentialRampToValueAtTime(value, endTime);
return this;
};
Tone.TimelineSignal.prototype.setTargetAtTime = function (value, startTime, timeConstant) {
value = this._fromUnits(value);
value = Math.max(this._minOutput, value);
timeConstant = Math.max(this._minOutput, timeConstant);
startTime = this.toSeconds(startTime);
this._events.addEvent({
'type': Tone.TimelineSignal.Type.Target,
'value': value,
'time': startTime,
'constant': timeConstant
});
this._param.setTargetAtTime(value, startTime, timeConstant);
return this;
};
Tone.TimelineSignal.prototype.cancelScheduledValues = function (after) {
this._events.cancel(after);
this._param.cancelScheduledValues(this.toSeconds(after));
return this;
};
Tone.TimelineSignal.prototype.setRampPoint = function (time) {
time = this.toSeconds(time);
var val = this.getValueAtTime(time);
var after = this._searchAfter(time);
if (after) {
this.cancelScheduledValues(time);
if (after.type === Tone.TimelineSignal.Type.Linear) {
this.linearRampToValueAtTime(val, time);
} else if (after.type === Tone.TimelineSignal.Type.Exponential) {
this.exponentialRampToValueAtTime(val, time);
}
}
this.setValueAtTime(val, time);
return this;
};
Tone.TimelineSignal.prototype.linearRampToValueBetween = function (value, start, finish) {
this.setRampPoint(start);
this.linearRampToValueAtTime(value, finish);
return this;
};
Tone.TimelineSignal.prototype.exponentialRampToValueBetween = function (value, start, finish) {
this.setRampPoint(start);
this.exponentialRampToValueAtTime(value, finish);
return this;
};
Tone.TimelineSignal.prototype._searchBefore = function (time) {
return this._events.getEvent(time);
};
Tone.TimelineSignal.prototype._searchAfter = function (time) {
return this._events.getEventAfter(time);
};
Tone.TimelineSignal.prototype.getValueAtTime = function (time) {
var after = this._searchAfter(time);
var before = this._searchBefore(time);
var value = this._initial;
if (before === null) {
value = this._initial;
} else if (before.type === Tone.TimelineSignal.Type.Target) {
var previous = this._events.getEventBefore(before.time);
var previousVal;
if (previous === null) {
previousVal = this._initial;
} else {
previousVal = previous.value;
}
value = this._exponentialApproach(before.time, previousVal, before.value, before.constant, time);
} else if (after === null) {
value = before.value;
} else if (after.type === Tone.TimelineSignal.Type.Linear) {
value = this._linearInterpolate(before.time, before.value, after.time, after.value, time);
} else if (after.type === Tone.TimelineSignal.Type.Exponential) {
value = this._exponentialInterpolate(before.time, before.value, after.time, after.value, time);
} else {
value = before.value;
}
return value;
};
Tone.TimelineSignal.prototype.connect = Tone.SignalBase.prototype.connect;
Tone.TimelineSignal.prototype._exponentialApproach = function (t0, v0, v1, timeConstant, t) {
return v1 + (v0 - v1) * Math.exp(-(t - t0) / timeConstant);
};
Tone.TimelineSignal.prototype._linearInterpolate = function (t0, v0, t1, v1, t) {
return v0 + (v1 - v0) * ((t - t0) / (t1 - t0));
};
Tone.TimelineSignal.prototype._exponentialInterpolate = function (t0, v0, t1, v1, t) {
v0 = Math.max(this._minOutput, v0);
return v0 * Math.pow(v1 / v0, (t - t0) / (t1 - t0));
};
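// Worked examples for the interpolators above:
//   _linearInterpolate(0, 0, 1, 10, 0.5)       -> 5
//   _exponentialInterpolate(0, 1, 1, 100, 0.5) -> 10 (geometric midpoint)
//   _exponentialApproach(0, 0, 1, 0.1, 0.1)    -> 1 - 1/e ~= 0.632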
Tone.TimelineSignal.prototype.dispose = function () {
Tone.Signal.prototype.dispose.call(this);
Tone.Param.prototype.dispose.call(this);
this._events.dispose();
this._events = null;
};
return Tone.TimelineSignal;
}(Tone_core_Tone, Tone_signal_Signal);
var env;
env = function () {
'use strict';
var p5sound = master;
var Add = Tone_signal_Add;
var Mult = Tone_signal_Multiply;
var Scale = Tone_signal_Scale;
var TimelineSignal = Tone_signal_TimelineSignal;
var Tone = Tone_core_Tone;
Tone.setContext(p5sound.audiocontext);
/**
* Envelopes are pre-defined amplitude distributions over time.
* Typically, envelopes are used to control the output volume
* of an object, a series of fades referred to as Attack, Decay,
* Sustain and Release (ADSR). Envelopes can also control other
* Web Audio Parameters. For example, a p5.Env can control an
* Oscillator's frequency like this: osc.freq(env).
* Use setRange to change the attack/release level.
* Use setADSR to change attackTime, decayTime, sustainPercent and releaseTime.
* Use the play method to play the entire envelope,
* the ramp method for a pingable trigger,
* or triggerAttack / triggerRelease to trigger noteOn/noteOff.
*
* @class p5.Env
* @constructor
* @example
*
* var attackLevel = 1.0;
* var releaseLevel = 0;
*
* var attackTime = 0.001;
* var decayTime = 0.2;
* var susPercent = 0.2;
* var releaseTime = 0.5;
*
* var env, triOsc;
*
* function setup() {
* var cnv = createCanvas(100, 100);
*
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* env = new p5.Env();
* env.setADSR(attackTime, decayTime, susPercent, releaseTime);
* env.setRange(attackLevel, releaseLevel);
*
* triOsc = new p5.Oscillator('triangle');
* triOsc.amp(env);
* triOsc.start();
* triOsc.freq(220);
*
* cnv.mousePressed(playEnv);
* }
*
* function playEnv(){
* env.play();
* }
*
*/
p5.Env = function (t1, l1, t2, l2, t3, l3) {
var now = p5sound.audiocontext.currentTime;
/**
* Time until envelope reaches attackLevel
* @property attackTime
*/
this.aTime = t1 || 0.1;
/**
* Level once attack is complete.
* @property attackLevel
*/
this.aLevel = l1 || 1;
/**
* Time until envelope reaches decayLevel.
* @property decayTime
*/
this.dTime = t2 || 0.5;
/**
* Level after decay. The envelope will sustain here until it is released.
* @property decayLevel
*/
this.dLevel = l2 || 0;
/**
* Duration of the release portion of the envelope.
* @property releaseTime
*/
this.rTime = t3 || 0;
/**
* Level at the end of the release.
* @property releaseLevel
*/
this.rLevel = l3 || 0;
this._rampHighPercentage = 0.98;
this._rampLowPercentage = 0.02;
this.output = p5sound.audiocontext.createGain();
this.control = new TimelineSignal();
this._init();
// this makes sure the envelope starts at zero
this.control.connect(this.output);
// connect to the output
this.connection = null;
// store connection
//array of math operation signal chaining
this.mathOps = [this.control];
//whether envelope should be linear or exponential curve
this.isExponential = false;
// oscillator or buffer source to clear on env complete
// to save resources if/when it is retriggered
this.sourceToClear = null;
// set to true if attack is set, then false on release
this.wasTriggered = false;
// add to the soundArray so we can dispose of the env later
p5sound.soundArray.push(this);
};
// this init function just smooths the starting value to zero and gives a start point for the timeline
// - it was necessary to remove glitches at the beginning.
p5.Env.prototype._init = function () {
var now = p5sound.audiocontext.currentTime;
var t = now;
this.control.setTargetAtTime(0.00001, t, 0.001);
//also, compute the correct time constants
this._setRampAD(this.aTime, this.dTime);
};
/**
* Reset the envelope with a series of time/value pairs.
*
* @method set
* @param {Number} attackTime Time (in seconds) before level
* reaches attackLevel
* @param {Number} attackLevel Typically an amplitude between
* 0.0 and 1.0
* @param {Number} decayTime Time
* @param {Number} decayLevel Amplitude (In a standard ADSR envelope,
* decayLevel = sustainLevel)
* @param {Number} releaseTime Release Time (in seconds)
* @param {Number} releaseLevel Amplitude
* @example
*
* var t1 = 0.1; // attack time in seconds
* var l1 = 0.7; // attack level 0.0 to 1.0
* var t2 = 0.3; // decay time in seconds
* var l2 = 0.1; // decay level 0.0 to 1.0
* var t3 = 0.2; // release time in seconds
* var l3 = 0; // release level 0.0 to 1.0
*
* var env;
* var triOsc;
*
* function setup() {
* background(0);
* noStroke();
* fill(255);
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* env = new p5.Env(t1, l1, t2, l2, t3, l3);
* triOsc = new p5.Oscillator('triangle');
* triOsc.amp(env); // give the env control of the triOsc's amp
* triOsc.start();
* }
*
* // mouseClick triggers envelope if over canvas
* function mouseClicked() {
* // is mouse over canvas?
* if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
* env.play(triOsc);
* }
* }
*
*
*/
p5.Env.prototype.set = function (t1, l1, t2, l2, t3, l3) {
this.aTime = t1;
this.aLevel = l1;
this.dTime = t2 || 0;
this.dLevel = l2 || 0;
this.rTime = t3 || 0;
this.rLevel = l3 || 0;
// set time constants for ramp
this._setRampAD(t1, t2);
};
/**
* Set values like a traditional ADSR envelope.
*
* @method setADSR
* @param {Number} attackTime Time (in seconds) before envelope
* reaches Attack Level
* @param {Number} [decayTime] Time (in seconds) before envelope
* reaches Decay/Sustain Level
* @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
* where 1.0 = attackLevel, 0.0 = releaseLevel.
* The susRatio determines the decayLevel and the level at which the
* sustain portion of the envelope will sustain.
* For example, if attackLevel is 0.4, releaseLevel is 0,
* and susRatio is 0.5, the decayLevel would be 0.2. If attackLevel is
* increased to 1.0 (using setRange),
* then decayLevel would increase proportionally, to become 0.5.
* @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
* @example
*
* var attackLevel = 1.0;
* var releaseLevel = 0;
*
* var attackTime = 0.001;
* var decayTime = 0.2;
* var susPercent = 0.2;
* var releaseTime = 0.5;
*
* var env, triOsc;
*
* function setup() {
* var cnv = createCanvas(100, 100);
*
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* env = new p5.Env();
* env.setADSR(attackTime, decayTime, susPercent, releaseTime);
* env.setRange(attackLevel, releaseLevel);
*
* triOsc = new p5.Oscillator('triangle');
* triOsc.amp(env);
* triOsc.start();
* triOsc.freq(220);
*
* cnv.mousePressed(playEnv);
* }
*
* function playEnv(){
* env.play();
* }
*
*/
p5.Env.prototype.setADSR = function (aTime, dTime, sPercent, rTime) {
this.aTime = aTime;
this.dTime = dTime || 0;
// lerp
this.sPercent = sPercent || 0;
this.dLevel = typeof sPercent !== 'undefined' ? sPercent * (this.aLevel - this.rLevel) + this.rLevel : 0;
this.rTime = rTime || 0;
// also set time constants for ramp
this._setRampAD(aTime, dTime);
};
/**
* Set max (attackLevel) and min (releaseLevel) of envelope.
*
* @method setRange
* @param {Number} aLevel attack level (defaults to 1)
* @param {Number} rLevel release level (defaults to 0)
* @example
*
* var attackLevel = 1.0;
* var releaseLevel = 0;
*
* var attackTime = 0.001;
* var decayTime = 0.2;
* var susPercent = 0.2;
* var releaseTime = 0.5;
*
* var env, triOsc;
*
* function setup() {
* var cnv = createCanvas(100, 100);
*
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* env = new p5.Env();
* env.setADSR(attackTime, decayTime, susPercent, releaseTime);
* env.setRange(attackLevel, releaseLevel);
*
* triOsc = new p5.Oscillator('triangle');
* triOsc.amp(env);
* triOsc.start();
* triOsc.freq(220);
*
* cnv.mousePressed(playEnv);
* }
*
* function playEnv(){
* env.play();
* }
*
*/
p5.Env.prototype.setRange = function (aLevel, rLevel) {
this.aLevel = aLevel || 1;
this.rLevel = rLevel || 0;
};
// private (undocumented) method called when ADSR is set to set time constants for ramp
//
// Set the
// time constants for simple exponential ramps.
// The larger the time constant value, the slower the
// transition will be.
//
// method _setRampAD
// param {Number} attackTimeConstant attack time constant
// param {Number} decayTimeConstant decay time constant
//
p5.Env.prototype._setRampAD = function (t1, t2) {
this._rampAttackTime = this.checkExpInput(t1);
this._rampDecayTime = this.checkExpInput(t2);
var TCDenominator = 1;
// Aatish Bhatia's calculation for the rise time constant (adjusts the 1 - 1/e calculation to any target percentage)
TCDenominator = Math.log(1 / this.checkExpInput(1 - this._rampHighPercentage));
this._rampAttackTC = t1 / this.checkExpInput(TCDenominator);
TCDenominator = Math.log(1 / this._rampLowPercentage);
this._rampDecayTC = t2 / this.checkExpInput(TCDenominator);
};
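// Worked numbers: with the default _rampHighPercentage of 0.98,
// TCDenominator = Math.log(1 / 0.02) ~= 3.912, so a 1s attack gets a time
// constant of ~0.256s, and setTargetAtTime reaches 98% of the target by
// the end of the attack time.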
// private method
p5.Env.prototype.setRampPercentages = function (p1, p2) {
//set the percentages that the simple exponential ramps go to
this._rampHighPercentage = this.checkExpInput(p1);
this._rampLowPercentage = this.checkExpInput(p2);
var TCDenominator = 1;
//now re-compute the time constants based on those percentages
// Aatish Bhatia's calculation for the rise time constant (adjusts the 1 - 1/e calculation to any target percentage)
TCDenominator = Math.log(1 / this.checkExpInput(1 - this._rampHighPercentage));
this._rampAttackTC = this._rampAttackTime / this.checkExpInput(TCDenominator);
TCDenominator = Math.log(1 / this._rampLowPercentage);
this._rampDecayTC = this._rampDecayTime / this.checkExpInput(TCDenominator);
};
/**
* Assign a parameter to be controlled by this envelope.
* If a p5.Sound object is given, then the p5.Env will control its
* output gain. If multiple inputs are provided, the env will
* control all of them.
*
* @method setInput
* @param {Object} unit A p5.sound object or
* Web Audio Param.
*/
p5.Env.prototype.setInput = function (unit) {
for (var i = 0; i < arguments.length; i++) {
this.connect(arguments[i]);
}
};
/**
* Set whether the envelope ramp is linear (default) or exponential.
* Exponential ramps can be useful because we perceive amplitude
* and frequency logarithmically.
*
* @method setExp
* @param {Boolean} isExp true is exponential, false is linear
*/
p5.Env.prototype.setExp = function (isExp) {
this.isExponential = isExp;
};
//helper method to protect against zero values being sent to exponential functions
p5.Env.prototype.checkExpInput = function (value) {
if (value <= 0) {
value = 1e-8;
}
return value;
};
/**
* Play tells the envelope to start acting on a given input.
* If the input is a p5.sound object (i.e. AudioIn, Oscillator,
* SoundFile), then Env will control its output volume.
* Envelopes can also be used to control any
* Web Audio Audio Param.
*
* @method play
* @param {Object} unit A p5.sound object or
* Web Audio Param.
* @param {Number} [startTime] time from now (in seconds) at which to play
* @param {Number} [sustainTime] time to sustain before releasing the envelope
* @example
*
* var attackLevel = 1.0;
* var releaseLevel = 0;
*
* var attackTime = 0.001;
* var decayTime = 0.2;
* var susPercent = 0.2;
* var releaseTime = 0.5;
*
* var env, triOsc;
*
* function setup() {
* var cnv = createCanvas(100, 100);
*
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* env = new p5.Env();
* env.setADSR(attackTime, decayTime, susPercent, releaseTime);
* env.setRange(attackLevel, releaseLevel);
*
* triOsc = new p5.Oscillator('triangle');
* triOsc.amp(env);
* triOsc.start();
* triOsc.freq(220);
*
* cnv.mousePressed(playEnv);
* }
*
* function playEnv(){
* // trigger env on triOsc, 0 seconds from now
* // After decay, sustain for 0.2 seconds before release
* env.play(triOsc, 0, 0.2);
* }
*
*/
p5.Env.prototype.play = function (unit, secondsFromNow, susTime) {
var now = p5sound.audiocontext.currentTime;
var tFromNow = secondsFromNow || 0;
susTime = susTime || 0;
if (unit) {
if (this.connection !== unit) {
this.connect(unit);
}
}
this.triggerAttack(unit, tFromNow);
this.triggerRelease(unit, tFromNow + this.aTime + this.dTime + susTime);
};
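// Timing sketch: play() schedules one full pass through the envelope.
// With aTime = 0.1, dTime = 0.2 and env.play(osc, 0.5, 1), the attack
// starts 0.5s from now and the release triggers at
// 0.5 + 0.1 + 0.2 + 1 = 1.8s.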
/**
* Trigger the Attack, and Decay portion of the Envelope.
* Similar to holding down a key on a piano, but it will
* hold the sustain level until you let go. Input can be
* any p5.sound object, or a
* Web Audio Param.
*
* @method triggerAttack
* @param {Object} unit p5.sound Object or Web Audio Param
* @param {Number} secondsFromNow time from now (in seconds)
* @example
*
*
* var attackLevel = 1.0;
* var releaseLevel = 0;
*
* var attackTime = 0.001;
* var decayTime = 0.3;
* var susPercent = 0.4;
* var releaseTime = 0.5;
*
* var env, triOsc;
*
* function setup() {
* var cnv = createCanvas(100, 100);
* background(200);
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* env = new p5.Env();
* env.setADSR(attackTime, decayTime, susPercent, releaseTime);
* env.setRange(attackLevel, releaseLevel);
*
* triOsc = new p5.Oscillator('triangle');
* triOsc.amp(env);
* triOsc.start();
* triOsc.freq(220);
*
* cnv.mousePressed(envAttack);
* }
*
* function envAttack(){
* console.log('trigger attack');
* env.triggerAttack();
*
* background(0,255,0);
* text('attack!', width/2, height/2);
* }
*
* function mouseReleased() {
* env.triggerRelease();
*
* background(200);
* text('click to play', width/2, height/2);
* }
*
*/
p5.Env.prototype.triggerAttack = function (unit, secondsFromNow) {
var now = p5sound.audiocontext.currentTime;
var tFromNow = secondsFromNow || 0;
var t = now + tFromNow;
this.lastAttack = t;
this.wasTriggered = true;
if (unit) {
if (this.connection !== unit) {
this.connect(unit);
}
}
// get and set value (with linear ramp) to anchor automation
var valToSet = this.control.getValueAtTime(t);
this.control.cancelScheduledValues(t);
// not sure if this is necessary
if (this.isExponential) {
this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t);
} else {
this.control.linearRampToValueAtTime(valToSet, t);
}
// after each ramp completes, cancel scheduled values
// (so they can be overridden in case env has been re-triggered)
// then, set current value (with linearRamp to avoid click)
// then, schedule the next automation...
// attack
t += this.aTime;
if (this.isExponential) {
this.control.exponentialRampToValueAtTime(this.checkExpInput(this.aLevel), t);
valToSet = this.checkExpInput(this.control.getValueAtTime(t));
this.control.cancelScheduledValues(t);
this.control.exponentialRampToValueAtTime(valToSet, t);
} else {
this.control.linearRampToValueAtTime(this.aLevel, t);
valToSet = this.control.getValueAtTime(t);
this.control.cancelScheduledValues(t);
this.control.linearRampToValueAtTime(valToSet, t);
}
// decay to decay level (if using ADSR, then decay level == sustain level)
t += this.dTime;
if (this.isExponential) {
this.control.exponentialRampToValueAtTime(this.checkExpInput(this.dLevel), t);
valToSet = this.checkExpInput(this.control.getValueAtTime(t));
this.control.cancelScheduledValues(t);
this.control.exponentialRampToValueAtTime(valToSet, t);
} else {
this.control.linearRampToValueAtTime(this.dLevel, t);
valToSet = this.control.getValueAtTime(t);
this.control.cancelScheduledValues(t);
this.control.linearRampToValueAtTime(valToSet, t);
}
};
/**
* Trigger the Release of the Envelope. This is similar to releasing
* the key on a piano and letting the sound fade according to the
* release level and release time.
*
* @method triggerRelease
* @param {Object} unit p5.sound Object or Web Audio Param
* @param {Number} secondsFromNow time to trigger the release
* @example
*
*
* var attackLevel = 1.0;
* var releaseLevel = 0;
*
* var attackTime = 0.001;
* var decayTime = 0.3;
* var susPercent = 0.4;
* var releaseTime = 0.5;
*
* var env, triOsc;
*
* function setup() {
* var cnv = createCanvas(100, 100);
* background(200);
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* env = new p5.Env();
* env.setADSR(attackTime, decayTime, susPercent, releaseTime);
* env.setRange(attackLevel, releaseLevel);
*
* triOsc = new p5.Oscillator('triangle');
* triOsc.amp(env);
* triOsc.start();
* triOsc.freq(220);
*
* cnv.mousePressed(envAttack);
* }
*
* function envAttack(){
* console.log('trigger attack');
* env.triggerAttack();
*
* background(0,255,0);
* text('attack!', width/2, height/2);
* }
*
* function mouseReleased() {
* env.triggerRelease();
*
* background(200);
* text('click to play', width/2, height/2);
* }
*
*/
p5.Env.prototype.triggerRelease = function (unit, secondsFromNow) {
// only trigger a release if an attack was triggered
if (!this.wasTriggered) {
// this currently causes a bit of trouble:
// if a later release has been scheduled (via the play function)
// a new earlier release won't interrupt it, because
// this.wasTriggered has already been set to false.
// If we want new earlier releases to override, then we need to
// keep track of the last release time, and if the new release time is
// earlier, then use it.
return;
}
var now = p5sound.audiocontext.currentTime;
var tFromNow = secondsFromNow || 0;
var t = now + tFromNow;
if (unit) {
if (this.connection !== unit) {
this.connect(unit);
}
}
// get and set value (with linear or exponential ramp) to anchor automation
var valToSet = this.control.getValueAtTime(t);
this.control.cancelScheduledValues(t);
// not sure if this is necessary
if (this.isExponential) {
this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t);
} else {
this.control.linearRampToValueAtTime(valToSet, t);
}
// release
t += this.rTime;
if (this.isExponential) {
this.control.exponentialRampToValueAtTime(this.checkExpInput(this.rLevel), t);
valToSet = this.checkExpInput(this.control.getValueAtTime(t));
this.control.cancelScheduledValues(t);
this.control.exponentialRampToValueAtTime(valToSet, t);
} else {
this.control.linearRampToValueAtTime(this.rLevel, t);
valToSet = this.control.getValueAtTime(t);
this.control.cancelScheduledValues(t);
this.control.linearRampToValueAtTime(valToSet, t);
}
this.wasTriggered = false;
};
/**
* Exponentially ramp to a value using the first two
* values from setADSR(attackTime, decayTime)
* as time constants for simple exponential ramps.
* If the value is higher than current value, it uses attackTime,
* while a decrease uses decayTime.
*
* @method ramp
* @param {Object} unit p5.sound Object or Web Audio Param
* @param {Number} secondsFromNow When to trigger the ramp
* @param {Number} v Target value
* @param {Number} [v2] Second target value (optional)
* @example
*
* var env, osc, amp, cnv;
*
* var attackTime = 0.001;
* var decayTime = 0.2;
* var attackLevel = 1;
* var decayLevel = 0;
*
* function setup() {
* cnv = createCanvas(100, 100);
* fill(0,255,0);
* noStroke();
*
* env = new p5.Env();
* env.setADSR(attackTime, decayTime);
*
* osc = new p5.Oscillator();
* osc.amp(env);
* osc.start();
*
* amp = new p5.Amplitude();
*
* cnv.mousePressed(triggerRamp);
* }
*
* function triggerRamp() {
* env.ramp(osc, 0, attackLevel, decayLevel);
* }
*
* function draw() {
* background(20,20,20);
* text('click me', 10, 20);
* var h = map(amp.getLevel(), 0, 0.4, 0, height);
*
* rect(0, height, width, -h);
* }
*
*/
p5.Env.prototype.ramp = function (unit, secondsFromNow, v1, v2) {
var now = p5sound.audiocontext.currentTime;
var tFromNow = secondsFromNow || 0;
var t = now + tFromNow;
var destination1 = this.checkExpInput(v1);
var destination2 = typeof v2 !== 'undefined' ? this.checkExpInput(v2) : undefined;
// connect env to unit if not already connected
if (unit) {
if (this.connection !== unit) {
this.connect(unit);
}
}
//get current value
var currentVal = this.checkExpInput(this.control.getValueAtTime(t));
this.control.cancelScheduledValues(t);
//if it's going up
if (destination1 > currentVal) {
this.control.setTargetAtTime(destination1, t, this._rampAttackTC);
t += this._rampAttackTime;
} else if (destination1 < currentVal) {
this.control.setTargetAtTime(destination1, t, this._rampDecayTC);
t += this._rampDecayTime;
}
// Now the second part of envelope begins
if (destination2 === undefined)
return;
//if it's going up
if (destination2 > destination1) {
this.control.setTargetAtTime(destination2, t, this._rampAttackTC);
} else if (destination2 < destination1) {
this.control.setTargetAtTime(destination2, t, this._rampDecayTC);
}
};
p5.Env.prototype.connect = function (unit) {
this.connection = unit;
// assume we're talking about output gain
// unless given a different audio param
if (unit instanceof p5.Oscillator || unit instanceof p5.SoundFile || unit instanceof p5.AudioIn || unit instanceof p5.Reverb || unit instanceof p5.Noise || unit instanceof p5.Filter || unit instanceof p5.Delay) {
unit = unit.output.gain;
}
if (unit instanceof AudioParam) {
//set the initial value
unit.setValueAtTime(0, p5sound.audiocontext.currentTime);
}
if (unit instanceof p5.Signal) {
unit.setValue(0);
}
this.output.connect(unit);
};
p5.Env.prototype.disconnect = function (unit) {
this.output.disconnect();
};
// Signal Math
/**
* Add a value to the p5.Env's output amplitude,
* and return the envelope. Calling this method
* again will override the initial add() with new values.
*
* @method add
* @param {Number} number Constant number to add
* @return {p5.Env} Envelope Returns this envelope
* with scaled output
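*
* @example
*
* // A minimal sketch of add() (one possible routing; it assumes a
* // started p5.Oscillator named osc).
* var env = new p5.Env();
* env.setRange(1, 0);
* env.add(0.5); // output now travels between 0.5 and 1.5
* osc.amp(env);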
*/
p5.Env.prototype.add = function (num) {
var add = new Add(num);
var thisChain = this.mathOps.length;
var nextChain = this.output;
return p5.prototype._mathChain(this, add, thisChain, nextChain, Add);
};
/**
* Multiply the p5.Env's output amplitude
* by a fixed value. Calling this method
* again will override the initial mult() with new values.
*
* @method mult
* @param {Number} number Constant number to multiply
* @return {p5.Env} Envelope Returns this envelope
* with scaled output
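*
* @example
*
* // A minimal sketch of mult() (assumes a started p5.Oscillator
* // named osc; note that levels above 1.0 may clip).
* var env = new p5.Env();
* env.setADSR(0.01, 0.2, 0.5, 0.5);
* env.mult(0.5); // halve the envelope's output level
* osc.amp(env);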
*/
p5.Env.prototype.mult = function (num) {
var mult = new Mult(num);
var thisChain = this.mathOps.length;
var nextChain = this.output;
return p5.prototype._mathChain(this, mult, thisChain, nextChain, Mult);
};
/**
* Scale this envelope's amplitude values to a given
* range, and return the envelope. Calling this method
* again will override the initial scale() with new values.
*
* @method scale
* @param {Number} inMin input range minimum
* @param {Number} inMax input range maximum
* @param {Number} outMin output range minimum
* @param {Number} outMax output range maximum
* @return {p5.Env} Envelope Returns this envelope
* with scaled output
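*
* @example
*
* // A sketch of scale(): map the envelope's 0-1 output onto a
* // frequency range and use it to modulate an oscillator's pitch.
* // (Assumes a started p5.Oscillator named osc; exact routing may vary.)
* var env = new p5.Env();
* env.setADSR(0.01, 0.2, 0.2, 0.3);
* env.scale(0, 1, 880, 220); // 0 maps to 880 Hz, 1 maps to 220 Hz
* osc.freq(env);
* env.play();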
*/
p5.Env.prototype.scale = function (inMin, inMax, outMin, outMax) {
var scale = new Scale(inMin, inMax, outMin, outMax);
var thisChain = this.mathOps.length;
var nextChain = this.output;
return p5.prototype._mathChain(this, scale, thisChain, nextChain, Scale);
};
// get rid of the oscillator
p5.Env.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
var now = p5sound.audiocontext.currentTime;
this.disconnect();
try {
this.control.dispose();
this.control = null;
} catch (e) {
}
for (var i = 1; i < this.mathOps.length; i++) {
this.mathOps[i].dispose();
}
};
}(master, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale, Tone_signal_TimelineSignal, Tone_core_Tone);
var pulse;
pulse = function () {
'use strict';
var p5sound = master;
/**
* Creates a Pulse object, an oscillator that implements
* Pulse Width Modulation.
* The pulse is created with two oscillators.
* Accepts parameters for frequency and for the
* width between the pulses. See p5.Oscillator
* for a full list of methods.
*
* @class p5.Pulse
* @constructor
* @param {Number} [freq] Frequency in oscillations per second (Hz)
* @param {Number} [w] Width between the pulses (0 to 1.0,
* defaults to 0)
* @example
*
* var pulse;
* function setup() {
* background(0);
*
* // Create and start the pulse wave oscillator
* pulse = new p5.Pulse();
* pulse.amp(0.5);
* pulse.freq(220);
* pulse.start();
* }
*
* function draw() {
* var w = map(mouseX, 0, width, 0, 1);
* w = constrain(w, 0, 1);
* pulse.width(w);
* }
*
*/
p5.Pulse = function (freq, w) {
p5.Oscillator.call(this, freq, 'sawtooth');
// width of PWM, should be between 0 and 1.0
this.w = w || 0;
// create a second sawtooth at the same frequency; it will be inverted and delayed
this.osc2 = new p5.SawOsc(freq);
// create a delay node
this.dNode = p5sound.audiocontext.createDelay();
// dc offset
this.dcOffset = createDCOffset();
this.dcGain = p5sound.audiocontext.createGain();
this.dcOffset.connect(this.dcGain);
this.dcGain.connect(this.output);
// set delay time based on PWM width
this.f = freq || 440;
var mW = this.w / this.oscillator.frequency.value;
this.dNode.delayTime.value = mW;
this.dcGain.gain.value = 1.7 * (0.5 - this.w);
// disconnect osc2 and connect it to delay, which is connected to output
this.osc2.disconnect();
this.osc2.panner.disconnect();
this.osc2.amp(-1);
// inverted amplitude
this.osc2.output.connect(this.dNode);
this.dNode.connect(this.output);
this.output.gain.value = 1;
this.output.connect(this.panner);
};
p5.Pulse.prototype = Object.create(p5.Oscillator.prototype);
/**
* Set the width of a Pulse object (an oscillator that implements
* Pulse Width Modulation).
*
* @method width
* @param {Number} [width] Width between the pulses (0 to 1.0,
* defaults to 0)
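*
* @example
*
* // A sketch of audio-rate width modulation. The assumption here is
* // that the modulator's output stays within the 0-1 width range.
* var pulse = new p5.Pulse(220);
* pulse.amp(0.4);
* pulse.start();
*
* var lfo = new p5.Oscillator(0.5); // slow sine wave
* lfo.disconnect(); // modulate only; don't send the LFO to the speakers
* lfo.scale(-1, 1, 0, 1); // map the sine's -1..1 output into 0..1
* lfo.start();
* pulse.width(lfo);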
*/
p5.Pulse.prototype.width = function (w) {
if (typeof w === 'number') {
if (w <= 1 && w >= 0) {
this.w = w;
// set delay time based on PWM width
// var mW = map(this.w, 0, 1.0, 0, 1/this.f);
var mW = this.w / this.oscillator.frequency.value;
this.dNode.delayTime.value = mW;
}
this.dcGain.gain.value = 1.7 * (0.5 - this.w);
} else {
w.connect(this.dNode.delayTime);
var sig = new p5.SignalAdd(-0.5);
sig.setInput(w);
sig = sig.mult(-1);
sig = sig.mult(1.7);
sig.connect(this.dcGain.gain);
}
};
p5.Pulse.prototype.start = function (f, time) {
var now = p5sound.audiocontext.currentTime;
var t = time || 0;
if (!this.started) {
var freq = f || this.f;
var type = this.oscillator.type;
this.oscillator = p5sound.audiocontext.createOscillator();
this.oscillator.frequency.setValueAtTime(freq, now);
this.oscillator.type = type;
this.oscillator.connect(this.output);
this.oscillator.start(t + now);
// set up osc2
this.osc2.oscillator = p5sound.audiocontext.createOscillator();
this.osc2.oscillator.frequency.setValueAtTime(freq, t + now);
this.osc2.oscillator.type = type;
this.osc2.oscillator.connect(this.osc2.output);
this.osc2.start(t + now);
this.freqNode = [
this.oscillator.frequency,
this.osc2.oscillator.frequency
];
// start dcOffset, too
this.dcOffset = createDCOffset();
this.dcOffset.connect(this.dcGain);
this.dcOffset.start(t + now);
// if LFO connections depend on these oscillators
if (this.mods !== undefined && this.mods.frequency !== undefined) {
this.mods.frequency.connect(this.freqNode[0]);
this.mods.frequency.connect(this.freqNode[1]);
}
this.started = true;
this.osc2.started = true;
}
};
p5.Pulse.prototype.stop = function (time) {
if (this.started) {
var t = time || 0;
var now = p5sound.audiocontext.currentTime;
this.oscillator.stop(t + now);
this.osc2.oscillator.stop(t + now);
this.dcOffset.stop(t + now);
this.started = false;
this.osc2.started = false;
}
};
p5.Pulse.prototype.freq = function (val, rampTime, tFromNow) {
if (typeof val === 'number') {
this.f = val;
var now = p5sound.audiocontext.currentTime;
rampTime = rampTime || 0;
tFromNow = tFromNow || 0;
var currentFreq = this.oscillator.frequency.value;
this.oscillator.frequency.cancelScheduledValues(now);
this.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
this.osc2.oscillator.frequency.cancelScheduledValues(now);
this.osc2.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
this.osc2.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
if (this.freqMod) {
this.freqMod.output.disconnect();
this.freqMod = null;
}
} else if (val.output) {
val.output.disconnect();
val.output.connect(this.oscillator.frequency);
val.output.connect(this.osc2.oscillator.frequency);
this.freqMod = val;
}
};
// inspiration: http://webaudiodemos.appspot.com/oscilloscope/
function createDCOffset() {
var ac = p5sound.audiocontext;
var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
var data = buffer.getChannelData(0);
for (var i = 0; i < 2048; i++)
data[i] = 1;
var bufferSource = ac.createBufferSource();
bufferSource.buffer = buffer;
bufferSource.loop = true;
return bufferSource;
}
}(master, oscillator);
var noise;
noise = function () {
'use strict';
var p5sound = master;
/**
* Noise is a type of oscillator that generates a buffer with random values.
*
* @class p5.Noise
* @constructor
* @param {String} type Type of noise can be 'white' (default),
* 'brown' or 'pink'.
* @return {Object} Noise Object
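* @example
*
* // a minimal sketch: pink noise at half volume
* var noise;
* function setup() {
* noise = new p5.Noise('pink');
* noise.amp(0.5);
* noise.start();
* }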
*/
p5.Noise = function (type) {
var assignType;
p5.Oscillator.call(this);
delete this.f;
delete this.freq;
delete this.oscillator;
if (type === 'brown') {
assignType = _brownNoise;
} else if (type === 'pink') {
assignType = _pinkNoise;
} else {
assignType = _whiteNoise;
}
this.buffer = assignType;
};
p5.Noise.prototype = Object.create(p5.Oscillator.prototype);
// generate noise buffers
var _whiteNoise = function () {
var bufferSize = 2 * p5sound.audiocontext.sampleRate;
var whiteBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
var noiseData = whiteBuffer.getChannelData(0);
for (var i = 0; i < bufferSize; i++) {
noiseData[i] = Math.random() * 2 - 1;
}
whiteBuffer.type = 'white';
return whiteBuffer;
}();
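// Pink noise: white noise shaped toward a roughly -3dB/octave rolloff.
// (The coefficients below appear to follow Paul Kellet's well-known
// pink-noise filter approximation.)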
var _pinkNoise = function () {
var bufferSize = 2 * p5sound.audiocontext.sampleRate;
var pinkBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
var noiseData = pinkBuffer.getChannelData(0);
var b0, b1, b2, b3, b4, b5, b6;
b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0;
for (var i = 0; i < bufferSize; i++) {
var white = Math.random() * 2 - 1;
b0 = 0.99886 * b0 + white * 0.0555179;
b1 = 0.99332 * b1 + white * 0.0750759;
b2 = 0.969 * b2 + white * 0.153852;
b3 = 0.8665 * b3 + white * 0.3104856;
b4 = 0.55 * b4 + white * 0.5329522;
b5 = -0.7616 * b5 - white * 0.016898;
noiseData[i] = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362;
noiseData[i] *= 0.11;
// (roughly) compensate for gain
b6 = white * 0.115926;
}
pinkBuffer.type = 'pink';
return pinkBuffer;
}();
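// Brown noise: a leaky integration of white noise (roughly -6dB/octave
// rolloff); the 3.5x gain compensates for the integrator's low output level.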
var _brownNoise = function () {
var bufferSize = 2 * p5sound.audiocontext.sampleRate;
var brownBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
var noiseData = brownBuffer.getChannelData(0);
var lastOut = 0;
for (var i = 0; i < bufferSize; i++) {
var white = Math.random() * 2 - 1;
noiseData[i] = (lastOut + 0.02 * white) / 1.02;
lastOut = noiseData[i];
noiseData[i] *= 3.5;
}
brownBuffer.type = 'brown';
return brownBuffer;
}();
/**
* Set type of noise to 'white', 'pink' or 'brown'.
* White is the default.
*
* @method setType
* @param {String} [type] 'white', 'pink' or 'brown'
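*
* @example
*
* // a sketch: switch noise types with key presses
* // (assumes noise is a started p5.Noise)
* function keyPressed() {
* if (key === 'w') { noise.setType('white'); }
* if (key === 'p') { noise.setType('pink'); }
* if (key === 'b') { noise.setType('brown'); }
* }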
*/
p5.Noise.prototype.setType = function (type) {
switch (type) {
case 'white':
this.buffer = _whiteNoise;
break;
case 'pink':
this.buffer = _pinkNoise;
break;
case 'brown':
this.buffer = _brownNoise;
break;
default:
this.buffer = _whiteNoise;
}
if (this.started) {
var now = p5sound.audiocontext.currentTime;
this.stop(now);
this.start(now + 0.01);
}
};
p5.Noise.prototype.getType = function () {
return this.buffer.type;
};
/**
* Start the noise
*
* @method start
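*
* @example
*
* // a sketch: start silent, then fade in to avoid a click
* var noise = new p5.Noise();
* noise.amp(0);
* noise.start();
* noise.amp(0.5, 0.05); // ramp to 0.5 over 50 milliseconds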
*/
p5.Noise.prototype.start = function () {
if (this.started) {
this.stop();
}
this.noise = p5sound.audiocontext.createBufferSource();
this.noise.buffer = this.buffer;
this.noise.loop = true;
this.noise.connect(this.output);
var now = p5sound.audiocontext.currentTime;
this.noise.start(now);
this.started = true;
};
/**
* Stop the noise.
*
* @method stop
*/
p5.Noise.prototype.stop = function () {
var now = p5sound.audiocontext.currentTime;
if (this.noise) {
this.noise.stop(now);
this.started = false;
}
};
/**
* Pan the noise.
*
* @method pan
* @param {Number} panning Number between -1 (left)
* and 1 (right)
* @param {Number} timeFromNow schedule this event to happen
* seconds from now
*/
/**
* Set the amplitude of the noise between 0 and 1.0. Or,
* modulate amplitude with an audio signal such as an oscillator.
*
* @method amp
* @param {Number|Object} volume amplitude between 0 and 1.0
* or modulating signal/oscillator
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Noise.prototype.dispose = function () {
var now = p5sound.audiocontext.currentTime;
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
if (this.noise) {
this.noise.disconnect();
this.stop(now);
}
if (this.output) {
this.output.disconnect();
}
if (this.panner) {
this.panner.disconnect();
}
this.output = null;
this.panner = null;
this.buffer = null;
this.noise = null;
};
}(master);
var audioin;
audioin = function () {
'use strict';
var p5sound = master;
var CustomError = errorHandler;
/**
* Get audio from an input, i.e. your computer's microphone.
*
* Turn the mic on/off with the start() and stop() methods. When the mic
* is on, its volume can be measured with getLevel or by connecting an
* FFT object.
*
* If you want to hear the AudioIn, use the .connect() method.
* AudioIn does not connect to p5.sound output by default to prevent
* feedback.
*
* Note: This uses the getUserMedia/Stream API, which is not supported
* by certain browsers. In the Chrome browser, access is limited to
* localhost and https; access over http may be limited.
*
* @class p5.AudioIn
* @constructor
* @param {Function} [errorCallback] A function to call if there is an error
* accessing the AudioIn. For example,
* Safari and iOS devices do not
* currently allow microphone access.
* @return {Object} AudioIn
* @example
*
* var mic;
* function setup(){
* mic = new p5.AudioIn();
* mic.start();
* }
* function draw(){
* background(0);
* var micLevel = mic.getLevel();
* ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);
* }
*
*/
p5.AudioIn = function (errorCallback) {
// set up audio input
this.input = p5sound.audiocontext.createGain();
this.output = p5sound.audiocontext.createGain();
this.stream = null;
this.mediaStream = null;
this.currentSource = 0;
/**
* Client must allow the browser to access their microphone / audioin source.
* Default: false. Will become true when the client enables access.
*
* @property {Boolean} enabled
*/
this.enabled = false;
// create an amplitude, connect to it by default but not to master out
this.amplitude = new p5.Amplitude();
this.output.connect(this.amplitude.input);
// Some browsers let developer determine their input sources
if (typeof window.MediaStreamTrack === 'undefined') {
if (errorCallback) {
errorCallback();
} else {
window.alert('This browser does not support AudioIn');
}
} else if (typeof window.MediaStreamTrack.getSources === 'function') {
// Chrome supports getSources to list inputs. Dev picks default
window.MediaStreamTrack.getSources(this._gotSources);
}
// add to soundArray so we can dispose on close
p5sound.soundArray.push(this);
};
/**
* Start processing audio input. This enables the use of other
* AudioIn methods like getLevel(). Note that by default, AudioIn
* is not connected to p5.sound's output. So you won't hear
* anything unless you use the connect() method.
*
* Certain browsers limit access to the user's microphone. For example,
* Chrome only allows access from localhost and over https. For this reason,
* you may want to include an errorCallback—a function that is called in case
* the browser won't provide mic access.
*
* @method start
* @param {Function} successCallback Name of a function to call on
* success.
* @param {Function} errorCallback Name of a function to call if
* there was an error. For example,
* some browsers do not support
* getUserMedia.
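*
* @example
*
* // a sketch with both callbacks (the function names are illustrative)
* var mic = new p5.AudioIn();
* mic.start(function() {
* console.log('mic is on');
* }, function(err) {
* console.error('mic access was not granted', err);
* });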
*/
p5.AudioIn.prototype.start = function (successCallback, errorCallback) {
var self = this;
// if a specific input source has been set (via _gotSources / setSource),
// request that source explicitly
if (p5sound.inputSources[self.currentSource]) {
// set the audio source
var audioSource = p5sound.inputSources[self.currentSource].id;
var constraints = { audio: { optional: [{ sourceId: audioSource }] } };
window.navigator.getUserMedia(constraints, this._onStream = function (stream) {
self.stream = stream;
self.enabled = true;
// Wrap a MediaStreamSourceNode around the live input
self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream);
self.mediaStream.connect(self.output);
if (successCallback)
successCallback();
// only send to the Amplitude reader, so we can see it but not hear it.
self.amplitude.setInput(self.output);
}, this._onStreamError = function (e) {
if (errorCallback)
errorCallback(e);
else
console.error(e);
});
} else {
// otherwise (e.g. Firefox), the user selects an input source via the browser
// Only get the audio stream.
window.navigator.getUserMedia({ 'audio': true }, this._onStream = function (stream) {
self.stream = stream;
self.enabled = true;
// Wrap a MediaStreamSourceNode around the live input
self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream);
self.mediaStream.connect(self.output);
// only send to the Amplitude reader, so we can see it but not hear it.
self.amplitude.setInput(self.output);
if (successCallback)
successCallback();
}, this._onStreamError = function (e) {
if (errorCallback)
errorCallback(e);
else
console.error(e);
});
}
};
/**
* Turn the AudioIn off. Once the AudioIn is stopped, getLevel() is
* no longer available. If re-started, the user may be prompted for
* permission again.
*
* @method stop
*/
p5.AudioIn.prototype.stop = function () {
if (this.stream) {
// assume only one track
this.stream.getTracks()[0].stop();
}
};
/**
* Connect to an audio unit. If no parameter is provided, will
* connect to the master output (i.e. your speakers).
*
* @method connect
* @param {Object} [unit] An object that accepts audio input,
* such as an FFT
*/
p5.AudioIn.prototype.connect = function (unit) {
if (unit) {
if (unit.hasOwnProperty('input')) {
this.output.connect(unit.input);
} else if (unit.hasOwnProperty('analyser')) {
this.output.connect(unit.analyser);
} else {
this.output.connect(unit);
}
} else {
this.output.connect(p5sound.input);
}
};
/**
* Disconnect the AudioIn from all audio units. For example, if
* connect() had been called, disconnect() will stop sending
* signal to your speakers.
*
* @method disconnect
*/
p5.AudioIn.prototype.disconnect = function () {
this.output.disconnect();
// stay connected to amplitude even if not outputting to p5
this.output.connect(this.amplitude.input);
};
/**
* Read the Amplitude (volume level) of an AudioIn. The AudioIn
* class contains its own instance of the Amplitude class to help
* make it easy to get a microphone's volume level. Accepts an
* optional smoothing value (between 0.0 and 1.0). NOTE: AudioIn must
* .start() before using .getLevel().
*
* @method getLevel
* @param {Number} [smoothing] Smoothing is 0.0 by default.
* Smooths values based on previous values.
* @return {Number} Volume level (between 0.0 and 1.0)
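*
* @example
*
* // a sketch: a circle that tracks mic volume
* // (assumes mic was created and start()ed in setup)
* function draw() {
* background(0);
* var level = mic.getLevel(0.8); // heavier smoothing than the default
* ellipse(width/2, height/2, 10 + level * 200, 10 + level * 200);
* }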
*/
p5.AudioIn.prototype.getLevel = function (smoothing) {
if (smoothing) {
this.amplitude.smoothing = smoothing;
}
return this.amplitude.getLevel();
};
/**
* Add input sources to the list of available sources.
*
* @private
*/
p5.AudioIn.prototype._gotSources = function (sourceInfos) {
for (var i = 0; i < sourceInfos.length; i++) {
var sourceInfo = sourceInfos[i];
if (sourceInfo.kind === 'audio') {
// add the inputs to inputSources
//p5sound.inputSources.push(sourceInfo);
return sourceInfo;
}
}
};
/**
* Set amplitude (volume) of a mic input between 0 and 1.0.
*
* @method amp
* @param {Number} vol between 0 and 1.0
* @param {Number} [time] ramp time (optional)
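*
* @example
*
* // a sketch (assumes mic is a started p5.AudioIn)
* mic.amp(0.2); // set input gain immediately
* mic.amp(1.0, 0.5); // or ramp to full gain over half a second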
*/
p5.AudioIn.prototype.amp = function (vol, t) {
if (t) {
var rampTime = t || 0;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
this.output.gain.setValueAtTime(currentVol, p5sound.audiocontext.currentTime);
this.output.gain.linearRampToValueAtTime(vol, rampTime + p5sound.audiocontext.currentTime);
} else {
this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime);
}
};
p5.AudioIn.prototype.listSources = function () {
console.log('listSources is deprecated - please use AudioIn.getSources');
console.log('input sources: ');
if (p5sound.inputSources.length > 0) {
return p5sound.inputSources;
} else {
return 'This browser does not support MediaStreamTrack.getSources()';
}
};
/**
* Chrome only. Returns a list of available input sources
* and allows the user to set the media source. Firefox allows
* the user to choose from input sources in the permissions dialogue
* instead of enumerating available sources and selecting one.
* Note: in order to have descriptive media names your page must be
* served over a secure (HTTPS) connection and the page should
* request user media before enumerating devices. Otherwise the device
* ID will be a long number that does not identify the device
* type. For example see
* https://simpl.info/getusermedia/sources/index.html vs.
* http://simpl.info/getusermedia/sources/index.html
*
* @method getSources
* @param {Function} callback a callback to handle the sources
* when they have been enumerated
* @example
*
* var audioGrab;
*
* function setup(){
* //new audioIn
* audioGrab = new p5.AudioIn();
*
* audioGrab.getSources(function(sourceList) {
* //print out the array of available sources
* console.log(sourceList);
* //set the source to the first item in the inputSources array
* audioGrab.setSource(0);
* });
* }
*
*/
p5.AudioIn.prototype.getSources = function (callback) {
if (typeof window.MediaStreamTrack.getSources === 'function') {
window.MediaStreamTrack.getSources(function (data) {
for (var i = 0, max = data.length; i < max; i++) {
var sourceInfo = data[i];
if (sourceInfo.kind === 'audio') {
// add the inputs to inputSources
p5sound.inputSources.push(sourceInfo);
}
}
callback(p5sound.inputSources);
});
} else {
console.log('This browser does not support MediaStreamTrack.getSources()');
}
};
/**
* Set the input source. Accepts a number representing a
* position in the array returned by getSources().
* This is only available in browsers that support
* MediaStreamTrack.getSources(). Instead, some browsers
* give users the option to set their own media source.
*
* @method setSource
* @param {number} num position of input source in the array
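*
* @example
*
* // a sketch: choose the second input if one exists
* // (only in browsers that support MediaStreamTrack.getSources)
* mic.getSources(function(sources) {
* if (sources.length > 1) {
* mic.setSource(1);
* }
* });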
*/
p5.AudioIn.prototype.setSource = function (num) {
// TO DO - set input by string or # (array position)
var self = this;
if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) {
// set the current source
self.currentSource = num;
console.log('set source to ' + p5sound.inputSources[self.currentSource].id);
} else {
console.log('unable to set input source');
}
};
// private method
p5.AudioIn.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.stop();
if (this.output) {
this.output.disconnect();
}
if (this.amplitude) {
this.amplitude.disconnect();
}
this.amplitude = null;
this.output = null;
};
}(master, errorHandler);
var filter;
filter = function () {
'use strict';
var p5sound = master;
/**
* A p5.Filter uses a Web Audio Biquad Filter to filter
* the frequency response of an input source. Inheriting
* classes include:
* * p5.LowPass - allows frequencies below
* the cutoff frequency to pass through, and attenuates
* frequencies above the cutoff.
* * p5.HighPass - the opposite of a lowpass
* filter.
* * p5.BandPass - allows a range of
* frequencies to pass through and attenuates the frequencies
* below and above this frequency range.
*
* The .res() method controls either the width of the
* bandpass, or the resonance of the low/highpass cutoff frequency.
*
* @class p5.Filter
* @constructor
* @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
* @return {Object} p5.Filter
* @example
*
* var fft, noise, filter;
*
* function setup() {
* fill(255, 40, 255);
*
* filter = new p5.BandPass();
*
* noise = new p5.Noise();
* // disconnect unfiltered noise,
* // and connect to filter
* noise.disconnect();
* noise.connect(filter);
* noise.start();
*
* fft = new p5.FFT();
* }
*
* function draw() {
* background(30);
*
* // set the BandPass frequency based on mouseX
* var freq = map(mouseX, 0, width, 20, 10000);
* filter.freq(freq);
* // give the filter a narrow band (lower res = wider bandpass)
* filter.res(50);
*
* // draw filtered spectrum
* var spectrum = fft.analyze();
* noStroke();
* for (var i = 0; i < spectrum.length; i++) {
* var x = map(i, 0, spectrum.length, 0, width);
* var h = -height + map(spectrum[i], 0, 255, height, 0);
* rect(x, height, width/spectrum.length, h);
* }
*
* isMouseOverCanvas();
* }
*
* function isMouseOverCanvas() {
* var mX = mouseX, mY = mouseY;
* if (mX > 0 && mX < width && mY < height && mY > 0) {
* noise.amp(0.5, 0.2);
* } else {
* noise.amp(0, 0.2);
* }
* }
*
*/
p5.Filter = function (type) {
this.ac = p5sound.audiocontext;
this.input = this.ac.createGain();
this.output = this.ac.createGain();
/**
* The p5.Filter is built with a
* Web Audio BiquadFilter Node.
*
* @property biquad
* @type {Object} Web Audio BiquadFilter Node
*/
this.biquad = this.ac.createBiquadFilter();
this.input.connect(this.biquad);
this.biquad.connect(this.output);
this.connect();
if (type) {
this.setType(type);
}
// add to the soundArray
p5sound.soundArray.push(this);
};
/**
* Filter an audio signal according to a set
* of filter parameters.
*
* @method process
* @param {Object} src An object that outputs audio
* @param {Number} [freq] Frequency in Hz, from 10 to 22050
* @param {Number} [res] Resonance/Width of the filter frequency
* from 0.001 to 1000
*/
p5.Filter.prototype.process = function (src, freq, res) {
src.connect(this.input);
this.set(freq, res);
};
/**
* Set the frequency and the resonance of the filter.
*
* @method set
* @param {Number} freq Frequency in Hz, from 10 to 22050
* @param {Number} res Resonance (Q) from 0.001 to 1000
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
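*
* @example
*
* // a sketch (assumes noise is a started p5.Noise)
* var filter = new p5.LowPass();
* noise.disconnect(); // route through the filter only
* noise.connect(filter);
* filter.set(800, 5); // 800 Hz cutoff with moderate resonance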
*/
p5.Filter.prototype.set = function (freq, res, time) {
if (freq) {
this.freq(freq, time);
}
if (res) {
this.res(res, time);
}
};
/**
* Set the filter frequency, in Hz, from 10 to 22050 (the range of
* human hearing, although in reality most people hear in a narrower
* range).
*
* @method freq
* @param {Number} freq Filter Frequency
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
* @return {Number} value Returns the current frequency value
*/
p5.Filter.prototype.freq = function (freq, time) {
var self = this;
var t = time || 0;
if (freq <= 0) {
freq = 1;
}
if (typeof freq === 'number') {
self.biquad.frequency.value = freq;
self.biquad.frequency.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
self.biquad.frequency.exponentialRampToValueAtTime(freq, this.ac.currentTime + 0.02 + t);
} else if (freq) {
freq.connect(this.biquad.frequency);
}
return self.biquad.frequency.value;
};
/**
* Controls either the width of a bandpass frequency,
* or the resonance of a low/highpass cutoff frequency.
*
* @method res
* @param {Number} res Resonance/Width of filter freq
* from 0.001 to 1000
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
* @return {Number} value Returns the current res value
*/
p5.Filter.prototype.res = function (res, time) {
var self = this;
var t = time || 0;
if (typeof res == 'number') {
self.biquad.Q.value = res;
self.biquad.Q.cancelScheduledValues(self.ac.currentTime + 0.01 + t);
self.biquad.Q.linearRampToValueAtTime(res, self.ac.currentTime + 0.02 + t);
} else if (res) {
res.connect(this.biquad.Q);
}
return self.biquad.Q.value;
};
/**
* Set the type of a p5.Filter. Possible types include:
* "lowpass" (default), "highpass", "bandpass",
* "lowshelf", "highshelf", "peaking", "notch",
* "allpass".
*
* @method setType
* @param {String} t the filter type
*/
p5.Filter.prototype.setType = function (t) {
this.biquad.type = t;
};
/**
* Set the output level of the filter.
*
* @method amp
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
p5.Filter.prototype.amp = function (vol, rampTime, tFromNow) {
rampTime = rampTime || 0;
tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(now);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + 0.001);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + 0.001);
};
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
p5.Filter.prototype.connect = function (unit) {
var u = unit || p5.soundOut.input;
this.output.connect(u);
};
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Filter.prototype.disconnect = function () {
this.output.disconnect();
};
p5.Filter.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.input.disconnect();
this.input = undefined;
this.output.disconnect();
this.output = undefined;
this.biquad.disconnect();
this.biquad = undefined;
};
/**
* Constructor: new p5.LowPass() Filter.
* This is the same as creating a p5.Filter and then calling
* its method setType('lowpass').
* See p5.Filter for methods.
*
* @method p5.LowPass
*/
p5.LowPass = function () {
p5.Filter.call(this, 'lowpass');
};
p5.LowPass.prototype = Object.create(p5.Filter.prototype);
/**
* Constructor: new p5.HighPass() Filter.
* This is the same as creating a p5.Filter and then calling
* its method setType('highpass').
* See p5.Filter for methods.
*
* @method p5.HighPass
*/
p5.HighPass = function () {
p5.Filter.call(this, 'highpass');
};
p5.HighPass.prototype = Object.create(p5.Filter.prototype);
/**
* Constructor: new p5.BandPass() Filter.
* This is the same as creating a p5.Filter and then calling
* its method setType('bandpass').
* See p5.Filter for methods.
*
* @method p5.BandPass
*/
p5.BandPass = function () {
p5.Filter.call(this, 'bandpass');
};
p5.BandPass.prototype = Object.create(p5.Filter.prototype);
}(master);
var delay;
delay = function () {
'use strict';
var p5sound = master;
var Filter = filter;
/**
* Delay is an echo effect. It processes an existing sound source,
* and outputs a delayed version of that sound. The p5.Delay can
* produce different effects depending on the delayTime, feedback,
* filter, and type. In the example below, a feedback of 0.5 will
* produce a looping delay that decreases in volume by
* 50% each repeat. A filter will cut out the high frequencies so
* that the delay does not sound as piercing as the original source.
*
* @class p5.Delay
* @constructor
* @return {Object} Returns a p5.Delay object
* @example
*
* var noise, env, delay;
*
* function setup() {
* background(0);
* noStroke();
* fill(255);
* textAlign(CENTER);
* text('click to play', width/2, height/2);
*
* noise = new p5.Noise('brown');
* noise.amp(0);
* noise.start();
*
* delay = new p5.Delay();
*
* // delay.process() accepts 4 parameters:
* // source, delayTime, feedback, filter frequency
* // play with these numbers!!
* delay.process(noise, .12, .7, 2300);
*
* // play the noise with an envelope,
* // a series of fades ( time / value pairs )
* env = new p5.Env(.01, 0.2, .2, .1);
* }
*
* // mouseClick triggers envelope
* function mouseClicked() {
* // is mouse over canvas?
* if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
* env.play(noise);
* }
* }
*
*/
p5.Delay = function () {
this.ac = p5sound.audiocontext;
this.input = this.ac.createGain();
this.output = this.ac.createGain();
this._split = this.ac.createChannelSplitter(2);
this._merge = this.ac.createChannelMerger(2);
this._leftGain = this.ac.createGain();
this._rightGain = this.ac.createGain();
/**
* The p5.Delay is built with two
* Web Audio Delay Nodes, one for each stereo channel.
*
* @property leftDelay
* @type {Object} Web Audio Delay Node
*/
this.leftDelay = this.ac.createDelay();
/**
* The p5.Delay is built with two
* Web Audio Delay Nodes, one for each stereo channel.
*
* @property rightDelay
* @type {Object} Web Audio Delay Node
*/
this.rightDelay = this.ac.createDelay();
this._leftFilter = new p5.Filter();
this._rightFilter = new p5.Filter();
this._leftFilter.disconnect();
this._rightFilter.disconnect();
this._leftFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
this._rightFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
this._leftFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
this._rightFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
// graph routing
this.input.connect(this._split);
this.leftDelay.connect(this._leftGain);
this.rightDelay.connect(this._rightGain);
this._leftGain.connect(this._leftFilter.input);
this._rightGain.connect(this._rightFilter.input);
this._merge.connect(this.output);
this.output.connect(p5.soundOut.input);
this._leftFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
this._rightFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
// default routing
this.setType(0);
this._maxDelay = this.leftDelay.delayTime.maxValue;
// add this p5.SoundFile to the soundArray
p5sound.soundArray.push(this);
};
/**
* Add delay to an audio signal according to a set
* of delay parameters.
*
* @method process
* @param {Object} src An object that outputs audio
* @param {Number} [delayTime] Time (in seconds) of the delay/echo.
* Some browsers limit delayTime to
* 1 second.
* @param {Number} [feedback] sends the delay back through itself
* in a loop that decreases in volume
* each time.
* @param {Number} [lowPass] Cutoff frequency. Only frequencies
* below the lowPass will be part of the
* delay.
*/
p5.Delay.prototype.process = function (src, _delayTime, _feedback, _filter) {
var feedback = _feedback || 0;
var delayTime = _delayTime || 0;
if (feedback >= 1) {
throw new Error('Feedback value will force a positive feedback loop.');
}
if (delayTime >= this._maxDelay) {
throw new Error('Delay Time exceeds maximum delay time of ' + this._maxDelay + ' second.');
}
src.connect(this.input);
this.leftDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
this.rightDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
this._leftGain.gain.setValueAtTime(feedback, this.ac.currentTime);
this._rightGain.gain.setValueAtTime(feedback, this.ac.currentTime);
if (_filter) {
this._leftFilter.freq(_filter);
this._rightFilter.freq(_filter);
}
};
/**
* Set the delay (echo) time, in seconds. Usually this value will be
* a floating point number between 0.0 and 1.0.
*
* @method delayTime
* @param {Number} delayTime Time (in seconds) of the delay
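*
* @example
*
* // a sketch (assumes delay is already processing a source)
* delay.delayTime(0.3); // a 300 millisecond echo
* // delayTime also accepts a modulating audio node in place of a number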
*/
p5.Delay.prototype.delayTime = function (t) {
// if t is an audio node...
if (typeof t !== 'number') {
t.connect(this.leftDelay.delayTime);
t.connect(this.rightDelay.delayTime);
} else {
this.leftDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
this.rightDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
this.leftDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
this.rightDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
}
};
/**
* Feedback occurs when Delay sends its signal back through its input
* in a loop. The feedback amount determines how much signal to send each
* time through the loop. A feedback greater than 1.0 is not desirable because
* it will increase the overall output each time through the loop,
* creating an infinite feedback loop.
*
* @method feedback
* @param {Number|Object} feedback 0.0 to 1.0, or an object such as an
* Oscillator that can be used to
* modulate this param
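*
* @example
*
* // a sketch (assumes delay is already processing a source)
* delay.feedback(0.7); // long, slowly decaying repeats
* delay.feedback(0.1); // a single quick slapback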
*/
p5.Delay.prototype.feedback = function (f) {
// if f is an audio node...
if (typeof f !== 'number') {
f.connect(this._leftGain.gain);
f.connect(this._rightGain.gain);
} else if (f >= 1) {
throw new Error('Feedback value will force a positive feedback loop.');
} else {
this._leftGain.gain.exponentialRampToValueAtTime(f, this.ac.currentTime);
this._rightGain.gain.exponentialRampToValueAtTime(f, this.ac.currentTime);
}
};
/**
* Set a lowpass filter frequency for the delay. A lowpass filter
* will cut off any frequencies higher than the filter frequency.
*
* @method filter
* @param {Number|Object} cutoffFreq A lowpass filter will cut off any
* frequencies higher than the filter frequency.
* @param {Number|Object} res Resonance of the filter frequency
* cutoff, or an object (i.e. a p5.Oscillator)
* that can be used to modulate this parameter.
* High numbers (i.e. 15) will produce a resonance,
* low numbers (i.e. .2) will produce a slope.
*/
p5.Delay.prototype.filter = function (freq, q) {
this._leftFilter.set(freq, q);
this._rightFilter.set(freq, q);
};
/**
* Choose a preset type of delay. 'pingPong' bounces the signal
* from the left to the right channel to produce a stereo effect.
* Any other parameter will revert to the default delay setting.
*
* @method setType
* @param {String|Number} type 'pingPong' (1) or 'default' (0)
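*
* @example
*
* // a sketch (assumes delay is already processing a stereo source)
* delay.setType('pingPong'); // echoes alternate between left and right
* delay.setType(0); // back to the default routing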
*/
p5.Delay.prototype.setType = function (t) {
if (t === 1) {
t = 'pingPong';
}
this._split.disconnect();
this._leftFilter.disconnect();
this._rightFilter.disconnect();
this._split.connect(this.leftDelay, 0);
this._split.connect(this.rightDelay, 1);
switch (t) {
case 'pingPong':
this._rightFilter.setType(this._leftFilter.biquad.type);
this._leftFilter.output.connect(this._merge, 0, 0);
this._rightFilter.output.connect(this._merge, 0, 1);
this._leftFilter.output.connect(this.rightDelay);
this._rightFilter.output.connect(this.leftDelay);
break;
default:
this._leftFilter.output.connect(this._merge, 0, 0);
this._leftFilter.output.connect(this._merge, 0, 1);
this._leftFilter.output.connect(this.leftDelay);
this._leftFilter.output.connect(this.rightDelay);
}
};
/**
* Set the output level of the delay effect.
*
* @method amp
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
p5.Delay.prototype.amp = function (vol, rampTime, tFromNow) {
rampTime = rampTime || 0;
tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(now);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + 0.001);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + 0.001);
};
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
p5.Delay.prototype.connect = function (unit) {
var u = unit || p5.soundOut.input;
this.output.connect(u);
};
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Delay.prototype.disconnect = function () {
this.output.disconnect();
};
p5.Delay.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.input.disconnect();
this.output.disconnect();
this._split.disconnect();
this._leftFilter.disconnect();
this._rightFilter.disconnect();
this._merge.disconnect();
this._leftGain.disconnect();
this._rightGain.disconnect();
this.leftDelay.disconnect();
this.rightDelay.disconnect();
this.input = undefined;
this.output = undefined;
this._split = undefined;
this._leftFilter = undefined;
this._rightFilter = undefined;
this._merge = undefined;
this._leftGain = undefined;
this._rightGain = undefined;
this.leftDelay = undefined;
this.rightDelay = undefined;
};
}(master, filter);
var reverb;
reverb = function () {
'use strict';
var p5sound = master;
var CustomError = errorHandler;
/**
* Reverb adds depth to a sound through a large number of decaying
* echoes. It creates the perception that sound is occurring in a
* physical space. The p5.Reverb has parameters for Time (how long the
* reverb lasts) and decayRate (how much the sound decays with each echo)
* that can be set with the .set() or .process() methods. The p5.Convolver
* extends p5.Reverb allowing you to recreate the sound of actual physical
* spaces through convolution.
*
* @class p5.Reverb
* @constructor
* @example
*
* var soundFile, reverb;
* function preload() {
* soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
* function setup() {
* reverb = new p5.Reverb();
* soundFile.disconnect(); // so we'll only hear reverb...
*
* // connect soundFile to reverb, process w/
* // 3 second reverbTime, decayRate of 2%
* reverb.process(soundFile, 3, 2);
* soundFile.play();
* }
*
*/
p5.Reverb = function () {
this.ac = p5sound.audiocontext;
this.convolverNode = this.ac.createConvolver();
this.input = this.ac.createGain();
this.output = this.ac.createGain();
// otherwise, Safari distorts
this.input.gain.value = 0.5;
this.input.connect(this.convolverNode);
this.convolverNode.connect(this.output);
// default params
this._seconds = 3;
this._decay = 2;
this._reverse = false;
this._buildImpulse();
this.connect();
p5sound.soundArray.push(this);
};
/**
* Connect a source to the reverb, and assign reverb parameters.
*
* @method process
* @param {Object} src p5.sound / Web Audio object with a sound
* output.
* @param {Number} [seconds] Duration of the reverb, in seconds.
* Min: 0, Max: 10. Defaults to 3.
* @param {Number} [decayRate] Percentage of decay with each echo.
* Min: 0, Max: 100. Defaults to 2.
* @param {Boolean} [reverse] Play the reverb backwards or forwards.
*/
p5.Reverb.prototype.process = function (src, seconds, decayRate, reverse) {
src.connect(this.input);
var rebuild = false;
if (seconds) {
this._seconds = seconds;
rebuild = true;
}
if (decayRate) {
this._decay = decayRate;
}
if (reverse) {
this._reverse = reverse;
}
if (rebuild) {
this._buildImpulse();
}
};
/**
* Set the reverb settings. Similar to .process(), but without
* assigning a new input.
*
* @method set
* @param {Number} [seconds] Duration of the reverb, in seconds.
* Min: 0, Max: 10. Defaults to 3.
* @param {Number} [decayRate] Percentage of decay with each echo.
* Min: 0, Max: 100. Defaults to 2.
* @param {Boolean} [reverse] Play the reverb backwards or forwards.
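*
* @example
*
* // a sketch (assumes reverb is already processing a source)
* reverb.set(6, 10); // a six-second tail that decays 10% per echo
* reverb.set(1, 50, true); // a short, reversed reverb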
*/
p5.Reverb.prototype.set = function (seconds, decayRate, reverse) {
var rebuild = false;
if (seconds) {
this._seconds = seconds;
rebuild = true;
}
if (decayRate) {
this._decay = decayRate;
}
if (reverse) {
this._reverse = reverse;
}
if (rebuild) {
this._buildImpulse();
}
};
/**
* Set the output level of the reverb effect.
*
* @method amp
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
p5.Reverb.prototype.amp = function (vol, rampTime, tFromNow) {
rampTime = rampTime || 0;
tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(now);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + 0.001);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + 0.001);
};
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
p5.Reverb.prototype.connect = function (unit) {
var u = unit || p5.soundOut.input;
this.output.connect(u.input ? u.input : u);
};
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Reverb.prototype.disconnect = function () {
this.output.disconnect();
};
/**
* Inspired by Simple Reverb by Jordan Santell
* https://github.com/web-audio-components/simple-reverb/blob/master/index.js
*
* Utility function for building an impulse response
* based on the module parameters.
*
* @private
*/
p5.Reverb.prototype._buildImpulse = function () {
var rate = this.ac.sampleRate;
var length = rate * this._seconds;
var decay = this._decay;
var impulse = this.ac.createBuffer(2, length, rate);
var impulseL = impulse.getChannelData(0);
var impulseR = impulse.getChannelData(1);
var n, i;
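// fill both channels with white noise shaped by a power-curve fade,
// (1 - n/length)^decay; reversing n plays the tail backwards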
for (i = 0; i < length; i++) {
n = this._reverse ? length - i : i;
impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
}
this.convolverNode.buffer = impulse;
};
p5.Reverb.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
if (this.convolverNode) {
this.convolverNode.buffer = null;
this.convolverNode = null;
}
if (typeof this.output !== 'undefined') {
this.output.disconnect();
this.output = null;
}
if (typeof this.panner !== 'undefined') {
this.panner.disconnect();
this.panner = null;
}
};
// =======================================================================
// *** p5.Convolver ***
// =======================================================================
/**
* p5.Convolver extends p5.Reverb. It can emulate the sound of real
* physical spaces through a process called
* convolution.
*
* Convolution multiplies any audio input by an "impulse response"
* to simulate the dispersion of sound over time. The impulse response is
* generated from an audio file that you provide. One way to
* generate an impulse response is to pop a balloon in a reverberant space
* and record the echo. Convolution can also be used to experiment with
* sound.
*
* Use the method createConvolver(path) to instantiate a
* p5.Convolver with a path to your impulse response audio file.
*
* @class p5.Convolver
* @constructor
* @param {String} path path to a sound file
* @param {Function} [callback] function to call when loading succeeds
* @param {Function} [errorCallback] function to call if loading fails.
* This function will receive an error or
* XMLHttpRequest object with information
* about what went wrong.
* @example
*
* var cVerb, sound;
* function preload() {
* // We have both MP3 and OGG versions of all sound assets
* soundFormats('ogg', 'mp3');
*
* // Try replacing 'bx-spring' with other soundfiles like
* // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
* cVerb = createConvolver('assets/bx-spring.mp3');
*
* // Try replacing 'Damscray_DancingTiger' with
* // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
* sound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
* function setup() {
* // disconnect from master output...
* sound.disconnect();
*
* // ...and process with cVerb
* // so that we only hear the convolution
* cVerb.process(sound);
*
* sound.play();
* }
*
*/
p5.Convolver = function (path, callback, errorCallback) {
this.ac = p5sound.audiocontext;
/**
* Internally, the p5.Convolver uses a
* Web Audio Convolver Node.
*
* @property convolverNode
* @type {Object} Web Audio Convolver Node
*/
this.convolverNode = this.ac.createConvolver();
this.input = this.ac.createGain();
this.output = this.ac.createGain();
// otherwise, Safari distorts
this.input.gain.value = 0.5;
this.input.connect(this.convolverNode);
this.convolverNode.connect(this.output);
if (path) {
this.impulses = [];
this._loadBuffer(path, callback, errorCallback);
} else {
// parameters
this._seconds = 3;
this._decay = 2;
this._reverse = false;
this._buildImpulse();
}
this.connect();
p5sound.soundArray.push(this);
};
p5.Convolver.prototype = Object.create(p5.Reverb.prototype);
p5.prototype.registerPreloadMethod('createConvolver', p5.prototype);
/**
* Create a p5.Convolver. Accepts a path to a soundfile
* that will be used to generate an impulse response.
*
* @method createConvolver
* @param {String} path path to a sound file
* @param {Function} [callback] function to call if loading is successful.
* The object will be passed in as the argument
* to the callback function.
* @param {Function} [errorCallback] function to call if loading is not successful.
* A custom error will be passed in as the argument
* to the callback function.
* @return {p5.Convolver}
* @example
*
* var cVerb, sound;
* function preload() {
* // We have both MP3 and OGG versions of all sound assets
* soundFormats('ogg', 'mp3');
*
* // Try replacing 'bx-spring' with other soundfiles like
* // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
* cVerb = createConvolver('assets/bx-spring.mp3');
*
* // Try replacing 'Damscray_DancingTiger' with
* // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
* sound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
* function setup() {
* // disconnect from master output...
* sound.disconnect();
*
* // ...and process with cVerb
* // so that we only hear the convolution
* cVerb.process(sound);
*
* sound.play();
* }
*
*/
p5.prototype.createConvolver = function (path, callback, errorCallback) {
// if loading locally without a server
if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
var cReverb = new p5.Convolver(path, callback, errorCallback);
cReverb.impulses = [];
return cReverb;
};
/**
* Private method to load a buffer as an Impulse Response,
* assign it to the convolverNode, and add to the Array of .impulses.
*
* @param {String} path
* @param {Function} callback
* @param {Function} errorCallback
* @private
*/
p5.Convolver.prototype._loadBuffer = function (path, callback, errorCallback) {
var path = p5.prototype._checkFileFormats(path);
var self = this;
var errorTrace = new Error().stack;
var ac = p5.prototype.getAudioContext();
var request = new XMLHttpRequest();
request.open('GET', path, true);
request.responseType = 'arraybuffer';
request.onload = function () {
if (request.status == 200) {
// on success loading file:
ac.decodeAudioData(request.response, function (buff) {
var buffer = {};
var chunks = path.split('/');
buffer.name = chunks[chunks.length - 1];
buffer.audioBuffer = buff;
self.impulses.push(buffer);
self.convolverNode.buffer = buffer.audioBuffer;
if (callback) {
callback(buffer);
}
}, // error decoding buffer. "e" is undefined in Chrome 11/22/2015
function (e) {
var err = new CustomError('decodeAudioData', errorTrace, self.url);
var msg = 'AudioContext error at decodeAudioData for ' + self.url;
if (errorCallback) {
err.msg = msg;
errorCallback(err);
} else {
console.error(msg + '\n The error stack trace includes: \n' + err.stack);
}
});
} else {
var err = new CustomError('loadConvolver', errorTrace, self.url);
var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
if (errorCallback) {
err.message = msg;
errorCallback(err);
} else {
console.error(msg + '\n The error stack trace includes: \n' + err.stack);
}
}
};
// if there is another error, aside from 404...
request.onerror = function (e) {
var err = new CustomError('loadConvolver', errorTrace, self.url);
var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
if (errorCallback) {
err.message = msg;
errorCallback(err);
} else {
console.error(msg + '\n The error stack trace includes: \n' + err.stack);
}
};
request.send();
};
p5.Convolver.prototype.set = null;
/**
* Connect a source to the reverb, and assign reverb parameters.
*
* @method process
* @param {Object} src p5.sound / Web Audio object with a sound
* output.
* @example
*
* var cVerb, sound;
* function preload() {
* soundFormats('ogg', 'mp3');
*
* cVerb = createConvolver('assets/concrete-tunnel.mp3');
*
* sound = loadSound('assets/beat.mp3');
* }
*
* function setup() {
* // disconnect from master output...
* sound.disconnect();
*
* // ...and process with (i.e. connect to) cVerb
* // so that we only hear the convolution
* cVerb.process(sound);
*
* sound.play();
* }
*
*/
p5.Convolver.prototype.process = function (src) {
src.connect(this.input);
};
/**
* If you load multiple impulse files using the .addImpulse method,
* they will be stored as Objects in this Array. Toggle between them
* with the toggleImpulse(id) method.
*
* @property impulses
* @type {Array} Array of Web Audio Buffers
*/
p5.Convolver.prototype.impulses = [];
/**
* Load and assign a new Impulse Response to the p5.Convolver.
* The impulse is added to the .impulses array. Previous
* impulses can be accessed with the .toggleImpulse(id)
* method.
*
* @method addImpulse
* @param {String} path path to a sound file
* @param {Function} callback function (optional)
* @param {Function} errorCallback function (optional)
*/
p5.Convolver.prototype.addImpulse = function (path, callback, errorCallback) {
// if loading locally without a server
if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
this._loadBuffer(path, callback, errorCallback);
};
/**
* Similar to .addImpulse, except that the .impulses
* Array is reset to save memory. A new .impulses
* array is created with this impulse as the only item.
*
* @method resetImpulse
* @param {String} path path to a sound file
* @param {Function} callback function (optional)
* @param {Function} errorCallback function (optional)
*/
p5.Convolver.prototype.resetImpulse = function (path, callback, errorCallback) {
// if loading locally without a server
if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
this.impulses = [];
this._loadBuffer(path, callback, errorCallback);
};
/**
* If you have used .addImpulse() to add multiple impulses
* to a p5.Convolver, then you can use this method to toggle between
* the items in the .impulses Array. Accepts a parameter
* to identify which impulse you wish to use, identified either by its
* original filename (String) or by its position in the
* .impulses Array (Number).
* You can access the objects in the .impulses Array directly. Each
* Object has two attributes: an .audioBuffer (type: Web Audio
* AudioBuffer) and a .name, a String that corresponds
* with the original filename.
*
* @method toggleImpulse
* @param {String|Number} id Identify the impulse by its original filename
* (String), or by its position in the
* .impulses Array (Number).
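*
* @example
*
* // a sketch (assumes cVerb was created with one impulse, e.g.
* // 'assets/bx-spring.mp3', and a second impulse is added below)
* cVerb.addImpulse('assets/small-plate.mp3');
* // switch by position in the .impulses Array...
* cVerb.toggleImpulse(0);
* // ...or by original filename
* cVerb.toggleImpulse('small-plate.mp3');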
*/
p5.Convolver.prototype.toggleImpulse = function (id) {
if (typeof id === 'number' && id < this.impulses.length) {
this.convolverNode.buffer = this.impulses[id].audioBuffer;
}
if (typeof id === 'string') {
for (var i = 0; i < this.impulses.length; i++) {
if (this.impulses[i].name === id) {
this.convolverNode.buffer = this.impulses[i].audioBuffer;
break;
}
}
}
};
p5.Convolver.prototype.dispose = function () {
// remove all the Impulse Response buffers
for (var i in this.impulses) {
this.impulses[i] = null;
}
this.convolverNode.disconnect();
this.convolverNode = null;
if (typeof this.output !== 'undefined') {
this.output.disconnect();
this.output = null;
}
if (typeof this.panner !== 'undefined') {
this.panner.disconnect();
this.panner = null;
}
};
}(master, errorHandler, sndcore);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_core_TimelineState;
Tone_core_TimelineState = function (Tone) {
'use strict';
Tone.TimelineState = function (initial) {
Tone.Timeline.call(this);
this._initial = initial;
};
Tone.extend(Tone.TimelineState, Tone.Timeline);
Tone.TimelineState.prototype.getStateAtTime = function (time) {
var event = this.getEvent(time);
if (event !== null) {
return event.state;
} else {
return this._initial;
}
};
Tone.TimelineState.prototype.setStateAtTime = function (state, time) {
this.addEvent({
'state': state,
'time': this.toSeconds(time)
});
};
return Tone.TimelineState;
}(Tone_core_Tone, Tone_core_Timeline);
/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
var Tone_core_Clock;
Tone_core_Clock = function (Tone) {
'use strict';
Tone.Clock = function () {
var options = this.optionsObject(arguments, [
'callback',
'frequency'
], Tone.Clock.defaults);
this.callback = options.callback;
this._lookAhead = 'auto';
this._computedLookAhead = 1 / 60;
this._threshold = 0.5;
this._nextTick = -1;
this._lastUpdate = 0;
this._loopID = -1;
this.frequency = new Tone.TimelineSignal(options.frequency, Tone.Type.Frequency);
this.ticks = 0;
this._state = new Tone.TimelineState(Tone.State.Stopped);
this._boundLoop = this._loop.bind(this);
this._readOnly('frequency');
this._loop();
};
Tone.extend(Tone.Clock);
Tone.Clock.defaults = {
'callback': Tone.noOp,
'frequency': 1,
'lookAhead': 'auto'
};
Object.defineProperty(Tone.Clock.prototype, 'state', {
get: function () {
return this._state.getStateAtTime(this.now());
}
});
Object.defineProperty(Tone.Clock.prototype, 'lookAhead', {
get: function () {
return this._lookAhead;
},
set: function (val) {
if (val === 'auto') {
this._lookAhead = 'auto';
} else {
this._lookAhead = this.toSeconds(val);
}
}
});
Tone.Clock.prototype.start = function (time, offset) {
time = this.toSeconds(time);
if (this._state.getStateAtTime(time) !== Tone.State.Started) {
this._state.addEvent({
'state': Tone.State.Started,
'time': time,
'offset': offset
});
}
return this;
};
Tone.Clock.prototype.stop = function (time) {
time = this.toSeconds(time);
if (this._state.getStateAtTime(time) !== Tone.State.Stopped) {
this._state.setStateAtTime(Tone.State.Stopped, time);
}
return this;
};
Tone.Clock.prototype.pause = function (time) {
time = this.toSeconds(time);
if (this._state.getStateAtTime(time) === Tone.State.Started) {
this._state.setStateAtTime(Tone.State.Paused, time);
}
return this;
};
Tone.Clock.prototype._loop = function (time) {
this._loopID = requestAnimationFrame(this._boundLoop);
if (this._lookAhead === 'auto') {
if (!this.isUndef(time)) {
var diff = (time - this._lastUpdate) / 1000;
this._lastUpdate = time;
if (diff < this._threshold) {
this._computedLookAhead = (9 * this._computedLookAhead + diff) / 10;
}
}
} else {
this._computedLookAhead = this._lookAhead;
}
var now = this.now();
var lookAhead = this._computedLookAhead * 2;
var event = this._state.getEvent(now + lookAhead);
var state = Tone.State.Stopped;
if (event) {
state = event.state;
if (this._nextTick === -1 && state === Tone.State.Started) {
this._nextTick = event.time;
if (!this.isUndef(event.offset)) {
this.ticks = event.offset;
}
}
}
if (state === Tone.State.Started) {
while (now + lookAhead > this._nextTick) {
if (now > this._nextTick + this._threshold) {
this._nextTick = now;
}
var tickTime = this._nextTick;
this._nextTick += 1 / this.frequency.getValueAtTime(this._nextTick);
this.callback(tickTime);
this.ticks++;
}
} else if (state === Tone.State.Stopped) {
this._nextTick = -1;
this.ticks = 0;
}
};
Tone.Clock.prototype.getStateAtTime = function (time) {
return this._state.getStateAtTime(time);
};
Tone.Clock.prototype.dispose = function () {
cancelAnimationFrame(this._loopID);
Tone.TimelineState.prototype.dispose.call(this);
this._writable('frequency');
this.frequency.dispose();
this.frequency = null;
this._boundLoop = Tone.noOp;
this._nextTick = Infinity;
this.callback = null;
this._state.dispose();
this._state = null;
};
return Tone.Clock;
}(Tone_core_Tone, Tone_signal_TimelineSignal);
var metro;
metro = function () {
'use strict';
var p5sound = master;
// requires the Tone.js library's Clock (MIT license, Yotam Mann)
// https://github.com/TONEnoTONE/Tone.js/
var Clock = Tone_core_Clock;
var ac = p5sound.audiocontext;
p5.Metro = function () {
this.clock = new Clock({ 'callback': this.ontick.bind(this) });
this.syncedParts = [];
this.bpm = 120;
// gets overridden by p5.Part
this._init();
this.tickCallback = function () {
};
};
var prevTick = 0;
var tatumTime = 0;
p5.Metro.prototype.ontick = function (tickTime) {
var elapsedTime = tickTime - prevTick;
var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;
if (elapsedTime - tatumTime <= -0.02) {
return;
} else {
prevTick = tickTime;
// for all of the active things on the metro:
for (var i in this.syncedParts) {
var thisPart = this.syncedParts[i];
if (!thisPart.isPlaying)
return;
thisPart.incrementStep(secondsFromNow);
// each synced source keeps track of its own beat number
for (var j in thisPart.phrases) {
var thisPhrase = thisPart.phrases[j];
var phraseArray = thisPhrase.sequence;
var bNum = this.metroTicks % phraseArray.length;
if (phraseArray[bNum] !== 0 && (this.metroTicks < phraseArray.length || !thisPhrase.looping)) {
thisPhrase.callback(secondsFromNow, phraseArray[bNum]);
}
}
}
this.metroTicks += 1;
this.tickCallback(secondsFromNow);
}
};
p5.Metro.prototype.setBPM = function (bpm, rampTime) {
var beatTime = 60 / (bpm * this.tatums);
var now = p5sound.audiocontext.currentTime;
tatumTime = beatTime;
rampTime = rampTime || 0;
this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);
this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);
this.bpm = bpm;
};
p5.Metro.prototype.getBPM = function () {
// the clock's frequency signal holds the current (possibly ramping) BPM
return this.clock.frequency.value;
};
p5.Metro.prototype._init = function () {
this.metroTicks = 0;
};
// clear existing synced parts, add only this one
p5.Metro.prototype.resetSync = function (part) {
this.syncedParts = [part];
};
// push a new synced part to the array
p5.Metro.prototype.pushSync = function (part) {
this.syncedParts.push(part);
};
p5.Metro.prototype.start = function (timeFromNow) {
var t = timeFromNow || 0;
var now = p5sound.audiocontext.currentTime;
this.clock.start(now + t);
this.setBPM(this.bpm);
};
p5.Metro.prototype.stop = function (timeFromNow) {
var t = timeFromNow || 0;
var now = p5sound.audiocontext.currentTime;
// this Clock implementation has no _oscillator, so stop unconditionally
this.clock.stop(now + t);
};
p5.Metro.prototype.beatLength = function (tatums) {
this.tatums = 1 / tatums / 4;
};
}(master, Tone_core_Clock);
var looper;
looper = function () {
'use strict';
var p5sound = master;
var bpm = 120;
/**
* Set the global tempo, in beats per minute, for all
* p5.Parts. This method will impact all active p5.Parts.
*
* @method setBPM
* @param {Number} BPM Beats Per Minute
* @param {Number} [rampTime] Seconds from now
*/
p5.prototype.setBPM = function (BPM, rampTime) {
bpm = BPM;
for (var i in p5sound.parts) {
p5sound.parts[i].setBPM(bpm, rampTime);
}
};
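/*
* A minimal sketch for the global setBPM, assuming one or more p5.Parts
* (such as myPart from the p5.Phrase example below) are already playing:
*
* function mousePressed() {
*   // ramp every active part to 180 BPM over 2 seconds
*   setBPM(180, 2);
* }
*/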
/**
* A phrase is a pattern of musical events over time, i.e.
* a series of notes and rests.
*
* Phrases must be added to a p5.Part for playback, and
* each part can play multiple phrases at the same time.
* For example, one Phrase might be a kick drum, another
* could be a snare, and another could be the bassline.
*
* The first parameter is a name so that the phrase can be
* modified or deleted later. The callback is a function that
* this phrase will call at every step—for example it might be
* called playNote(value){}. The array determines
* which value is passed into the callback at each step of the
* phrase. It can contain numbers, an object with multiple numbers,
* or a zero (0), which indicates a rest, so the callback
* won't be called.
*
* @class p5.Phrase
* @constructor
* @param {String} name Name so that you can access the Phrase.
* @param {Function} callback The name of a function that this phrase
* will call. Typically it will play a sound,
* and accept two parameters: a time at which
* to play the sound (in seconds from now),
* and a value from the sequence array. The
* time should be passed into the play() or
* start() method to ensure precision.
* @param {Array} sequence Array of values to pass into the callback
* at each step of the phrase.
* @example
*
* var mySound, myPhrase, myPart;
* var pattern = [1,0,0,2,0,2,0,0];
* var msg = 'click to play';
*
* function preload() {
* mySound = loadSound('assets/beatbox.mp3');
* }
*
* function setup() {
* noStroke();
* fill(255);
* textAlign(CENTER);
* masterVolume(0.1);
*
* myPhrase = new p5.Phrase('bbox', makeSound, pattern);
* myPart = new p5.Part();
* myPart.addPhrase(myPhrase);
* myPart.setBPM(60);
* }
*
* function draw() {
* background(0);
* text(msg, width/2, height/2);
* }
*
* function makeSound(time, playbackRate) {
* mySound.rate(playbackRate);
* mySound.play(time);
* }
*
* function mouseClicked() {
* if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
* myPart.start();
* msg = 'playing pattern';
* }
* }
*
*
*/
p5.Phrase = function (name, callback, sequence) {
this.phraseStep = 0;
this.name = name;
this.callback = callback;
/**
* Array of values to pass into the callback
* at each step of the phrase. Depending on the callback
* function's requirements, these values may be numbers,
* strings, or an object with multiple parameters.
* Zero (0) indicates a rest.
*
* @property sequence
* @type {Array}
*/
this.sequence = sequence;
};
/**
* A p5.Part plays back one or more p5.Phrases. Instantiate a part
* with steps and tatums. By default, each step represents 1/16th note.
*
* See p5.Phrase for more about musical timing.
*
* @class p5.Part
* @constructor
* @param {Number} [steps] Steps in the part
* @param {Number} [tatums] Divisions of a beat (default is 1/16, a sixteenth note)
* @example
*
* var box, drum, myPart;
* var boxPat = [1,0,0,2,0,2,0,0];
* var drumPat = [0,1,1,0,2,0,1,0];
* var msg = 'click to play';
*
* function preload() {
* box = loadSound('assets/beatbox.mp3');
* drum = loadSound('assets/drum.mp3');
* }
*
* function setup() {
* noStroke();
* fill(255);
* textAlign(CENTER);
* masterVolume(0.1);
*
* var boxPhrase = new p5.Phrase('box', playBox, boxPat);
* var drumPhrase = new p5.Phrase('drum', playDrum, drumPat);
* myPart = new p5.Part();
* myPart.addPhrase(boxPhrase);
* myPart.addPhrase(drumPhrase);
* myPart.setBPM(60);
* masterVolume(0.1);
* }
*
* function draw() {
* background(0);
* text(msg, width/2, height/2);
* }
*
* function playBox(time, playbackRate) {
* box.rate(playbackRate);
* box.play(time);
* }
*
* function playDrum(time, playbackRate) {
* drum.rate(playbackRate);
* drum.play(time);
* }
*
* function mouseClicked() {
* if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
* myPart.start();
* msg = 'playing part';
* }
* }
*
*/
p5.Part = function (steps, bLength) {
this.length = steps || 0;
// how many beats
this.partStep = 0;
this.phrases = [];
this.isPlaying = false;
this.noLoop();
this.tatums = bLength || 0.0625;
// defaults to 1/16 (a sixteenth note)
this.metro = new p5.Metro();
this.metro._init();
this.metro.beatLength(this.tatums);
this.metro.setBPM(bpm);
p5sound.parts.push(this);
this.callback = function () {
};
};
/**
* Set the tempo of this part, in Beats Per Minute.
*
* @method setBPM
* @param {Number} BPM Beats Per Minute
* @param {Number} [rampTime] Seconds from now
*/
p5.Part.prototype.setBPM = function (tempo, rampTime) {
this.metro.setBPM(tempo, rampTime);
};
/**
* Returns the Beats Per Minute of this part.
*
* @method getBPM
* @return {Number}
*/
p5.Part.prototype.getBPM = function () {
return this.metro.getBPM();
};
/**
* Start playback of this part. It will play
* through all of its phrases at a speed
* determined by setBPM.
*
* @method start
* @param {Number} [time] seconds from now
*/
p5.Part.prototype.start = function (time) {
if (!this.isPlaying) {
this.isPlaying = true;
this.metro.resetSync(this);
var t = time || 0;
this.metro.start(t);
}
};
/**
* Loop playback of this part. It will begin
* looping through all of its phrases at a speed
* determined by setBPM.
*
* @method loop
* @param {Number} [time] seconds from now
*/
p5.Part.prototype.loop = function (time) {
this.looping = true;
// reset onended function
this.onended = function () {
this.partStep = 0;
};
var t = time || 0;
this.start(t);
};
/**
* Tell the part to stop looping.
*
* @method noLoop
*/
p5.Part.prototype.noLoop = function () {
this.looping = false;
// reset onended function
this.onended = function () {
this.stop();
};
};
/**
* Stop the part and cue it to step 0.
*
* @method stop
* @param {Number} [time] seconds from now
*/
p5.Part.prototype.stop = function (time) {
this.partStep = 0;
this.pause(time);
};
/**
* Pause the part. Playback will resume
* from the current step.
*
* @method pause
* @param {Number} time seconds from now
*/
p5.Part.prototype.pause = function (time) {
this.isPlaying = false;
var t = time || 0;
this.metro.stop(t);
};
/**
* Add a p5.Phrase to this Part.
*
* @method addPhrase
* @param {p5.Phrase} phrase reference to a p5.Phrase
*/
p5.Part.prototype.addPhrase = function (name, callback, array) {
var p;
if (arguments.length === 3) {
p = new p5.Phrase(name, callback, array);
} else if (arguments[0] instanceof p5.Phrase) {
p = arguments[0];
} else {
throw new Error('invalid input. addPhrase accepts name, callback, array or a p5.Phrase');
}
this.phrases.push(p);
// reset the length if phrase is longer than part's existing length
if (p.sequence.length > this.length) {
this.length = p.sequence.length;
}
};
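/*
* addPhrase accepts either a ready-made p5.Phrase, or the same three
* arguments as the p5.Phrase constructor. A sketch of both forms,
* assuming playKick and playSnare are callbacks like makeSound above:
*
* var part = new p5.Part();
* // form 1: pass an existing p5.Phrase
* part.addPhrase(new p5.Phrase('kick', playKick, [1, 0, 0, 0]));
* // form 2: pass name, callback, and sequence directly
* part.addPhrase('snare', playSnare, [0, 0, 1, 0]);
*/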
/**
* Remove a phrase from this part, based on the name it was
* given when it was created.
*
* @method removePhrase
* @param {String} phraseName
*/
p5.Part.prototype.removePhrase = function (name) {
for (var i in this.phrases) {
if (this.phrases[i].name === name) {
this.phrases.splice(i, 1);
}
}
};
/**
* Get a phrase from this part, based on the name it was
* given when it was created. Now you can modify its array.
*
* @method getPhrase
* @param {String} phraseName
* @return {p5.Phrase}
*/
p5.Part.prototype.getPhrase = function (name) {
for (var i in this.phrases) {
if (this.phrases[i].name === name) {
return this.phrases[i];
}
}
};
/**
* Find all phrases in this part with a given name, and replace
* each of their sequence arrays with the new sequence.
*
* @method replaceSequence
* @param {String} phraseName
* @param {Array} sequence Array of values to pass into the callback
* at each step of the phrase.
*/
p5.Part.prototype.replaceSequence = function (name, array) {
for (var i in this.phrases) {
if (this.phrases[i].name === name) {
this.phrases[i].sequence = array;
}
}
};
p5.Part.prototype.incrementStep = function (time) {
if (this.partStep < this.length - 1) {
this.callback(time);
this.partStep += 1;
} else {
if (!this.looping && this.partStep == this.length - 1) {
this.onended();
}
}
};
/**
* Fire a callback function at every step.
*
* @method onStep
* @param {Function} callback The name of the callback
* you want to fire
* on every beat/tatum.
*/
p5.Part.prototype.onStep = function (callback) {
this.callback = callback;
};
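/*
* A minimal onStep sketch, assuming myPart is a playing p5.Part:
*
* myPart.onStep(function(secondsFromNow) {
*   // called on every beat/tatum with the scheduled time offset
*   console.log('step ' + myPart.partStep);
* });
*/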
// ===============
// p5.Score
// ===============
/**
* A Score consists of a series of Parts. The parts will
* be played back in order. For example, you could have an
* A part, a B part, and a C part, and play them back in this order
* new p5.Score(a, a, b, a, c)
*
* @class p5.Score
* @constructor
* @param {p5.Part} part(s) One or multiple parts, to be played in sequence.
* @return {p5.Score}
*/
p5.Score = function () {
// for all of the arguments
this.parts = [];
this.currentPart = 0;
var thisScore = this;
for (var i = 0; i < arguments.length; i++) {
this.parts[i] = arguments[i];
this.parts[i].nextPart = this.parts[i + 1];
// capture the numeric index so each onended resets the right part
this.parts[i].onended = (function (index) {
return function () {
thisScore.resetPart(index);
playNextPart(thisScore);
};
}(i));
}
this.looping = false;
};
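/*
* A minimal p5.Score sketch, assuming partA and partB are p5.Parts
* built as in the p5.Part example above:
*
* var myScore = new p5.Score(partA, partA, partB);
* myScore.loop(); // play A, A, B, then repeat from the top
*/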
p5.Score.prototype.onended = function () {
var thisScore = this;
if (this.looping) {
this.parts[0].start();
} else {
this.parts[this.parts.length - 1].onended = function () {
thisScore.stop();
thisScore.resetParts();
};
}
this.currentPart = 0;
};
/**
* Start playback of the score.
*
* @method start
*/
p5.Score.prototype.start = function () {
this.parts[this.currentPart].start();
this.scoreStep = 0;
};
/**
* Stop playback of the score.
*
* @method stop
*/
p5.Score.prototype.stop = function () {
this.parts[this.currentPart].stop();
this.currentPart = 0;
this.scoreStep = 0;
};
/**
* Pause playback of the score.
*
* @method pause
*/
p5.Score.prototype.pause = function () {
this.parts[this.currentPart].stop();
};
/**
* Loop playback of the score.
*
* @method loop
*/
p5.Score.prototype.loop = function () {
this.looping = true;
this.start();
};
/**
* Stop looping playback of the score. If it
* is currently playing, this will go into effect
* after the current round of playback completes.
*
* @method noLoop
*/
p5.Score.prototype.noLoop = function () {
this.looping = false;
};
p5.Score.prototype.resetParts = function () {
for (var i in this.parts) {
this.resetPart(i);
}
};
p5.Score.prototype.resetPart = function (i) {
this.parts[i].stop();
this.parts[i].partStep = 0;
for (var p in this.parts[i].phrases) {
this.parts[i].phrases[p].phraseStep = 0;
}
};
/**
* Set the tempo for all parts in the score
*
* @param {Number} BPM Beats Per Minute
* @param {Number} rampTime Seconds from now
*/
p5.Score.prototype.setBPM = function (bpm, rampTime) {
for (var i in this.parts) {
this.parts[i].setBPM(bpm, rampTime);
}
};
function playNextPart(aScore) {
aScore.currentPart++;
if (aScore.currentPart >= aScore.parts.length) {
aScore.scoreStep = 0;
aScore.onended();
} else {
aScore.scoreStep = 0;
aScore.parts[aScore.currentPart - 1].stop();
aScore.parts[aScore.currentPart].start();
}
}
}(master);
var soundRecorder;
soundRecorder = function () {
'use strict';
var p5sound = master;
var ac = p5sound.audiocontext;
/**
* Record sounds for playback and/or to save as a .wav file.
* The p5.SoundRecorder records all sound output from your sketch,
* or can be assigned a specific source with setInput().
* The record() method accepts a p5.SoundFile as a parameter.
* When recording is stopped (either after the given amount of time,
* or with the stop() method), the p5.SoundRecorder will send its
* recording to that p5.SoundFile for playback.
*
* @class p5.SoundRecorder
* @constructor
* @example
*
* var mic, recorder, soundFile;
* var state = 0;
*
* function setup() {
* background(200);
* // create an audio in
* mic = new p5.AudioIn();
*
* // prompts user to enable their browser mic
* mic.start();
*
* // create a sound recorder
* recorder = new p5.SoundRecorder();
*
* // connect the mic to the recorder
* recorder.setInput(mic);
*
* // this sound file will be used to
* // playback & save the recording
* soundFile = new p5.SoundFile();
*
* text('keyPress to record', 20, 20);
* }
*
* function keyPressed() {
* // make sure user enabled the mic
* if (state === 0 && mic.enabled) {
*
* // record to our p5.SoundFile
* recorder.record(soundFile);
*
* background(255,0,0);
* text('Recording!', 20, 20);
* state++;
* }
* else if (state === 1) {
* background(0,255,0);
*
* // stop recorder and
* // send result to soundFile
* recorder.stop();
*
* text('Stopped', 20, 20);
* state++;
* }
*
* else if (state === 2) {
* soundFile.play(); // play the result!
* save(soundFile, 'mySound.wav');
* state++;
* }
* }
*
*/
p5.SoundRecorder = function () {
this.input = ac.createGain();
this.output = ac.createGain();
this.recording = false;
this.bufferSize = 1024;
this._channels = 2;
// stereo (default)
this._clear();
// initialize variables
this._jsNode = ac.createScriptProcessor(this.bufferSize, this._channels, 2);
this._jsNode.onaudioprocess = this._audioprocess.bind(this);
/**
* callback invoked when the recording is over
* @private
* @type {function(Float32Array)}
*/
this._callback = function () {
};
// connections
this._jsNode.connect(p5.soundOut._silentNode);
this.setInput();
// add this p5.SoundFile to the soundArray
p5sound.soundArray.push(this);
};
/**
* Connect a specific device to the p5.SoundRecorder.
* If no parameter is given, p5.SoundRecorder will record
* all audible p5.sound from your sketch.
*
* @method setInput
* @param {Object} [unit] p5.sound object or a web audio unit
* that outputs sound
*/
p5.SoundRecorder.prototype.setInput = function (unit) {
this.input.disconnect();
this.input = null;
this.input = ac.createGain();
this.input.connect(this._jsNode);
this.input.connect(this.output);
if (unit) {
unit.connect(this.input);
} else {
p5.soundOut.output.connect(this.input);
}
};
/**
* Start recording. To access the recording, provide
* a p5.SoundFile as the first parameter. The p5.SoundRecorder
* will send its recording to that p5.SoundFile for playback once
* recording is complete. Optional parameters include duration
* (in seconds) of the recording, and a callback function that
* will be called once the complete recording has been
* transferred to the p5.SoundFile.
*
* @method record
* @param {p5.SoundFile} soundFile p5.SoundFile
* @param {Number} [duration] Time (in seconds)
* @param {Function} [callback] The name of a function that will be
* called once the recording completes
*/
p5.SoundRecorder.prototype.record = function (sFile, duration, callback) {
this.recording = true;
if (duration) {
this.sampleLimit = Math.round(duration * ac.sampleRate);
}
if (sFile && callback) {
this._callback = function () {
this.buffer = this._getBuffer();
sFile.setBuffer(this.buffer);
callback();
};
} else if (sFile) {
this._callback = function () {
this.buffer = this._getBuffer();
sFile.setBuffer(this.buffer);
};
}
};
/**
* Stop the recording. Once the recording is stopped,
* the results will be sent to the p5.SoundFile that
* was given on .record(), and if a callback function
* was provided on record, that function will be called.
*
* @method stop
*/
p5.SoundRecorder.prototype.stop = function () {
this.recording = false;
this._callback();
this._clear();
};
p5.SoundRecorder.prototype._clear = function () {
this._leftBuffers = [];
this._rightBuffers = [];
this.recordedSamples = 0;
this.sampleLimit = null;
};
/**
* internal method called on audio process
*
* @private
* @param {AudioProcessorEvent} event
*/
p5.SoundRecorder.prototype._audioprocess = function (event) {
if (this.recording === false) {
return;
} else if (this.recording === true) {
// if we are past the duration, then stop... else:
if (this.sampleLimit && this.recordedSamples >= this.sampleLimit) {
this.stop();
} else {
// get channel data
var left = event.inputBuffer.getChannelData(0);
var right = event.inputBuffer.getChannelData(1);
// clone the samples
this._leftBuffers.push(new Float32Array(left));
this._rightBuffers.push(new Float32Array(right));
this.recordedSamples += this.bufferSize;
}
}
};
p5.SoundRecorder.prototype._getBuffer = function () {
var buffers = [];
buffers.push(this._mergeBuffers(this._leftBuffers));
buffers.push(this._mergeBuffers(this._rightBuffers));
return buffers;
};
p5.SoundRecorder.prototype._mergeBuffers = function (channelBuffer) {
var result = new Float32Array(this.recordedSamples);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
};
p5.SoundRecorder.prototype.dispose = function () {
this._clear();
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this._callback = function () {
};
if (this.input) {
this.input.disconnect();
}
this.input = null;
this._jsNode = null;
};
/**
* Save a p5.SoundFile as a .wav audio file.
*
* @method saveSound
* @param {p5.SoundFile} soundFile p5.SoundFile that you wish to save
* @param {String} name name of the resulting .wav file.
*/
p5.prototype.saveSound = function (soundFile, name) {
var leftChannel, rightChannel;
leftChannel = soundFile.buffer.getChannelData(0);
// handle mono files
if (soundFile.buffer.numberOfChannels > 1) {
rightChannel = soundFile.buffer.getChannelData(1);
} else {
rightChannel = leftChannel;
}
var interleaved = interleave(leftChannel, rightChannel);
// create the buffer and view to create the .WAV file
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);
// write the WAV container,
// check spec at: https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 36 + interleaved.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, 2, true);
// use the audio context's actual sample rate, not a hard-coded 44100
view.setUint32(24, ac.sampleRate, true);
view.setUint32(28, ac.sampleRate * 4, true);
view.setUint16(32, 4, true);
view.setUint16(34, 16, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, true);
// write the PCM samples
var lng = interleaved.length;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (32767 * volume), true);
index += 2;
}
p5.prototype.writeFile([view], name, 'wav');
};
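/*
* A minimal saveSound sketch, assuming mySound is a loaded p5.SoundFile:
*
* function keyPressed() {
*   if (key === 's') {
*     // prompts the browser to download the buffer as 'mySound.wav'
*     saveSound(mySound, 'mySound.wav');
*   }
* }
*/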
// helper methods to save waves
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
}(sndcore, master);
var peakdetect;
peakdetect = function () {
'use strict';
var p5sound = master;
/**
* PeakDetect works in conjunction with p5.FFT to
* look for onsets in some or all of the frequency spectrum.
*
*
* To use p5.PeakDetect, call update in the draw loop
* and pass in a p5.FFT object.
*
*
* You can listen for a specific part of the frequency spectrum by
* setting the range between freq1 and freq2.
*
*
* threshold is the threshold for detecting a peak,
* scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
* as 1.0.
*
*
* The update method is meant to be run in the draw loop, and
* frames determines how many loops must pass before
* another peak can be detected.
* For example, if frameRate() is 60, you could detect the beat of a
* 120 beat-per-minute song with this equation:
* framesPerPeak = 60 / (estimatedBPM / 60 );
*
*
*
* Based on an example contributed by @b2renger, and a simple beat detection
* explanation by Felix Turner.
*
*
* @class p5.PeakDetect
* @constructor
* @param {Number} [freq1] lowFrequency - defaults to 40Hz
* @param {Number} [freq2] highFrequency - defaults to 20000 Hz
* @param {Number} [threshold] Threshold for detecting a beat between 0 and 1
* scaled logarithmically where 0.1 is 1/2 the loudness
* of 1.0. Defaults to 0.35.
* @param {Number} [framesPerPeak] Defaults to 20.
* @example
*
*
* var cnv, soundFile, fft, peakDetect;
* var ellipseWidth = 10;
*
* function setup() {
* background(0);
* noStroke();
* fill(255);
* textAlign(CENTER);
*
* soundFile = loadSound('assets/beat.mp3');
*
* // p5.PeakDetect requires a p5.FFT
* fft = new p5.FFT();
* peakDetect = new p5.PeakDetect();
*
* }
*
* function draw() {
* background(0);
* text('click to play/pause', width/2, height/2);
*
* // peakDetect accepts an fft post-analysis
* fft.analyze();
* peakDetect.update(fft);
*
* if ( peakDetect.isDetected ) {
* ellipseWidth = 50;
* } else {
* ellipseWidth *= 0.95;
* }
*
* ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
* }
*
* // toggle play/stop when canvas is clicked
* function mouseClicked() {
* if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
* if (soundFile.isPlaying() ) {
* soundFile.stop();
* } else {
* soundFile.play();
* }
* }
* }
*
*/
p5.PeakDetect = function (freq1, freq2, threshold, _framesPerPeak) {
// framesPerPeak determines how often to look for a beat.
// If a value is provided, use it; otherwise default to 20 frames.
this.framesPerPeak = _framesPerPeak || 20;
this.framesSinceLastPeak = 0;
this.decayRate = 0.95;
this.threshold = threshold || 0.35;
this.cutoff = 0;
// how much to increase the cutoff
// TO DO: document this / figure out how to make it accessible
this.cutoffMult = 1.5;
this.energy = 0;
this.penergy = 0;
// TO DO: document this property / figure out how to make it accessible
this.currentValue = 0;
/**
* isDetected is set to true when a peak is detected.
*
* @attribute isDetected
* @type {Boolean}
* @default false
*/
this.isDetected = false;
this.f1 = freq1 || 40;
this.f2 = freq2 || 20000;
// function to call when a peak is detected
this._onPeak = function () {
};
};
/**
* The update method is run in the draw loop.
*
* Accepts an FFT object. You must call .analyze()
* on the FFT object prior to updating the peakDetect
* because it relies on a completed FFT analysis.
*
* @method update
* @param {p5.FFT} fftObject A p5.FFT object
*/
p5.PeakDetect.prototype.update = function (fftObject) {
var nrg = this.energy = fftObject.getEnergy(this.f1, this.f2) / 255;
if (nrg > this.cutoff && nrg > this.threshold && nrg - this.penergy > 0) {
// trigger callback
this._onPeak();
this.isDetected = true;
// debounce
this.cutoff = nrg * this.cutoffMult;
this.framesSinceLastPeak = 0;
} else {
this.isDetected = false;
if (this.framesSinceLastPeak <= this.framesPerPeak) {
this.framesSinceLastPeak++;
} else {
this.cutoff *= this.decayRate;
this.cutoff = Math.max(this.cutoff, this.threshold);
}
}
this.currentValue = nrg;
this.penergy = nrg;
};
/**
* onPeak accepts two arguments: a function to call when a peak is
* detected, and an optional value to pass into that function. The
* energy level of the peak, between 0.0 and 1.0, is passed to the
* callback as its first argument.
*
* @method onPeak
* @param {Function} callback Name of a function that will
* be called when a peak is
* detected.
* @param {Object} [val] Optional value to pass
* into the function when
* a peak is detected.
* @example
*
* var cnv, soundFile, fft, peakDetect;
* var ellipseWidth = 0;
*
* function setup() {
* cnv = createCanvas(100,100);
* textAlign(CENTER);
*
* soundFile = loadSound('assets/beat.mp3');
* fft = new p5.FFT();
* peakDetect = new p5.PeakDetect();
*
* setupSound();
*
* // when a beat is detected, call triggerBeat()
* peakDetect.onPeak(triggerBeat);
* }
*
* function draw() {
* background(0);
* fill(255);
* text('click to play', width/2, height/2);
*
* fft.analyze();
* peakDetect.update(fft);
*
* ellipseWidth *= 0.95;
* ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
* }
*
* // this function is called by peakDetect.onPeak
* function triggerBeat() {
* ellipseWidth = 50;
* }
*
* // mouseclick starts/stops sound
* function setupSound() {
* cnv.mouseClicked( function() {
* if (soundFile.isPlaying() ) {
* soundFile.stop();
* } else {
* soundFile.play();
* }
* });
* }
*
*/
p5.PeakDetect.prototype.onPeak = function (callback, val) {
var self = this;
self._onPeak = function () {
callback(self.energy, val);
};
};
}(master);
var gain;
gain = function () {
'use strict';
var p5sound = master;
/**
* A gain node is useful for setting the relative volume of a sound.
* It's typically used to build mixers.
*
* @class p5.Gain
* @constructor
* @example
*
*
* // load two soundfiles and crossfade between them
* var sound1,sound2;
* var gain1, gain2, gain3;
*
* function preload(){
* soundFormats('ogg', 'mp3');
* sound1 = loadSound('../_files/Damscray_-_Dancing_Tiger_01');
* sound2 = loadSound('../_files/beat.mp3');
* }
*
* function setup() {
* createCanvas(400,200);
*
* // create a 'master' gain to which we will connect both soundfiles
* gain3 = new p5.Gain();
* gain3.connect();
*
* // setup first sound for playing
* sound1.rate(1);
* sound1.loop();
* sound1.disconnect(); // disconnect from p5 output
*
* gain1 = new p5.Gain(); // setup a gain node
* gain1.setInput(sound1); // connect the first sound to its input
* gain1.connect(gain3); // connect its output to the 'master'
*
* sound2.rate(1);
* sound2.disconnect();
* sound2.loop();
*
* gain2 = new p5.Gain();
* gain2.setInput(sound2);
* gain2.connect(gain3);
*
* }
*
* function draw(){
* background(180);
*
* // calculate the horizontal distance between the mouse and the right of the screen
* var d = dist(mouseX,0,width,0);
*
* // map the horizontal position of the mouse to values usable for volume control of sound1
* var vol1 = map(mouseX,0,width,0,1);
* var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
*
* gain1.amp(vol1,0.5,0);
* gain2.amp(vol2,0.5,0);
*
* // map the vertical position of the mouse to values usable for 'master volume control'
* var vol3 = map(mouseY,0,height,0,1);
* gain3.amp(vol3,0.5,0);
* }
*
*
*/
p5.Gain = function () {
this.ac = p5sound.audiocontext;
this.input = this.ac.createGain();
this.output = this.ac.createGain();
// otherwise, Safari distorts
this.input.gain.value = 0.5;
this.input.connect(this.output);
// add to the soundArray
p5sound.soundArray.push(this);
};
/**
* Connect a source to the gain node.
*
* @method setInput
* @param {Object} src p5.sound / Web Audio object with a sound
* output.
*/
p5.Gain.prototype.setInput = function (src) {
src.connect(this.input);
};
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
p5.Gain.prototype.connect = function (unit) {
var u = unit || p5.soundOut.input;
this.output.connect(u.input ? u.input : u);
};
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Gain.prototype.disconnect = function () {
this.output.disconnect();
};
/**
* Set the output level of the gain node.
*
* @method amp
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
p5.Gain.prototype.amp = function (vol, rampTime, tFromNow) {
rampTime = rampTime || 0;
tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(now);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
};
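/*
* A minimal amp sketch, assuming myGain is a connected p5.Gain:
*
* // fade to half volume over 1 second, starting 0.5 seconds from now
* myGain.amp(0.5, 1, 0.5);
*/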
p5.Gain.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.output.disconnect();
this.input.disconnect();
this.output = undefined;
this.input = undefined;
};
}(master, sndcore);
var distortion;
distortion = function () {
'use strict';
var p5sound = master;
/*
* Adapted from [Kevin Ennis on StackOverflow](http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion)
*/
function makeDistortionCurve(amount) {
var k = typeof amount === 'number' ? amount : 50;
var n_samples = 44100;
var curve = new Float32Array(n_samples);
var deg = Math.PI / 180;
var i = 0;
var x;
for (; i < n_samples; ++i) {
x = i * 2 / n_samples - 1;
curve[i] = (3 + k) * x * 20 * deg / (Math.PI + k * Math.abs(x));
}
return curve;
}
/**
* A Distortion effect created with a Waveshaper Node,
* with an approach adapted from
* [Kevin Ennis](http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion)
*
* @class p5.Distortion
* @constructor
* @param {Number} [amount=0.25] Unbounded distortion amount.
* Normal values range from 0-1.
* @param {String} [oversample='2x'] 'none', '2x', or '4x'.
*
* @return {Object} Distortion object
*/
p5.Distortion = function (amount, oversample) {
if (typeof amount === 'undefined') {
amount = 0.25;
}
if (typeof amount !== 'number') {
throw new Error('amount must be a number');
}
if (typeof oversample === 'undefined') {
oversample = '2x';
}
if (typeof oversample !== 'string') {
throw new Error('oversample must be a String');
}
var curveAmount = p5.prototype.map(amount, 0, 1, 0, 2000);
this.ac = p5sound.audiocontext;
this.input = this.ac.createGain();
this.output = this.ac.createGain();
/**
* The p5.Distortion is built with a
*
* Web Audio WaveShaper Node.
*
* @property WaveShaperNode
* @type {Object} AudioNode
*/
this.waveShaperNode = this.ac.createWaveShaper();
this.amount = curveAmount;
this.waveShaperNode.curve = makeDistortionCurve(curveAmount);
this.waveShaperNode.oversample = oversample;
this.input.connect(this.waveShaperNode);
this.waveShaperNode.connect(this.output);
this.connect();
// add to the soundArray
p5sound.soundArray.push(this);
};
p5.Distortion.prototype.process = function (src, amount, oversample) {
src.connect(this.input);
this.set(amount, oversample);
};
/**
* Set the amount and oversample of the waveshaper distortion.
*
* @method set
* @param {Number} [amount=0.25] Unbounded distortion amount.
* Normal values range from 0-1.
* @param {String} [oversample] 'none', '2x', or '4x'.
*/
p5.Distortion.prototype.set = function (amount, oversample) {
if (amount) {
var curveAmount = p5.prototype.map(amount, 0, 1, 0, 2000);
this.amount = curveAmount;
this.waveShaperNode.curve = makeDistortionCurve(curveAmount);
}
if (oversample) {
this.waveShaperNode.oversample = oversample;
}
};
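/*
* A minimal set sketch, assuming dist is a p5.Distortion that is
* already processing a source via .process():
*
* // raise the distortion amount and switch to 4x oversampling
* dist.set(0.8, '4x');
*/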
/**
* Return the distortion amount, typically between 0-1.
*
* @method getAmount
* @return {Number} Unbounded distortion amount.
* Normal values range from 0-1.
*/
p5.Distortion.prototype.getAmount = function () {
return this.amount;
};
/**
* Return the oversampling.
*
* @method getOversample
* @return {String} Oversample can either be 'none', '2x', or '4x'.
*/
p5.Distortion.prototype.getOversample = function () {
return this.waveShaperNode.oversample;
};
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
p5.Distortion.prototype.connect = function (unit) {
var u = unit || p5.soundOut.input;
this.output.connect(u.input ? u.input : u);
};
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Distortion.prototype.disconnect = function () {
this.output.disconnect();
};
p5.Distortion.prototype.dispose = function () {
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
this.input.disconnect();
this.waveShaperNode.disconnect();
this.input = null;
this.waveShaperNode = null;
if (typeof this.output !== 'undefined') {
this.output.disconnect();
this.output = null;
}
};
}(master);
var src_app;
src_app = function () {
'use strict';
var p5SOUND = sndcore;
return p5SOUND;
}(sndcore, master, helpers, errorHandler, panner, soundfile, amplitude, fft, signal, oscillator, env, pulse, noise, audioin, filter, delay, reverb, metro, looper, soundRecorder, peakdetect, gain, distortion);
}));