From 4839fa20acb55401b4a875618e7225b3241a8d09 Mon Sep 17 00:00:00 2001
From: TechRunner
+/**
+ * p5.sound extends p5 with Web Audio functionality including audio input,
+ * playback, analysis and synthesis.
+ * p5.sound is on GitHub.
+ * Download the latest version
+ * from the GitHub repository.
+ *
+ * Some browsers require users to startAudioContext
+ * with a user gesture, such as touchStarted in the example below.
+ *
+ * @for p5
+ * @method getAudioContext
+ * @return {Object} AudioContext for this sketch
+ * @example
+ * function draw() {
+ * background(255);
+ * textAlign(CENTER);
+ *
+ * if (getAudioContext().state !== 'running') {
+ * text('click to start audio', width/2, height/2);
+ * } else {
+ * text('audio is enabled', width/2, height/2);
+ * }
+ * }
+ *
+ * function touchStarted() {
+ * if (getAudioContext().state !== 'running') {
+ * getAudioContext().resume();
+ * }
+ * var synth = new p5.MonoSynth();
+ * synth.play('A4', 0.5, 0, 0.2);
+ * }
+ *
+ * It is not only good practice to give users control over starting
+ * audio: this policy is enforced by many web browsers, including iOS and
+ * Google Chrome, which create the Web Audio API's
+ * Audio Context
+ * in a suspended state.
+ *
+ * In these browser-specific policies, sound will not play until a user
+ * interaction event (i.e. mousePressed()) explicitly resumes
+ * the AudioContext, or starts an audio node. This can be accomplished by
+ * calling start() on a p5.Oscillator,
+ * play() on a p5.SoundFile, or simply
+ * userStartAudio().
+ * userStartAudio() starts the AudioContext on a user
+ * gesture. The default behavior will enable audio on any
+ * mouseUp or touchEnd event. It can also be placed in a specific
+ * interaction function, such as mousePressed() as in the
+ * example below. This method utilizes
+ * StartAudioContext,
+ * a library by Yotam Mann (MIT Licence, 2016).
+ *
+ * @example
+ * function setup() {
+ * // mimics the autoplay policy
+ * getAudioContext().suspend();
+ *
+ * let mySynth = new p5.MonoSynth();
+ *
+ * // This won't play until the context has resumed
+ * mySynth.play('A6');
+ * }
+ * function draw() {
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ * text(getAudioContext().state, width/2, height/2);
+ * }
+ * function mousePressed() {
+ * userStartAudio();
+ * }
+ * rampTime parameter. For more
+ * complex fades, see the Envelope class.
+ *
+ * Alternately, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ * How This Works: When you load the p5.sound module, it
+ * creates a single instance of p5sound. All sound objects in this
+ * module output to p5sound before reaching your computer's output.
+ * So if you change the amplitude of p5sound, it impacts all of the
+ * sound in this module.
+ *
+ * If no value is provided, returns a Web Audio API Gain Node
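+ *
+ * For example, a minimal sketch (assuming a sound is already playing
+ * and audio was enabled by a user gesture):
+ * function mousePressed() {
+ * // fade everything this sketch outputs to half volume over 1 second
+ * outputVolume(0.5, 1);
+ * }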
+ *
+ * @method outputVolume
+ * @param {Number|Object} volume Volume (amplitude) between 0.0
+ *                               and 1.0 or modulating signal/oscillator
+ * @param {Number} [rampTime] Fade for t seconds
+ * @param {Number} [timeFromNow] Schedule this event to happen at
+ *                               t seconds in the future
+ */
+
+
+p5.prototype.outputVolume = function (vol) {
+  var rampTime = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+  var tFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+
+  if (typeof vol === 'number') {
+    var now = p5sound.audiocontext.currentTime;
+    var currentVol = p5sound.output.gain.value;
+    p5sound.output.gain.cancelScheduledValues(now + tFromNow);
+    p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+    p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+  } else if (vol) {
+    vol.connect(p5sound.output.gain);
+  } else {
+    return p5sound.output.gain;
+  }
+};
+/**
+ * `p5.soundOut` is the p5.sound final output bus. It sends output to
+ * the destination of this window's web audio context. It contains
+ * Web Audio API nodes including a dynamicsCompressor (.limiter),
+ * and Gain Nodes for .input and .output.
+ *
+ * @property {Object} soundOut
+ */
+
+
+p5.prototype.soundOut = p5.soundOut = p5sound;
+
+p5.soundOut._silentNode = p5sound.audiocontext.createGain();
+p5.soundOut._silentNode.gain.value = 0;
+
+p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
+
+ var main = (p5sound);
+var processorNames = __webpack_require__(5);
+var processorNames_default = __webpack_require__.n(processorNames);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+
+
+/**
+ * @for p5
+ */
+
+/**
+ * Returns a number representing the sample rate, in samples per second,
+ * of all sound objects in this audio context. It is determined by the
+ * sampling rate of your operating system's sound card, and it is not
+ * currently possible to change.
+ * It is often 44100, or twice the range of human hearing.
+ *
+ * @method sampleRate
+ * @return {Number} samplerate samples per second
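+ *
+ * For example, a minimal sketch that displays the rate:
+ * @example
+ * function setup() {
+ * createCanvas(100, 100);
+ * background(220);
+ * text(sampleRate() + ' samples per second', 10, 20, width - 20);
+ * }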
+ */
+
+function sampleRate() {
+ return main.audiocontext.sampleRate;
+}
+/**
+ * Returns the closest MIDI note value for
+ * a given frequency.
+ *
+ * @method freqToMidi
+ * @param {Number} frequency A frequency, for example, the "A"
+ * above Middle C is 440Hz
+ * @return {Number} MIDI note value
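+ *
+ * For example, a quick check of the mapping (concert A and middle C):
+ * @example
+ * function setup() {
+ * createCanvas(100, 100);
+ * background(220);
+ * text('440 Hz -> MIDI ' + freqToMidi(440), 10, 20); // 69
+ * text('261.63 Hz -> MIDI ' + freqToMidi(261.63), 10, 40); // 60
+ * }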
+ */
+
+
+function freqToMidi(f) {
+ var mathlog2 = Math.log(f / 440) / Math.log(2);
+ var m = Math.round(12 * mathlog2) + 69;
+ return m;
+}
+/**
+ * Returns the frequency value of a MIDI note value.
+ * General MIDI treats notes as integers where middle C
+ * is 60, C# is 61, D is 62 etc. Useful for generating
+ * musical frequencies with oscillators.
+ *
+ * @method midiToFreq
+ * @param {Number} midiNote The number of a MIDI note
+ * @return {Number} Frequency value of the given MIDI note
+ * @example
+ *
+ * let midiNotes = [60, 64, 67, 72];
+ * let noteIndex = 0;
+ * let midiVal, freq;
+ * let osc, env;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(startSound);
+ * osc = new p5.TriOsc();
+ * env = new p5.Envelope();
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap to play', 10, 20);
+ * if (midiVal) {
+ * text('MIDI: ' + midiVal, 10, 40);
+ * text('Freq: ' + freq, 10, 60);
+ * }
+ * }
+ *
+ * function startSound() {
+ * // see also: userStartAudio();
+ * osc.start();
+ *
+ * midiVal = midiNotes[noteIndex % midiNotes.length];
+ * freq = midiToFreq(midiVal);
+ * osc.freq(freq);
+ * env.ramp(osc, 0, 1.0, 0);
+ *
+ * noteIndex++;
+ * }
+ *
+ * let mySound;
+ * function preload() {
+ * // set the global sound formats
+ * soundFormats('mp3', 'ogg');
+ *
+ * // load either beatbox.mp3, or .ogg, depending on browser
+ * mySound = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * text('sound loaded! tap to play', 10, 20, width - 20);
+ * cnv.mousePressed(function() {
+ * mySound.play();
+ * });
+ * }
+ * SoundFile object with a path to a file.
+ *
+ * The p5.SoundFile may not be available immediately because
+ * it loads the file information asynchronously.
+ *
+ * To do something with the sound as soon as it loads
+ * pass the name of a function as the second parameter.
+ *
+ * Only one file path is required. However, audio file formats
+ * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all
+ * web browsers. If you want to ensure compatibility, instead of a single
+ * file path, you may include an Array of filepaths, and the browser will
+ * choose a format that works.
+ *
+ * @class p5.SoundFile
+ * @constructor
+ * @param {String|Array} path path to a sound file (String). Optionally,
+ *                            you may include multiple file formats in
+ *                            an array. Alternately, accepts an object
+ *                            from the HTML5 File API, or a p5.File.
+ * @param {Function} [successCallback] Name of a function to call once file loads
+ * @param {Function} [errorCallback] Name of a function to call if file fails to
+ *                                   load. This function will receive an error or
+ *                                   XMLHttpRequest object with information
+ *                                   about what went wrong.
+ * @param {Function} [whileLoadingCallback] Name of a function to call while file
+ *                                          is loading. That function will
+ *                                          receive progress of the request to
+ *                                          load the sound file
+ *                                          (between 0 and 1) as its first
+ *                                          parameter. This progress
+ *                                          does not account for the additional
+ *                                          time needed to decode the audio data.
+ *
+ * @example
+ * let mySound;
+ * function preload() {
+ * soundFormats('mp3', 'ogg');
+ * mySound = loadSound('assets/doorbell');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap here to play', 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * // playing a sound file on a user gesture
+ * // is equivalent to `userStartAudio()`
+ * mySound.play();
+ * }
+ * restart and
+ * sustain. Play Mode determines what happens to a
+ * p5.SoundFile if it is triggered while in the middle of playback.
+ * In sustain mode, playback will continue simultaneously with the
+ * new playback. In restart mode, play() will stop playback
+ * and start over. With untilDone, a sound will play only if it's
+ * not already playing. Sustain is the default mode.
+ *
+ * @method playMode
+ * @for p5.SoundFile
+ * @param {String} str 'restart' or 'sustain' or 'untilDone'
+ * @example
+ *
+ * let mySound;
+ * function preload(){
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * noFill();
+ * rect(0, height/2, width - 1, height/2 - 1);
+ * rect(0, 0, width - 1, height/2);
+ * textAlign(CENTER, CENTER);
+ * fill(20);
+ * text('restart', width/2, 1 * height/4);
+ * text('sustain', width/2, 3 * height/4);
+ * }
+ * function canvasPressed() {
+ * if (mouseY < height/2) {
+ * mySound.playMode('restart');
+ * } else {
+ * mySound.playMode('sustain');
+ * }
+ * mySound.play();
+ * }
+ *
+ *
+ * let soundFile;
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play, release to pause', 10, 20, width - 20);
+ * }
+ * function canvasPressed() {
+ * soundFile.loop();
+ * background(0, 200, 50);
+ * }
+ * function mouseReleased() {
+ * soundFile.pause();
+ * background(220);
+ * }
+ *
+ *
+ * let soundFile;
+ * let loopStart = 0.5;
+ * let loopDuration = 0.2;
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play, release to pause', 10, 20, width - 20);
+ * }
+ * function canvasPressed() {
+ * soundFile.loop(0, 1, 1, loopStart, loopDuration);
+ * background(0, 200, 50);
+ * }
+ * function mouseReleased() {
+ * soundFile.pause();
+ * background(220);
+ * }
+ *
+ *
+ * let ballX = 0;
+ * let soundFile;
+ *
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * ballX = constrain(mouseX, 0, width);
+ * ellipse(ballX, height/2, 20, 20);
+ * }
+ *
+ * function canvasPressed(){
+ * // map the ball's x location to a panning degree
+ * // between -1.0 (left) and 1.0 (right)
+ * let panning = map(ballX, 0, width, -1.0, 1.0);
+ * soundFile.pan(panning);
+ * soundFile.play();
+ * }
+ *
+ * let mySound;
+ *
+ * function preload() {
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * }
+ * function canvasPressed() {
+ * mySound.loop();
+ * }
+ * function mouseReleased() {
+ * mySound.pause();
+ * }
+ * function draw() {
+ * background(220);
+ *
+ * // Set the rate to a range between 0.1 and 4
+ * // Changing the rate also alters the pitch
+ * let playbackRate = map(mouseY, 0.1, height, 2, 0);
+ * playbackRate = constrain(playbackRate, 0.01, 4);
+ * mySound.rate(playbackRate);
+ *
+ * line(0, mouseY, width, mouseY);
+ * text('rate: ' + round(playbackRate * 100) + '%', 10, 20);
+ * }
+ *
+ *
+ * rampTime parameter. For more
+ * complex fades, see the Envelope class.
+ *
+ * Alternately, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ * @method setVolume
+ * @for p5.SoundFile
+ * @param {Number|Object} volume Volume (amplitude) between 0.0
+ * and 1.0 or modulating signal/oscillator
+ * @param {Number} [rampTime] Fade for t seconds
+ * @param {Number} [timeFromNow] Schedule this event to happen at
+ * t seconds in the future
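+ *
+ * For example, a minimal sketch (assuming mySound was loaded in
+ * preload and started by a user gesture):
+ * @example
+ * function mousePressed() {
+ * // ramp mySound down to 20% volume over half a second
+ * mySound.setVolume(0.2, 0.5);
+ * }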
+ */
+
+ }, {
+ key: "setVolume",
+ value: function setVolume(vol, _rampTime, _tFromNow) {
+ if (typeof vol === 'number') {
+ var rampTime = _rampTime || 0;
+ var tFromNow = _tFromNow || 0;
+ var now = main.audiocontext.currentTime;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+ } else if (vol) {
+ vol.connect(this.output.gain);
+ } else {
+ return this.output.gain;
+ }
+ }
+ /**
+ * Returns the duration of a sound file in seconds.
+ *
+ * @method duration
+ * @for p5.SoundFile
+ * @return {Number} The duration of the soundFile in seconds.
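+ *
+ * For example (a minimal sketch, assuming mySound was loaded in preload):
+ * @example
+ * function setup() {
+ * createCanvas(100, 100);
+ * background(220);
+ * text('duration: ' + mySound.duration().toFixed(2) + 's', 10, 20);
+ * }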
+ */
+
+ }, {
+ key: "duration",
+ value: function duration() {
+ if (this.buffer) {
+ return this.buffer.duration;
+ } else {
+ return 0;
+ }
+ }
+ /**
+ * Return the current position of the p5.SoundFile playhead, in seconds.
+ * Time is relative to the normal buffer direction, so if `reverseBuffer`
+ * has been called, currentTime will count backwards.
+ *
+ * @method currentTime
+ * @for p5.SoundFile
+ * @return {Number} currentTime of the soundFile in seconds.
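+ *
+ * For example, a simple playhead readout (a minimal sketch, assuming
+ * mySound was loaded in preload and is playing):
+ * @example
+ * function draw() {
+ * background(220);
+ * text(mySound.currentTime().toFixed(2) + ' / ' +
+ * mySound.duration().toFixed(2) + ' s', 10, 20);
+ * }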
+ */
+
+ }, {
+ key: "currentTime",
+ value: function currentTime() {
+ return this.reversed ? Math.abs(this._lastPos - this.buffer.length) / soundfile_ac.sampleRate : this._lastPos / soundfile_ac.sampleRate;
+ }
+ /**
+ * Move the playhead of a soundfile that is currently playing to a
+ * new position and a new duration, in seconds.
+ * If none are given, will reset the file to play entire duration
+ * from start to finish. To set the position of a soundfile that is
+ * not currently playing, use the `play` or `loop` methods.
+ *
+ * @method jump
+ * @for p5.SoundFile
+ * @param {Number} cueTime cueTime of the soundFile in seconds.
+ * @param {Number} duration duration in seconds.
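+ *
+ * For example (a minimal sketch, assuming mySound is loaded and playing):
+ * @example
+ * function mousePressed() {
+ * if (mySound.isPlaying()) {
+ * // skip the playhead to the halfway point of the file
+ * mySound.jump(mySound.duration() / 2);
+ * }
+ * }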
+ */
+
+ }, {
+ key: "jump",
+ value: function jump(cueTime, duration) {
+ if (cueTime < 0 || cueTime > this.buffer.duration) {
+ throw 'jump time out of range';
+ }
+
+ if (duration > this.buffer.duration - cueTime) {
+ throw 'end time out of range';
+ }
+
+ var cTime = cueTime || 0;
+ var dur = duration || undefined;
+
+ if (this.isPlaying()) {
+ this.stop(0);
+ this.play(0, this.playbackRate, this.output.gain.value, cTime, dur);
+ }
+ }
+ /**
+ * Return the number of channels in a sound file.
+ * For example, Mono = 1, Stereo = 2.
+ *
+ * @method channels
+ * @for p5.SoundFile
+ * @return {Number} [channels]
+ */
+
+ }, {
+ key: "channels",
+ value: function channels() {
+ return this.buffer.numberOfChannels;
+ }
+ /**
+ * Return the sample rate of the sound file.
+ *
+ * @method sampleRate
+ * @for p5.SoundFile
+ * @return {Number} [sampleRate]
+ */
+
+ }, {
+ key: "sampleRate",
+ value: function sampleRate() {
+ return this.buffer.sampleRate;
+ }
+ /**
+ * Return the number of samples in a sound file.
+ * Equal to sampleRate * duration.
+ *
+ * @method frames
+ * @for p5.SoundFile
+ * @return {Number} [sampleCount]
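+ *
+ * For example, the relationship can be checked directly (a minimal
+ * sketch, assuming mySound was loaded in preload):
+ * @example
+ * function setup() {
+ * let sampleCount = mySound.frames();
+ * // frames = sampleRate * duration (up to rounding)
+ * print(sampleCount + ' ~ ' + round(mySound.sampleRate() * mySound.duration()));
+ * }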
+ */
+
+ }, {
+ key: "frames",
+ value: function frames() {
+ return this.buffer.length;
+ }
+ /**
+ * Returns an array of amplitude peaks in a p5.SoundFile that can be
+ * used to draw a static waveform. Scans through the p5.SoundFile's
+ * audio buffer to find the greatest amplitudes. Accepts one
+ * parameter, 'length', which determines size of the array.
+ * Larger arrays result in more precise waveform visualizations.
+ *
+ * Inspired by Wavesurfer.js.
+ *
+ * @method getPeaks
+ * @for p5.SoundFile
+ * @param {Number} [length] length is the size of the returned array.
+ * Larger length results in more precision.
+ * Defaults to 5*width of the browser window.
+ * @return {Float32Array} Array of peaks.
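+ *
+ * For example, a static waveform can be drawn once the file has
+ * loaded (a minimal sketch, assuming mySound was loaded in preload):
+ * @example
+ * function setup() {
+ * createCanvas(100, 100);
+ * background(220);
+ * stroke(20);
+ * let peaks = mySound.getPeaks(width);
+ * for (let i = 0; i < peaks.length; i++) {
+ * line(i, height/2, i, height/2 - peaks[i] * height/2);
+ * }
+ * }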
+ */
+
+ }, {
+ key: "getPeaks",
+ value: function getPeaks(length) {
+ if (this.buffer) {
+ if (!length) {
+ length = window.width * 5;
+ }
+
+ if (this.buffer) {
+ var buffer = this.buffer;
+ var sampleSize = buffer.length / length;
+ var sampleStep = ~~(sampleSize / 10) || 1;
+ var channels = buffer.numberOfChannels;
+ var peaks = new Float32Array(Math.round(length));
+
+ for (var c = 0; c < channels; c++) {
+ var chan = buffer.getChannelData(c);
+
+ for (var i = 0; i < length; i++) {
+ var start = ~~(i * sampleSize);
+ var end = ~~(start + sampleSize);
+ var max = 0;
+
+ for (var j = start; j < end; j += sampleStep) {
+ var value = chan[j];
+
+ if (value > max) {
+ max = value;
+ } else if (-value > max) {
+ max = value;
+ }
+ }
+
+ if (c === 0 || Math.abs(max) > peaks[i]) {
+ peaks[i] = max;
+ }
+ }
+ }
+
+ return peaks;
+ }
+ } else {
+ throw 'Cannot load peaks yet, buffer is not loaded';
+ }
+ }
+ /**
+ * Reverses the p5.SoundFile's buffer source.
+ * Playback must be handled separately (see example).
+ *
+ * @method reverseBuffer
+ * @for p5.SoundFile
+ * @example
+ *
+ * let drum;
+ * function preload() {
+ * drum = loadSound('assets/drum.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * drum.stop();
+ * drum.reverseBuffer();
+ * drum.play();
+ * }
+ *
+ *
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play', 10, 20);
+ *
+ * // schedule calls to changeText
+ * mySound.addCue(0, changeText, "hello" );
+ * mySound.addCue(0.5, changeText, "hello," );
+ * mySound.addCue(1, changeText, "hello, p5!");
+ * mySound.addCue(1.5, changeText, "hello, p5!!");
+ * mySound.addCue(2, changeText, "hello, p5!!!!!");
+ * }
+ *
+ * function changeText(val) {
+ * background(220);
+ * text(val, 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * mySound.play();
+ * }
+ *
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to download', 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * mySound.save('my cool filename');
+ * }
+ *
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ *
+ * function setup() {
+ * noCanvas();
+ * let soundBlob = mySound.getBlob();
+ *
+ * // Now we can send the blob to a server...
+ * let serverUrl = 'https://jsonplaceholder.typicode.com/posts';
+ * let formData = new FormData();
+ * formData.append('soundBlob', soundBlob);
+ * let httpRequestOptions = {
+ * method: 'POST',
+ * body: formData,
+ * headers: new Headers({
+ * 'Content-Type': 'multipart/form-data'
+ * })
+ * };
+ * httpDo(serverUrl, httpRequestOptions);
+ *
+ * // We can also create an `ObjectURL` pointing to the Blob
+ * let blobUrl = URL.createObjectURL(soundBlob);
+ *
+ * // The `
+ * let mySound;
+ * function preload() {
+ * soundFormats('mp3', 'ogg');
+ * mySound = loadSound('assets/doorbell');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap here to play', 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * // playing a sound file on a user gesture
+ * // is equivalent to `userStartAudio()`
+ * mySound.play();
+ * }
+ *
+ * let sound, amplitude;
+ *
+ * function preload(){
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100,100);
+ * cnv.mouseClicked(togglePlay);
+ * amplitude = new p5.Amplitude();
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * let level = amplitude.getLevel();
+ * let size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ *
+ * function togglePlay() {
+ * if (sound.isPlaying() ){
+ * sound.pause();
+ * } else {
+ * sound.loop();
+ * amplitude = new p5.Amplitude();
+ * amplitude.setInput(sound);
+ * }
+ * }
+ *
+ *
+ * function preload(){
+ * sound1 = loadSound('assets/beat.mp3');
+ * sound2 = loadSound('assets/drum.mp3');
+ * }
+ * function setup(){
+ * cnv = createCanvas(100, 100);
+ * cnv.mouseClicked(toggleSound);
+ *
+ * amplitude = new p5.Amplitude();
+ * amplitude.setInput(sound2);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * let level = amplitude.getLevel();
+ * let size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ *
+ * function toggleSound(){
+ * if (sound1.isPlaying() && sound2.isPlaying()) {
+ * sound1.stop();
+ * sound2.stop();
+ * } else {
+ * sound1.play();
+ * sound2.play();
+ * }
+ * }
+ *
+ * function preload(){
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mouseClicked(toggleSound);
+ * amplitude = new p5.Amplitude();
+ * }
+ *
+ * function draw() {
+ * background(220, 150);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, 20);
+ *
+ * let level = amplitude.getLevel();
+ * let size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ *
+ * function toggleSound(){
+ * if (sound.isPlaying()) {
+ * sound.stop();
+ * } else {
+ * sound.play();
+ * }
+ * }
+ * FFT (Fast Fourier Transform) is an analysis algorithm that
+ * isolates individual
+ * audio frequencies within a waveform.
+ *
+ * Once instantiated, a p5.FFT object can return an array based on
+ * two types of analyses:
+ * • FFT.waveform() computes
+ * amplitude values along the time domain. The array indices correspond
+ * to samples across a brief moment in time. Each value represents
+ * amplitude of the waveform at that sample of time.
+ * • FFT.analyze() computes amplitude values along the
+ * frequency domain. The array indices correspond to frequencies (i.e.
+ * pitches), from the lowest to the highest that humans can hear. Each
+ * value represents amplitude at that slice of the frequency spectrum.
+ * Use with getEnergy() to measure amplitude at specific
+ * frequencies, or within a range of frequencies.
+ * FFT analyzes a very short snapshot of sound called a sample
+ * buffer. It returns an array of amplitude measurements, referred
+ * to as bins. The array is 1024 bins long by default.
+ * You can change the bin array length, but it must be a power of 2
+ * between 16 and 1024 in order for the FFT algorithm to function
+ * correctly. The actual size of the FFT buffer is twice the
+ * number of bins, so given a standard sample rate, the buffer is
+ * 2048/44100 seconds long.
+ *
+ * @example
+ * function preload(){
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup(){
+ * let cnv = createCanvas(100,100);
+ * cnv.mouseClicked(togglePlay);
+ * fft = new p5.FFT();
+ * sound.amp(0.2);
+ * }
+ *
+ * function draw(){
+ * background(220);
+ *
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * fill(255, 0, 255);
+ * for (let i = 0; i< spectrum.length; i++){
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h);
+ * }
+ *
+ * let waveform = fft.waveform();
+ * noFill();
+ * beginShape();
+ * stroke(20);
+ * for (let i = 0; i < waveform.length; i++){
+ * let x = map(i, 0, waveform.length, 0, width);
+ * let y = map( waveform[i], -1, 1, 0, height);
+ * vertex(x,y);
+ * }
+ * endShape();
+ *
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function togglePlay() {
+ * if (sound.isPlaying()) {
+ * sound.pause();
+ * } else {
+ * sound.loop();
+ * }
+ * }
+ * getEnergy().
+ *
+ * @method analyze
+ * @for p5.FFT
+ * @param {Number} [bins] Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @param {Number} [scale] If "dB," returns decibel
+ * float measurements between
+ * -140 and 0 (max).
+ * Otherwise returns integers from 0-255.
+ * @return {Array} spectrum Array of energy (amplitude/volume)
+ * values across the frequency spectrum.
+ * Lowest energy (silence) = 0, highest
+ * possible is 255.
+ * @example
+ *
+ * let osc, fft;
+ *
+ * function setup(){
+ * let cnv = createCanvas(100,100);
+ * cnv.mousePressed(startSound);
+ * osc = new p5.Oscillator();
+ * osc.amp(0);
+ * fft = new p5.FFT();
+ * }
+ *
+ * function draw(){
+ * background(220);
+ *
+ * let freq = map(mouseX, 0, windowWidth, 20, 10000);
+ * freq = constrain(freq, 1, 20000);
+ * osc.freq(freq);
+ *
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * fill(255, 0, 255);
+ * for (let i = 0; i< spectrum.length; i++){
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h );
+ * }
+ *
+ * stroke(255);
+ * if (!osc.started) {
+ * text('tap here and drag to change frequency', 10, 20, width - 20);
+ * } else {
+ * text(round(freq)+'Hz', 10, 20);
+ * }
+ * }
+ *
+ * function startSound() {
+ * osc.start();
+ * osc.amp(0.5, 0.2);
+ * }
+ *
+ * function mouseReleased() {
+ * osc.amp(0, 0.2);
+ * }
+ *
+ * function setup(){
+ * cnv = createCanvas(100,100);
+ * cnv.mousePressed(userStartAudio);
+ * sound = new p5.AudioIn();
+ * sound.start();
+ * fft = new p5.FFT();
+ * sound.connect(fft);
+ * }
+ *
+ * function draw() {
+ * if (getAudioContext().state !== 'running') {
+ * background(220);
+ * text('tap here and enable mic to begin', 10, 20, width - 20);
+ * return;
+ * }
+ * let centroidplot = 0.0;
+ * let spectralCentroid = 0;
+ *
+ * background(0);
+ * stroke(0,255,0);
+ * let spectrum = fft.analyze();
+ * fill(0,255,0); // spectrum is green
+ *
+ * //draw the spectrum
+ * for (let i = 0; i < spectrum.length; i++){
+ * let x = map(log(i), 0, log(spectrum.length), 0, width);
+ * let h = map(spectrum[i], 0, 255, 0, height);
+ * let rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
+ * rect(x, height, rectangle_width, -h);
+ * }
+ * let nyquist = 22050;
+ *
+ * // get the centroid
+ * spectralCentroid = fft.getCentroid();
+ *
+ * // the mean_freq_index calculation is for the display.
+ * let mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
+ *
+ * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
+ *
+ * stroke(255,0,0); // the line showing where the centroid is will be red
+ *
+ * rect(centroidplot, 0, width / spectrum.length, height);
+ * noStroke();
+ * fill(255,255,255); // text is white
+ * text('centroid: ', 10, 20);
+ * text(round(spectralCentroid)+' Hz', 10, 40);
+ * }
+ * Creates a signal that oscillates between -1.0 and 1.0.
+ * By default, the oscillation takes the form of a sinusoidal
+ * shape ('sine'). Additional types include 'triangle',
+ * 'sawtooth' and 'square'. The frequency defaults to
+ * 440 oscillations per second (440Hz, equal to the pitch of an
+ * 'A' note).
+ *
+ * Set the type of oscillation with setType(), or by instantiating a
+ * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc.
+ *
+ * @class p5.Oscillator
+ * @constructor
+ * @param {Number} [freq] frequency defaults to 440Hz
+ * @param {String} [type] type of oscillator. Options:
+ *                        'sine' (default), 'triangle',
+ *                        'sawtooth', 'square'
+ * @example
+ * let osc, playing, freq, amp;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playOscillator);
+ * osc = new p5.Oscillator('sine');
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * freq = constrain(map(mouseX, 0, width, 100, 500), 100, 500);
+ * amp = constrain(map(mouseY, height, 0, 0, 1), 0, 1);
+ *
+ * text('tap to play', 20, 20);
+ * text('freq: ' + freq, 20, 40);
+ * text('amp: ' + amp, 20, 60);
+ *
+ * if (playing) {
+ * // smooth the transitions by 0.1 seconds
+ * osc.freq(freq, 0.1);
+ * osc.amp(amp, 0.1);
+ * }
+ * }
+ *
+ * function playOscillator() {
+ * // starting an oscillator on a user gesture will enable audio
+ * // in browsers that have a strict autoplay policy.
+ * // See also: userStartAudio();
+ * osc.start();
+ * playing = true;
+ * }
+ *
+ * function mouseReleased() {
+ * // ramp amplitude to 0 over 0.5 seconds
+ * osc.amp(0, 0.5);
+ * playing = false;
+ * }
+ *
+ * let osc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playOscillator);
+ * osc = new p5.Oscillator(300);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function playOscillator() {
+ * osc.start();
+ * osc.amp(0.5);
+ * // start at 700Hz
+ * osc.freq(700);
+ * // ramp to 60Hz over 0.7 seconds
+ * osc.freq(60, 0.7);
+ * osc.amp(0, 0.1, 0.7);
+ * }
+ * Constructor: new p5.SinOsc().
+ * This creates a Sine Wave Oscillator and is
+ * equivalent to new p5.Oscillator('sine')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('sine').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SinOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
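+ *
+ * For example, a minimal sketch (audio is enabled by the mouse press):
+ * @example
+ * let sinOsc;
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playOsc);
+ * sinOsc = new p5.SinOsc(440);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ * function playOsc() {
+ * sinOsc.start();
+ * }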
+ */
+
+
+var SinOsc =
+function (_Oscillator) {
+ _inherits(SinOsc, _Oscillator);
+
+ function SinOsc(freq) {
+ oscillator_classCallCheck(this, SinOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(SinOsc).call(this, freq, 'sine'));
+ }
+
+ return SinOsc;
+}(oscillator_Oscillator);
+/**
+ * Constructor: new p5.TriOsc().
+ * This creates a Triangle Wave Oscillator and is
+ * equivalent to new p5.Oscillator('triangle')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('triangle').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.TriOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+var TriOsc =
+function (_Oscillator2) {
+ _inherits(TriOsc, _Oscillator2);
+
+ function TriOsc(freq) {
+ oscillator_classCallCheck(this, TriOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(TriOsc).call(this, freq, 'triangle'));
+ }
+
+ return TriOsc;
+}(oscillator_Oscillator);
+/**
+ * Constructor: new p5.SawOsc().
+ * This creates a SawTooth Wave Oscillator and is
+ * equivalent to new p5.Oscillator('sawtooth')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('sawtooth').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SawOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+var SawOsc =
+function (_Oscillator3) {
+ _inherits(SawOsc, _Oscillator3);
+
+ function SawOsc(freq) {
+ oscillator_classCallCheck(this, SawOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(SawOsc).call(this, freq, 'sawtooth'));
+ }
+
+ return SawOsc;
+}(oscillator_Oscillator);
+/**
+ * Constructor: new p5.SqrOsc().
+ * This creates a Square Wave Oscillator and is
+ * equivalent to new p5.Oscillator('square')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('square').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SqrOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+var SqrOsc =
+function (_Oscillator4) {
+ _inherits(SqrOsc, _Oscillator4);
+
+ function SqrOsc(freq) {
+ oscillator_classCallCheck(this, SqrOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(SqrOsc).call(this, freq, 'square'));
+ }
+
+ return SqrOsc;
+}(oscillator_Oscillator);
+
+ var oscillator = (oscillator_Oscillator);
+
+var TimelineSignal = __webpack_require__(7);
+var TimelineSignal_default = __webpack_require__.n(TimelineSignal);
+
+
+
+
+
+
+/**
+ * Envelopes are pre-defined amplitude distributions over time.
+ * Typically, envelopes are used to control the output volume
+ * of an object, a series of fades referred to as Attack, Decay,
+ * Sustain and Release (
+ * ADSR
+ * ). Envelopes can also control other Web Audio Parameters. For example, a p5.Envelope can
+ * control an Oscillator's frequency like this: osc.freq(env).
+ * Use setRange to change the attack/release level.
+ * Use setADSR to change attackTime, decayTime, sustainPercent and releaseTime.
+ * Use the play method to play the entire envelope,
+ * the ramp method for a pingable trigger,
+ * or triggerAttack/
+ * triggerRelease to trigger noteOn/noteOff.
+ *
+ * @example
+ * let t1 = 0.1; // attack time in seconds
+ * let l1 = 0.7; // attack level 0.0 to 1.0
+ * let t2 = 0.3; // decay time in seconds
+ * let l2 = 0.1; // decay level 0.0 to 1.0
+ *
+ * let env;
+ * let triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * cnv.mousePressed(playSound);
+ *
+ * env = new p5.Envelope(t1, l1, t2, l2);
+ * triOsc = new p5.Oscillator('triangle');
+ * }
+ *
+ * function playSound() {
+ * // starting the oscillator ensures that audio is enabled.
+ * triOsc.start();
+ * env.play(triOsc);
+ * }
+ *
+ * let attackTime;
+ * let l1 = 0.7; // attack level 0.0 to 1.0
+ * let t2 = 0.3; // decay time in seconds
+ * let l2 = 0.1; // decay level 0.0 to 1.0
+ * let l3 = 0.2; // release time in seconds
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ *
+ * attackTime = map(mouseX, 0, width, 0.0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 20);
+ * }
+ *
+ * // mouseClick triggers envelope if over canvas
+ * function playSound() {
+ * env.set(attackTime, l1, t2, l2, l3);
+ *
+ * triOsc.start();
+ * env.play(triOsc);
+ * }
+ * setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ * @example
+ *
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackTime = map(mouseX, 0, width, 0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 40);
+ * }
+ *
+ * function playEnv() {
+ * triOsc.start();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.play();
+ * }
+ *
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackLevel = map(mouseY, height, 0, 0, 1.0);
+ * text('attack level: ' + attackLevel, 5, height - 20);
+ * }
+ *
+ * function playEnv() {
+ * triOsc.start();
+ * env.setRange(attackLevel, releaseLevel);
+ * env.play();
+ * }
+ * Play tells the envelope to start acting on a given input.
+ * If the input is a p5.sound object (i.e. AudioIn, Oscillator,
+ * SoundFile), then Envelope will control its output volume.
+ * Envelopes can also be used to control any
+ * Web Audio Audio Param.
+ *
+ * @method play
+ * @for p5.Envelope
+ * @param {Object} unit A p5.sound object or
+ *                      Web Audio Param.
+ * @param {Number} [startTime] time from now (in seconds) at which to play
+ * @param {Number} [sustainTime] time to sustain before releasing the envelope
+ * @example
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
+ * triOsc.start();
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackTime = map(mouseX, 0, width, 0, 1.0);
+ * attackLevel = map(mouseY, height, 0, 0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 40);
+ * text('attack level: ' + attackLevel, 5, height - 20);
+ * }
+ *
+ * function playEnv() {
+ * // ensure that audio is enabled
+ * userStartAudio();
+ *
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ * env.play();
+ * }
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.3;
+ * let releaseTime = 0.4;
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * textAlign(CENTER);
+ * textSize(10);
+ * text('tap to triggerAttack', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(1.0, 0.0);
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(envAttack);
+ * }
+ *
+ * function envAttack() {
+ * background(0, 255, 255);
+ * text('release to release', width/2, height/2);
+ *
+ * // ensures audio is enabled. See also: `userStartAudio`
+ * triOsc.start();
+ *
+ * env.triggerAttack(triOsc);
+ * }
+ *
+ * function mouseReleased() {
+ * background(220);
+ * text('tap to triggerAttack', width/2, height/2);
+ *
+ * env.triggerRelease(triOsc);
+ * }
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.3;
+ * let releaseTime = 0.4;
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * textAlign(CENTER);
+ * textSize(10);
+ * text('tap to triggerAttack', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(1.0, 0.0);
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(envAttack);
+ * }
+ *
+ * function envAttack() {
+ * background(0, 255, 255);
+ * text('release to release', width/2, height/2);
+ *
+ * // ensures audio is enabled. See also: `userStartAudio`
+ * triOsc.start();
+ *
+ * env.triggerAttack(triOsc);
+ * }
+ *
+ * function mouseReleased() {
+ * background(220);
+ * text('tap to triggerAttack', width/2, height/2);
+ *
+ * env.triggerRelease(triOsc);
+ * }
+ * setADSR(attackTime, decayTime)
+ * as
+ * time constants for simple exponential ramps.
+ * If the value is higher than current value, it uses attackTime,
+ * while a decrease uses decayTime.
+ *
+ * @method ramp
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow When to trigger the ramp
+ * @param {Number} v Target value
+ * @param {Number} [v2] Second target value
+ * @example
+ *
+ * let env, osc, amp;
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let attackLevel = 1;
+ * let decayLevel = 0;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * fill(0,255,0);
+ * noStroke();
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime);
+ * osc = new p5.Oscillator();
+ * osc.amp(env);
+ * amp = new p5.Amplitude();
+ *
+ * cnv.mousePressed(triggerRamp);
+ * }
+ *
+ * function triggerRamp() {
+ * // ensures audio is enabled. See also: `userStartAudio`
+ * osc.start();
+ *
+ * env.ramp(osc, 0, attackLevel, decayLevel);
+ * }
+ *
+ * function draw() {
+ * background(20);
+ * text('tap to play', 10, 20);
+ * let h = map(amp.getLevel(), 0, 0.4, 0, height);
+ * rect(0, height, width, -h);
+ * }
+ * p5.Oscillator for a full list of methods.
+ *
+ * @class p5.Pulse
+ * @extends p5.Oscillator
+ * @constructor
+ * @param {Number} [freq] Frequency in oscillations per second (Hz)
+ * @param {Number} [w] Width between the pulses (0 to 1.0,
+ * defaults to 0)
+ * @example
+ *
+ * let pulse;
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(startPulse);
+ * background(220);
+ *
+ * pulse = new p5.Pulse();
+ * pulse.amp(0.5);
+ * pulse.freq(220);
+ * }
+ * function startPulse() {
+ * pulse.start();
+ * pulse.amp(0.5, 0.02);
+ * }
+ * function mouseReleased() {
+ * pulse.amp(0, 0.2);
+ * }
+ * function draw() {
+ * background(220);
+ * text('tap to play', 5, 20, width - 20);
+ * let w = map(mouseX, 0, width, 0, 1);
+ * w = constrain(w, 0, 1);
+ * pulse.width(w);
+ * text('pulse width: ' + w, 5, height - 20);
+ * }
+ * Get audio from an input, i.e. your computer's microphone.
+ *
+ * Turn the mic on/off with the start() and stop() methods. When the mic
+ * is on, its volume can be measured with getLevel or by connecting an
+ * FFT object.
+ *
+ * If you want to hear the AudioIn, use the .connect() method.
+ * AudioIn does not connect to p5.sound output by default to prevent
+ * feedback.
+ *
+ * Note: This uses the getUserMedia/
+ * Stream API, which is not supported by certain browsers. Access in Chrome browser
+ * is limited to localhost and https, but access over http may be limited.
+ *
+ * @class p5.AudioIn
+ * @constructor
+ * @param {Function} [errorCallback] A function to call if there is an error
+ *                                   accessing the AudioIn. For example,
+ *                                   Safari and iOS devices do not
+ *                                   currently allow microphone access.
+ * @example
+ * let mic;
+ *
+ * function setup(){
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(userStartAudio);
+ * textAlign(CENTER);
+ * mic = new p5.AudioIn();
+ * mic.start();
+ * }
+ *
+ * function draw(){
+ * background(0);
+ * fill(255);
+ * text('tap to start', width/2, 20);
+ *
+ * micLevel = mic.getLevel();
+ * let y = height - micLevel * height;
+ * ellipse(width/2, y, 10, 10);
+ * }
+ *
+ * let audioIn;
+ *
+ * function setup(){
+ * text('getting sources...', 0, 20);
+ * audioIn = new p5.AudioIn();
+ * audioIn.getSources(gotSources);
+ * }
+ *
+ * function gotSources(deviceList) {
+ * if (deviceList.length > 0) {
+ * //set the source to the first item in the deviceList array
+ * audioIn.setSource(0);
+ * let currentSource = deviceList[audioIn.currentSource];
+ * text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ * }
+ * }
+ *
+ * let audioIn;
+ *
+ * function setup(){
+ * text('getting sources...', 0, 20);
+ * audioIn = new p5.AudioIn();
+ * audioIn.getSources(gotSources);
+ * }
+ *
+ * function gotSources(deviceList) {
+ * if (deviceList.length > 0) {
+ * //set the source to the first item in the deviceList array
+ * audioIn.setSource(0);
+ * let currentSource = deviceList[audioIn.currentSource];
+ * text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ * }
+ * }
+ * A p5.Filter uses a Web Audio Biquad Filter to filter
+ * the frequency response of an input source. Subclasses
+ * include:
+ * • p5.LowPass:
+ * Allows frequencies below the cutoff frequency to pass through,
+ * and attenuates frequencies above the cutoff.
+ * • p5.HighPass:
+ * The opposite of a lowpass filter.
+ * • p5.BandPass:
+ * Allows a range of frequencies to pass through and attenuates
+ * the frequencies below and above this frequency range.
+ *
+ * The .res() method controls either the width of the
+ * bandpass, or resonance of the low/highpass cutoff frequency.
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.Filter
+ * @extends p5.Effect
+ * @constructor
+ * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
+ * @example
+ *
+ * let fft, noise, filter;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100,100);
+ * cnv.mousePressed(makeNoise);
+ * fill(255, 0, 255);
+ *
+ * filter = new p5.BandPass();
+ * noise = new p5.Noise();
+ * noise.disconnect();
+ * noise.connect(filter);
+ *
+ * fft = new p5.FFT();
+ * }
+ *
+ * function draw() {
+ * background(220);
+ *
+ * // set the BandPass frequency based on mouseX
+ * let freq = map(mouseX, 0, width, 20, 10000);
+ * freq = constrain(freq, 0, 22050);
+ * filter.freq(freq);
+ * // give the filter a narrow band (lower res = wider bandpass)
+ * filter.res(50);
+ *
+ * // draw filtered spectrum
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * for (let i = 0; i < spectrum.length; i++) {
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width/spectrum.length, h);
+ * }
+ * if (!noise.started) {
+ * text('tap here and drag to change frequency', 10, 20, width - 20);
+ * } else {
+ * text('Frequency: ' + round(freq)+'Hz', 20, 20, width - 20);
+ * }
+ * }
+ *
+ * function makeNoise() {
+ * // see also: `userStartAudio()`
+ * noise.start();
+ * noise.amp(0.5, 0.2);
+ * }
+ *
+ * function mouseReleased() {
+ * noise.amp(0, 0.2);
+ * }
+ *
+ * Constructor: new p5.LowPass() Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('lowpass').
+ * See p5.Filter for methods.
+ *
+ * @class p5.LowPass
+ * @constructor
+ * @extends p5.Filter
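+ *
+ * For example, a minimal sketch that darkens a noise source
+ * (audio is enabled by the mouse press):
+ * @example
+ * let noise, lowPass;
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(makeNoise);
+ * noise = new p5.Noise();
+ * lowPass = new p5.LowPass();
+ * noise.disconnect();
+ * noise.connect(lowPass);
+ * // attenuate frequencies above ~800Hz
+ * lowPass.freq(800);
+ * }
+ * function makeNoise() {
+ * noise.start();
+ * noise.amp(0.5, 0.2);
+ * }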
+ */
+
+
+var LowPass =
+function (_Filter) {
+ filter_inherits(LowPass, _Filter);
+
+ function LowPass() {
+ filter_classCallCheck(this, LowPass);
+
+ return filter_possibleConstructorReturn(this, filter_getPrototypeOf(LowPass).call(this, 'lowpass'));
+ }
+
+ return LowPass;
+}(Filter);
+/**
+ * Constructor: new p5.HighPass() Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('highpass').
+ * See p5.Filter for methods.
+ *
+ * @class p5.HighPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+
+var HighPass =
+function (_Filter2) {
+ filter_inherits(HighPass, _Filter2);
+
+ function HighPass() {
+ filter_classCallCheck(this, HighPass);
+
+ return filter_possibleConstructorReturn(this, filter_getPrototypeOf(HighPass).call(this, 'highpass'));
+ }
+
+ return HighPass;
+}(Filter);
+/**
+ * Constructor: new p5.BandPass() Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('bandpass').
+ * See p5.Filter for methods.
+ *
+ * @class p5.BandPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+
+var BandPass =
+function (_Filter3) {
+ filter_inherits(BandPass, _Filter3);
+
+ function BandPass() {
+ filter_classCallCheck(this, BandPass);
+
+ return filter_possibleConstructorReturn(this, filter_getPrototypeOf(BandPass).call(this, 'bandpass'));
+ }
+
+ return BandPass;
+}(Filter);
+
+ var filter = (Filter);
+
+function eqFilter_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { eqFilter_typeof = function _typeof(obj) { return typeof obj; }; } else { eqFilter_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return eqFilter_typeof(obj); }
+
+function eqFilter_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function eqFilter_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function eqFilter_createClass(Constructor, protoProps, staticProps) { if (protoProps) eqFilter_defineProperties(Constructor.prototype, protoProps); if (staticProps) eqFilter_defineProperties(Constructor, staticProps); return Constructor; }
+
+function eqFilter_possibleConstructorReturn(self, call) { if (call && (eqFilter_typeof(call) === "object" || typeof call === "function")) { return call; } return eqFilter_assertThisInitialized(self); }
+
+function eqFilter_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function eqFilter_getPrototypeOf(o) { eqFilter_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return eqFilter_getPrototypeOf(o); }
+
+function eqFilter_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) eqFilter_setPrototypeOf(subClass, superClass); }
+
+function eqFilter_setPrototypeOf(o, p) { eqFilter_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return eqFilter_setPrototypeOf(o, p); }
+
+
+
+/**
+ * EQFilter extends p5.Filter with constraints
+ * necessary for the p5.EQ
+ *
+ * @private
+ */
+
+var eqFilter_EQFilter =
+function (_Filter) {
+ eqFilter_inherits(EQFilter, _Filter);
+
+ function EQFilter(freq, res) {
+ var _this;
+
+ eqFilter_classCallCheck(this, EQFilter);
+
+ _this = eqFilter_possibleConstructorReturn(this, eqFilter_getPrototypeOf(EQFilter).call(this, 'peaking'));
+
+ _this.disconnect();
+
+ _this.set(freq, res);
+
+ _this.biquad.gain.value = 0;
+ delete _this.input;
+ delete _this.output;
+ delete _this._drywet;
+ delete _this.wet;
+ return _this;
+ }
+
+ eqFilter_createClass(EQFilter, [{
+ key: "amp",
+ value: function amp() {
+ console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`');
+ }
+ }, {
+ key: "drywet",
+ value: function drywet() {
+ console.warn('`drywet()` is not available for p5.EQ bands.');
+ }
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ var u = unit || p5.soundOut.input;
+
+ if (this.biquad) {
+ this.biquad.connect(u.input ? u.input : u);
+ } else {
+ this.output.connect(u.input ? u.input : u);
+ }
+ }
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.biquad) {
+ this.biquad.disconnect();
+ }
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+ this.disconnect();
+ delete this.biquad;
+ }
+ }]);
+
+ return EQFilter;
+}(filter);
+
+ var eqFilter = (eqFilter_EQFilter);
+function eq_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { eq_typeof = function _typeof(obj) { return typeof obj; }; } else { eq_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return eq_typeof(obj); }
+
+function eq_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function eq_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function eq_createClass(Constructor, protoProps, staticProps) { if (protoProps) eq_defineProperties(Constructor.prototype, protoProps); if (staticProps) eq_defineProperties(Constructor, staticProps); return Constructor; }
+
+function eq_possibleConstructorReturn(self, call) { if (call && (eq_typeof(call) === "object" || typeof call === "function")) { return call; } return eq_assertThisInitialized(self); }
+
+function eq_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function eq_get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { eq_get = Reflect.get; } else { eq_get = function _get(target, property, receiver) { var base = eq_superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return eq_get(target, property, receiver || target); }
+
+function eq_superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = eq_getPrototypeOf(object); if (object === null) break; } return object; }
+
+function eq_getPrototypeOf(o) { eq_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return eq_getPrototypeOf(o); }
+
+function eq_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) eq_setPrototypeOf(subClass, superClass); }
+
+function eq_setPrototypeOf(o, p) { eq_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return eq_setPrototypeOf(o, p); }
+
+
+
+/**
+ * p5.EQ is an audio effect that performs the function of a multiband
+ * audio equalizer. Equalization is used to adjust the balance of
+ * frequency components of an audio signal. This process is commonly used
+ * in sound production and recording to change the waveform before it reaches
+ * a sound output device. EQ can also be used as an audio effect to create
+ * interesting distortions by filtering out parts of the spectrum. p5.EQ is
+ * built using a chain of Web Audio Biquad Filter Nodes and can be
+ * instantiated with 3 or 8 bands. Bands can be added or removed from
+ * the EQ by directly modifying p5.EQ.bands (the array that stores filters).
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.EQ
+ * @constructor
+ * @extends p5.Effect
+ * @param {Number} [_eqsize] Constructor will accept 3 or 8, defaults to 3
+ * @return {Object} p5.EQ object
+ *
+ * @example
+ *
+ * let eq, soundFile;
+ * let eqBandIndex = 0;
+ * let eqBandNames = ['lows', 'mids', 'highs'];
+ *
+ * function preload() {
+ * soundFormats('mp3', 'ogg');
+ * soundFile = loadSound('assets/beat');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(toggleSound);
+ *
+ * eq = new p5.EQ(eqBandNames.length);
+ * soundFile.disconnect();
+ * eq.process(soundFile);
+ * }
+ *
+ * function draw() {
+ * background(30);
+ * noStroke();
+ * fill(255);
+ * textAlign(CENTER);
+ * text('filtering ', 50, 25);
+ *
+ * fill(255, 40, 255);
+ * textSize(26);
+ * text(eqBandNames[eqBandIndex], 50, 55);
+ *
+ * fill(255);
+ * textSize(9);
+ *
+ * if (!soundFile.isPlaying()) {
+ * text('tap to play', 50, 80);
+ * } else {
+ * text('tap to filter next band', 50, 80);
+ * }
+ * }
+ *
+ * function toggleSound() {
+ * if (!soundFile.isPlaying()) {
+ * soundFile.play();
+ * } else {
+ * eqBandIndex = (eqBandIndex + 1) % eq.bands.length;
+ * }
+ *
+ * for (let i = 0; i < eq.bands.length; i++) {
+ * eq.bands[i].gain(0);
+ * }
+ * // filter the band we want to filter
+ * eq.bands[eqBandIndex].gain(-40);
+ * }
+ * p5.Panner3D - Constructs a Spatial Panner
+ * p5.Listener3D - Constructs a Spatial Listener,
+ * accessible as p5.soundOut.audiocontext.listener
+ *
+ * @class p5.Panner3D
+ * @constructor
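+ *
+ * For example, a minimal sketch (assuming mySound was loaded in
+ * preload and audio is enabled):
+ * @example
+ * let panner3d;
+ * function setup() {
+ * panner3d = new p5.Panner3D();
+ * mySound.disconnect();
+ * panner3d.process(mySound);
+ * // place the source to the left of, and in front of, the listener
+ * panner3d.set(-1, 0, 5);
+ * mySound.play();
+ * }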
+ */
+
+var Panner3D =
+function (_Effect) {
+ panner3d_inherits(Panner3D, _Effect);
+
+ function Panner3D() {
+ var _this;
+
+ panner3d_classCallCheck(this, Panner3D);
+
+ _this = panner3d_possibleConstructorReturn(this, panner3d_getPrototypeOf(Panner3D).call(this));
+ /**
+ *
+ * Web Audio Spatial Panner Node
+ *
+ * Properties include
+ * let osc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * osc = new p5.Oscillator('square');
+ * osc.amp(0.5);
+ * delay = new p5.Delay();
+ *
+ * // delay.process() accepts 4 parameters:
+ * // source, delayTime (in seconds), feedback, filter frequency
+ * delay.process(osc, 0.12, .7, 2300);
+ *
+ * cnv.mousePressed(oscStart);
+ * }
+ *
+ * function oscStart() {
+ * osc.start();
+ * }
+ *
+ * function mouseReleased() {
+ * osc.stop();
+ * }
+ *
+ * let soundFile, reverb;
+ * function preload() {
+ * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ *
+ * reverb = new p5.Reverb();
+ * soundFile.disconnect(); // so we'll only hear reverb...
+ *
+ * // connect soundFile to reverb, process w/
+ * // 3 second reverbTime, decayRate of 2%
+ * reverb.process(soundFile, 3, 2);
+ * }
+ *
+ * function draw() {
+ * let dryWet = constrain(map(mouseX, 0, width, 0, 1), 0, 1);
+ * // 1 = all reverb, 0 = no reverb
+ * reverb.drywet(dryWet);
+ *
+ * background(220);
+ * text('tap to play', 10, 20);
+ * text('dry/wet: ' + round(dryWet * 100) + '%', 10, height - 20);
+ * }
+ *
+ * function playSound() {
+ * soundFile.play();
+ * }
+ * p5.Convolver extends p5.Reverb. It can emulate the sound of real
+ * physical spaces through a process called
+ * convolution.
+ *
+ * Convolution multiplies any audio input by an "impulse response"
+ * to simulate the dispersion of sound over time. The impulse response is
+ * generated from an audio file that you provide. One way to
+ * generate an impulse response is to pop a balloon in a reverberant space
+ * and record the echo. Convolution can also be used to experiment with
+ * sound.
+ *
+ * Use the method createConvolver(path) to instantiate a
+ * p5.Convolver with a path to your impulse response audio file.
+ * let cVerb, sound;
+ * function preload() {
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
+ *
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
+ *
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * // disconnect from main output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ * }
+ *
+ * function playSound() {
+ * sound.play();
+ * }
+ * If you load multiple impulse files using the .addImpulse method,
+ * they will be stored as Objects in this Array. Toggle between them
+ * with the toggleImpulse(id) method.
+ *
+ * @property {Array} impulses
+ * @for p5.Convolver
+ */
+
+
+ _this2.impulses = [];
+ _this2.set = null;
+ return _this2;
+ }
+ /**
+ * Private method to load a buffer as an Impulse Response,
+ * assign it to the convolverNode, and add to the Array of .impulses.
+ *
+ * @param {String} path
+ * @param {Function} callback
+ * @param {Function} errorCallback
+ * @private
+ */
+
+
+ reverb_createClass(Convolver, [{
+ key: "_loadBuffer",
+ value: function _loadBuffer(_path, callback, errorCallback) {
+ var path = p5.prototype._checkFileFormats(_path);
+
+ var self = this;
+ var errorTrace = new Error().stack;
+ var ac = Object(audiocontext["b" ])();
+ var request = new XMLHttpRequest();
+ request.open('GET', path, true);
+ request.responseType = 'arraybuffer';
+
+ request.onload = function () {
+ if (request.status === 200) {
+ ac.decodeAudioData(request.response, function (buff) {
+ var buffer = {};
+ var chunks = path.split('/');
+ buffer.name = chunks[chunks.length - 1];
+ buffer.audioBuffer = buff;
+ self.impulses.push(buffer);
+
+ self._setBuffer(buffer.audioBuffer);
+
+ if (callback) {
+ callback(buffer);
+ }
+ },
+ function () {
+ var err = new errorHandler('decodeAudioData', errorTrace, self.url);
+ var msg = 'AudioContext error at decodeAudioData for ' + self.url;
+
+ if (errorCallback) {
+ err.msg = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ });
+ }
+ else {
+ var err = new errorHandler('loadConvolver', errorTrace, self.url);
+ var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ }
+ };
+
+
+ request.onerror = function () {
+ var err = new errorHandler('loadConvolver', errorTrace, self.url);
+ var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ };
+
+ request.send();
+ }
+ /**
+ * Connect a source to the convolver.
+ *
+ * @method process
+ * @for p5.Convolver
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ * @example
+ *
+ * let cVerb, sound;
+ * function preload() {
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
+ *
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
+ *
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * // disconnect from main output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ * }
+ *
+ * function playSound() {
+ * sound.play();
+ * }
+ *
+   * Load and assign a new Impulse Response to the p5.Convolver.
+   * The impulse is added to the
+   * .impulses array. Previous
+ * impulses can be accessed with the .toggleImpulse(id)
+ * method.
+ *
+ * @method addImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
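+   *
+   * // e.g. (assuming cVerb was created as in the examples above;
+   * // 'small-plate.mp3' is one of the impulse files those examples suggest):
+   * // cVerb.addImpulse('assets/small-plate.mp3');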
+ */
+
+ }, {
+ key: "addImpulse",
+ value: function addImpulse(path, callback, errorCallback) {
+      if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+ }
+
+ this._loadBuffer(path, callback, errorCallback);
+ }
+ /**
+ * Similar to .addImpulse, except that the .impulses
+ * Array is reset to save memory. A new .impulses
+ * array is created with this impulse as the only item.
+ *
+ * @method resetImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
+ */
+
+ }, {
+ key: "resetImpulse",
+ value: function resetImpulse(path, callback, errorCallback) {
+      if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+ }
+
+ this.impulses = [];
+
+ this._loadBuffer(path, callback, errorCallback);
+ }
+ /**
+ * If you have used .addImpulse() to add multiple impulses
+ * to a p5.Convolver, then you can use this method to toggle between
+ * the items in the .impulses Array. Accepts a parameter
+ * to identify which impulse you wish to use, identified either by its
+ * original filename (String) or by its position in the .impulses
+   * Array (Number). Each impulse is stored as an object with two
+   * attributes: an .audioBuffer (type:
+ * Web Audio
+ * AudioBuffer) and a .name, a String that corresponds
+ * with the original filename.
+ *
+ * @method toggleImpulse
+ * @for p5.Convolver
+ * @param {String|Number} id Identify the impulse by its original filename
+ * (String), or by its position in the
+ * .impulses Array (Number).
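+   *
+   * // e.g. toggle by position in the .impulses Array, or by filename
+   * // (assuming a second impulse was added with addImpulse):
+   * // cVerb.toggleImpulse(1);
+   * // cVerb.toggleImpulse('small-plate.mp3');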
+ */
+
+ }, {
+ key: "toggleImpulse",
+ value: function toggleImpulse(id) {
+ if (typeof id === 'number' && id < this.impulses.length) {
+ this._setBuffer(this.impulses[id].audioBuffer);
+ }
+
+ if (typeof id === 'string') {
+ for (var i = 0; i < this.impulses.length; i++) {
+ if (this.impulses[i].name === id) {
+ this._setBuffer(this.impulses[i].audioBuffer);
+
+ break;
+ }
+ }
+ }
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ reverb_get(reverb_getPrototypeOf(Convolver.prototype), "dispose", this).call(this);
+
+
+ for (var i in this.impulses) {
+ if (this.impulses[i]) {
+ this.impulses[i] = null;
+ }
+ }
+ }
+ }]);
+
+ return Convolver;
+}(Reverb);
+/**
+ * Create a p5.Convolver. Accepts a path to a soundfile
+ * that will be used to generate an impulse response.
+ *
+ * @method createConvolver
+ * @for p5
+ * @param {String} path path to a sound file
+ * @param {Function} [callback] function to call if loading is successful.
+ * The object will be passed in as the argument
+ * to the callback function.
+ * @param {Function} [errorCallback] function to call if loading is not successful.
+ * A custom error will be passed in as the argument
+ * to the callback function.
+ * @return {p5.Convolver}
+ * @example
+ *
+ * let cVerb, sound;
+ * function preload() {
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
+ *
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
+ *
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * // disconnect from main output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ * }
+ *
+ * function playSound() {
+ * sound.play();
+ * }
+ * A phrase is a pattern of musical events over time, i.e.
+ * a series of notes and rests.
+ *
+ * Phrases must be added to a p5.Part for playback, and
+ * each part can play multiple phrases at the same time.
+ * For example, one Phrase might be a kick drum, another
+ * could be a snare, and another could be the bassline.
+ *
+ * The first parameter is a name so that the phrase can be
+ * modified or deleted later. The callback is a function that
+ * this phrase will call at every step; for example, it might be
+ * called playNote(value){}. The array determines
+ * which value is passed into the callback at each step of the
+ * phrase. It can contain numbers, an object with multiple numbers,
+ * or a zero (0), which indicates a rest, so the callback
+ * won't be called.
+ * let mySound, myPhrase, myPart;
+ * let pattern = [1,0,0,2,0,2,0,0];
+ *
+ * function preload() {
+ * mySound = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playMyPart);
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * myPhrase = new p5.Phrase('bbox', onEachStep, pattern);
+ * myPart = new p5.Part();
+ * myPart.addPhrase(myPhrase);
+ * myPart.setBPM(60);
+ * }
+ *
+ * function onEachStep(time, playbackRate) {
+ * mySound.rate(playbackRate);
+ * mySound.play(time);
+ * }
+ *
+ * function playMyPart() {
+ * userStartAudio();
+ * myPart.start();
+ * }
+ * A p5.Part plays back one or more p5.Phrases. Instantiate a part
+ * with steps and tatums. By default, each step represents a 1/16th note.
+ *
+ * See p5.Phrase for more about musical timing.
+ *
+ * @class p5.Part
+ * @constructor
+ * @param {Number} [steps] Steps in the part
+ * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for
+ *                          a quarter note (default is 1/16, a sixteenth note)
+ * @example
+ *
+ * let box, drum, myPart;
+ * let boxPat = [1,0,0,2,0,2,0,0];
+ * let drumPat = [0,1,1,0,2,0,1,0];
+ *
+ * function preload() {
+ * box = loadSound('assets/beatbox.mp3');
+ * drum = loadSound('assets/drum.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playMyPart);
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * let boxPhrase = new p5.Phrase('box', playBox, boxPat);
+ * let drumPhrase = new p5.Phrase('drum', playDrum, drumPat);
+ * myPart = new p5.Part();
+ * myPart.addPhrase(boxPhrase);
+ * myPart.addPhrase(drumPhrase);
+ * myPart.setBPM(60);
+ * }
+ *
+ * function playBox(time, playbackRate) {
+ * box.rate(playbackRate);
+ * box.play(time);
+ * }
+ *
+ * function playDrum(time, playbackRate) {
+ * drum.rate(playbackRate);
+ * drum.play(time);
+ * }
+ *
+ * function playMyPart() {
+ * userStartAudio();
+ *
+ * myPart.start();
+ * }
+ * new p5.Score(a, a, b, a, c)
+ *
+ * @class p5.Score
+ * @constructor
+ * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.
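+ *
+ * // A minimal sketch (assumes partA and partB are p5.Part objects
+ * // built as in the p5.Part example above):
+ * // let myScore = new p5.Score(partA, partA, partB);
+ * // myScore.setBPM(60);
+ * // myScore.start();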
+ */
+
+
+var Score =
+function () {
+ function Score() {
+ looper_classCallCheck(this, Score);
+
+ this.parts = [];
+    this.currentPart = 0;
+ var thisScore = this;
+
+    for (var i = 0; i < arguments.length; i++) {
+      this.parts[i] = arguments[i];
+      this.parts[i].nextPart = this.parts[i + 1];
+
+      // close over the numeric index so each handler resets its own part
+      this.parts[i].onended = (function (partIndex) {
+        return function () {
+          thisScore.resetPart(partIndex);
+          playNextPart(thisScore);
+        };
+      })(i);
+    }
+
+ this.looping = false;
+ }
+
+ looper_createClass(Score, [{
+ key: "onended",
+ value: function onended() {
+ if (this.looping) {
+ this.parts[0].start();
+ } else {
+        var thisScore = this;
+
+        this.parts[this.parts.length - 1].onended = function () {
+          thisScore.stop();
+          thisScore.resetParts();
+        };
+ }
+
+ this.currentPart = 0;
+ }
+ /**
+ * Start playback of the score.
+ *
+ * @method start
+ * @for p5.Score
+ */
+
+ }, {
+ key: "start",
+ value: function start() {
+ this.parts[this.currentPart].start();
+ this.scoreStep = 0;
+ }
+ /**
+ * Stop playback of the score.
+ *
+ * @method stop
+ * @for p5.Score
+ */
+
+ }, {
+ key: "stop",
+ value: function stop() {
+ this.parts[this.currentPart].stop();
+ this.currentPart = 0;
+ this.scoreStep = 0;
+ }
+ /**
+ * Pause playback of the score.
+ *
+ * @method pause
+ * @for p5.Score
+ */
+
+ }, {
+ key: "pause",
+ value: function pause() {
+ this.parts[this.currentPart].stop();
+ }
+ /**
+ * Loop playback of the score.
+ *
+ * @method loop
+ * @for p5.Score
+ */
+
+ }, {
+ key: "loop",
+ value: function loop() {
+ this.looping = true;
+ this.start();
+ }
+ /**
+ * Stop looping playback of the score. If it
+ * is currently playing, this will go into effect
+ * after the current round of playback completes.
+ *
+ * @method noLoop
+ * @for p5.Score
+ */
+
+ }, {
+ key: "noLoop",
+ value: function noLoop() {
+ this.looping = false;
+ }
+ }, {
+ key: "resetParts",
+ value: function resetParts() {
+ var self = this;
+      this.parts.forEach(function (part, i) {
+        self.resetPart(i);
+      });
+ }
+ }, {
+ key: "resetPart",
+ value: function resetPart(i) {
+ this.parts[i].stop();
+ this.parts[i].partStep = 0;
+
+ for (var p in this.parts[i].phrases) {
+ if (this.parts[i]) {
+ this.parts[i].phrases[p].phraseStep = 0;
+ }
+ }
+ }
+ /**
+ * Set the tempo for all parts in the score
+ *
+ * @method setBPM
+ * @for p5.Score
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} rampTime Seconds from now
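+   *
+   * // e.g. ramp the whole score (hypothetical myScore) to 90 BPM
+   * // over 2 seconds: myScore.setBPM(90, 2);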
+ */
+
+ }, {
+ key: "setBPM",
+ value: function setBPM(bpm, rampTime) {
+ for (var i in this.parts) {
+ if (this.parts[i]) {
+ this.parts[i].setBPM(bpm, rampTime);
+ }
+ }
+ }
+ }]);
+
+ return Score;
+}();
+
+function playNextPart(aScore) {
+ aScore.currentPart++;
+
+ if (aScore.currentPart >= aScore.parts.length) {
+ aScore.scoreStep = 0;
+ aScore.onended();
+ } else {
+ aScore.scoreStep = 0;
+ aScore.parts[aScore.currentPart - 1].stop();
+ aScore.parts[aScore.currentPart].start();
+ }
+}
+
+
+function soundLoop_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function soundLoop_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function soundLoop_createClass(Constructor, protoProps, staticProps) { if (protoProps) soundLoop_defineProperties(Constructor.prototype, protoProps); if (staticProps) soundLoop_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+/**
+ * SoundLoop
+ *
+ * @class p5.SoundLoop
+ * @constructor
+ *
+ * @param {Function} callback this function will be called on each iteration of the loop
+ * @param {Number|String} [interval] amount of time (if a number) or beats (if a string, following Tone.Time convention) for each iteration of the loop. Defaults to 1 second.
+ *
+ * @example
+ *
+ * let synth, soundLoop;
+ * let notePattern = [60, 62, 64, 67, 69, 72];
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * colorMode(HSB);
+ * background(0, 0, 86);
+ * text('tap to start/stop', 10, 20);
+ *
+ * //the looper's callback is passed the timeFromNow
+ * //this value should be used as a reference point from
+ * //which to schedule sounds
+ * let intervalInSeconds = 0.2;
+ * soundLoop = new p5.SoundLoop(onSoundLoop, intervalInSeconds);
+ *
+ * synth = new p5.MonoSynth();
+ * }
+ *
+ * function canvasPressed() {
+ * // ensure audio is enabled
+ * userStartAudio();
+ *
+ * if (soundLoop.isPlaying) {
+ * soundLoop.stop();
+ * } else {
+ * // start the loop
+ * soundLoop.start();
+ * }
+ * }
+ *
+ * function onSoundLoop(timeFromNow) {
+ * let noteIndex = (soundLoop.iterations - 1) % notePattern.length;
+ * let note = midiToFreq(notePattern[noteIndex]);
+ * synth.play(note, 0.5, timeFromNow);
+ * background(noteIndex * 360 / notePattern.length, 50, 100);
+ * }
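+ *
+ * // The interval can also be a musical time string following the
+ * // Tone.Time convention noted above, e.g. every eighth note
+ * // (an illustrative assumption):
+ * // soundLoop = new p5.SoundLoop(onSoundLoop, '8n');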
+ * PeakDetect works in conjunction with p5.FFT to
+ * look for onsets in some or all of the frequency spectrum.
+ *
+ * To use p5.PeakDetect, call update in the draw loop
+ * and pass in a p5.FFT object.
+ *
+ * You can listen for a specific part of the frequency spectrum by
+ * setting the range between freq1 and freq2.
+ *
+ * threshold is the threshold for detecting a peak,
+ * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
+ * as 1.0.
+ * The update method is meant to be run in the draw loop, and
+ * framesPerPeak determines how many loops must pass before
+ * another peak can be detected.
+ * For example, if the frameRate() = 60, you could detect the beat of a
+ * 120 beat-per-minute song with this equation:
+ * framesPerPeak = 60 / (estimatedBPM / 60 );
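+ * (e.g. at frameRate() = 60 and estimatedBPM = 120: framesPerPeak = 60 / 2 = 30)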
+ *
+ * Based on an example contributed by @b2renger, and a simple beat detection
+ * explanation by Felix Turner.
+ *
+ * @class p5.PeakDetect
+ * @constructor
+ * @param {Number} [freq1] lowFrequency - defaults to 20Hz
+ * @param {Number} [freq2] highFrequency - defaults to 20000 Hz
+ * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1
+ *                             scaled logarithmically where 0.1 is 1/2 the loudness
+ *                             of 1.0. Defaults to 0.35.
+ * @param {Number} [framesPerPeak] Defaults to 20.
+ * @example
+ *
+ * var cnv, soundFile, fft, peakDetect;
+ * var ellipseWidth = 10;
+ *
+ * function preload() {
+ * soundFile = loadSound('assets/beat.mp3');
+ * }
+ *
+ * function setup() {
+ * background(0);
+ * noStroke();
+ * fill(255);
+ * textAlign(CENTER);
+ *
+ * // p5.PeakDetect requires a p5.FFT
+ * fft = new p5.FFT();
+ * peakDetect = new p5.PeakDetect();
+ * }
+ *
+ * function draw() {
+ * background(0);
+ * text('click to play/pause', width/2, height/2);
+ *
+ * // peakDetect accepts an fft post-analysis
+ * fft.analyze();
+ * peakDetect.update(fft);
+ *
+ * if ( peakDetect.isDetected ) {
+ * ellipseWidth = 50;
+ * } else {
+ * ellipseWidth *= 0.95;
+ * }
+ *
+ * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
+ * }
+ *
+ * // toggle play/stop when canvas is clicked
+ * function mouseClicked() {
+ * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
+ * if (soundFile.isPlaying() ) {
+ * soundFile.stop();
+ * } else {
+ * soundFile.play();
+ * }
+ * }
+ * }
+ *
+ * var cnv, soundFile, fft, peakDetect;
+ * var ellipseWidth = 0;
+ *
+ * function preload() {
+ * soundFile = loadSound('assets/beat.mp3');
+ * }
+ *
+ * function setup() {
+ * cnv = createCanvas(100,100);
+ * textAlign(CENTER);
+ *
+ * fft = new p5.FFT();
+ * peakDetect = new p5.PeakDetect();
+ *
+ * setupSound();
+ *
+ * // when a beat is detected, call triggerBeat()
+ * peakDetect.onPeak(triggerBeat);
+ * }
+ *
+ * function draw() {
+ * background(0);
+ * fill(255);
+ * text('click to play', width/2, height/2);
+ *
+ * fft.analyze();
+ * peakDetect.update(fft);
+ *
+ * ellipseWidth *= 0.95;
+ * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
+ * }
+ *
+ * // this function is called by peakDetect.onPeak
+ * function triggerBeat() {
+ * ellipseWidth = 50;
+ * }
+ *
+ * // mouseclick starts/stops sound
+ * function setupSound() {
+ * cnv.mouseClicked( function() {
+ * if (soundFile.isPlaying() ) {
+ * soundFile.stop();
+ * } else {
+ * soundFile.play();
+ * }
+ * });
+ * }
+ * Record sounds for playback and/or to save as a .wav file.
+ * The p5.SoundRecorder records all sound output from your sketch,
+ * or can be assigned a specific source with setInput().
+ *
+ * The record() method accepts a p5.SoundFile as a parameter.
+ * When playback is stopped (either after the given amount of time,
+ * or with the stop() method), the p5.SoundRecorder will send its
+ * recording to that p5.SoundFile for playback.
+ *
+ * @class p5.SoundRecorder
+ * @constructor
+ * @example
+ *
+ * let mic, recorder, soundFile;
+ * let state = 0;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ *
+ * // create an audio in
+ * mic = new p5.AudioIn();
+ *
+ * // prompts user to enable their browser mic
+ * mic.start();
+ *
+ * // create a sound recorder
+ * recorder = new p5.SoundRecorder();
+ *
+ * // connect the mic to the recorder
+ * recorder.setInput(mic);
+ *
+ * // this sound file will be used to
+ * // playback & save the recording
+ * soundFile = new p5.SoundFile();
+ *
+ * text('tap to record', width/2, height/2);
+ * }
+ *
+ * function canvasPressed() {
+ * // ensure audio is enabled
+ * userStartAudio();
+ *
+ * // make sure user enabled the mic
+ * if (state === 0 && mic.enabled) {
+ *
+ * // record to our p5.SoundFile
+ * recorder.record(soundFile);
+ *
+ * background(255,0,0);
+ * text('Recording!', width/2, height/2);
+ * state++;
+ * }
+ * else if (state === 1) {
+ * background(0,255,0);
+ *
+ * // stop recorder and
+ * // send result to soundFile
+ * recorder.stop();
+ *
+ * text('Done! Tap to play and download', width/2, height/2, width - 20);
+ * state++;
+ * }
+ *
+ * else if (state === 2) {
+ * soundFile.play(); // play the result!
+ * save(soundFile, 'mySound.wav');
+ * state++;
+ * }
+ * }
+ *
+ *
+ * // load two soundfiles and crossfade between them
+ * let sound1,sound2;
+ * let sound1Gain, sound2Gain, mixGain;
+ * function preload(){
+ * soundFormats('ogg', 'mp3');
+ * sound1 = loadSound('assets/Damscray_-_Dancing_Tiger_01');
+ * sound2 = loadSound('assets/beat');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(startSound);
+ * // create a 'mix' gain bus to which we will connect both soundfiles
+ * mixGain = new p5.Gain();
+ * mixGain.connect();
+ * sound1.disconnect(); // disconnect from p5 output
+ * sound1Gain = new p5.Gain(); // setup a gain node
+ * sound1Gain.setInput(sound1); // connect the first sound to its input
+ * sound1Gain.connect(mixGain); // connect its output to the final mix bus
+ * sound2.disconnect();
+ * sound2Gain = new p5.Gain();
+ * sound2Gain.setInput(sound2);
+ * sound2Gain.connect(mixGain);
+ * }
+ * function startSound() {
+ * sound1.loop();
+ * sound2.loop();
+ * loop();
+ * }
+ * function mouseReleased() {
+ * sound1.stop();
+ * sound2.stop();
+ * }
+ * function draw(){
+ * background(220);
+ * textAlign(CENTER);
+ * textSize(11);
+ * fill(0);
+ * if (!sound1.isPlaying()) {
+ * text('tap and drag to play', width/2, height/2);
+ * return;
+ * }
+ * // map the horizontal position of the mouse to values
+ * // usable for volume control of sound1
+ * var sound1Volume = constrain(map(mouseX,width,0,0,1), 0, 1);
+ * var sound2Volume = 1-sound1Volume;
+ * sound1Gain.amp(sound1Volume);
+ * sound2Gain.amp(sound2Volume);
+ * // map the vertical position of the mouse to values
+ * // usable for output volume control
+ * var outputVolume = constrain(map(mouseY,height,0,0,1), 0, 1);
+ * mixGain.amp(outputVolume);
+ * text('output', width/2, height - outputVolume * height * 0.9);
+ * fill(255, 0, 255);
+ * textAlign(LEFT);
+ * text('sound1', 5, height - sound1Volume * height * 0.9);
+ * textAlign(RIGHT);
+ * text('sound2', width - 5, height - sound2Volume * height * 0.9);
+ * }
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * let note = random(['Fb4', 'G4']);
+ * // note velocity (volume, from 0 to 1)
+ * let velocity = random();
+ * // time from now (in seconds)
+ * let time = 0;
+ * // note duration (in seconds)
+ * let dur = 1/6;
+ *
+ * monoSynth.play(note, velocity, time, dur);
+ * }
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(triggerAttack);
+ * background(220);
+ * text('tap here for attack, let go to release', 5, 20, width - 20);
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function triggerAttack() {
+ * userStartAudio();
+ *
+ * monoSynth.triggerAttack("E3");
+ * }
+ *
+ * function mouseReleased() {
+ * monoSynth.triggerRelease();
+ * }
+ * setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
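+   *
+   * // e.g. a short plucky envelope (illustrative values; times are in
+   * // seconds and susRatio is a ratio from 0 to 1):
+   * // monoSynth.setADSR(0.01, 0.2, 0.1, 0.4);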
+ */
+
+ }, {
+ key: "setADSR",
+ value: function setADSR(attack, decay, sustain, release) {
+ this.env.setADSR(attack, decay, sustain, release);
+ }
+ /**
+   * Set the amplitude (volume) of the MonoSynth and return the new value.
+ * @method amp
+ * @for p5.MonoSynth
+ * @param {Number} vol desired volume
+ * @param {Number} [rampTime] Time to reach new volume
+ * @return {Number} new volume value
+ */
+
+ }, {
+ key: "amp",
+ value: function amp(vol, rampTime) {
+ var t = rampTime || 0;
+
+ if (typeof vol !== 'undefined') {
+ this.oscillator.amp(vol, t);
+ }
+
+ return this.oscillator.amp().value;
+ }
+ /**
+ * Connect to a p5.sound / Web Audio object.
+ *
+ * @method connect
+ * @for p5.MonoSynth
+ * @param {Object} unit A p5.sound or Web Audio object
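+   *
+   * // e.g. route the synth into an effect instead of the main output
+   * // (assuming reverb is a p5.Reverb): monoSynth.connect(reverb);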
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ var u = unit || main.input;
+ this.output.connect(u.input ? u.input : u);
+ }
+ /**
+ * Disconnect all outputs
+ *
+ * @method disconnect
+ * @for p5.MonoSynth
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.output) {
+ this.output.disconnect();
+ }
+ }
+ /**
+ * Get rid of the MonoSynth and free up its resources / memory.
+ *
+ * @method dispose
+ * @for p5.MonoSynth
+ */
+
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ monosynth_get(monosynth_getPrototypeOf(MonoSynth.prototype), "dispose", this).call(this);
+
+ if (this.env) {
+ this.env.dispose();
+ }
+
+ if (this.oscillator) {
+ this.oscillator.dispose();
+ }
+ }
+ }]);
+
+ return MonoSynth;
+}(audioVoice_0);
+
+ var monosynth = (monosynth_MonoSynth);
+function onsetDetect_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function onsetDetect_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function onsetDetect_createClass(Constructor, protoProps, staticProps) { if (protoProps) onsetDetect_defineProperties(Constructor.prototype, protoProps); if (staticProps) onsetDetect_defineProperties(Constructor, staticProps); return Constructor; }
+
+/**
+ * Listen for onsets (a sharp increase in volume) within a given
+ * frequency range.
+ *
+ * @class p5.OnsetDetect
+ * @constructor
+ * @param {Number} freqLow Low frequency
+ * @param {Number} freqHigh High frequency
+ * @param {Number} threshold Amplitude threshold between 0 (no energy) and 1 (maximum)
+ * @param {Function} callback Function to call when an onset is detected
+ */
+var OnsetDetect =
+function () {
+ function OnsetDetect(freqLow, freqHigh, threshold, callback) {
+ onsetDetect_classCallCheck(this, OnsetDetect);
+
+ this.isDetected = false;
+ this.freqLow = freqLow;
+ this.freqHigh = freqHigh;
+    this.threshold = threshold;
+ this.energy = 0;
+ this.penergy = 0;
+
+ this.sensitivity = 500;
+ this.callback = callback;
+ }
+
+
+ onsetDetect_createClass(OnsetDetect, [{
+ key: "update",
+ value: function update(fftObject, callback) {
+ this.energy = fftObject.getEnergy(this.freqLow, this.freqHigh) / 255;
+
+ if (this.isDetected === false) {
+        if (this.energy - this.penergy > this.threshold) {
+ this.isDetected = true;
+
+ if (this.callback) {
+ this.callback(this.energy);
+ } else if (callback) {
+ callback(this.energy);
+ }
+
+ var self = this;
+ setTimeout(function () {
+ self.isDetected = false;
+ }, this.sensitivity);
+ }
+ }
+
+ this.penergy = this.energy;
+ }
+ }]);
+
+ return OnsetDetect;
+}();
+
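+// A minimal usage sketch (hypothetical names; assumes a p5.FFT instance
+// `fft` that is analyzed each frame before update() is called):
+//
+//   let onset = new p5.OnsetDetect(40, 120, 0.025, function (energy) {
+//     console.log('onset! energy: ' + energy);
+//   });
+//
+//   function draw() {
+//     fft.analyze();
+//     onset.update(fft);
+//   }
+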
+ var onsetDetect = (OnsetDetect);
+function polysynth_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function polysynth_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function polysynth_createClass(Constructor, protoProps, staticProps) { if (protoProps) polysynth_defineProperties(Constructor.prototype, protoProps); if (staticProps) polysynth_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+
+/**
+ * An AudioVoice is used as a single voice for sound synthesis.
+ * The PolySynth class holds an array of AudioVoices and deals
+ * with voice allocation, setting notes to be played, and
+ * setting parameters.
+ *
+ * @class p5.PolySynth
+ * @constructor
+ *
+ * @param {Function} [synthVoice] A monophonic synth voice inheriting
+ * the AudioVoice class. Defaults to p5.MonoSynth
+ * @param {Number} [maxVoices] Number of voices, defaults to 8
+ * @example
+ *
+ * let polySynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * text('click to play', 20, 20);
+ *
+ * polySynth = new p5.PolySynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * // note duration (in seconds)
+ * let dur = 1.5;
+ *
+ * // time from now (in seconds)
+ * let time = 0;
+ *
+ * // velocity (volume, from 0 to 1)
+ * let vel = 0.1;
+ *
+ * // notes can overlap with each other
+ * polySynth.play('G2', vel, 0, dur);
+ * polySynth.play('C3', vel, time += 1/3, dur);
+ * polySynth.play('G3', vel, time += 1/3, dur);
+ * }
+ * setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ **/
+
+ }, {
+ key: "noteADSR",
+ value: function noteADSR(note, a, d, s, r) {
+ var timeFromNow = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : 0;
+ var now = main.audiocontext.currentTime;
+ var t = now + timeFromNow;
+ this.audiovoices[this.notes[note].getValueAtTime(t)].setADSR(a, d, s, r);
+ }
+ /**
+   * Set the PolySynth's global envelope. This method modifies the envelopes of each
+ * monosynth so that all notes are played with this envelope.
+ *
+ * @method setADSR
+ * @for p5.PolySynth
+   * @param {Number} [attackTime] Time (in seconds) before envelope
+   *                              reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
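+   *
+   * // e.g. give every voice a slow pad-like envelope
+   * // (illustrative values): polySynth.setADSR(0.5, 0.5, 0.6, 1.5);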
+ **/
+
+ }, {
+ key: "setADSR",
+ value: function setADSR(a, d, s, r) {
+ this.audiovoices.forEach(function (voice) {
+ voice.setADSR(a, d, s, r);
+ });
+ }
+ /**
+ * Trigger the Attack, and Decay portion of a MonoSynth.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go.
+ *
+ * @method noteAttack
+ * @for p5.PolySynth
+ * @param {Number} [note] midi note on which attack should be triggered.
+   * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
+ * @param {Number} [secondsFromNow] time from now (in seconds)
+ * @example
+ *
+ * let polySynth = new p5.PolySynth();
+ * let pitches = ['G', 'D', 'G', 'C'];
+ * let octaves = [2, 3, 4];
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playChord);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function playChord() {
+ * userStartAudio();
+ *
+ * // play a chord: multiple notes at the same time
+ * for (let i = 0; i < 4; i++) {
+ * let note = random(pitches) + random(octaves);
+ * polySynth.noteAttack(note, 0.1);
+ * }
+ * }
+ *
+ * function mouseReleased() {
+ * // release all voices
+ * polySynth.noteRelease();
+ * }