diff --git a/libraries/p5/README.txt b/libraries/p5/README.txt
new file mode 100644
index 0000000..b2116cd
--- /dev/null
+++ b/libraries/p5/README.txt
@@ -0,0 +1,66 @@
+# Welcome to p5.js
+
+You have downloaded the complete p5.js library ZIP file, yay!
+
+# Contents of the p5 folder
+
+* p5.js file
+* p5.min.js file
+* addons folder
+ * p5.sound.js
+ * p5.sound.min.js
+* empty-example folder
+ * index.html
+ * p5.js
+ * p5.sound.js
+ * sketch.js
+
+## p5.js
+
+This file stores the complete p5.js library. It is easy for humans to read, so feel free to open it and explore its contents. It also has a friendly error system that helps new programmers with common errors.
+
+## p5.min.js
+
+This file is a minified version of the p5.js file: it has the same functionality but a smaller file size. The minified version is harder for humans to read, and it does not include the friendly error system.
+
+## addons folder
+
+The addons folder includes additional p5.js-related libraries, in both original and minified versions.
+
+### p5.sound.js, p5.sound.min.js
+
+p5.sound extends p5.js with Web Audio functionality including audio input, playback, analysis, and synthesis.
+
+## empty-example folder
+
+This is a minimal example website. The folder includes the web page itself (index.html), the p5.js library, other related p5.js libraries, and a template starting point for your p5.js sketch, called sketch.js.
+
+### index.html
+
+index.html is a template for an HTML file. It first imports the libraries included in the folder (p5.js, p5.sound.js), then loads and executes sketch.js, which is where you can write your own code.
+
+### sketch.js
+
+sketch.js is a template for a p5.js sketch, with the functions setup() and draw() for you to complete.
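+
+For example, a minimal sketch (the exact template contents may differ
+slightly) looks like this:
+
+```js
+function setup() {
+  // runs once at startup
+  createCanvas(400, 400);
+}
+
+function draw() {
+  // runs every frame
+  background(220);
+}
+```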
+
+## README.txt
+
+This README file is formatted with Markdown :)
+
+# What's next?
+
+If you need more information to help get you started, please refer to our website:
+https://p5js.org/tutorials/get-started/ and https://p5js.org/tutorials/
+
+An online reference to the p5.js library is available here:
+https://p5js.org/reference/
+
+In order to run your website (including the empty-example), you need to run a local server; please see this tutorial on our wiki:
+https://github.com/processing/p5.js/wiki/Local-server
+
+p5.js is built by a community of contributors. If you want to learn more about us, visit:
+https://p5js.org/community/
+
+# License
+
+The p5.js library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, version 2.1.
diff --git a/libraries/p5/addons/p5.sound.js b/libraries/p5/addons/p5.sound.js
new file mode 100644
index 0000000..f7216fc
--- /dev/null
+++ b/libraries/p5/addons/p5.sound.js
@@ -0,0 +1,12268 @@
+/** [p5.sound] Version: 1.0.1 - 2021-05-25 */
+ /**
+ *
+ * p5.sound extends p5 with Web Audio functionality including audio input,
+ * playback, analysis and synthesis.
+ *
+ * p5.Envelope: An Envelope is a series
+ * of fades over time. Often used to control an object's
+ * output gain level as an "ADSR Envelope" (Attack, Decay,
+ * Sustain, Release). Can also modulate other parameters.
+ *
+ * p5.Delay: A delay effect with
+ * parameters for feedback, delayTime, and lowpass filter.
+ *
+ * p5.Filter: Filter the frequency range of a
+ * sound.
+ *
+ * p5.Reverb: Add reverb to a sound by specifying
+ * duration and decay.
+ */
+
+/**
+ * Giving users control over starting audio is not only good practice: the
+ * policy is also enforced by many web browsers, including iOS Safari and
+ * Google Chrome, which create the Web Audio API's
+ * Audio Context
+ * in a suspended state.
+ *
+ * In these browser-specific policies, sound will not play until a user
+ * interaction event (e.g. mousePressed()) explicitly resumes
+ * the AudioContext, or starts an audio node. This can be accomplished by
+ * calling start() on a p5.Oscillator,
+ * play() on a p5.SoundFile, or simply
+ * userStartAudio().
+ *
+ * userStartAudio() starts the AudioContext on a user
+ * gesture. The default behavior will enable audio on any
+ * mouseUp or touchEnd event. It can also be placed in a specific
+ * interaction function, such as mousePressed() as in the
+ * example below. This method utilizes
+ * StartAudioContext
+ * , a library by Yotam Mann (MIT Licence, 2016).
+ * @param {Element|Array} [elements] This argument can be an Element,
+ * Selector String, NodeList, p5.Element,
+ * jQuery Element, or an Array of any of those.
+ * @param {Function} [callback] Callback to invoke when the AudioContext
+ * has started
+ * @return {Promise} Returns a Promise that resolves when
+ * the AudioContext state is 'running'
+ * @method userStartAudio
+ * @for p5
+ * @example
+ *
+ * function setup() {
+ * // mimics the autoplay policy
+ * getAudioContext().suspend();
+ *
+ * let mySynth = new p5.MonoSynth();
+ *
+ * // This won't play until the context has resumed
+ * mySynth.play('A6');
+ * }
+ * function draw() {
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ * text(getAudioContext().state, width/2, height/2);
+ * }
+ * function mousePressed() {
+ * userStartAudio();
+ * }
+ *
+ */
+
+function userStartAudio(elements, callback) {
+ var elt = elements;
+
+ if (elements instanceof p5.Element) {
+ elt = elements.elt;
+ } else if (elements instanceof Array && elements[0] instanceof p5.Element) {
+ elt = elements.map(function (e) {
+ return e.elt;
+ });
+ }
+
+ return startaudiocontext__WEBPACK_IMPORTED_MODULE_0___default()(audiocontext, elt, callback);
+}
+ __webpack_exports__["a"] = (audiocontext);
+}.call(this, __webpack_require__(26)))
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(10)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Add=function(t){this.createInsOuts(2,0),this._sum=this.input[0]=this.input[1]=this.output=new i.Gain,this._param=this.input[1]=new i.Signal(t),this._param.connect(this._sum)},i.extend(i.Add,i.Signal),i.Add.prototype.dispose=function(){return i.prototype.dispose.call(this),this._sum.dispose(),this._sum=null,this._param.dispose(),this._param=null,this},i.Add}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports) {
+
+module.exports = {
+ recorderProcessor: 'recorder-processor',
+ soundFileProcessor: 'sound-file-processor',
+ amplitudeProcessor: 'amplitude-processor'
+};
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(15)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(e){"use strict";return e.WaveShaper=function(e,t){this._shaper=this.input=this.output=this.context.createWaveShaper(),this._curve=null,Array.isArray(e)?this.curve=e:isFinite(e)||this.isUndef(e)?this._curve=new Float32Array(this.defaultArg(e,1024)):this.isFunction(e)&&(this._curve=new Float32Array(this.defaultArg(t,1024)),this.setMap(e))},e.extend(e.WaveShaper,e.SignalBase),e.WaveShaper.prototype.setMap=function(e){for(var t=0,r=this._curve.length;te)this.cancelScheduledValues(e),this.linearRampToValueAtTime(t,e);else{var n=this._searchAfter(e);n&&(this.cancelScheduledValues(e),n.type===o.TimelineSignal.Type.Linear?this.linearRampToValueAtTime(t,e):n.type===o.TimelineSignal.Type.Exponential&&this.exponentialRampToValueAtTime(t,e)),this.setValueAtTime(t,e)}return this},o.TimelineSignal.prototype.linearRampToValueBetween=function(e,t,i){return this.setRampPoint(t),this.linearRampToValueAtTime(e,i),this},o.TimelineSignal.prototype.exponentialRampToValueBetween=function(e,t,i){return this.setRampPoint(t),this.exponentialRampToValueAtTime(e,i),this},o.TimelineSignal.prototype._searchBefore=function(e){return this._events.get(e)},o.TimelineSignal.prototype._searchAfter=function(e){return this._events.getAfter(e)},o.TimelineSignal.prototype.getValueAtTime=function(e){e=this.toSeconds(e);var t=this._searchAfter(e),i=this._searchBefore(e),n=this._initial;if(null===i)n=this._initial;else if(i.type===o.TimelineSignal.Type.Target){var a,l=this._events.getBefore(i.time);a=null===l?this._initial:l.value,n=this._exponentialApproach(i.time,a,i.value,i.constant,e)}else n=i.type===o.TimelineSignal.Type.Curve?this._curveInterpolate(i.time,i.value,i.duration,e):null===t?i.value:t.type===o.TimelineSignal.Type.Linear?this._linearInterpolate(i.time,i.value,t.time,t.value,e):t.type===o.TimelineSignal.Type.Exponential?this._exponentialInterpolate(i.time,i.value,t.time,t.value,e):i.value;return n},o.TimelineSignal.prototype.connect=o.SignalBase.prototype.connect,o.TimelineSignal.prototype._exponentialApproach=function(e,t,i,n,a){return i+(t-i)*Math.exp(-(a-e)/n)},o.TimelineSignal.prototype._linearInterpolate=function(e,t,i,n,a){return t+(a-e)/(i-e)*(n-t)},o.TimelineSignal.prototype._exponentialInterpolate=function(e,t,i,n,a){return(t=Math.max(this._minOutput,t))*Math.pow(n/t,(a-e)/(i-e))},o.TimelineSignal.prototype._curveInterpolate=function(e,t,i,n){var a=t.length;if(e+i<=n)return t[a-1];if(n<=e)return t[0];var l=(n-e)/i,s=Math.floor((a-1)*l),r=Math.ceil((a-1)*l),o=t[s],p=t[r];return r===s?o:this._linearInterpolate(s,o,r,p,l*(a-1))},o.TimelineSignal.prototype.dispose=function(){o.Signal.prototype.dispose.call(this),o.Param.prototype.dispose.call(this),this._events.dispose(),this._events=null},o.TimelineSignal}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(4),__webpack_require__(1),__webpack_require__(2)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Scale=function(t,e){this._outputMin=this.defaultArg(t,0),this._outputMax=this.defaultArg(e,1),this._scale=this.input=new i.Multiply(1),this._add=this.output=new i.Add(0),this._scale.connect(this._add),this._setRange()},i.extend(i.Scale,i.SignalBase),Object.defineProperty(i.Scale.prototype,"min",{get:function(){return this._outputMin},set:function(t){this._outputMin=t,this._setRange()}}),Object.defineProperty(i.Scale.prototype,"max",{get:function(){return this._outputMax},set:function(t){this._outputMax=t,this._setRange()}}),i.Scale.prototype._setRange=function(){this._add.value=this._outputMin,this._scale.value=this._outputMax-this._outputMin},i.Scale.prototype.dispose=function(){return i.prototype.dispose.call(this),this._add.dispose(),this._add=null,this._scale.dispose(),this._scale=null,this},i.Scale}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(16),__webpack_require__(30),__webpack_require__(31),__webpack_require__(12)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){return t.Type={Default:"number",Time:"time",Frequency:"frequency",TransportTime:"transportTime",Ticks:"ticks",NormalRange:"normalRange",AudioRange:"audioRange",Decibels:"db",Interval:"interval",BPM:"bpm",Positive:"positive",Cents:"cents",Degrees:"degrees",MIDI:"midi",BarsBeatsSixteenths:"barsBeatsSixteenths",Samples:"samples",Hertz:"hertz",Note:"note",Milliseconds:"milliseconds",Seconds:"seconds",Notation:"notation"},t.prototype.toSeconds=function(e){return this.isNumber(e)?e:this.isUndef(e)?this.now():this.isString(e)?new t.Time(e).toSeconds():e instanceof t.TimeBase?e.toSeconds():void 0},t.prototype.toFrequency=function(e){return this.isNumber(e)?e:this.isString(e)||this.isUndef(e)?new t.Frequency(e).valueOf():e instanceof t.TimeBase?e.toFrequency():void 0},t.prototype.toTicks=function(e){return this.isNumber(e)||this.isString(e)?new t.TransportTime(e).toTicks():this.isUndef(e)?t.Transport.ticks:e instanceof t.TimeBase?e.toTicks():void 0},t}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(18),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return window.GainNode&&!AudioContext.prototype.createGain&&(AudioContext.prototype.createGain=AudioContext.prototype.createGainNode),i.Gain=function(){var t=this.optionsObject(arguments,["gain","units"],i.Gain.defaults);this.input=this.output=this._gainNode=this.context.createGain(),this.gain=new i.Param({param:this._gainNode.gain,units:t.units,value:t.gain,convert:t.convert}),this._readOnly("gain")},i.extend(i.Gain),i.Gain.defaults={gain:1,convert:!0},i.Gain.prototype.dispose=function(){i.Param.prototype.dispose.call(this),this._gainNode.disconnect(),this._gainNode=null,this._writable("gain"),this.gain.dispose(),this.gain=null},i.prototype.createInsOuts=function(t,n){1===t?this.input=new i.Gain:1this._nextTick&&this._state;){var e=this._state.getValueAtTime(this._nextTick);if(e!==this._lastState){this._lastState=e;var i=this._state.get(this._nextTick);e===o.State.Started?(this._nextTick=i.time,this.isUndef(i.offset)||(this.ticks=i.offset),this.emit("start",i.time,this.ticks)):e===o.State.Stopped?(this.ticks=0,this.emit("stop",i.time)):e===o.State.Paused&&this.emit("pause",i.time)}var s=this._nextTick;this.frequency&&(this._nextTick+=1/this.frequency.getValueAtTime(this._nextTick),e===o.State.Started&&(this.callback(s),this.ticks++))}},o.Clock.prototype.getStateAtTime=function(t){return t=this.toSeconds(t),this._state.getValueAtTime(t)},o.Clock.prototype.dispose=function(){o.Emitter.prototype.dispose.call(this),this.context.off("tick",this._boundLoop),this._writable("frequency"),this.frequency.dispose(),this.frequency=null,this._boundLoop=null,this._nextTick=1/0,this.callback=null,this._state.dispose(),this._state=null},o.Clock}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(14)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){function t(e,t,n){if(e.input)Array.isArray(e.input)?(o.prototype.isUndef(n)&&(n=0),this.connect(e.input[n])):this.connect(e.input,t,n);else try{e instanceof AudioNode?i.call(this,e,t,n):i.call(this,e,t)}catch(t){throw new Error("error connecting to node: "+e+"\n"+t)}}var i,r;return!window.hasOwnProperty("AudioContext")&&window.hasOwnProperty("webkitAudioContext")&&(window.AudioContext=window.webkitAudioContext),o.Context=function(t){for(var e in o.Emitter.call(this),t=t||new window.AudioContext,this._context=t,this._context)this._defineProperty(this._context,e);this._latencyHint="interactive",this._lookAhead=.1,this._updateInterval=this._lookAhead/3,this._computedUpdateInterval=0,this._worker=this._createWorker(),this._constants={}},o.extend(o.Context,o.Emitter),o.Emitter.mixin(o.Context),o.Context.prototype._defineProperty=function(e,n){this.isUndef(this[n])&&Object.defineProperty(this,n,{get:function(){return"function"==typeof e[n]?e[n].bind(e):e[n]},set:function(t){e[n]=t}})},o.Context.prototype.now=function(){return this._context.currentTime},o.Context.prototype._createWorker=function(){window.URL=window.URL||window.webkitURL;var t=new Blob(["var timeoutTime = "+(1e3*this._updateInterval).toFixed(1)+";self.onmessage = function(msg){\ttimeoutTime = parseInt(msg.data);};function tick(){\tsetTimeout(tick, timeoutTime);\tself.postMessage('tick');}tick();"]),e=URL.createObjectURL(t),n=new Worker(e);return n.addEventListener("message",function(){this.emit("tick")}.bind(this)),n.addEventListener("message",function(){var t=this.now();if(this.isNumber(this._lastUpdate)){var e=t-this._lastUpdate;this._computedUpdateInterval=Math.max(e,.97*this._computedUpdateInterval)}this._lastUpdate=t}.bind(this)),n},o.Context.prototype.getConstant=function(t){if(this._constants[t])return this._constants[t];for(var e=this._context.createBuffer(1,128,this._context.sampleRate),n=e.getChannelData(0),o=0;othis.memory){var t=this.length-this.memory;this._timeline.splice(0,t)}return this},i.Timeline.prototype.remove=function(e){if(this._iterating)this._toRemove.push(e);else{var i=this._timeline.indexOf(e);-1!==i&&this._timeline.splice(i,1)}return this},i.Timeline.prototype.get=function(e){var i=this._search(e);return-1!==i?this._timeline[i]:null},i.Timeline.prototype.peek=function(){return this._timeline[0]},i.Timeline.prototype.shift=function(){return this._timeline.shift()},i.Timeline.prototype.getAfter=function(e){var i=this._search(e);return i+1=e&&(this._timeline=[]);return this},i.Timeline.prototype.cancelBefore=function(e){if(this._timeline.length){var i=this._search(e);0<=i&&(this._timeline=this._timeline.slice(i+1))}return this},i.Timeline.prototype._search=function(e){var i=0,t=this._timeline.length,n=t;if(0e)return r;s.time>e?n=r:s.time=e;)t--;return this._iterate(i,t+1),this},i.Timeline.prototype.forEachAtTime=function(i,t){var e=this._search(i);return-1!==e&&this._iterate(function(e){e.time===i&&t(e)},0,e),this},i.Timeline.prototype.dispose=function(){i.prototype.dispose.call(this),this._timeline=null,this._toRemove=null},i.Timeline}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(1),__webpack_require__(2)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){"use strict";return t.Negate=function(){this._multiply=this.input=this.output=new t.Multiply(-1)},t.extend(t.Negate,t.SignalBase),t.Negate.prototype.dispose=function(){return t.prototype.dispose.call(this),this._multiply.dispose(),this._multiply=null,this},t.Negate}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(1),__webpack_require__(6)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(e){"use strict";return e.GreaterThanZero=function(){this._thresh=this.output=new e.WaveShaper(function(e){return e<=0?0:1},127),this._scale=this.input=new e.Multiply(1e4),this._scale.connect(this._thresh)},e.extend(e.GreaterThanZero,e.SignalBase),e.GreaterThanZero.prototype.dispose=function(){return e.prototype.dispose.call(this),this._scale.dispose(),this._scale=null,this._thresh.dispose(),this._thresh=null,this},e.GreaterThanZero}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!function(e,t){ true?!(__WEBPACK_AMD_DEFINE_ARRAY__ = [], __WEBPACK_AMD_DEFINE_FACTORY__ = (t),
+ __WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
+ (__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)):undefined}(this,function(){var r=function(e,t){this._dragged=!1,this._element=e,this._bindedMove=this._moved.bind(this),this._bindedEnd=this._ended.bind(this,t),e.addEventListener("touchstart",this._bindedEnd),e.addEventListener("touchmove",this._bindedMove),e.addEventListener("touchend",this._bindedEnd),e.addEventListener("mouseup",this._bindedEnd)};function o(e){return"running"===e.state}return r.prototype._moved=function(e){this._dragged=!0},r.prototype._ended=function(e){this._dragged||function(e){var t=e.createBuffer(1,1,e.sampleRate),n=e.createBufferSource();n.buffer=t,n.connect(e.destination),n.start(0),e.resume&&e.resume()}(e),this._dragged=!1},r.prototype.dispose=function(){this._element.removeEventListener("touchstart",this._bindedEnd),this._element.removeEventListener("touchmove",this._bindedMove),this._element.removeEventListener("touchend",this._bindedEnd),this._element.removeEventListener("mouseup",this._bindedEnd),this._bindedMove=null,this._bindedEnd=null,this._element=null},function(t,e,n){var i=new Promise(function(e){!function(t,n){o(t)?n():function e(){o(t)?n():(requestAnimationFrame(e),t.resume&&t.resume())}()}(t,e)}),d=[];return function e(t,n,i){if(Array.isArray(t)||NodeList&&t instanceof NodeList)for(var d=0;d= this._length) {\n this._writeIndex = 0;\n } // For excessive frames, the buffer will be overwritten.\n\n\n this._framesAvailable += sourceLength;\n\n if (this._framesAvailable > this._length) {\n this._framesAvailable = this._length;\n }\n }\n /**\n * Pull data out of buffer and fill a given sequence of Float32Arrays.\n *\n * @param {array} arraySequence An array of Float32Arrays.\n */\n\n }, {\n key: \"pull\",\n value: function pull(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // If the FIFO is completely empty, do nothing.\n if (this._framesAvailable === 0) {\n return;\n }\n\n var destinationLength = arraySequence[0].length; // Transfer data from the internal buffer to the |arraySequence| storage.\n\n for (var i = 0; i < destinationLength; ++i) {\n var readIndex = (this._readIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n arraySequence[channel][i] = this._channelData[channel][readIndex];\n }\n }\n\n this._readIndex += destinationLength;\n\n if (this._readIndex >= this._length) {\n this._readIndex = 0;\n }\n\n this._framesAvailable -= destinationLength;\n\n if (this._framesAvailable < 0) {\n this._framesAvailable = 0;\n }\n }\n }, {\n key: \"framesAvailable\",\n get: function get() {\n return this._framesAvailable;\n }\n }]);\n\n return RingBuffer;\n }()\n}[\"default\"];\n\nvar RecorderProcessor =\n/*#__PURE__*/\nfunction (_AudioWorkletProcesso) {\n _inherits(RecorderProcessor, _AudioWorkletProcesso);\n\n function RecorderProcessor(options) {\n var _this;\n\n _classCallCheck(this, RecorderProcessor);\n\n _this = _possibleConstructorReturn(this, _getPrototypeOf(RecorderProcessor).call(this));\n var processorOptions = options.processorOptions || {};\n _this.numOutputChannels = options.outputChannelCount || 2;\n _this.numInputChannels = processorOptions.numInputChannels || 2;\n _this.bufferSize = processorOptions.bufferSize || 1024;\n _this.recording = false;\n\n _this.clear();\n\n _this.port.onmessage = function (event) {\n var data = event.data;\n\n if (data.name === 'start') {\n _this.record(data.duration);\n } else if 
(data.name === 'stop') {\n _this.stop();\n }\n };\n\n return _this;\n }\n\n _createClass(RecorderProcessor, [{\n key: \"process\",\n value: function process(inputs) {\n if (!this.recording) {\n return true;\n } else if (this.sampleLimit && this.recordedSamples >= this.sampleLimit) {\n this.stop();\n return true;\n }\n\n var input = inputs[0];\n this.inputRingBuffer.push(input);\n\n if (this.inputRingBuffer.framesAvailable >= this.bufferSize) {\n this.inputRingBuffer.pull(this.inputRingBufferArraySequence);\n\n for (var channel = 0; channel < this.numOutputChannels; ++channel) {\n var inputChannelCopy = this.inputRingBufferArraySequence[channel].slice();\n\n if (channel === 0) {\n this.leftBuffers.push(inputChannelCopy);\n\n if (this.numInputChannels === 1) {\n this.rightBuffers.push(inputChannelCopy);\n }\n } else if (channel === 1 && this.numInputChannels > 1) {\n this.rightBuffers.push(inputChannelCopy);\n }\n }\n\n this.recordedSamples += this.bufferSize;\n }\n\n return true;\n }\n }, {\n key: \"record\",\n value: function record(duration) {\n if (duration) {\n this.sampleLimit = Math.round(duration * sampleRate);\n }\n\n this.recording = true;\n }\n }, {\n key: \"stop\",\n value: function stop() {\n this.recording = false;\n var buffers = this.getBuffers();\n var leftBuffer = buffers[0].buffer;\n var rightBuffer = buffers[1].buffer;\n this.port.postMessage({\n name: 'buffers',\n leftBuffer: leftBuffer,\n rightBuffer: rightBuffer\n }, [leftBuffer, rightBuffer]);\n this.clear();\n }\n }, {\n key: \"getBuffers\",\n value: function getBuffers() {\n var buffers = [];\n buffers.push(this.mergeBuffers(this.leftBuffers));\n buffers.push(this.mergeBuffers(this.rightBuffers));\n return buffers;\n }\n }, {\n key: \"mergeBuffers\",\n value: function mergeBuffers(channelBuffer) {\n var result = new Float32Array(this.recordedSamples);\n var offset = 0;\n var lng = channelBuffer.length;\n\n for (var i = 0; i < lng; i++) {\n var buffer = channelBuffer[i];\n result.set(buffer, offset);\n offset += buffer.length;\n }\n\n return result;\n }\n }, {\n key: \"clear\",\n value: function clear() {\n var _this2 = this;\n\n this.leftBuffers = [];\n this.rightBuffers = [];\n this.inputRingBuffer = new RingBuffer(this.bufferSize, this.numInputChannels);\n this.inputRingBufferArraySequence = new Array(this.numInputChannels).fill(null).map(function () {\n return new Float32Array(_this2.bufferSize);\n });\n this.recordedSamples = 0;\n this.sampleLimit = null;\n }\n }]);\n\n return RecorderProcessor;\n}(_wrapNativeSuper(AudioWorkletProcessor));\n\nregisterProcessor(processorNames.recorderProcessor, RecorderProcessor);");
+
+ }),
+ (function(module, __webpack_exports__, __webpack_require__) {
+
+"use strict";
+__webpack_require__.r(__webpack_exports__);
+ __webpack_exports__["default"] = ("function _typeof(obj) { if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _wrapNativeSuper(Class) { var _cache = typeof Map === \"function\" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== \"function\") { throw new TypeError(\"Super expression must either be null or a function\"); } if (typeof _cache !== \"undefined\") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }\n\nfunction isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _construct(Parent, args, Class) { if (isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }\n\nfunction _isNativeFunction(fn) { return Function.toString.call(fn).indexOf(\"[native code]\") !== -1; }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\n// import dependencies via preval.require so that they're available as values at compile time\nvar processorNames = {\n \"recorderProcessor\": \"recorder-processor\",\n \"soundFileProcessor\": \"sound-file-processor\",\n \"amplitudeProcessor\": \"amplitude-processor\"\n};\nvar RingBuffer = {\n \"default\":\n /*#__PURE__*/\n function () {\n /**\n * @constructor\n * @param {number} length Buffer length in frames.\n * @param {number} channelCount Buffer channel count.\n */\n function RingBuffer(length, channelCount) {\n _classCallCheck(this, RingBuffer);\n\n this._readIndex = 0;\n this._writeIndex = 0;\n this._framesAvailable = 0;\n this._channelCount = channelCount;\n this._length = length;\n this._channelData = [];\n\n for (var i = 0; i < this._channelCount; ++i) {\n this._channelData[i] = new Float32Array(length);\n }\n }\n /**\n * Getter for Available frames in buffer.\n *\n * @return {number} Available frames in buffer.\n */\n\n\n _createClass(RingBuffer, [{\n key: \"push\",\n\n /**\n * Push a sequence of Float32Arrays to buffer.\n *\n * @param {array} arraySequence A sequence of Float32Arrays.\n */\n value: function push(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // Transfer data from the |arraySequence| storage to the internal buffer.\n var sourceLength = arraySequence[0] ? 
arraySequence[0].length : 0;\n\n for (var i = 0; i < sourceLength; ++i) {\n var writeIndex = (this._writeIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n this._channelData[channel][writeIndex] = arraySequence[channel][i];\n }\n }\n\n this._writeIndex += sourceLength;\n\n if (this._writeIndex >= this._length) {\n this._writeIndex = 0;\n } // For excessive frames, the buffer will be overwritten.\n\n\n this._framesAvailable += sourceLength;\n\n if (this._framesAvailable > this._length) {\n this._framesAvailable = this._length;\n }\n }\n /**\n * Pull data out of buffer and fill a given sequence of Float32Arrays.\n *\n * @param {array} arraySequence An array of Float32Arrays.\n */\n\n }, {\n key: \"pull\",\n value: function pull(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // If the FIFO is completely empty, do nothing.\n if (this._framesAvailable === 0) {\n return;\n }\n\n var destinationLength = arraySequence[0].length; // Transfer data from the internal buffer to the |arraySequence| storage.\n\n for (var i = 0; i < destinationLength; ++i) {\n var readIndex = (this._readIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n arraySequence[channel][i] = this._channelData[channel][readIndex];\n }\n }\n\n this._readIndex += destinationLength;\n\n if (this._readIndex >= this._length) {\n this._readIndex = 0;\n }\n\n this._framesAvailable -= destinationLength;\n\n if (this._framesAvailable < 0) {\n this._framesAvailable = 0;\n }\n }\n }, {\n key: \"framesAvailable\",\n get: function get() {\n return this._framesAvailable;\n }\n }]);\n\n return RingBuffer;\n }()\n}[\"default\"];\n\nvar SoundFileProcessor =\n/*#__PURE__*/\nfunction (_AudioWorkletProcesso) {\n _inherits(SoundFileProcessor, _AudioWorkletProcesso);\n\n function SoundFileProcessor(options) {\n var _this;\n\n _classCallCheck(this, SoundFileProcessor);\n\n _this = _possibleConstructorReturn(this, _getPrototypeOf(SoundFileProcessor).call(this));\n var processorOptions = options.processorOptions || {};\n _this.bufferSize = processorOptions.bufferSize || 256;\n _this.inputRingBuffer = new RingBuffer(_this.bufferSize, 1);\n _this.inputRingBufferArraySequence = [new Float32Array(_this.bufferSize)];\n return _this;\n }\n\n _createClass(SoundFileProcessor, [{\n key: \"process\",\n value: function process(inputs) {\n var input = inputs[0]; // we only care about the first input channel, because that contains the position data\n\n this.inputRingBuffer.push([input[0]]);\n\n if (this.inputRingBuffer.framesAvailable >= this.bufferSize) {\n this.inputRingBuffer.pull(this.inputRingBufferArraySequence);\n var inputChannel = this.inputRingBufferArraySequence[0];\n var position = inputChannel[inputChannel.length - 1] || 0;\n this.port.postMessage({\n name: 'position',\n position: position\n });\n }\n\n return true;\n }\n }]);\n\n return SoundFileProcessor;\n}(_wrapNativeSuper(AudioWorkletProcessor));\n\nregisterProcessor(processorNames.soundFileProcessor, SoundFileProcessor);");
+
+ }),
+ (function(module, __webpack_exports__, __webpack_require__) {
+
+"use strict";
+__webpack_require__.r(__webpack_exports__);
+ __webpack_exports__["default"] = ("function _typeof(obj) { if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _wrapNativeSuper(Class) { var _cache = typeof Map === \"function\" ? new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== \"function\") { throw new TypeError(\"Super expression must either be null or a function\"); } if (typeof _cache !== \"undefined\") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }\n\nfunction isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _construct(Parent, args, Class) { if (isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }\n\nfunction _isNativeFunction(fn) { return Function.toString.call(fn).indexOf(\"[native code]\") !== -1; }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\n// import dependencies via preval.require so that they're available as values at compile time\nvar processorNames = {\n \"recorderProcessor\": \"recorder-processor\",\n \"soundFileProcessor\": \"sound-file-processor\",\n \"amplitudeProcessor\": \"amplitude-processor\"\n};\nvar RingBuffer = {\n \"default\":\n /*#__PURE__*/\n function () {\n /**\n * @constructor\n * @param {number} length Buffer length in frames.\n * @param {number} channelCount Buffer channel count.\n */\n function RingBuffer(length, channelCount) {\n _classCallCheck(this, RingBuffer);\n\n this._readIndex = 0;\n this._writeIndex = 0;\n this._framesAvailable = 0;\n this._channelCount = channelCount;\n this._length = length;\n this._channelData = [];\n\n for (var i = 0; i < this._channelCount; ++i) {\n this._channelData[i] = new Float32Array(length);\n }\n }\n /**\n * Getter for Available frames in buffer.\n *\n * @return {number} Available frames in buffer.\n */\n\n\n _createClass(RingBuffer, [{\n key: \"push\",\n\n /**\n * Push a sequence of Float32Arrays to buffer.\n *\n * @param {array} arraySequence A sequence of Float32Arrays.\n */\n value: function push(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // Transfer data from the |arraySequence| storage to the internal buffer.\n var sourceLength = arraySequence[0] ? 
arraySequence[0].length : 0;\n\n for (var i = 0; i < sourceLength; ++i) {\n var writeIndex = (this._writeIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n this._channelData[channel][writeIndex] = arraySequence[channel][i];\n }\n }\n\n this._writeIndex += sourceLength;\n\n if (this._writeIndex >= this._length) {\n this._writeIndex = 0;\n } // For excessive frames, the buffer will be overwritten.\n\n\n this._framesAvailable += sourceLength;\n\n if (this._framesAvailable > this._length) {\n this._framesAvailable = this._length;\n }\n }\n /**\n * Pull data out of buffer and fill a given sequence of Float32Arrays.\n *\n * @param {array} arraySequence An array of Float32Arrays.\n */\n\n }, {\n key: \"pull\",\n value: function pull(arraySequence) {\n // The channel count of arraySequence and the length of each channel must\n // match with this buffer obejct.\n // If the FIFO is completely empty, do nothing.\n if (this._framesAvailable === 0) {\n return;\n }\n\n var destinationLength = arraySequence[0].length; // Transfer data from the internal buffer to the |arraySequence| storage.\n\n for (var i = 0; i < destinationLength; ++i) {\n var readIndex = (this._readIndex + i) % this._length;\n\n for (var channel = 0; channel < this._channelCount; ++channel) {\n arraySequence[channel][i] = this._channelData[channel][readIndex];\n }\n }\n\n this._readIndex += destinationLength;\n\n if (this._readIndex >= this._length) {\n this._readIndex = 0;\n }\n\n this._framesAvailable -= destinationLength;\n\n if (this._framesAvailable < 0) {\n this._framesAvailable = 0;\n }\n }\n }, {\n key: \"framesAvailable\",\n get: function get() {\n return this._framesAvailable;\n }\n }]);\n\n return RingBuffer;\n }()\n}[\"default\"];\n\nvar AmplitudeProcessor =\n/*#__PURE__*/\nfunction (_AudioWorkletProcesso) {\n _inherits(AmplitudeProcessor, _AudioWorkletProcesso);\n\n function AmplitudeProcessor(options) {\n var _this;\n\n _classCallCheck(this, AmplitudeProcessor);\n\n _this = _possibleConstructorReturn(this, _getPrototypeOf(AmplitudeProcessor).call(this));\n var processorOptions = options.processorOptions || {};\n _this.numOutputChannels = options.outputChannelCount || 1;\n _this.numInputChannels = processorOptions.numInputChannels || 2;\n _this.normalize = processorOptions.normalize || false;\n _this.smoothing = processorOptions.smoothing || 0;\n _this.bufferSize = processorOptions.bufferSize || 2048;\n _this.inputRingBuffer = new RingBuffer(_this.bufferSize, _this.numInputChannels);\n _this.outputRingBuffer = new RingBuffer(_this.bufferSize, _this.numOutputChannels);\n _this.inputRingBufferArraySequence = new Array(_this.numInputChannels).fill(null).map(function () {\n return new Float32Array(_this.bufferSize);\n });\n _this.stereoVol = [0, 0];\n _this.stereoVolNorm = [0, 0];\n _this.volMax = 0.001;\n\n _this.port.onmessage = function (event) {\n var data = event.data;\n\n if (data.name === 'toggleNormalize') {\n _this.normalize = data.normalize;\n } else if (data.name === 'smoothing') {\n _this.smoothing = Math.max(0, Math.min(1, data.smoothing));\n }\n };\n\n return _this;\n } // TO DO make this stereo / dependent on # of audio channels\n\n\n _createClass(AmplitudeProcessor, [{\n key: \"process\",\n value: function process(inputs, outputs) {\n var input = inputs[0];\n var output = outputs[0];\n var smoothing = this.smoothing;\n this.inputRingBuffer.push(input);\n\n if (this.inputRingBuffer.framesAvailable >= this.bufferSize) {\n 
this.inputRingBuffer.pull(this.inputRingBufferArraySequence);\n\n for (var channel = 0; channel < this.numInputChannels; ++channel) {\n var inputBuffer = this.inputRingBufferArraySequence[channel];\n var bufLength = inputBuffer.length;\n var sum = 0;\n\n for (var i = 0; i < bufLength; i++) {\n var x = inputBuffer[i];\n\n if (this.normalize) {\n sum += Math.max(Math.min(x / this.volMax, 1), -1) * Math.max(Math.min(x / this.volMax, 1), -1);\n } else {\n sum += x * x;\n }\n } // ... then take the square root of the sum.\n\n\n var rms = Math.sqrt(sum / bufLength);\n this.stereoVol[channel] = Math.max(rms, this.stereoVol[channel] * smoothing);\n this.volMax = Math.max(this.stereoVol[channel], this.volMax);\n } // calculate stero normalized volume and add volume from all channels together\n\n\n var volSum = 0;\n\n for (var index = 0; index < this.stereoVol.length; index++) {\n this.stereoVolNorm[index] = Math.max(Math.min(this.stereoVol[index] / this.volMax, 1), 0);\n volSum += this.stereoVol[index];\n } // volume is average of channels\n\n\n var volume = volSum / this.stereoVol.length; // normalized value\n\n var volNorm = Math.max(Math.min(volume / this.volMax, 1), 0);\n this.port.postMessage({\n name: 'amplitude',\n volume: volume,\n volNorm: volNorm,\n stereoVol: this.stereoVol,\n stereoVolNorm: this.stereoVolNorm\n }); // pass input through to output\n\n this.outputRingBuffer.push(this.inputRingBufferArraySequence);\n } // pull 128 frames out of the ring buffer\n // if the ring buffer does not have enough frames, the output will be silent\n\n\n this.outputRingBuffer.pull(output);\n return true;\n }\n }]);\n\n return AmplitudeProcessor;\n}(_wrapNativeSuper(AudioWorkletProcessor));\n\nregisterProcessor(processorNames.amplitudeProcessor, AmplitudeProcessor);");
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(17)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){o.Frequency=function(e,t){if(!(this instanceof o.Frequency))return new o.Frequency(e,t);o.TimeBase.call(this,e,t)},o.extend(o.Frequency,o.TimeBase),o.Frequency.prototype._primaryExpressions=Object.create(o.TimeBase.prototype._primaryExpressions),o.Frequency.prototype._primaryExpressions.midi={regexp:/^(\d+(?:\.\d+)?midi)/,method:function(e){return this.midiToFrequency(e)}},o.Frequency.prototype._primaryExpressions.note={regexp:/^([a-g]{1}(?:b|#|x|bb)?)(-?[0-9]+)/i,method:function(e,t){var r=n[e.toLowerCase()]+12*(parseInt(t)+1);return this.midiToFrequency(r)}},o.Frequency.prototype._primaryExpressions.tr={regexp:/^(\d+(?:\.\d+)?):(\d+(?:\.\d+)?):?(\d+(?:\.\d+)?)?/,method:function(e,t,r){var n=1;return e&&"0"!==e&&(n*=this._beatsToUnits(this._timeSignature()*parseFloat(e))),t&&"0"!==t&&(n*=this._beatsToUnits(parseFloat(t))),r&&"0"!==r&&(n*=this._beatsToUnits(parseFloat(r)/4)),n}},o.Frequency.prototype.transpose=function(e){return this._expr=function(e,t){return e()*this.intervalToFrequencyRatio(t)}.bind(this,this._expr,e),this},o.Frequency.prototype.harmonize=function(e){return this._expr=function(e,t){for(var r=e(),n=[],o=0;o
+
+/**
+ * Scale the output of all sound in this sketch.
+ * Scaled between 0.0 (silence) and 1.0 (full volume).
+ * 1.0 is the maximum amplitude of a digital sound, so multiplying
+ * by greater than 1.0 may cause digital distortion. To
+ * fade, provide a rampTime parameter. For more
+ * complex fades, see the Envelope class.
+ *
+ * Alternatively, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ *
+ * How This Works: When you load the p5.sound module, it
+ * creates a single instance of p5sound. All sound objects in this
+ * module output to p5sound before reaching your computer's output.
+ * So if you change the amplitude of p5sound, it impacts all of the
+ * sound in this module.
+ *
+ *
+ * If no value is provided, returns a Web Audio API Gain Node.
+ *
+ * @method outputVolume
+ * @param {Number|Object} volume Volume (amplitude) between 0.0
+ * and 1.0 or modulating signal/oscillator
+ * @param {Number} [rampTime] Fade for t seconds
+ * @param {Number} [timeFromNow] Schedule this event to happen at
+ * t seconds in the future
+ */
+
+
+p5.prototype.outputVolume = function (vol) {
+ var rampTime = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+ var tFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+
+ if (typeof vol === 'number') {
+ var now = p5sound.audiocontext.currentTime;
+ var currentVol = p5sound.output.gain.value;
+ p5sound.output.gain.cancelScheduledValues(now + tFromNow);
+ p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+ p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+ } else if (vol) {
+ vol.connect(p5sound.output.gain);
+ } else {
+ return p5sound.output.gain;
+ }
+};
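+
+// For example (illustrative values): fade the overall output to half volume
+// over one second, starting immediately:
+//
+//   outputVolume(0.5, 1.0, 0);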
+/**
+ * `p5.soundOut` is the p5.sound final output bus. It sends output to
+ * the destination of this window's web audio context. It contains
+ * Web Audio API nodes including a dynamicsCompressor (.limiter),
+ * and Gain Nodes for .input and .output.
+ *
+ * @property {Object} soundOut
+ */
+
+
+p5.prototype.soundOut = p5.soundOut = p5sound;
+
+p5.soundOut._silentNode = p5sound.audiocontext.createGain();
+p5.soundOut._silentNode.gain.value = 0;
+
+p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
+
+ var main = (p5sound);
+var processorNames = __webpack_require__(5);
+var processorNames_default = __webpack_require__.n(processorNames);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+
+
+/**
+ * @for p5
+ */
+
+/**
+ * Returns a number representing the sample rate, in samples per second,
+ * of all sound objects in this audio context. It is determined by the
+ * sampling rate of your operating system's sound card, and it is not
+ * currently possible to change.
+ * It is often 44100 Hz, slightly more than twice the upper limit of human hearing.
+ *
+ * @method sampleRate
+ * @return {Number} samplerate samples per second
+ */
+
+function sampleRate() {
+ return main.audiocontext.sampleRate;
+}
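+
+// For example, sampleRate() commonly returns 44100 or 48000, depending on
+// the system's audio hardware settings.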
+/**
+ * Returns the closest MIDI note value for
+ * a given frequency.
+ *
+ * @method freqToMidi
+ * @param {Number} frequency A frequency; for example, the "A"
+ * above middle C is 440 Hz
+ * @return {Number} MIDI note value
+ */
+
+
+function freqToMidi(f) {
+ var mathlog2 = Math.log(f / 440) / Math.log(2);
+ var m = Math.round(12 * mathlog2) + 69;
+ return m;
+}
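+
+// For example, freqToMidi(440) returns 69 (concert A) and
+// freqToMidi(261.63) returns 60 (middle C).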
+/**
+ * Returns the frequency value of a MIDI note value.
+ * General MIDI treats notes as integers where middle C
+ * is 60, C# is 61, D is 62 etc. Useful for generating
+ * musical frequencies with oscillators.
+ *
+ * @method midiToFreq
+ * @param {Number} midiNote The number of a MIDI note
+ * @return {Number} Frequency value of the given MIDI note
+ */
+
+
+function midiToFreq(m) {
+ return 440 * Math.pow(2, (m - 69) / 12.0);
+}
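+
+// For example, midiToFreq(69) returns 440 (concert A) and midiToFreq(60)
+// returns approximately 261.63 (middle C).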
+
+
+function noteToFreq(note) {
+ if (typeof note !== 'string') {
+ return note;
+ }
+
+ var wholeNotes = {
+ A: 21,
+ B: 23,
+ C: 24,
+ D: 26,
+ E: 28,
+ F: 29,
+ G: 31
+ };
+ var value = wholeNotes[note[0].toUpperCase()];
+ var octave = ~~note.slice(-1);
+ value += 12 * (octave - 1);
+
+ switch (note[1]) {
+ case '#':
+ value += 1;
+ break;
+
+ case 'b':
+ value -= 1;
+ break;
+
+ default:
+ break;
+ }
+
+ return midiToFreq(value);
+}
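+
+// For example, noteToFreq('C4') resolves to MIDI note 60 and returns
+// approximately 261.63 Hz; non-string input is returned unchanged.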
+/**
+ * List the SoundFile formats that you will include. loadSound
+ * will search your directory for these extensions, and will pick
+ * a format that is compatible with the client's web browser.
+ *
+ * @method soundFormats
+ * @param {String} [...formats] e.g. 'mp3', 'wav', 'ogg'
+ * @example
+ *
+ * function preload() {
+ * // set the global sound formats
+ * soundFormats('mp3', 'ogg');
+ *
+ * // load either beatbox.mp3, or .ogg, depending on browser
+ * mySound = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * text('sound loaded! tap to play', 10, 20, width - 20);
+ * cnv.mousePressed(function() {
+ * mySound.play();
+ * });
+ * }
+ *
+ */
+
+
+function soundFormats() {
+ main.extensions = [];
+
+ for (var i = 0; i < arguments.length; i++) {
+ arguments[i] = arguments[i].toLowerCase();
+
+ if (['mp3', 'wav', 'ogg', 'm4a', 'aac'].indexOf(arguments[i]) > -1) {
+ main.extensions.push(arguments[i]);
+ } else {
+ throw arguments[i] + ' is not a valid sound format!';
+ }
+ }
+}
+
+function disposeSound() {
+ for (var i = 0; i < main.soundArray.length; i++) {
+ main.soundArray[i].dispose();
+ }
+}
+
+function _checkFileFormats(paths) {
+ var path;
+
+ if (typeof paths === 'string') {
+ path = paths;
+
+ var extTest = path.split('.').pop();
+
+ if (['mp3', 'wav', 'ogg', 'm4a', 'aac'].indexOf(extTest) > -1) {
+ if (!p5.prototype.isFileSupported(extTest)) {
+ var pathSplit = path.split('.');
+ var pathCore = pathSplit[pathSplit.length - 1];
+
+ for (var _i = 0; _i < main.extensions.length; _i++) {
+ var _extension = main.extensions[_i];
+
+ var _supported = p5.prototype.isFileSupported(_extension);
+
+ if (_supported) {
+ pathCore = '';
+
+ if (pathSplit.length === 2) {
+ pathCore += pathSplit[0];
+ }
+
+ for (var _i2 = 1; _i2 <= pathSplit.length - 2; _i2++) {
+ var p = pathSplit[_i2];
+ pathCore += '.' + p;
+ }
+
+ path = pathCore += '.';
+ path = path += _extension;
+ break;
+ }
+ }
+ }
+ }
+ else {
+ for (var _i3 = 0; _i3 < main.extensions.length; _i3++) {
+ var _extension2 = main.extensions[_i3];
+
+ var _supported2 = p5.prototype.isFileSupported(_extension2);
+
+ if (_supported2) {
+ path = path + '.' + _extension2;
+ break;
+ }
+ }
+ }
+ }
+ else if (_typeof(paths) === 'object') {
+ for (var i = 0; i < paths.length; i++) {
+ var extension = paths[i].split('.').pop();
+ var supported = p5.prototype.isFileSupported(extension);
+
+ if (supported) {
+ path = paths[i];
+ break;
+ }
+ }
+ }
+
+ return path;
+}
+/**
+ * Used by Osc and Envelope to chain signal math
+ */
+
+
+function _mathChain(o, math, thisChain, nextChain, type) {
+ for (var i in o.mathOps) {
+ if (o.mathOps[i] instanceof type) {
+ o.mathOps[i].dispose();
+ thisChain = i;
+
+ if (thisChain < o.mathOps.length - 1) {
+ nextChain = o.mathOps[i + 1];
+ }
+ }
+ }
+
+ o.mathOps[thisChain - 1].disconnect();
+ o.mathOps[thisChain - 1].connect(math);
+ math.connect(nextChain);
+ o.mathOps[thisChain] = math;
+ return o;
+}
+
+
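+// Encodes an AudioBuffer as a 16-bit stereo PCM WAV file: a 44-byte RIFF
+// header (written below) followed by interleaved left/right samples,
+// returned as a DataView. Mono input is duplicated to both channels.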
+function convertToWav(audioBuffer) {
+ var leftChannel, rightChannel;
+ leftChannel = audioBuffer.getChannelData(0);
+
+ if (audioBuffer.numberOfChannels > 1) {
+ rightChannel = audioBuffer.getChannelData(1);
+ } else {
+ rightChannel = leftChannel;
+ }
+
+ var interleaved = interleave(leftChannel, rightChannel);
+
+ var buffer = new window.ArrayBuffer(44 + interleaved.length * 2);
+ var view = new window.DataView(buffer);
+
+ // RIFF chunk descriptor
+ writeUTFBytes(view, 0, 'RIFF');
+ view.setUint32(4, 36 + interleaved.length * 2, true); // file length minus first 8 bytes
+ writeUTFBytes(view, 8, 'WAVE');
+
+ // 'fmt ' sub-chunk: 16-byte PCM format descriptor
+ writeUTFBytes(view, 12, 'fmt ');
+ view.setUint32(16, 16, true); // sub-chunk size
+ view.setUint16(20, 1, true); // audio format 1 = linear PCM
+
+ view.setUint16(22, 2, true); // two channels (stereo)
+ view.setUint32(24, main.audiocontext.sampleRate, true); // sample rate
+ view.setUint32(28, main.audiocontext.sampleRate * 4, true); // byte rate = sampleRate * blockAlign
+ view.setUint16(32, 4, true); // block align = channels * bytes per sample
+ view.setUint16(34, 16, true); // bits per sample
+
+ // 'data' sub-chunk
+ writeUTFBytes(view, 36, 'data');
+ view.setUint32(40, interleaved.length * 2, true); // data length in bytes
+
+ var lng = interleaved.length;
+ var index = 44;
+ var volume = 1;
+
+ for (var i = 0; i < lng; i++) {
+ view.setInt16(index, interleaved[i] * (0x7fff * volume), true);
+ index += 2;
+ }
+
+ return view;
+}
+
+
+function interleave(leftChannel, rightChannel) {
+ var length = leftChannel.length + rightChannel.length;
+ var result = new Float32Array(length);
+ var inputIndex = 0;
+
+ for (var index = 0; index < length;) {
+ result[index++] = leftChannel[inputIndex];
+ result[index++] = rightChannel[inputIndex];
+ inputIndex++;
+ }
+
+ return result;
+}
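+
+// For example, interleave([l0, l1], [r0, r1]) yields [l0, r0, l1, r1],
+// the frame order required by the WAV 'data' chunk in convertToWav above.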
+
+function writeUTFBytes(view, offset, string) {
+ var lng = string.length;
+
+ for (var i = 0; i < lng; i++) {
+ view.setUint8(offset + i, string.charCodeAt(i));
+ }
+}
+
+function safeBufferSize(idealBufferSize) {
+ var bufferSize = idealBufferSize;
+
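+ // if the AudioWorkletNode constructor actually produced a
+ // ScriptProcessorNode (e.g. via a polyfill on older browsers), use its
+ // fixed buffer size instead of the requested one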
+ var tempAudioWorkletNode = new AudioWorkletNode(main.audiocontext, processorNames_default.a.soundFileProcessor);
+
+ if (tempAudioWorkletNode instanceof ScriptProcessorNode) {
+ bufferSize = tempAudioWorkletNode.bufferSize;
+ }
+
+ tempAudioWorkletNode.disconnect();
+ tempAudioWorkletNode = null;
+ return bufferSize;
+}
+/**
+ * Save a p5.SoundFile as a .wav file. The browser will prompt the user
+ * to download the file to their device.
+ * For uploading audio to a server, use
+ * `p5.SoundFile.saveBlob`.
+ *
+ * @for p5
+ * @method saveSound
+ * @param {p5.SoundFile} soundFile p5.SoundFile that you wish to save
+ * @param {String} fileName name of the resulting .wav file.
+ */
+
+
+function saveSound(soundFile, fileName) {
+ var dataView = convertToWav(soundFile.buffer);
+ p5.prototype.writeFile([dataView], fileName, 'wav');
+}
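+// A minimal usage sketch (with a hypothetical, already-loaded p5.SoundFile):
+//   saveSound(mySound, 'mySound.wav');
+// prompts the browser to download the buffer as a 16-bit stereo WAV file.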
+
+
+var CustomError = function CustomError(name, errorTrace, failedPath) {
+ var err = new Error();
+ var tempStack, splitStack;
+ err.name = name;
+ err.originalStack = err.stack + errorTrace;
+ tempStack = err.stack + errorTrace;
+ err.failedPath = failedPath;
+
+ splitStack = tempStack.split('\n').filter(function (ln) {
+ return !ln.match(/(p5.|native code|globalInit)/g);
+ });
+ err.stack = splitStack.join('\n');
+ return err;
+};
+
+ var errorHandler = (CustomError);
+
+var moduleSources = [__webpack_require__(27)["default"], __webpack_require__(28)["default"], __webpack_require__(29)["default"]];
+var audioWorklet_ac = main.audiocontext;
+var initializedAudioWorklets = false;
+
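+// The worklet processor sources are bundled as plain strings; wrapping each
+// one in a Blob URL lets the AudioContext load it via audioWorklet.addModule().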
+function loadAudioWorkletModules() {
+ return Promise.all(moduleSources.map(function (moduleSrc) {
+ var blob = new Blob([moduleSrc], {
+ type: 'application/javascript'
+ });
+ var objectURL = URL.createObjectURL(blob);
+ return audioWorklet_ac.audioWorklet.addModule(objectURL);
+ }));
+}
+
+p5.prototype.registerMethod('init', function () {
+ if (initializedAudioWorklets) return;
+
+ if (!this.preload && !window.preload) {
+ this.preload = function () {};
+ }
+
+
+ this._incrementPreload();
+
+ var onWorkletModulesLoad = function () {
+ initializedAudioWorklets = true;
+
+ this._decrementPreload();
+ }.bind(this);
+
+ loadAudioWorkletModules().then(onWorkletModulesLoad);
+});
+function panner_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+
+var panner_ac = main.audiocontext;
+var panner;
+
+if (typeof panner_ac.createStereoPanner !== 'undefined') {
+ var Panner =
+ function () {
+ function Panner(input, output) {
+ panner_classCallCheck(this, Panner);
+
+ this.stereoPanner = this.input = panner_ac.createStereoPanner();
+ input.connect(this.stereoPanner);
+ this.stereoPanner.connect(output);
+ }
+
+ _createClass(Panner, [{
+ key: "pan",
+ value: function pan(val, tFromNow) {
+ var time = tFromNow || 0;
+ var t = panner_ac.currentTime + time;
+ this.stereoPanner.pan.linearRampToValueAtTime(val, t);
+ }
+
+ }, {
+ key: "inputChannels",
+ value: function inputChannels() {}
+ }, {
+ key: "connect",
+ value: function connect(obj) {
+ this.stereoPanner.connect(obj);
+ }
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.stereoPanner) {
+ this.stereoPanner.disconnect();
+ }
+ }
+ }]);
+
+ return Panner;
+ }();
+
+ panner = Panner;
+} else {
+ var _Panner =
+ function () {
+ function _Panner(input, output, numInputChannels) {
+ panner_classCallCheck(this, _Panner);
+
+ this.input = panner_ac.createGain();
+ input.connect(this.input);
+ this.left = panner_ac.createGain();
+ this.right = panner_ac.createGain();
+ this.left.channelInterpretation = 'discrete';
+ this.right.channelInterpretation = 'discrete';
+
+ if (numInputChannels > 1) {
+ this.splitter = panner_ac.createChannelSplitter(2);
+ this.input.connect(this.splitter);
+ this.splitter.connect(this.left, 1);
+ this.splitter.connect(this.right, 0);
+ } else {
+ this.input.connect(this.left);
+ this.input.connect(this.right);
+ }
+
+ this.output = panner_ac.createChannelMerger(2);
+ this.left.connect(this.output, 0, 1);
+ this.right.connect(this.output, 0, 0);
+ this.output.connect(output);
+ }
+
+
+ _createClass(_Panner, [{
+ key: "pan",
+ value: function pan(val, tFromNow) {
+ var time = tFromNow || 0;
+ var t = panner_ac.currentTime + time;
+ var v = (val + 1) / 2;
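+ // equal-power crossfade: v maps the pan value from [-1, 1] to [0, 1],
+ // and the sine/cosine gain pair keeps total output power constant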
+ var rightVal = Math.cos(v * Math.PI / 2);
+ var leftVal = Math.sin(v * Math.PI / 2);
+ this.left.gain.linearRampToValueAtTime(leftVal, t);
+ this.right.gain.linearRampToValueAtTime(rightVal, t);
+ }
+ }, {
+ key: "inputChannels",
+ value: function inputChannels(numChannels) {
+ if (numChannels === 1) {
+ this.input.disconnect();
+ this.input.connect(this.left);
+ this.input.connect(this.right);
+ } else if (numChannels === 2) {
+ if (typeof this.splitter === 'undefined') {
+ this.splitter = panner_ac.createChannelSplitter(2);
+ }
+
+ this.input.disconnect();
+ this.input.connect(this.splitter);
+ this.splitter.connect(this.left, 1);
+ this.splitter.connect(this.right, 0);
+ }
+ }
+ }, {
+ key: "connect",
+ value: function connect(obj) {
+ this.output.connect(obj);
+ }
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.output) {
+ this.output.disconnect();
+ }
+ }
+ }]);
+
+ return _Panner;
+ }();
+
+ panner = _Panner;
+}
+
+ var panner_0 = (panner);
+function soundfile_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { soundfile_typeof = function _typeof(obj) { return typeof obj; }; } else { soundfile_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return soundfile_typeof(obj); }
+
+function soundfile_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function soundfile_createClass(Constructor, protoProps, staticProps) { if (protoProps) soundfile_defineProperties(Constructor.prototype, protoProps); if (staticProps) soundfile_defineProperties(Constructor, staticProps); return Constructor; }
+
+function soundfile_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+
+
+
+
+
+var soundfile_ac = main.audiocontext;
+
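+// Build a buffer whose sample values equal their own indices; played in
+// lockstep with the sound through a worklet, it reports the playhead position.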
+var _createCounterBuffer = function _createCounterBuffer(buffer) {
+ var len = buffer.length;
+ var audioBuf = soundfile_ac.createBuffer(1, buffer.length, soundfile_ac.sampleRate);
+ var arrayBuffer = audioBuf.getChannelData(0);
+
+ for (var index = 0; index < len; index++) {
+ arrayBuffer[index] = index;
+ }
+
+ return audioBuf;
+};
+
+
+var Cue = function Cue(callback, time, id, val) {
+ soundfile_classCallCheck(this, Cue);
+
+ this.callback = callback;
+ this.time = time;
+ this.id = id;
+ this.val = val;
+};
+
+
+function _clearOnEnd(e) {
+ var thisBufferSourceNode = e.target;
+ var soundFile = this;
+
+ thisBufferSourceNode._playing = false;
+ thisBufferSourceNode.removeEventListener('ended', soundFile._clearOnEnd);
+
+ soundFile._onended(soundFile);
+
+
+ soundFile.bufferSourceNodes.map(function (_, i) {
+ return i;
+ }).reverse().forEach(function (i) {
+ var n = soundFile.bufferSourceNodes[i];
+
+ if (n._playing === false) {
+ soundFile.bufferSourceNodes.splice(i, 1);
+ }
+ });
+
+ if (soundFile.bufferSourceNodes.length === 0) {
+ soundFile._playing = false;
+ }
+}
+/**
+ *
+ * SoundFile object with a path to a file.
+ *
+ *
+ * The p5.SoundFile may not be available immediately because
+ * it loads the file information asynchronously.
+ *
+ *
+ * To do something with the sound as soon as it loads,
+ * pass the name of a function as the second parameter.
+ *
+ *
+ * Only one file path is required. However, audio file formats
+ * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all
+ * web browsers. If you want to ensure compatibility, instead of a single
+ * file path, you may include an Array of filepaths, and the browser will
+ * choose a format that works.
+ *
+ * @class p5.SoundFile
+ * @constructor
+ * @param {String|Array} path path to a sound file (String). Optionally,
+ * you may include multiple file formats in
+ * an array. Alternately, accepts an object
+ * from the HTML5 File API, or a p5.File.
+ * @param {Function} [successCallback] Name of a function to call once file loads
+ * @param {Function} [errorCallback] Name of a function to call if file fails to
+ * load. This function will receive an error or
+ * XMLHttpRequest object with information
+ * about what went wrong.
+ * @param {Function} [whileLoadingCallback] Name of a function to call while file
+ * is loading. That function will
+ * receive progress of the request to
+ * load the sound file
+ * (between 0 and 1) as its first
+ * parameter. This progress
+ * does not account for the additional
+ * time needed to decode the audio data.
+ *
+ * @example
+ *
+ * let mySound;
+ * function preload() {
+ * soundFormats('mp3', 'ogg');
+ * mySound = loadSound('assets/doorbell');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap here to play', 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * // playing a sound file on a user gesture
+ * // is equivalent to `userStartAudio()`
+ * mySound.play();
+ * }
+ *
+ */
+
+
+var soundfile_SoundFile =
+function () {
+ function SoundFile(paths, onload, onerror, whileLoading) {
+ soundfile_classCallCheck(this, SoundFile);
+
+ if (typeof paths !== 'undefined') {
+ if (typeof paths === 'string' || typeof paths[0] === 'string') {
+ var path = p5.prototype._checkFileFormats(paths);
+
+ this.url = path;
+ } else if (soundfile_typeof(paths) === 'object') {
+ if (!(window.File && window.FileReader && window.FileList && window.Blob)) {
+ throw 'Unable to load file because the File API is not supported';
+ }
+ }
+
+
+ if (paths.file) {
+ paths = paths.file;
+ }
+
+ this.file = paths;
+ }
+
+
+ this._onended = function () {};
+
+ this._looping = false;
+ this._playing = false;
+ this._paused = false;
+ this._pauseTime = 0;
+
+ this._cues = [];
+ this._cueIDCounter = 0;
+
+ this._lastPos = 0;
+ this._counterNode = null;
+ this._workletNode = null;
+
+ this.bufferSourceNodes = [];
+
+ this.bufferSourceNode = null;
+ this.buffer = null;
+ this.playbackRate = 1;
+ this.input = main.audiocontext.createGain();
+ this.output = main.audiocontext.createGain();
+ this.reversed = false;
+
+ this.startTime = 0;
+ this.endTime = null;
+ this.pauseTime = 0;
+
+ this.mode = 'sustain';
+
+ this.startMillis = null;
+
+ this.panPosition = 0.0;
+ this.panner = new panner_0(this.output, main.input, 2);
+
+ if (this.url || this.file) {
+ this.load(onload, onerror);
+ }
+
+
+ main.soundArray.push(this);
+
+ if (typeof whileLoading === 'function') {
+ this._whileLoading = whileLoading;
+ } else {
+ this._whileLoading = function () {};
+ }
+
+ this._clearOnEnd = _clearOnEnd.bind(this);
+
+ this.amp = this.setVolume;
+
+ this.fade = this.setVolume;
+ }
+ /**
+ * This is a helper function that the p5.SoundFile calls to load
+ * itself. Accepts a callback (the name of another function)
+ * as an optional parameter.
+ *
+ * @private
+ * @for p5.SoundFile
+ * @param {Function} [successCallback] Name of a function to call once file loads
+ * @param {Function} [errorCallback] Name of a function to call if there is an error
+ */
+
+
+ soundfile_createClass(SoundFile, [{
+ key: "load",
+ value: function load(callback, errorCallback) {
+ var self = this;
+ var errorTrace = new Error().stack;
+
+ if (this.url !== undefined && this.url !== '') {
+ var request = new XMLHttpRequest();
+ request.addEventListener('progress', function (evt) {
+ self._updateProgress(evt);
+ }, false);
+ request.open('GET', this.url, true);
+ request.responseType = 'arraybuffer';
+
+ request.onload = function () {
+ if (request.status === 200) {
+ if (!self.panner) return;
+ soundfile_ac.decodeAudioData(request.response,
+ function (buff) {
+ if (!self.panner) return;
+ self.buffer = buff;
+ self.panner.inputChannels(buff.numberOfChannels);
+
+ if (callback) {
+ callback(self);
+ }
+ },
+ function () {
+ if (!self.panner) return;
+ var err = new errorHandler('decodeAudioData', errorTrace, self.url);
+ var msg = 'AudioContext error at decodeAudioData for ' + self.url;
+
+ if (errorCallback) {
+ err.msg = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ });
+ }
+ else {
+ if (!self.panner) return;
+ var err = new errorHandler('loadSound', errorTrace, self.url);
+ var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ }
+ };
+
+
+ request.onerror = function () {
+ var err = new errorHandler('loadSound', errorTrace, self.url);
+ var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ };
+
+ request.send();
+ } else if (this.file !== undefined) {
+ var reader = new FileReader();
+
+ reader.onload = function () {
+ if (!self.panner) return;
+ soundfile_ac.decodeAudioData(reader.result, function (buff) {
+ if (!self.panner) return;
+ self.buffer = buff;
+ self.panner.inputChannels(buff.numberOfChannels);
+
+ if (callback) {
+ callback(self);
+ }
+ });
+ };
+
+ reader.onerror = function (e) {
+ if (!self.panner) return;
+
+ if (errorCallback) {
+ errorCallback(e);
+ }
+ };
+
+ reader.readAsArrayBuffer(this.file);
+ }
+ }
+
+ }, {
+ key: "_updateProgress",
+ value: function _updateProgress(evt) {
+ if (evt.lengthComputable) {
+ var percentComplete = evt.loaded / evt.total * 0.99;
+
+ this._whileLoading(percentComplete, evt);
+
+ } else {
+ this._whileLoading('size unknown');
+ }
+ }
+ /**
+ * Returns true if the sound file finished loading successfully.
+ *
+ * @method isLoaded
+ * @for p5.SoundFile
+ * @return {Boolean}
+ */
+
+ }, {
+ key: "isLoaded",
+ value: function isLoaded() {
+ if (this.buffer) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+ /**
+ * Play the p5.SoundFile
+ *
+ * @method play
+ * @for p5.SoundFile
+ * @param {Number} [startTime] (optional) schedule playback to start (in seconds from now).
+ * @param {Number} [rate] (optional) playback rate
+ * @param {Number} [amp] (optional) amplitude (volume)
+ * of playback
+ * @param {Number} [cueStart] (optional) cue start time in seconds
+ * @param {Number} [duration] (optional) duration of playback in seconds
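+ * @example
+ * // a minimal sketch (the asset path is hypothetical):
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
+ * function mousePressed() {
+ * // start now, at normal rate, half volume, 0.5s in, for 2 seconds
+ * mySound.play(0, 1, 0.5, 0.5, 2);
+ * }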
+ */
+
+ }, {
+ key: "play",
+ value: function play(startTime, rate, amp, _cueStart, duration) {
+ if (!this.output) {
+ console.warn('SoundFile.play() called after dispose');
+ return;
+ }
+
+ var now = main.audiocontext.currentTime;
+ var cueStart, cueEnd;
+ var time = startTime || 0;
+
+ if (time < 0) {
+ time = 0;
+ }
+
+ time = time + now;
+
+ if (typeof rate !== 'undefined') {
+ this.rate(rate);
+ }
+
+ if (typeof amp !== 'undefined') {
+ this.setVolume(amp);
+ }
+
+
+ if (this.buffer) {
+ this._pauseTime = 0;
+
+ if (this.mode === 'restart' && this.buffer && this.bufferSourceNode) {
+ this.bufferSourceNode.stop(time);
+
+ this._counterNode.stop(time);
+ }
+
+
+ if (this.mode === 'untildone' && this.isPlaying()) {
+ return;
+ }
+
+
+ this.bufferSourceNode = this._initSourceNode();
+
+ delete this._counterNode;
+ this._counterNode = this._initCounterNode();
+
+ if (_cueStart) {
+ if (_cueStart >= 0 && _cueStart < this.buffer.duration) {
+ cueStart = _cueStart;
+ } else {
+ throw 'start time out of range';
+ }
+ } else {
+ cueStart = 0;
+ }
+
+ if (duration) {
+ duration = duration <= this.buffer.duration - cueStart ? duration : this.buffer.duration;
+ }
+
+
+ if (this._paused) {
+ this.bufferSourceNode.start(time, this.pauseTime, duration);
+
+ this._counterNode.start(time, this.pauseTime, duration);
+ } else {
+ this.bufferSourceNode.start(time, cueStart, duration);
+
+ this._counterNode.start(time, cueStart, duration);
+ }
+
+ this._playing = true;
+ this._paused = false;
+
+ this.bufferSourceNodes.push(this.bufferSourceNode);
+ this.bufferSourceNode._arrayIndex = this.bufferSourceNodes.length - 1;
+ this.bufferSourceNode.addEventListener('ended', this._clearOnEnd);
+ }
+ else {
+ throw 'not ready to play file, buffer has yet to load. Try preload()';
+ }
+
+
+ this.bufferSourceNode.loop = this._looping;
+ this._counterNode.loop = this._looping;
+
+ if (this._looping === true) {
+ cueEnd = duration ? duration : cueStart - 0.000000000000001;
+ this.bufferSourceNode.loopStart = cueStart;
+ this.bufferSourceNode.loopEnd = cueEnd;
+ this._counterNode.loopStart = cueStart;
+ this._counterNode.loopEnd = cueEnd;
+ }
+ }
+ /**
+ * p5.SoundFile has two play modes: restart and
+ * sustain. Play Mode determines what happens to a
+ * p5.SoundFile if it is triggered while in the middle of playback.
+ * In sustain mode, playback will continue simultaneous to the
+ * new playback. In restart mode, play() will stop playback
+ * and start over. With untilDone, a sound will play only if it's
+ * not already playing. Sustain is the default mode.
+ *
+ * @method playMode
+ * @for p5.SoundFile
+ * @param {String} str 'restart' or 'sustain' or 'untilDone'
+ * @example
+ *
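+ * // a minimal sketch of the modes (the asset path is hypothetical):
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
+ * function mousePressed() {
+ * mySound.playMode('sustain'); // overlapping playback (the default)
+ * mySound.play();
+ * }
+ * function keyPressed() {
+ * mySound.playMode('restart'); // stop current playback and start over
+ * mySound.play();
+ * }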
+ */
+
+ }, {
+ key: "playMode",
+ value: function playMode(str) {
+ var s = str.toLowerCase();
+
+ if (s === 'restart' && this.buffer && this.bufferSourceNode) {
+ for (var i = 0; i < this.bufferSourceNodes.length - 1; i++) {
+ var now = main.audiocontext.currentTime;
+ this.bufferSourceNodes[i].stop(now);
+ }
+ }
+
+
+ if (s === 'restart' || s === 'sustain' || s === 'untildone') {
+ this.mode = s;
+ } else {
+ throw 'Invalid play mode. Must be "restart", "sustain", or "untilDone"';
+ }
+ }
+ /**
+ * Pauses a file that is currently playing. If the file is not
+ * playing, then nothing will happen.
+ *
+ * After pausing, .play() will resume from the paused
+ * position.
+ * If p5.SoundFile had been set to loop before it was paused,
+ * it will continue to loop after it is unpaused with .play().
+ *
+ * @method pause
+ * @for p5.SoundFile
+ * @param {Number} [startTime] (optional) schedule event to occur
+ * seconds from now
+ * @example
+ *
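+ * // a minimal sketch (the asset path is hypothetical):
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
+ * function mousePressed() {
+ * if (mySound.isPaused()) {
+ * mySound.play(); // resumes from the paused position
+ * } else if (mySound.isPlaying()) {
+ * mySound.pause();
+ * } else {
+ * mySound.loop();
+ * }
+ * }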
+ */
+
+ }, {
+ key: "loop",
+ value: function loop(startTime, rate, amp, loopStart, duration) {
+ this._looping = true;
+ this.play(startTime, rate, amp, loopStart, duration);
+ }
+ /**
+ * Set a p5.SoundFile's looping flag to true or false. If the sound
+ * is currently playing, this change will take effect when it
+ * reaches the end of the current playback.
+ *
+ * @method setLoop
+ * @for p5.SoundFile
+ * @param {Boolean} bool set looping to true or false
+ */
+
+ }, {
+ key: "setLoop",
+ value: function setLoop(bool) {
+ if (bool === true) {
+ this._looping = true;
+ } else if (bool === false) {
+ this._looping = false;
+ } else {
+ throw 'Error: setLoop accepts either true or false';
+ }
+
+ if (this.bufferSourceNode) {
+ this.bufferSourceNode.loop = this._looping;
+ this._counterNode.loop = this._looping;
+ }
+ }
+ /**
+ * Returns 'true' if a p5.SoundFile is currently looping and playing, 'false' if not.
+ *
+ * @method isLooping
+ * @for p5.SoundFile
+ * @return {Boolean}
+ */
+
+ }, {
+ key: "isLooping",
+ value: function isLooping() {
+ if (!this.bufferSourceNode) {
+ return false;
+ }
+
+ if (this._looping === true && this.isPlaying() === true) {
+ return true;
+ }
+
+ return false;
+ }
+ /**
+ * Returns true if a p5.SoundFile is playing, false if not (i.e.
+ * paused or stopped).
+ *
+ * @method isPlaying
+ * @for p5.SoundFile
+ * @return {Boolean}
+ */
+
+ }, {
+ key: "isPlaying",
+ value: function isPlaying() {
+ return this._playing;
+ }
+ /**
+ * Returns true if a p5.SoundFile is paused, false if not (i.e.
+ * playing or stopped).
+ *
+ * @method isPaused
+ * @for p5.SoundFile
+ * @return {Boolean}
+ */
+
+ }, {
+ key: "isPaused",
+ value: function isPaused() {
+ return this._paused;
+ }
+ /**
+ * Stop soundfile playback.
+ *
+ * @method stop
+ * @for p5.SoundFile
+ * @param {Number} [startTime] (optional) schedule event to occur
+ * in seconds from now
+ */
+
+ }, {
+ key: "stop",
+ value: function stop(timeFromNow) {
+ var time = timeFromNow || 0;
+
+ if (this.mode === 'sustain' || this.mode === 'untildone') {
+ this.stopAll(time);
+ this._playing = false;
+ this.pauseTime = 0;
+ this._paused = false;
+ } else if (this.buffer && this.bufferSourceNode) {
+ var now = main.audiocontext.currentTime;
+ var t = time || 0;
+ this.pauseTime = 0;
+ this.bufferSourceNode.stop(now + t);
+
+ this._counterNode.stop(now + t);
+
+ this._playing = false;
+ this._paused = false;
+ }
+ }
+ /**
+ * Stop playback on all of this soundfile's sources.
+ * @private
+ */
+
+ }, {
+ key: "stopAll",
+ value: function stopAll(_time) {
+ var now = main.audiocontext.currentTime;
+ var time = _time || 0;
+
+ if (this.buffer && this.bufferSourceNode) {
+ for (var i in this.bufferSourceNodes) {
+ var bufferSourceNode = this.bufferSourceNodes[i];
+
+ if (bufferSourceNode) {
+ try {
+ bufferSourceNode.stop(now + time);
+ } catch (e) {
+ }
+ }
+ }
+
+ this._counterNode.stop(now + time);
+ }
+ }
+ }, {
+ key: "getVolume",
+ value: function getVolume() {
+ return this.output.gain.value;
+ }
+ /**
+ * Set the stereo panning of a p5.sound object to
+ * a floating point number between -1.0 (left) and 1.0 (right).
+ * Default is 0.0 (center).
+ *
+ * @method pan
+ * @for p5.SoundFile
+ * @param {Number} [panValue] Set the stereo panner
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ * @example
+ *
+ * let ballX = 0;
+ * let soundFile;
+ *
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * ballX = constrain(mouseX, 0, width);
+ * ellipse(ballX, height/2, 20, 20);
+ * }
+ *
+ * function canvasPressed(){
+ * // map the ball's x location to a panning degree
+ * // between -1.0 (left) and 1.0 (right)
+ * let panning = map(ballX, 0., width,-1.0, 1.0);
+ * soundFile.pan(panning);
+ * soundFile.play();
+ * }
+ *
+ */
+
+ }, {
+ key: "pan",
+ value: function pan(pval, tFromNow) {
+ this.panPosition = pval;
+ this.panner.pan(pval, tFromNow);
+ }
+ /**
+ * Returns the current stereo pan position (-1.0 to 1.0)
+ *
+ * @method getPan
+ * @for p5.SoundFile
+ * @return {Number} Returns the stereo pan setting of the Oscillator
+ * as a number between -1.0 (left) and 1.0 (right).
+ * 0.0 is center and default.
+ */
+
+ }, {
+ key: "getPan",
+ value: function getPan() {
+ return this.panPosition;
+ }
+ /**
+ * Set the playback rate of a sound file. Will change the speed and the pitch.
+ * Values less than zero will reverse the audio buffer.
+ *
+ * @method rate
+ * @for p5.SoundFile
+ * @param {Number} [playbackRate] Set the playback rate. 1.0 is normal,
+ * .5 is half-speed, 2.0 is twice as fast.
+ * Values less than zero play backwards.
+ * @example
+ *
+ * let mySound;
+ *
+ * function preload() {
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * }
+ * function canvasPressed() {
+ * mySound.loop();
+ * }
+ * function mouseReleased() {
+ * mySound.pause();
+ * }
+ * function draw() {
+ * background(220);
+ *
+ * // Set the rate to a range between 0.1 and 4
+ * // Changing the rate also alters the pitch
+ * let playbackRate = map(mouseY, 0.1, height, 2, 0);
+ * playbackRate = constrain(playbackRate, 0.01, 4);
+ * mySound.rate(playbackRate);
+ *
+ * line(0, mouseY, width, mouseY);
+ * text('rate: ' + round(playbackRate * 100) + '%', 10, 20);
+ * }
+ *
+ *
+ *
+ *
+ */
+
+ }, {
+ key: "rate",
+ value: function rate(playbackRate) {
+ var reverse = false;
+
+ if (typeof playbackRate === 'undefined') {
+ return this.playbackRate;
+ }
+
+ this.playbackRate = playbackRate;
+
+ if (playbackRate === 0) {
+ playbackRate = 0.0000000000001;
+ } else if (playbackRate < 0 && !this.reversed) {
+ playbackRate = Math.abs(playbackRate);
+ reverse = true;
+ } else if (playbackRate > 0 && this.reversed) {
+ reverse = true;
+ }
+
+ if (this.bufferSourceNode) {
+ var now = main.audiocontext.currentTime;
+ this.bufferSourceNode.playbackRate.cancelScheduledValues(now);
+ this.bufferSourceNode.playbackRate.linearRampToValueAtTime(Math.abs(playbackRate), now);
+
+ this._counterNode.playbackRate.cancelScheduledValues(now);
+
+ this._counterNode.playbackRate.linearRampToValueAtTime(Math.abs(playbackRate), now);
+ }
+
+ if (reverse) {
+ this.reverseBuffer();
+ }
+
+ return this.playbackRate;
+ }
+
+ }, {
+ key: "setPitch",
+ value: function setPitch(num) {
+ var newPlaybackRate = midiToFreq(num) / midiToFreq(60);
+ this.rate(newPlaybackRate);
+ }
+ }, {
+ key: "getPlaybackRate",
+ value: function getPlaybackRate() {
+ return this.playbackRate;
+ }
+ /**
+ * Multiply the output volume (amplitude) of a sound file
+ * between 0.0 (silence) and 1.0 (full volume).
+ * 1.0 is the maximum amplitude of a digital sound, so multiplying
+ * by greater than 1.0 may cause digital distortion. To
+ * fade, provide a rampTime parameter. For more
+ * complex fades, see the Envelope class.
+ *
+ * Alternately, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ * @method setVolume
+ * @for p5.SoundFile
+ * @param {Number|Object} volume Volume (amplitude) between 0.0
+ * and 1.0 or modulating signal/oscillator
+ * @param {Number} [rampTime] Fade for t seconds
+ * @param {Number} [timeFromNow] Schedule this event to happen at
+ * t seconds in the future
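+ * @example
+ * // a minimal sketch (the asset path is hypothetical): 2-second fade-in
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
+ * function mousePressed() {
+ * mySound.setVolume(0); // start silent
+ * mySound.play();
+ * mySound.setVolume(1, 2); // ramp to full volume over 2 seconds
+ * }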
+ */
+
+ }, {
+ key: "setVolume",
+ value: function setVolume(vol, _rampTime, _tFromNow) {
+ if (typeof vol === 'number') {
+ var rampTime = _rampTime || 0;
+ var tFromNow = _tFromNow || 0;
+ var now = main.audiocontext.currentTime;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+ } else if (vol) {
+ vol.connect(this.output.gain);
+ } else {
+ return this.output.gain;
+ }
+ }
+ /**
+ * Returns the duration of a sound file in seconds.
+ *
+ * @method duration
+ * @for p5.SoundFile
+ * @return {Number} The duration of the soundFile in seconds.
+ */
+
+ }, {
+ key: "duration",
+ value: function duration() {
+ if (this.buffer) {
+ return this.buffer.duration;
+ } else {
+ return 0;
+ }
+ }
+ /**
+ * Return the current position of the p5.SoundFile playhead, in seconds.
+ * Time is relative to the normal buffer direction, so if `reverseBuffer`
+ * has been called, currentTime will count backwards.
+ *
+ * @method currentTime
+ * @for p5.SoundFile
+ * @return {Number} currentTime of the soundFile in seconds.
+ */
+
+ }, {
+ key: "currentTime",
+ value: function currentTime() {
+ return this.reversed ? Math.abs(this._lastPos - this.buffer.length) / soundfile_ac.sampleRate : this._lastPos / soundfile_ac.sampleRate;
+ }
+ /**
+ * Move the playhead of a soundfile that is currently playing to a
+ * new position and a new duration, in seconds.
+ * If none are given, will reset the file to play entire duration
+ * from start to finish. To set the position of a soundfile that is
+ * not currently playing, use the `play` or `loop` methods.
+ *
+ * @method jump
+ * @for p5.SoundFile
+ * @param {Number} cueTime cueTime of the soundFile in seconds.
+ * @param {Number} duration duration in seconds.
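+ * @example
+ * // a minimal sketch (the asset path is hypothetical):
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
+ * function mousePressed() {
+ * if (mySound.isPlaying()) {
+ * mySound.jump(2); // move the playhead to 2 seconds in
+ * } else {
+ * mySound.loop();
+ * }
+ * }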
+ */
+
+ }, {
+ key: "jump",
+ value: function jump(cueTime, duration) {
+ if (cueTime < 0 || cueTime > this.buffer.duration) {
+ throw 'jump time out of range';
+ }
+
+ if (duration > this.buffer.duration - cueTime) {
+ throw 'end time out of range';
+ }
+
+ var cTime = cueTime || 0;
+ var dur = duration || undefined;
+
+ if (this.isPlaying()) {
+ this.stop(0);
+ this.play(0, this.playbackRate, this.output.gain.value, cTime, dur);
+ }
+ }
+ /**
+ * Return the number of channels in a sound file.
+ * For example, Mono = 1, Stereo = 2.
+ *
+ * @method channels
+ * @for p5.SoundFile
+ * @return {Number} [channels]
+ */
+
+ }, {
+ key: "channels",
+ value: function channels() {
+ return this.buffer.numberOfChannels;
+ }
+ /**
+ * Return the sample rate of the sound file.
+ *
+ * @method sampleRate
+ * @for p5.SoundFile
+ * @return {Number} [sampleRate]
+ */
+
+ }, {
+ key: "sampleRate",
+ value: function sampleRate() {
+ return this.buffer.sampleRate;
+ }
+ /**
+ * Return the number of samples in a sound file.
+ * Equal to sampleRate * duration.
+ *
+ * @method frames
+ * @for p5.SoundFile
+ * @return {Number} [sampleCount]
+ */
+
+ }, {
+ key: "frames",
+ value: function frames() {
+ return this.buffer.length;
+ }
+ /**
+ * Returns an array of amplitude peaks in a p5.SoundFile that can be
+ * used to draw a static waveform. Scans through the p5.SoundFile's
+ * audio buffer to find the greatest amplitudes. Accepts one
+ * parameter, 'length', which determines size of the array.
+ * Larger arrays result in more precise waveform visualizations.
+ *
+ * Inspired by Wavesurfer.js.
+ *
+ * @method getPeaks
+ * @for p5.SoundFile
+ * @params {Number} [length] length is the size of the returned array.
+ * Larger length results in more precision.
+ * Defaults to 5*width of the browser window.
+ * @returns {Float32Array} Array of peaks.
+ */
+
+ }, {
+ key: "getPeaks",
+ value: function getPeaks(length) {
+ if (this.buffer) {
+ if (!length) {
+ length = window.width * 5;
+ }
+
+ if (this.buffer) {
+ var buffer = this.buffer;
+ var sampleSize = buffer.length / length;
+ var sampleStep = ~~(sampleSize / 10) || 1;
+ var channels = buffer.numberOfChannels;
+ var peaks = new Float32Array(Math.round(length));
+
+ for (var c = 0; c < channels; c++) {
+ var chan = buffer.getChannelData(c);
+
+ for (var i = 0; i < length; i++) {
+ var start = ~~(i * sampleSize);
+ var end = ~~(start + sampleSize);
+ var max = 0;
+
+ for (var j = start; j < end; j += sampleStep) {
+ var value = chan[j];
+
+ if (value > max) {
+ max = value;
+ } else if (-value > max) {
+ max = value;
+ }
+ }
+
+ if (c === 0 || Math.abs(max) > peaks[i]) {
+ peaks[i] = max;
+ }
+ }
+ }
+
+ return peaks;
+ }
+ } else {
+ throw 'Cannot load peaks yet, buffer is not loaded';
+ }
+ }
+ /**
+ * Reverses the p5.SoundFile's buffer source.
+ * Playback must be handled separately (see example).
+ *
+ * @method reverseBuffer
+ * @for p5.SoundFile
+ * @example
+ *
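+ * // a minimal sketch (the asset path is hypothetical); note that
+ * // playback is started separately, after the buffer is reversed:
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
+ * function mousePressed() {
+ * mySound.reverseBuffer();
+ * mySound.play();
+ * }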
+ */
+
+ }, {
+ key: "reverseBuffer",
+ value: function reverseBuffer() {
+ if (this.buffer) {
+ var currentPos = this._lastPos / soundfile_ac.sampleRate;
+ var curVol = this.getVolume();
+ this.setVolume(0, 0.001);
+ var numChannels = this.buffer.numberOfChannels;
+
+ for (var i = 0; i < numChannels; i++) {
+ this.buffer.getChannelData(i).reverse();
+ }
+
+
+ this.reversed = !this.reversed;
+
+ if (this.isPlaying() && currentPos) {
+ this.jump(this.duration() - currentPos);
+ }
+
+ this.setVolume(curVol, 0.001);
+ } else {
+ throw 'SoundFile is not done loading';
+ }
+ }
+ /**
+ * Schedule an event to be called when the soundfile
+ * reaches the end of a buffer. If the soundfile is
+ * playing through once, this will be called when it
+ * ends. If it is looping, it will be called when
+ * stop is called.
+ *
+ * @method onended
+ * @for p5.SoundFile
+ * @param {Function} callback function to call when the
+ * soundfile has ended.
+ */
+
+ }, {
+ key: "onended",
+ value: function onended(callback) {
+ this._onended = callback;
+ return this;
+ }
+ }, {
+ key: "add",
+ value: function add() {
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var now = main.audiocontext.currentTime;
+
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+ this.stop(now);
+
+ if (this.buffer && this.bufferSourceNode) {
+ for (var i = 0; i < this.bufferSourceNodes.length - 1; i++) {
+ if (this.bufferSourceNodes[i] !== null) {
+ this.bufferSourceNodes[i].disconnect();
+
+ try {
+ this.bufferSourceNodes[i].stop(now);
+ } catch (e) {
+ console.warn('no buffer source node to dispose');
+ }
+
+ this.bufferSourceNodes[i] = null;
+ }
+ }
+
+ if (this.isPlaying()) {
+ try {
+ this._counterNode.stop(now);
+ } catch (e) {
+ console.log(e);
+ }
+
+ this._counterNode = null;
+ }
+ }
+
+ if (this.output) {
+ this.output.disconnect();
+ this.output = null;
+ }
+
+ if (this.panner) {
+ this.panner.disconnect();
+ this.panner = null;
+ }
+ }
+ /**
+ * Connects the output of a p5sound object to input of another
+ * p5.sound object. For example, you may connect a p5.SoundFile to an
+ * FFT or an Effect. If no parameter is given, it will connect to
+ * the main output. Most p5sound objects connect to the master
+ * output when they are created.
+ *
+ * @method connect
+ * @for p5.SoundFile
+ * @param {Object} [object] Audio object that accepts an input
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ if (!unit) {
+ this.panner.connect(main.input);
+ } else {
+ if (unit.hasOwnProperty('input')) {
+ this.panner.connect(unit.input);
+ } else {
+ this.panner.connect(unit);
+ }
+ }
+ }
+ /**
+ * Disconnects the output of this p5sound object.
+ *
+ * @method disconnect
+ * @for p5.SoundFile
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.panner) {
+ this.panner.disconnect();
+ }
+ }
+ /**
+ * Deprecated: p5.SoundFile.getLevel has been removed from the library;
+ * use p5.Amplitude instead.
+ */
+
+ }, {
+ key: "getLevel",
+ value: function getLevel() {
+ console.warn('p5.SoundFile.getLevel has been removed from the library. Use p5.Amplitude instead');
+ }
+ /**
+ * Reset the source for this SoundFile to a
+ * new path (URL).
+ *
+ * @method setPath
+ * @for p5.SoundFile
+ * @param {String} path path to audio file
+ * @param {Function} callback Callback
+ */
+
+ }, {
+ key: "setPath",
+ value: function setPath(p, callback) {
+ var path = p5.prototype._checkFileFormats(p);
+
+ this.url = path;
+ this.load(callback);
+ }
+ /**
+ * Replace the current Audio Buffer with a new Buffer.
+ *
+ * @method setBuffer
+ * @for p5.SoundFile
+ * @param {Array} buf Array of Float32 Array(s). 2 Float32 Arrays
+ * will create a stereo source. 1 will create
+ * a mono source.
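+ * @example
+ * // a hedged sketch: fill a fresh SoundFile with roughly one second
+ * // of noise (assumes a 44.1 kHz audio context for the duration claim)
+ * let mySound;
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(() => mySound.play());
+ * mySound = new p5.SoundFile();
+ * let noise = new Float32Array(44100);
+ * for (let i = 0; i < noise.length; i++) {
+ * noise[i] = random(-1, 1);
+ * }
+ * mySound.setBuffer([noise]); // one Float32Array makes a mono source
+ * }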
+ */
+
+ }, {
+ key: "setBuffer",
+ value: function setBuffer(buf) {
+ var numChannels = buf.length;
+ var size = buf[0].length;
+ var newBuffer = soundfile_ac.createBuffer(numChannels, size, soundfile_ac.sampleRate);
+
+ if (!(buf[0] instanceof Float32Array)) {
+ buf[0] = new Float32Array(buf[0]);
+ }
+
+ for (var channelNum = 0; channelNum < numChannels; channelNum++) {
+ var channel = newBuffer.getChannelData(channelNum);
+ channel.set(buf[channelNum]);
+ }
+
+ this.buffer = newBuffer;
+
+ this.panner.inputChannels(numChannels);
+ }
+
+ }, {
+ key: "_initCounterNode",
+ value: function _initCounterNode() {
+ var _this = this;
+
+ var self = this;
+ var now = soundfile_ac.currentTime;
+ var cNode = soundfile_ac.createBufferSource();
+ var workletBufferSize = safeBufferSize(256);
+
+ if (self._workletNode) {
+ self._workletNode.disconnect();
+
+ delete self._workletNode;
+ }
+
+ self._workletNode = new AudioWorkletNode(soundfile_ac, processorNames_default.a.soundFileProcessor, {
+ processorOptions: {
+ bufferSize: workletBufferSize
+ }
+ });
+
+ self._workletNode.port.onmessage = function (event) {
+ if (event.data.name === 'position') {
+ if (event.data.position === 0) {
+ return;
+ }
+
+ _this._lastPos = event.data.position;
+
+ _this._onTimeUpdate(self._lastPos);
+ }
+ };
+
+
+ cNode.buffer = _createCounterBuffer(self.buffer);
+ cNode.playbackRate.setValueAtTime(self.playbackRate, now);
+ cNode.connect(self._workletNode);
+
+ self._workletNode.connect(p5.soundOut._silentNode);
+
+ return cNode;
+ }
+
+ }, {
+ key: "_initSourceNode",
+ value: function _initSourceNode() {
+ var bufferSourceNode = soundfile_ac.createBufferSource();
+ bufferSourceNode.buffer = this.buffer;
+ bufferSourceNode.playbackRate.value = this.playbackRate;
+ bufferSourceNode.connect(this.output);
+ return bufferSourceNode;
+ }
+ }, {
+ key: "processPeaks",
+ value: function processPeaks(callback, _initThreshold, _minThreshold, _minPeaks) {
+ console.warn('processPeaks is deprecated');
+ }
+ /**
+ * Schedule events to trigger every time this p5.SoundFile
+ * reaches a playback cue point.
+ *
+ * Accepts a callback function, a time (in seconds) at which to trigger
+ * the callback, and an optional value for the callback.
+ *
+ * The optional value, if provided, is passed to the callback
+ * function when the cue point is reached.
+ *
+ *
+ * @method addCue
+ * @for p5.SoundFile
+ * @param {Number} time Time in seconds, relative to this sound
+ * file's playback. For example, to trigger
+ * an event every time playback reaches two
+ * seconds, pass in the number 2.
+ * @param {Function} callback Name of a function that will be
+ * called at the given time. The callback
+ * receives the cue's value (if provided)
+ * as its parameter.
+ * @param {Object} [value] An object to be passed as the
+ * second parameter to the
+ * callback function.
+ * @return {Number} id ID of this cue,
+ * useful for removeCue(id)
+ * @example
+ *
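+ * // a minimal sketch (the asset path is hypothetical):
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
+ * function mousePressed() {
+ * // in this build, the callback receives the cue's value
+ * mySound.addCue(0.5, changeBackground, 'red');
+ * mySound.addCue(1.0, changeBackground, 'blue');
+ * mySound.play();
+ * }
+ * function changeBackground(val) {
+ * background(val);
+ * }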
+ */
+
+ }, {
+ key: "addCue",
+ value: function addCue(time, callback, val) {
+ var id = this._cueIDCounter++;
+ var cue = new Cue(callback, time, id, val);
+
+ this._cues.push(cue);
+
+
+ return id;
+ }
+ /**
+ * Remove a callback based on its ID. The ID is returned by the
+ * addCue method.
+ *
+ * @method removeCue
+ * @for p5.SoundFile
+ * @param {Number} id ID of the cue, as returned by addCue
+ */
+
+ }, {
+ key: "removeCue",
+ value: function removeCue(id) {
+ var cueLength = this._cues.length;
+
+ for (var i = 0; i < cueLength; i++) {
+ var cue = this._cues[i];
+
+ if (cue.id === id) {
+ this._cues.splice(i, 1);
+
+ break;
+ }
+ }
+
+ if (this._cues.length === 0) {
+ // no cues remain; nothing further to clean up in this build
+ }
+ }
+ /**
+ * Remove all of the callbacks that had originally been scheduled
+ * via the addCue method.
+ *
+ * @method clearCues
+ */
+
+ }, {
+ key: "clearCues",
+ value: function clearCues() {
+ this._cues = [];
+ }
+
+ }, {
+ key: "_onTimeUpdate",
+ value: function _onTimeUpdate(position) {
+ var playbackTime = position / this.buffer.sampleRate;
+ var cueLength = this._cues.length;
+
+ for (var i = 0; i < cueLength; i++) {
+ var cue = this._cues[i];
+ var callbackTime = cue.time;
+ var val = cue.val;
+ var leftLimit = this._prevUpdateTime || 0;
+ var rightLimit = playbackTime;
+
+ if (leftLimit <= callbackTime && callbackTime <= rightLimit) {
+ cue.callback(val);
+ }
+ }
+
+ this._prevUpdateTime = playbackTime;
+ }
+ /**
+ * Save a p5.SoundFile as a .wav file. The browser will prompt the user
+ * to download the file to their device. To upload a file to a server, see
+ * getBlob
+ *
+ * @method save
+ * @for p5.SoundFile
+ * @param {String} [fileName] name of the resulting .wav file.
+ * @example
+ *
+ */
+
+ }, {
+ key: "save",
+ value: function save(fileName) {
+ p5.prototype.saveSound(this, fileName, 'wav');
+ }
+ /**
+ * This method is useful for sending a SoundFile to a server. It returns the
+ * .wav-encoded audio data as a "Blob".
+ * A Blob is a file-like data object that can be uploaded to a server
+ * with an http request. We'll
+ * use the `httpDo` options object to send a POST request with some
+ * specific options: we encode the request as `multipart/form-data`,
+ * and attach the blob as one of the form values using `FormData`.
+ *
+ *
+ * @method getBlob
+ * @for p5.SoundFile
+ * @returns {Blob} A file-like data object
+ * @example
+ *
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ *
+ * function setup() {
+ * noCanvas();
+ * let soundBlob = mySound.getBlob();
+ *
+ * // Now we can send the blob to a server...
+ * let serverUrl = 'https://jsonplaceholder.typicode.com/posts';
+ * // FormData.append() returns undefined, so build the form first;
+ * // the browser sets the multipart/form-data header automatically
+ * let formData = new FormData();
+ * formData.append('soundBlob', soundBlob);
+ * let httpRequestOptions = {
+ * method: 'POST',
+ * body: formData
+ * };
+ * httpDo(serverUrl, httpRequestOptions);
+ *
+ * // We can also create an `ObjectURL` pointing to the Blob
+ * let blobUrl = URL.createObjectURL(soundBlob);
+ *
+ * // The blob URL can be used as the source for an audio element
+ */
+
+ }, {
+ key: "getBlob",
+ value: function getBlob() {
+ var dataView = convertToWav(this.buffer);
+ return new Blob([dataView], {
+ type: 'audio/wav'
+ });
+ }
+ }]);
+
+ return SoundFile;
+}();
+/**
+ * loadSound() returns a new p5.SoundFile from a specified
+ * path. If called during preload(), the p5.SoundFile will be ready
+ * to play in time for setup() and draw(). If called outside of
+ * preload, the p5.SoundFile will not be ready immediately, so
+ * loadSound accepts a callback as the second parameter. Using a
+ * local server is recommended when loading external files.
+ *
+ * @method loadSound
+ * @for p5
+ * @param {String|Array} path Path to the sound file, or an array with
+ * paths to soundfiles in multiple formats
+ * i.e. ['sound.ogg', 'sound.mp3'].
+ * Alternately, accepts an object: either
+ * from the HTML5 File API, or a p5.File.
+ * @param {Function} [successCallback] Name of a function to call once file loads
+ * @param {Function} [errorCallback] Name of a function to call if there is
+ * an error loading the file.
+ * @param {Function} [whileLoading] Name of a function to call while file is loading.
+ * This function will receive the percentage loaded
+ * so far, from 0.0 to 1.0.
+ * @return {SoundFile} Returns a p5.SoundFile
+ * @example
+ *
+ * let mySound;
+ * function preload() {
+ * soundFormats('mp3', 'ogg');
+ * mySound = loadSound('assets/doorbell');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap here to play', 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * // playing a sound file on a user gesture
+ * // is equivalent to `userStartAudio()`
+ * mySound.play();
+ * }
+ *
+ */
+
+
+function loadSound(path, callback, onerror, whileLoading) {
+ if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ window.alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+ }
+
+ var self = this;
+ var s = new soundfile_SoundFile(path, function () {
+ if (typeof callback === 'function') {
+ callback.apply(self, arguments);
+ }
+
+ if (typeof self._decrementPreload === 'function') {
+ self._decrementPreload();
+ }
+ }, onerror, whileLoading);
+ return s;
+}
+
+ var soundfile = (soundfile_SoundFile);
+
+function amplitude_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function amplitude_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function amplitude_createClass(Constructor, protoProps, staticProps) { if (protoProps) amplitude_defineProperties(Constructor.prototype, protoProps); if (staticProps) amplitude_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+
+/**
+ * Amplitude measures volume between 0.0 and 1.0.
+ * Listens to all p5sound by default, or use setInput()
+ * to listen to a specific sound source. Accepts an optional
+ * smoothing value, which defaults to 0.
+ *
+ * @class p5.Amplitude
+ * @constructor
+ * @param {Number} [smoothing] between 0.0 and .999 to smooth
+ * amplitude readings (defaults to 0)
+ * @example
+ *
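+ * // a minimal sketch (the asset path is hypothetical):
+ * let sound, amplitude;
+ * function preload() {
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(() => sound.loop());
+ * amplitude = new p5.Amplitude();
+ * }
+ * function draw() {
+ * background(220);
+ * let level = amplitude.getLevel(); // 0.0 to 1.0
+ * ellipse(width / 2, height / 2, 10 + level * 80);
+ * }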
+ */
+
+ }, {
+ key: "getLevel",
+ value: function getLevel(channel) {
+ if (typeof channel !== 'undefined') {
+ if (this.normalize) {
+ return this.stereoVolNorm[channel];
+ } else {
+ return this.stereoVol[channel];
+ }
+ } else if (this.normalize) {
+ return this.volNorm;
+ } else {
+ return this.volume;
+ }
+ }
+ /**
+ * Determines whether the results of Amplitude.process() will be
+ * normalized. To normalize, Amplitude finds the difference between the
+ * loudest reading it has processed and the maximum amplitude of
+ * 1.0. Amplitude adds this difference to all values to produce
+ * results that will reliably map between 0.0 and 1.0. However,
+ * if a louder moment occurs, the amount that Normalize adds to
+ * all the values will change. Accepts an optional boolean parameter
+ * (true or false). Normalizing is off by default.
+ *
+ * @method toggleNormalize
+ * @for p5.Amplitude
+ * @param {boolean} [bool] set normalize to true or false
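+ * @example
+ * // a sketch fragment (assumes a p5.Amplitude named `amplitude`):
+ * function keyPressed() {
+ * amplitude.toggleNormalize(); // with no argument, flips the setting
+ * }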
+ */
+
+ }, {
+ key: "toggleNormalize",
+ value: function toggleNormalize(bool) {
+ if (typeof bool === 'boolean') {
+ this.normalize = bool;
+ } else {
+ this.normalize = !this.normalize;
+ }
+
+ this._workletNode.port.postMessage({
+ name: 'toggleNormalize',
+ normalize: this.normalize
+ });
+ }
+ /**
+ * Smooth Amplitude analysis by averaging with the last analysis
+ * frame. Off by default.
+ *
+ * @method smooth
+ * @for p5.Amplitude
+ * @param {Number} s smoothing from 0.0 (none) up to, but not including, 1.0
+ */
+
+ }, {
+ key: "smooth",
+ value: function smooth(s) {
+ if (s >= 0 && s < 1) {
+ this._workletNode.port.postMessage({
+ name: 'smoothing',
+ smoothing: s
+ });
+ } else {
+ console.log('Error: smoothing must be between 0 and 1');
+ }
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+
+ if (this.input) {
+ this.input.disconnect();
+ delete this.input;
+ }
+
+ if (this.output) {
+ this.output.disconnect();
+ delete this.output;
+ }
+
+ this._workletNode.disconnect();
+
+ delete this._workletNode;
+ }
+ }]);
+
+ return Amplitude;
+}();
+
+ var amplitude = (amplitude_Amplitude);
+function fft_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function fft_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function fft_createClass(Constructor, protoProps, staticProps) { if (protoProps) fft_defineProperties(Constructor.prototype, protoProps); if (staticProps) fft_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+/**
+ *
+ * FFT (Fast Fourier Transform) is an analysis algorithm that
+ * isolates individual audio frequencies within a waveform.
+ *
+ *
+ * Once instantiated, a p5.FFT object can return an array based on
+ * two types of analyses: • FFT.waveform() computes
+ * amplitude values along the time domain. The array indices correspond
+ * to samples across a brief moment in time. Each value represents
+ * amplitude of the waveform at that sample of time.
+ * • FFT.analyze() computes amplitude values along the
+ * frequency domain. The array indices correspond to frequencies (i.e.
+ * pitches), from the lowest to the highest that humans can hear. Each
+ * value represents amplitude at that slice of the frequency spectrum.
+ * Use with getEnergy() to measure amplitude at specific
+ * frequencies, or within a range of frequencies.
+ *
+ *
+ * FFT analyzes a very short snapshot of sound called a sample
+ * buffer. It returns an array of amplitude measurements, referred
+ * to as bins. The array is 1024 bins long by default.
+ * You can change the bin array length, but it must be a power of 2
+ * between 16 and 1024 in order for the FFT algorithm to function
+ * correctly. The actual size of the FFT buffer is twice the
+ * number of bins, so given a standard sample rate, the buffer is
+ * 2048/44100 seconds long.
+ *
+ *
+ * @class p5.FFT
+ * @constructor
+ * @param {Number} [smoothing] Smooth results of Freq Spectrum.
+ * 0.0 < smoothing < 1.0.
+ * Defaults to 0.8.
+ * @param {Number} [bins] Length of resulting array.
+ * Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @example
+ *
+ * function preload(){
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup(){
+ * let cnv = createCanvas(100,100);
+ * cnv.mouseClicked(togglePlay);
+ * fft = new p5.FFT();
+ * sound.amp(0.2);
+ * }
+ *
+ * function draw(){
+ * background(220);
+ *
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * fill(255, 0, 255);
+ * for (let i = 0; i< spectrum.length; i++){
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h )
+ * }
+ *
+ * let waveform = fft.waveform();
+ * noFill();
+ * beginShape();
+ * stroke(20);
+ * for (let i = 0; i < waveform.length; i++){
+ * let x = map(i, 0, waveform.length, 0, width);
+ * let y = map( waveform[i], -1, 1, 0, height);
+ * vertex(x,y);
+ * }
+ * endShape();
+ *
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function togglePlay() {
+ * if (sound.isPlaying()) {
+ * sound.pause();
+ * } else {
+ * sound.loop();
+ * }
+ * }
+ *
+ */
+
+var fft_FFT =
+function () {
+ function FFT(smoothing, bins) {
+ fft_classCallCheck(this, FFT);
+
+ this.input = this.analyser = main.audiocontext.createAnalyser();
+ Object.defineProperties(this, {
+ bins: {
+ get: function get() {
+ return this.analyser.fftSize / 2;
+ },
+ set: function set(b) {
+ this.analyser.fftSize = b * 2;
+ },
+ configurable: true,
+ enumerable: true
+ },
+ smoothing: {
+ get: function get() {
+ return this.analyser.smoothingTimeConstant;
+ },
+ set: function set(s) {
+ this.analyser.smoothingTimeConstant = s;
+ },
+ configurable: true,
+ enumerable: true
+ }
+ });
+
+ this.smooth(smoothing);
+ this.bins = bins || 1024;
+
+ main.fftMeter.connect(this.analyser);
+ this.freqDomain = new Uint8Array(this.analyser.frequencyBinCount);
+ this.timeDomain = new Uint8Array(this.analyser.frequencyBinCount);
+
+ this.bass = [20, 140];
+ this.lowMid = [140, 400];
+ this.mid = [400, 2600];
+ this.highMid = [2600, 5200];
+ this.treble = [5200, 14000];
+
+ main.soundArray.push(this);
+ }
+ /**
+ * Set the input source for the FFT analysis. If no source is
+ * provided, FFT will analyze all sound in the sketch.
+ *
+ * @method setInput
+ * @for p5.FFT
+ * @param {Object} [source] p5.sound object (or web audio API source node)
+ */
+
+
+ fft_createClass(FFT, [{
+ key: "setInput",
+ value: function setInput(source) {
+ if (!source) {
+ main.fftMeter.connect(this.analyser);
+ } else {
+ if (source.output) {
+ source.output.connect(this.analyser);
+ } else if (source.connect) {
+ source.connect(this.analyser);
+ }
+
+ main.fftMeter.disconnect();
+ }
+ }
+ /**
+ * Returns an array of amplitude values (between -1.0 and +1.0) that represent
+ * a snapshot of amplitude readings in a single buffer. Length will be
+ * equal to bins (defaults to 1024). Can be used to draw the waveform
+ * of a sound.
+ *
+ * @method waveform
+ * @for p5.FFT
+ * @param {Number} [bins] Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @param {String} [precision] If any value is provided, will return results
+ * in a Float32 Array which is more precise
+ * than a regular array.
+ * @return {Array} Array Array of amplitude values (-1 to 1)
+ * over time. Array length = bins.
+ *
+ */
+
+ }, {
+ key: "waveform",
+ value: function waveform() {
+ var bins, mode;
+ var normalArray = [];
+
+ for (var i = 0; i < arguments.length; i++) {
+ if (typeof arguments[i] === 'number') {
+ bins = arguments[i];
+ this.analyser.fftSize = bins * 2;
+ }
+
+ if (typeof arguments[i] === 'string') {
+ mode = arguments[i];
+ }
+ }
+
+
+ if (mode && !p5.prototype._isSafari()) {
+ timeToFloat(this, this.timeDomain);
+ this.analyser.getFloatTimeDomainData(this.timeDomain);
+ return this.timeDomain;
+ } else {
+ timeToInt(this, this.timeDomain);
+ this.analyser.getByteTimeDomainData(this.timeDomain);
+
+ for (var j = 0; j < this.timeDomain.length; j++) {
+ var scaled = p5.prototype.map(this.timeDomain[j], 0, 255, -1, 1);
+ normalArray.push(scaled);
+ }
+
+ return normalArray;
+ }
+ }
+ /**
+ * Returns an array of amplitude values (between 0 and 255)
+ * across the frequency spectrum. Length is equal to FFT bins
+ * (1024 by default). The array indices correspond to frequencies
+ * (i.e. pitches), from the lowest to the highest that humans can
+ * hear. Each value represents amplitude at that slice of the
+ * frequency spectrum. Must be called prior to using
+ * getEnergy().
+ *
+ * @method analyze
+ * @for p5.FFT
+ * @param {Number} [bins] Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @param {Number} [scale] If "dB," returns decibel
+ * float measurements between
+ * -140 and 0 (max).
+ * Otherwise returns integers from 0-255.
+ * @return {Array} spectrum Array of energy (amplitude/volume)
+ * values across the frequency spectrum.
+ * Lowest energy (silence) = 0, highest
+ * possible is 255.
+ * @example
+ *
+ * let osc, fft;
+ *
+ * function setup(){
+ * let cnv = createCanvas(100,100);
+ * cnv.mousePressed(startSound);
+ * osc = new p5.Oscillator();
+ * osc.amp(0);
+ * fft = new p5.FFT();
+ * }
+ *
+ * function draw(){
+ * background(220);
+ *
+ * let freq = map(mouseX, 0, windowWidth, 20, 10000);
+ * freq = constrain(freq, 1, 20000);
+ * osc.freq(freq);
+ *
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * fill(255, 0, 255);
+ * for (let i = 0; i< spectrum.length; i++){
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h );
+ * }
+ *
+ * stroke(255);
+ * if (!osc.started) {
+ * text('tap here and drag to change frequency', 10, 20, width - 20);
+ * } else {
+ * text(round(freq)+'Hz', 10, 20);
+ * }
+ * }
+ *
+ * function startSound() {
+ * osc.start();
+ * osc.amp(0.5, 0.2);
+ * }
+ *
+ * function mouseReleased() {
+ * osc.amp(0, 0.2);
+ * }
+ *
+ *
+ *
+ */
+
+ }, {
+ key: "analyze",
+ value: function analyze() {
+ var mode;
+
+ for (var i = 0; i < arguments.length; i++) {
+ if (typeof arguments[i] === 'number') {
+ this.bins = arguments[i];
+ this.analyser.fftSize = this.bins * 2;
+ }
+
+ if (typeof arguments[i] === 'string') {
+ mode = arguments[i];
+ }
+ }
+
+ if (mode && mode.toLowerCase() === 'db') {
+ freqToFloat(this);
+ this.analyser.getFloatFrequencyData(this.freqDomain);
+ return this.freqDomain;
+ } else {
+ freqToInt(this, this.freqDomain);
+ this.analyser.getByteFrequencyData(this.freqDomain);
+ var normalArray = Array.apply([], this.freqDomain);
+ return normalArray;
+ }
+ }
+ /**
+ * Returns the amount of energy (volume) at a specific
+ * frequency, or the average amount of energy between two
+ * frequencies. Accepts Number(s) corresponding
+ * to frequency (in Hz), or a "string" corresponding to predefined
+ * frequency ranges ("bass", "lowMid", "mid", "highMid", "treble").
+ * Returns a range between 0 (no energy/volume at that frequency) and
+ * 255 (maximum energy).
+ * NOTE: analyze() must be called prior to getEnergy(). analyze()
+ * tells the FFT to analyze frequency data, and getEnergy() uses
+ * the results to determine the value at a specific frequency or
+ * range of frequencies.
+ *
+ * @method getEnergy
+ * @for p5.FFT
+ * @param {Number|String} frequency1 Will return a value representing
+ * energy at this frequency. Alternately,
+ * the strings "bass", "lowMid" "mid",
+ * "highMid", and "treble" will return
+ * predefined frequency ranges.
+ * @param {Number} [frequency2] If a second frequency is given,
+ * will return average amount of
+ * energy that exists between the
+ * two frequencies.
+ * @return {Number} Energy Energy (volume/amplitude) from
+ * 0 and 255.
+ *
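+ * @example
+ * // a hedged sketch (the asset path is hypothetical):
+ * let sound, fft;
+ * function preload() {
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(() => sound.loop());
+ * fft = new p5.FFT();
+ * }
+ * function draw() {
+ * background(220);
+ * fft.analyze(); // analyze() must run before getEnergy()
+ * let bass = fft.getEnergy('bass'); // predefined range, 0-255
+ * let mids = fft.getEnergy(400, 2600); // average between two frequencies
+ * text('bass: ' + round(bass) + ' mid: ' + round(mids), 10, 20);
+ * }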
+ */
+
+ }, {
+ key: "getEnergy",
+ value: function getEnergy(frequency1, frequency2) {
+ var nyquist = main.audiocontext.sampleRate / 2;
+
+ if (frequency1 === 'bass') {
+ frequency1 = this.bass[0];
+ frequency2 = this.bass[1];
+ } else if (frequency1 === 'lowMid') {
+ frequency1 = this.lowMid[0];
+ frequency2 = this.lowMid[1];
+ } else if (frequency1 === 'mid') {
+ frequency1 = this.mid[0];
+ frequency2 = this.mid[1];
+ } else if (frequency1 === 'highMid') {
+ frequency1 = this.highMid[0];
+ frequency2 = this.highMid[1];
+ } else if (frequency1 === 'treble') {
+ frequency1 = this.treble[0];
+ frequency2 = this.treble[1];
+ }
+
+ if (typeof frequency1 !== 'number') {
+ throw 'invalid input for getEnergy()';
+ } else if (!frequency2) {
+ var index = Math.round(frequency1 / nyquist * this.freqDomain.length);
+ return this.freqDomain[index];
+ } else if (frequency1 && frequency2) {
+ if (frequency1 > frequency2) {
+ var swap = frequency2;
+ frequency2 = frequency1;
+ frequency1 = swap;
+ }
+
+ var lowIndex = Math.round(frequency1 / nyquist * this.freqDomain.length);
+ var highIndex = Math.round(frequency2 / nyquist * this.freqDomain.length);
+ var total = 0;
+ var numFrequencies = 0;
+
+ for (var i = lowIndex; i <= highIndex; i++) {
+ total += this.freqDomain[i];
+ numFrequencies += 1;
+ }
+
+
+ var toReturn = total / numFrequencies;
+ return toReturn;
+ } else {
+ throw 'invalid input for getEnergy()';
+ }
+ }
+
+ }, {
+ key: "getFreq",
+ value: function getFreq(freq1, freq2) {
+ console.log('getFreq() is deprecated. Please use getEnergy() instead.');
+ var x = this.getEnergy(freq1, freq2);
+ return x;
+ }
+ /**
+ * Returns the spectral centroid of the input signal.
+ * NOTE: analyze() must be called prior to getCentroid(). analyze()
+ * tells the FFT to analyze frequency data, and getCentroid() uses
+ * the results to determine the spectral centroid.
+ *
+ * @method getCentroid
+ * @for p5.FFT
+ * @return {Number} Spectral Centroid Frequency of the spectral centroid in Hz.
+ *
+ *
+ * @example
+ *
+ * let cnv, sound, fft;
+ *
+ * function setup(){
+ * cnv = createCanvas(100,100);
+ * cnv.mousePressed(userStartAudio);
+ * sound = new p5.AudioIn();
+ * sound.start();
+ * fft = new p5.FFT();
+ * sound.connect(fft);
+ * }
+ *
+ * function draw() {
+ * if (getAudioContext().state !== 'running') {
+ * background(220);
+ * text('tap here and enable mic to begin', 10, 20, width - 20);
+ * return;
+ * }
+ * let centroidplot = 0.0;
+ * let spectralCentroid = 0;
+ *
+ * background(0);
+ * stroke(0,255,0);
+ * let spectrum = fft.analyze();
+ * fill(0,255,0); // spectrum is green
+ *
+ * //draw the spectrum
+ * for (let i = 0; i < spectrum.length; i++){
+ * let x = map(log(i), 0, log(spectrum.length), 0, width);
+ * let h = map(spectrum[i], 0, 255, 0, height);
+ * let rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
+ * rect(x, height, rectangle_width, -h);
+ * }
+ * let nyquist = 22050;
+ *
+ * // get the centroid
+ * spectralCentroid = fft.getCentroid();
+ *
+ * // the mean_freq_index calculation is for the display.
+ * let mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
+ *
+ * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
+ *
+ * stroke(255,0,0); // the line showing where the centroid is will be red
+ *
+ * rect(centroidplot, 0, width / spectrum.length, height);
+ * noStroke();
+ * fill(255,255,255); // text is white
+ * text('centroid: ', 10, 20);
+ * text(round(spectralCentroid)+' Hz', 10, 40);
+ * }
+ *
+ */
+
+ }, {
+ key: "getCentroid",
+ value: function getCentroid() {
+ var nyquist = main.audiocontext.sampleRate / 2;
+ var cumulative_sum = 0;
+ var centroid_normalization = 0;
+
+ for (var i = 0; i < this.freqDomain.length; i++) {
+ cumulative_sum += i * this.freqDomain[i];
+ centroid_normalization += this.freqDomain[i];
+ }
+
+ var mean_freq_index = 0;
+
+ if (centroid_normalization !== 0) {
+ mean_freq_index = cumulative_sum / centroid_normalization;
+ }
+
+ var spec_centroid_freq = mean_freq_index * (nyquist / this.freqDomain.length);
+ return spec_centroid_freq;
+ }
+ /**
+ * Smooth FFT analysis by averaging with the last analysis frame.
+ *
+ * @method smooth
+ * @for p5.FFT
+ * @param {Number} smoothing 0.0 < smoothing < 1.0.
+ * Defaults to 0.8.
+ * @return {Number} smoothing Current smoothing value.
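+ * @example
+ *
+ * // A brief illustrative snippet (assumes fft is a p5.FFT created in setup):
+ * fft.smooth(0.9); // smoother than the 0.8 default
+ * let spectrum = fft.analyze(); // subsequent frames now change more gradually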
+ */
+
+ }, {
+ key: "smooth",
+ value: function smooth(s) {
+ if (typeof s !== 'undefined') {
+ this.smoothing = s;
+ }
+
+ return this.smoothing;
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+
+ if (this.analyser) {
+ this.analyser.disconnect();
+ delete this.analyser;
+ }
+ }
+ /**
+ * Returns an array of average amplitude values for a given number
+ * of frequency bands split equally. N defaults to 16.
+ * NOTE: analyze() must be called prior to linAverages(). Analyze()
+ * tells the FFT to analyze frequency data, and linAverages() uses
+ * the results to group them into a smaller set of averages.
+ *
+ * @method linAverages
+ * @for p5.FFT
+ * @param {Number} N Number of returned frequency groups
+ * @return {Array} linearAverages Array of average amplitude values for each group
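+ * @example
+ *
+ * // An illustrative draw() fragment (assumes fft is a connected p5.FFT):
+ * fft.analyze(); // analyze() must be called first
+ * let bands = fft.linAverages(16);
+ * for (let i = 0; i < bands.length; i++) {
+ * let x = map(i, 0, bands.length, 0, width);
+ * let h = map(bands[i], 0, 255, 0, height);
+ * rect(x, height, width / bands.length, -h);
+ * }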
+ */
+
+ }, {
+ key: "linAverages",
+ value: function linAverages(_N) {
+ var N = _N || 16;
+
+ var spectrum = this.freqDomain;
+ var spectrumLength = spectrum.length;
+ var spectrumStep = Math.floor(spectrumLength / N);
+ var linearAverages = new Array(N);
+
+ var groupIndex = 0;
+
+ for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
+ linearAverages[groupIndex] = linearAverages[groupIndex] !== undefined ? (linearAverages[groupIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex];
+
+ if (specIndex % spectrumStep === spectrumStep - 1) {
+ groupIndex++;
+ }
+ }
+
+ return linearAverages;
+ }
+ /**
+ * Returns an array of average amplitude values of the spectrum, for a given
+ * set of Octave Bands.
+ * NOTE: analyze() must be called prior to logAverages(). Analyze()
+ * tells the FFT to analyze frequency data, and logAverages() uses
+ * the results to group them into a smaller set of averages.
+ *
+ * @method logAverages
+ * @for p5.FFT
+ * @param {Array} octaveBands Array of Octave Bands objects for grouping
+ * @return {Array} logAverages Array of average amplitude values for each group
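+ * @example
+ *
+ * // An illustrative fragment (assumes fft is a connected p5.FFT):
+ * let octaveBands = fft.getOctaveBands(3); // 1/3 octave bands
+ * fft.analyze(); // analyze() must be called first
+ * let averages = fft.logAverages(octaveBands);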
+ */
+
+ }, {
+ key: "logAverages",
+ value: function logAverages(octaveBands) {
+ var nyquist = main.audiocontext.sampleRate / 2;
+ var spectrum = this.freqDomain;
+ var spectrumLength = spectrum.length;
+ var logAverages = new Array(octaveBands.length);
+
+ var octaveIndex = 0;
+
+ for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
+ var specIndexFrequency = Math.round(specIndex * nyquist / this.freqDomain.length);
+
+ if (specIndexFrequency > octaveBands[octaveIndex].hi) {
+ octaveIndex++;
+ }
+
+ logAverages[octaveIndex] = logAverages[octaveIndex] !== undefined ? (logAverages[octaveIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex];
+ }
+
+ return logAverages;
+ }
+ /**
+ * Calculates and returns the 1/N Octave Bands.
+ * N defaults to 3 and the minimum central frequency to 15.625 Hz
+ * (1/3 octave bands ~= 31 frequency bands).
+ * Setting fCtr0 to a central value of a higher octave will ignore the lower bands
+ * and produce less frequency groups.
+ *
+ * @method getOctaveBands
+ * @for p5.FFT
+ * @param {Number} N Specifies the 1/N type of generated octave bands
+ * @param {Number} fCtr0 Minimum central frequency for the lowest band
+ * @return {Array} octaveBands Array of octave band objects with their bounds
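+ * @example
+ *
+ * // A brief illustrative snippet. Each band is an object with
+ * // lo/ctr/hi bounds in Hz (values below assume a 44100 Hz context):
+ * let fft = new p5.FFT();
+ * let bands = fft.getOctaveBands(3, 15.625);
+ * // bands[0] is roughly { lo: 13.9, ctr: 15.625, hi: 17.5 }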
+ */
+
+ }, {
+ key: "getOctaveBands",
+ value: function getOctaveBands(_N, _fCtr0) {
+ var N = _N || 3;
+
+ var fCtr0 = _fCtr0 || 15.625;
+
+ var octaveBands = [];
+ var lastFrequencyBand = {
+ lo: fCtr0 / Math.pow(2, 1 / (2 * N)),
+ ctr: fCtr0,
+ hi: fCtr0 * Math.pow(2, 1 / (2 * N))
+ };
+ octaveBands.push(lastFrequencyBand);
+ var nyquist = main.audiocontext.sampleRate / 2;
+
+ while (lastFrequencyBand.hi < nyquist) {
+ var newFrequencyBand = {};
+ newFrequencyBand.lo = lastFrequencyBand.hi;
+ newFrequencyBand.ctr = lastFrequencyBand.ctr * Math.pow(2, 1 / N);
+ newFrequencyBand.hi = newFrequencyBand.ctr * Math.pow(2, 1 / (2 * N));
+ octaveBands.push(newFrequencyBand);
+ lastFrequencyBand = newFrequencyBand;
+ }
+
+ return octaveBands;
+ }
+ }]);
+
+ return FFT;
+}();
+
+
+function freqToFloat(fft) {
+ if (fft.freqDomain instanceof Float32Array === false) {
+ fft.freqDomain = new Float32Array(fft.analyser.frequencyBinCount);
+ }
+}
+
+function freqToInt(fft) {
+ if (fft.freqDomain instanceof Uint8Array === false) {
+ fft.freqDomain = new Uint8Array(fft.analyser.frequencyBinCount);
+ }
+}
+
+function timeToFloat(fft) {
+ if (fft.timeDomain instanceof Float32Array === false) {
+ fft.timeDomain = new Float32Array(fft.analyser.frequencyBinCount);
+ }
+}
+
+function timeToInt(fft) {
+ if (fft.timeDomain instanceof Uint8Array === false) {
+ fft.timeDomain = new Uint8Array(fft.analyser.frequencyBinCount);
+ }
+}
+
+ var fft = (fft_FFT);
+var Add = __webpack_require__(4);
+var Add_default = __webpack_require__.n(Add);
+
+var Multiply = __webpack_require__(1);
+var Multiply_default = __webpack_require__.n(Multiply);
+
+var Scale = __webpack_require__(8);
+var Scale_default = __webpack_require__.n(Scale);
+
+function oscillator_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { oscillator_typeof = function _typeof(obj) { return typeof obj; }; } else { oscillator_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return oscillator_typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (oscillator_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function oscillator_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function oscillator_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function oscillator_createClass(Constructor, protoProps, staticProps) { if (protoProps) oscillator_defineProperties(Constructor.prototype, protoProps); if (staticProps) oscillator_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+
+
+
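+// Internal helper: splices a math node (Add, Multiply or Scale) into an
+// oscillator's signal chain, disposing of any existing node of the same
+// type and reconnecting the surrounding nodes around the new one.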
+function sigChain(o, mathObj, thisChain, nextChain, type) {
+ var chainSource = o.oscillator;
+
+ for (var i in o.mathOps) {
+ if (o.mathOps[i] instanceof type) {
+ chainSource.disconnect();
+ o.mathOps[i].dispose();
+ thisChain = i;
+
+ if (thisChain < o.mathOps.length - 2) {
+ nextChain = o.mathOps[i + 1];
+ }
+ }
+ }
+
+ if (thisChain === o.mathOps.length - 1) {
+ o.mathOps.push(nextChain);
+ }
+
+
+ if (i > 0) {
+ chainSource = o.mathOps[i - 1];
+ }
+
+ chainSource.disconnect();
+ chainSource.connect(mathObj);
+ mathObj.connect(nextChain);
+ o.mathOps[thisChain] = mathObj;
+ return o;
+}
+/**
+ * Creates a signal that oscillates between -1.0 and 1.0.
+ * By default, the oscillation takes the form of a sinusoidal
+ * shape ('sine'). Additional types include 'triangle',
+ * 'sawtooth' and 'square'. The frequency defaults to
+ * 440 oscillations per second (440Hz, equal to the pitch of an
+ * 'A' note).
+ *
+ * @class p5.Oscillator
+ * @constructor
+ * @param {Number} [freq] frequency defaults to 440Hz
+ * @param {String} [type] type of oscillator. Options:
+ * 'sine' (default), 'triangle',
+ * 'sawtooth', 'square'
+ * @example
+ * let osc, playing, freq, amp;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playOscillator);
+ * osc = new p5.Oscillator('sine');
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * freq = constrain(map(mouseX, 0, width, 100, 500), 100, 500);
+ * amp = constrain(map(mouseY, height, 0, 0, 1), 0, 1);
+ *
+ * text('tap to play', 20, 20);
+ * text('freq: ' + freq, 20, 40);
+ * text('amp: ' + amp, 20, 60);
+ *
+ * if (playing) {
+ * // smooth the transitions by 0.1 seconds
+ * osc.freq(freq, 0.1);
+ * osc.amp(amp, 0.1);
+ * }
+ * }
+ *
+ * function playOscillator() {
+ * // starting an oscillator on a user gesture will enable audio
+ * // in browsers that have a strict autoplay policy.
+ * // See also: userStartAudio();
+ * osc.start();
+ * playing = true;
+ * }
+ *
+ * function mouseReleased() {
+ * // ramp amplitude to 0 over 0.5 seconds
+ * osc.amp(0, 0.5);
+ * playing = false;
+ * }
+ *
+ */
+
+
+var oscillator_Oscillator =
+function () {
+ function Oscillator(freq, type) {
+ oscillator_classCallCheck(this, Oscillator);
+
+ if (typeof freq === 'string') {
+ var f = type;
+ type = freq;
+ freq = f;
+ }
+
+ if (typeof type === 'number') {
+ var _f = type;
+ type = freq;
+ freq = _f;
+ }
+
+ this.started = false;
+
+ this.phaseAmount = undefined;
+ this.oscillator = main.audiocontext.createOscillator();
+ this.f = freq || 440.0;
+
+ this.oscillator.type = type || 'sine';
+ this.oscillator.frequency.setValueAtTime(this.f, main.audiocontext.currentTime);
+
+ this.output = main.audiocontext.createGain();
+ this._freqMods = [];
+
+ this.output.gain.value = 0.5;
+ this.output.gain.setValueAtTime(0.5, main.audiocontext.currentTime);
+ this.oscillator.connect(this.output);
+
+ this.panPosition = 0.0;
+ this.connection = main.input;
+
+ this.panner = new panner_0(this.output, this.connection, 1);
+
+ this.mathOps = [this.output];
+
+ main.soundArray.push(this);
+
+ this.fade = this.amp;
+ }
+ /**
+ * Start an oscillator.
+ *
+ * Starting an oscillator on a user gesture will enable audio in browsers
+ * that have a strict autoplay policy, including Chrome and most mobile
+ * devices. See also: `userStartAudio()`.
+ *
+ * @method start
+ * @for p5.Oscillator
+ * @param {Number} [time] startTime in seconds from now.
+ * @param {Number} [frequency] frequency in Hz.
+ */
+
+
+ oscillator_createClass(Oscillator, [{
+ key: "start",
+ value: function start(time, f) {
+ if (this.started) {
+ var now = main.audiocontext.currentTime;
+ this.stop(now);
+ }
+
+ if (!this.started) {
+ var freq = f || this.f;
+ var type = this.oscillator.type;
+
+ if (this.oscillator) {
+ this.oscillator.disconnect();
+ delete this.oscillator;
+ }
+
+
+ this.oscillator = main.audiocontext.createOscillator();
+ this.oscillator.frequency.value = Math.abs(freq);
+ this.oscillator.type = type;
+
+ this.oscillator.connect(this.output);
+ time = time || 0;
+ this.oscillator.start(time + main.audiocontext.currentTime);
+ this.freqNode = this.oscillator.frequency;
+
+ for (var i in this._freqMods) {
+ if (typeof this._freqMods[i].connect !== 'undefined') {
+ this._freqMods[i].connect(this.oscillator.frequency);
+ }
+ }
+
+ this.started = true;
+ }
+ }
+ /**
+ * Stop an oscillator. Accepts an optional parameter
+ * to determine how long (in seconds from now) until the
+ * oscillator stops.
+ *
+ * @method stop
+ * @for p5.Oscillator
+ * @param {Number} secondsFromNow Time, in seconds from now.
+ */
+
+ }, {
+ key: "stop",
+ value: function stop(time) {
+ if (this.started) {
+ var t = time || 0;
+ var now = main.audiocontext.currentTime;
+ this.oscillator.stop(t + now);
+ this.started = false;
+ }
+ }
+ /**
+ * Set the amplitude between 0 and 1.0. Or, pass in an object
+ * such as an oscillator to modulate amplitude with an audio signal.
+ *
+ * @method amp
+ * @for p5.Oscillator
+ * @param {Number|Object} vol between 0 and 1.0
+ * or a modulating signal/oscillator
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ * @return {AudioParam} gain If no value is provided,
+ * returns the Web Audio API
+ * AudioParam that controls
+ * this oscillator's
+ * gain/amplitude/volume
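+ * @example
+ *
+ * // A small fragment sketching the modulation form described above;
+ * // assumes it runs after a user gesture so audio is enabled:
+ * let osc = new p5.Oscillator('sine');
+ * let lfo = new p5.Oscillator(4); // 4 Hz low-frequency modulator
+ * lfo.disconnect(); // keep the modulator out of the speakers
+ * osc.start();
+ * lfo.start();
+ * osc.amp(lfo); // the lfo's output now drives osc's gain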
+ */
+
+ }, {
+ key: "amp",
+ value: function amp(vol) {
+ var rampTime = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+ var tFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+
+ if (typeof vol === 'number') {
+ var now = main.audiocontext.currentTime;
+ this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+ } else if (vol) {
+ vol.connect(this.output.gain);
+ } else {
+ return this.output.gain;
+ }
+ }
+ /**
+ * Returns the value of the output gain.
+ *
+ * @method getAmp
+ * @for p5.Oscillator
+ *
+ * @returns {number} Amplitude value between 0.0 and 1.0
+ */
+
+ }, {
+ key: "getAmp",
+ value: function getAmp() {
+ return this.output.gain.value;
+ }
+ /**
+ * Set frequency of an oscillator to a value. Or, pass in an object
+ * such as an oscillator to modulate the frequency with an audio signal.
+ *
+ * @method freq
+ * @for p5.Oscillator
+ * @param {Number|Object} Frequency Frequency in Hz
+ * or modulating signal/oscillator
+ * @param {Number} [rampTime] Ramp time (in seconds)
+ * @param {Number} [timeFromNow] Schedule this event to happen
+ * at x seconds from now
+ * @return {AudioParam} Frequency If no value is provided,
+ * returns the Web Audio API
+ * AudioParam that controls
+ * this oscillator's frequency
+ * @example
+ *
+ * let osc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playOscillator);
+ * osc = new p5.Oscillator(300);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function playOscillator() {
+ * osc.start();
+ * osc.amp(0.5);
+ * // start at 700Hz
+ * osc.freq(700);
+ * // ramp to 60Hz over 0.7 seconds
+ * osc.freq(60, 0.7);
+ * osc.amp(0, 0.1, 0.7);
+ * }
+ *
+ */
+
+ }, {
+ key: "freq",
+ value: function freq(val) {
+ var rampTime = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+ var tFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+
+ if (typeof val === 'number' && !isNaN(val)) {
+ this.f = val;
+ var now = main.audiocontext.currentTime;
+
+ if (rampTime === 0) {
+ this.oscillator.frequency.setValueAtTime(val, tFromNow + now);
+ } else {
+ if (val > 0) {
+ this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
+ } else {
+ this.oscillator.frequency.linearRampToValueAtTime(val, tFromNow + rampTime + now);
+ }
+ }
+
+
+ if (this.phaseAmount) {
+ this.phase(this.phaseAmount);
+ }
+ } else if (val) {
+ if (val.output) {
+ val = val.output;
+ }
+
+ val.connect(this.oscillator.frequency);
+
+ this._freqMods.push(val);
+ } else {
+ return this.oscillator.frequency;
+ }
+ }
+ /**
+ * Returns the frequency of the oscillator.
+ *
+ * @method getFreq
+ * @for p5.Oscillator
+ * @returns {number} Frequency of oscillator in Hertz
+ */
+
+ }, {
+ key: "getFreq",
+ value: function getFreq() {
+ return this.oscillator.frequency.value;
+ }
+ /**
+ * Set type to 'sine', 'triangle', 'sawtooth' or 'square'.
+ *
+ * @method setType
+ * @for p5.Oscillator
+ * @param {String} type 'sine', 'triangle', 'sawtooth' or 'square'.
+ */
+
+ }, {
+ key: "setType",
+ value: function setType(type) {
+ this.oscillator.type = type;
+ }
+ /**
+ * Returns the current type of the oscillator, e.g. 'sine', 'triangle', 'sawtooth' or 'square'.
+ *
+ * @method getType
+ * @for p5.Oscillator
+ * @returns {String} type of oscillator, e.g. 'sine', 'triangle', 'sawtooth' or 'square'.
+ */
+
+ }, {
+ key: "getType",
+ value: function getType() {
+ return this.oscillator.type;
+ }
+ /**
+ * Connect to a p5.sound / Web Audio object.
+ *
+ * @method connect
+ * @for p5.Oscillator
+ * @param {Object} unit A p5.sound or Web Audio object
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ if (!unit) {
+ this.panner.connect(main.input);
+ } else if (unit.hasOwnProperty('input')) {
+ this.panner.connect(unit.input);
+ this.connection = unit.input;
+ } else {
+ this.panner.connect(unit);
+ this.connection = unit;
+ }
+ }
+ /**
+ * Disconnect all outputs
+ *
+ * @method disconnect
+ * @for p5.Oscillator
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.output) {
+ this.output.disconnect();
+ }
+
+ if (this.panner) {
+ this.panner.disconnect();
+
+ if (this.output) {
+ this.output.connect(this.panner);
+ }
+ }
+
+ this.oscMods = [];
+ }
+ /**
+ * Pan between Left (-1) and Right (1)
+ *
+ * @method pan
+ * @for p5.Oscillator
+ * @param {Number} panning Number between -1 and 1
+ * @param {Number} timeFromNow schedule this event to happen
+ * seconds from now
+ */
+
+ }, {
+ key: "pan",
+ value: function pan(pval, tFromNow) {
+ this.panPosition = pval;
+ this.panner.pan(pval, tFromNow);
+ }
+ /**
+ * Returns the current value of panPosition, between Left (-1) and Right (1)
+ *
+ * @method getPan
+ * @for p5.Oscillator
+ *
+ * @returns {number} panPosition of oscillator, between Left (-1) and Right (1)
+ */
+
+ }, {
+ key: "getPan",
+ value: function getPan() {
+ return this.panPosition;
+ }
+
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+
+ if (this.oscillator) {
+ var now = main.audiocontext.currentTime;
+ this.stop(now);
+ this.disconnect();
+ this.panner = null;
+ this.oscillator = null;
+ }
+
+
+ if (this.osc2) {
+ this.osc2.dispose();
+ }
+ }
+ /**
+ * Set the phase of an oscillator between 0.0 and 1.0.
+ * In this implementation, phase is a delay time
+ * based on the oscillator's current frequency.
+ *
+ * @method phase
+ * @for p5.Oscillator
+ * @param {Number} phase float between 0.0 and 1.0
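+ * @example
+ *
+ * // A brief illustrative snippet: two oscillators at the same
+ * // frequency, the second delayed by half a cycle:
+ * let osc1 = new p5.Oscillator(220);
+ * let osc2 = new p5.Oscillator(220);
+ * osc2.phase(0.5);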
+ */
+
+ }, {
+ key: "phase",
+ value: function phase(p) {
+ var delayAmt = p5.prototype.map(p, 0, 1.0, 0, 1 / this.f);
+ var now = main.audiocontext.currentTime;
+ this.phaseAmount = p;
+
+ if (!this.dNode) {
+ this.dNode = main.audiocontext.createDelay();
+
+ this.oscillator.disconnect();
+ this.oscillator.connect(this.dNode);
+ this.dNode.connect(this.output);
+ }
+
+
+ this.dNode.delayTime.setValueAtTime(delayAmt, now);
+ }
+ /**
+ * Add a value to the p5.Oscillator's output amplitude,
+ * and return the oscillator. Calling this method again
+ * will override the initial add() with a new value.
+ *
+ * @method add
+ * @for p5.Oscillator
+ * @param {Number} number Constant number to add
+ * @return {p5.Oscillator} Oscillator Returns this oscillator
+ * with scaled output
+ *
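+ * @example
+ *
+ * // A brief illustrative snippet: offset a modulator's signal so it
+ * // stays positive before using it to drive another parameter.
+ * let lfo = new p5.Oscillator(2);
+ * lfo.disconnect(); // keep the modulator out of the speakers
+ * lfo.start();
+ * lfo.add(1); // the raw -1..1 signal becomes 0..2 before the output gain stage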
+ */
+
+ }, {
+ key: "add",
+ value: function add(num) {
+ var add = new Add_default.a(num);
+ var thisChain = this.mathOps.length - 1;
+ var nextChain = this.output;
+ return sigChain(this, add, thisChain, nextChain, Add_default.a);
+ }
+ /**
+ * Multiply the p5.Oscillator's output amplitude
+ * by a fixed value (i.e. turn it up!). Calling this method
+ * again will override the initial mult() with a new value.
+ *
+ * @method mult
+ * @for p5.Oscillator
+ * @param {Number} number Constant number to multiply
+ * @return {p5.Oscillator} Oscillator Returns this oscillator
+ * with multiplied output
+ */
+
+ }, {
+ key: "mult",
+ value: function mult(num) {
+ var mult = new Multiply_default.a(num);
+ var thisChain = this.mathOps.length - 1;
+ var nextChain = this.output;
+ return sigChain(this, mult, thisChain, nextChain, Multiply_default.a);
+ }
+ /**
+ * Scale this oscillator's amplitude values to a given
+ * range, and return the oscillator. Calling this method
+ * again will override the initial scale() with new values.
+ *
+ * @method scale
+ * @for p5.Oscillator
+ * @param {Number} inMin input range minimum
+ * @param {Number} inMax input range maximum
+ * @param {Number} outMin output range minimum
+ * @param {Number} outMax output range maximum
+ * @return {p5.Oscillator} Oscillator Returns this oscillator
+ * with scaled output
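+ * @example
+ *
+ * // A small fragment sketching the pattern described above: a slow
+ * // oscillator, scaled and connected to another oscillator's
+ * // frequency, sweeps it around its base pitch.
+ * let carrier = new p5.Oscillator(440);
+ * let lfo = new p5.Oscillator(0.5); // slow modulator
+ * lfo.disconnect(); // keep the modulator out of the speakers
+ * lfo.scale(-1, 1, -50, 50); // remap its output range
+ * carrier.freq(lfo); // the scaled signal is summed with the 440 Hz base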
+ */
+
+ }, {
+ key: "scale",
+ value: function scale(inMin, inMax, outMin, outMax) {
+ var mapOutMin, mapOutMax;
+
+ if (arguments.length === 4) {
+ mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5;
+
+ mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5;
+ } else {
+ mapOutMin = arguments[0];
+ mapOutMax = arguments[1];
+ }
+
+ var scale = new Scale_default.a(mapOutMin, mapOutMax);
+ var thisChain = this.mathOps.length - 1;
+ var nextChain = this.output;
+ return sigChain(this, scale, thisChain, nextChain, Scale_default.a);
+ }
+ }]);
+
+ return Oscillator;
+}();
+
+/**
+ * Constructor: new p5.SinOsc().
+ * This creates a Sine Wave Oscillator and is
+ * equivalent to new p5.Oscillator('sine')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('sine').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SinOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+var SinOsc =
+function (_Oscillator) {
+ _inherits(SinOsc, _Oscillator);
+
+ function SinOsc(freq) {
+ oscillator_classCallCheck(this, SinOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(SinOsc).call(this, freq, 'sine'));
+ }
+
+ return SinOsc;
+}(oscillator_Oscillator);
+/**
+ * Constructor: new p5.TriOsc().
+ * This creates a Triangle Wave Oscillator and is
+ * equivalent to new p5.Oscillator('triangle')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('triangle').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.TriOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+var TriOsc =
+function (_Oscillator2) {
+ _inherits(TriOsc, _Oscillator2);
+
+ function TriOsc(freq) {
+ oscillator_classCallCheck(this, TriOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(TriOsc).call(this, freq, 'triangle'));
+ }
+
+ return TriOsc;
+}(oscillator_Oscillator);
+/**
+ * Constructor: new p5.SawOsc().
+ * This creates a SawTooth Wave Oscillator and is
+ * equivalent to new p5.Oscillator('sawtooth')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('sawtooth').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SawOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+var SawOsc =
+function (_Oscillator3) {
+ _inherits(SawOsc, _Oscillator3);
+
+ function SawOsc(freq) {
+ oscillator_classCallCheck(this, SawOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(SawOsc).call(this, freq, 'sawtooth'));
+ }
+
+ return SawOsc;
+}(oscillator_Oscillator);
+/**
+ * Constructor: new p5.SqrOsc().
+ * This creates a Square Wave Oscillator and is
+ * equivalent to new p5.Oscillator('square')
+ * or creating a p5.Oscillator and then calling
+ * its method setType('square').
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SqrOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+var SqrOsc =
+function (_Oscillator4) {
+ _inherits(SqrOsc, _Oscillator4);
+
+ function SqrOsc(freq) {
+ oscillator_classCallCheck(this, SqrOsc);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(SqrOsc).call(this, freq, 'square'));
+ }
+
+ return SqrOsc;
+}(oscillator_Oscillator);
+
+ var oscillator = (oscillator_Oscillator);
+
+var TimelineSignal = __webpack_require__(7);
+var TimelineSignal_default = __webpack_require__.n(TimelineSignal);
+
+
+
+
+
+
+/**
+ * Envelopes are pre-defined amplitude distributions over time.
+ * Typically, envelopes are used to control the output volume
+ * of an object, a series of fades referred to as Attack, Decay,
+ * Sustain and Release (ADSR).
+ * Envelopes can also control other Web Audio Parameters. For example,
+ * a p5.Envelope can control an Oscillator's frequency like this: osc.freq(env).
+ *
+ * Use setRange to change the attack/release level.
+ * Use setADSR to change attackTime, decayTime, sustainPercent and releaseTime.
+ *
+ * Use the play method to play the entire envelope,
+ * the ramp method for a pingable trigger,
+ * or triggerAttack/
+ * triggerRelease to trigger noteOn/noteOff.
+ *
+ * @class p5.Envelope
+ * @constructor
+ * @example
+ * let t1 = 0.1; // attack time in seconds
+ * let l1 = 0.7; // attack level 0.0 to 1.0
+ * let t2 = 0.3; // decay time in seconds
+ * let l2 = 0.1; // decay level 0.0 to 1.0
+ *
+ * let env;
+ * let triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * cnv.mousePressed(playSound);
+ *
+ * env = new p5.Envelope(t1, l1, t2, l2);
+ * triOsc = new p5.Oscillator('triangle');
+ * }
+ *
+ * function playSound() {
+ * // starting the oscillator ensures that audio is enabled.
+ * triOsc.start();
+ * env.play(triOsc);
+ * }
+ *
+ */
+
+p5.Envelope = function (t1, l1, t2, l2, t3, l3) {
+ /**
+ * Time until envelope reaches attackLevel
+ * @property attackTime
+ */
+ this.aTime = t1 || 0.1;
+ /**
+ * Level once attack is complete.
+ * @property attackLevel
+ */
+
+ this.aLevel = l1 || 1;
+ /**
+ * Time until envelope reaches decayLevel.
+ * @property decayTime
+ */
+
+ this.dTime = t2 || 0.5;
+ /**
+ * Level after decay. The envelope will sustain here until it is released.
+ * @property decayLevel
+ */
+
+ this.dLevel = l2 || 0;
+ /**
+ * Duration of the release portion of the envelope.
+ * @property releaseTime
+ */
+
+ this.rTime = t3 || 0;
+ /**
+ * Level at the end of the release.
+ * @property releaseLevel
+ */
+
+ this.rLevel = l3 || 0;
+ this._rampHighPercentage = 0.98;
+ this._rampLowPercentage = 0.02;
+ this.output = main.audiocontext.createGain();
+ this.control = new TimelineSignal_default.a();
+
+ this._init();
+
+
+ this.control.connect(this.output);
+
+ this.connection = null;
+
+ this.mathOps = [this.control];
+
+ this.isExponential = false;
+
+ this.sourceToClear = null;
+
+ this.wasTriggered = false;
+
+ main.soundArray.push(this);
+};
+
+
+p5.Envelope.prototype._init = function () {
+ var now = main.audiocontext.currentTime;
+ var t = now;
+ this.control.setTargetAtTime(0.00001, t, 0.001);
+
+ this._setRampAD(this.aTime, this.dTime);
+};
+/**
+ * Reset the envelope with a series of time/value pairs.
+ *
+ * @method set
+ * @for p5.Envelope
+ * @param {Number} attackTime Time (in seconds) before level
+ * reaches attackLevel
+ * @param {Number} attackLevel Typically an amplitude between
+ * 0.0 and 1.0
+ * @param {Number} decayTime Decay time (in seconds)
+ * @param {Number} decayLevel Amplitude (In a standard ADSR envelope,
+ * decayLevel = sustainLevel)
+ * @param {Number} releaseTime Release Time (in seconds)
+ * @param {Number} releaseLevel Amplitude
+ * @example
+ *
+ * let attackTime;
+ * let l1 = 0.7; // attack level 0.0 to 1.0
+ * let t2 = 0.3; // decay time in seconds
+ * let l2 = 0.1; // decay level 0.0 to 1.0
+ * let t3 = 0.2; // release time in seconds
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ *
+ * attackTime = map(mouseX, 0, width, 0.0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 20);
+ * }
+ *
+ * // mouseClick triggers envelope if over canvas
+ * function playSound() {
+ * env.set(attackTime, l1, t2, l2, t3);
+ *
+ * triOsc.start();
+ * env.play(triOsc);
+ * }
+ *
+ *
+ */
+
+
+p5.Envelope.prototype.set = function (t1, l1, t2, l2, t3, l3) {
+ this.aTime = t1;
+ this.aLevel = l1;
+ this.dTime = t2 || 0;
+ this.dLevel = l2 || 0;
+ this.rTime = t3 || 0;
+ this.rLevel = l3 || 0;
+
+ this._setRampAD(t1, t2);
+};
+/**
+ * Set values like a traditional
+ * ADSR envelope.
+ *
+ * @method setADSR
+ * @for p5.Envelope
+ * @param {Number} attackTime Time (in seconds) before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ * @example
+ *
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackTime = map(mouseX, 0, width, 0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 40);
+ * }
+ *
+ * function playEnv() {
+ * triOsc.start();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.play();
+ * }
+ */
+
+
+p5.Envelope.prototype.setADSR = function (aTime, dTime, sPercent, rTime) {
+ this.aTime = aTime;
+ this.dTime = dTime || 0;
+
+ this.sPercent = sPercent || 0;
+ this.dLevel = typeof sPercent !== 'undefined' ? sPercent * (this.aLevel - this.rLevel) + this.rLevel : 0;
+ this.rTime = rTime || 0;
+
+ this._setRampAD(aTime, dTime);
+};
+/**
+ * Set max (attackLevel) and min (releaseLevel) of envelope.
+ *
+ * @method setRange
+ * @for p5.Envelope
+ * @param {Number} aLevel attack level (defaults to 1)
+ * @param {Number} rLevel release level (defaults to 0)
+ * @example
+ *
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackLevel = map(mouseY, height, 0, 0, 1.0);
+ * text('attack level: ' + attackLevel, 5, height - 20);
+ * }
+ *
+ * function playEnv() {
+ * triOsc.start();
+ * env.setRange(attackLevel, releaseLevel);
+ * env.play();
+ * }
+ *
+ */
+
+
+p5.Envelope.prototype.setRange = function (aLevel, rLevel) {
+ this.aLevel = aLevel || 1;
+ this.rLevel = rLevel || 0;
+};
+
+
+p5.Envelope.prototype._setRampAD = function (t1, t2) {
+ this._rampAttackTime = this.checkExpInput(t1);
+ this._rampDecayTime = this.checkExpInput(t2);
+ var TCDenominator = 1.0;
+
+ TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
+ this._rampAttackTC = t1 / this.checkExpInput(TCDenominator);
+ TCDenominator = Math.log(1.0 / this._rampLowPercentage);
+ this._rampDecayTC = t2 / this.checkExpInput(TCDenominator);
+};
+
+
+p5.Envelope.prototype.setRampPercentages = function (p1, p2) {
+ this._rampHighPercentage = this.checkExpInput(p1);
+ this._rampLowPercentage = this.checkExpInput(p2);
+ var TCDenominator = 1.0;
+
+ TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
+ this._rampAttackTC = this._rampAttackTime / this.checkExpInput(TCDenominator);
+ TCDenominator = Math.log(1.0 / this._rampLowPercentage);
+ this._rampDecayTC = this._rampDecayTime / this.checkExpInput(TCDenominator);
+};
+/**
+ * Assign a parameter to be controlled by this envelope.
+ * If a p5.Sound object is given, then the p5.Envelope will control its
+ * output gain. If multiple inputs are provided, the env will
+ * control all of them.
+ *
+ * @method setInput
+ * @for p5.Envelope
+ * @param {Object} [...inputs] A p5.sound object or
+ * Web Audio Param.
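+ * @example
+ *
+ * // A brief illustrative snippet: one envelope controlling the
+ * // output gain of two oscillators at once:
+ * let env = new p5.Envelope();
+ * let osc1 = new p5.Oscillator('sine');
+ * let osc2 = new p5.Oscillator('triangle');
+ * env.setInput(osc1, osc2);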
+ */
+
+
+p5.Envelope.prototype.setInput = function () {
+ for (var i = 0; i < arguments.length; i++) {
+ this.connect(arguments[i]);
+ }
+};
+/**
+ * Set whether the envelope ramp is linear (default) or exponential.
+ * Exponential ramps can be useful because we perceive amplitude
+ * and frequency logarithmically.
+ *
+ * @method setExp
+ * @for p5.Envelope
+ * @param {Boolean} isExp true is exponential, false is linear
+ */
+
+
+p5.Envelope.prototype.setExp = function (isExp) {
+ this.isExponential = isExp;
+};
+
+
+p5.Envelope.prototype.checkExpInput = function (value) {
+ if (value <= 0) {
+ value = 0.00000001;
+ }
+
+ return value;
+};
+/**
+ *
+ * Play tells the envelope to start acting on a given input.
+ * If the input is a p5.sound object (i.e. AudioIn, Oscillator,
+ * SoundFile), then Envelope will control its output volume.
+ * Envelopes can also be used to control any
+ * Web Audio AudioParam.
+ *
+ * @method play
+ * @for p5.Envelope
+ * @param {Object} unit A p5.sound object or
+ * Web Audio Param.
+ * @param {Number} [startTime] time from now (in seconds) at which to play
+ * @param {Number} [sustainTime] time to sustain before releasing the envelope
+ * @example
+ *
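+ * // An illustrative sketch of the play() pattern (the envelope
+ * // values here are arbitrary):
+ * let osc, env;
+ *
+ * function setup(){
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
+ * osc = new p5.Oscillator('triangle');
+ * env = new p5.Envelope(0.02, 0.5, 0.2, 0.1);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function playEnv(){
+ * // starting on a user gesture enables audio
+ * osc.start();
+ * // play the envelope on the oscillator, starting now,
+ * // sustaining for 0.2 seconds before the release
+ * env.play(osc, 0, 0.2);
+ * }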
+ */
+
+
+p5.Envelope.prototype.play = function (unit, secondsFromNow, susTime) {
+ var tFromNow = secondsFromNow || 0;
+
+ if (unit) {
+ if (this.connection !== unit) {
+ this.connect(unit);
+ }
+ }
+
+ this.triggerAttack(unit, tFromNow);
+ this.triggerRelease(unit, tFromNow + this.aTime + this.dTime + ~~susTime);
+};
+/**
+ * Trigger the Attack, and Decay portion of the Envelope.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go. Input can be
+ * any p5.sound object, or a
+ * Web Audio Param.
+ *
+ * @method triggerAttack
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow time from now (in seconds)
+ */
+
+/**
+ * Get audio from an input, i.e. your computer's microphone.
+ *
+ * Note: This uses the getUserMedia/
+ * Stream API, which is not supported by certain browsers. Access in the
+ * Chrome browser is limited to localhost and https; access over plain
+ * http may be blocked.
+ *
+ * @class p5.AudioIn
+ * @constructor
+ * @param {Function} [errorCallback] A function to call if there is an error
+ * accessing the AudioIn. For example,
+ * Safari and iOS devices do not
+ * currently allow microphone access.
+ * @example
+ *
+ * let mic;
+ *
+ * function setup(){
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(userStartAudio);
+ * textAlign(CENTER);
+ * mic = new p5.AudioIn();
+ * mic.start();
+ * }
+ *
+ * function draw(){
+ * background(0);
+ * fill(255);
+ * text('tap to start', width/2, 20);
+ *
+ * micLevel = mic.getLevel();
+ * let y = height - micLevel * height;
+ * ellipse(width/2, y, 10, 10);
+ * }
+ *
+ */
+
+var audioin_AudioIn =
+function () {
+ function AudioIn(errorCallback) {
+ audioin_classCallCheck(this, AudioIn);
+
+
+ /**
+ * @property {GainNode} input
+ */
+ this.input = main.audiocontext.createGain();
+ /**
+ * @property {GainNode} output
+ */
+
+ this.output = main.audiocontext.createGain();
+ /**
+ * @property {MediaStream|null} stream
+ */
+
+ this.stream = null;
+ /**
+ * @property {MediaStreamAudioSourceNode|null} mediaStream
+ */
+
+ this.mediaStream = null;
+ /**
+ * @property {Number|null} currentSource
+ */
+
+ this.currentSource = null;
+ /**
+ * Client must allow browser to access their microphone / audioin source.
+ * Default: false. Will become true when the client enables access.
+ *
+ * @property {Boolean} enabled
+ */
+
+ this.enabled = false;
+ /**
+ * Input amplitude, connected to the input by default but not to the main output
+ *
+ * @property {p5.Amplitude} amplitude
+ */
+
+ this.amplitude = new amplitude();
+ this.output.connect(this.amplitude.input);
+
+ if (!window.MediaStreamTrack || !window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {
+ errorCallback ? errorCallback() : window.alert('This browser does not support MediaStreamTrack and mediaDevices');
+ }
+
+
+ main.soundArray.push(this);
+ }
+ /**
+ * Start processing audio input. This enables the use of other
+ * AudioIn methods like getLevel(). Note that by default, AudioIn
+ * is not connected to p5.sound's output. So you won't hear
+ * anything unless you use the connect() method.
+ *
+ * Certain browsers limit access to the user's microphone. For example,
+ * Chrome only allows access from localhost and over https. For this reason,
+ * you may want to include an errorCallback, a function that is called if
+ * the browser won't provide mic access.
+ *
+ * @method start
+ * @for p5.AudioIn
+ * @param {Function} [successCallback] Name of a function to call on
+ * success.
+ * @param {Function} [errorCallback] Name of a function to call if
+ * there was an error. For example,
+ * some browsers do not support
+ * getUserMedia.
+ */
+
+
+ audioin_createClass(AudioIn, [{
+ key: "start",
+ value: function start(successCallback, errorCallback) {
+ var self = this;
+
+ if (this.stream) {
+ this.stop();
+ }
+
+
+ var audioSource = main.inputSources[self.currentSource];
+ var constraints = {
+ audio: {
+ sampleRate: main.audiocontext.sampleRate,
+ echoCancellation: false
+ }
+ };
+
+ if (main.inputSources[this.currentSource]) {
+ constraints.audio.deviceId = audioSource.deviceId;
+ }
+
+ window.navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
+ self.stream = stream;
+ self.enabled = true;
+
+ self.mediaStream = main.audiocontext.createMediaStreamSource(stream);
+ self.mediaStream.connect(self.output);
+
+ self.amplitude.setInput(self.output);
+ if (successCallback) successCallback();
+ })["catch"](function (err) {
+ if (errorCallback) errorCallback(err);else console.error(err);
+ });
+ }
+ /**
+ * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel().
+ * If re-starting, the user may be prompted for permission access.
+ *
+ * @method stop
+ * @for p5.AudioIn
+ */
+
+ }, {
+ key: "stop",
+ value: function stop() {
+ if (this.stream) {
+ this.stream.getTracks().forEach(function (track) {
+ track.stop();
+ });
+ this.mediaStream.disconnect();
+ delete this.mediaStream;
+ delete this.stream;
+ }
+ }
+ /**
+ * Connect to an audio unit. If no parameter is provided, will
+ * connect to the main output (i.e. your speakers).
+ *
+ * @method connect
+ * @for p5.AudioIn
+ * @param {Object} [unit] An object that accepts audio input,
+ * such as an FFT
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ if (unit) {
+ if (unit.hasOwnProperty('input')) {
+ this.output.connect(unit.input);
+ } else if (unit.hasOwnProperty('analyser')) {
+ this.output.connect(unit.analyser);
+ } else {
+ this.output.connect(unit);
+ }
+ } else {
+ this.output.connect(main.input);
+ }
+ }
+ /**
+ * Disconnect the AudioIn from all audio units. For example, if
+ * connect() had been called, disconnect() will stop sending
+ * signal to your speakers.
+ *
+ * @method disconnect
+ * @for p5.AudioIn
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.output) {
+ this.output.disconnect();
+
+ this.output.connect(this.amplitude.input);
+ }
+ }
+ /**
+ * Read the Amplitude (volume level) of an AudioIn. The AudioIn
+ * class contains its own instance of the Amplitude class to help
+ * make it easy to get a microphone's volume level. Accepts an
+ * optional smoothing value (0.0 < smoothing < 1.0). NOTE: AudioIn must
+ * .start() before using .getLevel().
+ *
+ * @method getLevel
+ * @for p5.AudioIn
+ * @param {Number} [smoothing] Smoothing is 0.0 by default.
+ * Smooths values based on previous values.
+ * @return {Number} Volume level (between 0.0 and 1.0)
+ */
+
+ }, {
+ key: "getLevel",
+ value: function getLevel(smoothing) {
+ if (smoothing) {
+ this.amplitude.smoothing = smoothing;
+ }
+
+ return this.amplitude.getLevel();
+ }
+ /**
+ * Set amplitude (volume) of a mic input between 0 and 1.0.
+ *
+ * @method amp
+ * @for p5.AudioIn
+ * @param {Number} vol between 0 and 1.0
+ * @param {Number} [time] ramp time (optional)
+ */
+
+ }, {
+ key: "amp",
+ value: function amp(vol, t) {
+ if (t) {
+ var rampTime = t || 0;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(main.audiocontext.currentTime);
+ this.output.gain.setValueAtTime(currentVol, main.audiocontext.currentTime);
+ this.output.gain.linearRampToValueAtTime(vol, rampTime + main.audiocontext.currentTime);
+ } else {
+ this.output.gain.cancelScheduledValues(main.audiocontext.currentTime);
+ this.output.gain.setValueAtTime(vol, main.audiocontext.currentTime);
+ }
+ }
+ /**
+ * Returns a list of available input sources. This is a wrapper
+ * for MediaDevices.enumerateDevices(),
+ * and it returns a Promise.
+ * @method getSources
+ * @for p5.AudioIn
+ * @param {Function} [successCallback] This callback function handles the sources when they
+ * have been enumerated. The callback function
+ * receives the deviceList array as its only argument
+ * @param {Function} [errorCallback] This optional callback receives the error
+ * message as its argument.
+ * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar
+ * to the enumerateDevices() method
+ * @example
+ *
+ * let audioIn;
+ *
+ * function setup(){
+ * text('getting sources...', 0, 20);
+ * audioIn = new p5.AudioIn();
+ * audioIn.getSources(gotSources);
+ * }
+ *
+ * function gotSources(deviceList) {
+ * if (deviceList.length > 0) {
+ * //set the source to the first item in the deviceList array
+ * audioIn.setSource(0);
+ * let currentSource = deviceList[audioIn.currentSource];
+ * text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ * }
+ * }
+ *
+ */
+
+ }, {
+ key: "getSources",
+ value: function getSources(onSuccess, onError) {
+ return new Promise(function (resolve, reject) {
+ window.navigator.mediaDevices.enumerateDevices().then(function (devices) {
+ main.inputSources = devices.filter(function (device) {
+ return device.kind === 'audioinput';
+ });
+ resolve(main.inputSources);
+
+ if (onSuccess) {
+ onSuccess(main.inputSources);
+ }
+ })["catch"](function (error) {
+ reject(error);
+
+ if (onError) {
+ onError(error);
+ } else {
+ console.error('This browser does not support MediaStreamTrack.getSources()');
+ }
+ });
+ });
+ }
+ /**
+ * Set the input source. Accepts a number representing a
+ * position in the array returned by getSources().
+ * This is only available in browsers that support
+ * navigator.mediaDevices.enumerateDevices().
+ *
+ * @method setSource
+ * @for p5.AudioIn
+ * @param {number} num position of input source in the array
+ * @example
+ *
+ * let audioIn;
+ *
+ * function setup(){
+ * text('getting sources...', 0, 20);
+ * audioIn = new p5.AudioIn();
+ * audioIn.getSources(gotSources);
+ * }
+ *
+ * function gotSources(deviceList) {
+ * if (deviceList.length > 0) {
+ * //set the source to the first item in the deviceList array
+ * audioIn.setSource(0);
+ * let currentSource = deviceList[audioIn.currentSource];
+ * text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ * }
+ * }
+ *
+ */
+
+ }, {
+ key: "setSource",
+ value: function setSource(num) {
+ if (main.inputSources.length > 0 && num < main.inputSources.length) {
+ this.currentSource = num;
+ console.log('set source to ', main.inputSources[this.currentSource]);
+ } else {
+ console.log('unable to set input source');
+ }
+
+
+ if (this.stream && this.stream.active) {
+ this.start();
+ }
+ }
+
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+ this.stop();
+
+ if (this.output) {
+ this.output.disconnect();
+ }
+
+ if (this.amplitude) {
+ this.amplitude.disconnect();
+ }
+
+ delete this.amplitude;
+ delete this.output;
+ }
+ }]);
+
+ return AudioIn;
+}();
+
+ var audioin = (audioin_AudioIn);
+var CrossFade = __webpack_require__(23);
+var CrossFade_default = __webpack_require__.n(CrossFade);
+
+function effect_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function effect_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function effect_createClass(Constructor, protoProps, staticProps) { if (protoProps) effect_defineProperties(Constructor.prototype, protoProps); if (staticProps) effect_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+/**
+ * Effect is a base class for audio effects in p5.
+ * This module handles the nodes and methods that are
+ * common and useful for current and future effects.
+ *
+ *
+ * This class is extended by p5.Distortion,
+ * p5.Compressor,
+ * p5.Delay,
+ * p5.Filter,
+ * p5.Reverb.
+ *
+ * @class p5.Effect
+ * @constructor
+ *
+ * @param {Object} [ac] Reference to the audio context of the p5 object
+ * @param {AudioNode} [input] Gain Node effect wrapper
+ * @param {AudioNode} [output] Gain Node effect wrapper
+ * @param {Object} [_drywet] Tone.JS CrossFade node (defaults to value: 1)
+ * @param {AudioNode} [wet] Effects that extend this class should connect
+ * the wet signal to this gain node, so that dry and wet
+ * signals are mixed properly.
+ */
+
+var effect_Effect =
+function () {
+ function Effect() {
+ effect_classCallCheck(this, Effect);
+
+ this.ac = main.audiocontext;
+ this.input = this.ac.createGain();
+ this.output = this.ac.createGain();
+ /**
+ * The p5.Effect class is built
+ * using Tone.js CrossFade
+ * @private
+ */
+
+ this._drywet = new CrossFade_default.a(1);
+ /**
+ * In classes that extend
+ * p5.Effect, connect effect nodes
+ * to the wet parameter
+ */
+
+ this.wet = this.ac.createGain();
+ this.input.connect(this._drywet.a);
+ this.wet.connect(this._drywet.b);
+
+ this._drywet.connect(this.output);
+
+ this.connect();
+
+ main.soundArray.push(this);
+ }
+ /**
+ * Set the output volume of the filter.
+ *
+ * @method amp
+ * @for p5.Effect
+ * @param {Number} [vol] amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts until rampTime
+ * @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds
+ */
+
+
+ effect_createClass(Effect, [{
+ key: "amp",
+ value: function amp(vol) {
+ var rampTime = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+ var tFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+ var now = main.audiocontext.currentTime;
+ var startTime = now + tFromNow;
+ var endTime = startTime + rampTime + 0.001;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(now);
+ this.output.gain.linearRampToValueAtTime(currentVol, startTime + 0.001);
+ this.output.gain.linearRampToValueAtTime(vol, endTime);
+ }
+ /**
+ * Link effects together in a chain.
+ * Example usage: filter.chain(reverb, delay, panner);
+ * May be used with an open-ended number of arguments.
+ *
+ * @method chain
+ * @for p5.Effect
+ * @param {Object} [arguments] Chain together multiple sound objects
+ */
+
+ }, {
+ key: "chain",
+ value: function chain() {
+ if (arguments.length > 0) {
+ this.connect(arguments[0]);
+
+ for (var i = 1; i < arguments.length; i += 1) {
+ arguments[i - 1].connect(arguments[i]);
+ }
+ }
+
+ return this;
+ }
+ /**
+ * Adjust the dry/wet value.
+ *
+ * @method drywet
+ * @for p5.Effect
+ * @param {Number} [fade] The desired drywet value (0 - 1.0)
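+ * @return {Number} The current dry/wet value.
+ * @example
+ *
+ * // A brief illustrative snippet, using p5.Reverb (which extends
+ * // p5.Effect):
+ * let reverb = new p5.Reverb();
+ * reverb.drywet(0.5); // equal parts dry and wet signal
+ * reverb.drywet(); // with no argument, returns the current value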
+ */
+
+ }, {
+ key: "drywet",
+ value: function drywet(fade) {
+ if (typeof fade !== 'undefined') {
+ this._drywet.fade.value = fade;
+ }
+
+ return this._drywet.fade.value;
+ }
+ /**
+ * Send output to a p5.js-sound object, a Web Audio Node, or use the
+ * signal to control an AudioParam.
+ *
+ * @method connect
+ * @for p5.Effect
+ * @param {Object} unit
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ var u = unit || p5.soundOut.input;
+ this.output.connect(u.input ? u.input : u);
+ }
+ /**
+ * Disconnect all output.
+ * @method disconnect
+ * @for p5.Effect
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.output) {
+ this.output.disconnect();
+ }
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+
+ if (this.input) {
+ this.input.disconnect();
+ delete this.input;
+ }
+
+ if (this.output) {
+ this.output.disconnect();
+ delete this.output;
+ }
+
+ if (this._drywet) {
+ this._drywet.disconnect();
+
+ delete this._drywet;
+ }
+
+ if (this.wet) {
+ this.wet.disconnect();
+ delete this.wet;
+ }
+
+ this.ac = undefined;
+ }
+ }]);
+
+ return Effect;
+}();
+
+ var effect = (effect_Effect);
+function filter_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { filter_typeof = function _typeof(obj) { return typeof obj; }; } else { filter_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return filter_typeof(obj); }
+
+function filter_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function filter_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function filter_createClass(Constructor, protoProps, staticProps) { if (protoProps) filter_defineProperties(Constructor.prototype, protoProps); if (staticProps) filter_defineProperties(Constructor, staticProps); return Constructor; }
+
+function filter_possibleConstructorReturn(self, call) { if (call && (filter_typeof(call) === "object" || typeof call === "function")) { return call; } return filter_assertThisInitialized(self); }
+
+function filter_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { _get = Reflect.get; } else { _get = function _get(target, property, receiver) { var base = _superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return _get(target, property, receiver || target); }
+
+function _superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = filter_getPrototypeOf(object); if (object === null) break; } return object; }
+
+function filter_getPrototypeOf(o) { filter_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return filter_getPrototypeOf(o); }
+
+function filter_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) filter_setPrototypeOf(subClass, superClass); }
+
+function filter_setPrototypeOf(o, p) { filter_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return filter_setPrototypeOf(o, p); }
+
+
+/**
+ * A p5.Filter uses a Web Audio Biquad Filter to filter
+ * the frequency response of an input source. Subclasses
+ * include:
+ * p5.LowPass:
+ * Allows frequencies below the cutoff frequency to pass through,
+ * and attenuates frequencies above the cutoff.
+ * p5.HighPass:
+ * The opposite of a lowpass filter.
+ * p5.BandPass:
+ * Allows a range of frequencies to pass through and attenuates
+ * the frequencies below and above this frequency range.
+ *
+ * The .res() method controls either the width of the
+ * bandpass, or the resonance of the low/highpass cutoff frequency.
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.Filter
+ * @extends p5.Effect
+ * @constructor
+ * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
+ * @example
+ *
+ * let fft, noise, filter;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100,100);
+ * cnv.mousePressed(makeNoise);
+ * fill(255, 0, 255);
+ *
+ * filter = new p5.BandPass();
+ * noise = new p5.Noise();
+ * noise.disconnect();
+ * noise.connect(filter);
+ *
+ * fft = new p5.FFT();
+ * }
+ *
+ * function draw() {
+ * background(220);
+ *
+ * // set the BandPass frequency based on mouseX
+ * let freq = map(mouseX, 0, width, 20, 10000);
+ * freq = constrain(freq, 0, 22050);
+ * filter.freq(freq);
+ * // give the filter a narrow band (lower res = wider bandpass)
+ * filter.res(50);
+ *
+ * // draw filtered spectrum
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * for (let i = 0; i < spectrum.length; i++) {
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width/spectrum.length, h);
+ * }
+ * if (!noise.started) {
+ * text('tap here and drag to change frequency', 10, 20, width - 20);
+ * } else {
+ * text('Frequency: ' + round(freq)+'Hz', 20, 20, width - 20);
+ * }
+ * }
+ *
+ * function makeNoise() {
+ * // see also: `userStartAudio()`
+ * noise.start();
+ * noise.amp(0.5, 0.2);
+ * }
+ *
+ * function mouseReleased() {
+ * noise.amp(0, 0.2);
+ * }
+ *
+ *
+ */
+
+var Filter =
+function (_Effect) {
+ filter_inherits(Filter, _Effect);
+
+ function Filter(type) {
+ var _this;
+
+ filter_classCallCheck(this, Filter);
+
+ _this = filter_possibleConstructorReturn(this, filter_getPrototypeOf(Filter).call(this));
+
+ /**
+ * The p5.Filter is built with a
+ * Web Audio BiquadFilter Node.
+ *
+ * @property {BiquadFilterNode} biquad
+ */
+
+ _this.biquad = _this.ac.createBiquadFilter();
+
+ _this.input.connect(_this.biquad);
+
+ _this.biquad.connect(_this.wet);
+
+ if (type) {
+ _this.setType(type);
+ }
+
+
+ _this._on = true;
+ _this._untoggledType = _this.biquad.type;
+ return _this;
+ }
+ /**
+ * Filter an audio signal according to a set
+ * of filter parameters.
+ *
+ * @method process
+ * @param {Object} src An object that outputs audio
+ * @param {Number} [freq] Frequency in Hz, from 10 to 22050
+ * @param {Number} [res] Resonance/Width of the filter frequency
+ * from 0.001 to 1000
+ */
+
+
+ filter_createClass(Filter, [{
+ key: "process",
+ value: function process(src, freq, res, time) {
+ src.connect(this.input);
+ this.set(freq, res, time);
+ }
+ /**
+ * Set the frequency and the resonance of the filter.
+ *
+ * @method set
+ * @param {Number} [freq] Frequency in Hz, from 10 to 22050
+ * @param {Number} [res] Resonance (Q) from 0.001 to 1000
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ */
+
+ }, {
+ key: "set",
+ value: function set(freq, res, time) {
+ if (freq) {
+ this.freq(freq, time);
+ }
+
+ if (res) {
+ this.res(res, time);
+ }
+ }
+ /**
+ * Set the filter frequency, in Hz, from 10 to 22050 (the range of
+ * human hearing, although in reality most people hear in a narrower
+ * range).
+ *
+ * @method freq
+ * @param {Number} freq Filter Frequency
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ * @return {Number} value Returns the current frequency value
+ */
+
+ }, {
+ key: "freq",
+ value: function freq(_freq, time) {
+ var t = time || 0;
+
+ if (_freq <= 0) {
+ _freq = 1;
+ }
+
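+ // cancel any scheduled automation just ahead of now, then ramp
+ // exponentially to the new value (exponential ramps cannot target 0,
+ // hence the <= 0 guard above)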
+ if (typeof _freq === 'number') {
+ this.biquad.frequency.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.biquad.frequency.exponentialRampToValueAtTime(_freq, this.ac.currentTime + 0.02 + t);
+ } else if (_freq) {
+ _freq.connect(this.biquad.frequency);
+ }
+
+ return this.biquad.frequency.value;
+ }
+ /**
+ * Controls either width of a bandpass frequency,
+ * or the resonance of a low/highpass cutoff frequency.
+ *
+ * @method res
+ * @param {Number} res Resonance/Width of filter freq
+ * from 0.001 to 1000
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ * @return {Number} value Returns the current res value
+ */
+
+ }, {
+ key: "res",
+ value: function res(_res, time) {
+ var t = time || 0;
+
+ if (typeof _res === 'number') {
+ this.biquad.Q.value = _res;
+ this.biquad.Q.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.biquad.Q.linearRampToValueAtTime(_res, this.ac.currentTime + 0.02 + t);
+ } else if (_res) {
+ _res.connect(this.biquad.Q);
+ }
+
+ return this.biquad.Q.value;
+ }
+ /**
+ * Controls the gain attribute of a Biquad Filter.
+ * This is distinct from .amp(), which is inherited from p5.Effect:
+ * .amp() controls the volume via the output gain node, whereas
+ * p5.Filter.gain() controls the gain parameter of a BiquadFilter node.
+ *
+ * @method gain
+ * @param {Number} gain gain value for the BiquadFilter node
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ * @return {Number} Returns the current or updated gain value
+ */
+
+ }, {
+ key: "gain",
+ value: function gain(_gain, time) {
+ var t = time || 0;
+
+ if (typeof _gain === 'number') {
+ this.biquad.gain.value = _gain;
+ this.biquad.gain.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.biquad.gain.linearRampToValueAtTime(_gain, this.ac.currentTime + 0.02 + t);
+ } else if (_gain) {
+ _gain.connect(this.biquad.gain);
+ }
+
+ return this.biquad.gain.value;
+ }
+ /**
+ * Toggle function. Switches between the specified type and allpass.
+ *
+ * @method toggle
+ * @return {Boolean} the current toggle state: true when the specified
+ * type is active, false when bypassed (allpass)
+ */
+
+ }, {
+ key: "toggle",
+ value: function toggle() {
+ this._on = !this._on;
+
+ if (this._on === true) {
+ this.biquad.type = this._untoggledType;
+ } else if (this._on === false) {
+ this.biquad.type = 'allpass';
+ }
+
+ return this._on;
+ }
+ /**
+ * Set the type of a p5.Filter. Possible types include:
+ * "lowpass" (default), "highpass", "bandpass",
+ * "lowshelf", "highshelf", "peaking", "notch",
+ * "allpass".
+ *
+ * @method setType
+ * @param {String} t
+ */
+
+ }, {
+ key: "setType",
+ value: function setType(t) {
+ this.biquad.type = t;
+ this._untoggledType = this.biquad.type;
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ _get(filter_getPrototypeOf(Filter.prototype), "dispose", this).call(this);
+
+ if (this.biquad) {
+ this.biquad.disconnect();
+ delete this.biquad;
+ }
+ }
+ }]);
+
+ return Filter;
+}(effect);
+/**
+ * Constructor: new p5.LowPass() Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('lowpass').
+ * See p5.Filter for methods.
+ *
+ * @class p5.LowPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+
+var LowPass =
+function (_Filter) {
+ filter_inherits(LowPass, _Filter);
+
+ function LowPass() {
+ filter_classCallCheck(this, LowPass);
+
+ return filter_possibleConstructorReturn(this, filter_getPrototypeOf(LowPass).call(this, 'lowpass'));
+ }
+
+ return LowPass;
+}(Filter);
+/**
+ * Constructor: new p5.HighPass() Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('highpass').
+ * See p5.Filter for methods.
+ *
+ * @class p5.HighPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+
+var HighPass =
+function (_Filter2) {
+ filter_inherits(HighPass, _Filter2);
+
+ function HighPass() {
+ filter_classCallCheck(this, HighPass);
+
+ return filter_possibleConstructorReturn(this, filter_getPrototypeOf(HighPass).call(this, 'highpass'));
+ }
+
+ return HighPass;
+}(Filter);
+/**
+ * Constructor: new p5.BandPass() Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('bandpass').
+ * See p5.Filter for methods.
+ *
+ * @class p5.BandPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+
+var BandPass =
+function (_Filter3) {
+ filter_inherits(BandPass, _Filter3);
+
+ function BandPass() {
+ filter_classCallCheck(this, BandPass);
+
+ return filter_possibleConstructorReturn(this, filter_getPrototypeOf(BandPass).call(this, 'bandpass'));
+ }
+
+ return BandPass;
+}(Filter);
+
+ var filter = (Filter);
+
+function eqFilter_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { eqFilter_typeof = function _typeof(obj) { return typeof obj; }; } else { eqFilter_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return eqFilter_typeof(obj); }
+
+function eqFilter_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function eqFilter_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function eqFilter_createClass(Constructor, protoProps, staticProps) { if (protoProps) eqFilter_defineProperties(Constructor.prototype, protoProps); if (staticProps) eqFilter_defineProperties(Constructor, staticProps); return Constructor; }
+
+function eqFilter_possibleConstructorReturn(self, call) { if (call && (eqFilter_typeof(call) === "object" || typeof call === "function")) { return call; } return eqFilter_assertThisInitialized(self); }
+
+function eqFilter_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function eqFilter_getPrototypeOf(o) { eqFilter_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return eqFilter_getPrototypeOf(o); }
+
+function eqFilter_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) eqFilter_setPrototypeOf(subClass, superClass); }
+
+function eqFilter_setPrototypeOf(o, p) { eqFilter_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return eqFilter_setPrototypeOf(o, p); }
+
+
+
+/**
+ * EQFilter extends p5.Filter with constraints
+ * necessary for the p5.EQ
+ *
+ * @private
+ */
+
+var eqFilter_EQFilter =
+function (_Filter) {
+ eqFilter_inherits(EQFilter, _Filter);
+
+ function EQFilter(freq, res) {
+ var _this;
+
+ eqFilter_classCallCheck(this, EQFilter);
+
+ _this = eqFilter_possibleConstructorReturn(this, eqFilter_getPrototypeOf(EQFilter).call(this, 'peaking'));
+
+ _this.disconnect();
+
+ _this.set(freq, res);
+
+ _this.biquad.gain.value = 0;
+ delete _this.input;
+ delete _this.output;
+ delete _this._drywet;
+ delete _this.wet;
+ return _this;
+ }
+
+ eqFilter_createClass(EQFilter, [{
+ key: "amp",
+ value: function amp() {
+ console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`');
+ }
+ }, {
+ key: "drywet",
+ value: function drywet() {
+ console.warn('`drywet()` is not available for p5.EQ bands.');
+ }
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ var u = unit || p5.soundOut.input;
+
+ if (this.biquad) {
+ this.biquad.connect(u.input ? u.input : u);
+ } else {
+ this.output.connect(u.input ? u.input : u);
+ }
+ }
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.biquad) {
+ this.biquad.disconnect();
+ }
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+ this.disconnect();
+ delete this.biquad;
+ }
+ }]);
+
+ return EQFilter;
+}(filter);
+
+ var eqFilter = (eqFilter_EQFilter);
+function eq_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { eq_typeof = function _typeof(obj) { return typeof obj; }; } else { eq_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return eq_typeof(obj); }
+
+function eq_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function eq_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function eq_createClass(Constructor, protoProps, staticProps) { if (protoProps) eq_defineProperties(Constructor.prototype, protoProps); if (staticProps) eq_defineProperties(Constructor, staticProps); return Constructor; }
+
+function eq_possibleConstructorReturn(self, call) { if (call && (eq_typeof(call) === "object" || typeof call === "function")) { return call; } return eq_assertThisInitialized(self); }
+
+function eq_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function eq_get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { eq_get = Reflect.get; } else { eq_get = function _get(target, property, receiver) { var base = eq_superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return eq_get(target, property, receiver || target); }
+
+function eq_superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = eq_getPrototypeOf(object); if (object === null) break; } return object; }
+
+function eq_getPrototypeOf(o) { eq_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return eq_getPrototypeOf(o); }
+
+function eq_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) eq_setPrototypeOf(subClass, superClass); }
+
+function eq_setPrototypeOf(o, p) { eq_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return eq_setPrototypeOf(o, p); }
+
+
+
+/**
+ * p5.EQ is an audio effect that performs the function of a multiband
+ * audio equalizer. Equalization is used to adjust the balance of
+ * frequency components of an audio signal. This process is commonly used
+ * in sound production and recording to change the waveform before it reaches
+ * a sound output device. EQ can also be used as an audio effect to create
+ * interesting distortions by filtering out parts of the spectrum. p5.EQ is
+ * built using a chain of Web Audio Biquad Filter Nodes and can be
+ * instantiated with 3 or 8 bands. Bands can be added or removed from
+ * the EQ by directly modifying p5.EQ.bands (the array that stores filters).
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.EQ
+ * @constructor
+ * @extends p5.Effect
+ * @param {Number} [_eqsize] Constructor will accept 3 or 8, defaults to 3
+ * @return {Object} p5.EQ object
+ *
+ * @example
+ *
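+ * A minimal usage sketch; this example is illustrative rather than from
+ * the original docs. It assumes a p5.Noise source and accesses the
+ * filters through the documented p5.EQ.bands array:
+ *
+ * let eq, noise;
+ * function setup() {
+ *   let cnv = createCanvas(100, 100);
+ *   cnv.mousePressed(toggleNoise);
+ *   text('tap to start/stop', 10, 20);
+ *
+ *   eq = new p5.EQ(3); // 3 bands: low / mid / high
+ *   noise = new p5.Noise();
+ *   noise.disconnect(); // so we only hear the equalized signal
+ *   eq.process(noise);  // route the noise through the EQ
+ *   eq.bands[0].gain(-40); // cut the lowest band
+ * }
+ * function toggleNoise() {
+ *   userStartAudio();
+ *   if (noise.started) { noise.stop(); } else { noise.start(); }
+ * }
+ *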
+ */
+
+var delay_Delay =
+function (_Effect) {
+ delay_inherits(Delay, _Effect);
+
+ function Delay() {
+ var _this;
+
+ delay_classCallCheck(this, Delay);
+
+ _this = delay_possibleConstructorReturn(this, delay_getPrototypeOf(Delay).call(this));
+ _this._split = _this.ac.createChannelSplitter(2);
+ _this._merge = _this.ac.createChannelMerger(2);
+ _this._leftGain = _this.ac.createGain();
+ _this._rightGain = _this.ac.createGain();
+ /**
+ * The p5.Delay is built with two
+ * Web Audio Delay Nodes, one for each stereo channel.
+ *
+ * @for p5.Delay
+ * @property {DelayNode} leftDelay
+ */
+
+ _this.leftDelay = _this.ac.createDelay();
+ /**
+ * The p5.Delay is built with two
+ * Web Audio Delay Nodes, one for each stereo channel.
+ * @for p5.Delay
+ * @property {DelayNode} rightDelay
+ */
+
+ _this.rightDelay = _this.ac.createDelay();
+ _this._leftFilter = new filter();
+ _this._rightFilter = new filter();
+
+ _this._leftFilter.disconnect();
+
+ _this._rightFilter.disconnect();
+
+ _this._leftFilter.biquad.frequency.setValueAtTime(1200, _this.ac.currentTime);
+
+ _this._rightFilter.biquad.frequency.setValueAtTime(1200, _this.ac.currentTime);
+
+ _this._leftFilter.biquad.Q.setValueAtTime(0.3, _this.ac.currentTime);
+
+ _this._rightFilter.biquad.Q.setValueAtTime(0.3, _this.ac.currentTime);
+
+
+ _this.input.connect(_this._split);
+
+ _this.leftDelay.connect(_this._leftGain);
+
+ _this.rightDelay.connect(_this._rightGain);
+
+ _this._leftGain.connect(_this._leftFilter.input);
+
+ _this._rightGain.connect(_this._rightFilter.input);
+
+ _this._merge.connect(_this.wet);
+
+ _this._leftFilter.biquad.gain.setValueAtTime(1, _this.ac.currentTime);
+
+ _this._rightFilter.biquad.gain.setValueAtTime(1, _this.ac.currentTime);
+
+
+ _this.setType(0);
+
+ _this._maxDelay = _this.leftDelay.delayTime.maxValue;
+
+ _this.feedback(0.5);
+
+ return _this;
+ }
+ /**
+ * Add delay to an audio signal according to a set
+ * of delay parameters.
+ *
+ * @method process
+ * @for p5.Delay
+ * @param {Object} src An object that outputs audio
+ * @param {Number} [delayTime] Time (in seconds) of the delay/echo.
+ * Some browsers limit delayTime to
+ * 1 second.
+ * @param {Number} [feedback] sends the delay back through itself
+ * in a loop that decreases in volume
+ * each time.
+ * @param {Number} [lowPass] Cutoff frequency. Only frequencies
+ * below the lowPass will be part of the
+ * delay.
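+ *
+ * @example
+ * A short sketch; illustrative, not from the original docs
+ * (it assumes a p5.Oscillator as the source):
+ *
+ * let osc, delay;
+ * function setup() {
+ *   let cnv = createCanvas(100, 100);
+ *   cnv.mousePressed(playNote);
+ *   text('tap to play', 10, 20);
+ *
+ *   osc = new p5.Oscillator('square');
+ *   osc.amp(0);
+ *   delay = new p5.Delay();
+ *   // 0.12s echo, 50% feedback, lowpass at 2300 Hz
+ *   delay.process(osc, 0.12, 0.5, 2300);
+ * }
+ * function playNote() {
+ *   userStartAudio();
+ *   osc.start();
+ *   osc.amp(0.5, 0.01);    // quick fade in
+ *   osc.amp(0, 0.2, 0.25); // fade out, leaving the echoes
+ * }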
+ */
+
+
+ delay_createClass(Delay, [{
+ key: "process",
+ value: function process(src, _delayTime, _feedback, _filter) {
+ var feedback = _feedback || 0;
+ var delayTime = _delayTime || 0;
+
+ if (feedback >= 1.0) {
+ throw new Error('Feedback value will force a positive feedback loop.');
+ }
+
+ if (delayTime >= this._maxDelay) {
+ throw new Error('Delay Time exceeds maximum delay time of ' + this._maxDelay + ' second.');
+ }
+
+ src.connect(this.input);
+ this.leftDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
+ this.rightDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
+ this._leftGain.gain.value = feedback;
+ this._rightGain.gain.value = feedback;
+
+ if (_filter) {
+ this._leftFilter.freq(_filter);
+
+ this._rightFilter.freq(_filter);
+ }
+ }
+ /**
+ * Set the delay (echo) time, in seconds. Usually this value will be
+ * a floating point number between 0.0 and 1.0.
+ *
+ * @method delayTime
+ * @for p5.Delay
+ * @param {Number} delayTime Time (in seconds) of the delay
+ */
+
+ }, {
+ key: "delayTime",
+ value: function delayTime(t) {
+ if (typeof t !== 'number') {
+ t.connect(this.leftDelay.delayTime);
+ t.connect(this.rightDelay.delayTime);
+ } else {
+ this.leftDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
+ this.rightDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
+ this.leftDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
+ this.rightDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
+ }
+ }
+ /**
+ * Feedback occurs when Delay sends its signal back through its input
+ * in a loop. The feedback amount determines how much signal to send each
+ * time through the loop. A feedback greater than 1.0 is not desirable because
+ * it will increase the overall output each time through the loop,
+ * creating an infinite feedback loop. The default value is 0.5.
+ *
+ * @method feedback
+ * @for p5.Delay
+ * @param {Number|Object} feedback 0.0 to 1.0, or an object such as an
+ * Oscillator that can be used to
+ * modulate this param
+ * @returns {Number} Feedback value
+ *
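+ * @example
+ * An illustrative snippet (assumes an existing p5.Delay named delay):
+ *
+ * // set a fixed feedback amount...
+ * delay.feedback(0.7);
+ * // ...or modulate it with a slow, disconnected LFO whose output
+ * // is scaled into a safe 0 to 0.7 range
+ * let lfo = new p5.Oscillator('sine');
+ * lfo.disconnect(); // control signal only, not audible
+ * lfo.freq(0.25);
+ * lfo.start();
+ * delay.feedback(lfo.scale(-1, 1, 0, 0.7));
+ *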
+ */
+
+ }, {
+ key: "feedback",
+ value: function feedback(f) {
+ if (f && typeof f !== 'number') {
+ f.connect(this._leftGain.gain);
+ f.connect(this._rightGain.gain);
+ } else if (f >= 1.0) {
+ throw new Error('Feedback value will force a positive feedback loop.');
+ } else if (typeof f === 'number') {
+ this._leftGain.gain.value = f;
+ this._rightGain.gain.value = f;
+ }
+
+
+ return this._leftGain.gain.value;
+ }
+ /**
+ * Set a lowpass filter frequency for the delay. A lowpass filter
+ * will cut off any frequencies higher than the filter frequency.
+ *
+ * @method filter
+ * @for p5.Delay
+ * @param {Number|Object} cutoffFreq A lowpass filter will cut off any
+ * frequencies higher than the filter frequency.
+ * @param {Number|Object} res Resonance of the filter frequency
+ * cutoff, or an object (i.e. a p5.Oscillator)
+ * that can be used to modulate this parameter.
+ * High numbers (e.g. 15) will produce a resonance,
+ * low numbers (e.g. 0.2) will produce a slope.
+ */
+
+ }, {
+ key: "filter",
+ value: function filter(freq, q) {
+ this._leftFilter.set(freq, q);
+
+ this._rightFilter.set(freq, q);
+ }
+ /**
+ * Choose a preset type of delay. 'pingPong' bounces the signal
+ * from the left to the right channel to produce a stereo effect.
+ * Any other parameter will revert to the default delay setting.
+ *
+ * @method setType
+ * @for p5.Delay
+ * @param {String|Number} type 'pingPong' (1) or 'default' (0)
+ */
+
+ }, {
+ key: "setType",
+ value: function setType(t) {
+ if (t === 1) {
+ t = 'pingPong';
+ }
+
+ this._split.disconnect();
+
+ this._leftFilter.disconnect();
+
+ this._rightFilter.disconnect();
+
+ this._split.connect(this.leftDelay, 0);
+
+ this._split.connect(this.rightDelay, 1);
+
+ switch (t) {
+ case 'pingPong':
+ this._rightFilter.setType(this._leftFilter.biquad.type);
+
+ this._leftFilter.output.connect(this._merge, 0, 0);
+
+ this._rightFilter.output.connect(this._merge, 0, 1);
+
+ this._leftFilter.output.connect(this.rightDelay);
+
+ this._rightFilter.output.connect(this.leftDelay);
+
+ break;
+
+ default:
+ this._leftFilter.output.connect(this._merge, 0, 0);
+
+ this._rightFilter.output.connect(this._merge, 0, 1);
+
+ this._leftFilter.output.connect(this.leftDelay);
+
+ this._rightFilter.output.connect(this.rightDelay);
+
+ }
+ }
+
+ /**
+ * Set the output level of the delay effect.
+ *
+ * @method amp
+ * @for p5.Delay
+ * @param {Number} volume amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ */
+
+ /**
+ * Send output to a p5.sound or web audio object
+ *
+ * @method connect
+ * @for p5.Delay
+ * @param {Object} unit
+ */
+
+ /**
+ * Disconnect all output.
+ *
+ * @method disconnect
+ * @for p5.Delay
+ */
+
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ delay_get(delay_getPrototypeOf(Delay.prototype), "dispose", this).call(this);
+
+ this._split.disconnect();
+
+ this._leftFilter.dispose();
+
+ this._rightFilter.dispose();
+
+ this._merge.disconnect();
+
+ this._leftGain.disconnect();
+
+ this._rightGain.disconnect();
+
+ this.leftDelay.disconnect();
+ this.rightDelay.disconnect();
+ this._split = undefined;
+ this._leftFilter = undefined;
+ this._rightFilter = undefined;
+ this._merge = undefined;
+ this._leftGain = undefined;
+ this._rightGain = undefined;
+ this.leftDelay = undefined;
+ this.rightDelay = undefined;
+ }
+ }]);
+
+ return Delay;
+}(effect);
+
+ var delay = (delay_Delay);
+function reverb_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { reverb_typeof = function _typeof(obj) { return typeof obj; }; } else { reverb_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return reverb_typeof(obj); }
+
+function reverb_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function reverb_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function reverb_createClass(Constructor, protoProps, staticProps) { if (protoProps) reverb_defineProperties(Constructor.prototype, protoProps); if (staticProps) reverb_defineProperties(Constructor, staticProps); return Constructor; }
+
+function reverb_possibleConstructorReturn(self, call) { if (call && (reverb_typeof(call) === "object" || typeof call === "function")) { return call; } return reverb_assertThisInitialized(self); }
+
+function reverb_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function reverb_get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { reverb_get = Reflect.get; } else { reverb_get = function _get(target, property, receiver) { var base = reverb_superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return reverb_get(target, property, receiver || target); }
+
+function reverb_superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = reverb_getPrototypeOf(object); if (object === null) break; } return object; }
+
+function reverb_getPrototypeOf(o) { reverb_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return reverb_getPrototypeOf(o); }
+
+function reverb_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) reverb_setPrototypeOf(subClass, superClass); }
+
+function reverb_setPrototypeOf(o, p) { reverb_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return reverb_setPrototypeOf(o, p); }
+
+
+
+
+/**
+ * Reverb adds depth to a sound through a large number of decaying
+ * echoes. It creates the perception that sound is occurring in a
+ * physical space. The p5.Reverb has parameters for Time (how long the
+ * reverb lasts) and decayRate (how much the sound decays with each echo)
+ * that can be set with the .set() or .process() methods. The p5.Convolver
+ * extends p5.Reverb allowing you to recreate the sound of actual physical
+ * spaces through convolution.
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.Reverb
+ * @extends p5.Effect
+ * @constructor
+ * @example
+ *
+ * let soundFile, reverb;
+ * function preload() {
+ * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ *
+ * reverb = new p5.Reverb();
+ * soundFile.disconnect(); // so we'll only hear reverb...
+ *
+ * // connect soundFile to reverb, process w/
+ * // 3 second reverbTime, decayRate of 2%
+ * reverb.process(soundFile, 3, 2);
+ * }
+ *
+ * function draw() {
+ * let dryWet = constrain(map(mouseX, 0, width, 0, 1), 0, 1);
+ * // 1 = all reverb, 0 = no reverb
+ * reverb.drywet(dryWet);
+ *
+ * background(220);
+ * text('tap to play', 10, 20);
+ * text('dry/wet: ' + round(dryWet * 100) + '%', 10, height - 20);
+ * }
+ *
+ * function playSound() {
+ * soundFile.play();
+ * }
+ *
+ */
+
+var Reverb =
+function (_Effect) {
+ reverb_inherits(Reverb, _Effect);
+
+ function Reverb() {
+ var _this;
+
+ reverb_classCallCheck(this, Reverb);
+
+ _this = reverb_possibleConstructorReturn(this, reverb_getPrototypeOf(Reverb).call(this));
+
+ _this._initConvolverNode();
+
+
+ _this.input.gain.value = 0.5;
+
+ _this._seconds = 3;
+ _this._decay = 2;
+ _this._reverse = false;
+
+ _this._buildImpulse();
+
+ return _this;
+ }
+
+ reverb_createClass(Reverb, [{
+ key: "_initConvolverNode",
+ value: function _initConvolverNode() {
+ this.convolverNode = this.ac.createConvolver();
+ this.input.connect(this.convolverNode);
+ this.convolverNode.connect(this.wet);
+ }
+ }, {
+ key: "_teardownConvolverNode",
+ value: function _teardownConvolverNode() {
+ if (this.convolverNode) {
+ this.convolverNode.disconnect();
+ delete this.convolverNode;
+ }
+ }
+ }, {
+ key: "_setBuffer",
+ value: function _setBuffer(audioBuffer) {
+ this._teardownConvolverNode();
+
+ this._initConvolverNode();
+
+ this.convolverNode.buffer = audioBuffer;
+ }
+ /**
+ * Connect a source to the reverb, and assign reverb parameters.
+ *
+ * @method process
+ * @for p5.Reverb
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ * @param {Number} [seconds] Duration of the reverb, in seconds.
+ * Min: 0, Max: 10. Defaults to 3.
+ * @param {Number} [decayRate] Percentage of decay with each echo.
+ * Min: 0, Max: 100. Defaults to 2.
+ * @param {Boolean} [reverse] Play the reverb backwards or forwards.
+ */
+
+ }, {
+ key: "process",
+ value: function process(src, seconds, decayRate, reverse) {
+ src.connect(this.input);
+ var rebuild = false;
+
+ if (seconds) {
+ this._seconds = seconds;
+ rebuild = true;
+ }
+
+ if (decayRate) {
+ this._decay = decayRate;
+ }
+
+ if (reverse) {
+ this._reverse = reverse;
+ }
+
+ if (rebuild) {
+ this._buildImpulse();
+ }
+ }
+ /**
+ * Set the reverb settings. Similar to .process(), but without
+ * assigning a new input.
+ *
+ * @method set
+ * @for p5.Reverb
+ * @param {Number} [seconds] Duration of the reverb, in seconds.
+ * Min: 0, Max: 10. Defaults to 3.
+ * @param {Number} [decayRate] Percentage of decay with each echo.
+ * Min: 0, Max: 100. Defaults to 2.
+ * @param {Boolean} [reverse] Play the reverb backwards or forwards.
+ */
+
+ }, {
+ key: "set",
+ value: function set(seconds, decayRate, reverse) {
+ var rebuild = false;
+
+ if (seconds) {
+ this._seconds = seconds;
+ rebuild = true;
+ }
+
+ if (decayRate) {
+ this._decay = decayRate;
+ }
+
+ if (reverse) {
+ this._reverse = reverse;
+ }
+
+ if (rebuild) {
+ this._buildImpulse();
+ }
+ }
+
+ /**
+ * Set the output level of the reverb effect.
+ *
+ * @method amp
+ * @for p5.Reverb
+ * @param {Number} volume amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ */
+
+ /**
+ * Send output to a p5.sound or web audio object
+ *
+ * @method connect
+ * @for p5.Reverb
+ * @param {Object} unit
+ */
+
+ /**
+ * Disconnect all output.
+ *
+ * @method disconnect
+ * @for p5.Reverb
+ */
+
+ /**
+ * Inspired by Simple Reverb by Jordan Santell
+ * https://github.com/web-audio-components/simple-reverb/blob/master/index.js
+ *
+ * Utility function for building an impulse response
+ * based on the module parameters.
+ *
+ * @private
+ */
+
+ }, {
+ key: "_buildImpulse",
+ value: function _buildImpulse() {
+ var rate = this.ac.sampleRate;
+ var length = rate * this._seconds;
+ var decay = this._decay;
+ var impulse = this.ac.createBuffer(2, length, rate);
+ var impulseL = impulse.getChannelData(0);
+ var impulseR = impulse.getChannelData(1);
+ var n, i;
+
+ for (i = 0; i < length; i++) {
+ n = this._reverse ? length - i : i;
+ impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
+ impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
+ }
+
+ this._setBuffer(impulse);
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ reverb_get(reverb_getPrototypeOf(Reverb.prototype), "dispose", this).call(this);
+
+ this._teardownConvolverNode();
+ }
+ }]);
+
+ return Reverb;
+}(effect);
+
+/**
+ * p5.Convolver extends p5.Reverb. It can emulate the sound of real
+ * physical spaces through a process called
+ * convolution.
+ *
+ * Convolution multiplies any audio input by an "impulse response"
+ * to simulate the dispersion of sound over time. The impulse response is
+ * generated from an audio file that you provide. One way to
+ * generate an impulse response is to pop a balloon in a reverberant space
+ * and record the echo. Convolution can also be used to experiment with
+ * sound.
+ *
+ * Use the method createConvolver(path) to instantiate a
+ * p5.Convolver with a path to your impulse response audio file.
+ *
+ * @class p5.Convolver
+ * @extends p5.Reverb
+ * @constructor
+ * @param {String} path path to a sound file
+ * @param {Function} [callback] function to call when loading succeeds
+ * @param {Function} [errorCallback] function to call if loading fails.
+ * This function will receive an error or
+ * XMLHttpRequest object with information
+ * about what went wrong.
+ * @example
+ *
+ * let cVerb, sound;
+ * function preload() {
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
+ *
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
+ *
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * // disconnect from main output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ * }
+ *
+ * function playSound() {
+ * sound.play();
+ * }
+ *
+ */
+
+
+var reverb_Convolver =
+function (_Reverb) {
+ reverb_inherits(Convolver, _Reverb);
+
+ function Convolver(path, callback, errorCallback) {
+ var _this2;
+
+ reverb_classCallCheck(this, Convolver);
+
+ _this2 = reverb_possibleConstructorReturn(this, reverb_getPrototypeOf(Convolver).call(this));
+ /**
+ * Internally, the p5.Convolver uses a
+ * Web Audio Convolver Node.
+ *
+ * @property {ConvolverNode} convolverNode
+ */
+
+ _this2._initConvolverNode();
+
+
+ _this2.input.gain.value = 0.5;
+
+ if (path) {
+ _this2.impulses = [];
+
+ _this2._loadBuffer(path, callback, errorCallback);
+ } else {
+ _this2._seconds = 3;
+ _this2._decay = 2;
+ _this2._reverse = false;
+
+ _this2._buildImpulse();
+ }
+ /**
+ * If you load multiple impulse files using the .addImpulse method,
+ * they will be stored as Objects in this Array. Toggle between them
+ * with the toggleImpulse(id) method.
+ *
+ * @property {Array} impulses
+ * @for p5.Convolver
+ */
+
+
+ _this2.impulses = [];
+ _this2.set = null;
+ return _this2;
+ }
+ /**
+ * Private method to load a buffer as an Impulse Response,
+ * assign it to the convolverNode, and add to the Array of .impulses.
+ *
+ * @param {String} path
+ * @param {Function} callback
+ * @param {Function} errorCallback
+ * @private
+ */
+
+
+ reverb_createClass(Convolver, [{
+ key: "_loadBuffer",
+ value: function _loadBuffer(_path, callback, errorCallback) {
+ var path = p5.prototype._checkFileFormats(_path);
+
+ var self = this;
+ var errorTrace = new Error().stack;
+ var ac = Object(audiocontext["b" ])();
+ var request = new XMLHttpRequest();
+ request.open('GET', path, true);
+ request.responseType = 'arraybuffer';
+
+ request.onload = function () {
+ if (request.status === 200) {
+ ac.decodeAudioData(request.response, function (buff) {
+ var buffer = {};
+ var chunks = path.split('/');
+ buffer.name = chunks[chunks.length - 1];
+ buffer.audioBuffer = buff;
+ self.impulses.push(buffer);
+
+ self._setBuffer(buffer.audioBuffer);
+
+ if (callback) {
+ callback(buffer);
+ }
+ },
+ function () {
+ var err = new errorHandler('decodeAudioData', errorTrace, self.url);
+ var msg = 'AudioContext error at decodeAudioData for ' + self.url;
+
+ if (errorCallback) {
+ err.msg = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ });
+ }
+ else {
+ var err = new errorHandler('loadConvolver', errorTrace, self.url);
+ var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ }
+ };
+
+
+ request.onerror = function () {
+ var err = new errorHandler('loadConvolver', errorTrace, self.url);
+ var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ };
+
+ request.send();
+ }
+ /**
+ * Connect a source to the convolver.
+ *
+ * @method process
+ * @for p5.Convolver
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ * @example
+ *
+ * let cVerb, sound;
+ * function preload() {
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
+ *
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
+ *
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * // disconnect from main output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ * }
+ *
+ * function playSound() {
+ * sound.play();
+ * }
+ *
+ *
+ */
+
+ }, {
+ key: "process",
+ value: function process(src) {
+ src.connect(this.input);
+ }
+ /**
+ * Load and assign a new Impulse Response to the p5.Convolver.
+ * The impulse is added to the .impulses array. Previous
+ * impulses can be accessed with the .toggleImpulse(id)
+ * method.
+ *
+ * @method addImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
+ */
+
+ }, {
+ key: "addImpulse",
+ value: function addImpulse(path, callback, errorCallback) {
+ if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+ }
+
+ this._loadBuffer(path, callback, errorCallback);
+ }
+ /**
+ * Similar to .addImpulse, except that the .impulses
+ * Array is reset to save memory. A new .impulses
+ * array is created with this impulse as the only item.
+ *
+ * @method resetImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
+ */
+
+ }, {
+ key: "resetImpulse",
+ value: function resetImpulse(path, callback, errorCallback) {
+ if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+ }
+
+ this.impulses = [];
+
+ this._loadBuffer(path, callback, errorCallback);
+ }
+ /**
+ * If you have used .addImpulse() to add multiple impulses
+ * to a p5.Convolver, then you can use this method to toggle between
+ * the items in the .impulses Array. Accepts a parameter
+ * to identify which impulse you wish to use, identified either by its
+ * original filename (String) or by its position in the .impulses
+ * Array (Number).
+ * You can access the objects in the .impulses Array directly. Each
+ * Object has two attributes: an .audioBuffer (type:
+ * Web Audio
+ * AudioBuffer) and a .name, a String that corresponds
+ * with the original filename.
+ *
+ * @method toggleImpulse
+ * @for p5.Convolver
+ * @param {String|Number} id Identify the impulse by its original filename
+ * (String), or by its position in the
+ * .impulses Array (Number).
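+ *
+ * @example
+ * An illustrative snippet (assumes these impulse files exist and
+ * have finished loading):
+ *
+ * cVerb = createConvolver('assets/concrete-tunnel.mp3');
+ * cVerb.addImpulse('assets/small-plate.mp3');
+ * // later, switch by filename or by position in the Array:
+ * cVerb.toggleImpulse('small-plate.mp3');
+ * cVerb.toggleImpulse(0);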
+ */
+
+ }, {
+ key: "toggleImpulse",
+ value: function toggleImpulse(id) {
+ if (typeof id === 'number' && id < this.impulses.length) {
+ this._setBuffer(this.impulses[id].audioBuffer);
+ }
+
+ if (typeof id === 'string') {
+ for (var i = 0; i < this.impulses.length; i++) {
+ if (this.impulses[i].name === id) {
+ this._setBuffer(this.impulses[i].audioBuffer);
+
+ break;
+ }
+ }
+ }
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ reverb_get(reverb_getPrototypeOf(Convolver.prototype), "dispose", this).call(this);
+
+
+ for (var i in this.impulses) {
+ if (this.impulses[i]) {
+ this.impulses[i] = null;
+ }
+ }
+ }
+ }]);
+
+ return Convolver;
+}(Reverb);
+/**
+ * Create a p5.Convolver. Accepts a path to a soundfile
+ * that will be used to generate an impulse response.
+ *
+ * @method createConvolver
+ * @for p5
+ * @param {String} path path to a sound file
+ * @param {Function} [callback] function to call if loading is successful.
+ * The object will be passed in as the argument
+ * to the callback function.
+ * @param {Function} [errorCallback] function to call if loading is not successful.
+ * A custom error will be passed in as the argument
+ * to the callback function.
+ * @return {p5.Convolver}
+ * @example
+ *
+ * let cVerb, sound;
+ * function preload() {
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
+ *
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
+ *
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * // disconnect from main output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ * }
+ *
+ * function playSound() {
+ * sound.play();
+ * }
+ *
+ */
+
+
+function createConvolver(path, callback, errorCallback) {
+ if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+ }
+
+ var self = this;
+ var cReverb = new reverb_Convolver(path, function (buffer) {
+ if (typeof callback === 'function') {
+ callback(buffer);
+ }
+
+ if (typeof self._decrementPreload === 'function') {
+ self._decrementPreload();
+ }
+ }, errorCallback);
+ cReverb.impulses = [];
+ return cReverb;
+}
+
+
+var Clock = __webpack_require__(11);
+var Clock_default = __webpack_require__.n(Clock);
+
+function metro_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function metro_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function metro_createClass(Constructor, protoProps, staticProps) { if (protoProps) metro_defineProperties(Constructor.prototype, protoProps); if (staticProps) metro_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+
+var metro_Metro =
+function () {
+ function Metro() {
+ metro_classCallCheck(this, Metro);
+
+ this.clock = new Clock_default.a({
+ callback: this.ontick.bind(this)
+ });
+ this.syncedParts = [];
+ this.bpm = 120;
+
+ this._init();
+
+ this.prevTick = 0;
+ this.tatumTime = 0;
+
+ this.tickCallback = function () {};
+ }
+
+ metro_createClass(Metro, [{
+ key: "ontick",
+ value: function ontick(tickTime) {
+ var elapsedTime = tickTime - this.prevTick;
+ var secondsFromNow = tickTime - main.audiocontext.currentTime;
+
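+ // ignore a tick that arrives more than 20ms early relative to
+ // the expected tatum interval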
+ if (elapsedTime - this.tatumTime <= -0.02) {
+ return;
+ } else {
+ this.prevTick = tickTime;
+
+ var self = this;
+ this.syncedParts.forEach(function (thisPart) {
+ if (!thisPart.isPlaying) return;
+ thisPart.incrementStep(secondsFromNow);
+
+ thisPart.phrases.forEach(function (thisPhrase) {
+ var phraseArray = thisPhrase.sequence;
+ var bNum = self.metroTicks % phraseArray.length;
+
+ if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping)) {
+ thisPhrase.callback(secondsFromNow, phraseArray[bNum]);
+ }
+ });
+ });
+ this.metroTicks += 1;
+ this.tickCallback(secondsFromNow);
+ }
+ }
+ }, {
+ key: "setBPM",
+ value: function setBPM(bpm) {
+ var rampTime = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+ var beatTime = 60 / (bpm * this.tatums);
+ var now = main.audiocontext.currentTime;
+ this.tatumTime = beatTime;
+ this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);
+ this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);
+ this.bpm = bpm;
+ }
+ }, {
+ key: "getBPM",
+ value: function getBPM() {
+ return this.clock.getRate() / this.tatums * 60;
+ }
+ }, {
+ key: "_init",
+ value: function _init() {
+ this.metroTicks = 0;
+ }
+
+ }, {
+ key: "resetSync",
+ value: function resetSync(part) {
+ this.syncedParts = [part];
+ }
+
+ }, {
+ key: "pushSync",
+ value: function pushSync(part) {
+ this.syncedParts.push(part);
+ }
+ }, {
+ key: "start",
+ value: function start(timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = main.audiocontext.currentTime;
+ this.clock.start(now + t);
+ this.setBPM(this.bpm);
+ }
+ }, {
+ key: "stop",
+ value: function stop(timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = main.audiocontext.currentTime;
+ this.clock.stop(now + t);
+ }
+ }, {
+ key: "beatLength",
+ value: function beatLength(tatums) {
+ this.tatums = 1 / tatums / 4;
+ }
+ }]);
+
+ return Metro;
+}();
+
+ var metro = (metro_Metro);
+function looper_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function looper_createClass(Constructor, protoProps, staticProps) { if (protoProps) looper_defineProperties(Constructor.prototype, protoProps); if (staticProps) looper_defineProperties(Constructor, staticProps); return Constructor; }
+
+function looper_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+
+
+var BPM = 120;
+/**
+ * Set the global tempo, in beats per minute, for all
+ * p5.Parts. This method will impact all active p5.Parts.
+ *
+ * @method setBPM
+ * @for p5
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} rampTime Seconds from now
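+ *
+ * @example
+ * An illustrative call:
+ *
+ * setBPM(120, 0.5); // ramp all active parts to 120 BPM over 0.5s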
+ */
+
+p5.prototype.setBPM = function (bpm, rampTime) {
+ BPM = bpm;
+
+ for (var i in main.parts) {
+ if (main.parts[i]) {
+ main.parts[i].setBPM(bpm, rampTime);
+ }
+ }
+};
+/**
+ * A phrase is a pattern of musical events over time, i.e.
+ * a series of notes and rests.
+ *
+ * Phrases must be added to a p5.Part for playback, and
+ * each part can play multiple phrases at the same time.
+ * For example, one Phrase might be a kick drum, another
+ * could be a snare, and another could be the bassline.
+ *
+ * The first parameter is a name so that the phrase can be
+ * modified or deleted later. The callback is a function that
+ * this phrase will call at every step; for example, it might be
+ * called playNote(value){}. The array determines
+ * which value is passed into the callback at each step of the
+ * phrase. It can contain numbers, an object with multiple numbers,
+ * or a zero (0), which indicates a rest, so the callback won't be called.
+ *
+ * @class p5.Phrase
+ * @constructor
+ * @param {String} name Name so that you can access the Phrase.
+ * @param {Function} callback The name of a function that this phrase
+ * will call. Typically it will play a sound,
+ * and accept two parameters: a time at which
+ * to play the sound (in seconds from now),
+ * and a value from the sequence array. The
+ * time should be passed into the play() or
+ * start() method to ensure precision.
+ * @param {Array} sequence Array of values to pass into the callback
+ * at each step of the phrase.
+ * @example
+ *
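+ * A short sketch; illustrative, not from the original docs
+ * (it assumes an assets/beatbox.mp3 sound file):
+ *
+ * let mySound, myPhrase, myPart;
+ * let pattern = [1, 0, 0, 2, 0, 2, 0, 0];
+ *
+ * function preload() {
+ *   mySound = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ *   let cnv = createCanvas(100, 100);
+ *   cnv.mousePressed(playPhrase);
+ *   text('tap to play', 10, 20);
+ *
+ *   // the callback receives (time, value) for each nonzero step
+ *   myPhrase = new p5.Phrase('bbox', onEachStep, pattern);
+ *   myPart = new p5.Part();
+ *   myPart.addPhrase(myPhrase);
+ *   myPart.setBPM(60);
+ * }
+ *
+ * function onEachStep(time, playbackRate) {
+ *   mySound.rate(playbackRate);
+ *   mySound.play(time);
+ * }
+ *
+ * function playPhrase() {
+ *   userStartAudio();
+ *   myPart.start();
+ * }
+ *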
+ */
+
+
+var Phrase = function Phrase(name, callback, sequence) {
+ looper_classCallCheck(this, Phrase);
+
+ this.phraseStep = 0;
+ this.name = name;
+ this.callback = callback;
+ /**
+ * Array of values to pass into the callback
+ * at each step of the phrase. Depending on the callback
+ * function's requirements, these values may be numbers,
+ * strings, or an object with multiple parameters.
+ * Zero (0) indicates a rest.
+ *
+ * @property {Array} sequence
+ */
+
+ this.sequence = sequence;
+};
+/**
+ * A p5.Part plays back one or more p5.Phrases. Instantiate a part
+ * with steps and tatums. By default, each step represents a 1/16th note.
+ *
+ * See p5.Phrase for more about musical timing.
+ *
+ * @class p5.Part
+ * @constructor
+ * @param {Number} [steps] Steps in the part
+ * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quarter note (default is 1/16, a sixteenth note)
+ * @example
+ *
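+ * A looping sketch; illustrative, not from the original docs
+ * (it plays two phrases on a p5.MonoSynth):
+ *
+ * let synth, part;
+ *
+ * function setup() {
+ *   let cnv = createCanvas(100, 100);
+ *   cnv.mousePressed(togglePart);
+ *   text('tap to start/stop', 10, 20);
+ *
+ *   synth = new p5.MonoSynth();
+ *   part = new p5.Part(8, 1/16); // 8 steps, 1/16th notes
+ *   part.addPhrase('low', playNote, [60, 0, 0, 0, 64, 0, 0, 0]);
+ *   part.addPhrase('high', playNote, [0, 0, 67, 0, 0, 0, 72, 0]);
+ *   part.setBPM(80);
+ * }
+ *
+ * function playNote(time, midiNote) {
+ *   synth.play(midiToFreq(midiNote), 0.5, time);
+ * }
+ *
+ * function togglePart() {
+ *   userStartAudio();
+ *   if (part.isPlaying) { part.stop(); } else { part.loop(); }
+ * }
+ *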
+ */
+
+
+var looper_Part =
+function () {
+ function Part(steps, bLength) {
+ looper_classCallCheck(this, Part);
+
+ this.length = steps || 0;
+
+ this.partStep = 0;
+ this.phrases = [];
+ this.isPlaying = false;
+ this.noLoop();
+ this.tatums = bLength || 0.0625;
+
+ this.metro = new metro();
+
+ this.metro._init();
+
+ this.metro.beatLength(this.tatums);
+ this.metro.setBPM(BPM);
+ main.parts.push(this);
+
+ this.callback = function () {};
+ }
+ /**
+ * Set the tempo of this part, in Beats Per Minute.
+ *
+ * @method setBPM
+ * @for p5.Part
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} [rampTime] Seconds from now
+ */
+
+
+ looper_createClass(Part, [{
+ key: "setBPM",
+ value: function setBPM(tempo, rampTime) {
+ this.metro.setBPM(tempo, rampTime);
+ }
+ /**
+ * Returns the tempo, in Beats Per Minute, of this part.
+ *
+ * @method getBPM
+ * @for p5.Part
+ * @return {Number}
+ */
+
+ }, {
+ key: "getBPM",
+ value: function getBPM() {
+ return this.metro.getBPM();
+ }
+ /**
+ * Start playback of this part. It will play
+ * through all of its phrases at a speed
+ * determined by setBPM.
+ *
+ * @method start
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
+ */
+
+ }, {
+ key: "start",
+ value: function start(time) {
+ if (!this.isPlaying) {
+ this.isPlaying = true;
+ this.metro.resetSync(this);
+ var t = time || 0;
+ this.metro.start(t);
+ }
+ }
+ /**
+ * Loop playback of this part. It will begin
+ * looping through all of its phrases at a speed
+ * determined by setBPM.
+ *
+ * @method loop
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
+ */
+
+ }, {
+ key: "loop",
+ value: function loop(time) {
+ this.looping = true;
+
+ this.onended = function () {
+ this.partStep = 0;
+ };
+
+ var t = time || 0;
+ this.start(t);
+ }
+ /**
+ * Tell the part to stop looping.
+ *
+ * @method noLoop
+ * @for p5.Part
+ */
+
+ }, {
+ key: "noLoop",
+ value: function noLoop() {
+ this.looping = false;
+
+ this.onended = function () {
+ this.stop();
+ };
+ }
+ /**
+ * Stop the part and cue it to step 0. Playback will resume from the beginning of the Part when it is played again.
+ *
+ * @method stop
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
+ */
+
+ }, {
+ key: "stop",
+ value: function stop(time) {
+ this.partStep = 0;
+ this.pause(time);
+ }
+ /**
+ * Pause the part. Playback will resume
+ * from the current step.
+ *
+ * @method pause
+ * @for p5.Part
+ * @param {Number} time seconds from now
+ */
+
+ }, {
+ key: "pause",
+ value: function pause(time) {
+ this.isPlaying = false;
+ var t = time || 0;
+ this.metro.stop(t);
+ }
+ /**
+ * Add a p5.Phrase to this Part.
+ *
+ * @method addPhrase
+ * @for p5.Part
+ * @param {p5.Phrase} phrase reference to a p5.Phrase
+ */
+
+ }, {
+ key: "addPhrase",
+ value: function addPhrase(name, callback, array) {
+ var p;
+
+ if (arguments.length === 3) {
+ p = new Phrase(name, callback, array);
+ } else if (arguments[0] instanceof Phrase) {
+ p = arguments[0];
+ } else {
+ throw new Error('invalid input. addPhrase accepts name, callback, array or a p5.Phrase');
+ }
+
+ this.phrases.push(p);
+
+ if (p.sequence.length > this.length) {
+ this.length = p.sequence.length;
+ }
+ }
+ /**
+ * Remove a phrase from this part, based on the name it was
+ * given when it was created.
+ *
+ * @method removePhrase
+ * @for p5.Part
+ * @param {String} phraseName
+ */
+
+ }, {
+ key: "removePhrase",
+ value: function removePhrase(name) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ this.phrases.splice(i, 1);
+ }
+ }
+ }
+ /**
+ * Get a phrase from this part, based on the name it was
+ * given when it was created. Now you can modify its array.
+ *
+ * @method getPhrase
+ * @for p5.Part
+ * @param {String} phraseName
+ */
+
+ }, {
+ key: "getPhrase",
+ value: function getPhrase(name) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ return this.phrases[i];
+ }
+ }
+ }
+ /**
+ * Find all sequences with the specified name, and replace their patterns with the specified array.
+ *
+ * @method replaceSequence
+ * @for p5.Part
+ * @param {String} phraseName
+ * @param {Array} sequence Array of values to pass into the callback
+ * at each step of the phrase.
+ */
+
+ }, {
+ key: "replaceSequence",
+ value: function replaceSequence(name, array) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ this.phrases[i].sequence = array;
+ }
+ }
+ }
+ }, {
+ key: "incrementStep",
+ value: function incrementStep(time) {
+ if (this.partStep < this.length - 1) {
+ this.callback(time);
+ this.partStep += 1;
+ } else {
+ if (!this.looping && this.partStep === this.length - 1) {
+ this.onended();
+ }
+ }
+ }
+ /**
+ * Set the function that will be called at every step. This will clear the previous function.
+ *
+ * @method onStep
+ * @for p5.Part
+ * @param {Function} callback The name of the callback
+ * you want to fire
+ * on every beat/tatum.
+ */
+
+ }, {
+ key: "onStep",
+ value: function onStep(callback) {
+ this.callback = callback;
+ }
+ }]);
+
+ return Part;
+}();
+
+/**
+ * A Score consists of a series of Parts. The parts will
+ * be played back in order. For example, you could have an
+ * A part, a B part, and a C part, and play them back in this order
+ * new p5.Score(a, a, b, a, c)
+ *
+ * @class p5.Score
+ * @constructor
+ * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.
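+ *
+ * @example
+ * An illustrative fragment (assumes partA and partB are p5.Parts
+ * that already contain phrases):
+ *
+ * let score = new p5.Score(partA, partA, partB);
+ * score.loop(); // play A, A, B, then repeat
+ * // score.noLoop() will stop after the current pass completes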
+ */
+
+
+var Score =
+function () {
+ function Score() {
+ looper_classCallCheck(this, Score);
+
+ this.parts = [];
+ this.currentPart = 0; // index of the part that is currently playing
+ var thisScore = this;
+
+ for (var i in arguments) {
+ this.parts[i] = arguments[i];
+ this.parts[i].nextPart = this.parts[Number(i) + 1]; // for...in keys are strings
+
+ this.parts[i].onended = function () {
+ thisScore.resetPart(i);
+ playNextPart(thisScore);
+ };
+ }
+
+ this.looping = false;
+ }
+
+ looper_createClass(Score, [{
+ key: "onended",
+ value: function onended() {
+ if (this.looping) {
+ this.parts[0].start();
+ } else {
+ this.parts[this.parts.length - 1].onended = function () {
+ this.stop();
+ this.resetParts();
+ };
+ }
+
+ this.currentPart = 0;
+ }
+ /**
+ * Start playback of the score.
+ *
+ * @method start
+ * @for p5.Score
+ */
+
+ }, {
+ key: "start",
+ value: function start() {
+ this.parts[this.currentPart].start();
+ this.scoreStep = 0;
+ }
+ /**
+ * Stop playback of the score.
+ *
+ * @method stop
+ * @for p5.Score
+ */
+
+ }, {
+ key: "stop",
+ value: function stop() {
+ this.parts[this.currentPart].stop();
+ this.currentPart = 0;
+ this.scoreStep = 0;
+ }
+ /**
+ * Pause playback of the score.
+ *
+ * @method pause
+ * @for p5.Score
+ */
+
+ }, {
+ key: "pause",
+ value: function pause() {
+ this.parts[this.currentPart].stop();
+ }
+ /**
+ * Loop playback of the score.
+ *
+ * @method loop
+ * @for p5.Score
+ */
+
+ }, {
+ key: "loop",
+ value: function loop() {
+ this.looping = true;
+ this.start();
+ }
+ /**
+ * Stop looping playback of the score. If it
+ * is currently playing, this will go into effect
+ * after the current round of playback completes.
+ *
+ * @method noLoop
+ * @for p5.Score
+ */
+
+ }, {
+ key: "noLoop",
+ value: function noLoop() {
+ this.looping = false;
+ }
+ }, {
+ key: "resetParts",
+ value: function resetParts() {
+ var self = this;
+ this.parts.forEach(function (part, i) {
+ self.resetPart(i);
+ });
+ }
+ }, {
+ key: "resetPart",
+ value: function resetPart(i) {
+ this.parts[i].stop();
+ this.parts[i].partStep = 0;
+
+ for (var p in this.parts[i].phrases) {
+ if (this.parts[i]) {
+ this.parts[i].phrases[p].phraseStep = 0;
+ }
+ }
+ }
+ /**
+ * Set the tempo for all parts in the score
+ *
+ * @method setBPM
+ * @for p5.Score
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} rampTime Seconds from now
+ */
+
+ }, {
+ key: "setBPM",
+ value: function setBPM(bpm, rampTime) {
+ for (var i in this.parts) {
+ if (this.parts[i]) {
+ this.parts[i].setBPM(bpm, rampTime);
+ }
+ }
+ }
+ }]);
+
+ return Score;
+}();
+
+function playNextPart(aScore) {
+ aScore.currentPart++;
+
+ if (aScore.currentPart >= aScore.parts.length) {
+ aScore.scoreStep = 0;
+ aScore.onended();
+ } else {
+ aScore.scoreStep = 0;
+ aScore.parts[aScore.currentPart - 1].stop();
+ aScore.parts[aScore.currentPart].start();
+ }
+}
+
+
+function soundLoop_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function soundLoop_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function soundLoop_createClass(Constructor, protoProps, staticProps) { if (protoProps) soundLoop_defineProperties(Constructor.prototype, protoProps); if (staticProps) soundLoop_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+/**
+ * SoundLoop
+ *
+ * @class p5.SoundLoop
+ * @constructor
+ *
+ * @param {Function} callback this function will be called on each iteration of the loop
+ * @param {Number|String} [interval] amount of time (if a number) or beats (if a string, following Tone.Time convention) for each iteration of the loop. Defaults to 1 second.
+ *
+ * @example
+ *
+ * let synth, soundLoop;
+ * let notePattern = [60, 62, 64, 67, 69, 72];
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * colorMode(HSB);
+ * background(0, 0, 86);
+ * text('tap to start/stop', 10, 20);
+ *
+ * //the looper's callback is passed the timeFromNow
+ * //this value should be used as a reference point from
+ * //which to schedule sounds
+ * let intervalInSeconds = 0.2;
+ * soundLoop = new p5.SoundLoop(onSoundLoop, intervalInSeconds);
+ *
+ * synth = new p5.MonoSynth();
+ * }
+ *
+ * function canvasPressed() {
+ * // ensure audio is enabled
+ * userStartAudio();
+ *
+ * if (soundLoop.isPlaying) {
+ * soundLoop.stop();
+ * } else {
+ * // start the loop
+ * soundLoop.start();
+ * }
+ * }
+ *
+ * function onSoundLoop(timeFromNow) {
+ * let noteIndex = (soundLoop.iterations - 1) % notePattern.length;
+ * let note = midiToFreq(notePattern[noteIndex]);
+ * synth.play(note, 0.5, timeFromNow);
+ * background(noteIndex * 360 / notePattern.length, 50, 100);
+ * }
+ *
+ */
+
+var soundLoop_SoundLoop =
+function () {
+ function SoundLoop(callback, interval) {
+ soundLoop_classCallCheck(this, SoundLoop);
+
+ /**
+ * Getters and Setters. Setting any parameter will result in a change in the clock's
+ * frequency that will be reflected after the next callback.
+ * Beats per minute (defaults to 60).
+ * @property {Number} bpm
+ * @for p5.SoundLoop
+ */
+ Object.defineProperty(this, 'bpm', {
+ get: function get() {
+ return this._bpm;
+ },
+ set: function set(bpm) {
+ if (!this.musicalTimeMode) {
+ console.warn('Changing the BPM in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+ }
+
+ this._bpm = bpm;
+
+ this._update();
+ }
+ });
+ /**
+ * number of quarter notes in a measure (defaults to 4)
+ * @property {Number} timeSignature
+ * @for p5.SoundLoop
+ */
+
+ Object.defineProperty(this, 'timeSignature', {
+ get: function get() {
+ return this._timeSignature;
+ },
+ set: function set(timeSig) {
+ if (!this.musicalTimeMode) {
+ console.warn('Changing the timeSignature in "seconds" mode has no effect. ' + 'The timeSignature is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+ }
+
+ this._timeSignature = timeSig;
+
+ this._update();
+ }
+ });
+ /**
+ * length of the loop's interval
+ * @property {Number|String} interval
+ * @for p5.SoundLoop
+ */
+
+ Object.defineProperty(this, 'interval', {
+ get: function get() {
+ return this._interval;
+ },
+ set: function set(interval) {
+ this.musicalTimeMode = typeof interval === 'number' ? false : true;
+ this._interval = interval;
+
+ this._update();
+ }
+ });
+ /**
+ * how many times the callback has been called so far
+ * @property {Number} iterations
+ * @for p5.SoundLoop
+ * @readonly
+ */
+
+ Object.defineProperty(this, 'iterations', {
+ get: function get() {
+ return this.clock.ticks;
+ }
+ });
+ this.callback = callback;
+ /**
+ * musicalTimeMode uses Tone.Time convention
+ * true if string, false if number
+ * @property {Boolean} musicalTimeMode
+ */
+
+ this._interval = interval || 1;
+ this.musicalTimeMode = typeof this._interval === 'number' ? false : true;
+ /**
+ * musicalTimeMode variables
+ * modify these only when the interval is specified in musicalTime format as a string
+ */
+
+ this._timeSignature = 4;
+ this._bpm = 60;
+ this.isPlaying = false;
+ /**
+ * Set a limit to the number of loops to play. Defaults to Infinity.
+ * @property {Number} maxIterations
+ */
+
+ this.maxIterations = Infinity;
+ var self = this;
+ this.clock = new Clock_default.a({
+ callback: function callback(time) {
+ var timeFromNow = time - main.audiocontext.currentTime;
+ /**
+ * Do not initiate the callback if timeFromNow is < 0
+ * This usually occurs for a few milliseconds when the page
+ * is not fully loaded
+ *
+ * The callback should only be called until maxIterations is reached
+ */
+
+ if (timeFromNow > 0 && self.iterations <= self.maxIterations) {
+ self.callback(timeFromNow);
+ }
+ },
+ frequency: this._calcFreq()
+ });
+ }
+ /**
+ * Start the loop
+ * @method start
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a starting time
+ */
+
+
+ soundLoop_createClass(SoundLoop, [{
+ key: "start",
+ value: function start(timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = main.audiocontext.currentTime;
+
+ if (!this.isPlaying) {
+ this.clock.start(now + t);
+ this.isPlaying = true;
+ }
+ }
+ /**
+ * Stop the loop
+ * @method stop
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a stopping time
+ */
+
+ }, {
+ key: "stop",
+ value: function stop(timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = main.audiocontext.currentTime;
+
+ if (this.isPlaying) {
+ this.clock.stop(now + t);
+ this.isPlaying = false;
+ }
+ }
+ /**
+ * Pause the loop
+ * @method pause
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a pausing time
+ */
+
+ }, {
+ key: "pause",
+ value: function pause(timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = main.audiocontext.currentTime;
+
+ if (this.isPlaying) {
+ this.clock.pause(now + t);
+ this.isPlaying = false;
+ }
+ }
+ /**
+ * Synchronize loops. Use this method to start two or more loops in synchronization,
+ * or to start a loop in synchronization with a loop that is already playing.
+ * This method will schedule the implicit loop in sync with the explicit master loop,
+ * i.e. loopToStart.syncedStart(loopToSyncWith).
+ *
+ * @method syncedStart
+ * @for p5.SoundLoop
+ * @param {Object} otherLoop a p5.SoundLoop to sync with
+ * @param {Number} [timeFromNow] Start the loops in sync after timeFromNow seconds
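+ * @example
+ * 
+ * // A minimal sketch (assumes bassLoop and melodyLoop were created
+ * // with callbacks as in the p5.SoundLoop example above).
+ * bassLoop.start();
+ * // later, start melodyLoop in sync with bassLoop's next tick:
+ * melodyLoop.syncedStart(bassLoop);
+ * 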
+ */
+
+ }, {
+ key: "syncedStart",
+ value: function syncedStart(otherLoop, timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = main.audiocontext.currentTime;
+
+ if (!otherLoop.isPlaying) {
+ otherLoop.clock.start(now + t);
+ otherLoop.isPlaying = true;
+ this.clock.start(now + t);
+ this.isPlaying = true;
+ } else if (otherLoop.isPlaying) {
+ var time = otherLoop.clock._nextTick - main.audiocontext.currentTime;
+ this.clock.start(now + time);
+ this.isPlaying = true;
+ }
+ }
+ /**
+ * Updates frequency value, reflected in next callback
+ * @private
+ * @for p5.SoundLoop
+ * @method _update
+ */
+
+ }, {
+ key: "_update",
+ value: function _update() {
+ this.clock.frequency.value = this._calcFreq();
+ }
+ /**
+ * Calculate the frequency of the clock's callback based on bpm, interval, and timesignature
+ * @private
+ * @for p5.SoundLoop
+ * @method _calcFreq
+ * @return {Number} new clock frequency value
+ */
+
+ }, {
+ key: "_calcFreq",
+ value: function _calcFreq() {
+ if (typeof this._interval === 'number') {
+ this.musicalTimeMode = false;
+ return 1 / this._interval;
+ }
+ else if (typeof this._interval === 'string') {
+ this.musicalTimeMode = true;
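+ // e.g. bpm = 60, interval = '4n', timeSignature = 4:
+ // (60 / 60) / 1 * (4 / 4) = 1 Hz, one quarter-note callback per second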
+ return this._bpm / 60 / this._convertNotation(this._interval) * (this._timeSignature / 4);
+ }
+ }
+ /**
+ * Convert notation from musical time format to seconds
+ * Uses Tone.Time convention
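+ * e.g. '1m' is one measure, '4n' a quarter note, '8n' an eighth note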
+ * @private
+ * @for p5.SoundLoop
+ * @method _convertNotation
+ * @param {String} value value to be converted
+ * @return {Number} converted value in seconds
+ */
+
+ }, {
+ key: "_convertNotation",
+ value: function _convertNotation(value) {
+ var type = value.slice(-1);
+ value = Number(value.slice(0, -1));
+
+ switch (type) {
+ case 'm':
+ return this._measure(value);
+
+ case 'n':
+ return this._note(value);
+
+ default:
+ console.warn('Specified interval is not formatted correctly. See Tone.js ' + 'timing reference for more info: https://github.com/Tonejs/Tone.js/wiki/Time');
+ }
+ }
+ /**
+ * Helper conversion methods of measure and note
+ * @private
+ * @for p5.SoundLoop
+ * @method _measure
+ */
+
+ }, {
+ key: "_measure",
+ value: function _measure(value) {
+ return value * this._timeSignature;
+ }
+ /**
+ * @private
+ * @method _note
+ * @for p5.SoundLoop
+ */
+
+ }, {
+ key: "_note",
+ value: function _note(value) {
+ return this._timeSignature / value;
+ }
+ }]);
+
+ return SoundLoop;
+}();
+
+ var soundLoop = (soundLoop_SoundLoop);
+function compressor_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { compressor_typeof = function _typeof(obj) { return typeof obj; }; } else { compressor_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return compressor_typeof(obj); }
+
+function compressor_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function compressor_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function compressor_createClass(Constructor, protoProps, staticProps) { if (protoProps) compressor_defineProperties(Constructor.prototype, protoProps); if (staticProps) compressor_defineProperties(Constructor, staticProps); return Constructor; }
+
+function compressor_possibleConstructorReturn(self, call) { if (call && (compressor_typeof(call) === "object" || typeof call === "function")) { return call; } return compressor_assertThisInitialized(self); }
+
+function compressor_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function compressor_get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { compressor_get = Reflect.get; } else { compressor_get = function _get(target, property, receiver) { var base = compressor_superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return compressor_get(target, property, receiver || target); }
+
+function compressor_superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = compressor_getPrototypeOf(object); if (object === null) break; } return object; }
+
+function compressor_getPrototypeOf(o) { compressor_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return compressor_getPrototypeOf(o); }
+
+function compressor_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) compressor_setPrototypeOf(subClass, superClass); }
+
+function compressor_setPrototypeOf(o, p) { compressor_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return compressor_setPrototypeOf(o, p); }
+
+
+/**
+ * Compressor is an audio effect class that performs dynamics compression
+ * on an audio input source. This is a very commonly used technique in music
+ * and sound production. Compression creates an overall louder, richer,
+ * and fuller sound by lowering the volume of loud sounds and raising that of soft ones.
+ * Compression can be used to avoid clipping (sound distortion due to
+ * peaks in volume) and is especially useful when many sounds are played
+ * at once. Compression can be used on individual sound sources in addition
+ * to the main output.
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.Compressor
+ * @constructor
+ * @extends p5.Effect
+ *
+ *
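+ * @example
+ * 
+ * // A minimal sketch (the sound file path is an illustrative assumption):
+ * // route a sound through a compressor to tame its peaks.
+ * let sound, compressor;
+ * 
+ * function preload() {
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * 
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * text('tap to play', 10, 20);
+ * 
+ * compressor = new p5.Compressor();
+ * sound.disconnect(); // so we only hear the compressed signal
+ * // attack, knee, ratio, threshold, release
+ * compressor.process(sound, 0.005, 6, 10, -30, 0.25);
+ * }
+ * 
+ * function canvasPressed() {
+ * userStartAudio();
+ * sound.play();
+ * }
+ * 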
+ */
+
+var Compressor =
+function (_Effect) {
+ compressor_inherits(Compressor, _Effect);
+
+ function Compressor() {
+ var _this;
+
+ compressor_classCallCheck(this, Compressor);
+
+ _this = compressor_possibleConstructorReturn(this, compressor_getPrototypeOf(Compressor).call(this));
+ /**
+ *
+ * The p5.Compressor is built with a Web Audio Dynamics Compressor Node
+ *
+ * @property {AudioNode} compressor
+ */
+
+ _this.compressor = _this.ac.createDynamicsCompressor();
+
+ _this.input.connect(_this.compressor);
+
+ _this.compressor.connect(_this.wet);
+
+ return _this;
+ }
+ /**
+ * Performs the same function as .connect, but also accepts
+ * optional parameters to set the compressor's audio params.
+ * @method process
+ * @for p5.Compressor
+ *
+ * @param {Object} src Sound source to be connected
+ *
+ * @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} [knee] A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} [threshold] The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
+ */
+
+
+ compressor_createClass(Compressor, [{
+ key: "process",
+ value: function process(src, attack, knee, ratio, threshold, release) {
+ src.connect(this.input);
+ this.set(attack, knee, ratio, threshold, release);
+ }
+ /**
+ * Set the parameters of a compressor.
+ * @method set
+ * @for p5.Compressor
+ * @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} knee A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} ratio The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} threshold The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
+ */
+
+ }, {
+ key: "set",
+ value: function set(attack, knee, ratio, threshold, release) {
+ if (typeof attack !== 'undefined') {
+ this.attack(attack);
+ }
+
+ if (typeof knee !== 'undefined') {
+ this.knee(knee);
+ }
+
+ if (typeof ratio !== 'undefined') {
+ this.ratio(ratio);
+ }
+
+ if (typeof threshold !== 'undefined') {
+ this.threshold(threshold);
+ }
+
+ if (typeof release !== 'undefined') {
+ this.release(release);
+ }
+ }
+ /**
+ * Get current attack or set value w/ time ramp
+ *
+ *
+ * @method attack
+ * @for p5.Compressor
+ * @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+ }, {
+ key: "attack",
+ value: function attack(_attack, time) {
+ var t = time || 0;
+
+ if (typeof _attack === 'number') {
+ this.compressor.attack.value = _attack;
+ this.compressor.attack.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.attack.linearRampToValueAtTime(_attack, this.ac.currentTime + 0.02 + t);
+ } else if (typeof _attack !== 'undefined') {
+ _attack.connect(this.compressor.attack);
+ }
+
+ return this.compressor.attack.value;
+ }
+ /**
+ * Get current knee or set value w/ time ramp
+ *
+ * @method knee
+ * @for p5.Compressor
+ * @param {Number} [knee] A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+ }, {
+ key: "knee",
+ value: function knee(_knee, time) {
+ var t = time || 0;
+
+ if (typeof _knee === 'number') {
+ this.compressor.knee.value = _knee;
+ this.compressor.knee.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.knee.linearRampToValueAtTime(_knee, this.ac.currentTime + 0.02 + t);
+ } else if (typeof _knee !== 'undefined') {
+ _knee.connect(this.compressor.knee);
+ }
+
+ return this.compressor.knee.value;
+ }
+ /**
+ * Get current ratio or set value w/ time ramp
+ * @method ratio
+ * @for p5.Compressor
+ * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+ }, {
+ key: "ratio",
+ value: function ratio(_ratio, time) {
+ var t = time || 0;
+
+ if (typeof _ratio === 'number') {
+ this.compressor.ratio.value = _ratio;
+ this.compressor.ratio.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.ratio.linearRampToValueAtTime(_ratio, this.ac.currentTime + 0.02 + t);
+ } else if (typeof _ratio !== 'undefined') {
+ _ratio.connect(this.compressor.ratio);
+ }
+
+ return this.compressor.ratio.value;
+ }
+ /**
+ * Get current threshold or set value w/ time ramp
+ * @method threshold
+ * @for p5.Compressor
+ * @param {Number} [threshold] The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+ }, {
+ key: "threshold",
+ value: function threshold(_threshold, time) {
+ var t = time || 0;
+
+ if (typeof _threshold === 'number') {
+ this.compressor.threshold.value = _threshold;
+ this.compressor.threshold.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.threshold.linearRampToValueAtTime(_threshold, this.ac.currentTime + 0.02 + t);
+ } else if (typeof _threshold !== 'undefined') {
+ _threshold.connect(this.compressor.threshold);
+ }
+
+ return this.compressor.threshold.value;
+ }
+ /**
+ * Get current release or set value w/ time ramp
+ * @method release
+ * @for p5.Compressor
+ * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
+ *
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+ }, {
+ key: "release",
+ value: function release(_release, time) {
+ var t = time || 0;
+
+ if (typeof _release === 'number') {
+ this.compressor.release.value = _release;
+ this.compressor.release.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.release.linearRampToValueAtTime(_release, this.ac.currentTime + 0.02 + t);
+ } else if (typeof _release !== 'undefined') {
+ _release.connect(this.compressor.release);
+ }
+
+ return this.compressor.release.value;
+ }
+ /**
+ * Return the current reduction value
+ *
+ * @method reduction
+ * @for p5.Compressor
+ * @return {Number} Value of the amount of gain reduction that is applied to the signal
+ */
+
+ }, {
+ key: "reduction",
+ value: function reduction() {
+ return this.compressor.reduction.value;
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ compressor_get(compressor_getPrototypeOf(Compressor.prototype), "dispose", this).call(this);
+
+ if (this.compressor) {
+ this.compressor.disconnect();
+ delete this.compressor;
+ }
+ }
+ }]);
+
+ return Compressor;
+}(effect);
+
+ var compressor = (Compressor);
+function peakDetect_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function peakDetect_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function peakDetect_createClass(Constructor, protoProps, staticProps) { if (protoProps) peakDetect_defineProperties(Constructor.prototype, protoProps); if (staticProps) peakDetect_defineProperties(Constructor, staticProps); return Constructor; }
+
+/**
+ *
+ * PeakDetect works in conjunction with p5.FFT to
+ * look for onsets in some or all of the frequency spectrum.
+ *
+ *
+ * To use p5.PeakDetect, call update in the draw loop
+ * and pass in a p5.FFT object.
+ *
+ *
+ * You can listen for a specific part of the frequency spectrum by
+ * setting the range between freq1 and freq2.
+ *
+ *
+ *
+ * threshold is the threshold for detecting a peak,
+ * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
+ * as 1.0.
+ *
+ *
+ * The update method is meant to be run in the draw loop, and
+ * frames determines how many loops must pass before
+ * another peak can be detected.
+ * For example, if the frameRate() = 60, you could detect the beat of a
+ * 120 beat-per-minute song with this equation:
+ * framesPerPeak = 60 / (estimatedBPM / 60);
+ *
+ *
+ *
+ * Based on an example contributed by @b2renger, and a simple beat detection
+ * explanation by Felix Turner.
+ *
+ *
+ * @class p5.PeakDetect
+ * @constructor
+ * @param {Number} [freq1] lowFrequency - defaults to 20Hz
+ * @param {Number} [freq2] highFrequency - defaults to 20000 Hz
+ * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1
+ * scaled logarithmically where 0.1 is 1/2 the loudness
+ * of 1.0. Defaults to 0.35.
+ * @param {Number} [framesPerPeak] Defaults to 20.
+ * @example
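+ * 
+ * // A minimal sketch (the sound file path is an illustrative assumption):
+ * // flash the background when a peak is detected in the bass range.
+ * let sound, fft, peakDetect;
+ * 
+ * function preload() {
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * 
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * fft = new p5.FFT();
+ * peakDetect = new p5.PeakDetect(20, 120, 0.35, 20);
+ * }
+ * 
+ * function draw() {
+ * background(220);
+ * fft.analyze();
+ * peakDetect.update(fft);
+ * if (peakDetect.isDetected) {
+ * background(0);
+ * }
+ * }
+ * 
+ * function canvasPressed() {
+ * userStartAudio();
+ * sound.loop();
+ * }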
+ *
+ */
+
+ }, {
+ key: "onPeak",
+ value: function onPeak(callback, val) {
+ var self = this;
+
+ self._onPeak = function () {
+ callback(self.energy, val);
+ };
+ }
+ }]);
+
+ return PeakDetect;
+}();
+
+ var peakDetect = (PeakDetect);
+function soundRecorder_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function soundRecorder_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function soundRecorder_createClass(Constructor, protoProps, staticProps) { if (protoProps) soundRecorder_defineProperties(Constructor.prototype, protoProps); if (staticProps) soundRecorder_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+
+var soundRecorder_ac = main.audiocontext;
+/**
+ *
+ * Record sounds for playback and/or to save as a .wav file.
+ * The p5.SoundRecorder records all sound output from your sketch,
+ * or can be assigned a specific source with setInput().
+ *
+ * The record() method accepts a p5.SoundFile as a parameter.
+ * When recording is stopped (either after the given amount of time,
+ * or with the stop() method), the p5.SoundRecorder will send its
+ * recording to that p5.SoundFile for playback.
+ *
+ * @example
+ * 
+ * let mic, recorder, soundFile;
+ * let state = 0;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ *
+ * // create an audio in
+ * mic = new p5.AudioIn();
+ *
+ * // prompts user to enable their browser mic
+ * mic.start();
+ *
+ * // create a sound recorder
+ * recorder = new p5.SoundRecorder();
+ *
+ * // connect the mic to the recorder
+ * recorder.setInput(mic);
+ *
+ * // this sound file will be used to
+ * // playback & save the recording
+ * soundFile = new p5.SoundFile();
+ *
+ * text('tap to record', width/2, height/2);
+ * }
+ *
+ * function canvasPressed() {
+ * // ensure audio is enabled
+ * userStartAudio();
+ *
+ * // make sure user enabled the mic
+ * if (state === 0 && mic.enabled) {
+ *
+ * // record to our p5.SoundFile
+ * recorder.record(soundFile);
+ *
+ * background(255,0,0);
+ * text('Recording!', width/2, height/2);
+ * state++;
+ * }
+ * else if (state === 1) {
+ * background(0,255,0);
+ *
+ * // stop recorder and
+ * // send result to soundFile
+ * recorder.stop();
+ *
+ * text('Done! Tap to play and download', width/2, height/2, width - 20);
+ * state++;
+ * }
+ *
+ * else if (state === 2) {
+ * soundFile.play(); // play the result!
+ * save(soundFile, 'mySound.wav');
+ * state++;
+ * }
+ * }
+ *
+ */
+
+var soundRecorder_SoundRecorder =
+function () {
+ function SoundRecorder() {
+ soundRecorder_classCallCheck(this, SoundRecorder);
+
+ this.input = soundRecorder_ac.createGain();
+ this.output = soundRecorder_ac.createGain();
+ this._inputChannels = 2;
+ this._outputChannels = 2;
+
+ var workletBufferSize = safeBufferSize(1024);
+ this._workletNode = new AudioWorkletNode(soundRecorder_ac, processorNames_default.a.recorderProcessor, {
+ outputChannelCount: [this._outputChannels],
+ processorOptions: {
+ numInputChannels: this._inputChannels,
+ bufferSize: workletBufferSize
+ }
+ });
+
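+ // the recorder worklet posts its captured left/right channel data
+ // back through this message port once recording stops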
+ this._workletNode.port.onmessage = function (event) {
+ if (event.data.name === 'buffers') {
+ var buffers = [new Float32Array(event.data.leftBuffer), new Float32Array(event.data.rightBuffer)];
+
+ this._callback(buffers);
+ }
+ }.bind(this);
+ /**
+ * callback invoked when the recording is over
+ * @private
+ * @type Function(Float32Array)
+ */
+
+
+ this._callback = function () {};
+
+
+ this._workletNode.connect(p5.soundOut._silentNode);
+
+ this.setInput();
+
+ main.soundArray.push(this);
+ }
+ /**
+ * Connect a specific device to the p5.SoundRecorder.
+ * If no parameter is given, p5.SoundRecorder will record
+ * all audible p5.sound from your sketch.
+ *
+ * @method setInput
+ * @for p5.SoundRecorder
+ * @param {Object} [unit] p5.sound object or a web audio unit
+ * that outputs sound
+ */
+
+
+ soundRecorder_createClass(SoundRecorder, [{
+ key: "setInput",
+ value: function setInput(unit) {
+ this.input.disconnect();
+ this.input = null;
+ this.input = soundRecorder_ac.createGain();
+ this.input.connect(this._workletNode);
+ this.input.connect(this.output);
+
+ if (unit) {
+ unit.connect(this.input);
+ } else {
+ p5.soundOut.output.connect(this.input);
+ }
+ }
+ /**
+ * Start recording. To access the recording, provide
+ * a p5.SoundFile as the first parameter. The p5.SoundRecorder
+ * will send its recording to that p5.SoundFile for playback once
+ * recording is complete. Optional parameters include duration
+ * (in seconds) of the recording, and a callback function that
+ * will be called once the complete recording has been
+ * transferred to the p5.SoundFile.
+ *
+ * @method record
+ * @for p5.SoundRecorder
+ * @param {p5.SoundFile} soundFile p5.SoundFile
+ * @param {Number} [duration] Time (in seconds)
+ * @param {Function} [callback] The name of a function that will be
+ * called once the recording completes
+ */
+
+ }, {
+ key: "record",
+ value: function record(sFile, duration, callback) {
+ this._workletNode.port.postMessage({
+ name: 'start',
+ duration: duration
+ });
+
+ if (sFile && callback) {
+ this._callback = function (buffer) {
+ sFile.setBuffer(buffer);
+ callback();
+ };
+ } else if (sFile) {
+ this._callback = function (buffer) {
+ sFile.setBuffer(buffer);
+ };
+ }
+ }
+ /**
+ * Stop the recording. Once the recording is stopped,
+ * the results will be sent to the p5.SoundFile that
+ * was given to .record(), and if a callback function
+ * was provided to record, that function will be called.
+ *
+ * @method stop
+ * @for p5.SoundRecorder
+ */
+
+ }, {
+ key: "stop",
+ value: function stop() {
+ this._workletNode.port.postMessage({
+ name: 'stop'
+ });
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+
+ this._callback = function () {};
+
+ if (this.input) {
+ this.input.disconnect();
+ }
+
+ this.input = null;
+ this._workletNode = null;
+ }
+ }]);
+
+ return SoundRecorder;
+}();
+
+ var soundRecorder = (soundRecorder_SoundRecorder);
+function distortion_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { distortion_typeof = function _typeof(obj) { return typeof obj; }; } else { distortion_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return distortion_typeof(obj); }
+
+function distortion_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function distortion_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function distortion_createClass(Constructor, protoProps, staticProps) { if (protoProps) distortion_defineProperties(Constructor.prototype, protoProps); if (staticProps) distortion_defineProperties(Constructor, staticProps); return Constructor; }
+
+function distortion_possibleConstructorReturn(self, call) { if (call && (distortion_typeof(call) === "object" || typeof call === "function")) { return call; } return distortion_assertThisInitialized(self); }
+
+function distortion_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function distortion_get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { distortion_get = Reflect.get; } else { distortion_get = function _get(target, property, receiver) { var base = distortion_superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return distortion_get(target, property, receiver || target); }
+
+function distortion_superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = distortion_getPrototypeOf(object); if (object === null) break; } return object; }
+
+function distortion_getPrototypeOf(o) { distortion_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return distortion_getPrototypeOf(o); }
+
+function distortion_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) distortion_setPrototypeOf(subClass, superClass); }
+
+function distortion_setPrototypeOf(o, p) { distortion_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return distortion_setPrototypeOf(o, p); }
+
+
+
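+// Build a 44100-sample transfer curve for the WaveShaperNode below.
+// Larger `amount` values bend the curve harder, giving heavier distortion.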
+function makeDistortionCurve(amount) {
+ var k = typeof amount === 'number' ? amount : 50;
+ var numSamples = 44100;
+ var curve = new Float32Array(numSamples);
+ var deg = Math.PI / 180;
+ var i = 0;
+ var x;
+
+ for (; i < numSamples; ++i) {
+ x = i * 2 / numSamples - 1;
+ curve[i] = (3 + k) * x * 20 * deg / (Math.PI + k * Math.abs(x));
+ }
+
+ return curve;
+}
+/**
+ * A Distortion effect created with a Waveshaper Node,
+ * with an approach adapted from
+ * [Kevin Ennis](http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion)
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.Distortion
+ * @extends p5.Effect
+ * @constructor
+ * @param {Number} [amount=0.25] Unbounded distortion amount.
+ * Normal values range from 0-1.
+ * @param {String} [oversample='2x'] 'none', '2x', or '4x'.
+ *
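+ * @example
+ * 
+ * // A minimal sketch (the sound file path is an illustrative assumption):
+ * // run a sound through a waveshaper distortion.
+ * let sound, distortion;
+ * 
+ * function preload() {
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * 
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * text('tap to play', 10, 20);
+ * 
+ * distortion = new p5.Distortion(0.1, '2x');
+ * sound.disconnect(); // so we only hear the distorted signal
+ * distortion.process(sound);
+ * }
+ * 
+ * function canvasPressed() {
+ * userStartAudio();
+ * sound.play();
+ * }
+ * 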
+ */
+
+
+var Distortion =
+function (_Effect) {
+ distortion_inherits(Distortion, _Effect);
+
+ function Distortion(amount, oversample) {
+ var _this;
+
+ distortion_classCallCheck(this, Distortion);
+
+ _this = distortion_possibleConstructorReturn(this, distortion_getPrototypeOf(Distortion).call(this));
+
+ if (typeof amount === 'undefined') {
+ amount = 0.25;
+ }
+
+ if (typeof amount !== 'number') {
+ throw new Error('amount must be a number');
+ }
+
+ if (typeof oversample === 'undefined') {
+ oversample = '2x';
+ }
+
+ if (typeof oversample !== 'string') {
+ throw new Error('oversample must be a String');
+ }
+
+ var curveAmount = p5.prototype.map(amount, 0.0, 1.0, 0, 2000);
+ /**
+ * The p5.Distortion is built with a
+ *
+ * Web Audio WaveShaper Node.
+ *
+ * @property {AudioNode} WaveShaperNode
+ */
+
+ _this.waveShaperNode = _this.ac.createWaveShaper();
+ _this.amount = curveAmount;
+ _this.waveShaperNode.curve = makeDistortionCurve(curveAmount);
+ _this.waveShaperNode.oversample = oversample;
+
+ _this.input.connect(_this.waveShaperNode);
+
+ _this.waveShaperNode.connect(_this.wet);
+
+ return _this;
+ }
+ /**
+ * Process a sound source, optionally specify amount and oversample values.
+ *
+ * @method process
+ * @for p5.Distortion
+ * @param {Number} [amount=0.25] Unbounded distortion amount.
+ * Normal values range from 0-1.
+ * @param {String} [oversample='none'] 'none', '2x', or '4x'.
+ */
+
+
+ distortion_createClass(Distortion, [{
+ key: "process",
+ value: function process(src, amount, oversample) {
+ src.connect(this.input);
+ this.set(amount, oversample);
+ }
+ /**
+ * Set the amount and oversample of the waveshaper distortion.
+ *
+ * @method set
+ * @for p5.Distortion
+ * @param {Number} [amount=0.25] Unbounded distortion amount.
+ * Normal values range from 0-1.
+ * @param {String} [oversample='none'] 'none', '2x', or '4x'.
+ */
+
+ }, {
+ key: "set",
+ value: function set(amount, oversample) {
+ if (amount) {
+ var curveAmount = p5.prototype.map(amount, 0.0, 1.0, 0, 2000);
+ this.amount = curveAmount;
+ this.waveShaperNode.curve = makeDistortionCurve(curveAmount);
+ }
+
+ if (oversample) {
+ this.waveShaperNode.oversample = oversample;
+ }
+ }
+ /**
+ * Return the distortion amount, typically between 0-1.
+ *
+ * @method getAmount
+ * @for p5.Distortion
+ * @return {Number} Unbounded distortion amount.
+ * Normal values range from 0-1.
+ */
+
+ }, {
+ key: "getAmount",
+ value: function getAmount() {
+ return this.amount;
+ }
+ /**
+ * Return the oversampling.
+ *
+ * @method getOversample
+ * @for p5.Distortion
+ * @return {String} Oversample can either be 'none', '2x', or '4x'.
+ */
+
+ }, {
+ key: "getOversample",
+ value: function getOversample() {
+ return this.waveShaperNode.oversample;
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ distortion_get(distortion_getPrototypeOf(Distortion.prototype), "dispose", this).call(this);
+
+ if (this.waveShaperNode) {
+ this.waveShaperNode.disconnect();
+ this.waveShaperNode = null;
+ }
+ }
+ }]);
+
+ return Distortion;
+}(effect);
+
+ var distortion = (Distortion);
+function gain_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function gain_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function gain_createClass(Constructor, protoProps, staticProps) { if (protoProps) gain_defineProperties(Constructor.prototype, protoProps); if (staticProps) gain_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+/**
+ * A gain node is useful for setting the relative volume of a sound.
+ * It's typically used to build mixers.
+ *
+ * @class p5.Gain
+ * @constructor
+ * @example
+ *
+ *
+ * // load two soundfiles and crossfade between them
+ * let sound1,sound2;
+ * let sound1Gain, sound2Gain, mixGain;
+ * function preload(){
+ * soundFormats('ogg', 'mp3');
+ * sound1 = loadSound('assets/Damscray_-_Dancing_Tiger_01');
+ * sound2 = loadSound('assets/beat');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(startSound);
+ * // create a 'mix' gain bus to which we will connect both soundfiles
+ * mixGain = new p5.Gain();
+ * mixGain.connect();
+ * sound1.disconnect(); // disconnect from p5 output
+ * sound1Gain = new p5.Gain(); // setup a gain node
+ * sound1Gain.setInput(sound1); // connect the first sound to its input
+ * sound1Gain.connect(mixGain); // connect its output to the final mix bus
+ * sound2.disconnect();
+ * sound2Gain = new p5.Gain();
+ * sound2Gain.setInput(sound2);
+ * sound2Gain.connect(mixGain);
+ * }
+ * function startSound() {
+ * sound1.loop();
+ * sound2.loop();
+ * loop();
+ * }
+ * function mouseReleased() {
+ * sound1.stop();
+ * sound2.stop();
+ * }
+ * function draw(){
+ * background(220);
+ * textAlign(CENTER);
+ * textSize(11);
+ * fill(0);
+ * if (!sound1.isPlaying()) {
+ * text('tap and drag to play', width/2, height/2);
+ * return;
+ * }
+ * // map the horizontal position of the mouse to values usable for volume control of sound1
+ * var sound1Volume = constrain(map(mouseX,width,0,0,1), 0, 1);
+ * var sound2Volume = 1-sound1Volume;
+ * sound1Gain.amp(sound1Volume);
+ * sound2Gain.amp(sound2Volume);
+ * // map the vertical position of the mouse to values usable for output volume control
+ * var outputVolume = constrain(map(mouseY,height,0,0,1), 0, 1);
+ * mixGain.amp(outputVolume);
+ * text('output', width/2, height - outputVolume * height * 0.9);
+ * fill(255, 0, 255);
+ * textAlign(LEFT);
+ * text('sound1', 5, height - sound1Volume * height * 0.9);
+ * textAlign(RIGHT);
+ * text('sound2', width - 5, height - sound2Volume * height * 0.9);
+ * }
+ *
+ */
+
+var gain_Gain =
+function () {
+ function Gain() {
+ gain_classCallCheck(this, Gain);
+
+ this.ac = main.audiocontext;
+ this.input = this.ac.createGain();
+ this.output = this.ac.createGain();
+
+ this.input.gain.value = 0.5;
+ this.input.connect(this.output);
+
+ main.soundArray.push(this);
+ }
+ /**
+ * Connect a source to the gain node.
+ *
+ * @method setInput
+ * @for p5.Gain
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ */
+
+
+ gain_createClass(Gain, [{
+ key: "setInput",
+ value: function setInput(src) {
+ src.connect(this.input);
+ }
+ /**
+ * Send output to a p5.sound or web audio object
+ *
+ * @method connect
+ * @for p5.Gain
+ * @param {Object} unit
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ var u = unit || p5.soundOut.input;
+ this.output.connect(u.input ? u.input : u);
+ }
+ /**
+ * Disconnect all output.
+ *
+ * @method disconnect
+ * @for p5.Gain
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.output) {
+ this.output.disconnect();
+ }
+ }
+ /**
+ * Set the output level of the gain node.
+ *
+ * @method amp
+ * @for p5.Gain
+ * @param {Number} volume amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ */
+
+ }, {
+ key: "amp",
+ value: function amp(vol) {
+ var rampTime = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+ var tFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+ var now = main.audiocontext.currentTime;
+ var currentVol = this.output.gain.value;
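+ // hold the current level until tFromNow, then ramp linearly to the
+ // target volume over rampTime, so the fade begins at the scheduled time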
+ this.output.gain.cancelScheduledValues(now);
+ this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ var index = main.soundArray.indexOf(this);
+ main.soundArray.splice(index, 1);
+
+ if (this.output) {
+ this.output.disconnect();
+ delete this.output;
+ }
+
+ if (this.input) {
+ this.input.disconnect();
+ delete this.input;
+ }
+ }
+ }]);
+
+ return Gain;
+}();
+
+ var gain = (gain_Gain);
+function audioVoice_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function audioVoice_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function audioVoice_createClass(Constructor, protoProps, staticProps) { if (protoProps) audioVoice_defineProperties(Constructor.prototype, protoProps); if (staticProps) audioVoice_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+/**
+ * Base class for monophonic synthesizers. Any extensions of this class
+ * should follow the API and implement the methods below in order to
+ * remain compatible with p5.PolySynth.
+ *
+ * @class p5.AudioVoice
+ * @constructor
+ */
+
+var audioVoice_AudioVoice =
+function () {
+ function AudioVoice() {
+ audioVoice_classCallCheck(this, AudioVoice);
+
+ this.ac = main.audiocontext;
+ this.output = this.ac.createGain();
+ this.connect();
+ main.soundArray.push(this);
+ }
+
+ audioVoice_createClass(AudioVoice, [{
+ key: "play",
+ value: function play(note, velocity, secondsFromNow, susTime) {}
+ }, {
+ key: "triggerAttack",
+ value: function triggerAttack(note, velocity, secondsFromNow) {}
+ }, {
+ key: "triggerRelease",
+ value: function triggerRelease(secondsFromNow) {}
+ }, {
+ key: "amp",
+ value: function amp(vol, rampTime) {}
+ /**
+ * Connect to p5 objects or Web Audio Nodes
+ * @method connect
+ * @for p5.AudioVoice
+ * @param {Object} unit
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ var u = unit || main.input;
+ this.output.connect(u.input ? u.input : u);
+ }
+ /**
+ * Disconnect from soundOut
+ * @method disconnect
+ * @for p5.AudioVoice
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ this.output.disconnect();
+ }
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ if (this.output) {
+ this.output.disconnect();
+ delete this.output;
+ }
+ }
+ }]);
+
+ return AudioVoice;
+}();
+
+ var audioVoice_0 = (audioVoice_AudioVoice);
+function monosynth_typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { monosynth_typeof = function _typeof(obj) { return typeof obj; }; } else { monosynth_typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return monosynth_typeof(obj); }
+
+function monosynth_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function monosynth_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function monosynth_createClass(Constructor, protoProps, staticProps) { if (protoProps) monosynth_defineProperties(Constructor.prototype, protoProps); if (staticProps) monosynth_defineProperties(Constructor, staticProps); return Constructor; }
+
+function monosynth_possibleConstructorReturn(self, call) { if (call && (monosynth_typeof(call) === "object" || typeof call === "function")) { return call; } return monosynth_assertThisInitialized(self); }
+
+function monosynth_assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function monosynth_get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { monosynth_get = Reflect.get; } else { monosynth_get = function _get(target, property, receiver) { var base = monosynth_superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return monosynth_get(target, property, receiver || target); }
+
+function monosynth_superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = monosynth_getPrototypeOf(object); if (object === null) break; } return object; }
+
+function monosynth_getPrototypeOf(o) { monosynth_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return monosynth_getPrototypeOf(o); }
+
+function monosynth_inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) monosynth_setPrototypeOf(subClass, superClass); }
+
+function monosynth_setPrototypeOf(o, p) { monosynth_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return monosynth_setPrototypeOf(o, p); }
+
+
+
+
+
+
+var DEFAULT_SUSTAIN = 0.15;
+/**
+ * A MonoSynth is used as a single voice for sound synthesis.
+ * This is a class to be used in conjunction with the PolySynth
+ * class. Custom synthesizers should be built by inheriting from
+ * this class.
+ *
+ * @class p5.MonoSynth
+ * @constructor
+ * @example
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * let note = random(['Fb4', 'G4']);
+ * // note velocity (volume, from 0 to 1)
+ * let velocity = random();
+ * // time from now (in seconds)
+ * let time = 0;
+ * // note duration (in seconds)
+ * let dur = 1/6;
+ *
+ * monoSynth.play(note, velocity, time, dur);
+ * }
+ *
+ **/
+
+var monosynth_MonoSynth =
+function (_AudioVoice) {
+ monosynth_inherits(MonoSynth, _AudioVoice);
+
+ function MonoSynth() {
+ var _this;
+
+ monosynth_classCallCheck(this, MonoSynth);
+
+ _this = monosynth_possibleConstructorReturn(this, monosynth_getPrototypeOf(MonoSynth).call(this));
+ _this.oscillator = new oscillator();
+ _this.env = new envelope();
+
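+ // the oscillator runs continuously; the envelope drives output.gain,
+ // shaping each note's amplitude across the range set below (1 to 0)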
+ _this.env.setRange(1, 0);
+
+ _this.env.setExp(true);
+
+
+ _this.setADSR(0.02, 0.25, 0.05, 0.35);
+
+
+ _this.oscillator.disconnect();
+
+ _this.oscillator.connect(_this.output);
+
+ _this.env.disconnect();
+
+ _this.env.setInput(_this.output.gain);
+
+
+ _this.oscillator.output.gain.value = 1.0;
+
+ _this.oscillator.start();
+
+ _this.connect();
+
+ main.soundArray.push(monosynth_assertThisInitialized(_this));
+ /**
+ * Getters and Setters
+ * @property {Number} attack
+ * @for p5.MonoSynth
+ */
+
+ /**
+ * @property {Number} decay
+ * @for p5.MonoSynth
+ */
+
+ /**
+ * @property {Number} sustain
+ * @for p5.MonoSynth
+ */
+
+ /**
+ * @property {Number} release
+ * @for p5.MonoSynth
+ */
+
+ Object.defineProperties(monosynth_assertThisInitialized(_this), {
+ attack: {
+ get: function get() {
+ return this.env.aTime;
+ },
+ set: function set(attack) {
+ this.env.setADSR(attack, this.env.dTime, this.env.sPercent, this.env.rTime);
+ }
+ },
+ decay: {
+ get: function get() {
+ return this.env.dTime;
+ },
+ set: function set(decay) {
+ this.env.setADSR(this.env.aTime, decay, this.env.sPercent, this.env.rTime);
+ }
+ },
+ sustain: {
+ get: function get() {
+ return this.env.sPercent;
+ },
+ set: function set(sustain) {
+ this.env.setADSR(this.env.aTime, this.env.dTime, sustain, this.env.rTime);
+ }
+ },
+ release: {
+ get: function get() {
+ return this.env.rTime;
+ },
+ set: function set(release) {
+ this.env.setADSR(this.env.aTime, this.env.dTime, this.env.sPercent, release);
+ }
+ }
+ });
+ return _this;
+ }
+ /**
+ * Play tells the MonoSynth to start playing a note. This method schedules
+ * the calling of .triggerAttack and .triggerRelease.
+ *
+ * @method play
+ * @for p5.MonoSynth
+ * @param {String | Number} note the note you want to play, specified as a
+ * frequency in Hertz (Number) or as a midi
+ * value in Note/Octave format ("C4", "Eb3", etc.),
+ * following the Tone.js convention. Defaults to 440 Hz.
+ * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
+ * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
+ * @param {Number} [sustainTime] time to sustain before releasing the envelope. Defaults to 0.15 seconds.
+ * @example
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * let note = random(['Fb4', 'G4']);
+ * // note velocity (volume, from 0 to 1)
+ * let velocity = random();
+ * // time from now (in seconds)
+ * let time = 0;
+ * // note duration (in seconds)
+ * let dur = 1/6;
+ *
+ * monoSynth.play(note, velocity, time, dur);
+ * }
+ *
+ *
+ */
+
+
+ monosynth_createClass(MonoSynth, [{
+ key: "play",
+ value: function play(note, velocity, secondsFromNow, susTime) {
+ // coerce an undefined offset to 0 without ~~, which truncated
+ // fractional secondsFromNow values to whole seconds
+ var t = secondsFromNow || 0;
+ this.triggerAttack(note, velocity, t);
+ this.triggerRelease(t + (susTime || DEFAULT_SUSTAIN));
+ }
+ /**
+ * Trigger the Attack, and Decay portion of the Envelope.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go.
+ *
+ * @param {String | Number} note the note you want to play, specified as a
+ * frequency in Hertz (Number) or as a midi
+ * value in Note/Octave format ("C4", "Eb3", etc.),
+ * following the Tone.js convention. Defaults to 440 Hz.
+ * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
+ * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
+ * @method triggerAttack
+ * @for p5.MonoSynth
+ * @example
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(triggerAttack);
+ * background(220);
+ * text('tap here for attack, let go to release', 5, 20, width - 20);
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function triggerAttack() {
+ * userStartAudio();
+ *
+ * monoSynth.triggerAttack("E3");
+ * }
+ *
+ * function mouseReleased() {
+ * monoSynth.triggerRelease();
+ * }
+ *
+ */
+
+ }, {
+ key: "triggerAttack",
+ value: function triggerAttack(note, velocity) {
+ var secondsFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+ var freq = noteToFreq(note);
+ var vel = velocity || 0.1;
+ this.oscillator.freq(freq, 0, secondsFromNow);
+ this.env.ramp(this.output.gain, secondsFromNow, vel);
+ }
+ /**
+ * Trigger the release of the Envelope. This is similar to releasing
+ * the key on a piano and letting the sound fade according to the
+ * release level and release time.
+ *
+ * @param {Number} secondsFromNow time to trigger the release
+ * @method triggerRelease
+ * @for p5.MonoSynth
+ * @example
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(triggerAttack);
+ * background(220);
+ * text('tap here for attack, let go to release', 5, 20, width - 20);
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function triggerAttack() {
+ * userStartAudio();
+ *
+ * monoSynth.triggerAttack("E3");
+ * }
+ *
+ * function mouseReleased() {
+ * monoSynth.triggerRelease();
+ * }
+ *
+ */
+
+ }, {
+ key: "triggerRelease",
+ value: function triggerRelease() {
+ var secondsFromNow = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
+ this.env.ramp(this.output.gain, secondsFromNow, 0);
+ }
+ /**
+ * Set values like a traditional
+ *
+ * ADSR envelope
+ * .
+ *
+ * @method setADSR
+ * @for p5.MonoSynth
+ * @param {Number} attackTime Time (in seconds) before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susRatio is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ */
+
+ }, {
+ key: "setADSR",
+ value: function setADSR(attack, decay, sustain, release) {
+ this.env.setADSR(attack, decay, sustain, release);
+ }
+ /**
+ * MonoSynth amp
+ * @method amp
+ * @for p5.MonoSynth
+ * @param {Number} vol desired volume
+ * @param {Number} [rampTime] Time to reach new volume
+ * @return {Number} new volume value
+ */
+
+ }, {
+ key: "amp",
+ value: function amp(vol, rampTime) {
+ var t = rampTime || 0;
+
+ if (typeof vol !== 'undefined') {
+ this.oscillator.amp(vol, t);
+ }
+
+ return this.oscillator.amp().value;
+ }
+ /**
+ * Connect to a p5.sound / Web Audio object.
+ *
+ * @method connect
+ * @for p5.MonoSynth
+ * @param {Object} unit A p5.sound or Web Audio object
+ */
+
+ }, {
+ key: "connect",
+ value: function connect(unit) {
+ var u = unit || main.input;
+ this.output.connect(u.input ? u.input : u);
+ }
+ /**
+ * Disconnect all outputs
+ *
+ * @method disconnect
+ * @for p5.MonoSynth
+ */
+
+ }, {
+ key: "disconnect",
+ value: function disconnect() {
+ if (this.output) {
+ this.output.disconnect();
+ }
+ }
+ /**
+ * Get rid of the MonoSynth and free up its resources / memory.
+ *
+ * @method dispose
+ * @for p5.MonoSynth
+ */
+
+ }, {
+ key: "dispose",
+ value: function dispose() {
+ monosynth_get(monosynth_getPrototypeOf(MonoSynth.prototype), "dispose", this).call(this);
+
+ if (this.env) {
+ this.env.dispose();
+ }
+
+ if (this.oscillator) {
+ this.oscillator.dispose();
+ }
+ }
+ }]);
+
+ return MonoSynth;
+}(audioVoice_0);
+
+ var monosynth = (monosynth_MonoSynth);
+function onsetDetect_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function onsetDetect_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function onsetDetect_createClass(Constructor, protoProps, staticProps) { if (protoProps) onsetDetect_defineProperties(Constructor.prototype, protoProps); if (staticProps) onsetDetect_defineProperties(Constructor, staticProps); return Constructor; }
+
+/**
+ * Listen for onsets (a sharp increase in volume) within a given
+ * frequency range.
+ *
+ * @class p5.OnsetDetect
+ * @constructor
+ * @param {Number} freqLow Low frequency
+ * @param {Number} freqHigh High frequency
+ * @param {Number} threshold Amplitude threshold between 0 (no energy) and 1 (maximum)
+ * @param {Function} callback Function to call when an onset is detected
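+ * @example
+ * // A minimal sketch, assuming the microphone is available:
+ * // watch the 40-120 Hz band and log each detected onset.
+ * let mic = new p5.AudioIn();
+ * let fft = new p5.FFT();
+ * let onset = new p5.OnsetDetect(40, 120, 0.025, function (energy) {
+ *   console.log('onset!', energy);
+ * });
+ *
+ * function setup() {
+ *   mic.start();
+ *   fft.setInput(mic);
+ * }
+ *
+ * function draw() {
+ *   fft.analyze(); // update the spectrum before checking energy
+ *   onset.update(fft);
+ * }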
+ */
+var OnsetDetect =
+function () {
+ function OnsetDetect(freqLow, freqHigh, threshold, callback) {
+ onsetDetect_classCallCheck(this, OnsetDetect);
+
+ this.isDetected = false;
+ this.freqLow = freqLow;
+ this.freqHigh = freqHigh;
+ this.threshold = threshold;
+ this.energy = 0;
+ this.penergy = 0;
+
+ this.sensitivity = 500;
+ this.callback = callback;
+ }
+
+
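+ /**
+ * Compare the current energy in the configured frequency band with the
+ * previous frame, and fire the callback when the increase exceeds the
+ * threshold. Detection re-arms after `sensitivity` milliseconds.
+ *
+ * @method update
+ * @for p5.OnsetDetect
+ * @param {p5.FFT} fftObject an analyzed p5.FFT instance
+ * @param {Function} [callback] called with the onset energy if no
+ * callback was passed to the constructor
+ */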
+ onsetDetect_createClass(OnsetDetect, [{
+ key: "update",
+ value: function update(fftObject, callback) {
+ this.energy = fftObject.getEnergy(this.freqLow, this.freqHigh) / 255;
+
+ if (this.isDetected === false) {
+ if (this.energy - this.penergy > this.threshold) {
+ this.isDetected = true;
+
+ if (this.callback) {
+ this.callback(this.energy);
+ } else if (callback) {
+ callback(this.energy);
+ }
+
+ var self = this;
+ setTimeout(function () {
+ self.isDetected = false;
+ }, this.sensitivity);
+ }
+ }
+
+ this.penergy = this.energy;
+ }
+ }]);
+
+ return OnsetDetect;
+}();
+
+ var onsetDetect = (OnsetDetect);
+function polysynth_classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function polysynth_defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function polysynth_createClass(Constructor, protoProps, staticProps) { if (protoProps) polysynth_defineProperties(Constructor.prototype, protoProps); if (staticProps) polysynth_defineProperties(Constructor, staticProps); return Constructor; }
+
+
+
+
+/**
+ * An AudioVoice is used as a single voice for sound synthesis.
+ * The PolySynth class holds an array of AudioVoices and handles
+ * voice allocation, note scheduling, and parameter setting.
+ *
+ * @class p5.PolySynth
+ * @constructor
+ *
+ * @param {Function} [synthVoice] A monophonic synth voice inheriting
+ * the AudioVoice class. Defaults to p5.MonoSynth.
+ * @param {Number} [maxVoices] Number of voices, defaults to 8.
+ * @example
+ *
+ * let polySynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * text('click to play', 20, 20);
+ *
+ * polySynth = new p5.PolySynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * // note duration (in seconds)
+ * let dur = 1.5;
+ *
+ * // time from now (in seconds)
+ * let time = 0;
+ *
+ * // velocity (volume, from 0 to 1)
+ * let vel = 0.1;
+ *
+ * // notes can overlap with each other
+ * polySynth.play('G2', vel, 0, dur);
+ * polySynth.play('C3', vel, time += 1/3, dur);
+ * polySynth.play('G3', vel, time += 1/3, dur);
+ * }
+ *
+ **/
+
+var polysynth_PolySynth =
+function () {
+ function PolySynth(audioVoice, maxVoices) {
+ polysynth_classCallCheck(this, PolySynth);
+
+ this.audiovoices = [];
+ /**
+ * An object that holds information about which notes have been played and
+ * which notes are currently being played. New notes are added as keys
+ * on the fly. While a note has been attacked but not yet released, the value
+ * of its key is a timeline signal indicating which audiovoice is generating
+ * that note. When a note is released, the value of its key becomes undefined.
+ * @property notes
+ */
+
+ this.notes = {};
+
+ this._newest = 0;
+ this._oldest = 0;
+ /**
+ * A PolySynth must have at least 1 voice; defaults to 8.
+ * @property maxVoices
+ */
+
+ this.maxVoices = maxVoices || 8;
+ /**
+ * Monosynth that generates the sound for each note that is triggered. The
+ * p5.PolySynth defaults to using the p5.MonoSynth as its voice.
+ * @property AudioVoice
+ */
+
+ this.AudioVoice = audioVoice === undefined ? p5.MonoSynth : audioVoice;
+ /**
+ * This value must only change as a note is attacked or released. Due to delay
+ * and sustain times, Tone.TimelineSignal is required to schedule the change in value.
+ * @private
+ * @property {Tone.TimelineSignal} _voicesInUse
+ */
+
+ this._voicesInUse = new TimelineSignal_default.a(0);
+ this.output = main.audiocontext.createGain();
+ this.connect();
+
+ this._allocateVoices();
+
+ main.soundArray.push(this);
+ }
+ /**
+ * Construct the appropriate number of audiovoices
+ * @private
+ * @for p5.PolySynth
+ * @method _allocateVoices
+ */
+
+
+ polysynth_createClass(PolySynth, [{
+ key: "_allocateVoices",
+ value: function _allocateVoices() {
+ for (var i = 0; i < this.maxVoices; i++) {
+ this.audiovoices.push(new this.AudioVoice());
+ this.audiovoices[i].disconnect();
+ this.audiovoices[i].connect(this.output);
+ }
+ }
+ /**
+ * Play a note by triggering noteAttack and noteRelease with sustain time
+ *
+ * @method play
+ * @for p5.PolySynth
+ * @param {Number} [note] midi note to play (ranging from 0 to 127; 60 is middle C)
+ * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
+ * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
+ * @param {Number} [sustainTime] time to sustain before releasing the envelope
+ * @example
+ *
+ * let polySynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * text('click to play', 20, 20);
+ *
+ * polySynth = new p5.PolySynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * // note duration (in seconds)
+ * let dur = 1.5;
+ *
+ * // time from now (in seconds)
+ * let time = 0;
+ *
+ * // velocity (volume, from 0 to 1)
+ * let vel = 0.1;
+ *
+ * // notes can overlap with each other
+ * polySynth.play('G2', vel, 0, dur);
+ * polySynth.play('C3', vel, time += 1/3, dur);
+ * polySynth.play('G3', vel, time += 1/3, dur);
+ * }
+ *
+ */
+
+ }, {
+ key: "play",
+ value: function play(note, velocity) {
+ var secondsFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+ var susTime = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 1;
+ this.noteAttack(note, velocity, secondsFromNow);
+ this.noteRelease(note, secondsFromNow + susTime);
+ }
+ /**
+ * noteADSR sets the envelope for a specific note that has just been triggered.
+ * Using this method modifies the envelope of whichever audiovoice is being used
+ * to play the desired note. The envelope should be reset before noteRelease is called
+ * in order to prevent the modified envelope from being used on other notes.
+ *
+ * @method noteADSR
+ * @for p5.PolySynth
+ * @param {Number} [note] Midi note on which ADSR should be set.
+ * @param {Number} [attackTime] Time (in seconds) before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susRatio is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
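+ * @example
+ * // A hedged sketch, assuming a polySynth as in the play() example.
+ * // Notes are keyed internally by frequency (noteAttack applies
+ * // noteToFreq), so address the note the same way it was attacked.
+ * let freq = 261.63; // approximately middle C
+ * polySynth.noteAttack(freq, 0.5);
+ * polySynth.noteADSR(freq, 0.2, 0.1, 0.5, 0.8);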
+ **/
+
+ }, {
+ key: "noteADSR",
+ value: function noteADSR(note, a, d, s, r) {
+ var timeFromNow = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : 0;
+ var now = main.audiocontext.currentTime;
+ var t = now + timeFromNow;
+ this.audiovoices[this.notes[note].getValueAtTime(t)].setADSR(a, d, s, r);
+ }
+ /**
+ * Set the PolySynth's global envelope. This method modifies the envelopes of each
+ * monosynth so that all notes are played with this envelope.
+ *
+ * @method setADSR
+ * @for p5.PolySynth
+ * @param {Number} [attackTime] Time (in seconds) before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susRatio is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
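+ * @example
+ * // A minimal sketch, assuming a polySynth as in the play() example:
+ * // give every voice a slow attack and a long release.
+ * polySynth.setADSR(0.5, 0.1, 0.8, 1.5);
+ * polySynth.play('C4', 0.5, 0, 2);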
+ **/
+
+ }, {
+ key: "setADSR",
+ value: function setADSR(a, d, s, r) {
+ this.audiovoices.forEach(function (voice) {
+ voice.setADSR(a, d, s, r);
+ });
+ }
+ /**
+ * Trigger the Attack and Decay portion of a MonoSynth.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go.
+ *
+ * @method noteAttack
+ * @for p5.PolySynth
+ * @param {Number} [note] midi note on which attack should be triggered.
+ * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
+ * @param {Number} [secondsFromNow] time from now (in seconds)
+ * @example
+ *
+ * let polySynth = new p5.PolySynth();
+ * let pitches = ['G', 'D', 'G', 'C'];
+ * let octaves = [2, 3, 4];
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playChord);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function playChord() {
+ * userStartAudio();
+ *
+ * // play a chord: multiple notes at the same time
+ * for (let i = 0; i < 4; i++) {
+ * let note = random(pitches) + random(octaves);
+ * polySynth.noteAttack(note, 0.1);
+ * }
+ * }
+ *
+ * function mouseReleased() {
+ * // release all voices
+ * polySynth.noteRelease();
+ * }
+ *
+ */
+
+ }, {
+ key: "noteAttack",
+ value: function noteAttack(_note, _velocity) {
+ var secondsFromNow = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+ var acTime = main.audiocontext.currentTime + secondsFromNow;
+
+ var note = noteToFreq(_note);
+ var velocity = _velocity || 0.1;
+ var currentVoice;
+
+ // If this note is already sounding, release it first so its voice
+ // can be re-triggered cleanly.
+ if (this.notes[note] && this.notes[note].getValueAtTime(acTime) !== null) {
+ this.noteRelease(note, 0);
+ }
+
+
+ // If a voice is free, use the lowest available index; otherwise steal
+ // the oldest voice, release its note, and advance the pointer.
+ if (this._voicesInUse.getValueAtTime(acTime) < this.maxVoices) {
+ currentVoice = Math.max(~~this._voicesInUse.getValueAtTime(acTime), 0);
+ } else {
+ currentVoice = this._oldest;
+ var oldestNote = freqToMidi(this.audiovoices[this._oldest].oscillator.freq().value);
+ this.noteRelease(oldestNote);
+ this._oldest = (this._oldest + 1) % this.maxVoices;
+ }
+
+
+ // Record which voice plays this note, and bump the count of voices
+ // in use on the shared timeline.
+ this.notes[note] = new TimelineSignal_default.a();
+ this.notes[note].setValueAtTime(currentVoice, acTime);
+
+ var previousVal = this._voicesInUse._searchBefore(acTime) === null ? 0 : this._voicesInUse._searchBefore(acTime).value;
+
+ this._voicesInUse.setValueAtTime(previousVal + 1, acTime);
+
+
+ this._updateAfter(acTime, 1);
+
+ this._newest = currentVoice;
+
+ if (typeof velocity === 'number') {
+ // Scale the velocity ceiling down as more voices play, to avoid clipping.
+ var maxRange = 1 / this._voicesInUse.getValueAtTime(acTime) * 2;
+ velocity = velocity > maxRange ? maxRange : velocity;
+ }
+
+
+ this.audiovoices[currentVoice].triggerAttack(note, velocity, secondsFromNow);
+ }
+ /**
+ * Private method to ensure accurate values of this._voicesInUse.
+ * Any time a new value is scheduled, it is necessary to increment all
+ * subsequent scheduled values after an attack, and decrement them
+ * after a release.
+ *
+ * @private
+ * @for p5.PolySynth
+ * @param {Number} time audio context time after which to update
+ * @param {Number} value amount to add to each later scheduled value
+ */
+
+ }, {
+ key: "_updateAfter",
+ value: function _updateAfter(time, value) {
+ if (this._voicesInUse._searchAfter(time) === null) {
+ return;
+ } else {
+ this._voicesInUse._searchAfter(time).value += value;
+
+ var nextTime = this._voicesInUse._searchAfter(time).time;
+
+ this._updateAfter(nextTime, value);
+ }
+ }
+ /**
+ * Trigger the Release of an AudioVoice note. This is similar to releasing
+ * the key on a piano and letting the sound fade according to the
+ * release level and release time.
+ *
+ * @method noteRelease
+ * @for p5.PolySynth
+ * @param {Number} [note] midi note on which release should be triggered.
+ * If no value is provided, all notes will be released.
+ * @param {Number} [secondsFromNow] time to trigger the release
+ * @example
+ *
+ * let polySynth = new p5.PolySynth();
+ * let pitches = ['G', 'D', 'G', 'C'];
+ * let octaves = [2, 3, 4];
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playChord);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function playChord() {
+ * userStartAudio();
+ *
+ * // play a chord: multiple notes at the same time
+ * for (let i = 0; i < 4; i++) {
+ * let note = random(pitches) + random(octaves);
+ * polySynth.noteAttack(note, 0.1);
+ * }
+ * }
+ *
+ * function mouseReleased() {
+ * // release all voices
+ * polySynth.noteRelease();
+ * }
+ *
+ */