Skip to content

Commit

Permalink
update inline examples (#388) (#403)
Browse files Browse the repository at this point in the history
- Update inline examples to start audio context on a user gesture, either by playing a sound (soundFile.play() / oscillator.start()) or by explicitly calling userStartAudio() #388

- Use let in examples, instead of var, but not const, as discussed in this issue where it was decided to use let exclusively in examples: processing/p5.js#3877

- Update styles for consistency with other p5 inline examples

Some examples use soundFile.play() or oscillator.start() rather than a potentially redundant call to userStartAudio(). It might be worth making that redundant call anyway, because the difference between methods that call "start" on a WebAudio node (thus enabling audio on a user gesture) and those that do not is pretty obfuscated...
  • Loading branch information
therewasaguy committed Jan 6, 2020
1 parent 7ce64b9 commit 9760dd0
Show file tree
Hide file tree
Showing 22 changed files with 1,010 additions and 769 deletions.
58 changes: 32 additions & 26 deletions fragments/before.frag
Original file line number Diff line number Diff line change
@@ -1,40 +1,46 @@
/**
* p5.sound extends p5 with <a href="http://caniuse.com/audio-api"
* <p>p5.sound extends p5 with <a href="http://caniuse.com/audio-api"
* target="_blank">Web Audio</a> functionality including audio input,
* playback, analysis and synthesis.
* <br/><br/>
* <a href="#/p5.SoundFile"><b>p5.SoundFile</b></a>: Load and play sound files.<br/>
* <a href="#/p5.Amplitude"><b>p5.Amplitude</b></a>: Get the current volume of a sound.<br/>
* <a href="#/p5.AudioIn"><b>p5.AudioIn</b></a>: Get sound from an input source, typically
* a computer microphone.<br/>
* <a href="#/p5.FFT"><b>p5.FFT</b></a>: Analyze the frequency of sound. Returns
* results from the frequency spectrum or time domain (waveform).<br/>
* <a href="#/p5.Oscillator"><b>p5.Oscillator</b></a>: Generate Sine,
* </p>
* <ul>
* <li><a href="#/p5.SoundFile"><b>p5.SoundFile</b></a>: Load and play sound files.</li>
* <li><a href="#/p5.Amplitude"><b>p5.Amplitude</b></a>: Get the current volume of a sound.</li>
* <li><a href="#/p5.AudioIn"><b>p5.AudioIn</b></a>: Get sound from an input source, typically
* a computer microphone.</li>
* <li><a href="#/p5.FFT"><b>p5.FFT</b></a>: Analyze the frequency of sound. Returns
* results from the frequency spectrum or time domain (waveform).</li>
* <li><a href="#/p5.Oscillator"><b>p5.Oscillator</b></a>: Generate Sine,
* Triangle, Square and Sawtooth waveforms. Base class of
* <a href="#/p5.Noise">p5.Noise</a> and <a href="#/p5.Pulse">p5.Pulse</a>.
* <br/>
* <a href="#/p5.Envelope"><b>p5.Envelope</b></a>: An Envelope is a series
* <li><a href="#/p5.Noise">p5.Noise</a> and <a href="#/p5.Pulse">p5.Pulse</a>.
* </li>
* <li>
* <a href="#/p5.MonoSynth">p5.MonoSynth</a> and <a href="#/p5.PolySynth">p5.PolySynth</a>: Play musical notes
* </li>
* <li><a href="#/p5.Envelope"><b>p5.Envelope</b></a>: An Envelope is a series
* of fades over time. Often used to control an object's
* output gain level as an "ADSR Envelope" (Attack, Decay,
* Sustain, Release). Can also modulate other parameters.<br/>
* <a href="#/p5.Delay"><b>p5.Delay</b></a>: A delay effect with
* parameters for feedback, delayTime, and lowpass filter.<br/>
* <a href="#/p5.Filter"><b>p5.Filter</b></a>: Filter the frequency range of a
* Sustain, Release). Can also modulate other parameters.</li>
* <li><a href="#/p5.Delay"><b>p5.Delay</b></a>: A delay effect with
* parameters for feedback, delayTime, and lowpass filter.</li>
* <li><a href="#/p5.Filter"><b>p5.Filter</b></a>: Filter the frequency range of a
* sound.
* <br/>
* <a href="#/p5.Reverb"><b>p5.Reverb</b></a>: Add reverb to a sound by specifying
* duration and decay. <br/>
* <b><a href="#/p5.Convolver">p5.Convolver</a>:</b> Extends
* </li>
* <li><a href="#/p5.Reverb"><b>p5.Reverb</b></a>: Add reverb to a sound by specifying
* duration and decay. </li>
 * <li><b><a href="#/p5.Convolver">p5.Convolver</a>:</b> Extends
* <a href="#/p5.Reverb">p5.Reverb</a> to simulate the sound of real
* physical spaces through convolution.<br/>
* <b><a href="#/p5.SoundRecorder">p5.SoundRecorder</a></b>: Record sound for playback
* physical spaces through convolution.</li>
 * <li><b><a href="#/p5.SoundRecorder">p5.SoundRecorder</a></b>: Record sound for playback
* / save the .wav file.
* <b><a href="#/p5.Phrase">p5.Phrase</a></b>, <b><a href="#/p5.Part">p5.Part</a></b> and
 * <li><b><a href="#/p5.SoundLoop">p5.SoundLoop</a>, <a href="#/p5.Phrase">p5.Phrase</a></b>, <b><a href="#/p5.Part">p5.Part</a></b> and
* <b><a href="#/p5.Score">p5.Score</a></b>: Compose musical sequences.
* <br/><br/>
* p5.sound is on <a href="https://github.com/therewasaguy/p5.sound/">GitHub</a>.
* </li>
* <li><a href="#/p5/userStartAudio">userStartAudio</a>: Enable audio in a
 * browser- and user-friendly way.</li>
* <p>p5.sound is on <a href="https://github.com/therewasaguy/p5.sound/">GitHub</a>.
* Download the latest version
* <a href="https://github.com/therewasaguy/p5.sound/blob/master/lib/p5.sound.js">here</a>.
* <a href="https://github.com/therewasaguy/p5.sound/blob/master/lib/p5.sound.js">here</a>.</p>
*
* @module p5.sound
* @submodule p5.sound
Expand Down
83 changes: 52 additions & 31 deletions src/amplitude.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,32 +17,34 @@ define(function (require) {
* amplitude readings (defaults to 0)
* @example
* <div><code>
* var sound, amplitude, cnv;
* let sound, amplitude;
*
* function preload(){
* sound = loadSound('assets/beat.mp3');
* }
* function setup() {
* cnv = createCanvas(100,100);
* let cnv = createCanvas(100,100);
* cnv.mouseClicked(toggleSound);
* amplitude = new p5.Amplitude();
*
* // start / stop the sound when canvas is clicked
* cnv.mouseClicked(function() {
* if (sound.isPlaying() ){
* sound.stop();
* } else {
* sound.play();
* }
* });
* }
*
* function draw() {
* background(0);
* fill(255);
* var level = amplitude.getLevel();
* var size = map(level, 0, 1, 0, 200);
* background(220);
* text('tap to play', 20, 20);
*
* let level = amplitude.getLevel();
* let size = map(level, 0, 1, 0, 200);
* ellipse(width/2, height/2, size, size);
* }
*
* function toggleSound() {
* if (sound.isPlaying() ){
* sound.stop();
* } else {
* sound.play();
* }
* }
*
* </code></div>
*/
p5.Amplitude = function(smoothing) {
Expand Down Expand Up @@ -117,21 +119,30 @@ define(function (require) {
* sound2 = loadSound('assets/drum.mp3');
* }
* function setup(){
* cnv = createCanvas(100, 100);
* cnv.mouseClicked(toggleSound);
*
* amplitude = new p5.Amplitude();
* sound1.play();
* sound2.play();
* amplitude.setInput(sound2);
* }
*
* function draw() {
* background(0);
* fill(255);
* var level = amplitude.getLevel();
* var size = map(level, 0, 1, 0, 200);
* background(220);
* text('tap to play', 20, 20);
*
* let level = amplitude.getLevel();
* let size = map(level, 0, 1, 0, 200);
* ellipse(width/2, height/2, size, size);
* }
* function mouseClicked(){
* sound1.stop();
* sound2.stop();
*
* function toggleSound(){
* if (sound1.isPlaying() && sound2.isPlaying()) {
* sound1.stop();
* sound2.stop();
* } else {
* sound1.play();
* sound2.play();
* }
* }
* </code></div>
*/
Expand Down Expand Up @@ -197,19 +208,29 @@ define(function (require) {
* function preload(){
* sound = loadSound('assets/beat.mp3');
* }
*
* function setup() {
* let cnv = createCanvas(100, 100);
* cnv.mouseClicked(toggleSound);
* amplitude = new p5.Amplitude();
* sound.play();
* }
*
* function draw() {
* background(0);
* fill(255);
* var level = amplitude.getLevel();
* var size = map(level, 0, 1, 0, 200);
* background(220, 150);
* textAlign(CENTER);
* text('tap to play', width/2, 20);
*
* let level = amplitude.getLevel();
* let size = map(level, 0, 1, 0, 200);
* ellipse(width/2, height/2, size, size);
* }
* function mouseClicked(){
* sound.stop();
*
* function toggleSound(){
* if (sound.isPlaying()) {
* sound.stop();
* } else {
* sound.play();
* }
* }
* </code></div>
*/
Expand Down
60 changes: 36 additions & 24 deletions src/audiocontext.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ define(['startaudiocontext', 'Tone/core/Context', 'Tone/core/Tone'], function (S
* <p>Some browsers require users to startAudioContext
* with a user gesture, such as touchStarted in the example below.</p>
*
* @for p5
* @method getAudioContext
* @return {Object} AudioContext for this sketch
* @example
Expand Down Expand Up @@ -50,44 +51,55 @@ define(['startaudiocontext', 'Tone/core/Context', 'Tone/core/Tone'], function (S


/**
* <p>It is a good practice to give users control over starting audio playback.
* This practice is enforced by Google Chrome's autoplay policy as of r70
* (<a href="https://goo.gl/7K7WLu">info</a>), iOS Safari, and other browsers.
* </p>
 * <p>It is a good practice to give users control over starting
 * audio. This policy is enforced by many web browsers, including iOS and
* <a href="https://goo.gl/7K7WLu" title="Google Chrome's autoplay
* policy">Google Chrome</a>, which create the Web Audio API's
* <a href="https://developer.mozilla.org/en-US/docs/Web/API/AudioContext"
* title="Audio Context @ MDN">Audio Context</a>
* in a suspended state.</p>
*
* <p>
* userStartAudio() starts the <a href="https://developer.mozilla.org/en-US/docs/Web/API/AudioContext"
* target="_blank" title="Audio Context @ MDN">Audio Context</a> on a user gesture. It utilizes
* the <a href="https://github.com/tambien/StartAudioContext">StartAudioContext</a> library by
* Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext.
* </p>
 * <p>Under these browser-specific policies, sound will not play until a user
* interaction event (i.e. <code>mousePressed()</code>) explicitly resumes
* the AudioContext, or starts an audio node. This can be accomplished by
* calling <code>start()</code> on a <code>p5.Oscillator</code>,
* <code> play()</code> on a <code>p5.SoundFile</code>, or simply
* <code>userStartAudio()</code>.</p>
*
* <p>Starting the audio context on a user gesture can be as simple as <code>userStartAudio()</code>.
* Optional parameters let you decide on a specific element that will start the audio context,
* and/or call a function once the audio context is started.</p>
* <p><code>userStartAudio()</code> starts the AudioContext on a user
* gesture. The default behavior will enable audio on any
* mouseUp or touchEnd event. It can also be placed in a specific
* interaction function, such as <code>mousePressed()</code> as in the
* example below. This method utilizes
* <a href="https://github.com/tambien/StartAudioContext">StartAudioContext
* </a>, a library by Yotam Mann (MIT Licence, 2016).</p>
* @param {Element|Array} [element(s)] This argument can be an Element,
* Selector String, NodeList, p5.Element,
* jQuery Element, or an Array of any of those.
* @param {Function} [callback] Callback to invoke when the AudioContext has started
* @return {Promise} Returns a Promise which is resolved when
* @param {Function} [callback] Callback to invoke when the AudioContext
* has started
* @return {Promise} Returns a Promise that resolves when
* the AudioContext state is 'running'
* @method userStartAudio
* @for p5
* @example
* <div><code>
* function setup() {
* var myDiv = createDiv('click to start audio');
* myDiv.position(0, 0);
* // mimics the autoplay policy
* getAudioContext().suspend();
*
* var mySynth = new p5.MonoSynth();
* let mySynth = new p5.MonoSynth();
*
* // This won't play until the context has started
* // This won't play until the context has resumed
* mySynth.play('A6');
*
* // Start the audio context on a click/touch event
* userStartAudio().then(function() {
* myDiv.remove();
* });
* }
* function draw() {
* background(220);
* textAlign(CENTER, CENTER);
* text(getAudioContext().state, width/2, height/2);
* }
* function mousePressed() {
* userStartAudio();
* }
* </code></div>
*/
Expand Down
55 changes: 43 additions & 12 deletions src/audioin.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,15 +29,24 @@ define(function (require) {
* currently allow microphone access.
* @example
* <div><code>
* var mic;
* function setup(){
* mic = new p5.AudioIn()
* let mic;
*
* function setup(){
* let cnv = createCanvas(100, 100);
* cnv.mousePressed(userStartAudio);
* textAlign(CENTER);
* mic = new p5.AudioIn();
* mic.start();
* }
*
* function draw(){
* background(0);
* fill(255);
* text('tap to start', width/2, 20);
*
* micLevel = mic.getLevel();
* ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);
* let y = height - micLevel * height;
* ellipse(width/2, y, 10, 10);
* }
* </code></div>
*/
Expand Down Expand Up @@ -267,18 +276,21 @@ define(function (require) {
* to the enumerateDevices() method
* @example
* <div><code>
* var audiograb;
* let audioIn;
*
* function setup(){
* //new audioIn
* audioGrab = new p5.AudioIn();
* text('getting sources...', 0, 20);
* audioIn = new p5.AudioIn();
* audioIn.getSources(gotSources);
* }
*
* audioGrab.getSources(function(deviceList) {
* //print out the array of available sources
* console.log(deviceList);
* function gotSources(deviceList) {
* if (deviceList.length > 0) {
* //set the source to the first item in the deviceList array
* audioGrab.setSource(0);
* });
* audioIn.setSource(0);
* let currentSource = deviceList[audioIn.currentSource];
* text('set source to: ' + currentSource.deviceId, 5, 20, width);
* }
* }
* </code></div>
*/
Expand Down Expand Up @@ -316,6 +328,25 @@ define(function (require) {
* @method setSource
* @for p5.AudioIn
* @param {number} num position of input source in the array
* @example
* <div><code>
* let audioIn;
*
* function setup(){
* text('getting sources...', 0, 20);
* audioIn = new p5.AudioIn();
* audioIn.getSources(gotSources);
* }
*
* function gotSources(deviceList) {
* if (deviceList.length > 0) {
* //set the source to the first item in the deviceList array
* audioIn.setSource(0);
* let currentSource = deviceList[audioIn.currentSource];
* text('set source to: ' + currentSource.deviceId, 5, 20, width);
* }
* }
* </code></div>
*/
p5.AudioIn.prototype.setSource = function(num) {
if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) {
Expand Down

0 comments on commit 9760dd0

Please sign in to comment.