  </head>
  <body>
    <div>
      <div style="font-family: sans-serif">
        Volume:
        <input
          type="range"
          id="volumeSlider"
          min="0"
          max="2"
          value="1"
          step="0.01"
        />
        Reverb: <input type="checkbox" id="reverbToggle" /> Noise:
        <input type="checkbox" id="noiseToggle" />
      </div>
      <div id="cloudComputerDiv" style="height: 720px; width: 1280px"></div>
    </div>
    <script type="module">
16 | | - import Hyperbeam from 'https://unpkg.com/@hyperbeam/web@latest/dist/index.js'; |
| 24 | + import Hyperbeam from "https://unpkg.com/@hyperbeam/web@latest/dist/index.js"; |
17 | 25 | const audioCtx = new (window.AudioContext || window.webkitAudioContext)(); |
18 | 26 | const gainNode = audioCtx.createGain(); |
19 | 27 | const noiseNode = brownNoise(audioCtx); |
20 | 28 | const reverbNode = reverb(audioCtx, 2, 4); |
21 | | - const resp = await fetch('/computer'); |
| 29 | + const resp = await fetch("/computer"); |
22 | 30 | const data = await resp.json(); |
23 | 31 | const hb = await Hyperbeam(cloudComputerDiv, data.embed_url, { |
24 | | - audioTrackCb: tryAudio |
| 32 | + audioTrackCb: tryAudio, |
25 | 33 | }); |
26 | 34 |
|
27 | | - let source |
| 35 | + let source; |
28 | 36 | function tryAudio(track) { |
29 | 37 | // audioCtx.createMediaStreamTrackSource(track) is only supported by Firefox |
30 | 38 | const stream = new MediaStream([track]); |
31 | 39 | source = audioCtx.createMediaStreamSource(stream); |
32 | 40 | source.connect(gainNode); |
33 | 41 | gainNode.connect(audioCtx.destination); |
34 | | - console.log('connected audio track:', track); |
| 42 | + console.log("connected audio track:", track); |
35 | 43 | } |
36 | 44 |
|
37 | 45 | function enableReverb() { |
|
51 | 59 | noiseNode.disconnect(); |
52 | 60 | } |
53 | 61 |
|
54 | | - volumeSlider.addEventListener('input', (e) => { |
| 62 | + volumeSlider.addEventListener("input", (e) => { |
55 | 63 | gainNode.gain.value = e.target.value; |
56 | 64 | }); |
57 | 65 |
|
58 | | - reverbToggle.addEventListener('change', (e) => { |
| 66 | + reverbToggle.addEventListener("change", (e) => { |
59 | 67 | if (e.target.checked) { |
60 | 68 | enableReverb(); |
61 | 69 | } else { |
62 | 70 | disableReverb(); |
63 | 71 | } |
64 | | - }) |
| 72 | + }); |
65 | 73 |
|
66 | | - noiseToggle.addEventListener('change', (e) => { |
| 74 | + noiseToggle.addEventListener("change", (e) => { |
67 | 75 | if (e.target.checked) { |
68 | 76 | enableNoise(); |
69 | 77 | } else { |
70 | 78 | disableNoise(); |
71 | 79 | } |
72 | | - }) |
| 80 | + }); |
73 | 81 |
|
74 | | - window.addEventListener('mouseup', async () => { |
| 82 | + window.addEventListener("mouseup", async () => { |
75 | 83 | // AudioContext will start in a suspended state if no user gestures |
76 | 84 | // have been received on the page: need to check if suspended and resume |
77 | | - if (audioCtx.state === 'suspended') { |
| 85 | + if (audioCtx.state === "suspended") { |
78 | 86 | try { |
79 | 87 | await audioCtx.resume(); |
80 | | - console.log('audioCtx state:', audioCtx.state); |
| 88 | + console.log("audioCtx state:", audioCtx.state); |
81 | 89 | } catch (e) { |
82 | 90 | console.error(e); |
83 | 91 | } |
84 | 92 | } |
85 | | - }) |
| 93 | + }); |
86 | 94 |
|
87 | 95 | // Inspired by https://github.com/web-audio-components/simple-reverb/blob/master/index.js |
88 | 96 | function reverb(ctx, seconds, decay, reverse = false) { |
|
93 | 101 | const impulseR = impulse.getChannelData(1); |
94 | 102 | for (let i = 0; i < length; i++) { |
95 | 103 | const n = reverse ? length - i : i; |
96 | | - impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay); |
97 | | - impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay); |
| 104 | + impulseL[i] = |
| 105 | + (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay); |
| 106 | + impulseR[i] = |
| 107 | + (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay); |
98 | 108 | } |
99 | 109 | const reverbNode = ctx.createConvolver(); |
100 | 110 | reverbNode.buffer = impulse; |
|
107 | 117 | const bufferSize = 4096; |
108 | 118 | let lastOut = 0.0; |
109 | 119 | const node = ctx.createScriptProcessor(bufferSize, 1, 1); |
110 | | - node.onaudioprocess = function(e) { |
| 120 | + node.onaudioprocess = function (e) { |
111 | 121 | let output = e.outputBuffer.getChannelData(0); |
112 | 122 | for (let i = 0; i < bufferSize; i++) { |
113 | 123 | const white = Math.random() * 2 - 1; |
114 | | - output[i] = (lastOut + (0.02 * white)) / 1.02; |
| 124 | + output[i] = (lastOut + 0.02 * white) / 1.02; |
115 | 125 | lastOut = output[i]; |
116 | 126 | output[i] *= 3.5; // (roughly) compensate for gain |
117 | 127 | } |
118 | | - } |
| 128 | + }; |
119 | 129 | return node; |
120 | 130 | } |
    </script>
  </body>
</html>