From c912f856a3bb3d7f1bef948c016e67e0da1bcb9c Mon Sep 17 00:00:00 2001 From: katspaugh <381895+katspaugh@users.noreply.github.com> Date: Thu, 19 Oct 2023 22:23:33 +0200 Subject: [PATCH] Feat: restore the "backend: 'WebAudio'" option (#3281) * Feat: webaudio "backend" * Prettier * Tests * Update readme * Fix test --- README.md | 7 +- cypress/e2e/basic.cy.js | 52 +-- cypress/e2e/options.cy.js | 783 ++++++++++++++++++-------------------- examples/basic.js | 4 +- examples/zoom-plugin.js | 14 +- src/event-emitter.ts | 64 ++-- src/wavesurfer.ts | 31 +- src/webaudio.ts | 166 ++++++++ 8 files changed, 626 insertions(+), 495 deletions(-) create mode 100644 src/webaudio.ts diff --git a/README.md b/README.md index 402081b37..f269c803f 100644 --- a/README.md +++ b/README.md @@ -95,16 +95,15 @@ See [this example](https://wavesurfer.xyz/examples/?styling.js) for play around Most options, events, and methods are similar to those in previous versions. ### Notable differences - * The `backend` option is removed – [HTML5 audio (or video) is the only playback mechanism](https://github.com/katspaugh/wavesurfer.js/discussions/2762#discussioncomment-5669347). However, you can still connect wavesurfer to Web Audio via `MediaElementSourceNode`. See this [example](https://wavesurfer.xyz/examples/?webaudio.js). * The Markers plugin is removed – you should use the Regions plugin with just a `startTime`. * No Microphone plugin – superseded by the new Record plugin with more features. * The Cursor plugin is replaced by the Hover plugin. ### Removed options - * `backend`, `audioContext`, `closeAudioContext`, `audioScriptProcessor` – there's no Web Audio backend, so no AudioContext + * `audioContext`, `closeAudioContext`, `audioScriptProcessor` * `autoCenterImmediately` – `autoCenter` is now always immediate unless the audio is playing * `backgroundColor`, `hideCursor` – this can be easily set via CSS - * `mediaType`, `mediaControls` – you should instead pass an entire media element in the `media` option. [Example](https://wavesurfer.xyz/examples/?video.js). + * `mediaType` – you should instead pass an entire media element in the `media` option. [Example](https://wavesurfer.xyz/examples/?video.js). * `partialRender` – done by default * `pixelRatio` – `window.devicePixelRatio` is used by default * `renderer` – there's just one renderer for now, so no need for this option @@ -117,7 +116,7 @@ Most options, events, and methods are similar to those in previous versions. 
* `barMinHeight` - the minimum bar height is now 1 pixel by default ### Removed methods - * `getFilters`, `setFilter` – as there's no Web Audio "backend" + * `getFilters`, `setFilter` – see the [Web Audio example](https://wavesurfer.xyz/examples/?webaudio.js) * `drawBuffer` – to redraw the waveform, use `setOptions` instead and pass new rendering options * `cancelAjax` – you can pass an [AbortSignal](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) in `fetchParams` * `skipForward`, `skipBackward`, `setPlayEnd` – can be implemented using `setTime(time)` diff --git a/cypress/e2e/basic.cy.js b/cypress/e2e/basic.cy.js index 7c63c28cd..f4c775d69 100644 --- a/cypress/e2e/basic.cy.js +++ b/cypress/e2e/basic.cy.js @@ -177,20 +177,20 @@ describe('WaveSurfer basic tests', () => { }) }) - describe('setMediaElement', () => { - const MEDIA_EVENTS = ['timeupdate', 'play', 'pause', 'emptied', 'ended', 'seeking'] - let orignalMedia - + describe.only('setMediaElement', () => { // Mock add/remove event listeners for `media` elements const attachMockListeners = (el) => { - el.eventListenerList = {} - el.addEventListener = (eventName, callback, options) => { - if (!el.eventListenerList[eventName]) el.eventListenerList[eventName] = []; - el.eventListenerList[eventName].push(callback); - }; + el.eventCount = 0 + const addEventListener = el.addEventListener + el.addEventListener = (eventName, callback, options) => { + if (!options || !options.once) el.eventCount++ + addEventListener.call(el, eventName, callback, options) + } + const removeEventListener = el.removeEventListener el.removeEventListener = (eventName, callback) => { - if (el.eventListenerList[eventName]) delete el.eventListenerList[eventName] + el.eventCount-- + removeEventListener.call(el, eventName, callback) } } @@ -198,20 +198,16 @@ describe('WaveSurfer basic tests', () => { cy.window().then((win) => { win.wavesurfer.destroy() - orignalMedia = document.createElement('audio') - attachMockListeners(orignalMedia) - - const waitForReady = new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: '#waveform', - url: '../../examples/audio/demo.wav', - media: orignalMedia - }) + const originalMedia = document.createElement('audio') + attachMockListeners(originalMedia) - win.wavesurfer.once('ready', () => resolve()) + win.wavesurfer = win.WaveSurfer.create({ + container: '#waveform', + url: '../../examples/audio/demo.wav', + media: originalMedia, }) - cy.wrap(waitForReady).then(done) + win.wavesurfer.once('ready', () => done()) }) }) @@ -226,15 +222,13 @@ describe('WaveSurfer basic tests', () => { it('should unsubscribe events from removed media element', () => { cy.window().then((win) => { + const originalMedia = win.wavesurfer.getMediaElement() const media = document.createElement('audio') - MEDIA_EVENTS.forEach((event) => { - expect(orignalMedia.eventListenerList[event]).to.exist - expect(orignalMedia.eventListenerList[event].length).to.equal(1) - }) + expect(originalMedia.eventCount).to.be.greaterThan(0) win.wavesurfer.setMediaElement(media) - expect(orignalMedia.eventListenerList).to.be.empty + expect(originalMedia.eventCount).to.equal(0) }) }) @@ -244,13 +238,9 @@ describe('WaveSurfer basic tests', () => { attachMockListeners(newMedia) win.wavesurfer.setMediaElement(newMedia) - MEDIA_EVENTS.forEach((event) => { - expect(newMedia.eventListenerList[event]).to.exist - expect(newMedia.eventListenerList[event].length).to.equal(1) - }) + expect(newMedia.eventCount).to.be.greaterThan(0) }) }) - }) it('should return 
true when calling isPlaying() after play()', (done) => { diff --git a/cypress/e2e/options.cy.js b/cypress/e2e/options.cy.js index 27546098a..1fc746ae5 100644 --- a/cypress/e2e/options.cy.js +++ b/cypress/e2e/options.cy.js @@ -8,579 +8,550 @@ describe('WaveSurfer options tests', () => { cy.window().its('WaveSurfer').should('exist') }) - it('should use minPxPerSec and hideScrollbar', () => { + it('should use minPxPerSec and hideScrollbar', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - minPxPerSec: 100, - hideScrollbar: true, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + minPxPerSec: 100, + hideScrollbar: true, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('minPxPerSec-hideScrollbar') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('minPxPerSec-hideScrollbar') + done() }) }) }) - it('should use barWidth', () => { + it('should use barWidth', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - barWidth: 3, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + barWidth: 3, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('barWidth') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('barWidth') + done() }) }) }) - it('should use all bar options', () => { + it('should use all bar options', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - barWidth: 4, - barGap: 3, - barRadius: 4, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + barWidth: 4, + barGap: 3, + barRadius: 4, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('bars') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('bars') + done() }) }) }) - it('should use barAlign=top to align the waveform vertically', () => { + it('should use barAlign=top to align the waveform vertically', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - barAlign: 'top', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + barAlign: 'top', + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('barAlign-top') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('barAlign-top') + done() }) }) }) - it('should use barAlign=bottom to align the waveform vertically', () => { + it('should use barAlign=bottom to align the waveform vertically', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - barAlign: 'bottom', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + barAlign: 'bottom', + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('barAlign-bottom') - resolve() - }) + wavesurfer.once('ready', () => 
{ + cy.get(id).matchImageSnapshot('barAlign-bottom') + done() }) }) }) - it('should use barAlign and barWidth together', () => { + it('should use barAlign and barWidth together', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - barAlign: 'bottom', - barWidth: 4, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + barAlign: 'bottom', + barWidth: 4, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('barAlign-barWidth') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('barAlign-barWidth') + done() }) }) }) - it('should use barHeight to scale the waveform vertically', () => { + it('should use barHeight to scale the waveform vertically', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - barHeight: 2, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + barHeight: 2, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('barHeight') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('barHeight') + done() }) }) }) - it('should use color options', () => { + it('should use color options', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - waveColor: 'red', - progressColor: 'green', - cursorColor: 'blue', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + waveColor: 'red', + progressColor: 'green', + cursorColor: 'blue', + }) - win.wavesurfer.once('ready', () => { - win.wavesurfer.setTime(10) - cy.wait(100) - cy.get(id).matchImageSnapshot('colors') - resolve() - }) + wavesurfer.once('ready', () => { + wavesurfer.setTime(10) + cy.wait(100) + cy.get(id).matchImageSnapshot('colors') + done() }) }) }) - it('should use gradient color options', () => { + it('should use gradient color options', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - waveColor: ['rgb(200, 165, 49)', 'rgb(211, 194, 138)', 'rgb(205, 124, 49)', 'rgb(205, 98, 49)'], - progressColor: 'rgba(0, 0, 0, 0.25)', - cursorColor: 'blue', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + waveColor: ['rgb(200, 165, 49)', 'rgb(211, 194, 138)', 'rgb(205, 124, 49)', 'rgb(205, 98, 49)'], + progressColor: 'rgba(0, 0, 0, 0.25)', + cursorColor: 'blue', + }) - win.wavesurfer.once('ready', () => { - win.wavesurfer.setTime(10) - cy.wait(100) - cy.snap - cy.get(id).matchImageSnapshot('colors-gradient') - resolve() - }) + wavesurfer.once('ready', () => { + wavesurfer.setTime(10) + cy.wait(100) + cy.snap + cy.get(id).matchImageSnapshot('colors-gradient') + done() }) }) }) - it('should use cursor options', () => { + it('should use cursor options', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - cursorColor: 'red', - cursorWidth: 4, - }) + const wavesurfer = win.WaveSurfer.create({ + 
container: id, + url: '../../examples/audio/demo.wav', + cursorColor: 'red', + cursorWidth: 4, + }) - win.wavesurfer.once('ready', () => { - win.wavesurfer.setTime(10) - cy.wait(100) - cy.get(id).matchImageSnapshot('cursor') - resolve() - }) + wavesurfer.once('ready', () => { + wavesurfer.setTime(10) + cy.wait(100) + cy.get(id).matchImageSnapshot('cursor') + done() }) }) }) - it('should not scroll with autoScroll false', () => { + it('should not scroll with autoScroll false', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - autoScroll: false, - minPxPerSec: 200, - hideScrollbar: true, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + autoScroll: false, + minPxPerSec: 200, + hideScrollbar: true, + }) - win.wavesurfer.once('ready', () => { - win.wavesurfer.setTime(10) - cy.wait(100) - cy.get(id).matchImageSnapshot('autoScroll-false') - resolve() - }) + wavesurfer.once('ready', () => { + wavesurfer.setTime(10) + cy.wait(100) + cy.get(id).matchImageSnapshot('autoScroll-false') + done() }) }) }) - it('should not scroll to center with autoCenter false', () => { + it('should not scroll to center with autoCenter false', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - autoCenter: false, - minPxPerSec: 200, - hideScrollbar: true, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + autoCenter: false, + minPxPerSec: 200, + hideScrollbar: true, + }) - win.wavesurfer.once('ready', () => { - win.wavesurfer.setTime(10) - cy.wait(100) - cy.get(id).matchImageSnapshot('autoCenter-false') - resolve() - }) + wavesurfer.once('ready', () => { + wavesurfer.setTime(10) + cy.wait(100) + cy.get(id).matchImageSnapshot('autoCenter-false') + done() }) }) }) - it('should use peaks', () => { + it('should use peaks', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - peaks: [ - [ - 0, 0.0023595101665705442, 0.012107174843549728, 0.005919494666159153, -0.31324470043182373, - 0.1511787623167038, 0.2473851442337036, 0.11443428695201874, -0.036057762801647186, -0.0968964695930481, - -0.03033737652003765, 0.10682467371225357, 0.23974689841270447, 0.013210971839725971, - -0.12377244979143143, 0.046145666390657425, -0.015757400542497635, 0.10884027928113937, - 0.06681904196739197, 0.09432944655418396, -0.17105795443058014, -0.023439358919858932, - -0.10380347073078156, 0.0034454423002898693, 0.08061369508504868, 0.026129156351089478, - 0.18730352818965912, 0.020447958260774612, -0.15030759572982788, 0.05689578503370285, - -0.0009095853311009705, 0.2749626338481903, 0.2565386891365051, 0.07571295648813248, 0.10791446268558502, - -0.06575305759906769, 0.15336275100708008, 0.07056761533021927, 0.03287476301193237, -0.09044631570577621, - 0.01777501218020916, -0.04906218498945236, -0.04756792634725571, -0.006875281687825918, - 0.04520256072282791, -0.02362387254834175, -0.0668797641992569, 0.12266506254673004, -0.10895221680402756, - 0.03791835159063339, -0.0195105392485857, -0.031097881495952606, 0.04252675920724869, - -0.09187793731689453, 0.0829525887966156, -0.003812957089394331, 0.0431736595928669, 0.07634212076663971, - 
-0.05335947126150131, 0.0345163568854332, -0.049201950430870056, 0.02300390601158142, - 0.007677287794649601, 0.015354577451944351, 0.007677287794649601, 0.007677288725972176, - ], + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + peaks: [ + [ + 0, 0.0023595101665705442, 0.012107174843549728, 0.005919494666159153, -0.31324470043182373, + 0.1511787623167038, 0.2473851442337036, 0.11443428695201874, -0.036057762801647186, -0.0968964695930481, + -0.03033737652003765, 0.10682467371225357, 0.23974689841270447, 0.013210971839725971, -0.12377244979143143, + 0.046145666390657425, -0.015757400542497635, 0.10884027928113937, 0.06681904196739197, 0.09432944655418396, + -0.17105795443058014, -0.023439358919858932, -0.10380347073078156, 0.0034454423002898693, + 0.08061369508504868, 0.026129156351089478, 0.18730352818965912, 0.020447958260774612, -0.15030759572982788, + 0.05689578503370285, -0.0009095853311009705, 0.2749626338481903, 0.2565386891365051, 0.07571295648813248, + 0.10791446268558502, -0.06575305759906769, 0.15336275100708008, 0.07056761533021927, 0.03287476301193237, + -0.09044631570577621, 0.01777501218020916, -0.04906218498945236, -0.04756792634725571, + -0.006875281687825918, 0.04520256072282791, -0.02362387254834175, -0.0668797641992569, 0.12266506254673004, + -0.10895221680402756, 0.03791835159063339, -0.0195105392485857, -0.031097881495952606, 0.04252675920724869, + -0.09187793731689453, 0.0829525887966156, -0.003812957089394331, 0.0431736595928669, 0.07634212076663971, + -0.05335947126150131, 0.0345163568854332, -0.049201950430870056, 0.02300390601158142, 0.007677287794649601, + 0.015354577451944351, 0.007677287794649601, 0.007677288725972176, ], - }) + ], + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('peaks') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('peaks') + done() }) }) }) - it('should use external media', () => { + it('should use external media', (done) => { cy.window().then((win) => { const audio = new Audio('../../examples/audio/demo.wav') - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - media: audio, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + media: audio, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('media') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('media') + done() }) }) }) - it('should split channels', () => { + it('should split channels', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/stereo.mp3', - splitChannels: true, - waveColor: 'rgb(200, 0, 200)', - progressColor: 'rgb(100, 0, 100)', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/stereo.mp3', + splitChannels: true, + waveColor: 'rgb(200, 0, 200)', + progressColor: 'rgb(100, 0, 100)', + }) - win.wavesurfer.once('ready', () => { - win.wavesurfer.setTime(2) - cy.wait(100) - cy.get(id).matchImageSnapshot('split-channels') - resolve() - }) + wavesurfer.once('ready', () => { + wavesurfer.setTime(2) + cy.wait(100) + cy.get(id).matchImageSnapshot('split-channels') + done() }) }) }) - it('should split channels with options', () => { + it('should split channels with options', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ 
- container: id, - url: '../../examples/audio/stereo.mp3', - splitChannels: [ - { - waveColor: 'rgb(200, 0, 200)', - progressColor: 'rgb(100, 0, 100)', - }, - { - waveColor: 'rgb(0, 200, 200)', - progressColor: 'rgb(0, 100, 100)', - }, - ], - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/stereo.mp3', + splitChannels: [ + { + waveColor: 'rgb(200, 0, 200)', + progressColor: 'rgb(100, 0, 100)', + }, + { + waveColor: 'rgb(0, 200, 200)', + progressColor: 'rgb(0, 100, 100)', + }, + ], + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('split-channels-options') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('split-channels-options') + done() }) }) }) - it('should use plugins with Regions', () => { + it('should use plugins with Regions', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - const regions = win.Regions.create() - - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - plugins: [regions], - }) + const regions = win.Regions.create() - win.wavesurfer.once('ready', () => { - regions.addRegion({ - start: 1, - end: 3, - color: 'rgba(255, 0, 0, 0.1)', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + plugins: [regions], + }) - cy.get(id).matchImageSnapshot('plugins-regions') - resolve() + wavesurfer.once('ready', () => { + regions.addRegion({ + start: 1, + end: 3, + color: 'rgba(255, 0, 0, 0.1)', }) + + cy.get(id).matchImageSnapshot('plugins-regions') + done() }) }) }) - it('should use two plugins: Regions and Timeline', () => { + it('should use two plugins: Regions and Timeline', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - const regions = win.Regions.create() + const regions = win.Regions.create() - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - plugins: [regions, win.Timeline.create()], - }) - - win.wavesurfer.once('ready', () => { - regions.addRegion({ - start: 1, - end: 3, - color: 'rgba(255, 0, 0, 0.1)', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + plugins: [regions, win.Timeline.create()], + }) - cy.get(id).matchImageSnapshot('plugins-regions-timeline') - resolve() + wavesurfer.once('ready', () => { + regions.addRegion({ + start: 1, + end: 3, + color: 'rgba(255, 0, 0, 0.1)', }) + + cy.get(id).matchImageSnapshot('plugins-regions-timeline') + done() }) }) }) - it('should normalize', () => { + it('should normalize', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - normalize: true, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + normalize: true, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('normalize') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('normalize') + done() }) }) }) - it('should use height', () => { + it('should use height', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - height: 10, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + height: 10, + }) - 
win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('height-10') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('height-10') + done() }) }) }) - it('should use parent height if height is auto', () => { + it('should use parent height if height is auto', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.document.querySelector(id).style.height = '200px' + win.document.querySelector(id).style.height = '200px' - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - height: 'auto', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + height: 'auto', + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('height-auto') - win.document.querySelector(id).style.height = '' - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('height-auto') + win.document.querySelector(id).style.height = '' + done() }) }) }) - it('should fall back to 128 if container height is not set', () => { + it('should fall back to 128 if container height is not set', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - height: 'auto', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + height: 'auto', + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('height-auto-0') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('height-auto-0') + done() }) }) }) - it('should use a custom rendering function', () => { + it('should use a custom rendering function', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - renderFunction: (channels, ctx) => { - const { width, height } = ctx.canvas - const scale = channels[0].length / width - const step = 10 - - ctx.translate(0, height / 2) - ctx.strokeStyle = ctx.fillStyle - ctx.beginPath() - - for (let i = 0; i < width; i += step * 2) { - const index = Math.floor(i * scale) - const value = Math.abs(channels[0][index]) - let x = i - let y = value * height - - ctx.moveTo(x, 0) - ctx.lineTo(x, y) - ctx.arc(x + step / 2, y, step / 2, Math.PI, 0, true) - ctx.lineTo(x + step, 0) - - x = x + step - y = -y - ctx.moveTo(x, 0) - ctx.lineTo(x, y) - ctx.arc(x + step / 2, y, step / 2, Math.PI, 0, false) - ctx.lineTo(x + step, 0) - } - - ctx.stroke() - ctx.closePath() - }, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + renderFunction: (channels, ctx) => { + const { width, height } = ctx.canvas + const scale = channels[0].length / width + const step = 10 + + ctx.translate(0, height / 2) + ctx.strokeStyle = ctx.fillStyle + ctx.beginPath() + + for (let i = 0; i < width; i += step * 2) { + const index = Math.floor(i * scale) + const value = Math.abs(channels[0][index]) + let x = i + let y = value * height + + ctx.moveTo(x, 0) + ctx.lineTo(x, y) + ctx.arc(x + step / 2, y, step / 2, Math.PI, 0, true) + ctx.lineTo(x + step, 0) + + x = x + step + y = -y + ctx.moveTo(x, 0) + ctx.lineTo(x, y) + ctx.arc(x + step / 2, y, step / 2, Math.PI, 0, false) + ctx.lineTo(x + step, 0) + } + + ctx.stroke() + ctx.closePath() + }, + }) - win.wavesurfer.once('ready', () => { - 
cy.get(id).matchImageSnapshot('custom-render') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('custom-render') + done() }) }) }) - it('should pass custom parameters to fetch', () => { + it('should pass custom parameters to fetch', (done) => { cy.window().then((win) => { - return new Promise((resolve) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - fetchParams: { - headers: { - 'X-Custom-Header': 'foo', - }, + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + fetchParams: { + headers: { + 'X-Custom-Header': 'foo', }, - }) + }, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('fetch-options') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('fetch-options') + done() }) }) }) - it('should remount the container when set via setOptions', () => { + it('should remount the container when set via setOptions', (done) => { cy.window().then((win) => { - return new Promise((resolve, reject) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - barWidth: 4, - barGap: 3, - barRadius: 4, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + barWidth: 4, + barGap: 3, + barRadius: 4, + }) - win.wavesurfer.once('ready', () => { - win.wavesurfer.setOptions({ container: otherId }) - cy.get(id).children().should('have.length', 0) - cy.get(otherId).children().should('have.length', 1) - cy.get(otherId).matchImageSnapshot('bars') - resolve() - }) + wavesurfer.once('ready', () => { + wavesurfer.setOptions({ container: otherId }) + cy.get(id).children().should('have.length', 0) + cy.get(otherId).children().should('have.length', 1) + cy.get(otherId).matchImageSnapshot('bars') + done() }) }) }) - it('should accept a numeric width option', () => { + it('should accept a numeric width option', (done) => { cy.window().then((win) => { - return new Promise((resolve, reject) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - width: 100, - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + width: 100, + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('width-100') - win.wavesurfer.setOptions({ width: 300 }) - cy.get(id).matchImageSnapshot('width-300') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('width-100') + wavesurfer.setOptions({ width: 300 }) + cy.get(id).matchImageSnapshot('width-300') + done() }) }) }) - it('should accept a CSS value for the width option', () => { + it('should accept a CSS value for the width option', (done) => { cy.window().then((win) => { - return new Promise((resolve, reject) => { - win.wavesurfer = win.WaveSurfer.create({ - container: id, - url: '../../examples/audio/demo.wav', - width: '10rem', - }) + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + width: '10rem', + }) - win.wavesurfer.once('ready', () => { - cy.get(id).matchImageSnapshot('width-10rem') - win.wavesurfer.setOptions({ width: '200px' }) - cy.get(id).matchImageSnapshot('width-200px') - resolve() - }) + wavesurfer.once('ready', () => { + cy.get(id).matchImageSnapshot('width-10rem') + wavesurfer.setOptions({ width: '200px' }) + cy.get(id).matchImageSnapshot('width-200px') + done() + }) + }) + }) + + 
it('should support Web Audio playback', (done) => { + cy.window().then((win) => { + const wavesurfer = win.WaveSurfer.create({ + container: id, + url: '../../examples/audio/demo.wav', + backend: 'WebAudio', + }) + + wavesurfer.once('ready', () => { + expect(wavesurfer.getDuration().toFixed(2)).to.equal('21.77') + wavesurfer.setTime(10) + expect(wavesurfer.getCurrentTime().toFixed(2)).to.equal('10.00') + wavesurfer.setTime(21.6) + wavesurfer.play() + }) + + wavesurfer.on('timeupdate', () => { + console.log(wavesurfer.getCurrentTime()) + }) + + wavesurfer.once('finish', () => { + done() }) }) }) diff --git a/examples/basic.js b/examples/basic.js index 26912818d..f82c32aff 100644 --- a/examples/basic.js +++ b/examples/basic.js @@ -1,4 +1,4 @@ -// A super-basic example +// A basic example import WaveSurfer from 'https://unpkg.com/wavesurfer.js@7/dist/wavesurfer.esm.js' @@ -9,6 +9,6 @@ const wavesurfer = WaveSurfer.create({ url: '/examples/audio/audio.wav', }) -wavesurfer.once('interaction', () => { +wavesurfer.on('click', () => { wavesurfer.play() }) diff --git a/examples/zoom-plugin.js b/examples/zoom-plugin.js index d203e35fb..3d9579450 100644 --- a/examples/zoom-plugin.js +++ b/examples/zoom-plugin.js @@ -17,10 +17,12 @@ const wavesurfer = WaveSurfer.create({ }) // Initialize the Zoom plugin -wavesurfer.registerPlugin(ZoomPlugin.create({ - // the amount of zoom per wheel step, e.g. 0.1 means a 10% magnification per scroll - scale : 0.2 -})) +wavesurfer.registerPlugin( + ZoomPlugin.create({ + // the amount of zoom per wheel step, e.g. 0.1 means a 10% magnification per scroll + scale: 0.2, + }), +) // show the current minPxPerSec value const minPxPerSecSpan = document.querySelector('#minPxPerSec') @@ -40,9 +42,6 @@ wavesurfer.on('zoom', (minPxPerSec) => { * */ - - - // A few more controls /* @@ -59,7 +58,6 @@ const playButton = document.querySelector('#play') const forwardButton = document.querySelector('#forward') const backButton = document.querySelector('#backward') - playButton.onclick = () => { wavesurfer.playPause() } diff --git a/src/event-emitter.ts b/src/event-emitter.ts index f324bab3b..f3ab3d820 100644 --- a/src/event-emitter.ts +++ b/src/event-emitter.ts @@ -1,7 +1,7 @@ export type GeneralEventTypes = { // the name of the event and the data it dispatches with // e.g. 'entryCreated': [count: 1] - [EventName: string]: any[] // eslint-disable-line @typescript-eslint/no-explicit-any + [EventName: string]: unknown[] // eslint-disable-line @typescript-eslint/no-explicit-any } type EventListener = ( @@ -16,48 +16,48 @@ type EventMap = { class EventEmitter { private listeners = {} as EventMap - /** Subscribe to an event. Returns an unsubscribe function. 
*/ - public on( - eventName: EventName, + /** Add an event listener */ + public addEventListener( + event: EventName, listener: EventListener, + options?: { once?: boolean }, ): () => void { - if (!this.listeners[eventName]) { - this.listeners[eventName] = new Set() + if (!this.listeners[event]) { + this.listeners[event] = new Set() } - this.listeners[eventName].add(listener) + this.listeners[event].add(listener) - return () => this.un(eventName, listener) + if (options?.once) { + const unsubscribeOnce = () => { + this.removeEventListener(event, unsubscribeOnce) + this.removeEventListener(event, listener) + } + this.addEventListener(event, unsubscribeOnce) + return unsubscribeOnce + } + + return () => this.removeEventListener(event, listener) } - /** Subscribe to an event only once */ - public once( - eventName: EventName, + public removeEventListener( + event: EventName, listener: EventListener, - ): () => void { - // The actual subscription - const unsubscribe = this.on(eventName, listener) - - // Another subscription that will unsubscribe the actual subscription and itself after the first event - const unsubscribeOnce = this.on(eventName, () => { - unsubscribe() - unsubscribeOnce() - }) - - return unsubscribe + ): void { + this.listeners[event]?.delete(listener) } + /** Subscribe to an event. Returns an unsubscribe function. */ + public on = this.addEventListener + /** Unsubscribe from an event */ - public un( - eventName: EventName, + public un = this.removeEventListener + + /** Subscribe to an event only once */ + public once( + event: EventName, listener: EventListener, - ): void { - if (this.listeners[eventName]) { - if (listener) { - this.listeners[eventName].delete(listener) - } else { - delete this.listeners[eventName] - } - } + ): () => void { + return this.on(event, listener, { once: true }) } /** Clear all events */ diff --git a/src/wavesurfer.ts b/src/wavesurfer.ts index aaf335356..cc266254d 100644 --- a/src/wavesurfer.ts +++ b/src/wavesurfer.ts @@ -4,6 +4,7 @@ import Fetcher from './fetcher.js' import Player from './player.js' import Renderer from './renderer.js' import Timer from './timer.js' +import WebAudioPlayer from './webaudio.js' export type WaveSurferOptions = { /** Required: an HTML element or selector where the waveform will be rendered */ @@ -70,6 +71,8 @@ export type WaveSurferOptions = { renderFunction?: (peaks: Array, ctx: CanvasRenderingContext2D) => void /** Options to pass to the fetch method */ fetchParams?: RequestInit + /** Playback "backend" to use, defaults to MediaElement */ + backend: 'WebAudio' | 'MediaElement' } const defaultOptions = { @@ -140,8 +143,12 @@ class WaveSurfer extends Player { /** Create a new WaveSurfer instance */ constructor(options: WaveSurferOptions) { + const media = + options.media || + (options.backend === 'WebAudio' ? (new WebAudioPlayer() as unknown as HTMLAudioElement) : undefined) + super({ - media: options.media, + media, mediaControls: options.mediaControls, autoplay: options.autoplay, playbackRate: options.audioRate, @@ -150,7 +157,7 @@ class WaveSurfer extends Player { this.options = Object.assign({}, defaultOptions, options) this.timer = new Timer() - const audioElement = !options.media ? this.getMediaElement() : undefined + const audioElement = media ? 
undefined : this.getMediaElement() this.renderer = new Renderer(this.options, audioElement) this.initPlayerEvents() @@ -159,7 +166,7 @@ class WaveSurfer extends Player { this.initPlugins() // Load audio if URL is passed or an external media with an src - const url = this.options.url || this.options.media?.currentSrc || this.options.media?.src + const url = this.options.url || this.getSrc() if (url) { this.load(url, this.options.peaks, this.options.duration) } @@ -335,17 +342,17 @@ class WaveSurfer extends Player { // Set the mediaelement source this.setSrc(url, blob) + // Wait for the audio duration + // It should be a promise to allow event listeners to subscribe to the ready and decode events + duration = + (await Promise.resolve(duration || this.getDuration())) || + (await new Promise((resolve) => { + this.onceMediaEvent('loadedmetadata', () => resolve(this.getDuration())) + })) || + (await Promise.resolve(0)) + // Decode the audio data or use user-provided peaks if (channelData) { - // Wait for the audio duration - // It should be a promise to allow event listeners to subscribe to the ready and decode events - duration = - (await Promise.resolve(duration || this.getDuration())) || - (await new Promise((resolve) => { - this.onceMediaEvent('loadedmetadata', () => resolve(this.getDuration())) - })) || - (await Promise.resolve(0)) - this.decodedData = Decoder.createBuffer(channelData, duration) } else if (blob) { const arrayBuffer = await blob.arrayBuffer() diff --git a/src/webaudio.ts b/src/webaudio.ts new file mode 100644 index 000000000..0aa7ff0a1 --- /dev/null +++ b/src/webaudio.ts @@ -0,0 +1,166 @@ +import EventEmitter from './event-emitter.js' + +type WebAudioPlayerEvents = { + loadedmetadata: [] + canplay: [] + play: [] + pause: [] + seeking: [] + timeupdate: [] + volumechange: [] + emptied: [] + ended: [] +} + +/** + * A Web Audio buffer player emulating the behavior of an HTML5 Audio element. 
+ */ +class WebAudioPlayer extends EventEmitter { + private audioContext: AudioContext + private gainNode: GainNode + private bufferNode: AudioBufferSourceNode | null = null + private autoplay = false + private playStartTime = 0 + private playedDuration = 0 + private _muted = false + private buffer: AudioBuffer | null = null + public currentSrc = '' + public paused = true + public crossOrigin: string | null = null + + constructor(audioContext = new AudioContext()) { + super() + this.audioContext = audioContext + this.gainNode = this.audioContext.createGain() + this.gainNode.connect(this.audioContext.destination) + } + + async load() { + return + } + + get src() { + return this.currentSrc + } + + set src(value: string) { + this.currentSrc = value + + fetch(value) + .then((response) => response.arrayBuffer()) + .then((arrayBuffer) => this.audioContext.decodeAudioData(arrayBuffer)) + .then((audioBuffer) => { + this.buffer = audioBuffer + + this.emit('loadedmetadata') + this.emit('canplay') + + if (this.autoplay) this.play() + }) + } + + private _play() { + if (!this.paused) return + this.paused = false + + this.bufferNode?.disconnect() + this.bufferNode = this.audioContext.createBufferSource() + this.bufferNode.buffer = this.buffer + this.bufferNode.connect(this.gainNode) + + if (this.playedDuration >= this.duration) { + this.playedDuration = 0 + } + + this.bufferNode.start(this.audioContext.currentTime, this.playedDuration) + this.playStartTime = this.audioContext.currentTime + + this.bufferNode.onended = () => { + if (this.currentTime >= this.duration) { + this.pause() + this.emit('ended') + } + } + } + + private _pause() { + if (this.paused) return + this.paused = true + this.bufferNode?.stop() + this.playedDuration += this.audioContext.currentTime - this.playStartTime + } + + async play() { + this._play() + this.emit('play') + } + + pause() { + this._pause() + this.emit('pause') + } + + async setSinkId(deviceId: string) { + const ac = this.audioContext as AudioContext & { setSinkId: (id: string) => Promise } + return ac.setSinkId(deviceId) + } + + get playbackRate() { + return this.bufferNode?.playbackRate.value ?? 1 + } + set playbackRate(value) { + if (this.bufferNode) { + this.bufferNode.playbackRate.value = value + } + } + + get currentTime() { + return this.paused ? this.playedDuration : this.playedDuration + this.audioContext.currentTime - this.playStartTime + } + set currentTime(value) { + this.emit('seeking') + + if (this.paused) { + this.playedDuration = value + } else { + this._pause() + this.playedDuration = value + this._play() + } + + this.emit('timeupdate') + } + + get duration() { + return this.buffer?.duration || 0 + } + + get volume() { + return this.gainNode.gain.value + } + set volume(value) { + this.gainNode.gain.value = value + this.emit('volumechange') + } + + get muted() { + return this._muted + } + set muted(value: boolean) { + if (this._muted === value) return + this._muted = value + + if (this._muted) { + this.gainNode.disconnect() + } else { + this.gainNode.connect(this.audioContext.destination) + } + } + + /** Get the GainNode used to play the audio. Can be used to attach filters. */ + public getGainNode(): GainNode { + return this.gainNode + } +} + +export default WebAudioPlayer
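
Usage sketch of the restored option, mirroring the new Cypress test and the updated `examples/basic.js` above; the container selector and audio path are illustrative assumptions rather than part of the patch:

```js
import WaveSurfer from 'https://unpkg.com/wavesurfer.js@7/dist/wavesurfer.esm.js'

const wavesurfer = WaveSurfer.create({
  container: '#waveform', // assumed container id
  url: '/examples/audio/demo.wav', // assumed path, as in the Cypress tests
  backend: 'WebAudio', // play through the new WebAudioPlayer instead of an HTML5 media element
})

wavesurfer.once('ready', () => {
  console.log('Duration:', wavesurfer.getDuration())
})

// Start playback on click, as in the updated examples/basic.js
wavesurfer.on('click', () => {
  wavesurfer.play()
})
```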
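With `getFilters`/`setFilter` gone, the README now points to the Web Audio example instead; the new `getGainNode()` on `WebAudioPlayer` is the hook for that. The routing below is a hedged sketch, not part of the patch — it assumes `getMediaElement()` returns the `WebAudioPlayer` when `backend: 'WebAudio'` is set, which is how the constructor wires it in:

```js
// Continues from the wavesurfer instance in the previous snippet
const player = wavesurfer.getMediaElement() // the WebAudioPlayer when backend is 'WebAudio'
const gainNode = player.getGainNode()
const audioContext = gainNode.context

// Route the output through a lowpass filter instead of straight to the destination
const filter = audioContext.createBiquadFilter()
filter.type = 'lowpass'
filter.frequency.value = 1000

gainNode.disconnect()
gainNode.connect(filter)
filter.connect(audioContext.destination)
```

Note that toggling `muted` reconnects the gain node straight to the destination, which would bypass any filter inserted this way.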
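`WebAudioPlayer` emulates just enough of the `HTMLAudioElement` surface (`src`, `play`/`pause`, `currentTime`, `volume`, and the media events) for `Player` to drive it unchanged. A standalone sketch of that surface, assuming the class is imported from the new `src/webaudio.ts` — not a pattern the patch itself demonstrates:

```js
import WebAudioPlayer from './webaudio.js' // path as added by this patch

// Optionally pass your own AudioContext; the constructor creates one by default
const player = new WebAudioPlayer(new AudioContext())

// addEventListener/removeEventListener come from the reworked EventEmitter,
// including the new { once: true } option
player.addEventListener('canplay', () => player.play(), { once: true })
player.addEventListener('ended', () => console.log('playback finished'))

// Setting src fetches and decodes the audio, then emits 'loadedmetadata' and 'canplay'
player.src = '/examples/audio/demo.wav' // assumed asset path
player.volume = 0.5
```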