diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 00000000..16e0c72e --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,14 @@ +{ + "permissions": { + "allow": [ + "Bash(turbo run:*)", + "Bash(npm run check-types)", + "Bash(npm run lint)", + "Bash(npm run fix)", + "Bash(npm run test:*)", + "Bash(pnpm install)", + "Bash(npm i -g pnpm*)" + ], + "deny": [] + } +} \ No newline at end of file diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 64a3e5b1..4f059d28 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - pull-requests: read + pull-requests: write issues: read id-token: write actions: read # Required for Claude to read CI results on PRs @@ -53,10 +53,20 @@ jobs: # allowed_tools: "Bash(npm install),Bash(npm run build),Bash(npm run test:*),Bash(npm run lint:*)" # Optional: Add custom instructions for Claude to customize its behavior for your project - # custom_instructions: | - # Follow our coding standards - # Ensure all new code has tests - # Use TypeScript for new files + custom_instructions: | + If reviewing the code, please first and foremost review the correctness of the feature to what it's trying to accomplish. + Also look for bugs and security issues. Be concise with your entire response. + Thanks! + + If developing code, please use `turbo run check-types`, `turbo run test`, `turbo run lint`, `turbo run fix` to ensure your code is correct. Or `npm run *` in the respective package. + If checks fail, make minimal edits and try again. Keep going until they pass unless you're blocked on something that you cannot solve. + Keep commits small & descriptive. Do NOT push until all your commits are done. The CI job will push only after checks pass. + + To set up pnpm: + npm i -g pnpm@10.15.0 + pnpm install + + Thanks! 
# Optional: Custom environment variables for Claude # claude_env: | diff --git a/packages/jamtools/core/modules/audio_io/audio_io_example.tsx b/packages/jamtools/core/modules/audio_io/audio_io_example.tsx new file mode 100644 index 00000000..4cb5d3e5 --- /dev/null +++ b/packages/jamtools/core/modules/audio_io/audio_io_example.tsx @@ -0,0 +1,191 @@ +import React, { useState, useEffect } from 'react'; +import springboard from 'springboard'; +import {WebAudioModule} from '@jamtools/core/types/audio_io_types'; + +// Import the audio IO module +import './audio_io_module'; + +springboard.registerModule('AudioIOExample', {}, async (moduleAPI) => { + const audioIOModule = moduleAPI.deps.module.moduleRegistry.getModule('audio_io'); + + // Create shared state for WAM instances + const wamInstancesState = await moduleAPI.statesAPI.createSharedState('wamInstances', []); + const masterVolumeState = await moduleAPI.statesAPI.createSharedState('masterVolume', 0.8); + + // Subscribe to WAM instance changes + audioIOModule.wamInstancesSubject.subscribe((instances: WebAudioModule[]) => { + wamInstancesState.setState(instances); + }); + + const ExampleComponent = () => { + const wamInstances = wamInstancesState.useState(); + const masterVolume = masterVolumeState.useState(); + const [isAudioInitialized, setIsAudioInitialized] = useState(false); + + useEffect(() => { + // Initialize audio on component mount + audioIOModule.ensureAudioInitialized().then(() => { + setIsAudioInitialized(true); + }); + }, []); + + const createSynth = async () => { + try { + const synth = await audioIOModule.instantiateWAM('com.jamtools.oscillator-synth', `synth-${Date.now()}`); + console.log('Created synthesizer:', synth.instanceId); + + // Connect to master output + const masterGain = audioIOModule.getMasterGainNode(); + if (masterGain) { + synth.audioNode.connect(masterGain); + } + } catch (error) { + console.error('Failed to create synthesizer:', error); + } + }; + + const createDelay = async () => { + try { + const delay = await audioIOModule.instantiateWAM('com.jamtools.delay', `delay-${Date.now()}`); + console.log('Created delay effect:', delay.instanceId); + + // Connect to master output + const masterGain = audioIOModule.getMasterGainNode(); + if (masterGain) { + delay.audioNode.connect(masterGain); + } + } catch (error) { + console.error('Failed to create delay:', error); + } + }; + + const createAnalyzer = async () => { + try { + const analyzer = await audioIOModule.instantiateWAM('com.jamtools.spectrum-analyzer', `analyzer-${Date.now()}`); + console.log('Created spectrum analyzer:', analyzer.instanceId); + + // Analyzer is typically inserted in the signal chain, not connected to output + console.log('Analyzer created - connect it between other WAMs for visualization'); + } catch (error) { + console.error('Failed to create analyzer:', error); + } + }; + + const destroyWAM = async (instanceId: string) => { + try { + await audioIOModule.destroyWAMInstance(instanceId); + console.log('Destroyed WAM:', instanceId); + } catch (error) { + console.error('Failed to destroy WAM:', error); + } + }; + + const handleVolumeChange = (event: React.ChangeEvent) => { + const volume = parseFloat(event.target.value); + audioIOModule.setMasterVolume(volume); + masterVolumeState.setState(volume); + }; + + const playTestNote = async () => { + // Find a synthesizer instance and play a test note + const synth = wamInstances.find(wam => wam.moduleId === 'com.jamtools.oscillator-synth'); + if (synth && synth.onMidi) { + // Play middle C (note 60) 
+                const noteOnData = new Uint8Array([0x90, 60, 100]); // Note on, middle C, velocity 100
+                synth.onMidi(noteOnData);
+
+                // Stop after 1 second
+                setTimeout(() => {
+                    const noteOffData = new Uint8Array([0x80, 60, 0]); // Note off
+                    synth.onMidi?.(noteOffData);
+                }, 1000);
+            } else {
+                alert('Create a synthesizer first!');
+            }
+        };
+
+        return (
+            <div>
+                <h1>Audio IO Module Example</h1>
+
+                <div>
+                    <p>Status: {isAudioInitialized ? '✅ Audio Initialized' : '⏳ Initializing...'}</p>
+                    <p>Active WAM Instances: {wamInstances.length}</p>
+                </div>
+
+                <div>
+                    <label>
+                        Master Volume:
+                        <input type="range" min="0" max="1" step="0.01" value={masterVolume} onChange={handleVolumeChange}/>
+                    </label>
+                </div>
+
+                <div>
+                    <h2>Create WAM Instances</h2>
+                    <button onClick={createSynth}>Create Synthesizer</button>
+                    <button onClick={createDelay}>Create Delay</button>
+                    <button onClick={createAnalyzer}>Create Analyzer</button>
+                </div>
+
+                <div>
+                    <h2>Test Audio</h2>
+                    <button onClick={playTestNote}>Play Test Note</button>
+                </div>
+
+                <div>
+                    <h2>Active WAM Instances</h2>
+                    {wamInstances.length === 0 ? (
+                        <p>No WAM instances created yet.</p>
+                    ) : (
+                        <ul>
+                            {wamInstances.map((wam: WebAudioModule) => (
+                                <li key={wam.instanceId}>
+                                    <strong>{wam.name}</strong> ({wam.instanceId})
+                                    <br/>
+                                    Module: {wam.moduleId}
+                                    <br/>
+                                    <button onClick={() => destroyWAM(wam.instanceId)}>Destroy</button>
+                                </li>
+                            ))}
+                        </ul>
+                    )}
+                </div>
+
+                <div>
+                    <h2>How to Use</h2>
+                    <ol>
+                        <li>Click "Create Synthesizer" to create an audio synthesizer</li>
+                        <li>Click "Play Test Note" to hear a middle C note</li>
+                        <li>Adjust the master volume slider</li>
+                        <li>Create delay effects and spectrum analyzers to enhance the audio</li>
+                        <li>The module automatically integrates with MIDI input devices</li>
+                    </ol>
+                </div>
+            </div>
+ ); + }; + + moduleAPI.registerRoute('audio-io-example', {}, ExampleComponent); +}); \ No newline at end of file diff --git a/packages/jamtools/core/modules/audio_io/audio_io_module.spec.ts b/packages/jamtools/core/modules/audio_io/audio_io_module.spec.ts new file mode 100644 index 00000000..d7ea2e1e --- /dev/null +++ b/packages/jamtools/core/modules/audio_io/audio_io_module.spec.ts @@ -0,0 +1,146 @@ +import {describe, it, expect, beforeEach} from 'vitest'; +import {AudioIOModule, setAudioIODependencyCreator} from './audio_io_module'; +import {MockAudioService} from '@jamtools/core/test/services/mock_audio_service'; +import {MockWAMRegistryService} from '@jamtools/core/test/services/mock_wam_registry_service'; +import {Subject} from 'rxjs'; + +describe('AudioIOModule', () => { + let audioIOModule: AudioIOModule; + let mockCoreDeps: any; + let mockModDeps: any; + let mockModuleAPI: any; + + beforeEach(() => { + // Set up mock dependencies + setAudioIODependencyCreator(async () => ({ + audio: new MockAudioService(), + wamRegistry: new MockWAMRegistryService(), + })); + + mockCoreDeps = { + modules: { + io: { + midiInputSubject: new Subject(), + }, + }, + }; + + mockModDeps = { + toast: () => {}, + }; + + mockModuleAPI = { + statesAPI: { + createSharedState: async (name: string, initialState: any) => ({ + getState: () => initialState, + setState: (state: any) => { + Object.assign(initialState, state); + }, + useState: () => initialState, + }), + }, + }; + + audioIOModule = new AudioIOModule(mockCoreDeps, mockModDeps); + }); + + it('should initialize with correct module ID', () => { + expect(audioIOModule.moduleId).toBe('audio_io'); + }); + + it('should initialize with default state', () => { + expect(audioIOModule.state).toEqual({ + audioContext: null, + wamInstances: [], + isAudioInitialized: false, + masterVolume: 0.8, + }); + }); + + it('should initialize audio IO dependencies', async () => { + await audioIOModule.initialize(mockModuleAPI); + expect(audioIOModule['audioIODeps']).toBeDefined(); + expect(audioIOModule['audioIODeps'].audio).toBeInstanceOf(MockAudioService); + expect(audioIOModule['audioIODeps'].wamRegistry).toBeInstanceOf(MockWAMRegistryService); + }); + + it('should ensure audio initialization', async () => { + await audioIOModule.initialize(mockModuleAPI); + await audioIOModule.ensureAudioInitialized(); + expect(audioIOModule['isAudioInitialized']).toBe(true); + }); + + it('should set master volume', async () => { + await audioIOModule.initialize(mockModuleAPI); + audioIOModule.setMasterVolume(0.5); + + const state = audioIOModule['audioIOState'].getState(); + expect(state.masterVolume).toBe(0.5); + }); + + it('should instantiate WAM', async () => { + await audioIOModule.initialize(mockModuleAPI); + + const wam = await audioIOModule.instantiateWAM('com.jamtools.oscillator-synth', 'test-synth'); + expect(wam).toBeDefined(); + expect(wam.moduleId).toBe('com.jamtools.oscillator-synth'); + expect(wam.instanceId).toBe('test-synth'); + }); + + it('should destroy WAM instance', async () => { + await audioIOModule.initialize(mockModuleAPI); + + await audioIOModule.instantiateWAM('com.jamtools.oscillator-synth', 'test-synth'); + await audioIOModule.destroyWAMInstance('test-synth'); + + const instance = audioIOModule.getWAMInstance('test-synth'); + expect(instance).toBeNull(); + }); + + it('should get registered WAMs', async () => { + await audioIOModule.initialize(mockModuleAPI); + + const registeredWAMs = audioIOModule.getRegisteredWAMs(); + 
expect(registeredWAMs.length).toBeGreaterThan(0); + + const synthWAM = registeredWAMs.find(wam => wam.moduleId === 'com.jamtools.oscillator-synth'); + expect(synthWAM).toBeDefined(); + expect(synthWAM?.name).toBe('Mock Oscillator Synthesizer'); + }); + + it('should handle MIDI input', async () => { + await audioIOModule.initialize(mockModuleAPI); + + const wam = await audioIOModule.instantiateWAM('com.jamtools.oscillator-synth', 'test-synth'); + + // Mock MIDI event + const midiEvent = { + type: 'noteon', + number: 60, + velocity: 100, + channel: 0, + }; + + // Trigger MIDI input + mockCoreDeps.modules.io.midiInputSubject.next(midiEvent); + + // WAM should receive MIDI data (tested in mock implementation) + expect(wam.onMidi).toBeDefined(); + }); + + it('should convert MIDI events to bytes correctly', () => { + const convertMidiEventToBytes = audioIOModule['convertMidiEventToBytes']; + + const noteOnEvent = {type: 'noteon', number: 60, velocity: 100, channel: 0}; + const noteOnBytes = convertMidiEventToBytes(noteOnEvent); + expect(Array.from(noteOnBytes)).toEqual([0x90, 60, 100]); + + const noteOffEvent = {type: 'noteoff', number: 60, velocity: 0, channel: 0}; + const noteOffBytes = convertMidiEventToBytes(noteOffEvent); + expect(Array.from(noteOffBytes)).toEqual([0x80, 60, 0]); + + const ccEvent = {type: 'controlchange', number: 7, velocity: 127, channel: 0}; + const ccBytes = convertMidiEventToBytes(ccEvent); + expect(Array.from(ccBytes)).toEqual([0xB0, 7, 127]); + }); +}); \ No newline at end of file diff --git a/packages/jamtools/core/modules/audio_io/audio_io_module.tsx b/packages/jamtools/core/modules/audio_io/audio_io_module.tsx new file mode 100644 index 00000000..0b5e3ca4 --- /dev/null +++ b/packages/jamtools/core/modules/audio_io/audio_io_module.tsx @@ -0,0 +1,244 @@ +import {Subject} from 'rxjs'; + +import {CoreDependencies, ModuleDependencies} from 'springboard/types/module_types'; +import {Module} from 'springboard/module_registry/module_registry'; +import {AudioIOState, AudioService, WAMRegistryService, WebAudioModule} from '@jamtools/core/types/audio_io_types'; +import springboard from 'springboard'; +import {StateSupervisor} from 'springboard/services/states/shared_state_service'; +import {ModuleAPI} from 'springboard/engine/module_api'; +import {MidiEvent} from '@jamtools/core/modules/macro_module/macro_module_types'; +import {MockAudioService} from '@jamtools/core/test/services/mock_audio_service'; +import {MockWAMRegistryService} from '@jamtools/core/test/services/mock_wam_registry_service'; + +type AudioIODeps = { + audio: AudioService; + wamRegistry: WAMRegistryService; +} + +let createAudioIODependencies = async (): Promise => { + return { + audio: new MockAudioService(), + wamRegistry: new MockWAMRegistryService(), + }; +}; + +// @platform "browser" +createAudioIODependencies = async () => { + const {BrowserAudioService} = await import('@jamtools/core/services/browser/browser_audio_service'); + const {BrowserWAMRegistryService} = await import('@jamtools/core/services/browser/browser_wam_registry_service'); + + const audio = new BrowserAudioService(); + const wamRegistry = new BrowserWAMRegistryService(audio); + return { + audio, + wamRegistry, + }; +}; +// @platform end + +// @platform "node" +createAudioIODependencies = async () => { + if (process.env.DISABLE_AUDIO === 'true') { + return { + audio: new MockAudioService(), + wamRegistry: new MockWAMRegistryService(), + }; + } + + const {NodeAudioService} = await 
import('@jamtools/core/services/node/node_audio_service'); + const {NodeWAMRegistryService} = await import('@jamtools/core/services/node/node_wam_registry_service'); + + const audio = new NodeAudioService(); + const wamRegistry = new NodeWAMRegistryService(audio); + return { + audio, + wamRegistry, + }; +}; +// @platform end + +export const setAudioIODependencyCreator = (func: typeof createAudioIODependencies) => { + createAudioIODependencies = func; +}; + +springboard.registerClassModule((coreDeps: CoreDependencies, modDependencies: ModuleDependencies) => { + return new AudioIOModule(coreDeps, modDependencies); +}); + +declare module 'springboard/module_registry/module_registry' { + interface AllModules { + audio_io: AudioIOModule; + } +} + +export class AudioIOModule implements Module { + moduleId = 'audio_io'; + + cleanup: (() => void)[] = []; + + state: AudioIOState = { + audioContext: null, + wamInstances: [], + isAudioInitialized: false, + masterVolume: 0.8, + }; + + audioContextSubject!: Subject; + wamInstancesSubject!: Subject; + + audioIOState!: StateSupervisor; + + private audioIODeps!: AudioIODeps; + private isAudioInitialized = false; + + constructor(private coreDeps: CoreDependencies, private moduleDeps: ModuleDependencies) { + } + + ensureAudioInitialized = async () => { + if (this.isAudioInitialized) { + return; + } + + this.isAudioInitialized = true; + await this.audioIODeps.audio.initialize(); + + const audioContext = this.audioIODeps.audio.audioContext; + + const state: AudioIOState = { + audioContext, + wamInstances: this.audioIODeps.wamRegistry.getAllInstances().map(instance => ({ + id: instance.instanceId, + moduleId: instance.moduleId, + name: instance.name, + })), + isAudioInitialized: true, + masterVolume: this.state.masterVolume, + }; + + this.audioIOState.setState(state); + }; + + initialize = async (moduleAPI: ModuleAPI) => { + this.audioIODeps = await createAudioIODependencies(); + + this.audioContextSubject = this.audioIODeps.audio.onAudioContextChange; + + this.wamInstancesSubject = new Subject(); + + this.audioIOState = await moduleAPI.statesAPI.createSharedState('audio_io_state', this.state); + + // Subscribe to WAM registry changes + this.audioIODeps.wamRegistry.onWAMInstantiated.subscribe(({instance}) => { + const currentInstances = this.audioIODeps.wamRegistry.getAllInstances(); + this.wamInstancesSubject.next(currentInstances); + + // Update state + const state = this.audioIOState.getState(); + state.wamInstances = currentInstances.map(inst => ({ + id: inst.instanceId, + moduleId: inst.moduleId, + name: inst.name, + })); + this.audioIOState.setState(state); + }); + + this.audioIODeps.wamRegistry.onWAMDestroyed.subscribe(({instanceId}) => { + const currentInstances = this.audioIODeps.wamRegistry.getAllInstances(); + this.wamInstancesSubject.next(currentInstances); + + // Update state + const state = this.audioIOState.getState(); + state.wamInstances = currentInstances.map(inst => ({ + id: inst.instanceId, + moduleId: inst.moduleId, + name: inst.name, + })); + this.audioIOState.setState(state); + }); + + // Connect to existing MIDI infrastructure if available + this.setupMIDIIntegration(); + }; + + private setupMIDIIntegration = () => { + try { + const ioModule = this.coreDeps.modules.io; + if (ioModule && ioModule.midiInputSubject) { + ioModule.midiInputSubject.subscribe((midiEvent: any) => { + this.handleMidiInput(midiEvent); + }); + } + } catch (error) { + // IO module not available, skip MIDI integration + console.warn('MIDI integration not 
available:', error); + } + }; + + private handleMidiInput = (midiEvent: MidiEvent) => { + const midiData = this.convertMidiEventToBytes(midiEvent); + + // Send MIDI to all WAM instances that support it + this.audioIODeps.wamRegistry.getAllInstances().forEach(wam => { + if (wam.onMidi) { + wam.onMidi(midiData); + } + }); + }; + + private convertMidiEventToBytes = (midiEvent: MidiEvent): Uint8Array => { + // Convert jamtools MIDI event to standard MIDI bytes + const {type, number, velocity = 64, channel = 0} = midiEvent; + + switch (type) { + case 'noteon': + return new Uint8Array([0x90 | channel, number, velocity]); + case 'noteoff': + return new Uint8Array([0x80 | channel, number, 0]); + case 'controlchange': + return new Uint8Array([0xB0 | channel, number, velocity]); + default: + return new Uint8Array([0x90, 60, 64]); // Default middle C + } + }; + + public setMasterVolume = (volume: number) => { + this.ensureAudioInitialized(); + this.audioIODeps.audio.setMasterVolume(volume); + + const state = this.audioIOState.getState(); + state.masterVolume = volume; + this.audioIOState.setState(state); + }; + + public instantiateWAM = async (moduleId: string, instanceId: string) => { + await this.ensureAudioInitialized(); + return this.audioIODeps.wamRegistry.instantiateWAM(moduleId, instanceId); + }; + + public destroyWAMInstance = async (instanceId: string) => { + return this.audioIODeps.wamRegistry.destroyWAMInstance(instanceId); + }; + + public getWAMInstance = (instanceId: string) => { + return this.audioIODeps.wamRegistry.getWAMInstance(instanceId); + }; + + public getAllWAMInstances = () => { + return this.audioIODeps.wamRegistry.getAllInstances(); + }; + + public getRegisteredWAMs = () => { + return this.audioIODeps.wamRegistry.getRegisteredWAMs(); + }; + + public getAudioContext = (): AudioContext | null => { + return this.audioIODeps.audio.audioContext; + }; + + public getMasterGainNode = (): GainNode | null => { + if (!this.audioIODeps.audio.audioContext) { + return null; + } + return this.audioIODeps.audio.getMasterGainNode(); + }; +} \ No newline at end of file diff --git a/packages/jamtools/core/modules/index.ts b/packages/jamtools/core/modules/index.ts index f0b24291..df7fa54c 100644 --- a/packages/jamtools/core/modules/index.ts +++ b/packages/jamtools/core/modules/index.ts @@ -1,4 +1,5 @@ import './io/io_module'; +import './audio_io/audio_io_module'; import './macro_module/macro_module'; import './chord_families/chord_families_module'; import './midi_files/midi_files_module'; diff --git a/packages/jamtools/core/services/browser/browser_audio_service.ts b/packages/jamtools/core/services/browser/browser_audio_service.ts new file mode 100644 index 00000000..4271bce9 --- /dev/null +++ b/packages/jamtools/core/services/browser/browser_audio_service.ts @@ -0,0 +1,116 @@ +import {Subject} from 'rxjs'; +import {AudioService} from '@jamtools/core/types/audio_io_types'; + +export class BrowserAudioService implements AudioService { + audioContext: AudioContext | null = null; + onAudioContextChange = new Subject(); + + private masterGainNode: GainNode | null = null; + private isInitialized = false; + + initialize = async (): Promise => { + if (this.isInitialized) { + return; + } + + if (typeof AudioContext === 'undefined' && typeof webkitAudioContext === 'undefined') { + console.warn('Web Audio API not supported in this browser'); + return; + } + + try { + this.audioContext = new (window.AudioContext || (window as any).webkitAudioContext)(); + + // Create master gain node + this.masterGainNode 
= this.audioContext.createGain(); + this.masterGainNode.connect(this.audioContext.destination); + this.masterGainNode.gain.value = 0.8; + + // Handle audio context state changes + this.handleAudioContextStateChange(); + + this.onAudioContextChange.next(this.audioContext); + this.isInitialized = true; + + console.log('Audio context initialized:', this.audioContext.state); + } catch (error) { + console.error('Failed to initialize audio context:', error); + throw error; + } + }; + + createAudioContext = (): AudioContext => { + if (this.audioContext) { + return this.audioContext; + } + + if (typeof AudioContext === 'undefined' && typeof webkitAudioContext === 'undefined') { + throw new Error('Web Audio API not supported in this browser'); + } + + this.audioContext = new (window.AudioContext || (window as any).webkitAudioContext)(); + this.onAudioContextChange.next(this.audioContext); + return this.audioContext; + }; + + getMasterGainNode = (): GainNode => { + if (!this.masterGainNode) { + throw new Error('Audio service not initialized. Call initialize() first.'); + } + return this.masterGainNode; + }; + + setMasterVolume = (volume: number): void => { + if (this.masterGainNode) { + // Clamp volume between 0 and 1 + const clampedVolume = Math.max(0, Math.min(1, volume)); + this.masterGainNode.gain.value = clampedVolume; + } + }; + + private handleAudioContextStateChange = () => { + if (!this.audioContext) { + return; + } + + const handleStateChange = () => { + console.log('Audio context state changed:', this.audioContext?.state); + + if (this.audioContext?.state === 'suspended') { + // Try to resume on user interaction + this.setupUserInteractionResuming(); + } + }; + + this.audioContext.addEventListener('statechange', handleStateChange); + }; + + private setupUserInteractionResuming = () => { + if (!this.audioContext || this.audioContext.state !== 'suspended') { + return; + } + + const resumeAudio = async () => { + if (this.audioContext && this.audioContext.state === 'suspended') { + try { + await this.audioContext.resume(); + console.log('Audio context resumed'); + + // Remove event listeners after successful resume + document.removeEventListener('click', resumeAudio); + document.removeEventListener('touchstart', resumeAudio); + document.removeEventListener('keydown', resumeAudio); + } catch (error) { + console.warn('Failed to resume audio context:', error); + } + } + }; + + // Add event listeners for user interaction + document.addEventListener('click', resumeAudio, {once: true}); + document.addEventListener('touchstart', resumeAudio, {once: true}); + document.addEventListener('keydown', resumeAudio, {once: true}); + + console.log('Audio context suspended. 
Waiting for user interaction to resume...'); + }; +} \ No newline at end of file diff --git a/packages/jamtools/core/services/browser/browser_wam_registry_service.ts b/packages/jamtools/core/services/browser/browser_wam_registry_service.ts new file mode 100644 index 00000000..07a62944 --- /dev/null +++ b/packages/jamtools/core/services/browser/browser_wam_registry_service.ts @@ -0,0 +1,148 @@ +import {Subject} from 'rxjs'; +import {WAMRegistryService, WAMDescriptor, WebAudioModule, WAMConfig, AudioService} from '@jamtools/core/types/audio_io_types'; + +export class BrowserWAMRegistryService implements WAMRegistryService { + onWAMRegistered = new Subject(); + onWAMInstantiated = new Subject<{descriptor: WAMDescriptor; instance: WebAudioModule}>(); + onWAMDestroyed = new Subject<{instanceId: string}>(); + + private registeredWAMs: Map = new Map(); + private wamInstances: Map = new Map(); + + constructor(private audioService: AudioService) { + this.registerBuiltInWAMs(); + } + + registerWAM = (descriptor: WAMDescriptor): void => { + this.registeredWAMs.set(descriptor.moduleId, descriptor); + this.onWAMRegistered.next(descriptor); + }; + + getRegisteredWAMs = (): WAMDescriptor[] => { + return Array.from(this.registeredWAMs.values()); + }; + + instantiateWAM = async (moduleId: string, instanceId: string, config?: Partial): Promise => { + const descriptor = this.registeredWAMs.get(moduleId); + if (!descriptor) { + throw new Error(`WAM ${moduleId} not found. Available WAMs: ${Array.from(this.registeredWAMs.keys()).join(', ')}`); + } + + if (this.wamInstances.has(instanceId)) { + throw new Error(`WAM instance ${instanceId} already exists`); + } + + const audioContext = this.audioService.audioContext; + if (!audioContext) { + throw new Error('Audio context not initialized. 
Call audioService.initialize() first.'); + } + + try { + const fullConfig: WAMConfig = { + moduleId, + instanceId, + initialParameterValues: {}, + ...config, + }; + + let wamInstance: WebAudioModule; + + // Handle built-in WAMs + if (moduleId.startsWith('com.jamtools.')) { + wamInstance = await this.createBuiltInWAM(moduleId, fullConfig, audioContext); + } else { + // Handle external WAMs + const WAMClass = await import(descriptor.moduleUrl); + wamInstance = await WAMClass.default.create(audioContext, fullConfig); + } + + this.wamInstances.set(instanceId, wamInstance); + this.onWAMInstantiated.next({descriptor, instance: wamInstance}); + + return wamInstance; + } catch (error) { + console.error(`Failed to instantiate WAM ${moduleId}:`, error); + throw error; + } + }; + + destroyWAMInstance = async (instanceId: string): Promise => { + const instance = this.wamInstances.get(instanceId); + if (!instance) { + throw new Error(`WAM instance ${instanceId} not found`); + } + + try { + await instance.destroy(); + this.wamInstances.delete(instanceId); + this.onWAMDestroyed.next({instanceId}); + } catch (error) { + console.error(`Failed to destroy WAM instance ${instanceId}:`, error); + throw error; + } + }; + + getWAMInstance = (instanceId: string): WebAudioModule | null => { + return this.wamInstances.get(instanceId) || null; + }; + + getAllInstances = (): WebAudioModule[] => { + return Array.from(this.wamInstances.values()); + }; + + private registerBuiltInWAMs = (): void => { + // Register built-in synthesizer + this.registerWAM({ + moduleId: 'com.jamtools.oscillator-synth', + name: 'Oscillator Synthesizer', + vendor: 'Jamtools', + version: '1.0.0', + description: 'Basic oscillator-based synthesizer with ADSR envelope', + moduleUrl: '', // Built-in, no URL needed + isInstrument: true, + keywords: ['synthesizer', 'oscillator', 'instrument'], + }); + + // Register built-in delay effect + this.registerWAM({ + moduleId: 'com.jamtools.delay', + name: 'Delay Effect', + vendor: 'Jamtools', + version: '1.0.0', + description: 'Digital delay effect with feedback and wet/dry mix', + moduleUrl: '', // Built-in, no URL needed + isEffect: true, + keywords: ['delay', 'echo', 'effect'], + }); + + // Register built-in spectrum analyzer + this.registerWAM({ + moduleId: 'com.jamtools.spectrum-analyzer', + name: 'Spectrum Analyzer', + vendor: 'Jamtools', + version: '1.0.0', + description: 'Real-time frequency spectrum analyzer', + moduleUrl: '', // Built-in, no URL needed + keywords: ['analyzer', 'spectrum', 'visualization'], + }); + }; + + private createBuiltInWAM = async (moduleId: string, config: WAMConfig, audioContext: AudioContext): Promise => { + switch (moduleId) { + case 'com.jamtools.oscillator-synth': + const {OscillatorSynthWAM} = await import('@jamtools/core/wams/oscillator_synth_wam'); + return new OscillatorSynthWAM(audioContext, config); + + case 'com.jamtools.delay': + const {DelayWAM} = await import('@jamtools/core/wams/delay_wam'); + return new DelayWAM(audioContext, config); + + case 'com.jamtools.spectrum-analyzer': + const {SpectrumAnalyzerWAM} = await import('@jamtools/core/wams/spectrum_analyzer_wam'); + return new SpectrumAnalyzerWAM(audioContext, config); + + default: + throw new Error(`Unknown built-in WAM: ${moduleId}`); + } + }; +} \ No newline at end of file diff --git a/packages/jamtools/core/services/node/node_audio_service.ts b/packages/jamtools/core/services/node/node_audio_service.ts new file mode 100644 index 00000000..ac9c3a56 --- /dev/null +++ 
b/packages/jamtools/core/services/node/node_audio_service.ts @@ -0,0 +1,33 @@ +import {Subject} from 'rxjs'; +import {AudioService} from '@jamtools/core/types/audio_io_types'; + +export class NodeAudioService implements AudioService { + audioContext: AudioContext | null = null; + onAudioContextChange = new Subject(); + + private mockGainNode: any = { + gain: {value: 0.8}, + connect: () => {}, + disconnect: () => {}, + }; + + initialize = async (): Promise => { + // In Node.js environment, we can't create real AudioContext + // This service provides a compatible interface for server-side rendering + console.log('NodeAudioService: Audio functionality not available in Node.js environment'); + this.onAudioContextChange.next(this.audioContext); + }; + + createAudioContext = (): AudioContext => { + throw new Error('NodeAudioService: AudioContext not available in Node.js environment'); + }; + + getMasterGainNode = (): GainNode => { + return this.mockGainNode as GainNode; + }; + + setMasterVolume = (volume: number): void => { + this.mockGainNode.gain.value = Math.max(0, Math.min(1, volume)); + console.log(`NodeAudioService: Master volume set to ${volume} (mock)`); + }; +} \ No newline at end of file diff --git a/packages/jamtools/core/services/node/node_wam_registry_service.ts b/packages/jamtools/core/services/node/node_wam_registry_service.ts new file mode 100644 index 00000000..97815688 --- /dev/null +++ b/packages/jamtools/core/services/node/node_wam_registry_service.ts @@ -0,0 +1,119 @@ +import {Subject} from 'rxjs'; +import {WAMRegistryService, WAMDescriptor, WebAudioModule, WAMConfig, AudioService} from '@jamtools/core/types/audio_io_types'; + +export class NodeWAMRegistryService implements WAMRegistryService { + onWAMRegistered = new Subject(); + onWAMInstantiated = new Subject<{descriptor: WAMDescriptor; instance: WebAudioModule}>(); + onWAMDestroyed = new Subject<{instanceId: string}>(); + + private registeredWAMs: Map = new Map(); + private wamInstances: Map = new Map(); + + constructor(private audioService: AudioService) { + this.registerMockWAMs(); + } + + registerWAM = (descriptor: WAMDescriptor): void => { + this.registeredWAMs.set(descriptor.moduleId, descriptor); + this.onWAMRegistered.next(descriptor); + }; + + getRegisteredWAMs = (): WAMDescriptor[] => { + return Array.from(this.registeredWAMs.values()); + }; + + instantiateWAM = async (moduleId: string, instanceId: string, config?: Partial): Promise => { + const descriptor = this.registeredWAMs.get(moduleId); + if (!descriptor) { + throw new Error(`WAM ${moduleId} not found`); + } + + if (this.wamInstances.has(instanceId)) { + throw new Error(`WAM instance ${instanceId} already exists`); + } + + console.log(`NodeWAMRegistryService: Creating mock WAM instance ${instanceId} for ${moduleId}`); + + // Create mock WAM instance for Node.js environment + const mockInstance: WebAudioModule = { + moduleId, + instanceId, + name: descriptor.name, + vendor: descriptor.vendor, + audioNode: {} as AudioNode, + audioContext: {} as AudioContext, + + getParameterInfo: async () => [], + getParameterValues: async () => ({}), + setParameterValues: async (values) => { + console.log(`NodeWAMRegistryService: Mock parameter update for ${instanceId}:`, values); + }, + getState: async () => ({}), + setState: async (state) => { + console.log(`NodeWAMRegistryService: Mock state update for ${instanceId}:`, state); + }, + destroy: async () => { + console.log(`NodeWAMRegistryService: Mock destroy for ${instanceId}`); + }, + + onMidi: (midiData) => { + 
console.log(`NodeWAMRegistryService: Mock MIDI input for ${instanceId}:`, Array.from(midiData)); + }, + }; + + this.wamInstances.set(instanceId, mockInstance); + this.onWAMInstantiated.next({descriptor, instance: mockInstance}); + + return mockInstance; + }; + + destroyWAMInstance = async (instanceId: string): Promise => { + const instance = this.wamInstances.get(instanceId); + if (!instance) { + throw new Error(`WAM instance ${instanceId} not found`); + } + + await instance.destroy(); + this.wamInstances.delete(instanceId); + this.onWAMDestroyed.next({instanceId}); + }; + + getWAMInstance = (instanceId: string): WebAudioModule | null => { + return this.wamInstances.get(instanceId) || null; + }; + + getAllInstances = (): WebAudioModule[] => { + return Array.from(this.wamInstances.values()); + }; + + private registerMockWAMs = (): void => { + this.registerWAM({ + moduleId: 'com.jamtools.oscillator-synth', + name: 'Oscillator Synthesizer (Node Mock)', + vendor: 'Jamtools', + version: '1.0.0', + description: 'Mock synthesizer for Node.js environment', + moduleUrl: '', + isInstrument: true, + }); + + this.registerWAM({ + moduleId: 'com.jamtools.delay', + name: 'Delay Effect (Node Mock)', + vendor: 'Jamtools', + version: '1.0.0', + description: 'Mock delay effect for Node.js environment', + moduleUrl: '', + isEffect: true, + }); + + this.registerWAM({ + moduleId: 'com.jamtools.spectrum-analyzer', + name: 'Spectrum Analyzer (Node Mock)', + vendor: 'Jamtools', + version: '1.0.0', + description: 'Mock spectrum analyzer for Node.js environment', + moduleUrl: '', + }); + }; +} \ No newline at end of file diff --git a/packages/jamtools/core/test/services/mock_audio_service.ts b/packages/jamtools/core/test/services/mock_audio_service.ts new file mode 100644 index 00000000..b186e25a --- /dev/null +++ b/packages/jamtools/core/test/services/mock_audio_service.ts @@ -0,0 +1,31 @@ +import {Subject} from 'rxjs'; +import {AudioService} from '@jamtools/core/types/audio_io_types'; + +export class MockAudioService implements AudioService { + audioContext: AudioContext | null = null; + onAudioContextChange = new Subject(); + + private mockGainNode: any = { + gain: {value: 0.8}, + connect: () => {}, + disconnect: () => {}, + }; + + initialize = async (): Promise => { + // Mock initialization - don't create real AudioContext in tests + this.audioContext = null; + this.onAudioContextChange.next(this.audioContext); + }; + + createAudioContext = (): AudioContext => { + throw new Error('MockAudioService: Cannot create real AudioContext in test environment'); + }; + + getMasterGainNode = (): GainNode => { + return this.mockGainNode as GainNode; + }; + + setMasterVolume = (volume: number): void => { + this.mockGainNode.gain.value = Math.max(0, Math.min(1, volume)); + }; +} \ No newline at end of file diff --git a/packages/jamtools/core/test/services/mock_wam_registry_service.ts b/packages/jamtools/core/test/services/mock_wam_registry_service.ts new file mode 100644 index 00000000..aef09116 --- /dev/null +++ b/packages/jamtools/core/test/services/mock_wam_registry_service.ts @@ -0,0 +1,98 @@ +import {Subject} from 'rxjs'; +import {WAMRegistryService, WAMDescriptor, WebAudioModule, WAMConfig} from '@jamtools/core/types/audio_io_types'; + +export class MockWAMRegistryService implements WAMRegistryService { + onWAMRegistered = new Subject(); + onWAMInstantiated = new Subject<{descriptor: WAMDescriptor; instance: WebAudioModule}>(); + onWAMDestroyed = new Subject<{instanceId: string}>(); + + private registeredWAMs: Map = new 
Map();
+    private wamInstances: Map<string, WebAudioModule> = new Map();
+
+    constructor() {
+        this.registerMockWAMs();
+    }
+
+    registerWAM = (descriptor: WAMDescriptor): void => {
+        this.registeredWAMs.set(descriptor.moduleId, descriptor);
+        this.onWAMRegistered.next(descriptor);
+    };
+
+    getRegisteredWAMs = (): WAMDescriptor[] => {
+        return Array.from(this.registeredWAMs.values());
+    };
+
+    instantiateWAM = async (moduleId: string, instanceId: string, config?: Partial<WAMConfig>): Promise<WebAudioModule> => {
+        const descriptor = this.registeredWAMs.get(moduleId);
+        if (!descriptor) {
+            throw new Error(`WAM ${moduleId} not found`);
+        }
+
+        if (this.wamInstances.has(instanceId)) {
+            throw new Error(`WAM instance ${instanceId} already exists`);
+        }
+
+        const mockInstance: WebAudioModule = {
+            moduleId,
+            instanceId,
+            name: descriptor.name,
+            vendor: descriptor.vendor,
+            audioNode: {} as AudioNode,
+            audioContext: {} as AudioContext,
+
+            getParameterInfo: async () => [],
+            getParameterValues: async () => ({}),
+            setParameterValues: async () => {},
+            getState: async () => ({}),
+            setState: async () => {},
+            destroy: async () => {},
+
+            onMidi: () => {},
+        };
+
+        this.wamInstances.set(instanceId, mockInstance);
+        this.onWAMInstantiated.next({descriptor, instance: mockInstance});
+
+        return mockInstance;
+    };
+
+    destroyWAMInstance = async (instanceId: string): Promise<void> => {
+        const instance = this.wamInstances.get(instanceId);
+        if (!instance) {
+            throw new Error(`WAM instance ${instanceId} not found`);
+        }
+
+        this.wamInstances.delete(instanceId);
+        this.onWAMDestroyed.next({instanceId});
+    };
+
+    getWAMInstance = (instanceId: string): WebAudioModule | null => {
+        return this.wamInstances.get(instanceId) || null;
+    };
+
+    getAllInstances = (): WebAudioModule[] => {
+        return Array.from(this.wamInstances.values());
+    };
+
+    private registerMockWAMs = (): void => {
+        this.registerWAM({
+            moduleId: 'com.jamtools.oscillator-synth',
+            name: 'Mock Oscillator Synthesizer',
+            vendor: 'Jamtools',
+            version: '1.0.0',
+            description: 'Mock synthesizer for testing',
+            moduleUrl: '',
+            isInstrument: true,
+        });
+
+        this.registerWAM({
+            moduleId: 'com.jamtools.delay',
+            name: 'Mock Delay Effect',
+            vendor: 'Jamtools',
+            version: '1.0.0',
+            description: 'Mock delay effect for testing',
+            moduleUrl: '',
+            isEffect: true,
+        });
+    };
+}
\ No newline at end of file
diff --git a/packages/jamtools/core/types/audio_io_types.ts b/packages/jamtools/core/types/audio_io_types.ts
new file mode 100644
index 00000000..11d6d27e
--- /dev/null
+++ b/packages/jamtools/core/types/audio_io_types.ts
@@ -0,0 +1,91 @@
+import {Subject} from 'rxjs';
+
+export type ParameterInfo = {
+    id: string;
+    label: string;
+    type: 'float' | 'int' | 'boolean' | 'enum';
+    defaultValue: number;
+    minValue?: number;
+    maxValue?: number;
+    discreteStep?: number;
+    exponent?: number;
+    units?: string;
+    enumValues?: string[];
+};
+
+export type ParameterValues = {
+    [parameterId: string]: number;
+};
+
+export type WAMConfig = {
+    moduleId: string;
+    instanceId: string;
+    initialParameterValues?: ParameterValues;
+};
+
+export interface WebAudioModule {
+    readonly moduleId: string;
+    readonly instanceId: string;
+    readonly name: string;
+    readonly vendor: string;
+
+    readonly audioNode: AudioNode;
+    readonly audioContext: AudioContext;
+
+    getParameterInfo(): Promise<ParameterInfo[]>;
+    getParameterValues(): Promise<ParameterValues>;
+    setParameterValues(values: ParameterValues): Promise<void>;
+
+    getState(): Promise<any>;
+    setState(state: any): Promise<void>;
+
+    onMidi?(midiData: Uint8Array): void;
+
+    createGui?(): Promise<HTMLElement>;
+    destroyGui?(): void;
+
+    destroy(): Promise<void>;
+}
+
+export type WAMDescriptor = {
+    moduleId: string;
+    name: string;
+    vendor: string;
+    version: string;
+    description: string;
+    moduleUrl: string;
+    thumbnail?: string;
+    keywords?: string[];
+    isInstrument?: boolean;
+    isEffect?: boolean;
+    parameterInfo?: ParameterInfo[];
+};
+
+export type AudioIOState = {
+    audioContext: AudioContext | null;
+    wamInstances: {id: string; moduleId: string; name: string}[];
+    isAudioInitialized: boolean;
+    masterVolume: number;
+};
+
+export type AudioService = {
+    audioContext: AudioContext | null;
+    onAudioContextChange: Subject<AudioContext | null>;
+    initialize: () => Promise<void>;
+    createAudioContext: () => AudioContext;
+    getMasterGainNode: () => GainNode;
+    setMasterVolume: (volume: number) => void;
+};
+
+export type WAMRegistryService = {
+    onWAMRegistered: Subject<WAMDescriptor>;
+    onWAMInstantiated: Subject<{descriptor: WAMDescriptor; instance: WebAudioModule}>;
+    onWAMDestroyed: Subject<{instanceId: string}>;
+
+    registerWAM: (descriptor: WAMDescriptor) => void;
+    getRegisteredWAMs: () => WAMDescriptor[];
+    instantiateWAM: (moduleId: string, instanceId: string, config?: Partial<WAMConfig>) => Promise<WebAudioModule>;
+    destroyWAMInstance: (instanceId: string) => Promise<void>;
+    getWAMInstance: (instanceId: string) => WebAudioModule | null;
+    getAllInstances: () => WebAudioModule[];
+};
\ No newline at end of file
diff --git a/packages/jamtools/core/types/react.d.ts b/packages/jamtools/core/types/react.d.ts
new file mode 100644
index 00000000..8f46b8ca
--- /dev/null
+++ b/packages/jamtools/core/types/react.d.ts
@@ -0,0 +1,196 @@
+declare module 'react' {
+    export interface FC
<P = {}>
{
+        (props: P): JSX.Element | null;
+    }
+
+    export interface ChangeEvent<T> {
+        target: T;
+    }
+
+    interface HTMLInputElement {
+        value: string;
+    }
+
+    export function useState<T>(initialState: T): [T, (newState: T) => void];
+    export function useEffect(effect: () => void | (() => void), deps?: any[]): void;
+
+    export namespace JSX {
+        interface IntrinsicElements {
+            div: any;
+            h1: any;
+            h2: any;
+            h3: any;
+            h4: any;
+            p: any;
+            button: any;
+            input: any;
+            select: any;
+            option: any;
+            label: any;
+            br: any;
+            ul: any;
+            ol: any;
+            li: any;
+            strong: any;
+        }
+        interface Element {}
+    }
+}
+
+declare module 'springboard' {
+    interface StatesAPI {
+        createSharedState<T>(name: string, initialValue: T): Promise<{
+            useState: () => T;
+            setState: (value: T) => void;
+        }>;
+    }
+
+    interface ModuleAPI {
+        statesAPI: StatesAPI;
+        deps: any;
+        registerRoute: (path: string, config: any, component: any) => void;
+    }
+
+    const springboard: {
+        registerModule: (name: string, deps: any, callback: (moduleAPI: ModuleAPI) => Promise<void>) => void;
+        registerClassModule: (factory: (coreDeps: any, modDependencies: any) => any) => void;
+    };
+    export default springboard;
+}
+
+declare module 'rxjs' {
+    export class Subject<T> {
+        subscribe(fn: (value: T) => void): { unsubscribe: () => void };
+        next(value: T): void;
+    }
+    export class BehaviorSubject<T> extends Subject<T> {
+        constructor(initialValue: T);
+        getValue(): T;
+    }
+}
+
+declare module 'vitest' {
+    export function describe(name: string, fn: () => void): void;
+    export function it(name: string, fn: () => void): void;
+    export function expect(value: any): any;
+    export function beforeEach(fn: () => void): void;
+}
+
+declare module 'springboard/types/module_types' {
+    export type ModuleDependencies = any;
+    export type CoreDependencies = any;
+}
+
+declare module 'springboard/module_registry/module_registry' {
+    export interface ModuleRegistry {
+        getModule(name: string): any;
+    }
+    export abstract class Module {
+        constructor(coreDeps: any);
+    }
+}
+
+declare module 'springboard/services/states/shared_state_service' {
+    export interface SharedStateService {
+        createSharedState<T>(name: string, initialValue: T): any;
+    }
+    export class StateSupervisor<T> {
+        constructor();
+        getState(): T;
+        setState(newState: T): void;
+    }
+}
+
+declare module 'springboard/engine/module_api' {
+    export interface ModuleAPI {
+        statesAPI: any;
+        deps: any;
+        registerRoute: (path: string, config: any, component: any) => void;
+    }
+}
+
+declare const process: {
+    browser: boolean;
+    env: {
+        NODE_ENV?: string;
+        [key: string]: string | undefined;
+    };
+};
+
+declare global {
+    interface Window {
+        webkitAudioContext?: typeof AudioContext;
+        AudioContext: typeof AudioContext;
+    }
+
+    interface AudioContext {
+        createOscillator(): OscillatorNode;
+        createGain(): GainNode;
+        createBiquadFilter(): BiquadFilterNode;
+        createAnalyser(): AnalyserNode;
+        createDelay(): DelayNode;
+        destination: AudioDestinationNode;
+        sampleRate: number;
+        currentTime: number;
+        state: AudioContextState;
+        suspend(): Promise<void>;
+        resume(): Promise<void>;
+        close(): Promise<void>;
+    }
+
+    type AudioContextState = 'suspended' | 'running' | 'closed';
+
+    interface AudioNode {
+        connect(destination: AudioNode): AudioNode;
+        connect(destination: AudioParam): void;
+        disconnect(): void;
+        disconnect(destination: AudioNode): void;
+        context: AudioContext;
+        numberOfInputs: number;
+        numberOfOutputs: number;
+    }
+
+    interface GainNode extends AudioNode {
+        gain: AudioParam;
+    }
+
+    interface OscillatorNode extends AudioNode {
+        frequency:
AudioParam; + detune: AudioParam; + type: OscillatorType; + start(when?: number): void; + stop(when?: number): void; + } + + type OscillatorType = 'sine' | 'square' | 'sawtooth' | 'triangle'; + + interface BiquadFilterNode extends AudioNode { + frequency: AudioParam; + Q: AudioParam; + type: BiquadFilterType; + } + + type BiquadFilterType = 'lowpass' | 'highpass' | 'bandpass' | 'lowshelf' | 'highshelf' | 'peaking' | 'notch' | 'allpass'; + + interface AudioParam { + value: number; + setValueAtTime(value: number, startTime: number): AudioParam; + linearRampToValueAtTime(value: number, endTime: number): AudioParam; + exponentialRampToValueAtTime(value: number, endTime: number): AudioParam; + } + + interface AnalyserNode extends AudioNode { + fftSize: number; + frequencyBinCount: number; + getFloatFrequencyData(array: Float32Array): void; + getByteFrequencyData(array: Uint8Array): void; + getFloatTimeDomainData(array: Float32Array): void; + getByteTimeDomainData(array: Uint8Array): void; + } + + interface DelayNode extends AudioNode { + delayTime: AudioParam; + } + + interface AudioDestinationNode extends AudioNode {} +} \ No newline at end of file diff --git a/packages/jamtools/core/wams/delay_wam.ts b/packages/jamtools/core/wams/delay_wam.ts new file mode 100644 index 00000000..9346dae1 --- /dev/null +++ b/packages/jamtools/core/wams/delay_wam.ts @@ -0,0 +1,181 @@ +import {WebAudioModule, ParameterInfo, ParameterValues, WAMConfig} from '@jamtools/core/types/audio_io_types'; + +export class DelayWAM implements WebAudioModule { + readonly moduleId: string; + readonly instanceId: string; + readonly name = 'Delay Effect'; + readonly vendor = 'Jamtools'; + + readonly audioNode: GainNode; + readonly audioContext: AudioContext; + + private inputGain: GainNode; + private outputGain: GainNode; + private wetGain: GainNode; + private dryGain: GainNode; + private delayNode: DelayNode; + private feedbackGain: GainNode; + + private parameters = { + 'delay.time': 0.25, // seconds + 'delay.feedback': 0.3, // 0-1 + 'delay.wetLevel': 0.5, // 0-1 + 'delay.dryLevel': 1.0, // 0-1 + }; + + constructor(audioContext: AudioContext, config: WAMConfig) { + this.audioContext = audioContext; + this.moduleId = config.moduleId; + this.instanceId = config.instanceId; + + // Create audio graph + this.inputGain = audioContext.createGain(); + this.outputGain = audioContext.createGain(); + this.wetGain = audioContext.createGain(); + this.dryGain = audioContext.createGain(); + this.delayNode = audioContext.createDelay(2.0); // Max 2 seconds delay + this.feedbackGain = audioContext.createGain(); + + // Main output node + this.audioNode = this.outputGain; + + // Connect the delay network: + // input -> inputGain -> [dry path] -> dryGain -> outputGain + // \-> [wet path] -> delayNode -> wetGain -> outputGain + // ^ + // | + // feedbackGain + // | + // [feedback loop] + + // Dry path + this.inputGain.connect(this.dryGain); + this.dryGain.connect(this.outputGain); + + // Wet path + this.inputGain.connect(this.delayNode); + this.delayNode.connect(this.wetGain); + this.wetGain.connect(this.outputGain); + + // Feedback loop + this.delayNode.connect(this.feedbackGain); + this.feedbackGain.connect(this.delayNode); + + // Set initial parameter values + this.updateAllParameters(); + + // Apply initial parameters from config + if (config.initialParameterValues) { + this.setParameterValues(config.initialParameterValues); + } + } + + getParameterInfo = async (): Promise => { + return [ + { + id: 'delay.time', + label: 'Delay Time', + type: 
'float', + defaultValue: 0.25, + minValue: 0.001, + maxValue: 2.0, + units: 's', + }, + { + id: 'delay.feedback', + label: 'Feedback', + type: 'float', + defaultValue: 0.3, + minValue: 0.0, + maxValue: 0.95, // Prevent runaway feedback + }, + { + id: 'delay.wetLevel', + label: 'Wet Level', + type: 'float', + defaultValue: 0.5, + minValue: 0.0, + maxValue: 1.0, + }, + { + id: 'delay.dryLevel', + label: 'Dry Level', + type: 'float', + defaultValue: 1.0, + minValue: 0.0, + maxValue: 1.0, + }, + ]; + }; + + getParameterValues = async (): Promise => { + return {...this.parameters}; + }; + + setParameterValues = async (values: ParameterValues): Promise => { + for (const [id, value] of Object.entries(values)) { + if (id in this.parameters) { + this.parameters[id as keyof typeof this.parameters] = value; + this.updateParameter(id, value); + } + } + }; + + private updateParameter = (id: string, value: number): void => { + const now = this.audioContext.currentTime; + + switch (id) { + case 'delay.time': + // Smooth delay time changes to avoid clicks + this.delayNode.delayTime.setTargetAtTime(value, now, 0.01); + break; + case 'delay.feedback': + this.feedbackGain.gain.setValueAtTime(value, now); + break; + case 'delay.wetLevel': + this.wetGain.gain.setValueAtTime(value, now); + break; + case 'delay.dryLevel': + this.dryGain.gain.setValueAtTime(value, now); + break; + } + }; + + private updateAllParameters = (): void => { + for (const [id, value] of Object.entries(this.parameters)) { + this.updateParameter(id, value); + } + }; + + getState = async (): Promise => { + return { + parameters: this.parameters, + }; + }; + + setState = async (state: any): Promise => { + if (state.parameters) { + await this.setParameterValues(state.parameters); + } + }; + + // Connect input to our input gain node + connect = (destination: AudioNode): void => { + this.inputGain.connect(destination); + }; + + // Get input node for connecting sources + getInputNode = (): AudioNode => { + return this.inputGain; + }; + + destroy = async (): Promise => { + // Disconnect all nodes + this.inputGain.disconnect(); + this.outputGain.disconnect(); + this.wetGain.disconnect(); + this.dryGain.disconnect(); + this.delayNode.disconnect(); + this.feedbackGain.disconnect(); + }; +} \ No newline at end of file diff --git a/packages/jamtools/core/wams/oscillator_synth_wam.ts b/packages/jamtools/core/wams/oscillator_synth_wam.ts new file mode 100644 index 00000000..6212efcd --- /dev/null +++ b/packages/jamtools/core/wams/oscillator_synth_wam.ts @@ -0,0 +1,247 @@ +import {WebAudioModule, ParameterInfo, ParameterValues, WAMConfig} from '@jamtools/core/types/audio_io_types'; + +export class OscillatorSynthWAM implements WebAudioModule { + readonly moduleId: string; + readonly instanceId: string; + readonly name = 'Oscillator Synthesizer'; + readonly vendor = 'Jamtools'; + + readonly audioNode: GainNode; + readonly audioContext: AudioContext; + + private oscillators: Map = new Map(); + private parameters = { + 'osc.waveform': 0, // 0=sine, 1=square, 2=sawtooth, 3=triangle + 'env.attack': 0.1, + 'env.decay': 0.1, + 'env.sustain': 0.8, + 'env.release': 0.5, + 'filter.cutoff': 1000, + 'filter.resonance': 1, + 'master.gain': 0.5, + }; + + private filterNode: BiquadFilterNode; + + constructor(audioContext: AudioContext, config: WAMConfig) { + this.audioContext = audioContext; + this.moduleId = config.moduleId; + this.instanceId = config.instanceId; + + // Create audio graph + this.audioNode = audioContext.createGain(); + this.filterNode = 
audioContext.createBiquadFilter(); + + this.filterNode.connect(this.audioNode); + this.filterNode.type = 'lowpass'; + this.filterNode.frequency.value = this.parameters['filter.cutoff']; + this.filterNode.Q.value = this.parameters['filter.resonance']; + + this.audioNode.gain.value = this.parameters['master.gain']; + + // Apply initial parameters + if (config.initialParameterValues) { + this.setParameterValues(config.initialParameterValues); + } + } + + getParameterInfo = async (): Promise => { + return [ + { + id: 'osc.waveform', + label: 'Waveform', + type: 'enum', + defaultValue: 0, + enumValues: ['Sine', 'Square', 'Sawtooth', 'Triangle'], + }, + { + id: 'env.attack', + label: 'Attack', + type: 'float', + defaultValue: 0.1, + minValue: 0.001, + maxValue: 2.0, + units: 's', + }, + { + id: 'env.decay', + label: 'Decay', + type: 'float', + defaultValue: 0.1, + minValue: 0.001, + maxValue: 2.0, + units: 's', + }, + { + id: 'env.sustain', + label: 'Sustain', + type: 'float', + defaultValue: 0.8, + minValue: 0.0, + maxValue: 1.0, + }, + { + id: 'env.release', + label: 'Release', + type: 'float', + defaultValue: 0.5, + minValue: 0.001, + maxValue: 5.0, + units: 's', + }, + { + id: 'filter.cutoff', + label: 'Filter Cutoff', + type: 'float', + defaultValue: 1000, + minValue: 20, + maxValue: 20000, + units: 'Hz', + }, + { + id: 'filter.resonance', + label: 'Filter Resonance', + type: 'float', + defaultValue: 1, + minValue: 0.1, + maxValue: 30, + }, + { + id: 'master.gain', + label: 'Master Gain', + type: 'float', + defaultValue: 0.5, + minValue: 0.0, + maxValue: 1.0, + }, + ]; + }; + + getParameterValues = async (): Promise => { + return {...this.parameters}; + }; + + setParameterValues = async (values: ParameterValues): Promise => { + for (const [id, value] of Object.entries(values)) { + if (id in this.parameters) { + this.parameters[id as keyof typeof this.parameters] = value; + this.updateParameter(id, value); + } + } + }; + + private updateParameter = (id: string, value: number): void => { + const now = this.audioContext.currentTime; + + switch (id) { + case 'filter.cutoff': + this.filterNode.frequency.setValueAtTime(value, now); + break; + case 'filter.resonance': + this.filterNode.Q.setValueAtTime(value, now); + break; + case 'master.gain': + this.audioNode.gain.setValueAtTime(value, now); + break; + } + }; + + getState = async (): Promise => { + return { + parameters: this.parameters, + activeNotes: Array.from(this.oscillators.keys()), + }; + }; + + setState = async (state: any): Promise => { + if (state.parameters) { + await this.setParameterValues(state.parameters); + } + }; + + onMidi = (midiData: Uint8Array): void => { + const [status, note, velocity] = midiData; + const command = status & 0xF0; + + switch (command) { + case 0x90: // Note on + if (velocity > 0) { + this.noteOn(note, velocity / 127); + } else { + this.noteOff(note); + } + break; + case 0x80: // Note off + this.noteOff(note); + break; + } + }; + + private noteOn = (note: number, velocity: number): void => { + // Stop existing note if playing + this.noteOff(note); + + const frequency = this.midiNoteToFrequency(note); + const waveforms: OscillatorType[] = ['sine', 'square', 'sawtooth', 'triangle']; + const waveform = waveforms[Math.floor(this.parameters['osc.waveform'])] || 'sine'; + + // Create oscillator and gain + const osc = this.audioContext.createOscillator(); + const gain = this.audioContext.createGain(); + + osc.type = waveform; + osc.frequency.value = frequency; + + // Connect: osc -> gain -> filter -> output + 
+        osc.connect(gain);
+        gain.connect(this.filterNode);
+
+        // ADSR envelope
+        const now = this.audioContext.currentTime;
+        const attack = this.parameters['env.attack'];
+        const decay = this.parameters['env.decay'];
+        const sustain = this.parameters['env.sustain'];
+
+        gain.gain.setValueAtTime(0, now);
+        gain.gain.linearRampToValueAtTime(velocity, now + attack);
+        gain.gain.linearRampToValueAtTime(velocity * sustain, now + attack + decay);
+
+        osc.start(now);
+
+        this.oscillators.set(note, {osc, gain});
+    };
+
+    private noteOff = (note: number): void => {
+        const oscillatorData = this.oscillators.get(note);
+        if (!oscillatorData) {
+            return;
+        }
+
+        const {osc, gain} = oscillatorData;
+        const now = this.audioContext.currentTime;
+        const release = this.parameters['env.release'];
+
+        // Release envelope
+        gain.gain.cancelScheduledValues(now);
+        gain.gain.setValueAtTime(gain.gain.value, now);
+        gain.gain.linearRampToValueAtTime(0, now + release);
+
+        osc.stop(now + release);
+        this.oscillators.delete(note);
+    };
+
+    private midiNoteToFrequency = (note: number): number => {
+        return 440 * Math.pow(2, (note - 69) / 12);
+    };
+
+    destroy = async (): Promise<void> => {
+        // Stop all oscillators
+        for (const [note] of this.oscillators) {
+            this.noteOff(note);
+        }
+
+        // Disconnect audio nodes
+        this.filterNode.disconnect();
+        this.audioNode.disconnect();
+    };
+}
\ No newline at end of file
diff --git a/packages/jamtools/core/wams/spectrum_analyzer_wam.ts b/packages/jamtools/core/wams/spectrum_analyzer_wam.ts
new file mode 100644
index 00000000..45770c2e
--- /dev/null
+++ b/packages/jamtools/core/wams/spectrum_analyzer_wam.ts
@@ -0,0 +1,258 @@
+import {WebAudioModule, ParameterInfo, ParameterValues, WAMConfig} from '@jamtools/core/types/audio_io_types';
+
+export class SpectrumAnalyzerWAM implements WebAudioModule {
+    readonly moduleId: string;
+    readonly instanceId: string;
+    readonly name = 'Spectrum Analyzer';
+    readonly vendor = 'Jamtools';
+
+    readonly audioNode: GainNode;
+    readonly audioContext: AudioContext;
+
+    private analyzerNode: AnalyserNode;
+    private inputGain: GainNode;
+    private frequencyData: Uint8Array;
+    private timeData: Uint8Array;
+
+    private parameters = {
+        'analyzer.fftSize': 2048,
+        'analyzer.smoothing': 0.8,
+        'analyzer.minDecibels': -100,
+        'analyzer.maxDecibels': -30,
+    };
+
+    constructor(audioContext: AudioContext, config: WAMConfig) {
+        this.audioContext = audioContext;
+        this.moduleId = config.moduleId;
+        this.instanceId = config.instanceId;
+
+        // Create audio graph
+        this.inputGain = audioContext.createGain();
+        this.analyzerNode = audioContext.createAnalyser();
+        this.audioNode = audioContext.createGain(); // Pass-through output
+
+        // Connect: input -> inputGain -> analyzer -> output
+        //                                    \-> (analysis data)
+        this.inputGain.connect(this.analyzerNode);
+        this.analyzerNode.connect(this.audioNode);
+
+        // Set initial analyzer properties
+        this.updateAllParameters();
+
+        // Initialize data arrays
+        this.frequencyData = new Uint8Array(this.analyzerNode.frequencyBinCount);
+        this.timeData = new Uint8Array(this.analyzerNode.fftSize);
+
+        // Apply initial parameters from config
+        if (config.initialParameterValues) {
+            this.setParameterValues(config.initialParameterValues);
+        }
+    }
+
+    getParameterInfo = async (): Promise<ParameterInfo[]> => {
+        return [
+            {
+                id: 'analyzer.fftSize',
+                label: 'FFT Size',
+                type: 'enum',
+                defaultValue: 2048,
+                enumValues: ['32', '64', '128', '256', '512', '1024', '2048', '4096', '8192', '16384', '32768'],
+            },
+            {
+                id: 'analyzer.smoothing',
+                label: 'Smoothing',
+                type: 'float',
+                defaultValue: 0.8,
+                minValue: 0.0,
+                maxValue: 1.0,
+            },
+            {
+                id: 'analyzer.minDecibels',
+                label: 'Min Decibels',
+                type: 'float',
+                defaultValue: -100,
+                minValue: -150,
+                maxValue: 0,
+                units: 'dB',
+            },
+            {
+                id: 'analyzer.maxDecibels',
+                label: 'Max Decibels',
+                type: 'float',
+                defaultValue: -30,
+                minValue: -100,
+                maxValue: 0,
+                units: 'dB',
+            },
+        ];
+    };
+
+    getParameterValues = async (): Promise<ParameterValues> => {
+        return {...this.parameters};
+    };
+
+    setParameterValues = async (values: ParameterValues): Promise<void> => {
+        for (const [id, value] of Object.entries(values)) {
+            if (id in this.parameters) {
+                this.parameters[id as keyof typeof this.parameters] = value;
+                this.updateParameter(id, value);
+            }
+        }
+    };
+
+    private updateParameter = (id: string, value: number): void => {
+        switch (id) {
+            case 'analyzer.fftSize':
+                // FFT size must be power of 2
+                const validSizes = [32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768];
+                const closestSize = validSizes.find(size => size >= value) || 2048;
+                this.analyzerNode.fftSize = closestSize;
+
+                // Recreate data arrays with new size
+                this.frequencyData = new Uint8Array(this.analyzerNode.frequencyBinCount);
+                this.timeData = new Uint8Array(this.analyzerNode.fftSize);
+                break;
+
+            case 'analyzer.smoothing':
+                this.analyzerNode.smoothingTimeConstant = value;
+                break;
+
+            case 'analyzer.minDecibels':
+                this.analyzerNode.minDecibels = value;
+                break;
+
+            case 'analyzer.maxDecibels':
+                this.analyzerNode.maxDecibels = value;
+                break;
+        }
+    };
+
+    private updateAllParameters = (): void => {
+        for (const [id, value] of Object.entries(this.parameters)) {
+            this.updateParameter(id, value);
+        }
+    };
+
+    getState = async (): Promise<any> => {
+        return {
+            parameters: this.parameters,
+        };
+    };
+
+    setState = async (state: any): Promise<void> => {
+        if (state.parameters) {
+            await this.setParameterValues(state.parameters);
+        }
+    };
+
+    // Analysis methods
+    getFrequencyData = (): Uint8Array => {
+        this.analyzerNode.getByteFrequencyData(this.frequencyData);
+        return this.frequencyData;
+    };
+
+    getTimeData = (): Uint8Array => {
+        this.analyzerNode.getByteTimeDomainData(this.timeData);
+        return this.timeData;
+    };
+
+    getFloatFrequencyData = (): Float32Array => {
+        const floatData = new Float32Array(this.analyzerNode.frequencyBinCount);
+        this.analyzerNode.getFloatFrequencyData(floatData);
+        return floatData;
+    };
+
+    getFloatTimeData = (): Float32Array => {
+        const floatData = new Float32Array(this.analyzerNode.fftSize);
+        this.analyzerNode.getFloatTimeDomainData(floatData);
+        return floatData;
+    };
+
+    // Utility methods for frequency analysis
+    getFrequencyAtBin = (bin: number): number => {
+        return (bin * this.audioContext.sampleRate) / (2 * this.analyzerNode.frequencyBinCount);
+    };
+
+    getBinAtFrequency = (frequency: number): number => {
+        return Math.round((frequency * 2 * this.analyzerNode.frequencyBinCount) / this.audioContext.sampleRate);
+    };
+
+    // Get peaks in frequency spectrum
+    getFrequencyPeaks = (threshold: number = 0.5): Array<{frequency: number; magnitude: number; bin: number}> => {
+        const frequencyData = this.getFrequencyData();
+        const peaks: Array<{frequency: number; magnitude: number; bin: number}> = [];
+
+        for (let i = 1; i < frequencyData.length - 1; i++) {
+            const current = frequencyData[i] / 255; // Normalize to 0-1
+            const prev = frequencyData[i - 1] / 255;
+            const next = frequencyData[i + 1] / 255;
+
+            // Local maximum above threshold
+            if (current > prev && current > next && current > threshold) {
+                peaks.push({
+                    frequency: this.getFrequencyAtBin(i),
+                    magnitude: current,
+                    bin: i,
+                });
+            }
+        }
+
+        return peaks.sort((a, b) => b.magnitude - a.magnitude); // Sort by magnitude desc
+    };
+
+    // Get input node for connecting sources
+    getInputNode = (): AudioNode => {
+        return this.inputGain;
+    };
+
+    createGui = async (): Promise<HTMLElement> => {
+        const container = document.createElement('div');
+        container.style.width = '400px';
+        container.style.height = '200px';
+        container.style.border = '1px solid #ccc';
+        container.style.padding = '10px';
+        container.style.backgroundColor = '#f0f0f0';
+
+        const canvas = document.createElement('canvas');
+        canvas.width = 380;
+        canvas.height = 150;
+        canvas.style.backgroundColor = '#000';
+
+        const ctx = canvas.getContext('2d')!;
+
+        // Simple spectrum display
+        const draw = () => {
+            const frequencyData = this.getFrequencyData();
+            const barWidth = canvas.width / frequencyData.length;
+
+            ctx.fillStyle = '#000';
+            ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+            ctx.fillStyle = '#0f0';
+            for (let i = 0; i < frequencyData.length; i++) {
+                const barHeight = (frequencyData[i] / 255) * canvas.height;
+                ctx.fillRect(i * barWidth, canvas.height - barHeight, barWidth - 1, barHeight);
+            }
+
+            requestAnimationFrame(draw);
+        };
+
+        draw();
+
+        const title = document.createElement('h4');
+        title.textContent = 'Spectrum Analyzer';
+        title.style.margin = '0 0 10px 0';
+
+        container.appendChild(title);
+        container.appendChild(canvas);
+
+        return container;
+    };
+
+    destroy = async (): Promise<void> => {
+        // Disconnect all nodes
+        this.inputGain.disconnect();
+        this.analyzerNode.disconnect();
+        this.audioNode.disconnect();
+    };
+}
\ No newline at end of file
diff --git a/tsconfig.json b/tsconfig.json
index 0a40a16a..99a99199 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -12,8 +12,8 @@
     // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */

     /* Language and Environment */
-    "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
-    // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
+    "target": "es2017", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
+    "lib": ["es2017", "dom"], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
     // "jsx": "preserve", /* Specify what JSX code is generated. */
     // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
     // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
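
Reviewer note: for reference, here is a minimal standalone usage sketch of the SpectrumAnalyzerWAM added above. It relies only on what this file defines (the constructor taking an AudioContext and a WAMConfig, getInputNode(), the pass-through audioNode, and getFrequencyPeaks()); the oscillator source, the polling interval, the parameter value chosen, and constructing the class directly rather than through the app's module registry are illustrative assumptions, not part of this change.

import {SpectrumAnalyzerWAM} from '@jamtools/core/wams/spectrum_analyzer_wam';

const audioContext = new AudioContext();

// Construct the analyzer directly; moduleId/instanceId values are illustrative.
const analyzer = new SpectrumAnalyzerWAM(audioContext, {
    moduleId: 'com.jamtools.spectrum-analyzer',
    instanceId: `analyzer-${Date.now()}`,
    initialParameterValues: {'analyzer.smoothing': 0.6},
});

// Source -> analyzer input -> analyzer pass-through output -> speakers
const osc = audioContext.createOscillator();
osc.frequency.value = 440;
osc.connect(analyzer.getInputNode());
analyzer.audioNode.connect(audioContext.destination);
osc.start();

// Poll the analysis data; getFrequencyPeaks() returns peaks sorted by magnitude (descending).
setInterval(() => {
    const [strongest] = analyzer.getFrequencyPeaks(0.5);
    if (strongest) {
        console.log(`~${Math.round(strongest.frequency)} Hz, magnitude ${strongest.magnitude.toFixed(2)}`);
    }
}, 500);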