midi-player.ts
typescript
| 1 | /** |
| 2 | * midi-player.ts — Tone.js powered MIDI playback for MuseHub. |
| 3 | * |
| 4 | * Converts a MidiParseResult (from /objects/{id}/parse-midi) into a scheduled |
| 5 | * Tone.js sequence. Drives the piano roll playhead and transport controls. |
| 6 | * |
| 7 | * Tone.js (v14) is loaded via <script> tag on the host page and exposed as |
| 8 | * the global `Tone`. No ES-module import is needed here; we use `declare`. |
| 9 | * |
| 10 | * Architecture: |
| 11 | * - One PolySynth per logical MIDI track (colour-coded, matches piano roll) |
| 12 | * - Percussion tracks (ch 9) use a MembraneSynth + MetalSynth duo |
| 13 | * - Tone.Transport drives timing; Tone.Draw syncs the playhead to rAF |
| 14 | * - Caller registers an onProgress(beatPosition) callback; the piano roll |
| 15 | * canvas draws a red playhead line at that beat position |
| 16 | * |
| 17 | * Public API: |
| 18 | * const player = new MidiPlayer(midiData, { onProgress, onEnd, bpmOverride }); |
| 19 | * await player.play(); |
| 20 | * player.pause(); |
| 21 | * player.stop(); |
| 22 | * player.seek(beatPosition); |
| 23 | * player.dispose(); |
| 24 | */ |
| 25 | |
| 26 | // --------------------------------------------------------------------------- |
| 27 | // Tone.js global — declared but loaded by the host page via CDN <script> |
| 28 | // --------------------------------------------------------------------------- |
declare const Tone: {
  // Resumes the shared AudioContext; browsers require this to run inside a
  // user-gesture handler before any sound can be produced.
  start(): Promise<void>;
  // Current AudioContext time in seconds (NOT the Transport timeline).
  now(): number;
  Transport: {
    bpm: { value: number };
    // Playback position in seconds along the Transport timeline; writable for seeking.
    seconds: number;
    state: string;
    start(time?: number): void;
    pause(): void;
    stop(): void;
    cancel(time?: number): void;
    // Schedules a one-shot event at a Transport-timeline time; returns an id for clear().
    schedule(cb: (time: number) => void, time: number | string): number;
    scheduleRepeat(cb: (time: number) => void, interval: string, startTime?: number | string): number;
    clear(id: number): void;
    position: string;
  };
  // Defers UI work so it lands on the animation frame nearest the given audio time.
  Draw: {
    schedule(cb: () => void, time: number): void;
  };
  PolySynth: new (synth: unknown, opts?: unknown) => ToneSynth;
  Synth: unknown;
  MembraneSynth: new (opts?: unknown) => ToneSynth;
  MetalSynth: new (opts?: unknown) => ToneSynth;
  Gain: new (vol: number) => ToneNode;
  getDestination(): ToneNode;
};
| 55 | |
/**
 * Structural type covering only the members this module calls on the Tone.js
 * synths it constructs (PolySynth, MembraneSynth, MetalSynth).
 */
interface ToneSynth {
  triggerAttackRelease(note: string | number, duration: string | number, time?: number, velocity?: number): void;
  triggerAttack(note: string | number, time?: number, velocity?: number): void;
  triggerRelease(note: string | number, time?: number): void;
  connect(dest: ToneNode): this;
  disconnect(): this;
  dispose(): void;
  // Output level; Tone.js expresses volume in decibels (negative = attenuation).
  volume: { value: number };
}
| 65 | |
/** Minimal Tone.js audio-node surface (used for Gain and the destination). */
interface ToneNode {
  connect(dest: ToneNode): this;
  toDestination(): this;
  dispose(): void;
}
| 71 | |
| 72 | // --------------------------------------------------------------------------- |
| 73 | // MIDI data types (mirrors piano-roll.ts) |
| 74 | // --------------------------------------------------------------------------- |
| 75 | |
/** One note event, already flattened to beat-domain timing by the parser. */
export interface MidiNote {
  /** MIDI note number; used to index NOTE_NAMES / PERC_NOTES, so 0–127 expected. */
  pitch: number;
  /** Onset position in beats from the start of the piece. */
  start_beat: number;
  /** Length in beats; the player enforces a small minimum audible duration. */
  duration_beats: number;
  /** MIDI velocity; the player normalizes via velocity / 127 — assumed 0–127. */
  velocity: number;
  /** Owning track; matches MidiTrack.track_id. */
  track_id: number;
  /** MIDI channel (index 9 selects the percussion synth). */
  channel: number;
}
| 84 | |
/** One logical MIDI track as returned by the parse endpoint. */
export interface MidiTrack {
  track_id: number;
  /** MIDI channel; channel 9 gets percussion synthesis instead of PolySynth. */
  channel: number;
  name?: string;
  /** Note events; may be absent for empty or meta-only tracks. */
  notes?: MidiNote[];
}
| 91 | |
/** Payload shape of /objects/{id}/parse-midi (mirrors piano-roll.ts). */
export interface MidiParseResult {
  tracks?: MidiTrack[];
  /** File tempo; the player falls back to 120 BPM when absent. */
  tempo_bpm?: number;
  /** Informational only — not read by the player. */
  time_signature?: string;
  /** Piece length in beats; drives the end-of-playback callback. */
  total_beats?: number;
}
| 98 | |
| 99 | // --------------------------------------------------------------------------- |
| 100 | // Player options |
| 101 | // --------------------------------------------------------------------------- |
| 102 | |
/** Callbacks and overrides accepted by the MidiPlayer constructor. */
export interface MidiPlayerOptions {
  /** Called on each animation frame with the current beat position (0-based). */
  onProgress?: (beat: number) => void;
  /** Called when playback reaches the end of the piece. */
  onEnd?: () => void;
  /** Override the file tempo. Defaults to midi.tempo_bpm, then 120 BPM. */
  bpmOverride?: number;
}
| 111 | |
| 112 | // --------------------------------------------------------------------------- |
| 113 | // Pitch helpers |
| 114 | // --------------------------------------------------------------------------- |
| 115 | |
| 116 | const NOTE_NAMES = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'] as const; |
| 117 | |
| 118 | function midiToTone(pitch: number): string { |
| 119 | // Tone.js uses scientific pitch: C4 = middle C (MIDI 60) |
| 120 | const octave = Math.floor(pitch / 12) - 1; |
| 121 | return NOTE_NAMES[pitch % 12] + octave; |
| 122 | } |
| 123 | |
| 124 | function beatsToSeconds(beats: number, bpm: number): number { |
| 125 | return (beats * 60) / bpm; |
| 126 | } |
| 127 | |
| 128 | // --------------------------------------------------------------------------- |
| 129 | // Percussion mapping (GM channel 10 / index 9) |
| 130 | // --------------------------------------------------------------------------- |
| 131 | |
// Maps GM percussion key numbers to a synth articulation and trigger pitch.
// Pitches not listed here fall back to 'C1' in the scheduler. Instrument
// names follow the General MIDI percussion map.
const PERC_NOTES: Record<number, { type: 'kick' | 'snare' | 'hat' | 'clap'; note: string }> = {
  35: { type: 'kick', note: 'C1' },   // Acoustic Bass Drum
  36: { type: 'kick', note: 'C1' },   // Bass Drum 1
  38: { type: 'snare', note: 'E1' },  // Acoustic Snare
  40: { type: 'snare', note: 'E1' },  // Electric Snare
  42: { type: 'hat', note: 'F#1' },   // Closed Hi-Hat
  44: { type: 'hat', note: 'F#1' },   // Pedal Hi-Hat
  46: { type: 'hat', note: 'G#1' },   // Open Hi-Hat
  49: { type: 'clap', note: 'A1' },   // Crash Cymbal 1 (clap-style hit here)
  51: { type: 'hat', note: 'A#1' },   // Ride Cymbal 1
};
| 143 | |
| 144 | // --------------------------------------------------------------------------- |
| 145 | // MidiPlayer |
| 146 | // --------------------------------------------------------------------------- |
| 147 | |
| 148 | type PlayerState = 'stopped' | 'playing' | 'paused'; |
| 149 | |
| 150 | export class MidiPlayer { |
| 151 | private readonly midi: MidiParseResult; |
| 152 | private readonly opts: MidiPlayerOptions; |
| 153 | private readonly bpm: number; |
| 154 | private readonly totalBeats: number; |
| 155 | |
| 156 | private state: PlayerState = 'stopped'; |
| 157 | private synths: Map<number, ToneSynth> = new Map(); |
| 158 | private masterGain: ToneNode | null = null; |
| 159 | private scheduledIds: number[] = []; |
| 160 | private tickerId: number | null = null; |
| 161 | private pausedAt = 0; // Transport.seconds when paused |
| 162 | |
| 163 | constructor(midi: MidiParseResult, opts: MidiPlayerOptions = {}) { |
| 164 | this.midi = midi; |
| 165 | this.opts = opts; |
| 166 | this.bpm = opts.bpmOverride ?? midi.tempo_bpm ?? 120; |
| 167 | this.totalBeats = midi.total_beats ?? 0; |
| 168 | } |
| 169 | |
| 170 | // ── Public transport API ───────────────────────────────────────────────── |
| 171 | |
| 172 | async play(): Promise<void> { |
| 173 | if (this.state === 'playing') return; |
| 174 | |
| 175 | // Tone context must be resumed from a user gesture |
| 176 | await Tone.start(); |
| 177 | |
| 178 | if (this.state === 'stopped') { |
| 179 | this._buildSynths(); |
| 180 | this._scheduleNotes(); |
| 181 | this._startTicker(); |
| 182 | Tone.Transport.bpm.value = this.bpm; |
| 183 | Tone.Transport.start(); |
| 184 | } else { |
| 185 | // Resume from pause |
| 186 | Tone.Transport.start(); |
| 187 | } |
| 188 | |
| 189 | this.state = 'playing'; |
| 190 | } |
| 191 | |
| 192 | pause(): void { |
| 193 | if (this.state !== 'playing') return; |
| 194 | this.pausedAt = Tone.Transport.seconds; |
| 195 | Tone.Transport.pause(); |
| 196 | this.state = 'paused'; |
| 197 | } |
| 198 | |
| 199 | stop(): void { |
| 200 | this._cleanup(); |
| 201 | this.state = 'stopped'; |
| 202 | this.pausedAt = 0; |
| 203 | this.opts.onProgress?.(0); |
| 204 | } |
| 205 | |
| 206 | seek(beat: number): void { |
| 207 | const wasPlaying = this.state === 'playing'; |
| 208 | if (wasPlaying) Tone.Transport.pause(); |
| 209 | |
| 210 | // Re-schedule from the new position |
| 211 | this._cancelScheduled(); |
| 212 | Tone.Transport.seconds = beatsToSeconds(beat, this.bpm); |
| 213 | this._scheduleNotes(beat); |
| 214 | |
| 215 | if (wasPlaying) Tone.Transport.start(); |
| 216 | } |
| 217 | |
| 218 | /** Permanently release all Tone.js resources. */ |
| 219 | dispose(): void { |
| 220 | this._cleanup(); |
| 221 | this.synths.forEach(s => s.dispose()); |
| 222 | this.synths.clear(); |
| 223 | this.masterGain?.dispose(); |
| 224 | this.masterGain = null; |
| 225 | } |
| 226 | |
| 227 | get isPlaying(): boolean { return this.state === 'playing'; } |
| 228 | get isPaused(): boolean { return this.state === 'paused'; } |
| 229 | get currentBeat(): number { |
| 230 | return (Tone.Transport.seconds * this.bpm) / 60; |
| 231 | } |
| 232 | |
| 233 | // ── Private helpers ─────────────────────────────────────────────────────── |
| 234 | |
| 235 | private _buildSynths(): void { |
| 236 | if (this.synths.size > 0) return; |
| 237 | |
| 238 | const tracks = this.midi.tracks ?? []; |
| 239 | const dest = Tone.getDestination(); |
| 240 | |
| 241 | tracks.forEach(track => { |
| 242 | const isPerc = track.channel === 9; |
| 243 | let synth: ToneSynth; |
| 244 | |
| 245 | if (isPerc) { |
| 246 | synth = new Tone.MembraneSynth({ |
| 247 | pitchDecay: 0.05, |
| 248 | octaves: 4, |
| 249 | envelope: { attack: 0.001, decay: 0.2, sustain: 0, release: 0.2 }, |
| 250 | }); |
| 251 | } else { |
| 252 | // Piano-style attack with soft release |
| 253 | synth = new Tone.PolySynth(Tone.Synth, { |
| 254 | oscillator: { type: 'triangle8' }, |
| 255 | envelope: { attack: 0.01, decay: 0.3, sustain: 0.45, release: 1.8 }, |
| 256 | }); |
| 257 | } |
| 258 | |
| 259 | synth.volume.value = isPerc ? -14 : -18; |
| 260 | synth.connect(dest as ToneNode); |
| 261 | this.synths.set(track.track_id, synth); |
| 262 | }); |
| 263 | } |
| 264 | |
| 265 | private _scheduleNotes(fromBeat = 0): void { |
| 266 | const tracks = this.midi.tracks ?? []; |
| 267 | const bpm = this.bpm; |
| 268 | const totalBeats = this.totalBeats; |
| 269 | const origin = Tone.now(); |
| 270 | |
| 271 | tracks.forEach(track => { |
| 272 | const synth = this.synths.get(track.track_id); |
| 273 | if (!synth) return; |
| 274 | |
| 275 | const notes = (track.notes ?? []).filter(n => n.start_beat >= fromBeat); |
| 276 | const isPerc = track.channel === 9; |
| 277 | |
| 278 | notes.forEach(n => { |
| 279 | const startSec = beatsToSeconds(n.start_beat - fromBeat, bpm) + origin + 0.05; |
| 280 | const durSec = Math.max(beatsToSeconds(n.duration_beats, bpm), 0.02); |
| 281 | const vel = Math.max(0.01, Math.min(1, n.velocity / 127)); |
| 282 | |
| 283 | const id = Tone.Transport.schedule((time: number) => { |
| 284 | if (isPerc) { |
| 285 | const perc = PERC_NOTES[n.pitch] ?? { note: 'C1' }; |
| 286 | synth.triggerAttackRelease(perc.note, '16n', time, vel); |
| 287 | } else { |
| 288 | const note = midiToTone(n.pitch); |
| 289 | synth.triggerAttackRelease(note, durSec, time, vel); |
| 290 | } |
| 291 | }, startSec); |
| 292 | |
| 293 | this.scheduledIds.push(id); |
| 294 | }); |
| 295 | }); |
| 296 | |
| 297 | // Schedule "end of piece" callback |
| 298 | if (totalBeats > fromBeat) { |
| 299 | const endSec = beatsToSeconds(totalBeats - fromBeat, bpm) + origin + 0.1; |
| 300 | const endId = Tone.Transport.schedule((_time: number) => { |
| 301 | Tone.Draw.schedule(() => { |
| 302 | this.stop(); |
| 303 | this.opts.onEnd?.(); |
| 304 | }, Tone.now()); |
| 305 | }, endSec); |
| 306 | this.scheduledIds.push(endId); |
| 307 | } |
| 308 | } |
| 309 | |
| 310 | private _startTicker(): void { |
| 311 | const bpm = this.bpm; |
| 312 | const onProgress = this.opts.onProgress; |
| 313 | if (!onProgress) return; |
| 314 | |
| 315 | const id = Tone.Transport.scheduleRepeat((time: number) => { |
| 316 | const beat = (Tone.Transport.seconds * bpm) / 60; |
| 317 | Tone.Draw.schedule(() => onProgress(beat), time); |
| 318 | }, '16n'); |
| 319 | |
| 320 | this.tickerId = id; |
| 321 | } |
| 322 | |
| 323 | private _cancelScheduled(): void { |
| 324 | this.scheduledIds.forEach(id => Tone.Transport.clear(id)); |
| 325 | this.scheduledIds = []; |
| 326 | if (this.tickerId !== null) { |
| 327 | Tone.Transport.clear(this.tickerId); |
| 328 | this.tickerId = null; |
| 329 | } |
| 330 | } |
| 331 | |
| 332 | private _cleanup(): void { |
| 333 | this._cancelScheduled(); |
| 334 | Tone.Transport.stop(); |
| 335 | Tone.Transport.cancel(); |
| 336 | } |
| 337 | } |
| 338 | |
| 339 | // --------------------------------------------------------------------------- |
| 340 | // Playhead overlay helper — draws a red line on the piano roll canvas |
| 341 | // --------------------------------------------------------------------------- |
| 342 | |
| 343 | export interface PlayheadOptions { |
| 344 | canvas: HTMLCanvasElement; |
| 345 | outerEl: HTMLElement; |
| 346 | totalBeats: number; |
| 347 | zoomX: number; |
| 348 | panX: number; |
| 349 | keyWidth: number; |
| 350 | } |
| 351 | |
| 352 | /** |
| 353 | * Draw (or erase) a translucent red playhead line at the given beat position. |
| 354 | * Called from the MidiPlayer.onProgress callback on every animation frame. |
| 355 | */ |
| 356 | export function drawPlayhead( |
| 357 | beat: number, |
| 358 | ctx: CanvasRenderingContext2D, |
| 359 | opts: PlayheadOptions, |
| 360 | ): void { |
| 361 | const { canvas, outerEl, panX, zoomX, keyWidth } = opts; |
| 362 | const dpr = window.devicePixelRatio || 1; |
| 363 | const w = outerEl.clientWidth; |
| 364 | const h = canvas.height / dpr; |
| 365 | |
| 366 | const x = keyWidth + (beat - panX) * zoomX; |
| 367 | if (x < keyWidth || x > w) return; |
| 368 | |
| 369 | ctx.save(); |
| 370 | ctx.globalAlpha = 0.85; |
| 371 | ctx.strokeStyle = '#f85149'; // --dim-e red |
| 372 | ctx.lineWidth = 1.5; |
| 373 | ctx.setLineDash([4, 3]); |
| 374 | ctx.beginPath(); |
| 375 | ctx.moveTo(x, 0); |
| 376 | ctx.lineTo(x, h); |
| 377 | ctx.stroke(); |
| 378 | ctx.restore(); |
| 379 | } |
| 380 | |
| 381 | // --------------------------------------------------------------------------- |
| 382 | // Factory — resolves the MidiParseResult from the page canvas attributes |
| 383 | // then constructs and returns a ready MidiPlayer |
| 384 | // --------------------------------------------------------------------------- |
| 385 | |
| 386 | export async function createPlayerFromCanvas( |
| 387 | canvas: HTMLCanvasElement, |
| 388 | opts: Omit<MidiPlayerOptions, 'onProgress'> & { |
| 389 | onProgress?: (beat: number) => void; |
| 390 | }, |
| 391 | ): Promise<MidiPlayer | null> { |
| 392 | const midiUrl = canvas.dataset.midiUrl; |
| 393 | if (!midiUrl) return null; |
| 394 | |
| 395 | try { |
| 396 | const res = await fetch(midiUrl, { credentials: 'include' }); |
| 397 | if (!res.ok) return null; |
| 398 | const midi = (await res.json()) as MidiParseResult; |
| 399 | return new MidiPlayer(midi, opts); |
| 400 | } catch { |
| 401 | return null; |
| 402 | } |
| 403 | } |
| 404 | |
// Expose to window for page-level scripts that import via bundled app.js
declare global {
  interface Window {
    MidiPlayer: typeof MidiPlayer;
    createPlayerFromCanvas: typeof createPlayerFromCanvas;
  }
}

// Assigned at module load so non-module <script> code on the page can reach
// the player without importing this bundle directly.
window.MidiPlayer = MidiPlayer;
window.createPlayerFromCanvas = createPlayerFromCanvas;