Skip to content

Commit

Permalink
feat: implement AnalyserNode for OfflineAudioContext
Browse files Browse the repository at this point in the history
  • Loading branch information
chrisguttandin committed Mar 28, 2018
1 parent c8392cb commit 983f4ae
Show file tree
Hide file tree
Showing 10 changed files with 613 additions and 178 deletions.
7 changes: 1 addition & 6 deletions src/audio-contexts/audio-context.ts
@@ -1,12 +1,11 @@
import { Injector } from '@angular/core';
import { AnalyserNode } from '../audio-nodes/analyser-node';
import { ChannelMergerNode } from '../audio-nodes/channel-merger-node';
import { ChannelSplitterNode } from '../audio-nodes/channel-splitter-node';
import { MediaElementAudioSourceNode } from '../audio-nodes/media-element-audio-source-node';
import { MediaStreamAudioSourceNode } from '../audio-nodes/media-stream-audio-source-node';
import { INVALID_STATE_ERROR_FACTORY_PROVIDER, InvalidStateErrorFactory } from '../factories/invalid-state-error';
import { isValidLatencyHint } from '../helpers/is-valid-latency-hint';
import { IAnalyserNode, IAudioContext, IAudioContextOptions } from '../interfaces';
import { IAudioContext, IAudioContextOptions } from '../interfaces';
import {
UNPATCHED_AUDIO_CONTEXT_CONSTRUCTOR_PROVIDER,
unpatchedAudioContextConstructor as nptchdDCntxtCnstrctr
Expand Down Expand Up @@ -75,10 +74,6 @@ export class AudioContext extends BaseAudioContext implements IAudioContext {
return (this._state !== null) ? this._state : this._unpatchedAudioContext.state;
}

public createAnalyser (): IAnalyserNode {
return new AnalyserNode(this);
}

public createChannelMerger (numberOfInputs = 6) {
return new ChannelMergerNode(this, { numberOfInputs });
}
Expand Down
6 changes: 6 additions & 0 deletions src/audio-contexts/base-audio-context.ts
@@ -1,5 +1,6 @@
import { addAudioWorkletModule } from '../add-audio-worklet-module';
import { AudioBuffer } from '../audio-buffer';
import { AnalyserNode } from '../audio-nodes/analyser-node';
import { AudioBufferSourceNode } from '../audio-nodes/audio-buffer-source-node';
import { BiquadFilterNode } from '../audio-nodes/biquad-filter-node';
import { ConstantSourceNode } from '../audio-nodes/constant-source-node';
Expand All @@ -8,6 +9,7 @@ import { IIRFilterNode } from '../audio-nodes/iir-filter-node';
import { OscillatorNode } from '../audio-nodes/oscillator-node';
import { decodeAudioData } from '../decode-audio-data';
import {
IAnalyserNode,
IAudioBuffer,
IAudioBufferSourceNode,
IAudioWorklet,
Expand Down Expand Up @@ -43,6 +45,10 @@ export class BaseAudioContext extends MinimalBaseAudioContext implements IBaseAu
return this._audioWorklet;
}

public createAnalyser (): IAnalyserNode {
return new AnalyserNode(this);
}

public createBiquadFilter (): IBiquadFilterNode {
return new BiquadFilterNode(this);
}
Expand Down
67 changes: 13 additions & 54 deletions src/audio-nodes/analyser-node.ts
@@ -1,22 +1,10 @@
import { Injector } from '@angular/core';
import { cacheTestResult } from '../helpers/cache-test-result';
import { AUDIO_NODE_RENDERER_STORE } from '../globals';
import { createNativeAnalyserNode } from '../helpers/create-native-analyser-node';
import { getNativeContext } from '../helpers/get-native-context';
import { isOfflineAudioContext } from '../helpers/is-offline-audio-context';
import { IAnalyserNode, IAnalyserOptions, IMinimalBaseAudioContext } from '../interfaces';
import {
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_SUPPORT_TESTER_PROVIDER,
AnalyserNodeGetFloatTimeDomainDataSupportTester } from '../support-testers/analyser-node-get-float-time-domain-data';
import {
TChannelCountMode,
TChannelInterpretation,
TNativeAnalyserNode,
TUnpatchedAudioContext,
TUnpatchedOfflineAudioContext
} from '../types';
import {
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_METHOD_WRAPPER_PROVIDER,
AnalyserNodeGetFloatTimeDomainDataMethodWrapper
} from '../wrappers/analyser-node-get-float-time-domain-data-method';
import { AnalyserNodeRenderer } from '../renderers/analyser-node';
import { TChannelCountMode, TChannelInterpretation, TNativeAnalyserNode } from '../types';
import { NoneAudioDestinationNode } from './none-audio-destination-node';

const DEFAULT_OPTIONS: IAnalyserOptions = {
Expand All @@ -29,49 +17,20 @@ const DEFAULT_OPTIONS: IAnalyserOptions = {
smoothingTimeConstant: 0.8
};

const injector = Injector.create({
providers: [
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_METHOD_WRAPPER_PROVIDER,
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_SUPPORT_TESTER_PROVIDER
]
});

const analyserNodeGetFloatTimeDomainDataMethodWrapper = injector.get(AnalyserNodeGetFloatTimeDomainDataMethodWrapper);
const analyserNodeGetFloatTimeDomainDataSupportTester = injector.get(AnalyserNodeGetFloatTimeDomainDataSupportTester);

const isSupportingAnalyserNodeGetFloatTimeDomainData = (context: TUnpatchedAudioContext | TUnpatchedOfflineAudioContext) => cacheTestResult(
AnalyserNodeGetFloatTimeDomainDataSupportTester,
() => analyserNodeGetFloatTimeDomainDataSupportTester.test(context)
);

const createNativeNode = (nativeContext: TUnpatchedAudioContext | TUnpatchedOfflineAudioContext) => {
if (isOfflineAudioContext(nativeContext)) {
throw new Error('This is not yet supported.');
}

const nativeNode = nativeContext.createAnalyser();

// Bug #37: Only Edge and Safari create an AnalyserNode with the default properties.
if (nativeNode.channelCount === 1) {
nativeNode.channelCount = 2;
}

// Bug #36: Safari does not support getFloatTimeDomainData() yet.
if (!isSupportingAnalyserNodeGetFloatTimeDomainData(nativeContext)) {
analyserNodeGetFloatTimeDomainDataMethodWrapper.wrap(nativeNode);
}

return nativeNode;
};

export class AnalyserNode extends NoneAudioDestinationNode<TNativeAnalyserNode> implements IAnalyserNode {

constructor (context: IMinimalBaseAudioContext, options: Partial<IAnalyserOptions> = DEFAULT_OPTIONS) {
const nativeContext = getNativeContext(context);
const { channelCount } = <IAnalyserOptions> { ...DEFAULT_OPTIONS, ...options };
const nativeNode = createNativeNode(nativeContext);
const mergedOptions = <IAnalyserOptions> { ...DEFAULT_OPTIONS, ...options };
const nativeNode = createNativeAnalyserNode(nativeContext, mergedOptions);

super(context, nativeNode, mergedOptions.channelCount);

if (isOfflineAudioContext(nativeContext)) {
const analyserNodeRenderer = new AnalyserNodeRenderer(this);

super(context, nativeNode, channelCount);
AUDIO_NODE_RENDERER_STORE.set(this, analyserNodeRenderer);
}
}

public get fftSize () {
Expand Down
65 changes: 65 additions & 0 deletions src/helpers/create-native-analyser-node.ts
@@ -0,0 +1,65 @@
import { Injector } from '@angular/core';
import { assignNativeAudioNodeOptions } from '../helpers/assign-native-audio-node-options';
import { cacheTestResult } from '../helpers/cache-test-result';
import { IAnalyserOptions } from '../interfaces';
import {
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_SUPPORT_TESTER_PROVIDER,
AnalyserNodeGetFloatTimeDomainDataSupportTester
} from '../support-testers/analyser-node-get-float-time-domain-data';
import { TNativeAnalyserNode, TUnpatchedAudioContext, TUnpatchedOfflineAudioContext } from '../types';
import {
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_METHOD_WRAPPER_PROVIDER,
AnalyserNodeGetFloatTimeDomainDataMethodWrapper
} from '../wrappers/analyser-node-get-float-time-domain-data-method';

const injector = Injector.create({
providers: [
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_METHOD_WRAPPER_PROVIDER,
ANALYSER_NODE_GET_FLOAT_TIME_DOMAIN_DATA_SUPPORT_TESTER_PROVIDER
]
});

const analyserNodeGetFloatTimeDomainDataMethodWrapper = injector.get(AnalyserNodeGetFloatTimeDomainDataMethodWrapper);
const analyserNodeGetFloatTimeDomainDataSupportTester = injector.get(AnalyserNodeGetFloatTimeDomainDataSupportTester);

const isSupportingAnalyserNodeGetFloatTimeDomainData = (context: TUnpatchedAudioContext | TUnpatchedOfflineAudioContext) => cacheTestResult(
AnalyserNodeGetFloatTimeDomainDataSupportTester,
() => analyserNodeGetFloatTimeDomainDataSupportTester.test(context)
);

/**
 * Creates a native AnalyserNode on the given (unpatched) context and applies
 * the provided options, working around two known browser bugs.
 *
 * @param nativeContext - The unpatched (Offline)AudioContext to create the node on.
 * @param options - Optional AnalyserNode options; only explicitly set
 *   properties are applied to the freshly created node.
 * @returns The configured native AnalyserNode.
 */
export const createNativeAnalyserNode = (
    nativeContext: TUnpatchedAudioContext | TUnpatchedOfflineAudioContext,
    options: Partial<IAnalyserOptions> = { }
): TNativeAnalyserNode => {
    const nativeNode = nativeContext.createAnalyser();

    // Applies the generic AudioNode options (presumably channelCount,
    // channelCountMode and channelInterpretation — confirm against the helper)
    // that are present on the given options object.
    assignNativeAudioNodeOptions(nativeNode, options);

    if (options.fftSize !== undefined) {
        nativeNode.fftSize = options.fftSize;
    }

    if (options.maxDecibels !== undefined) {
        nativeNode.maxDecibels = options.maxDecibels;
    }

    if (options.minDecibels !== undefined) {
        nativeNode.minDecibels = options.minDecibels;
    }

    if (options.smoothingTimeConstant !== undefined) {
        nativeNode.smoothingTimeConstant = options.smoothingTimeConstant;
    }

    // Bug #37: Only Edge and Safari create an AnalyserNode with the default
    // properties. Normalize the channelCount only when the caller did not
    // request an explicit value — otherwise an explicit { channelCount: 1 }
    // would be silently overwritten to 2.
    if (options.channelCount === undefined && nativeNode.channelCount === 1) {
        nativeNode.channelCount = 2;
    }

    // Bug #36: Safari does not support getFloatTimeDomainData() yet.
    if (!isSupportingAnalyserNodeGetFloatTimeDomainData(nativeContext)) {
        analyserNodeGetFloatTimeDomainDataMethodWrapper.wrap(nativeNode);
    }

    return nativeNode;
};
4 changes: 0 additions & 4 deletions src/interfaces/audio-context.ts
@@ -1,4 +1,3 @@
import { IAnalyserNode } from './analyser-node';
import { IAudioNode } from './audio-node';
import { IBaseAudioContext } from './base-audio-context';
import { IMediaElementAudioSourceNode } from './media-element-audio-source-node';
Expand All @@ -7,9 +6,6 @@ import { IMinimalAudioContext } from './minimal-audio-context';

export interface IAudioContext extends IBaseAudioContext, IMinimalAudioContext {

// @todo This should move into the IBaseAudioContext interface.
createAnalyser (): IAnalyserNode;

// @todo This should move into the IBaseAudioContext interface.
createChannelMerger (numberOfInputs?: number): IAudioNode;

Expand Down
3 changes: 3 additions & 0 deletions src/interfaces/base-audio-context.ts
@@ -1,4 +1,5 @@
import { TDecodeErrorCallback, TDecodeSuccessCallback } from '../types';
import { IAnalyserNode } from './analyser-node';
import { IAudioBufferSourceNode } from './audio-buffer-source-node';
import { IAudioWorklet } from './audio-worklet';
import { IBiquadFilterNode } from './biquad-filter-node';
Expand All @@ -14,6 +15,8 @@ export interface IBaseAudioContext extends IMinimalBaseAudioContext {

audioWorklet: IAudioWorklet;

createAnalyser (): IAnalyserNode;

createBiquadFilter (): IBiquadFilterNode;

createBuffer (numberOfChannels: number, length: number, sampleRate: number): AudioBuffer;
Expand Down
38 changes: 38 additions & 0 deletions src/renderers/analyser-node.ts
@@ -0,0 +1,38 @@
import { createNativeAnalyserNode } from '../helpers/create-native-analyser-node';
import { getNativeNode } from '../helpers/get-native-node';
import { isOwnedByContext } from '../helpers/is-owned-by-context';
import { IAnalyserNode } from '../interfaces';
import { TNativeAnalyserNode, TNativeAudioNode, TUnpatchedOfflineAudioContext } from '../types';
import { AudioNodeRenderer } from './audio-node';

export class AnalyserNodeRenderer extends AudioNodeRenderer {

private _nativeNode: null | TNativeAnalyserNode;

private _proxy: IAnalyserNode;

constructor (proxy: IAnalyserNode) {
super();

this._nativeNode = null;
this._proxy = proxy;
}

public async render (offlineAudioContext: TUnpatchedOfflineAudioContext): Promise<TNativeAudioNode> {
if (this._nativeNode !== null) {
return this._nativeNode;
}

this._nativeNode = <TNativeAnalyserNode> getNativeNode(this._proxy);

// If the initially used nativeNode was not constructed on the same OfflineAudioContext it needs to be created again.
if (!isOwnedByContext(this._nativeNode, offlineAudioContext)) {
this._nativeNode = createNativeAnalyserNode(offlineAudioContext);
}

await this._connectSources(offlineAudioContext, <TNativeAudioNode> this._nativeNode);

return <TNativeAudioNode> this._nativeNode;
}

}
116 changes: 2 additions & 114 deletions test/unit/audio-contexts/audio-context.js
Expand Up @@ -259,120 +259,8 @@ describe('AudioContext', () => {

describe('createAnalyser()', () => {

it('should return an instance of the AnalyserNode interface', () => {
const analyserNode = audioContext.createAnalyser();

expect(analyserNode.channelCount).to.equal(2);
expect(analyserNode.channelCountMode).to.equal('max');
expect(analyserNode.channelInterpretation).to.equal('speakers');

expect(analyserNode.fftSize).to.equal(2048);
expect(analyserNode.frequencyBinCount).to.equal(1024);

expect(analyserNode.getByteFrequencyData).to.be.a('function');
expect(analyserNode.getByteTimeDomainData).to.be.a('function');

expect(analyserNode.getFloatFrequencyData).to.be.a('function');
expect(analyserNode.getFloatTimeDomainData).to.be.a('function');

expect(analyserNode.maxDecibels).to.equal(-30);
expect(analyserNode.minDecibels).to.equal(-100);

expect(analyserNode.numberOfInputs).to.equal(1);
expect(analyserNode.numberOfOutputs).to.equal(1);

expect(analyserNode.smoothingTimeConstant).to.closeTo(0.8, 0.0000001);
});

it('should throw an error if the AudioContext is closed', (done) => {
audioContext
.close()
.then(() => {
audioContext.createAnalyser();
})
.catch((err) => {
expect(err.code).to.equal(11);
expect(err.name).to.equal('InvalidStateError');

audioContext = new AudioContext();

done();
});
});

it('should be chainable', () => {
const analyserNode = audioContext.createAnalyser();
const gainNode = audioContext.createGain();

expect(analyserNode.connect(gainNode)).to.equal(gainNode);
});

it('should be disconnectable', (done) => {
const candidate = audioContext.createAnalyser();
const dummy = audioContext.createGain();
const analyzer = createScriptProcessor(audioContext, 256, 1, 1);
// Safari does not play buffers which contain just one frame.
const ones = audioContext.createBuffer(1, 2, 44100);

ones.copyToChannel(new Float32Array([ 1, 1 ]), 0);

const source = audioContext.createBufferSource();

source.buffer = ones;
source.loop = true;

source.connect(candidate);
candidate.connect(analyzer);
analyzer.connect(audioContext.destination);
candidate.connect(dummy);
candidate.disconnect(dummy);

analyzer.onaudioprocess = (event) => {
const channelData = event.inputBuffer.getChannelData(0);

if (Array.from(channelData).indexOf(1) > -1) {
source.stop();

analyzer.onaudioprocess = null;

source.disconnect(candidate);
candidate.disconnect(analyzer);
analyzer.disconnect(audioContext.destination);

done();
}
};

source.start();
});

it('should not be connectable to a node of another AudioContext', (done) => {
const analyserNode = audioContext.createAnalyser();
const anotherAudioContext = new AudioContext();

try {
analyserNode.connect(anotherAudioContext.destination);
} catch (err) {
expect(err.code).to.equal(15);
expect(err.name).to.equal('InvalidAccessError');

done();
} finally {
anotherAudioContext.close();
}
});

describe('getFloatTimeDomainData()', () => {

it('should return time-domain data', () => {
const analyserNode = audioContext.createAnalyser();
const data = new Float32Array(analyserNode.fftSize);

analyserNode.getFloatTimeDomainData(data);

expect(data[0]).to.equal(0);
});

it('should be a function', () => {
expect(audioContext.createAnalyser).to.be.a('function');
});

});
Expand Down

0 comments on commit 983f4ae

Please sign in to comment.