Bug 1446218 [wpt PR 10062] - Web Platform Tests: add /interfaces/webaudio.idl and corresponding test, a=testonly

Automatic update from web-platform-tests
Web Platform Tests: add /interfaces/webaudio.idl and corresponding test

The Web Audio tests (in /webaudio) run idlharness tests on an
interface-by-interface basis, but the preferred approach is now to
consolidate the IDL fragments under /interfaces. This also allows
dependent specs to include the master interface definitions.
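
To illustrate what the consolidation enables: a dependent spec's
idlharness test can fetch /interfaces/webaudio.idl and register it as an
untested dependency instead of copying Web Audio IDL fragments inline.
The sketch below mirrors the harness calls used in the new
webaudio/idlharness.https.html; /interfaces/dependent.idl is a
hypothetical placeholder for the dependent spec's own IDL and is not
part of this change.

// Minimal sketch of a dependent spec's idlharness test (assumes
// testharness.js, WebIDLParser.js and idlharness.js are loaded, as in
// the test added below; dependent.idl is a hypothetical file).
promise_test(async t => {
  const [webaudio, dependent] = await Promise.all([
    '/interfaces/webaudio.idl',    // consolidated Web Audio definitions
    '/interfaces/dependent.idl'    // the dependent spec's own IDL
  ].map(url => fetch(url).then(response => response.text())));
  const idl_array = new IdlArray();
  idl_array.add_untested_idls(webaudio); // dependency only, not under test
  idl_array.add_idls(dependent);         // interfaces actually under test
  idl_array.test();
}, 'Dependent spec IDL test (sketch)');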

Bug: 679813,697123,785409,823431

Change-Id: I60bc8415627b8b3f43c13060e4bf65f47bc17c2b
Reviewed-on: https://chromium-review.googlesource.com/965401
Commit-Queue: Joshua Bell <jsbell@chromium.org>
Reviewed-by: Raymond Toy <rtoy@chromium.org>
Reviewed-by: Hongchan Choi <hongchan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#544173}

wpt-commits: 3ec34e5a2c8cbeeb7fad521cce0daf923b272a92
wpt-pr: 10062
Joshua Bell 2018-04-09 17:22:40 +00:00 committed by James Graham
Parent 6b4d680a3d
Commit b049a9907b
3 changed files with 706 additions and 0 deletions

View File

@@ -285587,6 +285587,11 @@
{}
]
],
"interfaces/webaudio.idl": [
[
{}
]
],
"interfaces/webauthn.idl": [
[
{}
@@ -364851,6 +364856,12 @@
{}
]
],
"webaudio/idlharness.https.html": [
[
"/webaudio/idlharness.https.html",
{}
]
],
"webaudio/the-audio-api/the-analysernode-interface/test-analyser-gain.html": [
[
"/webaudio/the-audio-api/the-analysernode-interface/test-analyser-gain.html",
@@ -572532,6 +572543,10 @@
"d3ab33fa078f1b3bd4b29e174369073aab3963d5",
"support"
],
"interfaces/webaudio.idl": [
"43a244276b9795b137c2e46c194d56dbb957635d",
"support"
],
"interfaces/webauthn.idl": [
"77076f0828383c0f48f36131a81b25186622b3a3",
"support"
@@ -601624,6 +601639,10 @@
"93068df297042344669093ce899f0230c87ebf54",
"testharness"
],
"webaudio/idlharness.https.html": [
"7876e16ea643f69315e18c3e17af0d7f95769420",
"testharness"
],
"webaudio/js/buffer-loader.js": [
"4d564eae0b3d7d1045626d1f144cd2638dba64e5",
"support"

View File

@@ -0,0 +1,596 @@
enum AudioContextState {
"suspended",
"running",
"closed"
};
callback DecodeErrorCallback = void (DOMException error);
callback DecodeSuccessCallback = void (AudioBuffer decodedData);
[Exposed=Window]
interface BaseAudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
readonly attribute AudioListener listener;
readonly attribute AudioContextState state;
[SameObject, SecureContext]
readonly attribute AudioWorklet audioWorklet;
Promise<void> resume ();
attribute EventHandler onstatechange;
AudioBuffer createBuffer (unsigned long numberOfChannels, unsigned long length, float sampleRate);
Promise<AudioBuffer> decodeAudioData (ArrayBuffer audioData,
optional DecodeSuccessCallback successCallback,
optional DecodeErrorCallback errorCallback);
AudioBufferSourceNode createBufferSource ();
ConstantSourceNode createConstantSource ();
ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
optional unsigned long numberOfInputChannels = 2,
optional unsigned long numberOfOutputChannels = 2);
AnalyserNode createAnalyser ();
GainNode createGain ();
DelayNode createDelay (optional double maxDelayTime = 1.0);
BiquadFilterNode createBiquadFilter ();
IIRFilterNode createIIRFilter (sequence<double> feedforward, sequence<double> feedback);
WaveShaperNode createWaveShaper ();
PannerNode createPanner ();
StereoPannerNode createStereoPanner ();
ConvolverNode createConvolver ();
ChannelSplitterNode createChannelSplitter (optional unsigned long numberOfOutputs = 6);
ChannelMergerNode createChannelMerger (optional unsigned long numberOfInputs = 6);
DynamicsCompressorNode createDynamicsCompressor ();
OscillatorNode createOscillator ();
PeriodicWave createPeriodicWave (sequence<float> real, sequence<float> imag, optional PeriodicWaveConstraints constraints);
};
[Exposed=Window]
enum AudioContextLatencyCategory {
"balanced",
"interactive",
"playback"
};
[Exposed=Window,
Constructor (optional AudioContextOptions contextOptions)]
interface AudioContext : BaseAudioContext {
readonly attribute double baseLatency;
readonly attribute double outputLatency;
AudioTimestamp getOutputTimestamp ();
Promise<void> suspend ();
Promise<void> close ();
MediaElementAudioSourceNode createMediaElementSource (HTMLMediaElement mediaElement);
MediaStreamAudioSourceNode createMediaStreamSource (MediaStream mediaStream);
MediaStreamTrackAudioSourceNode createMediaStreamTrackSource (MediaStreamTrack mediaStreamTrack);
MediaStreamAudioDestinationNode createMediaStreamDestination ();
};
[Exposed=Window]
dictionary AudioContextOptions {
(AudioContextLatencyCategory or double) latencyHint = "interactive";
float sampleRate;
};
[Exposed=Window]
dictionary AudioTimestamp {
double contextTime;
DOMHighResTimeStamp performanceTime;
};
[Exposed=Window,
Constructor (OfflineAudioContextOptions contextOptions),
Constructor (unsigned long numberOfChannels, unsigned long length, float sampleRate)]
interface OfflineAudioContext : BaseAudioContext {
Promise<AudioBuffer> startRendering();
Promise<void> suspend(double suspendTime);
readonly attribute unsigned long length;
attribute EventHandler oncomplete;
};
[Exposed=Window]
dictionary OfflineAudioContextOptions {
unsigned long numberOfChannels = 1;
required unsigned long length;
required float sampleRate;
};
[Exposed=Window,
Constructor (DOMString type, OfflineAudioCompletionEventInit eventInitDict)]
interface OfflineAudioCompletionEvent : Event {
readonly attribute AudioBuffer renderedBuffer;
};
[Exposed=Window]
dictionary OfflineAudioCompletionEventInit : EventInit {
required AudioBuffer renderedBuffer;
};
[Exposed=Window,
Constructor (AudioBufferOptions options)]
interface AudioBuffer {
readonly attribute float sampleRate;
readonly attribute unsigned long length;
readonly attribute double duration;
readonly attribute unsigned long numberOfChannels;
Float32Array getChannelData (unsigned long channel);
void copyFromChannel (Float32Array destination, unsigned long channelNumber, optional unsigned long startInChannel = 0);
void copyToChannel (Float32Array source, unsigned long channelNumber, optional unsigned long startInChannel = 0);
};
dictionary AudioBufferOptions {
unsigned long numberOfChannels = 1;
required unsigned long length;
required float sampleRate;
};
[Exposed=Window]
interface AudioNode : EventTarget {
AudioNode connect (AudioNode destinationNode,
optional unsigned long output = 0,
optional unsigned long input = 0);
void connect (AudioParam destinationParam, optional unsigned long output = 0);
void disconnect ();
void disconnect (unsigned long output);
void disconnect (AudioNode destinationNode);
void disconnect (AudioNode destinationNode, unsigned long output);
void disconnect (AudioNode destinationNode, unsigned long output, unsigned long input);
void disconnect (AudioParam destinationParam);
void disconnect (AudioParam destinationParam, unsigned long output);
readonly attribute BaseAudioContext context;
readonly attribute unsigned long numberOfInputs;
readonly attribute unsigned long numberOfOutputs;
attribute unsigned long channelCount;
attribute ChannelCountMode channelCountMode;
attribute ChannelInterpretation channelInterpretation;
};
[Exposed=Window]
enum ChannelCountMode {
"max",
"clamped-max",
"explicit"
};
[Exposed=Window]
enum ChannelInterpretation {
"speakers",
"discrete"
};
dictionary AudioNodeOptions {
unsigned long channelCount;
ChannelCountMode channelCountMode;
ChannelInterpretation channelInterpretation;
};
[Exposed=Window]
interface AudioParam {
attribute float value;
readonly attribute float defaultValue;
readonly attribute float minValue;
readonly attribute float maxValue;
AudioParam setValueAtTime (float value, double startTime);
AudioParam linearRampToValueAtTime (float value, double endTime);
AudioParam exponentialRampToValueAtTime (float value, double endTime);
AudioParam setTargetAtTime (float target, double startTime, float timeConstant);
AudioParam setValueCurveAtTime (sequence<float> values, double startTime, double duration);
AudioParam cancelScheduledValues (double cancelTime);
AudioParam cancelAndHoldAtTime (double cancelTime);
};
[Exposed=Window]
interface AudioScheduledSourceNode : AudioNode {
attribute EventHandler onended;
void start(optional double when = 0);
void stop(optional double when = 0);
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional AnalyserOptions options)]
interface AnalyserNode : AudioNode {
void getFloatFrequencyData (Float32Array array);
void getByteFrequencyData (Uint8Array array);
void getFloatTimeDomainData (Float32Array array);
void getByteTimeDomainData (Uint8Array array);
attribute unsigned long fftSize;
readonly attribute unsigned long frequencyBinCount;
attribute double minDecibels;
attribute double maxDecibels;
attribute double smoothingTimeConstant;
};
dictionary AnalyserOptions : AudioNodeOptions {
unsigned long fftSize = 2048;
double maxDecibels = -30;
double minDecibels = -100;
double smoothingTimeConstant = 0.8;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional AudioBufferSourceOptions options)]
interface AudioBufferSourceNode : AudioScheduledSourceNode {
attribute AudioBuffer? buffer;
readonly attribute AudioParam playbackRate;
readonly attribute AudioParam detune;
attribute boolean loop;
attribute double loopStart;
attribute double loopEnd;
void start (optional double when = 0,
optional double offset,
optional double duration);
void stop (optional double when = 0);
};
dictionary AudioBufferSourceOptions {
AudioBuffer? buffer;
float detune = 0;
boolean loop = false;
double loopEnd = 0;
double loopStart = 0;
float playbackRate = 1;
};
[Exposed=Window]
interface AudioDestinationNode : AudioNode {
readonly attribute unsigned long maxChannelCount;
};
[Exposed=Window]
interface AudioListener {
readonly attribute AudioParam positionX;
readonly attribute AudioParam positionY;
readonly attribute AudioParam positionZ;
readonly attribute AudioParam forwardX;
readonly attribute AudioParam forwardY;
readonly attribute AudioParam forwardZ;
readonly attribute AudioParam upX;
readonly attribute AudioParam upY;
readonly attribute AudioParam upZ;
void setPosition (float x, float y, float z);
void setOrientation (float x, float y, float z, float xUp, float yUp, float zUp);
};
[Exposed=Window,
Constructor (DOMString type, AudioProcessingEventInit eventInitDict)]
interface AudioProcessingEvent : Event {
readonly attribute double playbackTime;
readonly attribute AudioBuffer inputBuffer;
readonly attribute AudioBuffer outputBuffer;
};
dictionary AudioProcessingEventInit : EventInit {
required double playbackTime;
required AudioBuffer inputBuffer;
required AudioBuffer outputBuffer;
};
enum BiquadFilterType {
"lowpass",
"highpass",
"bandpass",
"lowshelf",
"highshelf",
"peaking",
"notch",
"allpass"
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional BiquadFilterOptions options)]
interface BiquadFilterNode : AudioNode {
attribute BiquadFilterType type;
readonly attribute AudioParam frequency;
readonly attribute AudioParam detune;
readonly attribute AudioParam Q;
readonly attribute AudioParam gain;
void getFrequencyResponse (Float32Array frequencyHz, Float32Array magResponse, Float32Array phaseResponse);
};
dictionary BiquadFilterOptions : AudioNodeOptions {
BiquadFilterType type = "lowpass";
float Q = 1;
float detune = 0;
float frequency = 350;
float gain = 0;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional ChannelMergerOptions options)]
interface ChannelMergerNode : AudioNode {
};
dictionary ChannelMergerOptions : AudioNodeOptions {
unsigned long numberOfInputs = 6;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional ChannelSplitterOptions options)]
interface ChannelSplitterNode : AudioNode {
};
dictionary ChannelSplitterOptions : AudioNodeOptions {
unsigned long numberOfOutputs = 6;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional ConstantSourceOptions options)]
interface ConstantSourceNode : AudioScheduledSourceNode {
readonly attribute AudioParam offset;
};
dictionary ConstantSourceOptions {
float offset = 1;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional ConvolverOptions options)]
interface ConvolverNode : AudioNode {
attribute AudioBuffer? buffer;
attribute boolean normalize;
};
dictionary ConvolverOptions : AudioNodeOptions {
AudioBuffer? buffer;
boolean disableNormalization = false;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional DelayOptions options)]
interface DelayNode : AudioNode {
readonly attribute AudioParam delayTime;
};
dictionary DelayOptions : AudioNodeOptions {
double maxDelayTime = 1;
double delayTime = 0;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional DynamicsCompressorOptions options)]
interface DynamicsCompressorNode : AudioNode {
readonly attribute AudioParam threshold;
readonly attribute AudioParam knee;
readonly attribute AudioParam ratio;
readonly attribute float reduction;
readonly attribute AudioParam attack;
readonly attribute AudioParam release;
};
dictionary DynamicsCompressorOptions : AudioNodeOptions {
float attack = 0.003;
float knee = 30;
float ratio = 12;
float release = 0.25;
float threshold = -24;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional GainOptions options)]
interface GainNode : AudioNode {
readonly attribute AudioParam gain;
};
dictionary GainOptions : AudioNodeOptions {
float gain = 1.0;
};
[Exposed=Window,
Constructor (BaseAudioContext context, IIRFilterOptions options)]
interface IIRFilterNode : AudioNode {
void getFrequencyResponse (Float32Array frequencyHz, Float32Array magResponse, Float32Array phaseResponse);
};
dictionary IIRFilterOptions : AudioNodeOptions {
required sequence<double> feedforward;
required sequence<double> feedback;
};
[Exposed=Window,
Constructor (BaseAudioContext context, MediaElementAudioSourceOptions options)]
interface MediaElementAudioSourceNode : AudioNode {
[SameObject] readonly attribute HTMLMediaElement mediaElement;
};
dictionary MediaElementAudioSourceOptions {
required HTMLMediaElement mediaElement;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional AudioNodeOptions options)]
interface MediaStreamAudioDestinationNode : AudioNode {
readonly attribute MediaStream stream;
};
[Exposed=Window,
Constructor (BaseAudioContext context, MediaStreamAudioSourceOptions options)]
interface MediaStreamAudioSourceNode : AudioNode {
[SameObject] readonly attribute MediaStream mediaStream;
};
dictionary MediaStreamAudioSourceOptions {
required MediaStream mediaStream;
};
[Exposed=Window,
Constructor (AudioContext context, MediaStreamTrackAudioSourceOptions options)]
interface MediaStreamTrackAudioSourceNode : AudioNode {
};
dictionary MediaStreamTrackAudioSourceOptions {
required MediaStreamTrack mediaStreamTrack;
};
enum OscillatorType {
"sine",
"square",
"sawtooth",
"triangle",
"custom"
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional OscillatorOptions options)]
interface OscillatorNode : AudioScheduledSourceNode {
attribute OscillatorType type;
readonly attribute AudioParam frequency;
readonly attribute AudioParam detune;
void setPeriodicWave (PeriodicWave periodicWave);
};
dictionary OscillatorOptions : AudioNodeOptions {
OscillatorType type = "sine";
float frequency = 440;
float detune = 0;
PeriodicWave periodicWave;
};
enum PanningModelType {
"equalpower",
"HRTF"
};
enum DistanceModelType {
"linear",
"inverse",
"exponential"
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional PannerOptions options)]
interface PannerNode : AudioNode {
attribute PanningModelType panningModel;
readonly attribute AudioParam positionX;
readonly attribute AudioParam positionY;
readonly attribute AudioParam positionZ;
readonly attribute AudioParam orientationX;
readonly attribute AudioParam orientationY;
readonly attribute AudioParam orientationZ;
attribute DistanceModelType distanceModel;
attribute double refDistance;
attribute double maxDistance;
attribute double rolloffFactor;
attribute double coneInnerAngle;
attribute double coneOuterAngle;
attribute double coneOuterGain;
void setPosition (float x, float y, float z);
void setOrientation (float x, float y, float z);
};
dictionary PannerOptions : AudioNodeOptions {
PanningModelType panningModel = "equalpower";
DistanceModelType distanceModel = "inverse";
float positionX = 0;
float positionY = 0;
float positionZ = 0;
float orientationX = 1;
float orientationY = 0;
float orientationZ = 0;
double refDistance = 1;
double maxDistance = 10000;
double rolloffFactor = 1;
double coneInnerAngle = 360;
double coneOuterAngle = 360;
double coneOuterGain = 0;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional PeriodicWaveOptions options)]
interface PeriodicWave {
};
dictionary PeriodicWaveConstraints {
boolean disableNormalization = false;
};
dictionary PeriodicWaveOptions : PeriodicWaveConstraints {
sequence<float> real;
sequence<float> imag;
};
[Exposed=Window]
interface ScriptProcessorNode : AudioNode {
attribute EventHandler onaudioprocess;
readonly attribute long bufferSize;
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional StereoPannerOptions options)]
interface StereoPannerNode : AudioNode {
readonly attribute AudioParam pan;
};
dictionary StereoPannerOptions : AudioNodeOptions {
float pan = 0;
};
enum OverSampleType {
"none",
"2x",
"4x"
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional WaveShaperOptions options)]
interface WaveShaperNode : AudioNode {
attribute Float32Array? curve;
attribute OverSampleType oversample;
};
dictionary WaveShaperOptions : AudioNodeOptions {
sequence<float> curve;
OverSampleType oversample = "none";
};
[Exposed=Window, SecureContext]
interface AudioWorklet : Worklet {
};
[Global=(Worklet, AudioWorklet), Exposed=AudioWorklet]
interface AudioWorkletGlobalScope : WorkletGlobalScope {
void registerProcessor (DOMString name, VoidFunction processorCtor);
readonly attribute double currentTime;
readonly attribute float sampleRate;
};
[Exposed=Window]
interface AudioParamMap {
readonly maplike<DOMString, AudioParam>;
};
enum AudioWorkletProcessorState {
"pending",
"running",
"stopped",
"error"
};
[Exposed=Window,
SecureContext,
Constructor (BaseAudioContext context, DOMString name, optional AudioWorkletNodeOptions options)]
interface AudioWorkletNode : AudioNode {
readonly attribute AudioParamMap parameters;
readonly attribute MessagePort port;
readonly attribute AudioWorkletProcessorState processorState;
attribute EventHandler onprocessorstatechange;
};
dictionary AudioWorkletNodeOptions : AudioNodeOptions {
unsigned long numberOfInputs = 1;
unsigned long numberOfOutputs = 1;
sequence<unsigned long> outputChannelCount;
record<DOMString, double> parameterData;
object processorOptions = null;
};
[Exposed=AudioWorklet,
Constructor (optional AudioWorkletNodeOptions options)]
interface AudioWorkletProcessor {
readonly attribute MessagePort port;
};
dictionary AudioParamDescriptor {
required DOMString name;
float defaultValue = 0;
float minValue = -3.4028235e38;
float maxValue = 3.4028235e38;
};

View File

@@ -0,0 +1,91 @@
<!DOCTYPE html>
<meta charset="utf-8">
<title>WebAudio IDL tests</title>
<link rel="help" href="https://webaudio.github.io/web-audio-api/"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/resources/idlharness.js"></script>
<script>
'use strict';
promise_test(async t => {
const [html, dom, mediacapture, webaudio] = await Promise.all([
// Needed for EventTarget, HTMLMediaElement
'/interfaces/html.idl',
// Needed for Event, EventListener
'/interfaces/dom.idl',
// Needed for MediaStream, MediaStreamTrack
'/interfaces/mediacapture-main.idl',
'/interfaces/webaudio.idl'
].map(url => fetch(url).then(response => response.text())));
const idl_array = new IdlArray();
idl_array.add_untested_idls('interface LinkStyle {};'); // Dependency of HTML
idl_array.add_untested_idls(html);
idl_array.add_untested_idls(dom);
idl_array.add_untested_idls(mediacapture);
idl_array.add_untested_idls('interface Worklet {};');
idl_array.add_idls(webaudio);
const sample_rate = 44100;
const context = new AudioContext;
const buffer = new AudioBuffer({length: 1, sampleRate: sample_rate});
await context.audioWorklet.addModule(
'the-audio-api/the-audioworklet-interface/processors/dummy-processor.js');
const worklet_node = new AudioWorkletNode(context, 'dummy');
idl_array.add_objects({
BaseAudioContext: [],
AudioContext: [context],
OfflineAudioContext: [new OfflineAudioContext(1, 1, sample_rate)],
OfflineAudioCompletionEvent: [
new OfflineAudioCompletionEvent('', {renderedBuffer: buffer})],
AudioBuffer: [buffer],
AudioNode: [],
AudioParam: [new AudioBufferSourceNode(context).playbackRate],
AudioScheduledSourceNode: [],
AnalyserNode: [new AnalyserNode(context)],
AudioBufferSourceNode: [new AudioBufferSourceNode(context)],
AudioDestinationNode: [context.destination],
AudioListener: [context.listener],
AudioProcessingEvent: [new AudioProcessingEvent('', {
playbackTime: 0, inputBuffer: buffer, outputBuffer: buffer
})],
BiquadFilterNode: [new BiquadFilterNode(context)],
ChannelMergerNode: [new ChannelMergerNode(context)],
ChannelSplitterNode: [new ChannelSplitterNode(context)],
ConstantSourceNode: [new ConstantSourceNode(context)],
ConvolverNode: [new ConvolverNode(context)],
DelayNode: [new DelayNode(context)],
DynamicsCompressorNode: [new DynamicsCompressorNode(context)],
GainNode: [new GainNode(context)],
IIRFilterNode: [
new IIRFilterNode(context, {feedforward: [1], feedback: [1]})],
MediaElementAudioSourceNode: [
new MediaElementAudioSourceNode(context, {mediaElement: new Audio})],
MediaStreamAudioDestinationNode: [
new MediaStreamAudioDestinationNode(context)],
MediaStreamAudioSourceNode: [],
MediaStreamTrackAudioSourceNode: [],
OscillatorNode: [new OscillatorNode(context)],
PannerNode: [new PannerNode(context)],
PeriodicWave: [new PeriodicWave(context)],
ScriptProcessorNode: [context.createScriptProcessor()],
StereoPannerNode: [new StereoPannerNode(context)],
WaveShaperNode: [new WaveShaperNode(context)],
AudioWorklet: [context.audioWorklet],
AudioWorkletGlobalScope: [],
AudioParamMap: [worklet_node.parameters],
AudioWorkletNode: [worklet_node],
AudioWorkletProcessor: [],
});
idl_array.test();
}, 'Test driver');
</script>