+2011-09-28 Chris Rogers <crogers@google.com>
+
+ DelayNode must set the context on delayTime AudioParam to support automation
+ https://bugs.webkit.org/show_bug.cgi?id=68828
+
+ Reviewed by Kenneth Russell.
+
+ * webaudio/delaynode-scheduling-expected.txt: Added.
+ * webaudio/delaynode-scheduling.html: Added.
+ * webaudio/delaynode.html:
+ * webaudio/resources/delay-testing.js: Added.
+ (createToneBuffer):
+ (checkDelayedResult.return.var):
+ (checkDelayedResult):
+
2011-09-28 Ryosuke Niwa <rniwa@webkit.org>
More Mac rebaselines for r96257.
--- /dev/null
+Tests that DelayNode delayTime parameter can be scheduled at a given time.
+
+On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE".
+
+PASS Test signal was correctly delayed.
+PASS successfullyParsed is true
+
+TEST COMPLETE
+
--- /dev/null
+<!DOCTYPE html>
+
+<html>
+<head>
+<link rel="stylesheet" href="../fast/js/resources/js-test-style.css"/>
+<script src="../fast/js/resources/js-test-pre.js"></script>
+<script src="resources/delay-testing.js"></script>
+</head>
+
+<body>
+
+<div id="description"></div>
+<div id="console"></div>
+
+<script>
+description("Tests that DelayNode delayTime parameter can be scheduled at a given time.");
+
+// Entry point: renders a 20Hz tone through a DelayNode in an offline
+// context, with the delay scheduled via setValueAtTime() rather than set
+// directly, then verifies the delayed output in the oncomplete handler
+// (checkDelayedResult, from resources/delay-testing.js).
+function runTest() {
+ if (window.layoutTestController) {
+ layoutTestController.dumpAsText();
+ // The test completes asynchronously in the rendering-complete callback.
+ layoutTestController.waitUntilDone();
+ }
+
+ window.jsTestIsAsync = true;
+
+ // Create offline audio context.
+ var context = new webkitAudioContext(1, sampleRate * renderLengthSeconds, sampleRate);
+ var toneBuffer = createToneBuffer(context, 20, 20 * toneLengthSeconds, sampleRate); // 20Hz tone
+
+ var bufferSource = context.createBufferSource();
+ bufferSource.buffer = toneBuffer;
+
+ var delay = context.createDelayNode();
+
+ // Schedule delay time at time zero.
+ // This is the behavior under test: automation requires the delayTime
+ // AudioParam to have its context set (see bug 68828).
+ delay.delayTime.setValueAtTime(delayTimeSeconds, 0);
+
+ bufferSource.connect(delay);
+ delay.connect(context.destination);
+ bufferSource.noteOn(0);
+
+ context.oncomplete = checkDelayedResult(toneBuffer);
+ context.startRendering();
+}
+
+runTest();
+successfullyParsed = true;
+
+</script>
+
+<script src="../fast/js/resources/js-test-post.js"></script>
+</body>
+</html>
<head>
<link rel="stylesheet" href="../fast/js/resources/js-test-style.css"/>
<script src="../fast/js/resources/js-test-pre.js"></script>
+<script src="resources/delay-testing.js"></script>
</head>
<body>
<script>
description("Tests basic functionality of DelayNode.");
-var sampleRate = 44100.0;
-
-var context;
-var toneBuffer;
-
-var renderLengthSeconds = 4;
-var delayTimeSeconds = 0.5;
-var toneLengthSeconds = 2;
-
-function createToneBuffer(frequency, numberOfCycles, sampleRate) {
- var duration = numberOfCycles / frequency;
- var sampleFrameLength = duration * sampleRate;
-
- var audioBuffer = context.createBuffer(1, sampleFrameLength, sampleRate);
-
- var n = audioBuffer.length;
- var data = audioBuffer.getChannelData(0);
-
- for (var i = 0; i < n; ++i)
- data[i] = Math.sin(frequency * 2.0*Math.PI * i / sampleRate);
-
- return audioBuffer;
-}
-
-function checkDelayedResult(event) {
- var renderedBuffer = event.renderedBuffer;
-
- var sourceData = toneBuffer.getChannelData(0);
- var renderedData = renderedBuffer.getChannelData(0);
-
- var delayTimeFrames = delayTimeSeconds * sampleRate;
- var toneLengthFrames = toneLengthSeconds * sampleRate;
-
- var success = true;
-
- var n = renderedBuffer.length;
-
- for (var i = 0; i < n; ++i) {
- if (i < delayTimeFrames) {
- // Check that initial portion is 0 (since signal is delayed).
- if (renderedData[i] != 0) {
- success = false;
- break;
- }
- } else if (i >= delayTimeFrames && i < delayTimeFrames + toneLengthFrames) {
- // Make sure that the tone data is delayed by exactly the expected number of frames.
- var j = i - delayTimeFrames;
- if (renderedData[i] != sourceData[j]) {
- success = false;
- break;
- }
- } else {
- // Make sure we have silence after the delayed tone.
- if (renderedData[i] != 0) {
- success = false;
- break;
- }
- }
- }
-
- if (success) {
- testPassed("Test signal was correctly delayed.");
- } else {
- testFailed("Test signal was not correctly delayed.");
- }
-
- finishJSTest();
-}
-
function runTest() {
if (window.layoutTestController) {
layoutTestController.dumpAsText();
window.jsTestIsAsync = true;
// Create offline audio context.
- context = new webkitAudioContext(1, sampleRate * renderLengthSeconds, sampleRate);
- toneBuffer = createToneBuffer(20, 20 * toneLengthSeconds, sampleRate); // 20Hz tone
+ var context = new webkitAudioContext(1, sampleRate * renderLengthSeconds, sampleRate);
+ var toneBuffer = createToneBuffer(context, 20, 20 * toneLengthSeconds, sampleRate); // 20Hz tone
var bufferSource = context.createBufferSource();
bufferSource.buffer = toneBuffer;
delay.connect(context.destination);
bufferSource.noteOn(0);
- context.oncomplete = checkDelayedResult;
+ context.oncomplete = checkDelayedResult(toneBuffer);
context.startRendering();
}
--- /dev/null
+// Shared constants and helpers for the DelayNode layout tests
+// (delaynode.html and delaynode-scheduling.html).
+var sampleRate = 44100.0;
+
+var renderLengthSeconds = 4;
+var delayTimeSeconds = 0.5;
+var toneLengthSeconds = 2;
+
+// Returns a new mono AudioBuffer (created from |context|) containing
+// |numberOfCycles| cycles of a sine tone at |frequency| Hz, sampled at
+// |sampleRate|.
+function createToneBuffer(context, frequency, numberOfCycles, sampleRate) {
+ var duration = numberOfCycles / frequency;
+ var sampleFrameLength = duration * sampleRate;
+
+ var audioBuffer = context.createBuffer(1, sampleFrameLength, sampleRate);
+
+ var n = audioBuffer.length;
+ var data = audioBuffer.getChannelData(0);
+
+ for (var i = 0; i < n; ++i)
+ data[i] = Math.sin(frequency * 2.0*Math.PI * i / sampleRate);
+
+ return audioBuffer;
+}
+
+// Returns an oncomplete handler (closing over |toneBuffer|) that verifies
+// the rendered output is exactly |toneBuffer| delayed by delayTimeSeconds:
+// silence before the delayed region, sample-exact tone data inside it, and
+// silence after it. Reports via testPassed()/testFailed() and finishes the
+// async test with finishJSTest().
+function checkDelayedResult(toneBuffer) {
+ return function(event) {
+ var renderedBuffer = event.renderedBuffer;
+
+ var sourceData = toneBuffer.getChannelData(0);
+ var renderedData = renderedBuffer.getChannelData(0);
+
+ // Convert the expected delay and tone length from seconds to frames.
+ var delayTimeFrames = delayTimeSeconds * sampleRate;
+ var toneLengthFrames = toneLengthSeconds * sampleRate;
+
+ var success = true;
+
+ var n = renderedBuffer.length;
+
+ for (var i = 0; i < n; ++i) {
+ if (i < delayTimeFrames) {
+ // Check that initial portion is 0 (since signal is delayed).
+ if (renderedData[i] != 0) {
+ success = false;
+ break;
+ }
+ } else if (i >= delayTimeFrames && i < delayTimeFrames + toneLengthFrames) {
+ // Make sure that the tone data is delayed by exactly the expected number of frames.
+ var j = i - delayTimeFrames;
+ if (renderedData[i] != sourceData[j]) {
+ success = false;
+ break;
+ }
+ } else {
+ // Make sure we have silence after the delayed tone.
+ if (renderedData[i] != 0) {
+ success = false;
+ break;
+ }
+ }
+ }
+
+ if (success) {
+ testPassed("Test signal was correctly delayed.");
+ } else {
+ testFailed("Test signal was not correctly delayed.");
+ }
+
+ finishJSTest();
+ }
+}
+2011-09-28 Chris Rogers <crogers@google.com>
+
+ DelayNode must set the context on delayTime AudioParam to support automation
+ https://bugs.webkit.org/show_bug.cgi?id=68828
+
+ Reviewed by Kenneth Russell.
+
+ Test: webaudio/delaynode-scheduling.html
+
+ * webaudio/DelayNode.cpp:
+ (WebCore::DelayNode::DelayNode):
+
2011-09-28 Sergey Glazunov <serg.glazunov@gmail.com>
JavaScript generated documents don't inherit the cookie URL
: AudioBasicProcessorNode(context, sampleRate)
{
m_processor = adoptPtr(new DelayProcessor(sampleRate, 1));
+ delayTime()->setContext(context);
setType(NodeTypeDelay);
}