chromium/third_party/blink/web_tests/webaudio/ChannelMerger/audiochannelmerger-cycle.html

<!DOCTYPE html>
<html>
  <head>
    <title>
      audiochannelmerger-cycle.html
    </title>
    <script src="../../resources/testharness.js"></script>
    <script src="../../resources/testharnessreport.js"></script>
    <script src="../resources/audit-util.js"></script>
    <script src="../resources/audit.js"></script>
  </head>
  <body>
    <script id="layout-test-code">
      // This specific sample rate is chosen to avoid rounding/truncation
      // errors in the delay time. See: crbug.com/448801
      let sampleRate = 32768;

      // The Web Audio API's rendering quantum size, in sample-frames.
      let renderingQuantum = 128;

      // Four rendering quanta, to make the rendered result long enough to
      // observe the delayed output.
      let renderLength = renderingQuantum * 4;

      // One rendering quantum of delay.
      let delayTime = renderingQuantum / sampleRate;
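      // 128 / 32768 = 0.00390625 s, which is exactly 128 sample-frames, so
      // the delay time introduces no fractional-frame rounding.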

      // Use 2 channels as a test case.
      let numberOfChannels = 2;

      let audit = Audit.createTaskRunner();

      audit.define('merger-cyclic-graph', (task, should) => {

        let context =
            new OfflineAudioContext(numberOfChannels, renderLength, sampleRate);
        let merger = context.createChannelMerger(2);
        let delay = context.createDelay();
        let source = context.createBufferSource();

        // Create a mono source buffer filled with '1'.
        source.buffer = createConstantBuffer(context, renderLength, [1]);

        delay.delayTime.value = delayTime;

        // Connect the source to input 0 of the merger. Connect the output of
        // the merger to a delay node whose output is then connected to input 1
        // of the merger. See: crbug.com/442925
        source.connect(merger, 0, 0);
        delay.connect(merger, 0, 1);
        merger.connect(delay);
        merger.connect(context.destination);
        source.start();
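
        // In summary, the routing is:
        //   source -> merger input 0
        //   merger -> delay -> merger input 1   (the feedback cycle)
        //   merger -> destination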

        context.startRendering().then(function(buffer) {
          // Expected output values: the output of the delay node is a stereo
          // signal of [1, 0]. When it feeds back into the merger's second
          // input, the stereo signal is down-mixed to mono, resulting in 0.5.
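          // (The stereo-to-mono down-mix is 0.5 * (left + right), so
          // 0.5 * (1 + 0) = 0.5.)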
          let expected_left = [];
          let expected_right = [];

          for (let i = 0; i < renderLength; i++) {
            // Note that the delayed channel is zero for the first 128 frames
            // because the cycle adds one render quantum of latency, and zero
            // for the next 128 frames because of the 128-frame delay.
            expected_left[i] = 1.0;
            expected_right[i] = (i < renderingQuantum * 2) ? 0.0 : 0.5;
          }

          let actual_left = buffer.getChannelData(0);
          let actual_right = buffer.getChannelData(1);
          should(actual_left, 'Left channel').beEqualToArray(expected_left);
          should(actual_right, 'Right channel').beEqualToArray(expected_right);

          task.done();
        });

      });

      audit.run();
    </script>
  </body>
</html>