chromium/third_party/blink/web_tests/webaudio/AudioNode/tail-processing.html

<!DOCTYPE html>
<html>
  <head>
    <title>
      Test Handling of Tail Processing
    </title>
    <script src="../../resources/testharness.js"></script>
    <script src="../../resources/testharnessreport.js"></script>
    <script src="../resources/audit.js"></script>
    <script src="../resources/audit-util.js"></script>
  </head>
  <body>
    <script id="layout-test-code">
      let audit = Audit.createTaskRunner();

      // Fairly arbitrary, but must be a power of two to eliminate roundoff
      // when we compute times from sample frames.
      const sampleRate = 32768;

      // Fairly arbitrary duration.
      const renderDuration = 0.25;
      const renderFrames = renderDuration * sampleRate;
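
      // RENDER_QUANTUM_FRAMES (the size of a render quantum, 128 frames) is
      // defined in audit-util.js.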

      audit.define(
          {label: 'grain', description: 'ABSN grain has the correct duration'},
          (task, should) => {
            // Test that the grain duration is the requested duration.
            let context = new OfflineAudioContext(2, renderFrames, sampleRate);
            let merger = new ChannelMergerNode(
                context, {numberOfInputs: context.destination.channelCount});
            merger.connect(context.destination);

            // Number of frames for the grain we want to play. (Pretty much
            // arbitrary.)
            let grainFrames = 128;

            // Test signal.  Just fill the buffer with all ones.
            let bTest = new AudioBuffer(
                {length: context.length, sampleRate: context.sampleRate});
            bTest.getChannelData(0).fill(1);
            let sTest = new AudioBufferSourceNode(context, {buffer: bTest});

            // Reference signal.  Same as the test signal, but only the first
            // |grainFrames| frames are non-zero.
            let bRef = new AudioBuffer(
                {length: context.length, sampleRate: context.sampleRate});
            bRef.getChannelData(0).fill(1, 0, grainFrames);
            let sRef = new AudioBufferSourceNode(context, {buffer: bRef});

            sTest.connect(merger, 0, 0);
            sRef.connect(merger, 0, 1);

            sTest.start(0, 0, grainFrames / context.sampleRate);
            sRef.start();

            context.startRendering()
                .then(renderedBuffer => {
                  let actual = renderedBuffer.getChannelData(0);
                  let expected = renderedBuffer.getChannelData(1);

                  // The actual and expected should be the same because we got
                  // rid of the grain hack that caused a grain to play for an
                  // extra 512 frames.
                  should(actual, 'Rendered ABSN grain')
                      .beEqualToArray(expected);
                })
                .then(() => task.done());
          });

      audit.define('hrtf-panner-tail', (task, should) => {
        runTest('PannerNode', {panningModel: 'HRTF', distanceModel: 'linear'})
            .then(renderedBuffer => {
              let prefix = 'HRTF PannerNode';
              let output = renderedBuffer.getChannelData(0);
              let response = renderedBuffer.getChannelData(1);
              let latencyFrame = findLatencyFrame(response);
              let tailFrame = findTailFrame(response);

              // The HRTF panner has both a latency component and a tail
              // component.  Make sure both are non-zero.
              should(latencyFrame, `${prefix} latency frame (${latencyFrame})`)
                  .beGreaterThan(0);

              should(tailFrame, `${prefix} tail frame (${tailFrame})`)
                  .beGreaterThan(0);

              // Because of the latency, the output is zero at the beginning.
              // Make sure this is true.
              should(
                  output.slice(0, latencyFrame),
                  `${prefix} Latency output[0:${latencyFrame - 1}]`)
                  .beConstantValueOf(0);

              // Verify the rest of the output matches the expected values.  The
              // output should be non-zero from latencyFrame to tailFrame and
              // zero after tailFrame.
              verifyOutput(should, output, {
                prefix: prefix,
                startFrame: latencyFrame,
                nonZeroEndFrame: Math.min(tailFrame, output.length),
                zeroStartFrame: roundUp(tailFrame),
                tailFrame: tailFrame,
                reference: response
              });
            })
            .then(() => task.done());
      });

      audit.define('biquad-tail', (task, should) => {
        runTest('BiquadFilterNode', {Q: 20, frequency: 100})
            .then(renderedBuffer => {
              let prefix = 'BiquadFilter';
              let output = renderedBuffer.getChannelData(0);
              let response = renderedBuffer.getChannelData(1);
              let tailFrame = findTailFrame(response);

              should(tailFrame, `${prefix} tail frame (${tailFrame})`)
                  .beGreaterThan(0);

              // Verify the biquad output, which should be non-zero up to
              // tailFrame and zero afterwards.  However, the actual output
              // isn't exactly zero right after tailFrame because the internal
              // biquad tail time is an approximation.  That's why
              // zeroStartFrame is one render quantum after tailFrame.
              verifyOutput(should, output, {
                prefix: prefix,
                startFrame: 0,
                nonZeroEndFrame:
                    Math.min(tailFrame + RENDER_QUANTUM_FRAMES, output.length),
                zeroStartFrame: RENDER_QUANTUM_FRAMES + roundUp(tailFrame),
                tailFrame: tailFrame,
                reference: response
              });
            })
            .then(() => task.done());
      });

      audit.define('iir-tail', (task, should) => {
        runTest('IIRFilterNode', {feedforward: [1], feedback: [1, -0.99]})
            .then(renderedBuffer => {
              let prefix = 'IIRFilter';
              let output = renderedBuffer.getChannelData(0);
              let response = renderedBuffer.getChannelData(1);
              let tailFrame = findTailFrame(response);

              should(tailFrame, `${prefix} tail frame (${tailFrame})`)
                  .beGreaterThan(0);

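              // Like the biquad, the internal IIR tail time is only an
              // estimate, so don't start the zero check until two render
              // quanta past the rounded-up tail frame.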
              verifyOutput(should, output, {
                prefix: prefix,
                startFrame: 0,
                nonZeroEndFrame:
                    Math.min(tailFrame + RENDER_QUANTUM_FRAMES, output.length),
                zeroStartFrame: 2 * RENDER_QUANTUM_FRAMES + roundUp(tailFrame),
                tailFrame: tailFrame,
                reference: response
              });
            })
            .then(() => task.done());
      });

      audit.define('delay-tail', (task, should) => {
        // For the test, make sure the delay is greater than one render
        // quantum.  If it's less we won't be able to tell if tail processing
        // worked because the input signal is an impulse.
        let delayFrames = RENDER_QUANTUM_FRAMES + 64;
        runTest('DelayNode', {delayTime: delayFrames / sampleRate})
            .then(renderedBuffer => {
              let prefix = 'Delay';
              let output = renderedBuffer.getChannelData(0);
              let response = renderedBuffer.getChannelData(1);
              let tailFrame = findTailFrame(response);

              should(tailFrame, `${prefix} tail frame (${tailFrame})`)
                  .beGreaterThan(0);

              // Because the delay time is greater than one render quantum,
              // the first render quantum of the output must be zero.
              should(
                  output.slice(0, RENDER_QUANTUM_FRAMES),
                  `${prefix} output[0:${RENDER_QUANTUM_FRAMES - 1}]`)
                  .beConstantValueOf(0);

              // The output of the delay node should be nonzero in the second
              // render quantum and zero forever after.
              verifyOutput(should, output, {
                prefix: prefix,
                startFrame: RENDER_QUANTUM_FRAMES,
                nonZeroEndFrame: Math.min(tailFrame, output.length),
                zeroStartFrame: roundUp(tailFrame),
                tailFrame: tailFrame,
                reference: response
              });

            })
            .then(() => task.done());
      });

      audit.define('convolver-tail', (task, should) => {
        // The convolver response.  It needs to be longer than one render
        // quantum to show the tail processing.
        let response = new AudioBuffer(
            {length: RENDER_QUANTUM_FRAMES + 64, sampleRate: sampleRate});
        // For simplicity, just make the response all ones.
        response.getChannelData(0).fill(1);

        runTest('ConvolverNode', {disableNormalization: true, buffer: response})
            .then(renderedBuffer => {
              let prefix = 'Convolver';
              let output = renderedBuffer.getChannelData(0);
              let response = renderedBuffer.getChannelData(1);
              let tailFrame = findTailFrame(response);

              should(tailFrame, `${prefix} tail frame (${tailFrame})`)
                  .beGreaterThan(0);

              verifyOutput(should, output, {
                prefix: prefix,
                startFrame: 0,
                nonZeroEndFrame:
                    Math.min(tailFrame + RENDER_QUANTUM_FRAMES, output.length),
                zeroStartFrame: RENDER_QUANTUM_FRAMES + roundUp(tailFrame),
                tailFrame: tailFrame,
                reference: response
              });
            })
            .then(() => task.done());
      });

      audit.define('dynamics-compressor-tail', (task, should) => {
        runTest('DynamicsCompressorNode', {})
            .then(renderedBuffer => {
              let prefix = 'DynamicsCompressor';
              let output = renderedBuffer.getChannelData(0);
              let response = renderedBuffer.getChannelData(1);
              let tailFrame = findTailFrame(response);

              should(tailFrame, `${prefix} tail frame (${tailFrame})`)
                  .beGreaterThan(0);

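              // The compressor has latency from its look-ahead pre-delay, so
              // the beginning of the output is silent.  Estimate the latency
              // as the render quantum boundary just below the tail frame.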
              let latencyFrame = roundDown(tailFrame - 1);
              should(
                  output.slice(0, latencyFrame),
                  `${prefix} output[0:${latencyFrame - 1}]`)
                  .beConstantValueOf(0);

              verifyOutput(should, output, {
                prefix: prefix,
                startFrame: latencyFrame,
                nonZeroEndFrame: Math.min(tailFrame, output.length),
                zeroStartFrame: roundUp(tailFrame),
                tailFrame: tailFrame,
                reference: response
              });

            })
            .then(() => task.done());
      });

      audit.define('waveshaper-tail', (task, should) => {
        // Fairly arbitrary curve for the WaveShaper
        let curve = Float32Array.from([-1, -0.5, 0, 0.5, 1]);

        runTest('WaveShaperNode', {curve: curve, oversample: '2x'})
            .then(renderedBuffer => {
              let prefix = 'WaveShaper';
              let output = renderedBuffer.getChannelData(0);
              let response = renderedBuffer.getChannelData(1);
              // The FFT resampler used for oversampling introduces some very
              // small round-off noise after the true tail.  Use a threshold
              // of zero so the tail frame lands past all of that noise;
              // otherwise the zero check after the tail frame would fail.
              let tailFrame = findTailFrame(response, 0);

              should(tailFrame, `${prefix} tail frame (${tailFrame})`)
                  .beGreaterThan(0);

              verifyOutput(should, output, {
                prefix: prefix,
                startFrame: 0,
                nonZeroEndFrame: Math.min(tailFrame, output.length),
                zeroStartFrame: roundUp(tailFrame),
                tailFrame: tailFrame,
                reference: response
              });
            })
            .then(() => task.done());
      });

      audit.run();

      function runTest(nodeName, nodeOptions) {
        // Two-channel output.  Channel 0 is the test result; channel 1 is the
        // impulse response that is used to figure out when the tail should
        // start.
        let context = new OfflineAudioContext(2, renderFrames, sampleRate);

        // Merge channels for the destination.
        let merger = new ChannelMergerNode(context, {numberOfInputs: 2});
        merger.connect(context.destination);

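        // Test signal: a constant source that is started at time 0 and
        // stopped one frame later, producing a one-frame impulse.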
        let src = new ConstantSourceNode(context, {offset: 1});

        // Impulse for testing.  We want a full buffer so as not to worry about
        // the source disconnecting prematurely from the filter.
        let b = new AudioBuffer(
            {length: context.length, sampleRate: context.sampleRate});
        b.getChannelData(0)[0] = 1;
        let impulse = new AudioBufferSourceNode(context, {buffer: b});

        let testNode = new window[nodeName](context, nodeOptions);
        let refNode = new window[nodeName](context, nodeOptions);

        src.connect(testNode).connect(merger, 0, 0);
        impulse.connect(refNode).connect(merger, 0, 1);

        src.start();
        src.stop(1 / context.sampleRate);
        impulse.start();

        return context.startRendering();
      }

      // Scan backwards from the end and find the last frame whose magnitude
      // exceeds the threshold; the tail frame is the frame just after it.
      // Anything at or below the threshold is treated as zero for our
      // purposes.  The default threshold is somewhat arbitrary, but it is the
      // size of the LSB of a 16-bit PCM sample.
      function findTailFrame(response, zeroThreshold = 1 / 32768) {
        let tailFrame = response.length;

        for (let k = response.length - 1; k >= 0; --k) {
          if (Math.abs(response[k]) > zeroThreshold) {
            tailFrame = k + 1;
            break;
          }
        }

        return tailFrame;
      }

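      // Find the first non-zero frame of |response|; this is the latency of
      // the node in frames.  Returns the response length if the response is
      // all zero.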
      function findLatencyFrame(response) {
        for (let k = 0; k < response.length; ++k) {
          if (response[k] != 0)
            return k;
        }

        return response.length;
      }

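      // Verify |output| against the expectations described by |options|: the
      // output matches |options.reference| up to |options.tailFrame|, is
      // non-zero in each render quantum from |options.startFrame| up to
      // |options.nonZeroEndFrame|, and is zero from |options.zeroStartFrame|
      // onwards.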
      function verifyOutput(should, output, options) {
        let prefix = options.prefix || '';
        if (options.tailFrame && options.reference) {
          should(
              output.slice(0, options.tailFrame),
              `${prefix} Tail output[0:${options.tailFrame - 1}]`)
              .beEqualToArray(options.reference.slice(0, options.tailFrame));
        }

        // Verify that |output| is non-zero between |startFrame| and
        // |nonZeroEndFrame|.
        for (let k = options.startFrame; k < options.nonZeroEndFrame;
             k += RENDER_QUANTUM_FRAMES) {
          should(
              output.slice(k, k + RENDER_QUANTUM_FRAMES),
              `${prefix} output[${k}:${k + RENDER_QUANTUM_FRAMES - 1}]`)
              .notBeConstantValueOf(0);
        }

        // Verify |output| is zero starting at frame |zeroStartFrame|,
        // inclusive.
        if (options.zeroStartFrame < output.length) {
          should(
              output.slice(options.zeroStartFrame),
              `${prefix} output[${options.zeroStartFrame}:]`)
              .beConstantValueOf(0);
        }
      }

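      // Round |frame| down to the nearest render quantum boundary.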
      function roundDown(frame) {
        return RENDER_QUANTUM_FRAMES *
            Math.floor(frame / RENDER_QUANTUM_FRAMES);
      }

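      // Round |frame| up to the nearest render quantum boundary.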
      function roundUp(frame) {
        return RENDER_QUANTUM_FRAMES * Math.ceil(frame / RENDER_QUANTUM_FRAMES);
      }
    </script>
  </body>
</html>