chromium/third_party/blink/web_tests/http/tests/webaudio/resources/autoplay-crossorigin-iframe.html

<!DOCTYPE html>
<body><span>click me</span></body>
<script>
// Set up the autoplay flags before the test is run: a document user
// activation is required before audio may start, and the policy also
// applies to Web Audio.
internals.settings.setAutoplayPolicy('document-user-activation-required');
internals.runtimeFlags.autoplayIgnoresWebAudioEnabled = false;

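// Synthesizes a mouse click in the middle of the <body> via
// chrome.gpuBenchmarking so that the frame receives a user activation, as
// required by the autoplay policy configured above.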
function synthesizeClick(callback) {
  const elementBoundingRect = document.body.getBoundingClientRect();
  const xPos = elementBoundingRect.left + elementBoundingRect.width / 2;
  const yPos = elementBoundingRect.top + elementBoundingRect.height / 2;
  const leftButton = 0;

  chrome.gpuBenchmarking.pointerActionSequence([
    {
      source: 'mouse',
      actions: [
        { name: 'pointerMove', x: xPos, y: yPos },
        { name: 'pointerDown', x: xPos, y: yPos, button: leftButton },
        { name: 'pointerUp', button: leftButton }
      ]
    }
  ], callback);
}

// Without a user gesture, a new AudioContext starts in the 'suspended' state.
var audioContext = new AudioContext();
window.parent.postMessage({ msg: 'initialState', value: audioContext.state },
                          '*');

// OfflineAudioContext is not subject to the user gesture requirement; it
// starts as 'suspended' until startRendering() is called.
var offlineAudioContext = new OfflineAudioContext(1, 512, 3000);
window.parent.postMessage({ msg: 'initialOfflineState',
                            value: offlineAudioContext.state }, '*');

// Without a user gesture, calling 'resume()' will not start the AudioContext.
audioContext.resume();
window.parent.postMessage({ msg: 'afterResume', value: audioContext.state },
                          '*');

// Likewise, calling 'start()' on an oscillator will not start the associated
// AudioContext.
var oscillator = audioContext.createOscillator();
oscillator.type = 'square';
oscillator.frequency.value = 2000;
oscillator.connect(audioContext.destination);
oscillator.start();
window.parent.postMessage({ msg: 'afterOscillator', value: audioContext.state },
                          '*');

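// A second AudioContext created without a user gesture; it also starts out
// 'suspended'.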
var otherAudioContext = new AudioContext();

synthesizeClick(async _ => {
  // Calling 'resume()' from a click event will start the audio context.
  await audioContext.resume();
  window.parent.postMessage({ msg: 'stateAfterClick',
                              value: audioContext.state }, '*');

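  // The other context has not been resumed, so report its state for
  // comparison.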
  window.parent.postMessage({ msg: 'stateOtherContextAfterClick',
                              value: otherAudioContext.state }, '*');

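  // Synthesize a second click to check Web Audio behavior during a fresh
  // user gesture.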
  synthesizeClick(_ => {
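    // Build and connect an oscillator on the second context from inside the
    // click gesture.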
    var oscillator = otherAudioContext.createOscillator();
    oscillator.type = 'square';
    oscillator.frequency.value = 2000;
    oscillator.connect(otherAudioContext.destination);
    oscillator.start();

    // Creating an AudioContext from a click event will start it.
    var lastAudioContext = new AudioContext();
    window.parent.postMessage({ msg: 'stateCreatedAfterClick',
                                value: lastAudioContext.state }, '*');

    window.parent.postMessage({ msg: 'done' }, '*');
  });
});
</script>