chromium/third_party/blink/web_tests/external/wpt/webrtc-encoded-transform/RTCRtpScriptTransform-sender-worker-single-frame.https.html

<!DOCTYPE html>
<html>
<head>
<title>RTCRtpScriptTransform Insertable Streams - Worker</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/testdriver.js"></script>
<script src="/resources/testdriver-vendor.js"></script>
<script src="../../mediacapture-streams/permission-helper.js"></script>
<script src="../../webrtc/RTCPeerConnection-helper.js"></script>
</head>
<body>
<script>

function areArrayBuffersEqual(buffer1, buffer2) {
  if (buffer1.byteLength !== buffer2.byteLength) {
    return false;
  }
  const array1 = new Int8Array(buffer1);
  const array2 = new Int8Array(buffer2);
  for (let i = 0; i < array1.length; ++i) {
    if (array1[i] !== array2[i]) {
      return false;
    }
  }
  return true;
}

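// The companion worker script, RTCRtpScriptTransform-sender-worker-single-frame.js,
// is not shown in this file. As a rough, non-normative sketch of the protocol
// the tests below expect (one plain expected-values message, then the encoded
// frame posted back twice), the worker's handler might look something like
// this; the real script may differ in detail:
//
//   onrtctransform = async (event) => {
//     const transformer = event.transformer;
//     const reader = transformer.readable.getReader();
//     const writer = transformer.writable.getWriter();
//     // Read a single encoded frame produced by the sender's encoder.
//     const { value: frame } = await reader.read();
//     const metadata = frame.getMetadata();
//     // First message: a plain object carrying the expected field values.
//     self.postMessage({
//       type: frame.type,
//       data: frame.data,
//       metadata: {
//         synchronizationSource: metadata.synchronizationSource,
//         rtpTimestamp: metadata.rtpTimestamp,
//       },
//     });
//     // Follow-up messages: the encoded frame itself, posted twice.
//     self.postMessage(frame);
//     self.postMessage(frame);
//     // Forward the frame so the sender pipeline keeps flowing.
//     writer.write(frame);
//   };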

promise_test(async t => {
  const caller = new RTCPeerConnection();
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection();
  t.add_cleanup(() => callee.close());

  // Video is also used in the next test, so ask for both microphone and
  // camera permissions here.
  await setMediaPermission();
  const stream = await navigator.mediaDevices.getUserMedia({audio:true});
  const track = stream.getTracks()[0];
  t.add_cleanup(() => track.stop());

  const audioSender = caller.addTrack(track);

  const senderWorker = new Worker('RTCRtpScriptTransform-sender-worker-single-frame.js');
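  // Constructing an RTCRtpScriptTransform with this worker and assigning it to
  // the sender routes the sender's encoded frames through the worker, where a
  // 'rtctransform' event exposes them as a readable/writable stream pair.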
  audioSender.transform = new RTCRtpScriptTransform(senderWorker);
  t.add_cleanup(() => senderWorker.terminate());
  let expectedFrameInfo = null;
  let numVerifiedFrames = 0;
  const onmessagePromise = new Promise(resolve => {
    senderWorker.onmessage = t.step_func(message => {
      if (!(message.data instanceof RTCEncodedAudioFrame)) {
        // This is the first message sent from the Worker to the test.
        // It contains a plain object (not an RTCEncodedAudioFrame) with the
        // same fields as the RTCEncodedAudioFrame sent in the follow-up
        // messages. Its fields are the expected values used to validate that
        // the RTCEncodedAudioFrame is sent back to the test correctly.
        expectedFrameInfo = message.data;
      } else {
        // This is the RTCEncodedAudioFrame the Worker read from its readable
        // stream. The Worker posts it back twice after the expected-values
        // message, so two frames are verified here.
        const frame = message.data;
        const metadata = frame.getMetadata();
        assert_equals(frame.type, expectedFrameInfo.type);
        assert_true(areArrayBuffersEqual(frame.data, expectedFrameInfo.data));
        assert_equals(metadata.synchronizationSource, expectedFrameInfo.metadata.synchronizationSource);
        assert_equals(metadata.rtpTimestamp, expectedFrameInfo.metadata.rtpTimestamp);

        if (++numVerifiedFrames === 2)
          resolve();
      }
    });
  });

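  // Completing signaling starts media flow, which makes encoded frames reach
  // the transform running in the worker.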
  exchangeIceCandidates(caller, callee);
  await exchangeOfferAnswer(caller, callee);

  return onmessagePromise;
}, 'RTCRtpSender initializes its transform attribute and the Worker sends an RTCEncodedAudioFrame back');

promise_test(async t => {
  const caller = new RTCPeerConnection();
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection();
  t.add_cleanup(() => callee.close());

  const stream = await navigator.mediaDevices.getUserMedia({video:true});
  const videoTrack = stream.getVideoTracks()[0];
  t.add_cleanup(() => videoTrack.stop());

  const videoSender = caller.addTrack(videoTrack);

  const senderWorker = new Worker('RTCRtpScriptTransform-sender-worker-single-frame.js');
  videoSender.transform = new RTCRtpScriptTransform(senderWorker);
  t.add_cleanup(() => senderWorker.terminate());

  let expectedFrameInfo = null;
  let numVerifiedFrames = 0;
  const onmessagePromise = new Promise(resolve => {
    senderWorker.onmessage = t.step_func(message => {
      if (!(message.data instanceof RTCEncodedVideoFrame)) {
        // This is the first message sent from the Worker to the test.
        // It contains a plain object (not an RTCEncodedVideoFrame) with the
        // same fields as the RTCEncodedVideoFrame sent in the follow-up
        // messages. Its fields are the expected values used to validate that
        // the RTCEncodedVideoFrame is sent back to the test correctly.
        expectedFrameInfo = message.data;
      } else {
        // This is the RTCEncodedVideoFrame the Worker read from its readable
        // stream. The Worker posts it back twice after the expected-values
        // message, so two frames are verified here.
        const frame = message.data;
        const metadata = frame.getMetadata();
        assert_equals(frame.type, expectedFrameInfo.type);
        assert_true(areArrayBuffersEqual(frame.data, expectedFrameInfo.data));
        assert_equals(metadata.synchronizationSource, expectedFrameInfo.metadata.synchronizationSource);
        assert_equals(metadata.rtpTimestamp, expectedFrameInfo.metadata.rtpTimestamp);
        if (++numVerifiedFrames === 2)
          resolve();
      }
    });
  });

  exchangeIceCandidates(caller, callee);
  await exchangeOfferAnswer(caller, callee);

  return onmessagePromise;
}, 'RTCRtpSender initializes its transform attribute and the Worker sends an RTCEncodedVideoFrame back');

</script>
</body>
</html>