chromium/content/test/data/media/getusermedia.html

<html>
<head>
  <script type="text/javascript" src="webrtc_test_utilities.js"></script>
  <script type="text/javascript">
  $ = function(id) {
    return document.getElementById(id);
  };

  // The modal permission dialog requires a user gesture to trigger.
  // Hook up a click event listener to run a specified method (which
  // may be changed by the test).
  var functionToRun;
  function runFunctionOnClick() {
      eval(functionToRun);
  }

  window.addEventListener('load', () => {
      window.addEventListener('click', runFunctionOnClick);
  });
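
  // Example (illustration only; a real test injects a native click from the
  // browser side so that it counts as a user gesture):
  //   functionToRun = 'getUserMediaAndStop({video: true});';
  //   window.dispatchEvent(new MouseEvent('click'));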

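  // Enumerates media devices and returns, as a JSON string, the id, kind,
  // label, and facing of every audio and video input device.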
  function getSources() {
    return navigator.mediaDevices.enumerateDevices().then(function(devices) {
      document.title = 'Media devices available';
      var results = [];
      for (var device, i = 0; device = devices[i]; ++i) {
        if (device.kind != "audioinput" && device.kind != "videoinput")
          continue;
        results.push({
          'id': device.deviceId,
          'kind': device.kind == "audioinput" ? "audio" : "video",
          'label': device.label,
          'facing': ""
        });
      }
      return JSON.stringify(results);
    });
  }

  // Creates a MediaStream and renders it locally. When the video is detected to
  // be rolling, the stream should be stopped.
  function getUserMediaAndStop(constraints) {
    console.log('Calling getUserMediaAndStop. constraints: ' +
        JSON.stringify(constraints));
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(stream => {
          window.document.title = 'Granted';
          return detectVideoInLocalView1(stream).then(() => {
            stream.getVideoTracks()[0].stop();
          });
        })
        .then(() => {
          return detectVideoStopped('local-view-1');
        })
        .then(logSuccess)
        .catch(() => {
          window.document.title = 'Denied';
          throw new Error('failed');
        });
  }

  // Legacy version of the above, exposed for Java tests since they rely on
  // domAutomationController.send.
  function getUserMediaAndStopLegacy(constraints) {
    getUserMediaAndStop(constraints)
      .then((result) => window.domAutomationController.send(result));
  }

  // Requests getUserMedia and expects it to succeed.
  function getUserMediaAndExpectSuccess(constraints) {
    console.log('Calling getUserMediaAndExpectSuccess.');
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(logSuccess);
  }

  // Requests getUserMedia and expects it to fail. The error name is returned
  // to the test.
  function getUserMediaAndExpectFailure(constraints) {
    console.log('Calling getUserMediaAndExpectFailure.');
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(stream => {
          throw new Error('Unexpectedly succeeded getUserMedia.');
        }, (err) => err.name);
  }

  function renderClonedMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderClonedMediastreamAndStop.');
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(stream => {
          var duplicate = stream.clone();
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(duplicate.getVideoTracks().length, 1);
          assertNotEquals(stream.getVideoTracks()[0].id,
                          duplicate.getVideoTracks()[0].id);
          return detectVideoInLocalView1(stream)
              .then(logSuccess);
        });
  }

  function renderDuplicatedMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderDuplicatedMediastreamAndStop.');
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(stream => {
          var duplicate = new MediaStream(stream);
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(duplicate.getVideoTracks().length, 1);
          assertEquals(stream.getVideoTracks()[0].id,
                       duplicate.getVideoTracks()[0].id);
          return detectVideoInLocalView1(duplicate)
              .then(logSuccess);
        });
  }

  function renderSameTrackMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderSameTrackMediastreamAndStop.');
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(stream => {
          var duplicate = new MediaStream();
          duplicate.addTrack(stream.getVideoTracks()[0]);
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(duplicate.getVideoTracks().length, 1);
          assertEquals(stream.getVideoTracks()[0].id,
                       duplicate.getVideoTracks()[0].id);
          return detectVideoInLocalView1(duplicate)
              .then(logSuccess);
        });
  }

  function renderClonedTrackMediastreamAndStop(constraints, waitTimeInSeconds) {
    console.log('Calling renderClonedTrackMediastreamAndStop.');
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(stream => {
          var duplicate = new MediaStream();
          duplicate.addTrack(stream.getVideoTracks()[0].clone());
          assertEquals(stream.getVideoTracks().length, 1);
          assertEquals(duplicate.getVideoTracks().length, 1);
          assertNotEquals(stream.getVideoTracks()[0].id,
                          duplicate.getVideoTracks()[0].id);
          return detectVideoInLocalView1(duplicate)
              .then(logSuccess);
        });
  }

  function getUserMediaAndRenderInSeveralVideoTags() {
    // This test makes sure multiple video renderers can be created for the same
    // local video track, and makes sure a renderer can still render if other
    // renderers are paused. See http://crbug/352619.
    function createDetectableRenderer(stream, id) {
      var video = document.createElement('video');
      document.body.appendChild(video);
      video.id = id;
      video.style.display = 'none';
      video.srcObject = stream;
      video.autoplay = true;
      video.play();

      // The detector needs a canvas.
      var canvas = document.createElement('canvas');
      canvas.id = video.id + "-canvas";
      canvas.style.display = 'none';
      document.body.appendChild(canvas);
    }

    return navigator.mediaDevices.getUserMedia({video: true}).then(stream => {
      // Create 3 video renderers and pause them once video is playing.
      var createdAndPaused = [];
      for (var i = 0; i < 3; ++i) {
        var id = "video" + i;
        createDetectableRenderer(stream, id);

        // Once video |id| is detected to be playing, pause it.
        var promise = detectVideoPlaying(id)
            .then(() => {
              console.log("pause " + id);
              $(id).pause();
            });
        createdAndPaused.push(promise);
      }

      return Promise.all(createdAndPaused)
        .then(() => {
          // Create one last renderer and make sure it can play video.
          var id = "lastVideoTag";
          createDetectableRenderer(stream, id);
          return detectVideoPlaying(id);
        })
        .then(logSuccess);
    });
  }

  // Gets a video stream up, analyzes it, and returns the detected aspect
  // ratio to the test.
  function getUserMediaAndAnalyseAndStop(constraints) {
    console.log('Calling getUserMediaAndAnalyseAndStop.');
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(displayDetectAndAnalyzeVideo);
  }

  // This tests that a MediaStream can be cloned and that the clone can
  // be rendered.
  function getUserMediaAndClone() {
    console.log('Calling getUserMediaAndClone.');
    return navigator.mediaDevices.getUserMedia({video: true, audio: true})
        .then(createAndRenderClone);
  }

  // Creates two MediaStreams and renders them locally. When the video of both
  // streams is detected to be rolling, we stop the local video tracks one at
  // a time. In particular, we verify that stopping one track does not stop
  // the other.
  function twoGetUserMediaAndStop(constraints) {
    var stream1 = null;
    var stream2 = null;
    return navigator.mediaDevices.getUserMedia(constraints)
        .then(stream => {
          stream1 = stream;
          $('local-view-1').srcObject = stream;
          return navigator.mediaDevices.getUserMedia(constraints);
        })
        .then(stream => {
          stream2 = stream;
          $('local-view-2').srcObject = stream;

          // Stop track 2, ensure track 2 stops but not track 1, then stop track 1.
          stream2.getVideoTracks()[0].stop();
          return detectVideoStopped('local-view-2');
        })
        .then(() => detectVideoInLocalView1(stream1))
        .then(() => {
          stream1.getVideoTracks()[0].stop();
          return detectVideoStopped('local-view-1');
        })
        .then(logSuccess);
  }

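  // Requests two streams with different constraints, analyzes each in its
  // own video tag, and returns the two aspect-ratio strings joined by '-'.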
  function twoGetUserMedia(constraints1, constraints2) {
    var result="";
    return navigator.mediaDevices.getUserMedia(constraints1)
        .then(stream => {
          return displayDetectAndAnalyzeVideoInElement(
              stream, 'local-view-1');
        })
        .then((aspectRatio) => {
          result = aspectRatio;
          return navigator.mediaDevices.getUserMedia(constraints2);
        })
        .then(stream => {
          return displayDetectAndAnalyzeVideoInElement(
              stream, 'local-view-2');
        })
        .then((aspectRatio) => {
          result = result + '-' + aspectRatio;
          return result;
        });
  }

  function getUserMediaInIframeAndCloseInSuccessCb(constraints) {
    var iframe = document.createElement('iframe');
    return new Promise((resolve, reject) => {
      iframe.onload = onIframeLoaded;
      document.body.appendChild(iframe);
      iframe.src = window.location;

      function onIframeLoaded() {
        var iframe = window.document.querySelector('iframe');
        iframe.contentWindow.navigator.mediaDevices.getUserMedia(constraints)
            .then(stream => {
              // Remove the iframe from the parent within the callback scope.
              window.parent.document.querySelector('iframe').remove();
              // This function enqueues reporting test success, rather than doing
              // it directly. We do this so we catch crashes that occur in the
              // current execution context, but after logSuccess is
              // invoked.
              setTimeout(function () {
                resolve(logSuccess());
              }, 0);
            })
            .catch(reject);
      }
    });
  }

  function getUserMediaInIframeAndCloseInFailureCb(constraints) {
    var iframe = document.createElement('iframe');
    return new Promise((resolve, reject) => {
      iframe.onload = onIframeLoaded;
      document.body.appendChild(iframe);
      iframe.src = window.location;

      function onIframeLoaded() {
        var iframe = window.document.querySelector('iframe');
        iframe.contentWindow.navigator.mediaDevices.getUserMedia(constraints)
            .then(stream => {
              reject(new Error('GetUserMedia call succeeded unexpectedly.'));
            })
            .catch(error => {
              // Remove the iframe from the parent within the callback scope.
              window.parent.document.querySelector('iframe').remove();
              // This function enqueues reporting test success, rather than doing
              // it directly. We do this so we catch crashes that occur in the
              // current execution context, but after logSuccess is
              // invoked.
              setTimeout(function () {
                resolve(logSuccess());
              }, 0);
            });
      }
    });
  }

  function detectVideoInLocalView1(stream) {
    $('local-view-1').srcObject = stream;
    return detectVideoPlaying('local-view-1');
  }

  function displayDetectAndAnalyzeVideo(stream) {
    return displayDetectAndAnalyzeVideoInElement(stream, 'local-view-1');
  }

  function displayDetectAndAnalyzeVideoInElement(
      stream, videoElementName) {
    var videoElement = $(videoElementName);
    videoElement.srcObject = stream;
    return new Promise(resolve => {
      videoElement.onloadedmetadata = resolve;
    }).then(() => detectAspectRatio(videoElement));
  }

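  // Builds a new MediaStream from |stream|'s video track (plus an audio
  // track that is added and removed again, when one is available) and
  // renders it.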
  function createAndRenderClone(stream) {
    // TODO(perkj): --use-fake-device-for-media-stream does not currently
    // work with audio devices, and not all bots have a microphone.
    var newStream = new MediaStream();
    newStream.addTrack(stream.getVideoTracks()[0]);
    assertEquals(newStream.getVideoTracks().length, 1);
    if (stream.getAudioTracks().length > 0) {
      newStream.addTrack(stream.getAudioTracks()[0]);
      assertEquals(newStream.getAudioTracks().length, 1);
      newStream.removeTrack(newStream.getAudioTracks()[0]);
      assertEquals(newStream.getAudioTracks().length, 0);
    }

    return detectVideoInLocalView1(newStream)
        .then(logSuccess);
  }

  // This function tries to calculate the aspect ratio shown by the fake capture
  // device in the video tag. For this, we count the number of light green
  // pixels along |aperture| pixels on the positive X and Y axes, starting from
  // the center of the image. In this very center there should be a time-varying
  // pacman; the algorithm counts for a couple of iterations and keeps the
  // maximum number of light green pixels in both directions. From this data
  // the aspect ratio is calculated, and the test fails if the number of green
  // pixels is not the same along the X and Y axes.
  // The result of the analysis is returned as a string in the
  // format "w=xxx:h=yyy".
  function detectAspectRatio(videoElement) {
    var canvas = $(videoElement.id + '-canvas');

    var maxLightGreenPixelsX = 0;
    var maxLightGreenPixelsY = 0;

    var attempt = 0;
    var maxAttempts = 10;

    return new Promise((resolve, reject) => {
      var detectorFunction = function() {
        var width = videoElement.videoWidth;
        var height = videoElement.videoHeight;
        if (width == 0 || height == 0)
          return reject(new Error(("VideoElement width and height set to 0.")));

        canvas.width = width;
        canvas.height = height;
        var aperture = Math.min(width, height) / 2;
        var context = canvas.getContext('2d');
        context.drawImage(videoElement, 0, 0, width, height);

        // We are interested in a window starting from the center of the image
        // where we expect the circle from the fake video capture to be rolling.
        var pixels = context.getImageData(width / 2, height / 2,
                                          aperture, aperture);

        var lightGreenPixelsX = 0;
        var lightGreenPixelsY = 0;

        // Walk horizontally counting light green pixels. The image data is
        // RGBA, so the green channel of pixel |i| is at index 4 * i + 1.
        for (var x = 0; x < aperture; ++x) {
          if (!isAlmostBackgroundGreen(pixels.data[4 * x + 1]))
            lightGreenPixelsX++;
        }
        // Walk vertically counting light green pixels.
        for (var y = 0; y < aperture; ++y) {
          if (!isAlmostBackgroundGreen(pixels.data[4 * y * aperture + 1]))
            lightGreenPixelsY++;
        }
        if (lightGreenPixelsX > maxLightGreenPixelsX)
          maxLightGreenPixelsX = lightGreenPixelsX;
        if (lightGreenPixelsY > maxLightGreenPixelsY)
          maxLightGreenPixelsY = lightGreenPixelsY;

        // Allow maxLightGreenPixelsY = maxLightGreenPixelsX +-1 due to
        // possible subpixel rendering on Mac and Android.
        if (maxLightGreenPixelsY > maxLightGreenPixelsX + 1 ||
            maxLightGreenPixelsY < maxLightGreenPixelsX - 1 ||
            maxLightGreenPixelsY == 0 ||
            maxLightGreenPixelsX == width / 2 ||
            maxLightGreenPixelsY == height / 2) {
          if (++attempt > maxAttempts) {
            clearInterval(detectorInterval);
            return reject(new Error("Aspect ratio corrupted. X " + maxLightGreenPixelsX  +
                    " Y " + maxLightGreenPixelsY + " width " + width +
                    " height " + height));
          }
          else {
            // We have a bad aspect ratio now; give a chance to shape up.
            return;
          }
        }

        clearInterval(detectorInterval);
        var result = "w=" + width + ":h=" + height;
        resolve(result);
      };

      var detectorInterval = setInterval(detectorFunction, 50);
    });
  }

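  // Verifies that an unconstrained audio track uses the default device and
  // has echo cancellation enabled.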
  function getAudioSettingsDefault() {
    return navigator.mediaDevices.getUserMedia({audio:true})
      .then(stream => {
        assertEquals(stream.getAudioTracks().length, 1);
        var settings = stream.getAudioTracks()[0].getSettings();
        assertEquals(settings.deviceId, 'default');
        assertTrue(settings.echoCancellation);
        stream.getAudioTracks()[0].stop();
        return logSuccess();
      })
      .catch(_ => {
        throw new Error("getUserMedia failed")
      });
  }

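  // Verifies that requesting audio with echoCancellation: false is reflected
  // in the resulting track's settings.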
  function getAudioSettingsNoEchoCancellation() {
    return navigator.mediaDevices.getUserMedia({audio:{echoCancellation: false}})
      .then(stream => {
        assertEquals(stream.getAudioTracks().length, 1);
        var settings = stream.getAudioTracks()[0].getSettings();
        assertEquals(settings.deviceId, 'default');
        assertEquals(settings.echoCancellation, false);
        stream.getAudioTracks()[0].stop();
        return logSuccess();
      })
      .catch(_ => {
        throw new Error("getUserMedia failed")
      });
  }

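  // Opens the last enumerated audio input device by deviceId and verifies
  // that the track's settings report that exact, non-default device.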
  function getAudioSettingsDeviceId() {
    return navigator.mediaDevices.enumerateDevices()
      .then(devices => {
        var last_device_id;
        for (var device, i = 0; device = devices[i]; ++i) {
          if (device.kind != "audioinput")
            continue;
          last_device_id = device.deviceId;
        }
        return navigator.mediaDevices.getUserMedia(
          {audio:{deviceId: {exact: last_device_id}}})
          .then(stream => {
            assertEquals(stream.getAudioTracks().length, 1);
            var settings = stream.getAudioTracks()[0].getSettings();
            assertEquals(settings.deviceId, last_device_id);
            assertNotEquals(settings.deviceId, 'default');
            assertTrue(settings.echoCancellation);
            stream.getAudioTracks()[0].stop();
            return logSuccess();
          })
      });
  }

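  // Starts playback with an audio-only stream, then adds a video track and
  // expects a resize event (with no new loadstart) once frames arrive.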
  function srcObjectAddVideoTrack() {
    var video = document.createElement('video');
    video.autoplay = true;
    assertEquals(video.srcObject, null);
    return navigator.mediaDevices.getUserMedia({audio: true, video: true})
      .then(stream => {
        return new Promise((resolve, reject) => {
          video.onplaying = function () {
            video.onplaying = null;
            resolve();
          };
          video.srcObject = new MediaStream(stream.getAudioTracks());
        })
        .then(() => {
          console.log("playing");
          return new Promise((resolve, reject) => {
            video.onloadstart = function() {
              reject(new Error("loadstart should not be called"));
            };

            video.onresize = function() {
              assertNotEquals(video.srcObject, null);
              assertTrue(video.videoHeight > 0);
              assertTrue(video.videoWidth > 0);
              resolve(logSuccess());
            };

            assertNotEquals(video.srcObject, null);
            assertEquals(video.videoWidth, 0);
            assertEquals(video.videoHeight, 0);
            video.srcObject.addTrack(stream.getVideoTracks()[0]);
          });
        })
      });
  }

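  // Plays an audio/video stream, stops all its tracks, and expects the
  // element to go idle after a single progress event and no new loadstart.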
  async function srcObjectReplaceInactiveTracks() {
    let video = document.createElement('video');
    video.autoplay = true;
    assertEquals(video.srcObject, null);
    try {
      let stream = await navigator.mediaDevices.getUserMedia(
          {audio: true, video: true});
      await new Promise(resolve => {
        video.onplaying = () => {
          video.onplaying = null;
          resolve();
        };
        video.srcObject = stream;
      });

      return new Promise((resolve, reject) => {
        video.onloadstart = () => {
          reject(new Error("loadstart should not be fired"));
        };

        assertNotEquals(video.srcObject, null);
        assertTrue(video.videoWidth > 0);
        assertTrue(video.videoHeight > 0);

        // Stop tracks.
        stream.getTracks().forEach(track => track.stop());
        let onprogress_calls = 0;
        video.onprogress = () => {
          assertEquals(video.networkState, HTMLMediaElement.NETWORK_IDLE);
          // Resolve after a short delay so that a second progress event, if
          // one arrives, can reject first and win the race.
          setTimeout(() => resolve(logSuccess()), 100);
          if (++onprogress_calls > 1)
            reject(new Error('progress should not be fired more than once'));
        };
      });
    } catch (err) {
      throw new Error('getUserMedia() failed');
    }
  }

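  // Plays an audio/video stream, removes the video track, and expects a
  // resize event reporting zero video dimensions and no new loadstart.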
  function srcObjectRemoveVideoTrack() {
    var video = document.createElement('video');
    video.autoplay = true;
    assertEquals(video.srcObject, null);
    return navigator.mediaDevices.getUserMedia({audio: true, video: true})
      .then(stream => {
        return new Promise(resolve => {
          video.onplaying = function() {
            video.onplaying = null;
            resolve();
          };
          video.srcObject = stream;
        })
        .then(() => {
          return new Promise((resolve, reject) => {
            video.onloadstart = function() {
              reject(new Error("loadstart should not be called"));
            };

            video.onresize = function() {
              assertNotEquals(video.srcObject, null);
              assertEquals(video.videoHeight, 0);
              assertEquals(video.videoWidth, 0);
              resolve(logSuccess());
            };

            assertNotEquals(video.srcObject, null);
            assertTrue(video.videoWidth > 0);
            assertTrue(video.videoHeight > 0);
            stream.removeTrack(stream.getVideoTracks()[0]);
          });
        });
      });
  }

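  // Renders a stream holding a canvas-capture track and a differently sized
  // getUserMedia track, removes the first track, and verifies the element
  // switches to the remaining track's dimensions.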
  function srcObjectRemoveFirstOfTwoVideoTracks() {
    var canvas = document.createElement('canvas');
    var canvas_stream = canvas.captureStream();
    var canvas_width = canvas_stream.getVideoTracks()[0].getSettings().width;
    var canvas_height = canvas_stream.getVideoTracks()[0].getSettings().height;
    assertTrue(canvas_width > 1);
    assertTrue(canvas_height > 1);

    // Paint something on the canvas, so that it produces frames.
    var ctx = canvas.getContext("2d");
    ctx.moveTo(0,0);
    ctx.lineTo(200,100);
    ctx.stroke();

    var video = document.createElement('video');
    video.autoplay = true;
    assertEquals(video.srcObject, null);
    // Declared here so that the later .then() callbacks can see it.
    var big_stream;
    var gum_width = canvas_width + 1;
    var gum_height = canvas_height + 1;
    return navigator.mediaDevices.getUserMedia({
        video: {
          width: {exact: gum_width},
          height: {exact: gum_height}
        }
      }).then(gum_stream => {
        var gum_settings = gum_stream.getVideoTracks()[0].getSettings();
        assertEquals(gum_width, gum_settings.width);
        assertEquals(gum_height, gum_settings.height);
        big_stream = new MediaStream();
        big_stream.addTrack(canvas_stream.getVideoTracks()[0]);
        big_stream.addTrack(gum_stream.getVideoTracks()[0]);
        return new Promise(resolve => {
          video.onloadedmetadata = resolve;
          video.srcObject = big_stream;
        });
      })
      .then(() => {
        return new Promise(resolve => {
          video.onloadedmetadata = resolve;
          big_stream.removeTrack(big_stream.getVideoTracks()[0]);
        });
      })
      .then(() => {
        assertEquals(canvas_width, video.videoWidth);
        assertEquals(canvas_height, video.videoHeight);
        assertNotEquals(video.videoWidth, gum_width);
        assertNotEquals(video.videoHeight, gum_height);
        return new Promise(resolve => {
          video.onprogress = resolve;
        });
      })
      .then(() => {
        assertEquals(gum_width, video.videoWidth);
        assertEquals(gum_height, video.videoHeight);
        assertNotEquals(video.videoWidth, canvas_width);
        assertNotEquals(video.videoHeight, canvas_height);
        return logSuccess();
      });
  }

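  // Verifies that reassigning the same MediaStream to srcObject re-runs the
  // media element's load algorithm, i.e. loadstart fires again.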
  function srcObjectReassignSameObject() {
    var video = document.createElement('video');
    video.autoplay = true;
    assertEquals(video.srcObject, null);
    return navigator.mediaDevices.getUserMedia({audio: true, video: true})
      .then(stream => {
        return new Promise(resolve => {
          video.onplaying = resolve;
          video.srcObject = stream;
        })
      })
      .then(() => {
        console.log("playing");
        return new Promise(resolve => {
          video.onloadstart = function() {
            resolve(logSuccess());
          };
          assertNotEquals(video.srcObject, null);
          assertTrue(video.videoWidth > 0);
          assertTrue(video.videoHeight > 0);
          // Reassigning the same object should trigger the load algorithm.
          video.srcObject = video.srcObject;
        });
      });
  }

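  // Applies slightly smaller width/height constraints to a live video track
  // and verifies that the new settings take effect on the same device.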
  function applyConstraintsVideo() {
    return navigator.mediaDevices.getUserMedia({video:true})
      .then(stream => {
        assertEquals(stream.getVideoTracks().length, 1);
        var track = stream.getVideoTracks()[0];
        var settings = track.getSettings();
        var default_device_id = settings.deviceId;
        var default_width = settings.width;
        var default_height = settings.height;
        return track.applyConstraints({
          width: default_width - 1,
          height: default_height - 1
        }).then(() => {
          settings = track.getSettings();
          assertEquals(settings.deviceId, default_device_id);
          assertEquals(settings.width, default_width - 1);
          assertEquals(settings.height, default_height - 1);
          track.stop();
          return logSuccess();
        });
      });
  }

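  // Opens the same camera twice and verifies that applyConstraints() on one
  // track resizes only that track, without muting or resizing the other.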
  function applyConstraintsVideoTwoStreams() {
    return new Promise((resolve, reject) => {
      navigator.mediaDevices.getUserMedia({
          video: {
              width: {exact: 640},
              height: {exact: 480}
          }
      }).then(stream => {
          assertEquals(stream.getVideoTracks().length, 1);
          let track1 = stream.getVideoTracks()[0];
          let deviceId1 = track1.getSettings().deviceId;
          track1.onmute = () => reject(new Error("Unexpected mute of track1"));
          track1.onunmute = () => reject(new Error("Unexpected unmute of track1"));
          return navigator.mediaDevices.getUserMedia({
              video: {
                  deviceId: {exact: deviceId1},
                  width: {exact: 640},
                  height: {exact: 480}
              }
          }).then(stream2 => {
              let track2 = stream2.getVideoTracks()[0];
              track2.onmute = () => reject(new Error("Unexpected mute of track2"));
              track2.onunmute = () => reject(new Error("Unexpected unmute of track2"));
              return track1.applyConstraints({
                width: {exact: 639},
                height: {exact: 479}
              }).then(() => {
                assertEquals(track1.readyState, "live");
                let settings1 = track1.getSettings();
                assertEquals(639, settings1.width);
                assertEquals(479, settings1.height);
                assertEquals(track2.readyState, "live");
                let settings2 = track2.getSettings();
                assertEquals(640, settings2.width);
                assertEquals(480, settings2.height);
                assertEquals(deviceId1, settings1.deviceId);
                assertEquals(deviceId1, settings2.deviceId);
                track1.stop();
                track2.stop();
                resolve(logSuccess());
              });
            })
        })
        .catch(reject);
    });
  }

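  // Verifies that an impossible constraint rejects with OverconstrainedError
  // and leaves the track's settings untouched.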
  function applyConstraintsVideoOverconstrained() {
    return navigator.mediaDevices.getUserMedia({video:true})
      .then(stream => {
        assertEquals(stream.getVideoTracks().length, 1);
        var track = stream.getVideoTracks()[0];
        var settings = track.getSettings();
        var default_device_id = settings.deviceId;
        var default_width = settings.width;
        var default_height = settings.height;
        return track.applyConstraints({
          width: {exact: 1000000},
        }).then(() => {
          throw new Error("applyConstraints succeeded with impossible constraints");
        }, e => {
          assertEquals(e.name, "OverconstrainedError");
          assertEquals(e.constraint, "width");
          assertTrue(!!e.message);
          settings = track.getSettings();
          assertEquals(settings.deviceId, default_device_id);
          assertEquals(settings.width, default_width);
          assertEquals(settings.height, default_height);
          track.stop();
          return logSuccess();
        });
      });
  }

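  // Exercises applyConstraints() on a canvas-capture (non-device) track:
  // downscaling succeeds, while requesting more than the source size fails
  // with OverconstrainedError.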
  function applyConstraintsNonDevice() {
    var canvas = document.createElement('canvas');
    var canvas_stream = canvas.captureStream();
    var canvas_track = canvas_stream.getVideoTracks()[0];
    var canvas_width = canvas_track.getSettings().width;
    var canvas_height = canvas_track.getSettings().height;
    assertTrue(canvas_width > 1);
    assertTrue(canvas_height > 1);

    // Paint something on the canvas, so that it produces frames.
    var ctx = canvas.getContext("2d");
    ctx.moveTo(0,0);
    ctx.lineTo(200,100);
    ctx.stroke();

    var video = document.createElement('video');
    video.autoplay = true;
    assertEquals(video.srcObject, null);
    return new Promise((resolve, reject) => {
      video.onloadedmetadata = () => {
        assertEquals(video.videoWidth, canvas_width);
        assertEquals(video.videoHeight, canvas_height);
        var new_width = canvas_width - 1;
        var new_height = canvas_height - 1;
        video.ontimeupdate = () => {
          assertEquals(video.videoWidth, new_width);
          assertEquals(video.videoHeight, new_height);
          canvas_track.applyConstraints({width: {min: canvas_width + 1}})
            .then(() => {
              reject(new Error("applyConstraints() should not succeed at setting the " +
                      "track size to a value greater than the source size."));
            })
            .catch(e => {
              assertEquals(e.name, 'OverconstrainedError');
              assertEquals(e.constraint, 'width');
              resolve(logSuccess());
            });
        };
        canvas_track.applyConstraints({width: new_width, height: new_height})
          .then(() => {
              ctx.lineTo(100, 100);
            ctx.stroke();
          })
          .catch(e => reject(new Error("Unexpected error: " + e)));
      };
      video.srcObject = canvas_stream;
    });
  }

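  // Stress test: opens many getUserMedia streams concurrently and stops each
  // of them shortly after it starts.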
  function concurrentGetUserMediaStop() {
    var num_stopped = 0;
    const N = 250;
    return new Promise((resolve, reject) => {
      for (var i = 0; i < N; i++) {
        navigator.mediaDevices.getUserMedia({video: true}).then(s => {
          setTimeout(() => {
            s.getVideoTracks()[0].stop();
            if (++num_stopped == N)
              resolve(logSuccess());
          }, 1);
        }).catch(reject);
      }
    });
  }

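  // Verifies that getUserMedia still works after a canvas-capture track has
  // been stopped.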
  function getUserMediaAfterStopCanvasCapture() {
    var canvas = document.createElement('canvas');
    canvas.width = canvas.height = 64;

    var stream = canvas.captureStream();
    assertTrue(stream, 'Error creating MediaStream');
    assertEquals(1, stream.getVideoTracks().length);
    assertEquals(0, stream.getAudioTracks().length);

    stream.getVideoTracks()[0].stop();
    return navigator.mediaDevices.getUserMedia({video: true}).then(s => {
      s.getVideoTracks()[0].stop();
      return logSuccess();
    });
  }

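  // Opens the default audio device twice, first requiring echo cancellation
  // on and then requiring it off, and checks the resulting track settings.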
  async function getUserMediaEchoCancellationOnAndOff() {
    try {
      var streams;

      streams = await navigator.mediaDevices.getUserMedia({
          audio: {
              deviceId: {exact: "default"},
              echoCancellation: {exact: true}
          }
      });
      assertTrue(streams.getAudioTracks()[0].getSettings().echoCancellation);

      streams = await navigator.mediaDevices.getUserMedia({
          audio: {
              deviceId: {exact: "default"},
              echoCancellation: {exact: false}
          }
      });
      assertFalse(streams.getAudioTracks()[0].getSettings().echoCancellation);
      return logSuccess();
    } catch (err) {
      throw new Error('getUserMedia() failed, error: ' + err.message);
    }
  }

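  // Same as getUserMediaEchoCancellationOnAndOff, but also requests video in
  // both calls.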
  async function getUserMediaEchoCancellationOnAndOffAndVideo() {
    try {
      var streams;

      streams = await navigator.mediaDevices.getUserMedia({
          video: true,
          audio: {
              deviceId: {exact: "default"},
              echoCancellation: {exact: true}
          }
      });
      assertTrue(streams.getAudioTracks()[0].getSettings().echoCancellation);

      streams = await navigator.mediaDevices.getUserMedia({
          video: true,
          audio: {
              deviceId: {exact: "default"},
              echoCancellation: {exact: false}
          }
      });
      assertFalse(streams.getAudioTracks()[0].getSettings().echoCancellation);
      return logSuccess();
    } catch (err) {
      throw new Error('getUserMedia() failed, error: ' + err.message);
    }
  }

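  // trackEndedAfterServiceCrash bridges two separate test invocations:
  // setUpForAudioServiceCrash() installs an onended handler that resolves it,
  // and verifyAfterAudioServiceCrash() awaits it after the test harness has
  // crashed the audio service.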
  var resolveTrackEndedAfterServiceCrash;
  var trackEndedAfterServiceCrash = new Promise(function(resolve, reject) {
    resolveTrackEndedAfterServiceCrash = resolve;
  });

  async function setUpForAudioServiceCrash() {
    try {
      // Subscribe to device notifications to force immediate reconnection
      // to the audio service and verify that the browser does not crash.
      navigator.mediaDevices.ondevicechange = () => {};
      let stream = await navigator.mediaDevices.getUserMedia({audio: true});
      let track = stream.getAudioTracks()[0];
      assertTrue(stream.active);
      assertEquals(track.readyState, "live");

      // This expects the audio track to be ended due to a crash in the audio
      // service.
      track.onended = () => {
        resolveTrackEndedAfterServiceCrash(track);
      };

      return logSuccess();
    } catch (err) {
      throw new Error('getUserMedia() failed');
    }
  }

  async function verifyAfterAudioServiceCrash() {
    let track = await trackEndedAfterServiceCrash;
    assertEquals(track.readyState, "ended");

    // getUserMedia() should work after the service restarts.
    try {
      let stream = await navigator.mediaDevices.getUserMedia({audio: true});
      assertTrue(stream.active);
      assertEquals(stream.getAudioTracks()[0].readyState, "live");
      return logSuccess();
    } catch (err) {
      throw new Error('getUserMedia() failed');
    }
  }

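  // Attaches |stream| to a new video element appended to the body, starts
  // playback, and returns the element.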
  function playStreamAndGetElement(stream) {
    v = document.createElement("video");
    document.body.append(v);
    v.srcObject = stream;
    v.play();
    return v;
  }

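  // Verifies that enumerateDevices() returns the same device ids before and
  // after a same-document navigation.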
  async function enumerationAfterSameDocumentNavigation() {
    let stream = await navigator.mediaDevices.getUserMedia(
        {audio: true, video: true});
    stream.getTracks().forEach(t => t.stop());
    let devices1 = await navigator.mediaDevices.enumerateDevices();

    // Navigate within the same document and enumerate again.
    window.location = "#navigate";
    let devices2 = await navigator.mediaDevices.enumerateDevices();
    assertEquals(devices1.length, devices2.length);
    for (let i = 0; i < devices1.length; i++) {
      assertEquals(devices1[i].deviceId, devices2[i].deviceId);
    }
    return logSuccess();
  }

  </script>
</head>
<body>
  <table border="0">
    <!-- Canvases are named after their corresponding video elements. -->
    <tr>
      <td><video id="local-view-1" width="320" height="240" autoplay
          style="display:none"></video></td>
      <td><canvas id="local-view-1-canvas" width="320" height="240"
          style="display:none"></canvas></td>
    </tr>
    <tr>
      <td><video id="local-view-2" width="320" height="240" autoplay
          style="display:none"></video></td>
      <td><canvas id="local-view-2-canvas" width="320" height="240"
          style="display:none"></canvas></td>
    </tr>
  </table>
  <p>Some text so the document is ready for input.</p>
</body>
</html>