<!DOCTYPE html>
<html>
<head>
<!-- Image Capture Browser Test -->
</head>
<body>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script>
const WIDTH = 640;
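// Ask for frames at most |WIDTH| pixels wide so results can be checked
// against a known dimension (see testCreateAndGrabFrameSucceeds()).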
const CONSTRAINTS = { width: { max: WIDTH } };
// Returns a Promise resolved with |object| after a delay of |delayInMs|.
function delayedResolver(object, delayInMs) {
return new Promise((resolve, reject) => {
setTimeout(() => { resolve(object); }, delayInMs);
});
}
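// Example: delayedResolver(track, 500).then(t => ...) resolves with |track|
// after roughly 500 ms; useful when a value needs time to settle before it
// is read back.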
// Runs an ImageCapture.getPhotoCapabilities() and expects it to succeed.
function testCreateAndGetPhotoCapabilitiesSucceeds() {
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
let imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
return imageCapturer.getPhotoCapabilities();
})
.then(capabilities => {
// There's nothing to check here since |capabilities| vary per device.
return logSuccess();
});
}
// Runs an ImageCapture.getPhotoSettings() and expects it to succeed.
function testCreateAndGetPhotoSettingsSucceeds() {
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
let imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
return imageCapturer.getPhotoSettings();
})
.then(settings => {
// There's nothing to check here since |settings| vary per device.
return logSuccess();
});
}
// Runs an ImageCapture.takePhoto() and expects it to succeed.
function testCreateAndTakePhotoSucceeds() {
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
let imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
return imageCapturer.takePhoto();
})
.then(blob => {
assertTrue(blob.type != undefined);
assertNotEquals(0, blob.size);
return logSuccess();
});
}
// Runs an ImageCapture.takePhoto() and expects it to get rejected.
function testCreateAndTakePhotoIsRejected() {
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
let imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
return imageCapturer.takePhoto();
})
.then(blob => {
throw new Error('Expected promise to get rejected but it was fulfilled');
}, err => {
return logSuccess();
});
}
// Runs an ImageCapture.grabFrame() and expects it to succeed.
function testCreateAndGrabFrameSucceeds() {
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
let imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
return imageCapturer.grabFrame();
})
.then(imageBitmap => {
// On Android, depending on the device orientation, |imageBitmap| might
// appear rotated.
assertTrue(WIDTH == imageBitmap.width || WIDTH == imageBitmap.height);
return logSuccess();
});
}
// Runs a MediaStreamTrack.getCapabilities().
function testCreateAndGetTrackCapabilities() {
var imageCapturer;
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
imageCapturer.track.getCapabilities();
// There's nothing to check here since |capabilities| vary per device.
return logSuccess();
});
}
// Runs a MediaStreamTrack.getSettings().
function testCreateAndGetTrackSettings() {
var imageCapturer;
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
imageCapturer.track.getSettings();
// There's nothing to check here since |settings| vary per device.
return logSuccess();
});
}
// Tries to read, set and read back the pan capability if available.
function testManipulatePan() {
var newPan = -1;
var imageCapturer;
var panTolerance;
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
const trackCapabilities = imageCapturer.track.getCapabilities();
if (trackCapabilities.pan === undefined) {
console.log('pan not supported, skipping test');
return logSuccess();
}
const currentPan = imageCapturer.track.getSettings().pan;
newPan = currentPan + trackCapabilities.pan.step;
newPan = Math.min(newPan, trackCapabilities.pan.max);
console.log("Setting pan from " + currentPan + " to " + newPan);
panTolerance = trackCapabilities.pan.step / 10;
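// Advanced constraint sets are best-effort (the browser may skip ones it
// cannot satisfy), so the settings are re-read afterwards to confirm that
// the new pan actually took effect.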
return imageCapturer.track.applyConstraints({advanced : [{pan : newPan}]})
.then(() => {
assertEquals(newPan,
imageCapturer.track.getConstraints().advanced[0].pan);
assertTrue(Math.abs(newPan - imageCapturer.track.getSettings().pan)
< panTolerance);
return logSuccess();
});
});
}
// Tries to read, set and read back the tilt capability if available.
function testManipulateTilt() {
var newTilt = -1;
var imageCapturer;
var tiltTolerance;
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
const trackCapabilities = imageCapturer.track.getCapabilities();
if (trackCapabilities.tilt === undefined) {
console.log('tilt not supported, skipping test');
return logSuccess();
}
const currentTilt = imageCapturer.track.getSettings().tilt;
newTilt = currentTilt + trackCapabilities.tilt.step;
newTilt = Math.min(newTilt, trackCapabilities.tilt.max);
console.log("Setting tilt from " + currentTilt + " to " + newTilt);
tiltTolerance = trackCapabilities.tilt.step / 10;
return imageCapturer.track.applyConstraints({advanced : [{tilt : newTilt}]})
.then(() => {
assertEquals(newTilt,
imageCapturer.track.getConstraints().advanced[0].tilt);
assertTrue(Math.abs(newTilt - imageCapturer.track.getSettings().tilt)
< tiltTolerance);
return logSuccess();
});
});
}
// Tries to read, set and read back the zoom capability if available.
function testManipulateZoom() {
var newZoom = -1;
var imageCapturer;
var zoomTolerance;
return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
const trackCapabilities = imageCapturer.track.getCapabilities();
if (trackCapabilities.zoom === undefined) {
console.log('zoom not supported, skipping test');
return logSuccess();
}
const currentZoom = imageCapturer.track.getSettings().zoom;
newZoom = currentZoom + trackCapabilities.zoom.step;
newZoom = Math.min(newZoom, trackCapabilities.zoom.max);
console.log("Setting zoom from " + currentZoom + " to " + newZoom);
zoomTolerance = trackCapabilities.zoom.step / 10;
return imageCapturer.track.applyConstraints({advanced : [{zoom : newZoom}]})
.then(() => {
assertEquals(newZoom,
imageCapturer.track.getConstraints().advanced[0].zoom);
assertTrue(Math.abs(newZoom - imageCapturer.track.getSettings().zoom)
< zoomTolerance);
return logSuccess();
});
});
}
// Tries to read, set and read back the exposureTime capability if available.
function testManipulateExposureTime() {
var newExposureTime = -1;
var imageCapturer;
var exposureTimeTolerance;
return navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
const trackCapabilities = imageCapturer.track.getCapabilities();
if (trackCapabilities.exposureTime === undefined) {
console.log('exposureTime not supported, skipping test');
return logSuccess();
}
const currentExposureTime = imageCapturer.track.getSettings().exposureTime;
newExposureTime = currentExposureTime + trackCapabilities.exposureTime.step;
newExposureTime = Math.min(newExposureTime, trackCapabilities.exposureTime.max);
console.log("Setting exposureTime from " + currentExposureTime + " to " + newExposureTime);
exposureTimeTolerance = trackCapabilities.exposureTime.step / 10;
const currentExposureMode = imageCapturer.track.getSettings().exposureMode;
console.log(" exposureMode == " + currentExposureMode);
const exposureModeCapabilities = trackCapabilities.exposureMode;
console.log(" exposureModeCapabilities == " + exposureModeCapabilities);
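// exposureTime is generally only honored in manual exposure mode, so the
// mode and the time are requested together in one advanced constraint set.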
return imageCapturer.track.applyConstraints({
advanced: [{
exposureMode: "manual",
exposureTime: newExposureTime
}]
})
.then(() => {
assertEquals(newExposureTime,
imageCapturer.track.getConstraints().advanced[0].exposureTime);
assertTrue(Math.abs(newExposureTime - imageCapturer.track.getSettings().exposureTime)
< exposureTimeTolerance);
return logSuccess();
});
});
}
// Tries to read, set and read back the focusDistance capability if available.
function testManipulateFocusDistance() {
var newFocusDistance = -1;
var imageCapturer;
var focusDistanceTolerance;
return navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
const trackCapabilities = imageCapturer.track.getCapabilities();
if (trackCapabilities.focusDistance === undefined) {
console.log('focusDistance not supported, skipping test');
return logSuccess();
}
const currentFocusDistance = imageCapturer.track.getSettings().focusDistance;
newFocusDistance = currentFocusDistance + trackCapabilities.focusDistance.step;
newFocusDistance = Math.min(newFocusDistance, trackCapabilities.focusDistance.max);
console.log("Setting focusDistance from " + currentFocusDistance + " to " + newFocusDistance);
focusDistanceTolerance = trackCapabilities.focusDistance.step / 10;
const currentFocusMode = imageCapturer.track.getSettings().focusMode;
console.log(" focusMode == " + currentFocusMode);
const focusModeCapabilities = trackCapabilities.focusMode;
console.log(" focusModeCapabilities == " + focusModeCapabilities);
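// As with exposureTime above, focusDistance is applied together with manual
// focus mode so that the requested distance can take effect.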
return imageCapturer.track.applyConstraints({
advanced: [{
focusMode: "manual",
focusDistance: newFocusDistance
}]
})
.then(() => {
assertEquals(newFocusDistance,
imageCapturer.track.getConstraints().advanced[0].focusDistance);
assertTrue(Math.abs(newFocusDistance - imageCapturer.track.getSettings().focusDistance)
< focusDistanceTolerance);
return logSuccess();
});
});
}
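// The tests above all repeat the same getUserMedia() + ImageCapture setup.
// A helper along these lines (a sketch only; not used by the tests as
// written) could factor that boilerplate out:
function createImageCapturer() {
  return navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
      .then(stream => {
        assertEquals('video', stream.getVideoTracks()[0].kind);
        return new ImageCapture(stream.getVideoTracks()[0]);
      });
}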
</script>
</body>
</html>