chromium/chrome/test/data/xr/e2e_test_files/html/webxr_test_camera_access.html

<!--
Tests that AR camera access returns a non-empty WebGLTexture.
-->
<html>
  <head>
    <link rel="stylesheet" type="text/css" href="../resources/webxr_e2e.css">
  </head>
  <body>
    <canvas id="webgl-canvas"></canvas>
    <script src="../../../../../../third_party/blink/web_tests/resources/testharness.js"></script>
    <script src="../resources/webxr_e2e.js"></script>
    <script>var shouldAutoCreateNonImmersiveSession = false;</script>
    <script src="../resources/webxr_boilerplate.js"></script>
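    <!-- webxr_boilerplate.js is expected to provide the sessionInfos map, the
         onARFrameCallback hook, and done(), all of which are used by the test
         steps below. -->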
    <!-- Required features must be set after importing webxr_boilerplate.js so that the boilerplate does not overwrite them -->
    <script>var immersiveArSessionInit = { requiredFeatures: ['camera-access'] };</script>
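    <!-- Listing 'camera-access' under requiredFeatures means session creation
         will fail outright on devices that do not support raw camera access. -->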
    <script>
      // Number of frames with viewer poses that some tests wait for before
      // exercising the API and running the test case:
      const MIN_NUM_FRAMES_WITH_POSES = 10;
      // Number of frames with viewer poses for which some tests keep running
      // the test case:
      const MAX_NUM_FRAMES_WITH_POSES = 20;

      let numPosesFound = 0;
      let cameraImageTexture = null;
      let ranAtLeastOnce = false;
      let pixels = null;

      // Override the canvas and context from webxr_boilerplate.js; we want to
      // use WebGL2 here (at least one test case requires it).
      webglCanvas = document.getElementById('webgl-canvas');
      webglCanvas.addEventListener('webglcontextcreationerror', function(e) {
        console.log(e.statusMessage || 'Unknown error');
      }, false);
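      // Request an XR-compatible WebGL2 context; xrCompatible is required for
      // the context to be usable with an XR session's XRWebGLBinding.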
      gl = webglCanvas.getContext('webgl2', {xrCompatible: true});
      assert_not_equals(gl, null, "We need a WebGL2 context!");

      function stepStartStoringCameraTexture(numCalls) {
        const sessionInfo = sessionInfos[sessionTypes.AR];
        const referenceSpace = sessionInfo.currentRefSpace;
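        // XRWebGLBinding exposes getCameraImage(), which returns an opaque
        // WebGLTexture containing the camera image for the current frame.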
        const glBinding = new XRWebGLBinding(sessionInfo.currentSession, gl);

        onARFrameCallback = (session, frame) => {
          console.log('Got AR frame for storing texture callback');
          const pose = frame.getViewerPose(referenceSpace);
          if (pose) {
            numPosesFound++;

            // Wait several frames to allow the image buffer to be populated
            // with camera data before attempting to get the camera image
            // texture.
            if (numPosesFound > MIN_NUM_FRAMES_WITH_POSES) {
              for (let view of pose.views) {
                if (view.camera) {
                  ranAtLeastOnce = true;
                  // Used to test that multiple consecutive calls will all return non-null textures.
                  for (let remainingCalls = numCalls; remainingCalls > 0; remainingCalls--) {
                    cameraImageTexture = glBinding.getCameraImage(view.camera);
                    assert_not_equals(cameraImageTexture, null,
                                      "XRWebGLBinding.getCameraImage(...) returned null texture.");
                  }
                }
              }
            }
          }

          if (numPosesFound > MIN_NUM_FRAMES_WITH_POSES) {
            onARFrameCallback = null;
            assert_true(ranAtLeastOnce);
            done();
          }
        };
      }

      function stepStartStoreAndDeleteCameraTexture() {
        const sessionInfo = sessionInfos[sessionTypes.AR];
        const referenceSpace = sessionInfo.currentRefSpace;
        const glBinding = new XRWebGLBinding(sessionInfo.currentSession, gl);

        onARFrameCallback = (session, frame) => {
          console.log('Got AR frame for store/delete texture callback');
          const pose = frame.getViewerPose(referenceSpace);
          if (pose) {
            numPosesFound++;

            for (let view of pose.views) {
              if (view.camera) {
                ranAtLeastOnce = true;
                cameraImageTexture = glBinding.getCameraImage(view.camera);
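                // Camera image textures are opaque; attempting to delete one
                // should fail and set INVALID_OPERATION.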
                gl.deleteTexture(cameraImageTexture);
                const error = gl.getError();
                assert_equals(error, gl.INVALID_OPERATION,
                              "deleteTexture() on a camera image texture should set INVALID_OPERATION.");
              }
            }
          }

          if (numPosesFound > MIN_NUM_FRAMES_WITH_POSES) {
            onARFrameCallback = null;
            assert_true(ranAtLeastOnce);
            done();
          }
        };
      }

      function stepConfirmCameraTextureIsNotNull() {
        const sessionInfo = sessionInfos[sessionTypes.AR];
        const referenceSpace = sessionInfo.currentRefSpace;
        const glBinding = new XRWebGLBinding(sessionInfo.currentSession, gl);

        onARFrameCallback = (session, frame) => {
          console.log('Got AR frame for non-null texture');
          const pose = frame.getViewerPose(referenceSpace);
          if (pose) {
            numPosesFound++;

            for (let view of pose.views) {
              if (view.camera) {
                ranAtLeastOnce = true;
                cameraImageTexture = glBinding.getCameraImage(view.camera);
                assert_not_equals(cameraImageTexture, null,
                                  "XRWebGLBinding.getCameraImage(...) returned null texture.");
              }
            }
          }

          if (numPosesFound > MAX_NUM_FRAMES_WITH_POSES) {
            onARFrameCallback = null;
            assert_true(ranAtLeastOnce);
            done();
          }
        };
      }

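      // Returns the number of zero-valued entries in |array|.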
      const countZeros = (array) => {
        let count = 0;
        array.forEach(v => {
          if (v === 0) {
            count++;
          }
        });
        return count;
      };

      function stepCheckCameraTextureLifetimeLimitedToOneFrame() {
        const sessionInfo = sessionInfos[sessionTypes.AR];
        const referenceSpace = sessionInfo.currentRefSpace;
        const glBinding = new XRWebGLBinding(sessionInfo.currentSession, gl);

        const fb = gl.createFramebuffer();
        const attachmentPoint = gl.COLOR_ATTACHMENT0;
        const level = 0;
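
        // The camera texture will be attached to this framebuffer so its
        // contents can be read back with readPixels().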

        // Zero-fill the pixels array so reads can be checked against a known
        // baseline.
        pixels = new Uint8Array(gl.drawingBufferWidth * gl.drawingBufferHeight * 4);
        pixels.fill(0);

        // Counts the rAF callbacks in which a readPixels() call was performed:
        let numIterations = 0;

        onARFrameCallback = (session, frame) => {
          console.log('Got AR frame for lifetime limit callback');
          const pose = frame.getViewerPose(referenceSpace);
          if (pose) {
            numPosesFound++;

            // Wait several frames to allow the image buffer to be populated
            // with camera data before attempting to get the camera image
            // texture.
            if (numPosesFound >= MIN_NUM_FRAMES_WITH_POSES
             && numPosesFound < MAX_NUM_FRAMES_WITH_POSES) {

              const view = pose.views.find(v => v.camera != null);
              if (!view) return;

              cameraImageTexture = glBinding.getCameraImage(view.camera);

              gl.bindTexture(gl.TEXTURE_2D, cameraImageTexture);
              gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
              gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D,
                                      cameraImageTexture, level);

              const fbComplete = readCameraImageTexturePixels(pixels);

              console.log("Framebuffer complete? " + fbComplete);
              if (!fbComplete) return;

              numIterations++;

              const numZeroedRGBAValues = countZeros(pixels);

              // We should get at least some non-zero pixels.
              assert_not_equals(numZeroedRGBAValues, pixels.length,
                                "Camera image texture was empty.");

              // Clean up after ourselves:
              pixels.fill(0);
            }

            if (numPosesFound >= MAX_NUM_FRAMES_WITH_POSES) {
              onARFrameCallback = null;

              assert_not_equals(numIterations, 0, "We should have read the pixels at least once!");

              // Let's try to use stale texture (cameraImageTexture is still referring
              // to a texture obtained in a previous loop):
              gl.bindTexture(gl.TEXTURE_2D, cameraImageTexture);
              gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
              gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D,
                                      cameraImageTexture, level);

              const fbComplete = readCameraImageTexturePixels(pixels);
              if (fbComplete) {
                const numZeroedRGBAValues = countZeros(pixels);
                assert_equals(numZeroedRGBAValues, pixels.length,
                              "Camera image should be empty.");
              }

              done();
            }
          }
        };
      }

      // Reads pixels from the currently bound framebuffer into |pixels|.
      // Returns true if the framebuffer status was FRAMEBUFFER_COMPLETE,
      // false otherwise. If the framebuffer was incomplete, readPixels()
      // is not called.
      function readCameraImageTexturePixels(pixels) {
        if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE) {
          gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight,
                        gl.RGBA, gl.UNSIGNED_BYTE, pixels);
          assert_equals(gl.getError(), gl.NO_ERROR);

          return true;
        }

        return false;
      }

      function stepCheckOpaqueTextures() {
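        // Assumes WEBGL_compressed_texture_etc is available on the devices
        // this test runs on; |ext| supplies the compressed format used by the
        // compressedTexImage2D() check below.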
        const ext = gl.getExtension('WEBGL_compressed_texture_etc');

        const sessionInfo = sessionInfos[sessionTypes.AR];
        const referenceSpace = sessionInfo.currentRefSpace;
        const glBinding = new XRWebGLBinding(sessionInfo.currentSession, gl);

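        // Runs |cb| and asserts that it left the context with a pending
        // INVALID_OPERATION error, the expected failure mode for mutating
        // calls on an opaque camera texture.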
        const assert_invalid_operation = (cb, message) => {
          cb();
          const error = gl.getError();
          assert_equals(error, gl.INVALID_OPERATION, message);
        };

        onARFrameCallback = (session, frame) => {
          const pose = frame.getViewerPose(referenceSpace);
          if (pose) {
            numPosesFound++;

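            // Bind and clear the session's framebuffer so later GL calls
            // operate against a well-defined drawing target.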
            gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);
            gl.clearColor(0, 0, 0, 0);
            gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

            for (let view of pose.views) {
              if (view.camera) {
                ranAtLeastOnce = true;
                cameraImageTexture = glBinding.getCameraImage(view.camera);
                assert_not_equals(cameraImageTexture, null, "Camera texture must be present");

                // gl.deleteTexture():
                assert_invalid_operation(
                  () => gl.deleteTexture(cameraImageTexture),
                  "deleteTexture()");

                // gl.texImage2D() - pass a correctly-typed, correctly-sized
                // buffer so the only reason for failure is the opaque texture:
                assert_invalid_operation(
                  () => {
                    gl.bindTexture(gl.TEXTURE_2D, cameraImageTexture);
                    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, view.camera.width,
                                  view.camera.height, 0, gl.RGBA, gl.UNSIGNED_BYTE,
                                  new Uint8Array(view.camera.width * view.camera.height * 4));
                  }, "texImage2D()");

                // gl.compressedTexImage2D() - size the data for ETC2 4x4
                // blocks so the only reason for failure is the opaque texture:
                assert_invalid_operation(
                  () => {
                    gl.bindTexture(gl.TEXTURE_2D, cameraImageTexture);
                    const numBlocks = Math.ceil(view.camera.width / 4)
                                    * Math.ceil(view.camera.height / 4);
                    gl.compressedTexImage2D(gl.TEXTURE_2D, 0, ext.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC,
                                            view.camera.width, view.camera.height, 0,
                                            new Uint8Array(numBlocks * 16));
                  }, "compressedTexImage2D()");

                // gl.copyTexImage2D():
                assert_invalid_operation(
                  () => {
                    gl.bindTexture(gl.TEXTURE_2D, cameraImageTexture);
                    gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0,
                                      view.camera.width, view.camera.height, 0);
                  }, "copyTexImage2D()");

                // gl.texStorage2D() - use one level and a sized internal
                // format so the only reason for failure is the opaque texture:
                assert_invalid_operation(
                  () => {
                    gl.bindTexture(gl.TEXTURE_2D, cameraImageTexture);
                    gl.texStorage2D(gl.TEXTURE_2D, 1, gl.RGBA8,
                                    view.camera.width, view.camera.height);
                  }, "texStorage2D()");
              }
            }
          }

          if (numPosesFound > MIN_NUM_FRAMES_WITH_POSES) {
            onARFrameCallback = null;
            assert_true(ranAtLeastOnce);
            done();
          }
        };
      }
    </script>
  </body>
</html>