com.google.ar.core.Camera Java Examples

The following examples show how to use com.google.ar.core.Camera. They are taken from open-source projects; the source file and license for each example are noted above it.
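Most of the examples share the same per-frame pattern: call Session.update() to obtain a Frame, get its Camera, and check the tracking state before using the camera's pose and matrices. A minimal sketch of that pattern (here `session` stands for an already-configured com.google.ar.core.Session):

try {
    Frame frame = session.update();
    Camera camera = frame.getCamera();

    if (camera.getTrackingState() == TrackingState.TRACKING) {
        // View and projection matrices for rendering virtual content
        // aligned with the physical world.
        float[] viewMatrix = new float[16];
        float[] projectionMatrix = new float[16];
        camera.getViewMatrix(viewMatrix, 0);
        camera.getProjectionMatrix(projectionMatrix, 0, 0.1f, 100.0f);
    }
} catch (CameraNotAvailableException e) {
    // The camera can become unavailable, e.g. while the app is paused.
}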
Example #1
Source File: HelloArActivity.java    From poly-sample-android with Apache License 2.0
private void handleTap(Frame frame, Camera camera) {
  MotionEvent tap = tapHelper.poll();
  if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
    for (HitResult hit : frame.hitTest(tap)) {
      // Check if any plane was hit, and if it was hit inside the plane polygon
      Trackable trackable = hit.getTrackable();
      // Creates an anchor if a plane or an oriented point was hit.
      if ((trackable instanceof Plane
              && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())
              && (PlaneRenderer.calculateDistanceToPlane(hit.getHitPose(), camera.getPose()) > 0))
          || (trackable instanceof Point
              && ((Point) trackable).getOrientationMode()
                   == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
        // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
        // Cap the number of objects created. This avoids overloading both the
        // rendering system and ARCore.
        if (anchors.size() >= 20) {
          anchors.get(0).detach();
          anchors.remove(0);
        }

        // Adding an Anchor tells ARCore that it should track this position in
        // space. This anchor is created on the Plane to place the 3D model
        // in the correct position relative both to the world and to the plane.
        anchors.add(hit.createAnchor());
        break;
      }
    }
  }
}
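
The tapHelper used above is a utility from the ARCore sample code that queues taps raised on the UI thread so the GL thread can consume at most one per frame. A simplified sketch of such a helper (a stand-in of my own, not the sample's actual TapHelper):

import android.content.Context;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class SimpleTapHelper implements View.OnTouchListener {
  // Thread-safe queue; taps are offered on the UI thread and polled on the GL thread.
  private final BlockingQueue<MotionEvent> queuedTaps = new ArrayBlockingQueue<>(16);
  private final GestureDetector gestureDetector;

  public SimpleTapHelper(Context context) {
    gestureDetector =
        new GestureDetector(
            context,
            new GestureDetector.SimpleOnGestureListener() {
              @Override
              public boolean onSingleTapUp(MotionEvent e) {
                queuedTaps.offer(e); // Drops the tap if the queue is full.
                return true;
              }

              @Override
              public boolean onDown(MotionEvent e) {
                return true; // Must return true or onSingleTapUp never fires.
              }
            });
  }

  // Called once per frame on the GL thread; returns null when no tap is pending.
  public MotionEvent poll() {
    return queuedTaps.poll();
  }

  @Override
  public boolean onTouch(View view, MotionEvent motionEvent) {
    return gestureDetector.onTouchEvent(motionEvent);
  }
}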
 
Example #2
Source File: SumerianConnector.java    From amazon-sumerian-arcore-starter-app with Apache License 2.0
void update() {
    final Frame frame;
    try {
        frame = mSession.update();
    } catch (CameraNotAvailableException e) {
        e.printStackTrace();
        return;
    }
    final Camera camera = frame.getCamera();

    if (camera.getTrackingState() == TrackingState.PAUSED) {
        return;
    }

    camera.getViewMatrix(mViewMatrix, 0);
    camera.getProjectionMatrix(mProjectionMatrix, 0, 0.02f, 20.0f);

    final String cameraUpdateString = "ARCoreBridge.viewProjectionMatrixUpdate('" + serializeArray(mViewMatrix) + "', '" + serializeArray(mProjectionMatrix) + "');";
    evaluateWebViewJavascript(cameraUpdateString);

    HashMap<String, float[]> anchorMap = new HashMap<>();

    for (Anchor anchor : mSession.getAllAnchors()) {
        if (anchor.getTrackingState() != TrackingState.TRACKING) {
            continue;
        }

        final float[] anchorPoseMatrix = new float[16];
        anchor.getPose().toMatrix(anchorPoseMatrix, 0);
        anchorMap.put(String.valueOf(anchor.hashCode()), anchorPoseMatrix);
    }

    if (anchorMap.size() > 0) {
        JSONObject jsonAnchors = new JSONObject(anchorMap);
        final String anchorUpdateScript = "ARCoreBridge.anchorTransformUpdate('" + jsonAnchors.toString() + "');";
        evaluateWebViewJavascript(anchorUpdateScript);
    }

    if (frame.getLightEstimate().getState() != LightEstimate.State.NOT_VALID) {
        final float[] colorCorrectionRgba = new float[4];
        frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);
        
        final String lightEstimateUpdateScript = "ARCoreBridge.lightingEstimateUpdate(" +
                String.valueOf(frame.getLightEstimate().getPixelIntensity()) + ", " +
                convertRgbaToTemperature(colorCorrectionRgba) + ");";
        evaluateWebViewJavascript(lightEstimateUpdateScript);
    }

    // Image Recognition
    Collection<AugmentedImage> updatedAugmentedImages = frame.getUpdatedTrackables(AugmentedImage.class);
    for (AugmentedImage img : updatedAugmentedImages) {
        if (img.getTrackingState() == TrackingState.TRACKING) {
            if (img.getName().equals("SumerianAnchorImage")) {
                imageAnchorCreated(img);
            }
        }
    }
}
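
serializeArray and convertRgbaToTemperature are project-specific helpers whose bodies are not shown here. Assuming the JavaScript bridge expects a comma-separated list of the 16 matrix floats, serializeArray could look roughly like this:

private String serializeArray(float[] values) {
    // Join the floats into "v0,v1,...,v15" for the ARCoreBridge call.
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < values.length; i++) {
        if (i > 0) {
            builder.append(',');
        }
        builder.append(values[i]);
    }
    return builder.toString();
}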
 
Example #3
Source File: HelloArActivity.java    From poly-sample-android with Apache License 2.0
@Override
public void onDrawFrame(GL10 gl) {
  // Clear screen to notify driver it should not load any pixels from previous frame.
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

  // If we are ready to import the object and haven't done so yet, do it now.
  if (mReadyToImport && virtualObject == null) {
    importDownloadedObject();
  }

  if (session == null) {
    return;
  }
  // Notify ARCore session that the view size changed so that the perspective matrix and
  // the video background can be properly adjusted.
  displayRotationHelper.updateSessionIfNeeded(session);

  try {
    session.setCameraTextureName(backgroundRenderer.getTextureId());

    // Obtain the current frame from ARSession. When the configuration is set to
    // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
    // camera framerate.
    Frame frame = session.update();
    Camera camera = frame.getCamera();

    // Handle one tap per frame.
    handleTap(frame, camera);

    // Draw background.
    backgroundRenderer.draw(frame);

    // If not tracking, don't draw 3d objects.
    if (camera.getTrackingState() == TrackingState.PAUSED) {
      return;
    }

    // Get projection matrix.
    float[] projmtx = new float[16];
    camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

    // Get camera matrix and draw.
    float[] viewmtx = new float[16];
    camera.getViewMatrix(viewmtx, 0);

    // Compute lighting from average intensity of the image.
    // The first three components are color scaling factors.
    // The last one is the average pixel intensity in gamma space.
    final float[] colorCorrectionRgba = new float[4];
    frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);

    // Visualize tracked points.
    PointCloud pointCloud = frame.acquirePointCloud();
    pointCloudRenderer.update(pointCloud);
    pointCloudRenderer.draw(viewmtx, projmtx);

    // Application is responsible for releasing the point cloud resources after
    // using it.
    pointCloud.release();

    // Check if we detected at least one plane. If so, hide the loading message.
    if (messageSnackbarHelper.isShowing()) {
      for (Plane plane : session.getAllTrackables(Plane.class)) {
        if (plane.getTrackingState() == TrackingState.TRACKING) {
          messageSnackbarHelper.hide(this);
          break;
        }
      }
    }

    // Visualize planes.
    planeRenderer.drawPlanes(
        session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);

    // Visualize anchors created by touch.
    float scaleFactor = 1.0f;
    for (Anchor anchor : anchors) {
      if (anchor.getTrackingState() != TrackingState.TRACKING) {
        continue;
      }
      // Get the current pose of an Anchor in world space. The Anchor pose is updated
      // during calls to session.update() as ARCore refines its estimate of the world.
      anchor.getPose().toMatrix(anchorMatrix, 0);

      // Update and draw the model.
      if (virtualObject != null) {
        virtualObject.updateModelMatrix(anchorMatrix, ASSET_SCALE * scaleFactor);
        virtualObject.draw(viewmtx, projmtx, colorCorrectionRgba);

        // If we haven't yet shown the attribution toast, show it now.
        if (!mShowedAttributionToast) {
          showAttributionToast();
        }
      }
    }

  } catch (Throwable t) {
    // Avoid crashing the application due to unhandled exceptions.
    Log.e(TAG, "Exception on the OpenGL thread", t);
  }
}
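
virtualObject.draw() hides the matrix math, but the usual approach is to combine ARCore's per-frame matrices into a single model-view-projection matrix. A sketch under that assumption (the helper name is mine; the sample's renderer may structure this differently):

import android.opengl.Matrix;

// MVP = projection * view * model, composed right to left.
static float[] computeMvpMatrix(float[] projmtx, float[] viewmtx, float[] anchorMatrix) {
  float[] modelView = new float[16];
  float[] mvp = new float[16];
  Matrix.multiplyMM(modelView, 0, viewmtx, 0, anchorMatrix, 0);
  Matrix.multiplyMM(mvp, 0, projmtx, 0, modelView, 0);
  // The result is typically uploaded as a shader uniform via
  // GLES20.glUniformMatrix4fv(...).
  return mvp;
}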
 
Example #4
Source File: DrawAR.java    From ar-drawing-java with Apache License 2.0
/**
 * update() is executed on the GL thread.
 * It handles all operations that need to take place before drawing to the screen:
 * extracting the current projection and view matrices from the AR camera,
 * adding strokes and points to the data collections,
 * updating the zero matrix and performing the matrix multiplication needed to re-center the drawing,
 * and updating the line renderer with the current strokes, color, distance scale, line width, etc.
 */
private void update() {

    if (mSession == null) {
        return;
    }

    mDisplayRotationHelper.updateSessionIfNeeded(mSession);

    try {

        mSession.setCameraTextureName(mBackgroundRenderer.getTextureId());

        mFrame = mSession.update();
        Camera camera = mFrame.getCamera();

        mState = camera.getTrackingState();

        // Update tracking states
        if (mState == TrackingState.TRACKING && !bIsTracking.get()) {
            bIsTracking.set(true);
        } else if (mState == TrackingState.STOPPED && bIsTracking.get()) {
            bIsTracking.set(false);
            bTouchDown.set(false);
        }

        // Get projection matrix.
        camera.getProjectionMatrix(projmtx, 0, AppSettings.getNearClip(), AppSettings.getFarClip());
        camera.getViewMatrix(viewmtx, 0);

        float[] position = new float[3];
        camera.getPose().getTranslation(position, 0);

        // Check if the camera has moved significantly; if so, cancel touchDown events
        // (to avoid drawing lines abruptly through the air).
        if (mLastFramePosition != null) {
            Vector3f distance = new Vector3f(position[0], position[1], position[2]);
            distance.sub(new Vector3f(mLastFramePosition[0], mLastFramePosition[1], mLastFramePosition[2]));

            if (distance.length() > 0.15) {
                bTouchDown.set(false);
            }
        }
        mLastFramePosition = position;

        // Multiply the zero matrix
        Matrix.multiplyMM(viewmtx, 0, viewmtx, 0, mZeroMatrix, 0);


        if (bNewStroke.get()) {
            bNewStroke.set(false);
            addStroke(lastTouch.get());
            mLineShaderRenderer.bNeedsUpdate.set(true);
        } else if (bTouchDown.get()) {
            addPoint(lastTouch.get());
            mLineShaderRenderer.bNeedsUpdate.set(true);
        }

        if (bReCenterView.get()) {
            bReCenterView.set(false);
            mZeroMatrix = getCalibrationMatrix();
        }

        if (bClearDrawing.get()) {
            bClearDrawing.set(false);
            clearDrawing();
            mLineShaderRenderer.bNeedsUpdate.set(true);
        }

        if (bUndo.get()) {
            bUndo.set(false);
            if (mStrokes.size() > 0) {
                mStrokes.remove(mStrokes.size() - 1);
                mLineShaderRenderer.bNeedsUpdate.set(true);
            }
        }
        mLineShaderRenderer.setDrawDebug(bLineParameters.get());
        if (mLineShaderRenderer.bNeedsUpdate.get()) {
            mLineShaderRenderer.setColor(AppSettings.getColor());
            mLineShaderRenderer.mDrawDistance = AppSettings.getStrokeDrawDistance();
            mLineShaderRenderer.setDistanceScale(mDistanceScale);
            mLineShaderRenderer.setLineWidth(mLineWidthMax);
            mLineShaderRenderer.clear();
            mLineShaderRenderer.updateStrokes(mStrokes);
            mLineShaderRenderer.upload();
        }

    } catch (Exception e) {
        e.printStackTrace();
    }
}
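
The movement check above pulls in javax.vecmath.Vector3f just to measure displacement. The same test can be written with plain arithmetic, assuming the sample's 0.15 m threshold:

private static boolean cameraMovedBeyond(float[] current, float[] last, float thresholdMeters) {
    // Euclidean distance between the current and previous camera positions.
    float dx = current[0] - last[0];
    float dy = current[1] - last[1];
    float dz = current[2] - last[2];
    return Math.sqrt(dx * dx + dy * dy + dz * dz) > thresholdMeters;
}

// Usage inside update():
// if (mLastFramePosition != null && cameraMovedBeyond(position, mLastFramePosition, 0.15f)) {
//     bTouchDown.set(false);
// }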
 
Example #5
Source File: MainActivity.java    From augmentedreality with Apache License 2.0
@Override
public void onDrawFrame(GL10 gl) {
    // Clear screen to notify driver it should not load any pixels from previous frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    if (session == null) {
        return;
    }
    // Notify ARCore session that the view size changed so that the perspective matrix and
    // the video background can be properly adjusted.
    displayRotationHelper.updateSessionIfNeeded(session);

    try {
        session.setCameraTextureName(backgroundRenderer.getTextureId());

        // Obtain the current frame from ARSession. When the configuration is set to
        // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
        // camera framerate.
        Frame frame = session.update();
        Camera camera = frame.getCamera();

        // Handle taps. Handling only one tap per frame, as taps are usually low frequency
        // compared to frame rate.

        MotionEvent tap = queuedSingleTaps.poll();
        if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
            for (HitResult hit : frame.hitTest(tap)) {
                // Check if any plane was hit, and if it was hit inside the plane polygon
                Trackable trackable = hit.getTrackable();
                // Creates an anchor if a plane or an oriented point was hit.
                if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
                    || (trackable instanceof Point
                    && ((Point) trackable).getOrientationMode()
                    == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
                    // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
                    // Cap the number of objects created. This avoids overloading both the
                    // rendering system and ARCore.
                    if (anchors.size() >= 20) {
                        anchors.get(0).detach();
                        anchors.remove(0);
                    }
                    // Adding an Anchor tells ARCore that it should track this position in
                    // space. This anchor is created on the Plane to place the 3D model
                    // in the correct position relative both to the world and to the plane.
                    anchors.add(hit.createAnchor());
                    break;
                }
            }
        }


        // Draw the background, which is what the camera is actually capturing.
        backgroundRenderer.draw(frame);

        // Check if we detected at least one plane. If so, hide the loading message.
        if (messageSnackbar != null) {
            for (Plane plane : session.getAllTrackables(Plane.class)) {
                if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
                    && plane.getTrackingState() == TrackingState.TRACKING) {
                    hideLoadingMessage();
                    break;
                }
            }
        }


        // If not tracking, don't draw 3d objects.
        if (camera.getTrackingState() == TrackingState.PAUSED) {
            return;
        }
        // Get projection matrix.
        float[] projmtx = new float[16];
        camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

        // Get camera matrix and draw.
        float[] viewmtx = new float[16];
        camera.getViewMatrix(viewmtx, 0);

        // Visualize anchors created by touch.
        // i.e., get the anchors from taps and draw a cube at each place.
        float scaleFactor = 1.0f;
        for (Anchor anchor : anchors) {
            if (anchor.getTrackingState() != TrackingState.TRACKING) {
                continue;
            }
            // Get the current pose of an Anchor in world space. The Anchor pose is updated
            // during calls to session.update() as ARCore refines its estimate of the world.
            anchor.getPose().toMatrix(anchorMatrix, 0);

            // Update and draw the model and its shadow.
            mCube.updateModelMatrix(anchorMatrix, scaleFactor);
            mCube.draw(viewmtx, projmtx);
        }

    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}
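
The plane/point filter in this example (and in Example #1) is verbose enough to be worth extracting. One possible refactor, with hypothetical naming:

// Returns true when the hit landed inside a plane's polygon or on an
// oriented feature point, the two cases the samples accept for anchoring.
private static boolean isValidHit(HitResult hit) {
    Trackable trackable = hit.getTrackable();
    if (trackable instanceof Plane) {
        return ((Plane) trackable).isPoseInPolygon(hit.getHitPose());
    }
    if (trackable instanceof Point) {
        return ((Point) trackable).getOrientationMode()
            == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL;
    }
    return false;
}

// The tap loop then reduces to:
// for (HitResult hit : frame.hitTest(tap)) {
//     if (isValidHit(hit)) { anchors.add(hit.createAnchor()); break; }
// }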