com.google.ar.core.HitResult Java Examples

The following examples show how to use com.google.ar.core.HitResult. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example #1
Source File: HelloArActivity.java    From poly-sample-android with Apache License 2.0
/**
 * Consumes at most one queued tap per frame and, while the camera is tracking,
 * turns the closest valid hit (a plane hit inside its polygon and in front of
 * the camera, or an oriented feature point) into a new anchor.
 */
private void handleTap(Frame frame, Camera camera) {
  MotionEvent motionEvent = tapHelper.poll();
  if (motionEvent == null || camera.getTrackingState() != TrackingState.TRACKING) {
    return;
  }
  for (HitResult hitResult : frame.hitTest(motionEvent)) {
    Trackable target = hitResult.getTrackable();
    // A plane counts only when the hit lies inside its polygon and the
    // camera is on the front side of the plane.
    boolean hitValidPlane =
        target instanceof Plane
            && ((Plane) target).isPoseInPolygon(hitResult.getHitPose())
            && PlaneRenderer.calculateDistanceToPlane(hitResult.getHitPose(), camera.getPose()) > 0;
    // A feature point counts only when ARCore estimated a surface normal for it.
    boolean hitOrientedPoint =
        target instanceof Point
            && ((Point) target).getOrientationMode() == OrientationMode.ESTIMATED_SURFACE_NORMAL;
    if (!hitValidPlane && !hitOrientedPoint) {
      continue;
    }
    // Cap the number of anchors so neither the renderer nor ARCore is
    // overloaded; drop the oldest one first.
    if (anchors.size() >= 20) {
      anchors.get(0).detach();
      anchors.remove(0);
    }
    // Creating an anchor tells ARCore to keep refining this pose in world
    // space as its understanding of the scene improves.
    anchors.add(hitResult.createAnchor());
    // Hits are sorted by depth — keep only the closest valid hit.
    break;
  }
}
 
Example #2
Source File: SumerianConnector.java    From amazon-sumerian-arcore-starter-app with Apache License 2.0
@JavascriptInterface
/**
 * JavaScript bridge entry point: performs an ARCore hit test at the given
 * normalized (0..1) WebView coordinates and replies to the page via
 * {@code ARCoreBridge.hitTestResponse}, passing either the closest hit pose
 * serialized as a matrix or {@code null} when nothing was hit.
 *
 * Runs the hit test on the GL thread, where the ARCore session is owned.
 */
@JavascriptInterface
public void requestHitTest(final String requestId, final float screenX, final float screenY) {
    // Without a request id the page cannot correlate the response; ignore.
    if (requestId == null) {
        return;
    }

    mSurfaceView.queueEvent(new Runnable() {
        @Override
        public void run() {
            // Convert normalized web coordinates to view pixels.
            final float hitTestX = screenX * mWebView.getWidth();
            final float hitTestY = screenY * mWebView.getHeight();

            List<HitResult> hitTestResults = null;
            try {
                hitTestResults = mSession.update().hitTest(hitTestX, hitTestY);
            } catch (CameraNotAvailableException e) {
                e.printStackTrace();
            }

            final String scriptString;

            // BUG FIX: the original dereferenced hitTestResults unconditionally,
            // so a CameraNotAvailableException left it null and crashed on
            // size(). Treat "camera unavailable" as no hits so the pending JS
            // request is still answered instead of silently dangling.
            if (hitTestResults != null && !hitTestResults.isEmpty()) {
                // Hits are sorted by depth; report only the closest pose.
                hitTestResults.get(0).getHitPose().toMatrix(mHitTestResultPose, 0);
                scriptString = "ARCoreBridge.hitTestResponse('" + requestId + "', '" + serializeArray(mHitTestResultPose) + "');";

            } else {
                scriptString = "ARCoreBridge.hitTestResponse('" + requestId + "', null);";
            }

            evaluateWebViewJavascript(scriptString);
        }
    });
}
 
Example #3
Source File: MainActivity.java    From journaldev with MIT License
/**
 * Casts a ray from the screen center into the current AR frame and places the
 * model referenced by {@code parse} on every plane the ray hits inside that
 * plane's polygon.
 */
private void addObject(Uri parse) {
    Frame currentFrame = arFragment.getArSceneView().getArFrame();
    Point center = getScreenCenter();
    // No frame yet (ARCore still starting up) — nothing to hit-test against.
    if (currentFrame == null) {
        return;
    }
    for (HitResult hit : currentFrame.hitTest((float) center.x, (float) center.y)) {
        Trackable trackable = hit.getTrackable();
        // Only accept hits that land inside a detected plane's polygon.
        if (trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) {
            placeObject(arFragment, hit.createAnchor(), parse);
        }
    }
}
 
Example #4
Source File: MainActivity.java    From augmentedreality with Apache License 2.0
/**
 * GLSurfaceView render callback: advances the ARCore session by one frame,
 * consumes at most one queued screen tap to create an anchor, and then draws
 * the camera background plus a cube at every currently-tracked anchor.
 *
 * Runs on the GL thread. All throwables are caught at the bottom so an
 * exception on this thread cannot crash the whole application.
 */
@Override
public void onDrawFrame(GL10 gl) {
    // Clear screen to notify driver it should not load any pixels from previous frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // Session may not exist yet (e.g. still waiting on camera permission).
    if (session == null) {
        return;
    }
    // Notify ARCore session that the view size changed so that the perspective matrix and
    // the video background can be properly adjusted.
    displayRotationHelper.updateSessionIfNeeded(session);

    try {
        session.setCameraTextureName(backgroundRenderer.getTextureId());

        // Obtain the current frame from ARSession. When the configuration is set to
        // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
        // camera framerate.
        Frame frame = session.update();
        Camera camera = frame.getCamera();

        // Handle taps. Handling only one tap per frame, as taps are usually low frequency
        // compared to frame rate.

        MotionEvent tap = queuedSingleTaps.poll();
        if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
            for (HitResult hit : frame.hitTest(tap)) {
                // Check if any plane was hit, and if it was hit inside the plane polygon
                Trackable trackable = hit.getTrackable();
                // Creates an anchor if a plane or an oriented point was hit.
                if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
                    || (trackable instanceof Point
                    && ((Point) trackable).getOrientationMode()
                    == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
                    // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
                    // Cap the number of objects created. This avoids overloading both the
                    // rendering system and ARCore.
                    if (anchors.size() >= 20) {
                        anchors.get(0).detach();
                        anchors.remove(0);
                    }
                    // Adding an Anchor tells ARCore that it should track this position in
                    // space. This anchor is created on the Plane to place the 3D model
                    // in the correct position relative both to the world and to the plane.
                    anchors.add(hit.createAnchor());
                    break;
                }
            }
        }


        // Draw background, which is the what the camera is actually capturing.
        backgroundRenderer.draw(frame);

        // Check if we detected at least one plane. If so, hide the loading message.
        // Only horizontal upward-facing planes that are actively tracked count.
        if (messageSnackbar != null) {
            for (Plane plane : session.getAllTrackables(Plane.class)) {
                if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
                    && plane.getTrackingState() == TrackingState.TRACKING) {
                    hideLoadingMessage();
                    break;
                }
            }
        }


        // If not tracking, don't draw 3d objects.
        if (camera.getTrackingState() == TrackingState.PAUSED) {
            return;
        }
        // Get projection matrix (near plane 0.1 m, far plane 100 m).
        float[] projmtx = new float[16];
        camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

        // Get camera matrix and draw.
        float[] viewmtx = new float[16];
        camera.getViewMatrix(viewmtx, 0);

        // Visualize anchors created by touch.
        // ie get the anchors from taps and now draw a cube for each place.
        float scaleFactor = 1.0f;
        for (Anchor anchor : anchors) {
            // Skip anchors whose pose ARCore has (temporarily) lost.
            if (anchor.getTrackingState() != TrackingState.TRACKING) {
                continue;
            }
            // Get the current pose of an Anchor in world space. The Anchor pose is updated
            // during calls to session.update() as ARCore refines its estimate of the world.
            anchor.getPose().toMatrix(anchorMatrix, 0);

            // Update and draw the model and its shadow.
            mCube.updateModelMatrix(anchorMatrix, scaleFactor);
            mCube.draw(viewmtx, projmtx);
        }

    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        // NOTE(review): broad catch is deliberate on the GL thread; the error
        // is logged rather than rethrown.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}
 
Example #5
Source File: MainActivity.java    From arcgis-runtime-samples-android with Apache License 2.0
/**
 * Setup the Ar View to use ArCore and tracking. Also add a touch listener to the scene view which checks for single
 * taps on a plane, as identified by ArCore. On tap, set the initial transformation matrix and load the scene.
 */
private void setupArView() {

  mArView = findViewById(R.id.arView);
  mArView.registerLifecycle(getLifecycle());

  // show simple instructions to the user. Refer to the README for more details
  Toast.makeText(this, R.string.camera_instruction_message, Toast.LENGTH_LONG).show();

  mArView.getSceneView().setOnTouchListener(new DefaultSceneViewOnTouchListener(mArView.getSceneView()) {
    @Override
    public boolean onSingleTapConfirmed(MotionEvent motionEvent) {
      // BUG FIX: getArFrame() returns null until ARCore produces its first
      // frame, so a tap during startup crashed with an NPE. Ignore taps until
      // a frame is available.
      if (mArView.getArSceneView().getArFrame() == null) {
        return super.onSingleTapConfirmed(motionEvent);
      }
      // get the hit results for the tap
      List<HitResult> hitResults = mArView.getArSceneView().getArFrame().hitTest(motionEvent);
      // check if the tapped point is recognized as a plane by ArCore
      if (!hitResults.isEmpty() && hitResults.get(0).getTrackable() instanceof Plane) {
        // get a reference to the tapped plane
        Plane plane = (Plane) hitResults.get(0).getTrackable();
        Toast.makeText(MainActivity.this, "Plane detected with a width of: " + plane.getExtentX(), Toast.LENGTH_SHORT)
            .show();
        // get the tapped point as a graphics point
        android.graphics.Point screenPoint = new android.graphics.Point(Math.round(motionEvent.getX()),
            Math.round(motionEvent.getY()));
        // if initial transformation set correctly
        if (mArView.setInitialTransformationMatrix(screenPoint)) {
          // the scene hasn't been configured
          if (!mHasConfiguredScene) {
            loadSceneFromPackage(plane);
          } else if (mArView.getSceneView().getScene() != null) {
            // use information from the scene to determine the origin camera and translation factor
            updateTranslationFactorAndOriginCamera(mArView.getSceneView().getScene(), plane);
          }
        }
      } else {
        Toast.makeText(MainActivity.this, getString(R.string.not_plane_error), Toast.LENGTH_SHORT).show();
        Log.e(TAG, getString(R.string.not_plane_error));
      }
      return super.onSingleTapConfirmed(motionEvent);
    }

    // disable pinch zooming
    @Override public boolean onScale(ScaleGestureDetector scaleGestureDetector) {
      return true;
    }
  });
}