com.google.ar.core.Frame Java Examples

The following examples show how to use com.google.ar.core.Frame. The snippets are taken from open-source Android projects; the source file, originating project, and license are noted above each example.
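For orientation, here is a minimal sketch of the pattern most of the examples below share: a Frame is obtained once per render pass from Session.update(). This assumes a configured Session running on a GL thread; the FrameLoop class and its wiring are illustrative, and only the ARCore calls are real API.

import com.google.ar.core.Frame;
import com.google.ar.core.Session;
import com.google.ar.core.TrackingState;
import com.google.ar.core.exceptions.CameraNotAvailableException;

public class FrameLoop {
    private final Session session;

    public FrameLoop(Session session) {
        this.session = session;
    }

    // Called once per render frame on the GL thread.
    public void onDrawFrame() {
        try {
            // Blocks until a new camera image is available when the session
            // uses UpdateMode.BLOCKING (the default).
            Frame frame = session.update();
            if (frame.getCamera().getTrackingState() != TrackingState.TRACKING) {
                return; // Skip virtual content until tracking resumes.
            }
            // ... draw the camera background, then virtual content ...
        } catch (CameraNotAvailableException e) {
            // The camera can be claimed by another app; fail gracefully.
        }
    }
}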
Example #1
Source File: TrackingIndicator.java    From justaline-android with Apache License 2.0
public void setTrackingStates(Frame frame, Anchor anchor) {
    trackingState = frame.getCamera().getTrackingState();
    anchorTrackingState = anchor == null ? null : anchor.getTrackingState();

    if (trackingState != TrackingState.TRACKING && !mNotTrackingEscalating) {
        mNotTrackingEscalating = true;
        mHandler.postDelayed(mTrackingIndicatorTimeoutRunnable,
                SURFACE_RENDER_TIMEOUT_INTERVAL);
    }

    if (trackingState == TrackingState.TRACKING) {
        resetTrackingTimeout();
    }

    updateUI();
}
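The handler members referenced above are not shown in the snippet. A hedged reconstruction of what they likely look like (the actual justaline-android source may differ, and the timeout value is assumed):

private static final long SURFACE_RENDER_TIMEOUT_INTERVAL = 2000; // ms, assumed

private final Runnable mTrackingIndicatorTimeoutRunnable = new Runnable() {
    @Override
    public void run() {
        // Tracking was lost for the whole interval; stop escalating and
        // let the UI show its "not tracking" state.
        mNotTrackingEscalating = false;
        updateUI();
    }
};

private void resetTrackingTimeout() {
    // Tracking recovered: cancel any pending escalation.
    mHandler.removeCallbacks(mTrackingIndicatorTimeoutRunnable);
    mNotTrackingEscalating = false;
}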
 
Example #2
Source File: MainActivity.java    From amazon-sumerian-arcore-starter-app with Apache License 2.0
@Override
public void onDrawFrame(GL10 gl) {
    // Clear screen to notify driver it should not load any pixels from previous frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    if (mSession == null) {
        return;
    }

    try {
        // Obtain the current frame from ARSession. When the configuration is set to
        // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
        // camera framerate.
        final Frame frame = mSession.update();

        // Draw background.
        mBackgroundRenderer.draw(frame);
        mSumerianConnector.update();
    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}
 
Example #3
Source File: HelloArActivity.java    From poly-sample-android with Apache License 2.0
private void handleTap(Frame frame, Camera camera) {
  MotionEvent tap = tapHelper.poll();
  if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
    for (HitResult hit : frame.hitTest(tap)) {
      // Check if any plane was hit, and if it was hit inside the plane polygon
      Trackable trackable = hit.getTrackable();
      // Creates an anchor if a plane or an oriented point was hit.
      if ((trackable instanceof Plane
              && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())
              && (PlaneRenderer.calculateDistanceToPlane(hit.getHitPose(), camera.getPose()) > 0))
          || (trackable instanceof Point
              && ((Point) trackable).getOrientationMode()
                  == OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
        // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
        // Cap the number of objects created. This avoids overloading both the
        // rendering system and ARCore.
        if (anchors.size() >= 20) {
          anchors.get(0).detach();
          anchors.remove(0);
        }

        // Adding an Anchor tells ARCore that it should track this position in
        // space. This anchor is created on the Plane to place the 3D model
        // in the correct position relative both to the world and to the plane.
        anchors.add(hit.createAnchor());
        break;
      }
    }
  }
}
 
Example #4
Source File: BackgroundRenderer.java    From justaline-android with Apache License 2.0
/**
 * Draws the AR background image.  The image will be drawn such that virtual content rendered
 * with the matrices provided by {@link Camera#getViewMatrix(float[], int)} and
 * {@link Camera#getProjectionMatrix(float[], int, float, float)} will accurately follow
 * static physical objects.  This must be called <b>before</b> drawing virtual content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {
    // Re-query the UV coordinates for the screen rect on every draw; the
    // display geometry may have changed since the last frame.
    frame.transformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);

    // No need to test or write depth, the screen quad has arbitrary depth, and is expected
    // to be drawn first.
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glDepthMask(false);

    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);

    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);

    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.glDepthMask(true);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    ShaderUtil.checkGLError(TAG, "Draw");
}
 
Example #5
Source File: MainActivity.java    From journaldev with MIT License
private void addObject(Uri parse) {
    Frame frame = arFragment.getArSceneView().getArFrame();
    Point point = getScreenCenter();
    if (frame != null) {
        List<HitResult> hits = frame.hitTest((float) point.x, (float) point.y);

        for (int i = 0; i < hits.size(); i++) {
            Trackable trackable = hits.get(i).getTrackable();
            if (trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hits.get(i).getHitPose())) {
                placeObject(arFragment, hits.get(i).createAnchor(), parse);
                // Hits are sorted by depth; place only at the closest plane hit.
                break;
            }
        }
    }
}
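getScreenCenter() is referenced but not shown. A plausible implementation, assuming the AR view fills the activity's content area (the original journaldev source may differ); Point here is android.graphics.Point:

private Point getScreenCenter() {
    View view = findViewById(android.R.id.content);
    return new Point(view.getWidth() / 2, view.getHeight() / 2);
}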
 
Example #6
Source File: MainActivity.java    From journaldev with MIT License
@Override
public void onUpdate(FrameTime frameTime) {
    Frame frame = arFragment.getArSceneView().getArFrame();

    Log.d("API123", "onUpdateframe... current anchor node " + (currentAnchorNode == null));


    // Guard against a null frame: getArFrame() can return null before the
    // first ARCore update.
    if (currentAnchorNode != null && frame != null) {
        Pose objectPose = currentAnchor.getPose();
        Pose cameraPose = frame.getCamera().getPose();

        float dx = objectPose.tx() - cameraPose.tx();
        float dy = objectPose.ty() - cameraPose.ty();
        float dz = objectPose.tz() - cameraPose.tz();

        // Compute the straight-line distance.
        float distanceMeters = (float) Math.sqrt(dx * dx + dy * dy + dz * dz);
        tvDistance.setText("Distance from camera: " + distanceMeters + " metres");


        /*float[] distance_vector = currentAnchor.getPose().inverse()
                .compose(cameraPose).getTranslation();
        float totalDistanceSquared = 0;
        for (int i = 0; i < 3; ++i)
            totalDistanceSquared += distance_vector[i] * distance_vector[i];*/
    }
}
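The commented-out block at the end computes the same straight-line distance via Pose composition. Completed as a sketch, it would read:

// Transform the camera pose into the anchor's coordinate frame and take the
// length of the resulting translation; equivalent to the dx/dy/dz version.
float[] distanceVector = currentAnchor.getPose().inverse()
        .compose(cameraPose).getTranslation();
float totalDistanceSquared = 0;
for (int i = 0; i < 3; ++i) {
    totalDistanceSquared += distanceVector[i] * distanceVector[i];
}
float distanceMeters = (float) Math.sqrt(totalDistanceSquared);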
 
Example #7
Source File: BackgroundRenderer.java    From unity-ads-android with Apache License 2.0
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
void draw(Frame frame) {
	// If display rotation changed (also includes view size change), we need to re-query the uv
	// coordinates for the screen rect, as they may have changed as well.
	if (frame.hasDisplayGeometryChanged()) {
		frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
	}

	GLES20.glDisable(GLES20.GL_DEPTH_TEST);
	GLES20.glDepthMask(false);

	GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);

	GLES20.glUseProgram(quadProgram);

	GLES20.glVertexAttribPointer(
			quadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadVertices);

	GLES20.glVertexAttribPointer(
			quadTexCoordParam,
			TEXCOORDS_PER_VERTEX,
			GLES20.GL_FLOAT,
			false,
			0,
			quadTexCoordTransformed);

	GLES20.glEnableVertexAttribArray(quadPositionParam);
	GLES20.glEnableVertexAttribArray(quadTexCoordParam);

	GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

	GLES20.glDisableVertexAttribArray(quadPositionParam);
	GLES20.glDisableVertexAttribArray(quadTexCoordParam);

	GLES20.glDepthMask(true);
	GLES20.glEnable(GLES20.GL_DEPTH_TEST);
}
 
Example #8
Source File: MainActivity.java    From augmentedreality with Apache License 2.0
@Override
public void onDrawFrame(GL10 gl) {
    // Clear screen to notify driver it should not load any pixels from previous frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    if (session == null) {
        return;
    }
    // Notify ARCore session that the view size changed so that the perspective matrix and
    // the video background can be properly adjusted.
    displayRotationHelper.updateSessionIfNeeded(session);

    try {
        session.setCameraTextureName(backgroundRenderer.getTextureId());

        // Obtain the current frame from ARSession. When the configuration is set to
        // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
        // camera framerate.
        Frame frame = session.update();
        Camera camera = frame.getCamera();

        // Handle taps. Handling only one tap per frame, as taps are usually low frequency
        // compared to frame rate.

        MotionEvent tap = queuedSingleTaps.poll();
        if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
            for (HitResult hit : frame.hitTest(tap)) {
                // Check if any plane was hit, and if it was hit inside the plane polygon
                Trackable trackable = hit.getTrackable();
                // Creates an anchor if a plane or an oriented point was hit.
                if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
                    || (trackable instanceof Point
                    && ((Point) trackable).getOrientationMode()
                    == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
                    // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
                    // Cap the number of objects created. This avoids overloading both the
                    // rendering system and ARCore.
                    if (anchors.size() >= 20) {
                        anchors.get(0).detach();
                        anchors.remove(0);
                    }
                    // Adding an Anchor tells ARCore that it should track this position in
                    // space. This anchor is created on the Plane to place the 3D model
                    // in the correct position relative both to the world and to the plane.
                    anchors.add(hit.createAnchor());
                    break;
                }
            }
        }


        // Draw the background, which is what the camera is actually capturing.
        backgroundRenderer.draw(frame);

        // Check if we detected at least one plane. If so, hide the loading message.
        if (messageSnackbar != null) {
            for (Plane plane : session.getAllTrackables(Plane.class)) {
                if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
                    && plane.getTrackingState() == TrackingState.TRACKING) {
                    hideLoadingMessage();
                    break;
                }
            }
        }


        // If not tracking, don't draw 3d objects.
        if (camera.getTrackingState() == TrackingState.PAUSED) {
            return;
        }
        // Get projection matrix.
        float[] projmtx = new float[16];
        camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

        // Get camera matrix and draw.
        float[] viewmtx = new float[16];
        camera.getViewMatrix(viewmtx, 0);

        // Visualize anchors created by touch.
        // ie get the anchors from taps and now draw a cube for each place.
        float scaleFactor = 1.0f;
        for (Anchor anchor : anchors) {
            if (anchor.getTrackingState() != TrackingState.TRACKING) {
                continue;
            }
            // Get the current pose of an Anchor in world space. The Anchor pose is updated
            // during calls to session.update() as ARCore refines its estimate of the world.
            anchor.getPose().toMatrix(anchorMatrix, 0);

            // Update and draw the model and its shadow.
            mCube.updateModelMatrix(anchorMatrix, scaleFactor);
            mCube.draw(viewmtx, projmtx);
        }

    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}
 
Example #9
Source File: BackgroundRenderer.java    From augmentedreality with Apache License 2.0
/**
 * Draws the AR background image. The image will be drawn such that virtual content rendered with
 * the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)} and
 * {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will
 * accurately follow static physical objects. This must be called <b>before</b> drawing virtual
 * content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {
  // If display rotation changed (also includes view size change), we need to re-query the uv
  // coordinates for the screen rect, as they may have changed as well.
  if (frame.hasDisplayGeometryChanged()) {
    frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
  }

  // No need to test or write depth, the screen quad has arbitrary depth, and is expected
  // to be drawn first.
  GLES20.glDisable(GLES20.GL_DEPTH_TEST);
  GLES20.glDepthMask(false);

  GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);

  GLES20.glUseProgram(quadProgram);

  // Set the vertex positions.
  GLES20.glVertexAttribPointer(
      quadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadVertices);

  // Set the texture coordinates.
  GLES20.glVertexAttribPointer(
      quadTexCoordParam,
      TEXCOORDS_PER_VERTEX,
      GLES20.GL_FLOAT,
      false,
      0,
      quadTexCoordTransformed);

  // Enable vertex arrays
  GLES20.glEnableVertexAttribArray(quadPositionParam);
  GLES20.glEnableVertexAttribArray(quadTexCoordParam);

  GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

  // Disable vertex arrays
  GLES20.glDisableVertexAttribArray(quadPositionParam);
  GLES20.glDisableVertexAttribArray(quadTexCoordParam);

  // Restore the depth state for further drawing.
  GLES20.glDepthMask(true);
  GLES20.glEnable(GLES20.GL_DEPTH_TEST);

  ShaderUtil.checkGLError(TAG, "Draw");
}
 
Example #10
Source File: ARVelocityActivity.java    From science-journal with Apache License 2.0
private void onUpdateFrame(FrameTime frameTime) {
  Frame frame = arFragment.getArSceneView().getArFrame();

  // If there is no frame or ARCore is not tracking yet, just return.
  if (frame == null || frame.getCamera().getTrackingState() != TrackingState.TRACKING) {
    return;
  }

  Collection<AugmentedImage> updatedAugmentedImages =
      frame.getUpdatedTrackables(AugmentedImage.class);
  for (AugmentedImage augmentedImage : updatedAugmentedImages) {
    switch (augmentedImage.getTrackingState()) {
      case PAUSED:
        // When an image is in the PAUSED state, it has been detected, but not yet tracked.
        velocityText.setVisibility(View.VISIBLE);
        velocityText.setText(getResources().getString(R.string.ar_detecting_image));
        break;

      case TRACKING:
        if (augmentedImage.getTrackingMethod() == TrackingMethod.FULL_TRACKING) {
          fitToScanView.setVisibility(View.GONE);

          // Create a new anchor for newly found images.
          if (!augmentedImageSet.contains(augmentedImage)) {
            arFragment.getArSceneView().getSession().createAnchor(augmentedImage.getCenterPose());
            augmentedImageSet.add(augmentedImage);
          }
          averageVelocityEveryFrame(augmentedImage.getCenterPose(), frameTime.getDeltaSeconds());
          snapshotButton.setVisibility(View.VISIBLE);
          recordButton.setVisibility(View.VISIBLE);
        } else {
          lastPos = null;
          positions = new ArrayList<>();
          currIndex = 0;
          velocityText.setText(getResources().getString(R.string.ar_not_tracking));
          snapshotButton.setVisibility(View.GONE);
          recordButton.setVisibility(View.GONE);
        }
        break;

      case STOPPED:
        velocityText.setVisibility(View.INVISIBLE);
        augmentedImageSet.remove(augmentedImage);
        break;
    }
  }
}
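averageVelocityEveryFrame(...) is not shown. Judging from the lastPos, positions and currIndex fields used above, it plausibly keeps a rolling window of per-frame speeds; a hedged sketch (the actual science-journal implementation may differ, and WINDOW_SIZE is assumed):

private static final int WINDOW_SIZE = 10; // assumed smoothing window

private void averageVelocityEveryFrame(Pose currentPose, float deltaSeconds) {
    if (lastPos != null && deltaSeconds > 0) {
        float dx = currentPose.tx() - lastPos.tx();
        float dy = currentPose.ty() - lastPos.ty();
        float dz = currentPose.tz() - lastPos.tz();
        float speed = (float) Math.sqrt(dx * dx + dy * dy + dz * dz) / deltaSeconds;

        // Average over the last WINDOW_SIZE samples to smooth jitter.
        if (positions.size() < WINDOW_SIZE) {
            positions.add(speed);
        } else {
            positions.set(currIndex, speed);
        }
        currIndex = (currIndex + 1) % WINDOW_SIZE;

        float sum = 0;
        for (float s : positions) {
            sum += s;
        }
        velocityText.setText(
                String.format(Locale.US, "%.2f m/s", sum / positions.size()));
    }
    lastPos = currentPose;
}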
 
Example #11
Source File: BackgroundRenderer.java    From ar-drawing-java with Apache License 2.0
/**
 * Draws the AR background image.  The image will be drawn such that virtual content rendered
 * with the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)} and
 * {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will accurately follow
 * static physical objects.  This must be called <b>before</b> drawing virtual content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {

    if (frame == null) {
        return;
    }

    // If display rotation changed (also includes view size change), we need to re-query the uv
    // coordinates for the screen rect, as they may have changed as well.
    if (frame.hasDisplayGeometryChanged()) {
        frame.transformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);
    }

    // No need to test or write depth, the screen quad has arbitrary depth, and is expected
    // to be drawn first.
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glDepthMask(false);

    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);

    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
        mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);

    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.glDepthMask(true);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    ShaderUtil.checkGLError(TAG, "Draw");
}
 
Example #12
Source File: BackgroundRenderer.java    From poly-sample-android with Apache License 2.0
/**
 * Draws the AR background image. The image will be drawn such that virtual content rendered with
 * the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)} and
 * {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will
 * accurately follow static physical objects. This must be called <b>before</b> drawing virtual
 * content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {
  // If display rotation changed (also includes view size change), we need to re-query the uv
  // coordinates for the screen rect, as they may have changed as well.
  if (frame.hasDisplayGeometryChanged()) {
    frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
  }

  // No need to test or write depth, the screen quad has arbitrary depth, and is expected
  // to be drawn first.
  GLES20.glDisable(GLES20.GL_DEPTH_TEST);
  GLES20.glDepthMask(false);

  GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);

  GLES20.glUseProgram(quadProgram);

  // Set the vertex positions.
  GLES20.glVertexAttribPointer(
      quadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadVertices);

  // Set the texture coordinates.
  GLES20.glVertexAttribPointer(
      quadTexCoordParam,
      TEXCOORDS_PER_VERTEX,
      GLES20.GL_FLOAT,
      false,
      0,
      quadTexCoordTransformed);

  // Enable vertex arrays
  GLES20.glEnableVertexAttribArray(quadPositionParam);
  GLES20.glEnableVertexAttribArray(quadTexCoordParam);

  GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

  // Disable vertex arrays
  GLES20.glDisableVertexAttribArray(quadPositionParam);
  GLES20.glDisableVertexAttribArray(quadTexCoordParam);

  // Restore the depth state for further drawing.
  GLES20.glDepthMask(true);
  GLES20.glEnable(GLES20.GL_DEPTH_TEST);

  ShaderUtil.checkGLError(TAG, "Draw");
}
 
Example #13
Source File: HelloArActivity.java    From poly-sample-android with Apache License 2.0
@Override
public void onDrawFrame(GL10 gl) {
  // Clear screen to notify driver it should not load any pixels from previous frame.
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

  // If we are ready to import the object and haven't done so yet, do it now.
  if (mReadyToImport && virtualObject == null) {
    importDownloadedObject();
  }

  if (session == null) {
    return;
  }
  // Notify ARCore session that the view size changed so that the perspective matrix and
  // the video background can be properly adjusted.
  displayRotationHelper.updateSessionIfNeeded(session);

  try {
    session.setCameraTextureName(backgroundRenderer.getTextureId());

    // Obtain the current frame from ARSession. When the configuration is set to
    // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
    // camera framerate.
    Frame frame = session.update();
    Camera camera = frame.getCamera();

    // Handle one tap per frame.
    handleTap(frame, camera);

    // Draw background.
    backgroundRenderer.draw(frame);

    // If not tracking, don't draw 3d objects.
    if (camera.getTrackingState() == TrackingState.PAUSED) {
      return;
    }

    // Get projection matrix.
    float[] projmtx = new float[16];
    camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

    // Get camera matrix and draw.
    float[] viewmtx = new float[16];
    camera.getViewMatrix(viewmtx, 0);

    // Compute lighting from average intensity of the image.
    // The first three components are color scaling factors.
    // The last one is the average pixel intensity in gamma space.
    final float[] colorCorrectionRgba = new float[4];
    frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);

    // Visualize tracked points.
    PointCloud pointCloud = frame.acquirePointCloud();
    pointCloudRenderer.update(pointCloud);
    pointCloudRenderer.draw(viewmtx, projmtx);

    // Application is responsible for releasing the point cloud resources after
    // using it.
    pointCloud.release();

    // Check if we detected at least one plane. If so, hide the loading message.
    if (messageSnackbarHelper.isShowing()) {
      for (Plane plane : session.getAllTrackables(Plane.class)) {
        if (plane.getTrackingState() == TrackingState.TRACKING) {
          messageSnackbarHelper.hide(this);
          break;
        }
      }
    }

    // Visualize planes.
    planeRenderer.drawPlanes(
        session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);

    // Visualize anchors created by touch.
    float scaleFactor = 1.0f;
    for (Anchor anchor : anchors) {
      if (anchor.getTrackingState() != TrackingState.TRACKING) {
        continue;
      }
      // Get the current pose of an Anchor in world space. The Anchor pose is updated
      // during calls to session.update() as ARCore refines its estimate of the world.
      anchor.getPose().toMatrix(anchorMatrix, 0);

      // Update and draw the model.
      if (virtualObject != null) {
        virtualObject.updateModelMatrix(anchorMatrix, ASSET_SCALE * scaleFactor);
        virtualObject.draw(viewmtx, projmtx, colorCorrectionRgba);

        // If we haven't shown the attribution toast yet, do it now.
        if (!mShowedAttributionToast) {
          showAttributionToast();
        }
      }
    }

  } catch (Throwable t) {
    // Avoid crashing the application due to unhandled exceptions.
    Log.e(TAG, "Exception on the OpenGL thread", t);
  }
}
 
Example #14
Source File: LocationScene.java    From ARCore-Location with MIT License
public void refreshAnchorsIfRequired(Frame frame) {
    if (anchorsNeedRefresh) {
        anchorsNeedRefresh = false;

        for (int i = 0; i < mLocationMarkers.size(); i++) {
            try {

                int markerDistance = (int) Math.round(
                        LocationUtils.distance(
                                mLocationMarkers.get(i).latitude,
                                deviceLocation.currentBestLocation.getLatitude(),
                                mLocationMarkers.get(i).longitude,
                                deviceLocation.currentBestLocation.getLongitude(),
                                0,
                                0)
                );

                float markerBearing = deviceOrientation.currentDegree + (float) LocationUtils.bearing(
                        deviceLocation.currentBestLocation.getLatitude(),
                        deviceLocation.currentBestLocation.getLongitude(),
                        mLocationMarkers.get(i).latitude,
                        mLocationMarkers.get(i).longitude);

                markerBearing = markerBearing + bearingAdjustment;
                markerBearing = markerBearing % 360;

                double rotation = Math.floor(markerBearing);
                rotation = rotation * Math.PI / 180;

                int renderDistance = markerDistance;

                // Limit the distance of the Anchor within the scene.
                // Prevents rendering issues.
                if (renderDistance > distanceLimit)
                    renderDistance = distanceLimit;

                // Adjustment to add markers on horizon, instead of just directly in front of camera
                double heightAdjustment = Math.round(renderDistance * (Math.tan(Math.toRadians(deviceOrientation.pitch))));

                // Raise distant markers for a better illusion of distance.
                // Hacky, but it works as a temporary measure.
                int cappedRealDistance = markerDistance > 500 ?  500 : markerDistance;
                if (renderDistance != markerDistance)
                    heightAdjustment += 0.01F * (cappedRealDistance - renderDistance);

                float x = 0;
                float z = -renderDistance;

                float zRotated = (float) (z * Math.cos(rotation) - x * Math.sin(rotation));
                float xRotated = (float) -(z * Math.sin(rotation) + x * Math.cos(rotation));

                // Current camera height
                float y = frame.getCamera().getDisplayOrientedPose().ty();

                // Don't immediately assign the newly created anchor, in case of exceptions.
                Anchor newAnchor = mSession.createAnchor(
                        frame.getCamera().getPose()
                                .compose(Pose.makeTranslation(xRotated, y + (float) heightAdjustment, zRotated)));

                mLocationMarkers.get(i).anchor = newAnchor;

                mLocationMarkers.get(i).renderer.createOnGlThread(mContext, markerDistance);

            } catch (Exception e) {
                e.printStackTrace();
            }

        }
    }
}
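LocationUtils.bearing(...) is used above but not shown. A sketch of the standard initial-bearing (forward azimuth) formula it presumably implements (the library's actual version may differ in signature or rounding):

public static double bearing(double lat1, double lon1, double lat2, double lon2) {
    double phi1 = Math.toRadians(lat1);
    double phi2 = Math.toRadians(lat2);
    double deltaLambda = Math.toRadians(lon2 - lon1);

    double y = Math.sin(deltaLambda) * Math.cos(phi2);
    double x = Math.cos(phi1) * Math.sin(phi2)
            - Math.sin(phi1) * Math.cos(phi2) * Math.cos(deltaLambda);

    // Normalize atan2's (-180, 180] result to [0, 360) degrees.
    return (Math.toDegrees(Math.atan2(y, x)) + 360) % 360;
}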
 
Example #15
Source File: LocationScene.java    From ARCore-Location with MIT License
public void drawMarkers(Frame frame) {
    for (LocationMarker locationMarker : mLocationMarkers) {

        try {
            // Get the current pose of an Anchor in world space. The Anchor pose is updated
            // during calls to session.update() as ARCore refines its estimate of the world.

            float[] translation = new float[3];
            float[] rotation = new float[4];
            locationMarker.anchor.getPose().getTranslation(translation, 0);
            frame.getCamera().getPose().getRotationQuaternion(rotation, 0);

            Pose rotatedPose = new Pose(translation, rotation);
            rotatedPose.toMatrix(mAnchorMatrix, 0);

            int markerDistance = (int) Math.ceil(
                    uk.co.appoly.arcorelocation.utils.LocationUtils.distance(
                            locationMarker.latitude,
                            deviceLocation.currentBestLocation.getLatitude(),
                            locationMarker.longitude,
                            deviceLocation.currentBestLocation.getLongitude(),
                            0,
                            0)
            );

            // Limit the distance of the Anchor within the scene.
            // Prevents rendering issues.
            int renderDistance = markerDistance;
            if (renderDistance > distanceLimit)
                renderDistance = distanceLimit;


            float[] projectionMatrix = new float[16];
            frame.getCamera().getProjectionMatrix(projectionMatrix, 0, 0.1f, 100.0f);

            // Get camera matrix and draw.
            float[] viewMatrix = new float[16];
            frame.getCamera().getViewMatrix(viewMatrix, 0);

            // Make sure marker stays the same size on screen, no matter the distance
            float scale = 3.0F / 10.0F * (float) renderDistance;

            // Distant markers a little smaller
            if(markerDistance > 3000)
                scale *= 0.75F;

            // Compute lighting from average intensity of the image.
            final float lightIntensity = frame.getLightEstimate().getPixelIntensity();

            locationMarker.renderer.updateModelMatrix(mAnchorMatrix, scale);
            locationMarker.renderer.draw(viewMatrix, projectionMatrix, lightIntensity);

        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
Example #16
Source File: LocationScene.java    From ARCore-Location with MIT License
public void processFrame(Frame frame) {
    refreshAnchorsIfRequired(frame);
}
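For context, this is how the LocationScene entry points are typically wired into a renderer; a sketch assuming the surrounding GLSurfaceView.Renderer owns the ARCore Session, a background renderer, and the LocationScene instance (names outside the ARCore API are illustrative):

@Override
public void onDrawFrame(GL10 gl) {
    try {
        Frame frame = session.update();
        backgroundRenderer.draw(frame);
        if (frame.getCamera().getTrackingState() == TrackingState.TRACKING) {
            // draw(frame) refreshes anchors if flagged and renders the
            // markers (see Examples #14, #15 and #20).
            locationScene.draw(frame);
        }
    } catch (Throwable t) {
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}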
 
Example #17
Source File: BackgroundRenderer.java    From react-native-arcore with MIT License
/**
 * Draws the AR background image.  The image will be drawn such that virtual content rendered
 * with the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)}
 * and {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will
 * accurately follow static physical objects.
 * This must be called <b>before</b> drawing virtual content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {
    // If display rotation changed (also includes view size change), we need to re-query the uv
    // coordinates for the screen rect, as they may have changed as well.
    if (frame.hasDisplayGeometryChanged()) {
        frame.transformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);
    }

    // No need to test or write depth, the screen quad has arbitrary depth, and is expected
    // to be drawn first.
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glDepthMask(false);

    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);

    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
        mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);

    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.glDepthMask(true);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    ShaderUtil.checkGLError(TAG, "Draw");
}
 
Example #18
Source File: SumerianConnector.java    From amazon-sumerian-arcore-starter-app with Apache License 2.0
void update() {
    final Frame frame;
    try {
        frame = mSession.update();
    } catch (CameraNotAvailableException e) {
        e.printStackTrace();
        return;
    }
    final Camera camera = frame.getCamera();

    if (camera.getTrackingState() == TrackingState.PAUSED) {
        return;
    }

    camera.getViewMatrix(mViewMatrix, 0);
    camera.getProjectionMatrix(mProjectionMatrix, 0, 0.02f, 20.0f);

    final String cameraUpdateString = "ARCoreBridge.viewProjectionMatrixUpdate('" + serializeArray(mViewMatrix) +"', '"+ serializeArray(mProjectionMatrix) + "');";
    evaluateWebViewJavascript(cameraUpdateString);

    HashMap<String, float[]> anchorMap = new HashMap<>();

    for (Anchor anchor : mSession.getAllAnchors()) {
        if (anchor.getTrackingState() != TrackingState.TRACKING) {
            continue;
        }

        final float[] anchorPoseMatrix = new float[16];
        anchor.getPose().toMatrix(anchorPoseMatrix, 0);
        anchorMap.put(String.valueOf(anchor.hashCode()), anchorPoseMatrix);
    }

    if (anchorMap.size() > 0) {
        JSONObject jsonAnchors = new JSONObject(anchorMap);
        final String anchorUpdateScript = "ARCoreBridge.anchorTransformUpdate('" + jsonAnchors.toString() + "');";
        evaluateWebViewJavascript(anchorUpdateScript);
    }

    if (frame.getLightEstimate().getState() != LightEstimate.State.NOT_VALID) {
        final float[] colorCorrectionRgba = new float[4];
        frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);
        
        final String lightEstimateUpdateScript = "ARCoreBridge.lightingEstimateUpdate(" +
                String.valueOf(frame.getLightEstimate().getPixelIntensity()) + ", " +
                convertRgbaToTemperature(colorCorrectionRgba) + ");";
        evaluateWebViewJavascript(lightEstimateUpdateScript);
    }

    // Image Recognition
    Collection<AugmentedImage> updatedAugmentedImages = frame.getUpdatedTrackables(AugmentedImage.class);
    for (AugmentedImage img : updatedAugmentedImages) {
        if (img.getTrackingState() == TrackingState.TRACKING) {
            if (img.getName().equals("SumerianAnchorImage")) {
                imageAnchorCreated(img);
            }
        }
    }
}
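serializeArray(...) is referenced but not shown. A plausible implementation, assuming a comma-separated format that the Sumerian-side JavaScript parses (the starter app's real helper may differ):

private String serializeArray(float[] values) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < values.length; i++) {
        if (i > 0) {
            sb.append(',');
        }
        sb.append(values[i]);
    }
    return sb.toString();
}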
 
Example #19
Source File: BackgroundRenderer.java    From amazon-sumerian-arcore-starter-app with Apache License 2.0
/**
 * Draws the AR background image.  The image will be drawn such that virtual content rendered
 * with the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)}
 * and {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will
 * accurately follow static physical objects.
 * This must be called <b>before</b> drawing virtual content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {
    // If display rotation changed (also includes view size change), we need to re-query the uv
    // coordinates for the screen rect, as they may have changed as well.
    if (frame.hasDisplayGeometryChanged()) {
        frame.transformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);
    }

    // No need to test or write depth, the screen quad has arbitrary depth, and is expected
    // to be drawn first.
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glDepthMask(false);

    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);

    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);

    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.glDepthMask(true);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    ShaderUtil.checkGLError(TAG, "Draw");
}
 
Example #20
Source File: LocationScene.java    From ARCore-Location with MIT License
public void draw(Frame frame) {

    // Refresh the anchors in the scene.
    // Needs to occur in the draw method, as we need details about the camera.
    refreshAnchorsIfRequired(frame);

    // Draw each anchor with its individual renderer.
    drawMarkers(frame);
}