Java Code Examples for com.google.api.gax.longrunning.OperationFuture#get()

The following examples show how to use com.google.api.gax.longrunning.OperationFuture#get(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: VisionClassificationDeployModelNodeCount.java    From java-docs-samples with Apache License 2.0 6 votes vote down vote up
static void visionClassificationDeployModelNodeCount(String projectId, String modelId)
    throws IOException, ExecutionException, InterruptedException {
  // The client is created once and closed automatically by try-with-resources;
  // it can be reused for any number of requests in between.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Fully-qualified resource name of the model to deploy.
    ModelName name = ModelName.of(projectId, "us-central1", modelId);

    // Request that the model be deployed on two nodes.
    ImageClassificationModelDeploymentMetadata deploymentMetadata =
        ImageClassificationModelDeploymentMetadata.newBuilder().setNodeCount(2).build();
    DeployModelRequest deployRequest =
        DeployModelRequest.newBuilder()
            .setName(name.toString())
            .setImageClassificationModelDeploymentMetadata(deploymentMetadata)
            .build();

    // Start the long-running deployment and block until it completes.
    OperationFuture<Empty, OperationMetadata> operation = client.deployModelAsync(deployRequest);
    operation.get();
    System.out.println("Model deployment finished");
  }
}
 
Example 2
Source File: DeleteGlossary.java    From java-docs-samples with Apache License 2.0 6 votes vote down vote up
public static void deleteGlossary(String projectId, String glossaryId)
    throws InterruptedException, ExecutionException, IOException {

  // The client is created once and closed automatically; it may serve many
  // requests during its lifetime.
  try (TranslationServiceClient client = TranslationServiceClient.create()) {
    // Glossaries must be hosted in `us-central1`; custom models must use the
    // same location as the model. Build the glossary's resource name.
    GlossaryName name = GlossaryName.of(projectId, "us-central1", glossaryId);
    DeleteGlossaryRequest deleteRequest =
        DeleteGlossaryRequest.newBuilder().setName(name.toString()).build();

    // Kick off the long-running delete and wait for it to finish.
    OperationFuture<DeleteGlossaryResponse, DeleteGlossaryMetadata> operation =
        client.deleteGlossaryAsync(deleteRequest);

    System.out.println("Waiting for operation to complete...");
    DeleteGlossaryResponse result = operation.get();
    System.out.format("Deleted Glossary: %s\n", result.getName());
  }
}
 
Example 3
Source File: CreateClusterWithAutoscalingTest.java    From java-docs-samples with Apache License 2.0 6 votes vote down vote up
@After
public void tearDown() throws IOException, InterruptedException, ExecutionException {
  // Regional Dataproc endpoint shared by both clients.
  String endpoint = String.format("%s-dataproc.googleapis.com:443", REGION);

  ClusterControllerSettings clusterSettings =
      ClusterControllerSettings.newBuilder().setEndpoint(endpoint).build();
  AutoscalingPolicyServiceSettings policySettings =
      AutoscalingPolicyServiceSettings.newBuilder().setEndpoint(endpoint).build();

  try (ClusterControllerClient clusterClient =
          ClusterControllerClient.create(clusterSettings);
      AutoscalingPolicyServiceClient policyClient =
          AutoscalingPolicyServiceClient.create(policySettings)) {

    // Delete the test cluster first and wait for the operation to complete.
    OperationFuture<Empty, ClusterOperationMetadata> deleteOperation =
        clusterClient.deleteClusterAsync(PROJECT_ID, REGION, CLUSTER_NAME);
    deleteOperation.get();

    // Then remove the autoscaling policy the cluster was using.
    AutoscalingPolicyName policyName =
        AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName(
            PROJECT_ID, REGION, AUTOSCALING_POLICY_NAME);
    policyClient.deleteAutoscalingPolicy(policyName);
  }
}
 
Example 4
Source File: QuickstartTest.java    From java-docs-samples with Apache License 2.0 6 votes vote down vote up
@After
public void teardown() throws IOException, InterruptedException, ExecutionException {
  // Remove the storage objects created by the test.
  blob.delete();
  bucket.delete();

  ClusterControllerSettings clusterControllerSettings =
      ClusterControllerSettings.newBuilder().setEndpoint(ENDPOINT).build();

  // Delete the test cluster if it still exists.
  try (ClusterControllerClient clusterControllerClient =
      ClusterControllerClient.create(clusterControllerSettings)) {
    for (Cluster element :
        clusterControllerClient.listClusters(PROJECT_ID, REGION).iterateAll()) {
      // BUG FIX: the original compared strings with `==`, which is a reference
      // comparison and would almost never match a value returned by the API,
      // silently skipping the cluster deletion. Use equals() instead.
      if (CLUSTER_NAME.equals(element.getClusterName())) {
        OperationFuture<Empty, ClusterOperationMetadata> deleteClusterAsyncRequest =
            clusterControllerClient.deleteClusterAsync(PROJECT_ID, REGION, CLUSTER_NAME);
        // Block until the delete finishes so later tests start clean.
        deleteClusterAsyncRequest.get();
        break;
      }
    }
  }
}
 
Example 5
Source File: UndeployModel.java    From java-docs-samples with Apache License 2.0 6 votes vote down vote up
static void undeployModel(String projectId, String modelId)
    throws IOException, ExecutionException, InterruptedException {
  // The client is created once, reused for any number of requests, and
  // closed automatically by try-with-resources.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Fully-qualified resource name of the model to take down.
    ModelName name = ModelName.of(projectId, "us-central1", modelId);
    UndeployModelRequest undeployRequest =
        UndeployModelRequest.newBuilder().setName(name.toString()).build();

    // Start the long-running undeploy operation and block until it is done.
    OperationFuture<Empty, OperationMetadata> operation =
        client.undeployModelAsync(undeployRequest);
    operation.get();
    System.out.println("Model undeployment finished");
  }
}
 
Example 6
Source File: ClassificationDeployModelNodeCount.java    From java-docs-samples with Apache License 2.0 5 votes vote down vote up
static void classificationDeployModelNodeCount(String projectId, String modelId)
    throws IOException, ExecutionException, InterruptedException {
  // String projectId = "YOUR_PROJECT_ID";
  // String modelId = "YOUR_MODEL_ID";

  // Create the client once; try-with-resources closes it when finished, and
  // it may be reused for many requests in between.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Fully-qualified resource name of the model.
    ModelName name = ModelName.of(projectId, "us-central1", modelId);

    // Deploy the model on two nodes.
    ImageClassificationModelDeploymentMetadata nodeCountMetadata =
        ImageClassificationModelDeploymentMetadata.newBuilder().setNodeCount(2).build();
    DeployModelRequest deployRequest =
        DeployModelRequest.newBuilder()
            .setName(name.toString())
            .setImageClassificationModelDeploymentMetadata(nodeCountMetadata)
            .build();

    // Start the long-running deployment and wait for it to complete.
    OperationFuture<Empty, OperationMetadata> operation = client.deployModelAsync(deployRequest);
    operation.get();
    System.out.println("Model deployment on 2 nodes finished");
  }
}
 
Example 7
Source File: ImportDataset.java    From java-docs-samples with Apache License 2.0 5 votes vote down vote up
static void importDataset(String projectId, String datasetId, String path)
    throws IOException, ExecutionException, InterruptedException, TimeoutException {
  // Create the client once; try-with-resources closes it automatically.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Fully-qualified resource name of the target dataset.
    DatasetName datasetName = DatasetName.of(projectId, "us-central1", datasetId);

    // `path` may contain several comma-separated Cloud Storage URIs.
    GcsSource source =
        GcsSource.newBuilder().addAllInputUris(Arrays.asList(path.split(","))).build();
    InputConfig importConfig = InputConfig.newBuilder().setGcsSource(source).build();

    System.out.println("Processing import...");

    // Kick off the long-running import job.
    OperationFuture<Empty, OperationMetadata> operation =
        client.importDataAsync(datasetName, importConfig);

    System.out.format("Operation name: %s%n", operation.getName());

    // Wait up to 45 minutes. The server-side job keeps running even if this
    // wait times out, and its status can be polled by operation name.
    Empty result = operation.get(45, TimeUnit.MINUTES);
    System.out.format("Dataset imported. %s%n", result);
  } catch (TimeoutException e) {
    System.out.println("The operation's polling period was not long enough.");
    System.out.println("You can use the Operation's name to get the current status.");
    System.out.println("The import job is still running and will complete as expected.");
    throw e;
  }
}
 
Example 8
Source File: ImportProductSets.java    From java-docs-samples with Apache License 2.0 5 votes vote down vote up
/**
 * Imports images of different products into the product set.
 *
 * @param projectId - Id of the project.
 * @param computeRegion - Region name.
 * @param gcsUri - Google Cloud Storage URI. Target files must be in Product Search CSV format.
 * @throws Exception - on client errors.
 */
public static void importProductSets(String projectId, String computeRegion, String gcsUri)
    throws Exception {
  try (ProductSearchClient client = ProductSearchClient.create()) {

    // Parent resource representing the Google Cloud Platform location.
    String parent = ProductSearchClient.formatLocationName(projectId, computeRegion);

    // Point the import at the CSV file stored in Cloud Storage.
    Builder csvSource = ImportProductSetsGcsSource.newBuilder().setCsvFileUri(gcsUri);
    ImportProductSetsInputConfig importConfig =
        ImportProductSetsInputConfig.newBuilder().setGcsSource(csvSource).build();

    // Kick off the long-running import of the product sets.
    OperationFuture<ImportProductSetsResponse, BatchOperationMetadata> operation =
        client.importProductSetsAsync(parent, importConfig);

    System.out.println(String.format("Processing operation name: %s", operation.getName()));
    ImportProductSetsResponse importResponse = operation.get();
    System.out.println("Processing done.");
    System.out.println("Results of the processing:");

    // Report the per-line status; a code of 0 (OK) means a reference image
    // was created for that CSV line.
    for (int line = 0; line < importResponse.getStatusesCount(); line++) {
      System.out.println(
          String.format(
              "Status of processing line %s of the csv: %s",
              line, importResponse.getStatuses(line)));
      if (importResponse.getStatuses(line).getCode() == 0) {
        ReferenceImage referenceImage = importResponse.getReferenceImages(line);
        System.out.println(referenceImage);
      } else {
        System.out.println("No reference image.");
      }
    }
  }
}
 
Example 9
Source File: LanguageSentimentAnalysisCreateDataset.java    From java-docs-samples with Apache License 2.0 5 votes vote down vote up
static void createDataset(String projectId, String displayName)
    throws IOException, ExecutionException, InterruptedException {
  // The client is created once and closed automatically when the try block
  // exits; it can serve many requests in between.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Project location that will own the new dataset.
    LocationName parent = LocationName.of(projectId, "us-central1");

    // Configure sentiment metadata for the dataset; the maximum sentiment
    // score may range from 1 to 10 — this sample uses 4.
    TextSentimentDatasetMetadata sentimentMetadata =
        TextSentimentDatasetMetadata.newBuilder().setSentimentMax(4).build();
    Dataset datasetSpec =
        Dataset.newBuilder()
            .setDisplayName(displayName)
            .setTextSentimentDatasetMetadata(sentimentMetadata)
            .build();

    // Create the dataset and wait for the long-running operation to finish.
    OperationFuture<Dataset, OperationMetadata> operation =
        client.createDatasetAsync(parent, datasetSpec);
    Dataset created = operation.get();

    // Display the dataset information.
    System.out.format("Dataset name: %s\n", created.getName());
    // The dataset id — required by other methods — is the trailing component
    // of the name: `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`
    String[] segments = created.getName().split("/");
    String datasetId = segments[segments.length - 1];
    System.out.format("Dataset id: %s\n", datasetId);
  }
}
 
Example 10
Source File: BatchPredict.java    From java-docs-samples with Apache License 2.0 5 votes vote down vote up
static void batchPredict(String projectId, String modelId, String inputUri, String outputUri)
    throws IOException, ExecutionException, InterruptedException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (PredictionServiceClient client = PredictionServiceClient.create()) {
    // Get the full path of the model.
    ModelName name = ModelName.of(projectId, "us-central1", modelId);

    // Configure the source of the file from a GCS bucket
    GcsSource gcsSource = GcsSource.newBuilder().addInputUris(inputUri).build();
    BatchPredictInputConfig inputConfig =
        BatchPredictInputConfig.newBuilder().setGcsSource(gcsSource).build();

    // Configure where to store the output in a GCS bucket
    GcsDestination gcsDestination =
        GcsDestination.newBuilder().setOutputUriPrefix(outputUri).build();
    BatchPredictOutputConfig outputConfig =
        BatchPredictOutputConfig.newBuilder().setGcsDestination(gcsDestination).build();

    // Build the request that will be sent to the API
    BatchPredictRequest request =
        BatchPredictRequest.newBuilder()
            .setName(name.toString())
            .setInputConfig(inputConfig)
            .setOutputConfig(outputConfig)
            .build();

    // Start an asynchronous request
    OperationFuture<BatchPredictResult, OperationMetadata> future =
        client.batchPredictAsync(request);

    System.out.println("Waiting for operation to complete...");
    // Block until the prediction finishes. The result payload is not needed
    // here because the predictions are written to the GCS destination; the
    // original code bound it to an unused local variable.
    future.get();
    System.out.println("Batch Prediction results saved to specified Cloud Storage bucket.");
  }
}
 
Example 11
Source File: ObjectDetectionDeployModelNodeCount.java    From java-docs-samples with Apache License 2.0 5 votes vote down vote up
static void objectDetectionDeployModelNodeCount(String projectId, String modelId)
    throws IOException, ExecutionException, InterruptedException {
  // String projectId = "YOUR_PROJECT_ID";
  // String modelId = "YOUR_MODEL_ID";

  // Create the client once; try-with-resources closes it when finished, and
  // it may be reused for many requests in between.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Fully-qualified resource name of the model.
    ModelName name = ModelName.of(projectId, "us-central1", modelId);

    // Deploy the model on two nodes.
    ImageObjectDetectionModelDeploymentMetadata nodeCountMetadata =
        ImageObjectDetectionModelDeploymentMetadata.newBuilder().setNodeCount(2).build();
    DeployModelRequest deployRequest =
        DeployModelRequest.newBuilder()
            .setName(name.toString())
            .setImageObjectDetectionModelDeploymentMetadata(nodeCountMetadata)
            .build();

    // Start the long-running deployment and wait for it to complete.
    OperationFuture<Empty, OperationMetadata> operation = client.deployModelAsync(deployRequest);
    operation.get();
    System.out.println("Model deployment on 2 nodes finished");
  }
}
 
Example 12
Source File: SpannerReadIT.java    From beam with Apache License 2.0 5 votes vote down vote up
@Before
public void setUp() throws Exception {
  // Build pipeline options for the Spanner integration test.
  PipelineOptionsFactory.register(SpannerTestPipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(SpannerTestPipelineOptions.class);

  // Fall back to the GCP project when no instance project is configured.
  project = options.getInstanceProjectId();
  if (project == null) {
    project = options.as(GcpOptions.class).getProject();
  }

  spanner = SpannerOptions.newBuilder().setProjectId(project).build().getService();
  databaseName = generateDatabaseName();
  databaseAdminClient = spanner.getDatabaseAdminClient();

  // Drop any leftover database from a previous run before recreating it.
  databaseAdminClient.dropDatabase(options.getInstanceId(), databaseName);

  // Create a fresh database with the single test table and wait for the
  // long-running operation to finish before seeding data.
  String createTableDdl =
      "CREATE TABLE "
          + options.getTable()
          + " ("
          + "  Key           INT64,"
          + "  Value         STRING(MAX),"
          + ") PRIMARY KEY (Key)";
  OperationFuture<Database, CreateDatabaseMetadata> createOp =
      databaseAdminClient.createDatabase(
          options.getInstanceId(), databaseName, Collections.singleton(createTableDdl));
  createOp.get();

  makeTestData();
}
 
Example 13
Source File: ClientLibraryOperations.java    From google-cloud-spanner-hibernate with GNU Lesser General Public License v2.1 5 votes vote down vote up
/**
 * Runs the DDL statement to create a single table.
 *
 * <p>Blocks until the schema update completes; any failure is rethrown as an
 * unchecked {@link RuntimeException} with the original exception as its cause.
 */
public void createSingleTable() {
  List<String> airportDdl = loadDdlStrings(DDL_SMALL);
  OperationFuture<Void, UpdateDatabaseDdlMetadata> ddlFuture =
      this.databaseAdminClient.updateDatabaseDdl(
          INSTANCE_NAME, DATABASE_NAME, airportDdl, null);

  try {
    // Wait for the long-running DDL operation to finish.
    ddlFuture.get();
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers can observe the interruption;
    // the original code swallowed it inside the broad Exception catch.
    Thread.currentThread().interrupt();
    throw new RuntimeException(e);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
 
Example 14
Source File: DetectFacesGcs.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
public static void detectFacesGcs(String gcsUri) throws Exception {
  try (VideoIntelligenceServiceClient videoClient =
      VideoIntelligenceServiceClient.create()) {

    // Bounding boxes must be enabled in order to receive facial attributes.
    FaceDetectionConfig faceConfig =
        FaceDetectionConfig.newBuilder()
            .setIncludeBoundingBoxes(true)
            .setIncludeAttributes(true)
            .build();
    VideoContext context =
        VideoContext.newBuilder().setFaceDetectionConfig(faceConfig).build();

    AnnotateVideoRequest annotateRequest =
        AnnotateVideoRequest.newBuilder()
            .setInputUri(gcsUri)
            .addFeatures(Feature.FACE_DETECTION)
            .setVideoContext(context)
            .build();

    // Run face detection as a long-running operation.
    OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> operation =
        videoClient.annotateVideoAsync(annotateRequest);

    System.out.println("Waiting for operation to complete...");
    AnnotateVideoResponse annotateResponse = operation.get();

    // A single video was submitted, so only the first result is relevant.
    VideoAnnotationResults result = annotateResponse.getAnnotationResultsList().get(0);

    // Walk every detected face and print each track's time span and the
    // attributes of its first timestamped object.
    for (FaceDetectionAnnotation faceAnnotation :
        result.getFaceDetectionAnnotationsList()) {
      System.out.print("Face detected:\n");
      for (Track faceTrack : faceAnnotation.getTracksList()) {
        VideoSegment trackSegment = faceTrack.getSegment();
        System.out.printf(
            "\tStart: %d.%.0fs\n",
            trackSegment.getStartTimeOffset().getSeconds(),
            trackSegment.getStartTimeOffset().getNanos() / 1e6);
        System.out.printf(
            "\tEnd: %d.%.0fs\n",
            trackSegment.getEndTimeOffset().getSeconds(),
            trackSegment.getEndTimeOffset().getNanos() / 1e6);

        // Each segment includes timestamped objects describing the face.
        TimestampedObject firstObject = faceTrack.getTimestampedObjects(0);
        for (DetectedAttribute faceAttribute : firstObject.getAttributesList()) {
          // Attributes include unique pieces of clothing, like glasses,
          // poses, or hair color.
          System.out.printf("\tAttribute: %s;\n", faceAttribute.getName());
        }
      }
    }
  }
}
 
Example 15
Source File: TranscribeDiarizationGcs.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
public static void transcribeDiarizationGcs(String gcsUri)
    throws IOException, ExecutionException, InterruptedException {
  // Create the client once; try-with-resources closes it when finished, and
  // it can be reused for many requests in between.
  try (SpeechClient speechClient = SpeechClient.create()) {
    // Request diarization with exactly two speakers.
    SpeakerDiarizationConfig diarizationConfig =
        SpeakerDiarizationConfig.newBuilder()
            .setEnableSpeakerDiarization(true)
            .setMinSpeakerCount(2)
            .setMaxSpeakerCount(2)
            .build();
    RecognitionConfig recognitionConfig =
        RecognitionConfig.newBuilder()
            .setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
            .setLanguageCode("en-US")
            .setSampleRateHertz(8000)
            .setDiarizationConfig(diarizationConfig)
            .build();
    // The audio file to transcribe lives in Cloud Storage.
    RecognitionAudio recognitionAudio = RecognitionAudio.newBuilder().setUri(gcsUri).build();

    // Transcribe asynchronously via a long-running operation.
    OperationFuture<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> operation =
        speechClient.longRunningRecognizeAsync(recognitionConfig, recognitionAudio);
    System.out.println("Waiting for response...");

    // Speaker tags are only included in the last result object, which has
    // only one alternative.
    LongRunningRecognizeResponse recognizeResponse = operation.get();
    SpeechRecognitionAlternative lastAlternative =
        recognizeResponse.getResults(recognizeResponse.getResultsCount() - 1).getAlternatives(0);

    // Walk the word list, grouping consecutive words by speaker tag and
    // starting a new line whenever the speaker changes.
    WordInfo firstWord = lastAlternative.getWords(0);
    int speakerTag = firstWord.getSpeakerTag();
    StringBuilder transcript =
        new StringBuilder(
            String.format("Speaker %d: %s", firstWord.getSpeakerTag(), firstWord.getWord()));
    for (int i = 1; i < lastAlternative.getWordsCount(); i++) {
      WordInfo word = lastAlternative.getWords(i);
      if (speakerTag == word.getSpeakerTag()) {
        transcript.append(" ").append(word.getWord());
      } else {
        transcript.append(
            String.format("\nSpeaker %d: %s", word.getSpeakerTag(), word.getWord()));
        speakerTag = word.getSpeakerTag();
      }
    }
    System.out.println(transcript.toString());
  }
}
 
Example 16
Source File: ImportData.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
// Imports previously prepared data from Cloud Storage into the given
// Data Labeling dataset and blocks until the import operation completes.
// NOTE: the [START]/[END] region-tag comments delimit the snippet published
// in the documentation — keep them exactly where they are.
static void importData(String datasetName, String gcsSourceUri) throws IOException {
  // String datasetName = DataLabelingServiceClient.formatDatasetName(
  //     "YOUR_PROJECT_ID", "YOUR_DATASETS_UUID");
  // String gcsSourceUri = "gs://YOUR_BUCKET_ID/path_to_data";

  // [END datalabeling_import_data_beta]
  // Allow the service endpoint to be overridden (e.g. for tests) via the
  // DATALABELING_ENDPOINT environment variable; otherwise use the default.
  String endpoint = System.getenv("DATALABELING_ENDPOINT");
  if (endpoint == null) {
    endpoint = DataLabelingServiceSettings.getDefaultEndpoint();
  }
  // [START datalabeling_import_data_beta]

  DataLabelingServiceSettings settings = DataLabelingServiceSettings
      .newBuilder()
      // [END datalabeling_import_data_beta]
      .setEndpoint(endpoint)
      // [START datalabeling_import_data_beta]
      .build();
  try (DataLabelingServiceClient dataLabelingServiceClient =
           DataLabelingServiceClient.create(settings)) {
    // Source CSV file in Cloud Storage describing the items to import.
    GcsSource gcsSource = GcsSource.newBuilder()
        .setInputUri(gcsSourceUri)
        .setMimeType("text/csv")
        .build();

    InputConfig inputConfig = InputConfig.newBuilder()
        .setDataType(DataType.IMAGE) // DataTypes: AUDIO, IMAGE, VIDEO, TEXT
        .setGcsSource(gcsSource)
        .build();

    ImportDataRequest importDataRequest = ImportDataRequest.newBuilder()
        .setName(datasetName)
        .setInputConfig(inputConfig)
        .build();

    // Start the long-running import and wait for it to finish.
    OperationFuture<ImportDataOperationResponse, ImportDataOperationMetadata> operation =
        dataLabelingServiceClient.importDataAsync(importDataRequest);

    ImportDataOperationResponse response = operation.get();

    System.out.format("Imported items: %d\n", response.getImportCount());
  } catch (IOException | InterruptedException | ExecutionException e) {
    // NOTE(review): failures (including InterruptedException) are only printed,
    // and the interrupt flag is not restored — presumably acceptable for sample
    // code, but verify this is intentional before reusing in production.
    e.printStackTrace();
  }
}
 
Example 17
Source File: LogoDetection.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
public static void detectLogo(String filePath)
    throws IOException, ExecutionException, InterruptedException, TimeoutException {
  // Create the client once; try-with-resources closes it when finished, and
  // it may serve many requests in between.
  try (VideoIntelligenceServiceClient client = VideoIntelligenceServiceClient.create()) {
    // Load the video bytes from disk.
    Path videoPath = Paths.get(filePath);
    byte[] videoBytes = Files.readAllBytes(videoPath);

    // Build a logo-recognition request over the inline video content.
    AnnotateVideoRequest annotateRequest =
        AnnotateVideoRequest.newBuilder()
            .setInputContent(ByteString.copyFrom(videoBytes))
            .addFeatures(Feature.LOGO_RECOGNITION)
            .build();

    // Run the annotation as a long-running operation.
    OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> operation =
        client.annotateVideoAsync(annotateRequest);

    System.out.println("Waiting for operation to complete...");
    // A single video was processed, so only the first result is retrieved;
    // give up after five minutes.
    AnnotateVideoResponse annotateResponse = operation.get(300, TimeUnit.SECONDS);
    VideoAnnotationResults result = annotateResponse.getAnnotationResults(0);

    // Walk every logo detected, tracked and recognized in the video.
    for (LogoRecognitionAnnotation logoAnnotation :
        result.getLogoRecognitionAnnotationsList()) {
      Entity logoEntity = logoAnnotation.getEntity();
      // Opaque entity ID. Some IDs may be available in
      // [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/).
      System.out.printf("Entity Id : %s\n", logoEntity.getEntityId());
      System.out.printf("Description : %s\n", logoEntity.getDescription());

      // Each track corresponds to one logo instance appearing in
      // consecutive frames.
      for (Track logoTrack : logoAnnotation.getTracksList()) {
        // Time span of this track within the video.
        Duration trackStart = logoTrack.getSegment().getStartTimeOffset();
        System.out.printf(
            "\n\tStart Time Offset: %s.%s\n",
            trackStart.getSeconds(), trackStart.getNanos());
        Duration trackEnd = logoTrack.getSegment().getEndTimeOffset();
        System.out.printf(
            "\tEnd Time Offset: %s.%s\n", trackEnd.getSeconds(), trackEnd.getNanos());
        System.out.printf("\tConfidence: %s\n", logoTrack.getConfidence());

        // Per-frame timestamped objects with the logo's bounding box.
        for (TimestampedObject tsObject : logoTrack.getTimestampedObjectsList()) {
          NormalizedBoundingBox bbox = tsObject.getNormalizedBoundingBox();
          System.out.printf("\n\t\tLeft: %s\n", bbox.getLeft());
          System.out.printf("\t\tTop: %s\n", bbox.getTop());
          System.out.printf("\t\tRight: %s\n", bbox.getRight());
          System.out.printf("\t\tBottom: %s\n", bbox.getBottom());

          // Optional attributes of the object inside the bounding box.
          for (DetectedAttribute objAttribute : tsObject.getAttributesList()) {
            System.out.printf("\n\t\t\tName: %s\n", objAttribute.getName());
            System.out.printf("\t\t\tConfidence: %s\n", objAttribute.getConfidence());
            System.out.printf("\t\t\tValue: %s\n", objAttribute.getValue());
          }
        }

        // Optional attributes at the track level.
        for (DetectedAttribute trackAttr : logoTrack.getAttributesList()) {
          System.out.printf("\n\t\tName : %s\n", trackAttr.getName());
          System.out.printf("\t\tConfidence : %s\n", trackAttr.getConfidence());
          System.out.printf("\t\tValue : %s\n", trackAttr.getValue());
        }
      }

      // All video segments where this logo appears; multiple instances of
      // the same logo class may show up in one VideoSegment.
      for (VideoSegment seg : logoAnnotation.getSegmentsList()) {
        System.out.printf(
            "\n\tStart Time Offset : %s.%s\n",
            seg.getStartTimeOffset().getSeconds(), seg.getStartTimeOffset().getNanos());
        System.out.printf(
            "\tEnd Time Offset : %s.%s\n",
            seg.getEndTimeOffset().getSeconds(), seg.getEndTimeOffset().getNanos());
      }
    }
  }
}
 
Example 18
Source File: TextDetection.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
/**
 * Detect text in a video.
 *
 * @param filePath the path to the video file to analyze.
 */
public static VideoAnnotationResults detectText(String filePath) throws Exception {
  try (VideoIntelligenceServiceClient client = VideoIntelligenceServiceClient.create()) {
    // Load the raw video bytes from disk.
    byte[] videoBytes = Files.readAllBytes(Paths.get(filePath));

    // Build an annotation request carrying the video content inline and asking
    // for the TEXT_DETECTION feature.
    AnnotateVideoRequest request =
        AnnotateVideoRequest.newBuilder()
            .setInputContent(ByteString.copyFrom(videoBytes))
            .addFeatures(Feature.TEXT_DETECTION)
            .build();

    // Start the long-running text detection operation asynchronously.
    OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> future =
        client.annotateVideoAsync(request);

    System.out.println("Waiting for operation to complete...");
    // A single video was submitted, so only the first result is relevant.
    AnnotateVideoResponse response = future.get(600, TimeUnit.SECONDS);
    VideoAnnotationResults results = response.getAnnotationResults(0);

    // Inspect just the first annotation for demo purposes.
    TextAnnotation annotation = results.getTextAnnotations(0);
    System.out.println("Text: " + annotation.getText());

    // Look at the first segment in which this text was detected.
    TextSegment textSegment = annotation.getSegments(0);
    System.out.println("Confidence: " + textSegment.getConfidence());
    // Report the segment's time window; dividing nanos by 1e9 converts them to
    // fractional seconds.
    VideoSegment videoSegment = textSegment.getSegment();
    Duration segmentStart = videoSegment.getStartTimeOffset();
    Duration segmentEnd = videoSegment.getEndTimeOffset();
    System.out.println(
        String.format(
            "Start time: %.2f", segmentStart.getSeconds() + segmentStart.getNanos() / 1e9));
    System.out.println(
        String.format(
            "End time: %.2f", segmentEnd.getSeconds() + segmentEnd.getNanos() / 1e9));

    // Show the timing of the first frame within the segment.
    TextFrame firstFrame = textSegment.getFrames(0);
    Duration frameOffset = firstFrame.getTimeOffset();
    System.out.println(
        String.format(
            "Time offset for the first frame: %.2f",
            frameOffset.getSeconds() + frameOffset.getNanos() / 1e9));

    // Print each vertex of the rotated bounding box that locates the text on the frame.
    System.out.println("Rotated Bounding Box Vertices:");
    for (NormalizedVertex vertex : firstFrame.getRotatedBoundingBox().getVerticesList()) {
      System.out.println(
          String.format(
              "\tVertex.x: %.2f, Vertex.y: %.2f", vertex.getX(), vertex.getY()));
    }
    return results;
  }
}
 
Example 19
Source File: TrackObjects.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
/**
 * Track objects in a video stored in Google Cloud Storage.
 *
 * <p>Submits an asynchronous OBJECT_TRACKING request, waits up to 300 seconds for it to
 * complete, and prints the first annotation's confidence, entity, segment timing, and the
 * bounding box of its first frame.
 *
 * @param gcsUri the "gs://" URI of the video file to analyze.
 * @return the annotation results for the (single) processed video.
 * @throws Exception if the request fails or does not complete within the timeout.
 */
public static VideoAnnotationResults trackObjectsGcs(String gcsUri) throws Exception {
  try (VideoIntelligenceServiceClient client = VideoIntelligenceServiceClient.create()) {
    // Create the request
    AnnotateVideoRequest request =
        AnnotateVideoRequest.newBuilder()
            .setInputUri(gcsUri)
            .addFeatures(Feature.OBJECT_TRACKING)
            .setLocationId("us-east1")
            .build();

    // asynchronously perform object tracking on videos
    OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> future =
        client.annotateVideoAsync(request);

    System.out.println("Waiting for operation to complete...");
    // The first result is retrieved because a single video was processed.
    AnnotateVideoResponse response = future.get(300, TimeUnit.SECONDS);
    VideoAnnotationResults results = response.getAnnotationResults(0);

    // Get only the first annotation for demo purposes.
    ObjectTrackingAnnotation annotation = results.getObjectAnnotations(0);
    System.out.println("Confidence: " + annotation.getConfidence());

    if (annotation.hasEntity()) {
      Entity entity = annotation.getEntity();
      System.out.println("Entity description: " + entity.getDescription());
      // Fixed: label previously printed a doubled colon ("Entity id::").
      System.out.println("Entity id: " + entity.getEntityId());
    }

    if (annotation.hasSegment()) {
      VideoSegment videoSegment = annotation.getSegment();
      Duration startTimeOffset = videoSegment.getStartTimeOffset();
      Duration endTimeOffset = videoSegment.getEndTimeOffset();
      // Display the segment time in seconds, 1e9 converts nanos to seconds
      System.out.println(
          String.format(
              "Segment: %.2fs to %.2fs",
              startTimeOffset.getSeconds() + startTimeOffset.getNanos() / 1e9,
              endTimeOffset.getSeconds() + endTimeOffset.getNanos() / 1e9));
    }

    // Here we print only the bounding box of the first frame in this segment.
    ObjectTrackingFrame frame = annotation.getFrames(0);
    // Display the offset time in seconds, 1e9 converts nanos to seconds
    Duration timeOffset = frame.getTimeOffset();
    System.out.println(
        String.format(
            "Time offset of the first frame: %.2fs",
            timeOffset.getSeconds() + timeOffset.getNanos() / 1e9));

    // Display the bounding box of the detected object
    NormalizedBoundingBox normalizedBoundingBox = frame.getNormalizedBoundingBox();
    System.out.println("Bounding box position:");
    System.out.println("\tleft: " + normalizedBoundingBox.getLeft());
    System.out.println("\ttop: " + normalizedBoundingBox.getTop());
    System.out.println("\tright: " + normalizedBoundingBox.getRight());
    System.out.println("\tbottom: " + normalizedBoundingBox.getBottom());
    return results;
  }
}
 
Example 20
Source File: TextDetection.java    From java-docs-samples with Apache License 2.0 4 votes vote down vote up
/**
 * Detect Text in a video.
 *
 * @param gcsUri the path to the video file to analyze.
 */
public static VideoAnnotationResults detectTextGcs(String gcsUri) throws Exception {
  try (VideoIntelligenceServiceClient client = VideoIntelligenceServiceClient.create()) {
    // Build an annotation request pointing at the Cloud Storage URI and asking
    // for the TEXT_DETECTION feature.
    AnnotateVideoRequest request =
        AnnotateVideoRequest.newBuilder()
            .setInputUri(gcsUri)
            .addFeatures(Feature.TEXT_DETECTION)
            .build();

    // Start the long-running text detection operation asynchronously.
    OperationFuture<AnnotateVideoResponse, AnnotateVideoProgress> future =
        client.annotateVideoAsync(request);

    System.out.println("Waiting for operation to complete...");
    // A single video was submitted, so only the first result is relevant.
    AnnotateVideoResponse response = future.get(300, TimeUnit.SECONDS);
    VideoAnnotationResults results = response.getAnnotationResults(0);

    // Inspect just the first annotation for demo purposes.
    TextAnnotation annotation = results.getTextAnnotations(0);
    System.out.println("Text: " + annotation.getText());

    // Look at the first segment in which this text was detected.
    TextSegment textSegment = annotation.getSegments(0);
    System.out.println("Confidence: " + textSegment.getConfidence());
    // Report the segment's time window; dividing nanos by 1e9 converts them to
    // fractional seconds.
    VideoSegment videoSegment = textSegment.getSegment();
    Duration segmentStart = videoSegment.getStartTimeOffset();
    Duration segmentEnd = videoSegment.getEndTimeOffset();
    System.out.println(
        String.format(
            "Start time: %.2f", segmentStart.getSeconds() + segmentStart.getNanos() / 1e9));
    System.out.println(
        String.format(
            "End time: %.2f", segmentEnd.getSeconds() + segmentEnd.getNanos() / 1e9));

    // Show the timing of the first frame within the segment.
    TextFrame firstFrame = textSegment.getFrames(0);
    Duration frameOffset = firstFrame.getTimeOffset();
    System.out.println(
        String.format(
            "Time offset for the first frame: %.2f",
            frameOffset.getSeconds() + frameOffset.getNanos() / 1e9));

    // Print each vertex of the rotated bounding box that locates the text on the frame.
    System.out.println("Rotated Bounding Box Vertices:");
    for (NormalizedVertex vertex : firstFrame.getRotatedBoundingBox().getVerticesList()) {
      System.out.println(
          String.format(
              "\tVertex.x: %.2f, Vertex.y: %.2f", vertex.getX(), vertex.getY()));
    }
    return results;
  }
}