com.google.datastore.v1.Entity Java Examples

The following examples show how to use com.google.datastore.v1.Entity. They are drawn from open-source projects; the source file, originating project, and license are noted above each example.
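
As a quick orientation before the examples, here is a minimal, self-contained sketch of constructing an Entity with the DatastoreHelper factory methods. The kind "Task", the key name, and the property names are made-up values for illustration only.

import static com.google.datastore.v1.client.DatastoreHelper.makeKey;
import static com.google.datastore.v1.client.DatastoreHelper.makeValue;

import com.google.datastore.v1.Entity;
import com.google.datastore.v1.Key;

public class EntitySketch {
  public static void main(String[] args) {
    // Build a complete key: kind "Task" with the name "sample-task" (hypothetical values).
    Key key = makeKey("Task", "sample-task").build();

    // An Entity is a key plus a map of named properties.
    Entity task =
        Entity.newBuilder()
            .setKey(key)
            .putProperties("description", makeValue("write the docs").build())
            .putProperties("done", makeValue(false).build())
            .build();

    System.out.println(task);
  }
}
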
Example #1
Source File: Guestbook.java    From google-cloud-datastore with Apache License 2.0
/**
 * Add a greeting to the specified guestbook.
 */
private void addGreeting(String guestbookName, String user, String message)
    throws DatastoreException {
  Entity.Builder greeting = Entity.newBuilder();
  greeting.setKey(makeKey(GUESTBOOK_KIND, guestbookName, GREETING_KIND));
  greeting.putProperties(USER_PROPERTY, makeValue(user).build());
  greeting.putProperties(MESSAGE_PROPERTY, makeValue(message).build());
  greeting.putProperties(DATE_PROPERTY, makeValue(new Date()).build());
  Key greetingKey = insert(greeting.build());
  System.out.println("greeting key is: " + greetingKey);
}
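
The insert helper called above is not part of this snippet. Below is a minimal sketch of what such a helper might look like, assuming the same datastore client field that runQuery uses in Example #15; it is an illustration, not the project's actual implementation.

// Hypothetical sketch of insert(...): wrap the entity in an insert mutation, commit it
// non-transactionally, and return the key that Datastore assigned.
private Key insert(Entity entity) throws DatastoreException {
  CommitRequest request =
      CommitRequest.newBuilder()
          .setMode(CommitRequest.Mode.NON_TRANSACTIONAL)
          .addMutations(Mutation.newBuilder().setInsert(entity))
          .build();
  CommitResponse response = datastore.commit(request);
  // For an insert with an incomplete key, the completed key is returned in the mutation result.
  return response.getMutationResults(0).getKey();
}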
 
Example #2
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
/** Tests {@link DatastoreWriterFn} with a failed request which is retried. */
@Test
public void testDatastoreWriterFnRetriesErrors() throws Exception {
  List<Mutation> mutations = new ArrayList<>();
  int numRpcs = 2;
  for (int i = 0; i < DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START * numRpcs; ++i) {
    mutations.add(
        makeUpsert(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)).build()).build());
  }

  CommitResponse successfulCommit = CommitResponse.getDefaultInstance();
  when(mockDatastore.commit(any(CommitRequest.class)))
      .thenReturn(successfulCommit)
      .thenThrow(new DatastoreException("commit", Code.DEADLINE_EXCEEDED, "", null))
      .thenReturn(successfulCommit);

  DatastoreWriterFn datastoreWriter =
      new DatastoreWriterFn(
          StaticValueProvider.of(PROJECT_ID), null, mockDatastoreFactory, new FakeWriteBatcher());
  DoFnTester<Mutation, Void> doFnTester = DoFnTester.of(datastoreWriter);
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  doFnTester.processBundle(mutations);
}
 
Example #3
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
@Test
public void testSourcePrimitiveDisplayData() {
  DisplayDataEvaluator evaluator = DisplayDataEvaluator.create();
  int numSplits = 98;
  PTransform<PBegin, PCollection<Entity>> read =
      DatastoreIO.v1()
          .read()
          .withProjectId(PROJECT_ID)
          .withQuery(Query.newBuilder().build())
          .withNumQuerySplits(numSplits);

  String assertMessage = "DatastoreIO read should include the '%s' in its primitive display data";
  Set<DisplayData> displayData = evaluator.displayDataForPrimitiveSourceTransforms(read);
  assertThat(
      String.format(assertMessage, "project id"),
      displayData,
      hasItem(hasDisplayItem("projectId", PROJECT_ID)));
  assertThat(
      String.format(assertMessage, "number of query splits"),
      displayData,
      hasItem(hasDisplayItem("numQuerySplits", numSplits)));
}
 
Example #4
Source File: DatastoreConvertersTest.java    From DataflowTemplates with Apache License 2.0
/** Test {@link DatastoreConverters.CheckNoKey} with only correct entities. */
@Test
@Category(NeedsRunner.class)
public void testCheckNoKeyAllCorrect() throws Exception {

  // Create test data
  List<Entity> testEntitiesWithKey = new ArrayList<>(entities);

  // Run the test
  TupleTag<Entity> successTag = new TupleTag<Entity>("entities") {};
  TupleTag<String> failureTag = new TupleTag<String>("failures") {};
  PCollectionTuple results =
      pipeline
          .apply("Create", Create.of(testEntitiesWithKey))
          .apply(
              "RemoveNoKeys",
              CheckNoKey.newBuilder()
                  .setSuccessTag(successTag)
                  .setFailureTag(failureTag)
                  .build());

  // Check the results
  PAssert.that(results.get(successTag)).containsInAnyOrder(testEntitiesWithKey);
  PAssert.that(results.get(failureTag)).empty();
  pipeline.run();
}
 
Example #5
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
@Test
public void testDeleteEntityPrimitiveDisplayData() {
  DisplayDataEvaluator evaluator = DisplayDataEvaluator.create();
  PTransform<PCollection<Entity>, ?> write =
      DatastoreIO.v1().deleteEntity().withProjectId("myProject");

  Set<DisplayData> displayData = evaluator.displayDataForPrimitiveTransforms(write);
  assertThat(
      "DatastoreIO write should include the project in its primitive display data",
      displayData,
      hasItem(hasDisplayItem("projectId")));
  assertThat(
      "DatastoreIO write should include the deleteEntityFn in its primitive display data",
      displayData,
      hasItem(hasDisplayItem("deleteEntityFn")));
}
 
Example #6
Source File: DatastoreV1.java    From beam with Apache License 2.0
/** Returns the number of entities available for reading. */
public long getNumEntities(
    PipelineOptions options, String ourKind, @Nullable String namespace) {
  try {
    V1Options v1Options = V1Options.from(getProjectId(), getNamespace(), getLocalhost());
    V1DatastoreFactory datastoreFactory = new V1DatastoreFactory();
    Datastore datastore =
        datastoreFactory.getDatastore(
            options, v1Options.getProjectId(), v1Options.getLocalhost());

    Entity entity = getLatestTableStats(ourKind, namespace, datastore);
    return entity.getProperties().get("count").getIntegerValue();
  } catch (Exception e) {
    return -1;
  }
}
 
Example #7
Source File: Guestbook.java    From google-cloud-datastore with Apache License 2.0
/**
 * List the greetings in the specified guestbook.
 */
private void listGreetings(String guestbookName) throws DatastoreException {
  Query.Builder query = Query.newBuilder();
  query.addKindBuilder().setName(GREETING_KIND);
  query.setFilter(makeFilter(KEY_PROPERTY, PropertyFilter.Operator.HAS_ANCESTOR,
      makeValue(makeKey(GUESTBOOK_KIND, guestbookName))));
  query.addOrder(makeOrder(DATE_PROPERTY, PropertyOrder.Direction.DESCENDING));

  List<Entity> greetings = runQuery(query.build());
  if (greetings.isEmpty()) {
    System.out.println("no greetings in " + guestbookName);
  }
  for (Entity greeting : greetings) {
    Map<String, Value> propertyMap = greeting.getPropertiesMap();
    System.out.println(
        DatastoreHelper.toDate(propertyMap.get(DATE_PROPERTY)) + ": " +
        DatastoreHelper.getString(propertyMap.get(USER_PROPERTY)) + " says " +
        DatastoreHelper.getString(propertyMap.get(MESSAGE_PROPERTY)));
  }
}
 
Example #8
Source File: DatastoreConverters.java    From DataflowTemplates with Apache License 2.0
@Override
public PCollectionTuple expand(PCollection<Entity> entity) {
  TupleTag<Entity> goodTag = new TupleTag<>();

  // Because DatastoreIO writes to Datastore non-transactionally, writing the same entity more
  // than once in the same commit is not supported (error: "A non-transactional commit may not
  // contain multiple mutations affecting the same entity"). Messages with the same key are
  // therefore not written to Datastore and are instead routed to an error PCollection for
  // further handling downstream.
  PCollectionTuple entities =
      entity.apply(
          "CheckSameKey",
          CheckSameKey.newBuilder().setErrorTag(errorTag()).setGoodTag(goodTag).build());
  entities
      .get(goodTag)
      .apply("WriteToDatastore", DatastoreIO.v1().write().withProjectId(projectId()));
  return entities;
}
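
The CheckSameKey transform used above comes from the same project and is not reproduced here. As a rough, hedged sketch of the general idea (not the template's actual implementation), entities could be grouped by their key and only the first occurrence forwarded, with duplicates routed to the error output; keys are compared by their text form here purely for illustration.

// Hypothetical sketch of a duplicate-key check: group by key, keep the first entity per key,
// and send a description of every duplicate to the error output.
static PCollectionTuple checkSameKeySketch(
    PCollection<Entity> input, TupleTag<Entity> goodTag, TupleTag<String> errorTag) {
  return input
      .apply(
          "KeyByEntityKey",
          MapElements.into(
                  TypeDescriptors.kvs(TypeDescriptors.strings(), TypeDescriptor.of(Entity.class)))
              .via((Entity e) -> KV.of(e.getKey().toString(), e)))
      .setCoder(KvCoder.of(StringUtf8Coder.of(), ProtoCoder.of(Entity.class)))
      .apply(GroupByKey.create())
      .apply(
          "EmitFirstPerKey",
          ParDo.of(
                  new DoFn<KV<String, Iterable<Entity>>, Entity>() {
                    @ProcessElement
                    public void processElement(ProcessContext c) {
                      Iterator<Entity> entities = c.element().getValue().iterator();
                      c.output(entities.next()); // first entity for this key goes to goodTag
                      while (entities.hasNext()) {
                        entities.next();
                        c.output(errorTag, "Duplicate key: " + c.element().getKey());
                      }
                    }
                  })
              .withOutputTags(goodTag, TupleTagList.of(errorTag)));
}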
 
Example #9
Source File: DatastoreV1.java    From beam with Apache License 2.0
/**
 * Cloud Datastore system tables with statistics are periodically updated. This method fetches
 * the latest timestamp (in microseconds) of statistics update using the {@code __Stat_Total__}
 * table.
 */
private static long queryLatestStatisticsTimestamp(
    Datastore datastore, @Nullable String namespace) throws DatastoreException {
  Query.Builder query = Query.newBuilder();
  // Note: namespace either being null or empty represents the default namespace, in which
  // case we treat it as not provided by the user.
  if (Strings.isNullOrEmpty(namespace)) {
    query.addKindBuilder().setName("__Stat_Total__");
  } else {
    query.addKindBuilder().setName("__Stat_Ns_Total__");
  }
  query.addOrder(makeOrder("timestamp", DESCENDING));
  query.setLimit(Int32Value.newBuilder().setValue(1));
  RunQueryRequest request = makeRequest(query.build(), namespace);

  RunQueryResponse response = datastore.runQuery(request);
  QueryResultBatch batch = response.getBatch();
  if (batch.getEntityResultsCount() == 0) {
    throw new NoSuchElementException("Datastore total statistics unavailable");
  }
  Entity entity = batch.getEntityResults(0).getEntity();
  return entity.getProperties().get("timestamp").getTimestampValue().getSeconds() * 1000000;
}
 
Example #10
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
private void datastoreWriterFnTest(int numMutations) throws Exception {
  // Create the requested number of mutations.
  List<Mutation> mutations = new ArrayList<>(numMutations);
  for (int i = 0; i < numMutations; ++i) {
    mutations.add(
        makeUpsert(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)).build()).build());
  }

  DatastoreWriterFn datastoreWriter =
      new DatastoreWriterFn(
          StaticValueProvider.of(PROJECT_ID), null, mockDatastoreFactory, new FakeWriteBatcher());
  DoFnTester<Mutation, Void> doFnTester = DoFnTester.of(datastoreWriter);
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  doFnTester.processBundle(mutations);

  int start = 0;
  while (start < numMutations) {
    int end = Math.min(numMutations, start + DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START);
    CommitRequest.Builder commitRequest = CommitRequest.newBuilder();
    commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL);
    commitRequest.addAllMutations(mutations.subList(start, end));
    // Verify all the batch requests were made with the expected mutations.
    verify(mockDatastore, times(1)).commit(commitRequest.build());
    start = end;
  }
}
 
Example #11
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
/** Tests that {@link ReadFn} retries after an error. */
@Test
public void testReadFnRetriesErrors() throws Exception {
  // An empty query to read entities.
  Query query = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(1)).build();

  // Use mockResponseForQuery to generate results.
  when(mockDatastore.runQuery(any(RunQueryRequest.class)))
      .thenThrow(new DatastoreException("RunQuery", Code.DEADLINE_EXCEEDED, "", null))
      .thenAnswer(
          invocationOnMock -> {
            Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
            return mockResponseForQuery(q);
          });

  ReadFn readFn = new ReadFn(V_1_OPTIONS, mockDatastoreFactory);
  DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  doFnTester.processBundle(query);
}
 
Example #12
Source File: DataStoreTableTest.java    From beam with Apache License 2.0
@Test
public void testRowToEntityConverterWithoutKey() {
  Schema schemaWithoutKey =
      Schema.builder()
          .addFields(
              SCHEMA.getFields().stream()
                  .filter(f -> !f.getName().equals("__key__"))
                  .collect(Collectors.toList()))
          .build();
  Row rowWithoutKey =
      Row.withSchema(schemaWithoutKey)
          .addValues(
              schemaWithoutKey.getFieldNames().stream()
                  .map(ROW::getValue)
                  .collect(Collectors.toList()))
          .build();
  PCollection<Entity> result =
      pipeline
          .apply(Create.of(rowWithoutKey))
          .setRowSchema(schemaWithoutKey)
          .apply(RowToEntity.createTest(UUID_VALUE, "__key__", KIND));

  PAssert.that(result).containsInAnyOrder(ENTITY);

  pipeline.run().waitUntilFinish();
}
 
Example #13
Source File: BigQueryConvertersTest.java    From DataflowTemplates with Apache License 2.0
/**
 * Tests that {@link BigQueryConverters.AvroToEntity} creates an Entity without a valid key when a
 * field is of type Record.
 */
@Test
public void testAvroToEntityRecordField() throws Exception {
  // Create test data
  TableFieldSchema column = generateNestedTableFieldSchema();
  List<TableFieldSchema> fields = new ArrayList<>();
  fields.add(column);
  TableSchema bqSchema = new TableSchema().setFields(fields);
  Record record = generateNestedAvroRecord();
  SchemaAndRecord inputBqData = new SchemaAndRecord(record, bqSchema);
  // Run the test
  Entity outputEntity = converter.apply(inputBqData);
  // Assess results
  String expectedCauseMessage = String.format("Column [address] of type [RECORD] not supported.");
  assertTrue(!outputEntity.hasKey());
  assertEquals(
      expectedCauseMessage, outputEntity.getPropertiesMap().get("cause").getStringValue());
  assertEquals(record.toString(), outputEntity.getPropertiesMap().get("row").getStringValue());
}
 
Example #14
Source File: DatastoreConverters.java    From DataflowTemplates with Apache License 2.0
/**
 * Grabs the schema for what data is in the Entity.
 * @param entity a populated entity
 * @return a schema of what kind of data is in the entity
 */
private JsonObject entitySchema(Entity entity) {
  JsonObject jsonObject = new JsonObject();
  entity.getPropertiesMap().entrySet().stream().forEach(entrySet -> {
    String key = entrySet.getKey();
    Value value = entrySet.getValue();
    switch (value.getValueTypeCase()) {
      case ENTITY_VALUE:
        jsonObject.add(key, entitySchema(value.getEntityValue()));
        break;
      case ARRAY_VALUE:
        jsonObject.add(key, arraySchema(value.getArrayValue()));
        break;
      default:
        jsonObject.addProperty(key, value.getValueTypeCase().toString());
    }
  });
  return jsonObject;
}
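
The arraySchema helper referenced above is not included in this snippet. A rough sketch of what it might look like, following the same recursive pattern (hypothetical, not the template's actual code):

// Hypothetical companion to entitySchema(...): describe the element types of an array value.
private JsonArray arraySchema(ArrayValue arrayValue) {
  JsonArray jsonArray = new JsonArray();
  for (Value value : arrayValue.getValuesList()) {
    switch (value.getValueTypeCase()) {
      case ENTITY_VALUE:
        jsonArray.add(entitySchema(value.getEntityValue()));
        break;
      case ARRAY_VALUE:
        jsonArray.add(arraySchema(value.getArrayValue()));
        break;
      default:
        jsonArray.add(value.getValueTypeCase().toString());
    }
  }
  return jsonArray;
}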
 
Example #15
Source File: Guestbook.java    From google-cloud-datastore with Apache License 2.0
/**
 * Run a query on the datastore.
 *
 * @return The entities returned by the query.
 * @throws DatastoreException on error
 */
private List<Entity> runQuery(Query query) throws DatastoreException {
  RunQueryRequest.Builder request = RunQueryRequest.newBuilder();
  request.setQuery(query);
  RunQueryResponse response = datastore.runQuery(request.build());

  if (response.getBatch().getMoreResults() == QueryResultBatch.MoreResultsType.NOT_FINISHED) {
    System.err.println("WARNING: partial results\n");
  }
  List<EntityResult> results = response.getBatch().getEntityResultsList();
  List<Entity> entities = new ArrayList<Entity>(results.size());
  for (EntityResult result : results) {
    entities.add(result.getEntity());
  }
  return entities;
}
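
Note that runQuery only fetches a single batch and merely warns when results are truncated. As an illustration (not part of the original Guestbook example), the end cursor in the result batch could be used to page through all results:

// Hypothetical variant that follows the end cursor until Datastore reports no more results.
private List<Entity> runQueryFully(Query query) throws DatastoreException {
  List<Entity> entities = new ArrayList<>();
  while (true) {
    RunQueryResponse response =
        datastore.runQuery(RunQueryRequest.newBuilder().setQuery(query).build());
    QueryResultBatch batch = response.getBatch();
    for (EntityResult result : batch.getEntityResultsList()) {
      entities.add(result.getEntity());
    }
    if (batch.getMoreResults() != QueryResultBatch.MoreResultsType.NOT_FINISHED) {
      return entities;
    }
    // Resume the query where the previous batch ended.
    query = query.toBuilder().setStartCursor(batch.getEndCursor()).build();
  }
}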
 
Example #16
Source File: AutoComplete.java    From beam with Apache License 2.0
@ProcessElement
public void processElement(ProcessContext c) {
  Entity.Builder entityBuilder = Entity.newBuilder();
  com.google.datastore.v1.Key key =
      makeKey(makeKey(kind, ancestorKey).build(), kind, c.element().getKey()).build();

  entityBuilder.setKey(key);
  List<Value> candidates = new ArrayList<>();
  Map<String, Value> properties = new HashMap<>();
  for (CompletionCandidate tag : c.element().getValue()) {
    Entity.Builder tagEntity = Entity.newBuilder();
    properties.put("tag", makeValue(tag.value).build());
    properties.put("count", makeValue(tag.count).build());
    candidates.add(makeValue(tagEntity).build());
  }
  properties.put("candidates", makeValue(candidates).build());
  entityBuilder.putAllProperties(properties);
  c.output(entityBuilder.build());
}
 
Example #17
Source File: V1TestUtil.java    From beam with Apache License 2.0
/**
 * Build an entity for the given ancestorKey, kind, namespace and value.
 *
 * @param largePropertySize if greater than 0, add an unindexed property of the given size.
 */
static Entity makeEntity(
    Long value, Key ancestorKey, String kind, @Nullable String namespace, int largePropertySize) {
  Entity.Builder entityBuilder = Entity.newBuilder();
  Key.Builder keyBuilder = makeKey(ancestorKey, kind, UUID.randomUUID().toString());
  // NOTE: Namespace is not inherited between keys created with DatastoreHelper.makeKey, so
  // we must set the namespace on keyBuilder. TODO: Once partitionId inheritance is added,
  // we can simplify this code.
  if (namespace != null) {
    keyBuilder.getPartitionIdBuilder().setNamespaceId(namespace);
  }

  entityBuilder.setKey(keyBuilder.build());
  entityBuilder.putProperties("value", makeValue(value).build());
  if (largePropertySize > 0) {
    entityBuilder.putProperties(
        "unindexed_value",
        makeValue(new String(new char[largePropertySize]).replace("\0", "A"))
            .setExcludeFromIndexes(true)
            .build());
  }
  return entityBuilder.build();
}
 
Example #18
Source File: V1TestUtil.java    From beam with Apache License 2.0
/** Delete all entities with the given ancestor. */
static void deleteAllEntities(V1TestOptions options, String project, String ancestor)
    throws Exception {
  Datastore datastore = getDatastore(options, project);
  Query query =
      V1TestUtil.makeAncestorKindQuery(options.getKind(), options.getNamespace(), ancestor);

  V1TestReader reader = new V1TestReader(datastore, query, options.getNamespace());
  V1TestWriter writer = new V1TestWriter(datastore, new DeleteMutationBuilder());

  long numEntities = 0;
  while (reader.advance()) {
    Entity entity = reader.getCurrent();
    numEntities++;
    writer.write(entity);
  }

  writer.close();
  LOG.info("Successfully deleted {} entities", numEntities);
}
 
Example #19
Source File: V1TestUtil.java    From beam with Apache License 2.0
private void flushBatch() throws DatastoreException, IOException, InterruptedException {
  LOG.info("Writing batch of {} entities", entities.size());
  Sleeper sleeper = Sleeper.DEFAULT;
  BackOff backoff =
      FluentBackoff.DEFAULT
          .withMaxRetries(MAX_RETRIES)
          .withInitialBackoff(INITIAL_BACKOFF)
          .backoff();

  while (true) {
    // Batch mutate entities.
    try {
      CommitRequest.Builder commitRequest = CommitRequest.newBuilder();
      for (Entity entity : entities) {
        commitRequest.addMutations(mutationBuilder.apply(entity));
      }
      commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL);
      datastore.commit(commitRequest.build());
      // Break if the commit threw no exception.
      break;
    } catch (DatastoreException exception) {
      LOG.error(
          "Error writing to the Datastore ({}): {}",
          exception.getCode(),
          exception.getMessage());
      if (!BackOffUtils.next(sleeper, backoff)) {
        LOG.error("Aborting after {} retries.", MAX_RETRIES);
        throw exception;
      }
    }
  }
  LOG.info("Successfully wrote {} entities", entities.size());
  entities.clear();
}
 
Example #20
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
/** Builds a per-kind statistics response with the given entity size. */
private static RunQueryResponse makeStatKindResponse(long entitySizeInBytes) {
  RunQueryResponse.Builder statKindResponse = RunQueryResponse.newBuilder();
  Entity.Builder entity = Entity.newBuilder();
  entity.setKey(makeKey("dummyKind", "dummyId"));
  entity.putProperties("entity_bytes", makeValue(entitySizeInBytes).build());
  EntityResult.Builder entityResult = EntityResult.newBuilder();
  entityResult.setEntity(entity);
  QueryResultBatch.Builder batch = QueryResultBatch.newBuilder();
  batch.addEntityResults(entityResult);
  statKindResponse.setBatch(batch);
  return statKindResponse.build();
}
 
Example #21
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
/** Helper function to run a test reading from a {@link ReadFn}. */
private void readFnTest(int numEntities) throws Exception {
  // An empty query to read entities.
  Query query =
      Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(numEntities)).build();

  // Use mockResponseForQuery to generate results.
  when(mockDatastore.runQuery(any(RunQueryRequest.class)))
      .thenAnswer(
          invocationOnMock -> {
            Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
            return mockResponseForQuery(q);
          });

  ReadFn readFn = new ReadFn(V_1_OPTIONS, mockDatastoreFactory);
  DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
  /**
   * Although Datastore client is marked transient in {@link ReadFn}, when injected through mock
   * factory using a when clause for unit testing purposes, it is not serializable because it
   * doesn't have a no-arg constructor. Thus disabling the cloning to prevent the test object from
   * being serialized.
   */
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  List<Entity> entities = doFnTester.processBundle(query);

  int expectedNumCallsToRunQuery = (int) Math.ceil((double) numEntities / QUERY_BATCH_LIMIT);
  verify(mockDatastore, times(expectedNumCallsToRunQuery)).runQuery(any(RunQueryRequest.class));
  // Validate the number of results.
  assertEquals(numEntities, entities.size());
}
 
Example #22
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
/** Builds a response with the given timestamp. */
private static RunQueryResponse makeLatestTimestampResponse(long timestamp) {
  RunQueryResponse.Builder timestampResponse = RunQueryResponse.newBuilder();
  Entity.Builder entity = Entity.newBuilder();
  entity.setKey(makeKey("dummyKind", "dummyId"));
  entity.putProperties("timestamp", makeValue(new Date(timestamp * 1000)).build());
  EntityResult.Builder entityResult = EntityResult.newBuilder();
  entityResult.setEntity(entity);
  QueryResultBatch.Builder batch = QueryResultBatch.newBuilder();
  batch.addEntityResults(entityResult);
  timestampResponse.setBatch(batch);
  return timestampResponse.build();
}
 
Example #23
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
/**
 * A helper function that creates mock {@link Entity} results in response to a query. Always
 * indicates that more results are available, unless the batch is limited to fewer than {@link
 * DatastoreV1.Read#QUERY_BATCH_LIMIT} results.
 */
private static RunQueryResponse mockResponseForQuery(Query q) {
  // Every query DatastoreV1 sends should have a limit.
  assertTrue(q.hasLimit());

  // The limit should be in the range [1, QUERY_BATCH_LIMIT]
  int limit = q.getLimit().getValue();
  assertThat(limit, greaterThanOrEqualTo(1));
  assertThat(limit, lessThanOrEqualTo(QUERY_BATCH_LIMIT));

  // Create the requested number of entities.
  List<EntityResult> entities = new ArrayList<>(limit);
  for (int i = 0; i < limit; ++i) {
    entities.add(
        EntityResult.newBuilder()
            .setEntity(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)))
            .build());
  }

  // Fill out the other parameters on the returned result batch.
  RunQueryResponse.Builder ret = RunQueryResponse.newBuilder();
  ret.getBatchBuilder()
      .addAllEntityResults(entities)
      .setEntityResultType(EntityResult.ResultType.FULL)
      .setMoreResults(
          limit == QUERY_BATCH_LIMIT
              ? QueryResultBatch.MoreResultsType.NOT_FINISHED
              : QueryResultBatch.MoreResultsType.NO_MORE_RESULTS);

  return ret.build();
}
 
Example #24
Source File: DatastoreV1Test.java    From beam with Apache License 2.0
/** Test that entities with incomplete keys cannot be deleted. */
@Test
public void testDeleteEntitiesWithIncompleteKeys() throws Exception {
  Key key = makeKey("bird").build();
  Entity entity = Entity.newBuilder().setKey(key).build();
  DeleteEntityFn deleteEntityFn = new DeleteEntityFn();

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("Entities to be deleted from the Cloud Datastore must have complete keys");

  deleteEntityFn.apply(entity);
}
 
Example #25
Source File: DatastoreV1.java    From beam with Apache License 2.0
@Override
public Mutation apply(Entity entity) {
  // Verify that the entity to delete has a complete key.
  checkArgument(
      isValidKey(entity.getKey()),
      "Entities to be deleted from the Cloud Datastore must have complete keys:\n%s",
      entity);

  return makeDelete(entity.getKey()).build();
}
 
Example #26
Source File: DatastoreV1.java    From beam with Apache License 2.0
@Override
public Mutation apply(Entity entity) {
  // Verify that the entity to write has a complete key.
  checkArgument(
      isValidKey(entity.getKey()),
      "Entities to be written to the Cloud Datastore must have complete keys:\n%s",
      entity);

  return makeUpsert(entity).build();
}
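
The isValidKey check used by both mutation functions above (Examples #25 and #26) is not shown. Based on the v1 Key proto, a complete key is one whose final path element carries either a name or a numeric id; a minimal sketch of such a check (an assumption, not the library's exact implementation) might look like this:

// Hypothetical sketch of a complete-key check.
static boolean isValidKey(Key key) {
  if (key.getPathCount() == 0) {
    return false; // a key with no path elements cannot be complete
  }
  Key.PathElement last = key.getPath(key.getPathCount() - 1);
  // The id_type oneof must be set to either a name or a numeric id.
  return last.getIdTypeCase() != Key.PathElement.IdTypeCase.IDTYPE_NOT_SET;
}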
 
Example #27
Source File: DatastoreV1.java    From beam with Apache License 2.0
/** Retrieve latest table statistics for a given kind, namespace, and datastore. */
private static Entity getLatestTableStats(
    String ourKind, @Nullable String namespace, Datastore datastore) throws DatastoreException {
  long latestTimestamp = queryLatestStatisticsTimestamp(datastore, namespace);
  LOG.info("Latest stats timestamp for kind {} is {}", ourKind, latestTimestamp);

  Query.Builder queryBuilder = Query.newBuilder();
  if (Strings.isNullOrEmpty(namespace)) {
    queryBuilder.addKindBuilder().setName("__Stat_Kind__");
  } else {
    queryBuilder.addKindBuilder().setName("__Stat_Ns_Kind__");
  }

  queryBuilder.setFilter(
      makeAndFilter(
          makeFilter("kind_name", EQUAL, makeValue(ourKind).build()).build(),
          makeFilter("timestamp", EQUAL, makeValue(latestTimestamp).build()).build()));

  RunQueryRequest request = makeRequest(queryBuilder.build(), namespace);

  long now = System.currentTimeMillis();
  RunQueryResponse response = datastore.runQuery(request);
  LOG.debug("Query for per-kind statistics took {}ms", System.currentTimeMillis() - now);

  QueryResultBatch batch = response.getBatch();
  if (batch.getEntityResultsCount() == 0) {
    throw new NoSuchElementException(
        "Datastore statistics for kind " + ourKind + " unavailable");
  }
  return batch.getEntityResults(0).getEntity();
}
 
Example #28
Source File: DataStoreTableTest.java    From beam with Apache License 2.0
@Test
public void testRowToEntityConverter() {
  PCollection<Entity> result =
      pipeline
          .apply(Create.of(ROW))
          .setRowSchema(SCHEMA)
          .apply(RowToEntity.createTest(UUID_VALUE, "__key__", KIND));
  PAssert.that(result).containsInAnyOrder(ENTITY);

  pipeline.run().waitUntilFinish();
}
 
Example #29
Source File: DataStoreV1Table.java    From beam with Apache License 2.0
/**
 * Converts an entire {@code Row} to an appropriate DataStore {@code Entity.Builder}.
 *
 * @param row {@code Row} to convert.
 * @return resulting {@code Entity.Builder}.
 */
private Entity.Builder constructEntityFromRow(Schema schema, Row row) {
  Entity.Builder entityBuilder = Entity.newBuilder();
  for (Schema.Field field : schema.getFields()) {
    Value val = mapObjectToValue(row.getValue(field.getName()));
    entityBuilder.putProperties(field.getName(), val);
  }
  return entityBuilder;
}
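
The mapObjectToValue helper referenced above is not reproduced here. As a rough sketch of how a few common Java types could be mapped onto Datastore Value protos with DatastoreHelper.makeValue (the set of supported types below is an assumption for illustration):

// Hypothetical sketch: map a handful of common Java types onto Datastore Value protos.
private Value mapObjectToValue(Object o) {
  if (o == null) {
    return Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build();
  } else if (o instanceof String) {
    return makeValue((String) o).build();
  } else if (o instanceof Long || o instanceof Integer) {
    return makeValue(((Number) o).longValue()).build();
  } else if (o instanceof Double || o instanceof Float) {
    return makeValue(((Number) o).doubleValue()).build();
  } else if (o instanceof Boolean) {
    return makeValue((Boolean) o).build();
  }
  throw new IllegalArgumentException("Unsupported field type: " + o.getClass());
}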
 
Example #30
Source File: DataStoreV1Table.java    From beam with Apache License 2.0
@Override
public PCollection<Entity> expand(PCollection<Row> input) {
  boolean isFieldPresent = input.getSchema().getFieldNames().contains(keyField);
  if (isFieldPresent) {
    if (!input.getSchema().getField(keyField).getType().getTypeName().equals(TypeName.BYTES)) {
      throw new IllegalStateException(
          "Field `"
              + keyField
              + "` should be of type `VARBINARY`. Please change the type or specify a field to"
              + " write the KEY value from via TableProperties.");
    }
    LOG.info("Field to use as Entity KEY is set to: `" + keyField + "`.");
  }
  return input.apply(ParDo.of(new RowToEntityConverter(isFieldPresent)));
}