com.google.cloud.bigquery.FieldValueList Java Examples

The following examples show how to use com.google.cloud.bigquery.FieldValueList. Each example notes the original project and source file it was taken from.
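Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them share: run a query, iterate the resulting TableResult, and read each FieldValueList row by column name or by index. The class name FieldValueListQuickstart and the query against bigquery-public-data.samples.shakespeare are illustrative assumptions, not taken from any of the projects below.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;

public class FieldValueListQuickstart {

  public static void main(String[] args) throws InterruptedException {
    // Uses application default credentials; assumes the public shakespeare sample table is
    // reachable from your project.
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

    String query =
        "SELECT word, word_count FROM `bigquery-public-data.samples.shakespeare` "
            + "WHERE corpus = 'hamlet' ORDER BY word_count DESC LIMIT 5";
    QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();

    // query() runs the job and returns a TableResult whose rows are FieldValueList instances.
    TableResult result = bigquery.query(queryConfig);
    for (FieldValueList row : result.iterateAll()) {
      // Columns can be read by name (when the schema is known) or by zero-based index.
      String word = row.get("word").getStringValue();
      long count = row.get(1).getLongValue();
      System.out.printf("%s: %d%n", word, count);
    }
  }
}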
Example #1
Source File: BigQuerySampleApplicationTests.java    From spring-cloud-gcp with Apache License 2.0
@Test
public void testCsvDataUpload() throws InterruptedException {
	LinkedMultiValueMap<String, Object> map = new LinkedMultiValueMap<>();
	map.add("csvText", "name,age,location\nBob,24,Wyoming");
	map.add("tableName", TABLE_NAME);

	HttpHeaders headers = new HttpHeaders();
	HttpEntity<LinkedMultiValueMap<String, Object>> request = new HttpEntity<>(map, headers);
	ResponseEntity<String> response =
			this.restTemplate.postForEntity("/uploadCsvText", request, String.class);
	assertThat(response.getStatusCode().is2xxSuccessful()).isTrue();

	QueryJobConfiguration queryJobConfiguration = QueryJobConfiguration
			.newBuilder("SELECT * FROM " + DATASET_NAME + "." + TABLE_NAME)
			.build();

	TableResult queryResult = this.bigQuery.query(queryJobConfiguration);
	assertThat(queryResult.getTotalRows()).isEqualTo(1);

	FieldValueList row = queryResult.getValues().iterator().next();
	assertThat(row.get(0).getStringValue()).isEqualTo("Bob");
	assertThat(row.get(1).getLongValue()).isEqualTo(24);
	assertThat(row.get(2).getStringValue()).isEqualTo("Wyoming");
}
 
Example #2
Source File: BigQuerySnippets.java    From google-cloud-java with Apache License 2.0
/** Example of listing table rows, specifying the page size. */
// [TARGET listTableData(String, String, TableDataListOption...)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
public TableResult listTableData(String datasetName, String tableName) {
  // [START ]
  // This example reads the table rows 100 at a time per RPC call. If there's no need to limit
  // the page size, simply omit the option.
  TableResult tableData =
      bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
  for (FieldValueList row : tableData.iterateAll()) {
    // do something with the row
  }
  // [END ]
  return tableData;
}
 
Example #3
Source File: BigQuerySnippets.java    From google-cloud-java with Apache License 2.0
/** Example of listing table rows, specifying the page size. */
// [TARGET listTableData(TableId, TableDataListOption...)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
public TableResult listTableDataFromId(String datasetName, String tableName) {
  // [START bigquery_browse_table]
  TableId tableIdObject = TableId.of(datasetName, tableName);
  // This example reads the table rows 100 at a time per RPC call. If there's no need to limit
  // the page size, simply omit the option.
  TableResult tableData =
      bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
  for (FieldValueList row : tableData.iterateAll()) {
    // do something with the row
  }
  // [END bigquery_browse_table]
  return tableData;
}
 
Example #4
Source File: BigQuerySnippets.java    From google-cloud-java with Apache License 2.0
/** Example of listing table rows with schema. */
// [TARGET listTableData(String, String, Schema, TableDataListOption...)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
// [VARIABLE ...]
// [VARIABLE "field"]
public TableResult listTableDataSchema(
    String datasetName, String tableName, Schema schema, String field) {
  // [START ]
  TableResult tableData = bigquery.listTableData(datasetName, tableName, schema);
  for (FieldValueList row : tableData.iterateAll()) {
    row.get(field);
  }
  // [END ]
  return tableData;
}
 
Example #5
Source File: BigQuerySnippets.java    From google-cloud-java with Apache License 2.0
/** Example of listing table rows with schema. */
// [TARGET listTableData(TableId, Schema, TableDataListOption...)]
public FieldValueList listTableDataSchemaId() {
  // [START ]
  Schema schema =
      Schema.of(
          Field.of("word", LegacySQLTypeName.STRING),
          Field.of("word_count", LegacySQLTypeName.STRING),
          Field.of("corpus", LegacySQLTypeName.STRING),
          Field.of("corpus_date", LegacySQLTypeName.STRING));
  TableResult tableData =
      bigquery.listTableData(
          TableId.of("bigquery-public-data", "samples", "shakespeare"), schema);
  FieldValueList row = tableData.getValues().iterator().next();
  System.out.println(row.get("word").getStringValue());
  // [END ]
  return row;
}
 
Example #6
Source File: CloudSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a query with the cache disabled. */
public void runUncachedQuery() throws TimeoutException, InterruptedException {
  // [START bigquery_query_no_cache]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
  QueryJobConfiguration queryConfig =
      QueryJobConfiguration.newBuilder(query)
          // Disable the query cache to force live query evaluation.
          .setUseQueryCache(false)
          .build();

  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query_no_cache]
}
 
Example #7
Source File: BigQuerySnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a query. */
// [TARGET query(QueryJobConfiguration, JobOption...)]
public void runQuery() throws InterruptedException {
  // [START bigquery_query]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
  QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();

  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query]
}
 
Example #8
Source File: CloudSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a query and saving the results to a table. */
public void runQueryPermanentTable(String destinationDataset, String destinationTable)
    throws InterruptedException {
  // [START bigquery_query_destination_table]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  // String destinationDataset = 'my_destination_dataset';
  // String destinationTable = 'my_destination_table';
  String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
  QueryJobConfiguration queryConfig =
      // Note that setUseLegacySql is set to false by default
      QueryJobConfiguration.newBuilder(query)
          // Save the results of the query to a permanent table.
          .setDestinationTable(TableId.of(destinationDataset, destinationTable))
          .build();

  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query_destination_table]
}
 
Example #9
Source File: CloudSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a Legacy SQL query. */
public void runLegacySqlQuery() throws InterruptedException {
  // [START bigquery_query_legacy]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  String query = "SELECT corpus FROM [bigquery-public-data:samples.shakespeare] GROUP BY corpus;";
  QueryJobConfiguration queryConfig =
      // To use legacy SQL syntax, set useLegacySql to true.
      QueryJobConfiguration.newBuilder(query).setUseLegacySql(true).build();

  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query_legacy]
}
 
Example #10
Source File: ITTableSnippets.java    From google-cloud-java with Apache License 2.0
@Test
public void testInsertParams() throws InterruptedException {
  InsertAllResponse response = tableSnippets.insertWithParams("row1", "row2");
  assertFalse(response.hasErrors());
  List<FieldValueList> rows = ImmutableList.copyOf(tableSnippets.list().getValues());
  while (rows.isEmpty()) {
    Thread.sleep(500);
    rows = ImmutableList.copyOf(tableSnippets.list().getValues());
  }
  Set<List<?>> values =
      FluentIterable.from(rows)
          .transform(
              new Function<FieldValueList, List<?>>() {
                @Override
                public List<?> apply(FieldValueList row) {
                  return ImmutableList.of(
                      row.get(0).getStringValue(), row.get(1).getBooleanValue());
                }
              })
          .toSet();
  assertEquals(ImmutableSet.of(ROW2), values);
}
 
Example #11
Source File: ITTableSnippets.java    From google-cloud-java with Apache License 2.0
/**
 * Verifies that the given table has the rows inserted by InsertTestRows().
 *
 * @param checkTable the table to query
 */
private void verifyTestRows(Table checkTable) throws InterruptedException {
  List<FieldValueList> rows = waitForTableRows(checkTable, 2);
  // Verify that the table data matches what it's supposed to.
  Set<List<?>> values =
      FluentIterable.from(rows)
          .transform(
              new Function<FieldValueList, List<?>>() {
                @Override
                public List<?> apply(FieldValueList row) {
                  return ImmutableList.of(
                      row.get(0).getStringValue(), row.get(1).getBooleanValue());
                }
              })
          .toSet();
  assertEquals(ImmutableSet.of(ROW2, ROW1), values);
}
 
Example #12
Source File: BigQueryExample.java    From google-cloud-java with Apache License 2.0
@Override
void run(BigQuery bigquery, QueryJobConfiguration queryConfig) throws Exception {
  System.out.println("Running query");
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    System.out.println(row);
  }
}
 
Example #13
Source File: ITBigQuerySnippets.java    From google-cloud-java with Apache License 2.0
@Test
public void testInsertAllAndListTableData() throws IOException, InterruptedException {
  String tableName = "test_insert_all_and_list_table_data";
  String fieldName1 = "booleanField";
  String fieldName2 = "bytesField";
  String fieldName3 = "recordField";
  String fieldName4 = "stringField";
  TableId tableId = TableId.of(DATASET, tableName);
  Schema schema =
      Schema.of(
          Field.of(fieldName1, LegacySQLTypeName.BOOLEAN),
          Field.of(fieldName2, LegacySQLTypeName.BYTES),
          Field.of(
              fieldName3,
              LegacySQLTypeName.RECORD,
              Field.of(fieldName4, LegacySQLTypeName.STRING)));
  TableInfo table = TableInfo.of(tableId, StandardTableDefinition.of(schema));
  assertNotNull(bigquery.create(table));
  InsertAllResponse response = bigquerySnippets.insertAll(DATASET, tableName);
  assertFalse(response.hasErrors());
  assertTrue(response.getInsertErrors().isEmpty());
  Page<FieldValueList> listPage = bigquerySnippets.listTableDataFromId(DATASET, tableName);
  while (Iterators.size(listPage.iterateAll().iterator()) < 1) {
    Thread.sleep(500);
    listPage = bigquerySnippets.listTableDataFromId(DATASET, tableName);
  }
  FieldValueList row = listPage.getValues().iterator().next();
  assertEquals(true, row.get(0).getBooleanValue());
  assertArrayEquals(new byte[] {0xA, 0xD, 0xD, 0xE, 0xD}, row.get(1).getBytesValue());
  assertEquals("Hello, World!", row.get(2).getRecordValue().get(0).getStringValue());

  listPage = bigquerySnippets.listTableDataSchema(DATASET, tableName, schema, fieldName1);
  row = listPage.getValues().iterator().next();
  assertNotNull(row.get(fieldName1));
  assertArrayEquals(new byte[] {0xA, 0xD, 0xD, 0xE, 0xD}, row.get(fieldName2).getBytesValue());

  bigquerySnippets.listTableDataSchemaId();

  assertTrue(bigquerySnippets.deleteTable(DATASET, tableName));
}
 
Example #14
Source File: ITTableSnippets.java    From google-cloud-java with Apache License 2.0
@Test
public void testList() throws InterruptedException {
  List<FieldValueList> rows = ImmutableList.copyOf(tableSnippets.list().getValues());
  assertEquals(0, rows.size());

  InsertAllResponse response = tableSnippets.insert("row1", "row2");
  assertFalse(response.hasErrors());
  rows = ImmutableList.copyOf(tableSnippets.list().getValues());
  while (rows.isEmpty()) {
    Thread.sleep(500);
    rows = ImmutableList.copyOf(tableSnippets.list().getValues());
  }
  assertEquals(2, rows.size());
}
 
Example #15
Source File: ITTableSnippets.java    From google-cloud-java with Apache License 2.0
/**
 * Waits for a specified number of rows to appear in the given table. This is used by
 * verifyTestRows to wait for data to appear before verifying.
 *
 * @param checkTable the table to query
 * @param numRows the expected number of rows
 * @return the rows from the table
 */
private List<FieldValueList> waitForTableRows(Table checkTable, int numRows)
    throws InterruptedException {
  // Wait for the data to appear.
  Page<FieldValueList> page = checkTable.list(TableDataListOption.pageSize(100));
  List<FieldValueList> rows = ImmutableList.copyOf(page.getValues());
  while (rows.size() != numRows) {
    Thread.sleep(1000);
    page = checkTable.list(TableDataListOption.pageSize(100));
    rows = ImmutableList.copyOf(page.getValues());
  }
  return rows;
}
 
Example #16
Source File: PutBigQueryStreamingIT.java    From nifi with Apache License 2.0
@Test
public void PutBigQueryStreamingNoError() throws Exception {
    String tableName = Thread.currentThread().getStackTrace()[1].getMethodName();
    createTable(tableName);

    runner.setProperty(BigQueryAttributes.DATASET_ATTR, dataset.getDatasetId().getDataset());
    runner.setProperty(BigQueryAttributes.TABLE_NAME_ATTR, tableName);

    final JsonTreeReader jsonReader = new JsonTreeReader();
    runner.addControllerService("reader", jsonReader);
    runner.enableControllerService(jsonReader);

    runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");

    runner.enqueue(Paths.get("src/test/resources/bigquery/streaming-correct-data.json"));

    runner.run();
    runner.assertAllFlowFilesTransferred(PutBigQueryStreaming.REL_SUCCESS, 1);
    runner.getFlowFilesForRelationship(PutBigQueryStreaming.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR, "2");

    TableResult result = bigquery.listTableData(dataset.getDatasetId().getDataset(), tableName, schema);
    Iterator<FieldValueList> iterator = result.getValues().iterator();

    FieldValueList firstElt = iterator.next();
    FieldValueList sndElt = iterator.next();
    assertTrue(firstElt.get("name").getStringValue().endsWith("Doe"));
    assertTrue(sndElt.get("name").getStringValue().endsWith("Doe"));

    FieldValueList john;
    FieldValueList jane;
    john = firstElt.get("name").getStringValue().equals("John Doe") ? firstElt : sndElt;
    jane = firstElt.get("name").getStringValue().equals("Jane Doe") ? firstElt : sndElt;

    assertEquals(jane.get("job").getRecordValue().get(0).getStringValue(), "Director");
    assertTrue(john.get("alias").getRepeatedValue().size() == 2);
    assertTrue(john.get("addresses").getRepeatedValue().get(0).getRecordValue().get(0).getStringValue().endsWith("000"));

    deleteTable(tableName);
}
 
Example #17
Source File: PutBigQueryStreamingIT.java    From nifi with Apache License 2.0
@Test
public void PutBigQueryStreamingPartialError() throws Exception {
    String tableName = Thread.currentThread().getStackTrace()[1].getMethodName();
    createTable(tableName);

    runner.setProperty(BigQueryAttributes.DATASET_ATTR, dataset.getDatasetId().getDataset());
    runner.setProperty(BigQueryAttributes.TABLE_NAME_ATTR, tableName);

    final JsonTreeReader jsonReader = new JsonTreeReader();
    runner.addControllerService("reader", jsonReader);
    runner.enableControllerService(jsonReader);

    runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");
    runner.setProperty(BigQueryAttributes.SKIP_INVALID_ROWS_ATTR, "true");

    runner.enqueue(Paths.get("src/test/resources/bigquery/streaming-bad-data.json"));

    runner.run();
    runner.assertAllFlowFilesTransferred(PutBigQueryStreaming.REL_FAILURE, 1);
    runner.getFlowFilesForRelationship(PutBigQueryStreaming.REL_FAILURE).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR, "1");

    TableResult result = bigquery.listTableData(dataset.getDatasetId().getDataset(), tableName, schema);
    Iterator<FieldValueList> iterator = result.getValues().iterator();

    FieldValueList firstElt = iterator.next();
    assertFalse(iterator.hasNext());
    assertEquals(firstElt.get("name").getStringValue(), "Jane Doe");

    deleteTable(tableName);
}
 
Example #18
Source File: CloudSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a query with timestamp query parameters. */
public void runQueryWithTimestampParameters() throws InterruptedException {
  // [START bigquery_query_params_timestamps]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  ZonedDateTime timestamp = LocalDateTime.of(2016, 12, 7, 8, 0, 0).atZone(ZoneOffset.UTC);
  String query = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);";
  // Note: Standard SQL is required to use query parameters.
  QueryJobConfiguration queryConfig =
      QueryJobConfiguration.newBuilder(query)
          .addNamedParameter(
              "ts_value",
              QueryParameterValue.timestamp(
                  // Timestamp takes microseconds since 1970-01-01T00:00:00 UTC
                  timestamp.toInstant().toEpochMilli() * 1000))
          .build();

  // Print the results.
  DateTimeFormatter formatter = DateTimeFormatter.ISO_INSTANT.withZone(ZoneOffset.UTC);
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    System.out.printf(
        "%s\n",
        formatter.format(
            Instant.ofEpochMilli(
                    // Timestamp values are returned in microseconds since 1970-01-01T00:00:00 UTC,
                    // but Instant.ofEpochMilli() expects milliseconds, so divide by 1000.
                    row.get(0).getTimestampValue() / 1000)
                .atOffset(ZoneOffset.UTC)));
    System.out.printf("\n");
  }
  // [END bigquery_query_params_timestamps]
}
 
Example #19
Source File: CloudSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a query with array query parameters. */
public void runQueryWithArrayParameters() throws InterruptedException {
  // [START bigquery_query_params_arrays]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  String gender = "M";
  String[] states = {"WA", "WI", "WV", "WY"};
  String query =
      "SELECT name, sum(number) as count\n"
          + "FROM `bigquery-public-data.usa_names.usa_1910_2013`\n"
          + "WHERE gender = @gender\n"
          + "AND state IN UNNEST(@states)\n"
          + "GROUP BY name\n"
          + "ORDER BY count DESC\n"
          + "LIMIT 10;";
  // Note: Standard SQL is required to use query parameters.
  QueryJobConfiguration queryConfig =
      QueryJobConfiguration.newBuilder(query)
          .addNamedParameter("gender", QueryParameterValue.string(gender))
          .addNamedParameter("states", QueryParameterValue.array(states, String.class))
          .build();

  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query_params_arrays]
}
 
Example #20
Source File: CloudSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a query with named query parameters. */
public void runQueryWithNamedParameters() throws InterruptedException {
  // [START bigquery_query_params_named]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  String corpus = "romeoandjuliet";
  long minWordCount = 250;
  String query =
      "SELECT word, word_count\n"
          + "FROM `bigquery-public-data.samples.shakespeare`\n"
          + "WHERE corpus = @corpus\n"
          + "AND word_count >= @min_word_count\n"
          + "ORDER BY word_count DESC";
  // Note: Standard SQL is required to use query parameters.
  QueryJobConfiguration queryConfig =
      QueryJobConfiguration.newBuilder(query)
          .addNamedParameter("corpus", QueryParameterValue.string(corpus))
          .addNamedParameter("min_word_count", QueryParameterValue.int64(minWordCount))
          .build();

  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query_params_named]
}
 
Example #21
Source File: CloudSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of running a query and saving the results to a table. */
public void runQueryLargeResults(String destinationDataset, String destinationTable)
    throws InterruptedException {
  // [START bigquery_query_legacy_large_results]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  // String destinationDataset = 'my_destination_dataset';
  // String destinationTable = 'my_destination_table';
  String query = "SELECT corpus FROM [bigquery-public-data:samples.shakespeare] GROUP BY corpus;";
  QueryJobConfiguration queryConfig =
      // To use legacy SQL syntax, set useLegacySql to true.
      QueryJobConfiguration.newBuilder(query)
          .setUseLegacySql(true)
          // Save the results of the query to a permanent table.
          .setDestinationTable(TableId.of(destinationDataset, destinationTable))
          // Allow results larger than the maximum response size.
          // If true, a destination table must be set.
          .setAllowLargeResults(true)
          .build();

  // Print the results.
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    for (FieldValue val : row) {
      System.out.printf("%s,", val.toString());
    }
    System.out.printf("\n");
  }
  // [END bigquery_query_legacy_large_results]
}
 
Example #22
Source File: TableSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of listing rows in the table given a schema. */
// [TARGET list(Schema, TableDataListOption...)]
// [VARIABLE ...]
// [VARIABLE "my_field"]
public Page<FieldValueList> list(Schema schema, String field) {
  // [START ]
  Page<FieldValueList> page = table.list(schema);
  for (FieldValueList row : page.iterateAll()) {
    row.get(field);
  }
  // [END ]
  return page;
}
 
Example #23
Source File: TableSnippets.java    From google-cloud-java with Apache License 2.0
/** Example of listing rows in the table. */
// [TARGET list(TableDataListOption...)]
public Page<FieldValueList> list() {
  // [START ]
  // This example reads the table rows 100 at a time per RPC call. If there's no need to limit
  // the page size, simply omit the option.
  Page<FieldValueList> page = table.list(TableDataListOption.pageSize(100));
  for (FieldValueList row : page.iterateAll()) {
    // do something with the row
  }
  // [END ]
  return page;
}
 
Example #24
Source File: TestBigQueryDelegate.java    From datacollector with Apache License 2.0
@SuppressWarnings("unchecked")
private static FieldValue createFieldValue(Object value, FieldValue.Attribute attribute) {
  FieldValue fieldValue = mock(FieldValue.class);
  when(fieldValue.getAttribute()).thenReturn(attribute);
  when(fieldValue.getValue()).thenReturn(value);

  if (value instanceof Long) {
    when(fieldValue.getTimestampValue()).thenReturn((long) value);
  }
  if (value instanceof byte[]) {
    when(fieldValue.getBytesValue()).thenReturn((byte[]) value);
  }


  if (! (attribute.equals(FieldValue.Attribute.RECORD) || attribute.equals(FieldValue.Attribute.REPEATED))) {
    when(fieldValue.getStringValue()).thenReturn(value.toString());
  }

  if (attribute.equals(FieldValue.Attribute.RECORD)) {
    when(fieldValue.getRecordValue()).thenReturn(FieldValueList.of((List<FieldValue>) value));
  }
  if (attribute.equals(FieldValue.Attribute.REPEATED)) {
    when(fieldValue.getRepeatedValue()).thenReturn((List<FieldValue>) value);
  }

  return fieldValue;
}
 
Example #25
Source File: TestBigQueryDelegate.java    From datacollector with Apache License 2.0
public static FieldValueList createTestValues() {
  return FieldValueList.of(ImmutableList.<FieldValue>builder()
      .add(createFieldValue("a string"))
      .add(createFieldValue("bytes".getBytes()))
      .add(createFieldValue(
          ImmutableList.of(
              createFieldValue(1L),
              createFieldValue(2L),
              createFieldValue(3L)
          ),
          FieldValue.Attribute.REPEATED)
      )
      .add(createFieldValue(2.0d))
      .add(createFieldValue(true))
      .add(createFieldValue(1351700038292387L))
      .add(createFieldValue("08:39:01.123"))
      .add(createFieldValue("2019-02-05T23:59:59.123"))
      .add(createFieldValue("2019-02-05"))
      .add(createFieldValue(
          ImmutableList.of(
              createFieldValue("nested string"),
              createFieldValue(ImmutableList.of(createFieldValue("z")), FieldValue.Attribute.RECORD)
          ),
          FieldValue.Attribute.RECORD
      ))
      .build());
}
 
Example #26
Source File: BigQueryHome.java    From java-docs-samples with Apache License 2.0
private static String convertRunToHtmlTable(TableResult result) {
  if (result == null) {
    return "";
  }

  StringBuilder sb = new StringBuilder();
  for (FieldValueList row : result.iterateAll()) {
    sb.append("<tr>");
    String url = row.get("url").getStringValue();
    addColumn(sb, String.format("<a href=\"%s\">%s</a>", url, url));
    addColumn(sb, row.get("view_count").getLongValue());
    sb.append("</tr>");
  }
  return sb.toString();
}
 
Example #27
Source File: InsertDataAndQueryTable.java    From google-cloud-java with Apache License 2.0
public static void main(String... args) throws InterruptedException {
  // Create a service instance
  BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

  // Create a dataset
  String datasetId = "my_dataset_id";
  bigquery.create(DatasetInfo.newBuilder(datasetId).build());

  TableId tableId = TableId.of(datasetId, "my_table_id");
  // Table field definition
  Field stringField = Field.of("StringField", LegacySQLTypeName.STRING);
  // Table schema definition
  Schema schema = Schema.of(stringField);
  // Create a table
  StandardTableDefinition tableDefinition = StandardTableDefinition.of(schema);
  bigquery.create(TableInfo.of(tableId, tableDefinition));

  // Define rows to insert
  Map<String, Object> firstRow = new HashMap<>();
  Map<String, Object> secondRow = new HashMap<>();
  firstRow.put("StringField", "value1");
  secondRow.put("StringField", "value2");
  // Create an insert request
  InsertAllRequest insertRequest =
      InsertAllRequest.newBuilder(tableId).addRow(firstRow).addRow(secondRow).build();
  // Insert rows
  InsertAllResponse insertResponse = bigquery.insertAll(insertRequest);
  // Check if errors occurred
  if (insertResponse.hasErrors()) {
    System.out.println("Errors occurred while inserting rows");
  }

  // Create a query request
  QueryJobConfiguration queryConfig =
      QueryJobConfiguration.newBuilder("SELECT * FROM my_dataset_id.my_table_id").build();
  // Read rows
  System.out.println("Table rows:");
  for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    System.out.println(row);
  }
}
 
Example #28
Source File: BigQueryExample.java    From google-cloud-java with Apache License 2.0
@Override
public void run(BigQuery bigquery, TableId tableId) {
  for (FieldValueList row : bigquery.listTableData(tableId).iterateAll()) {
    System.out.println(row);
  }
}
 
Example #29
Source File: BigQueryDatasetRuntime.java    From components with Apache License 2.0
@Override
public void getSample(int limit, Consumer<IndexedRecord> consumer) {
    BigQuery bigquery = BigQueryConnection.createClient(properties.getDatastoreProperties());
    com.google.cloud.bigquery.Schema bqRowSchema = null;
    String query = null;
    boolean useLegacySql = true;
    switch (properties.sourceType.getValue()) {
    case TABLE_NAME: {
        query = String.format("select * from `%s.%s.%s` LIMIT %d",
                properties.getDatastoreProperties().projectName.getValue(), properties.bqDataset.getValue(),
                properties.tableName.getValue(), limit);
        useLegacySql = false;
        break;
    }
    case QUERY: {
        query = properties.query.getValue();
        useLegacySql = properties.useLegacySql.getValue();
        break;
    }
    default:
        throw new RuntimeException("To be implemented: " + properties.sourceType.getValue());
    }
    QueryJobConfiguration queryRequest = QueryJobConfiguration
            .newBuilder(query)
            .setUseLegacySql(useLegacySql)
            .build();
    // todo: proper pagination, not critical for getSample yet
    TableResult queryResponse =
            query(bigquery, queryRequest, properties.getDatastoreProperties().projectName.getValue());
    bqRowSchema = queryResponse.getSchema();
    Schema schema = BigQueryAvroRegistry.get().inferSchema(bqRowSchema);
    Iterator<FieldValueList> iterator = queryResponse.getValues().iterator();
    IndexedRecordConverter<Map<String, Object>, IndexedRecord> converter =
            new BigQueryFieldValueListIndexedRecordConverter();
    converter.setSchema(schema);
    int count = 0; // need this only for legacy sql with large result
    while (iterator.hasNext() && count < limit) {
        List<FieldValue> values = iterator.next();
        consumer.accept(converter.convertToAvro(BigQueryAvroRegistry.get().convertFileds(values, schema)));
        count++;
    }
}
 
Example #30
Source File: TestBigQuerySource.java    From datacollector with Apache License 2.0
@Test
public void testProduce() throws Exception {
  File tempFile = File.createTempFile("gcp", "json");
  tempFile.deleteOnExit();

  BigQuerySourceConfig conf = new BigQuerySourceConfig();
  conf.credentials.projectId = "test";
  conf.credentials.path = tempFile.getAbsolutePath();
  conf.credentials.credentialsProvider = CredentialsProviderType.JSON_PROVIDER;
  conf.query = "SELECT * FROM [test:table]";


  Job mockJob = mock(Job.class);
  JobStatus mockJobStatus = mock(JobStatus.class);

  // First pretend we haven't finished running the query, second time around its completed.
  when(mockJob.isDone()).thenReturn(false).thenReturn(true);
  when(mockJob.getJobId()).thenReturn(jobId);
  when(mockJobStatus.getError()).thenReturn(null);
  when(mockJob.getStatus()).thenReturn(mockJobStatus);

  List<FieldValueList> resultSet = new ArrayList<>(1);
  resultSet.add(TestBigQueryDelegate.createTestValues());

  when(mockResult.getSchema()).thenReturn(TestBigQueryDelegate.createTestSchema());
  when(mockResult.iterateAll()).thenReturn(resultSet);
  when(mockResult.getValues()).thenReturn(resultSet);

  when(mockJob.getQueryResults()).thenReturn(mockResult);

  when(mockBigquery.create((JobInfo)any())).thenReturn(mockJob);
  when(mockBigquery.cancel(jobId)).thenReturn(true);

  BigQuerySource bigquerySource = spy(new BigQuerySource(conf));
  doReturn(mockBigquery).when(bigquerySource).getBigQuery(any());
  doReturn(mockResult).when(bigquerySource).runQuery(any(), anyLong());

  SourceRunner runner = new SourceRunner.Builder(BigQueryDSource.class, bigquerySource)
      .addOutputLane("lane")
      .build();

  try {
    runner.runInit();

    StageRunner.Output output = runner.runProduce(null, 1000);
    List<Record> records = output.getRecords().get("lane");
    assertEquals(1, records.size());
    assertEquals(10, records.get(0).get().getValueAsListMap().size());
    assertEquals("nested string", records.get(0).get("/j/x").getValueAsString());
    assertEquals("z", records.get(0).get("/j/y/z").getValueAsString());
  } finally {
    runner.runDestroy();
  }
}