com.google.api.services.bigquery.model.ErrorProto Java Examples

The following examples show how to use com.google.api.services.bigquery.model.ErrorProto. They are drawn from open-source projects; the source file and project for each example are listed above it.
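As the examples below show, ErrorProto is a simple data object with fluent setters (setReason, setLocation, setMessage, setDebugInfo) and matching getters. A minimal sketch of building and inspecting one, with illustrative field values only:

import com.google.api.services.bigquery.model.ErrorProto;

/** Builds an ErrorProto with illustrative values and returns a one-line summary of it. */
static String describeIllustrativeError() {
  ErrorProto error =
      new ErrorProto()
          .setReason("invalidQuery")
          .setLocation("query")
          .setMessage("Syntax error at [1:8]")
          .setDebugInfo("internal diagnostic detail, if provided");
  // The matching getters read the fields back, e.g. for logging.
  return String.format("%s: %s", error.getReason(), error.getMessage());
}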
Example #1
Source File: TextToBigQueryStreamingTest.java    From DataflowTemplates with Apache License 2.0
@Test
public void wrapBigQueryInsertErrorReturnsValidJSON() {
  TableRow testRow =
      new TableRow()
          .set(NAME_KEY, testPerson.name)
          .set(AGE_KEY, testPerson.age);
  InsertErrors insertErrors = new TableDataInsertAllResponse.InsertErrors();
  ErrorProto errorProto = new ErrorProto().setMessage(ERROR_MESSAGE);
  insertErrors.setErrors(ImmutableList.of(errorProto));
  TableReference tableReference = new TableReference();
  BigQueryInsertError bigQueryInsertError =
      new BigQueryInsertError(testRow.clone(), insertErrors, tableReference);
  String expected = GSON.toJson(testPerson);

  FailsafeElement<String, String> wrappedValue =
      TextToBigQueryStreaming.wrapBigQueryInsertError(bigQueryInsertError);
  String actualOriginalPayload = wrappedValue.getOriginalPayload();
  String actualPayload = wrappedValue.getPayload();
  String actualErrorMessage = wrappedValue.getErrorMessage();

  assertThat(actualOriginalPayload).isEqualTo(expected);
  assertThat(actualPayload).isEqualTo(expected);
  assertThat(actualErrorMessage).isEqualTo(GSON.toJson(insertErrors));
}
 
Example #2
Source File: BigQueryUtilsTest.java    From hadoop-connectors with Apache License 2.0
/**
 * Tests the waitForJobCompletion method of BigQueryUtils when the job returns an error.
 */
@Test
public void testWaitForJobCompletionError()
    throws InterruptedException, IOException {
  // Return completed job.
  when(mockJobsGet.execute()).thenReturn(job);

  // Set error result to not null.
  jobStatus.setErrorResult(new ErrorProto());

  // Run waitForJobCompletion and assert failure.
  IOException e =
      assertThrows(
          IOException.class,
          () ->
              BigQueryUtils.waitForJobCompletion(
                  mockBigQuery, projectId, jobReference, mockProgressable));
  assertThat(e).hasMessageThat().contains(jobReference.getJobId());
}
 
Example #3
Source File: BigqueryConnection.java    From nomulus with Apache License 2.0
/**
 * Checks completed job for errors.
 *
 * @throws BigqueryJobFailureException if the job completed with an error result
 */
private static Job checkJob(Job job) {
  verify(job.getStatus() != null);
  JobStatus jobStatus = job.getStatus();
  if (jobStatus.getErrorResult() != null) {
    throw BigqueryJobFailureException.create(jobStatus);
  } else {
    logger.atInfo().log(summarizeCompletedJob(job));
    if (jobStatus.getErrors() != null) {
      for (ErrorProto error : jobStatus.getErrors()) {
        logger.atWarning().log("%s: %s", error.getReason(), error.getMessage());
      }
    }
    return job;
  }
}
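As an illustrative sketch (not part of the original source), a job that would take the failure branch in checkJob can be assembled directly from the model classes; the field values here are made up:

Job failedJob =
    new Job()
        .setStatus(
            new JobStatus()
                .setState("DONE")
                .setErrorResult(new ErrorProto().setMessage("Job failed"))
                .setErrors(
                    ImmutableList.of(
                        new ErrorProto().setReason("invalidQuery").setMessage("Syntax error"))));

// checkJob(failedJob) would throw BigqueryJobFailureException.create(failedJob.getStatus()),
// because the status carries a non-null error result.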
 
Example #4
Source File: BigqueryJobFailureException.java    From nomulus with Apache License 2.0
@Override
public String getMessage() {
  StringBuilder result = new StringBuilder();
  result.append(String.format("%s: %s", getClass().getSimpleName(), super.getMessage()));
  try {
    if (jobStatus != null) {
      for (ErrorProto error : jobStatus.getErrors()) {
        result.append("\n---------------------------------- BEGIN DEBUG INFO\n");
        result.append(describeError(error));
        result.append('\n');
        result.append(error.getDebugInfo());
        result.append("\n---------------------------------- END DEBUG INFO");
      }
    }
    if (jsonError != null) {
      String extraInfo = jsonError.toPrettyString();
      result.append('\n');
      result.append(extraInfo);
    }
  } catch (IOException e) {
    result.append(e);
  }
  return result.toString();
}
 
Example #5
Source File: BigQueryInsertErrorCoderTest.java    From beam with Apache License 2.0
@Test
public void testDecodeEncodeEqual() throws Exception {
  BigQueryInsertError value =
      new BigQueryInsertError(
          new TableRow().setF(Collections.singletonList(new TableCell().setV("Value"))),
          new TableDataInsertAllResponse.InsertErrors()
              .setIndex(0L)
              .setErrors(
                  Collections.singletonList(
                      new ErrorProto()
                          .setReason("a Reason")
                          .setLocation("A location")
                          .setMessage("A message")
                          .setDebugInfo("The debug info"))),
          new TableReference()
              .setProjectId("dummy-project-id")
              .setDatasetId("dummy-dataset-id")
              .setTableId("dummy-table-id"));

  CoderProperties.coderDecodeEncodeEqual(TEST_CODER, value);
}
 
Example #6
Source File: InsertRetryPolicy.java    From beam with Apache License 2.0
/** Retry all failures except for known persistent errors. */
public static InsertRetryPolicy retryTransientErrors() {
  return new InsertRetryPolicy() {
    @Override
    public boolean shouldRetry(Context context) {
      if (context.getInsertErrors().getErrors() != null) {
        for (ErrorProto error : context.getInsertErrors().getErrors()) {
          if (error.getReason() != null && PERSISTENT_ERRORS.contains(error.getReason())) {
            return false;
          }
        }
      }
      return true;
    }
  };
}
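A usage sketch (not part of the original source) that exercises this policy directly; it assumes Beam's InsertRetryPolicy.Context can be constructed from a TableDataInsertAllResponse.InsertErrors, as Beam's own tests do:

/** Illustrates which reasons retryTransientErrors() retries; the reason strings match Examples #14 and #15. */
static void demonstrateRetryTransientErrors() {
  InsertRetryPolicy policy = InsertRetryPolicy.retryTransientErrors();

  // A transient failure such as a timeout is retried.
  TableDataInsertAllResponse.InsertErrors transientFailure =
      new TableDataInsertAllResponse.InsertErrors()
          .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout")));
  boolean retriesTimeout = policy.shouldRetry(new InsertRetryPolicy.Context(transientFailure)); // true

  // A known persistent error such as "invalidQuery" is not retried (see Example #15).
  TableDataInsertAllResponse.InsertErrors persistentFailure =
      new TableDataInsertAllResponse.InsertErrors()
          .setErrors(ImmutableList.of(new ErrorProto().setReason("invalidQuery")));
  boolean retriesInvalidQuery = policy.shouldRetry(new InsertRetryPolicy.Context(persistentFailure)); // false
}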
 
Example #7
Source File: BigQueryServicesImplTest.java    From beam with Apache License 2.0
/** Tests that {@link BigQueryServicesImpl.JobServiceImpl#pollJob} returns a job whose status contains an error result. */
@Test
public void testPollJobFailed() throws IOException, InterruptedException {
  Job testJob = new Job();
  testJob.setStatus(new JobStatus().setState("DONE").setErrorResult(new ErrorProto()));

  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200);
  when(response.getContent()).thenReturn(toStream(testJob));

  BigQueryServicesImpl.JobServiceImpl jobService =
      new BigQueryServicesImpl.JobServiceImpl(bigquery);
  JobReference jobRef = new JobReference().setProjectId("projectId").setJobId("jobId");
  Job job = jobService.pollJob(jobRef, Sleeper.DEFAULT, BackOff.ZERO_BACKOFF);

  assertEquals(testJob, job);
  verify(response, times(1)).getStatusCode();
  verify(response, times(1)).getContent();
  verify(response, times(1)).getContentType();
}
 
Example #8
Source File: ErrorConvertersTest.java    From DataflowTemplates with Apache License 2.0
/**
 * Generates an {@link InsertErrors} for use in a {@link BigQueryInsertError}.
 *
 * @param error the error message to set on the generated {@link InsertErrors}
 */
private static InsertErrors getInsertErrors(String error) {
  InsertErrors insertErrors = new TableDataInsertAllResponse.InsertErrors();
  ErrorProto errorProto = new ErrorProto().setMessage(error);
  insertErrors.setErrors(Lists.newArrayList(errorProto));
  return insertErrors;
}
 
Example #9
Source File: BqJobRunner.java    From digdag with Apache License 2.0
private static String toPrettyString(ErrorProto error)
{
    try {
        return error.toPrettyString();
    }
    catch (IOException e) {
        return "<json error>";
    }
}
 
Example #10
Source File: BqJobRunner.java    From digdag with Apache License 2.0
private static Map<String, String> errorProperties(List<ErrorProto> errors)
{
    return ImmutableMap.of(
            "errors", errors.stream()
                    .map(BqJobRunner::toPrettyString)
                    .collect(Collectors.joining(", ")));
}
 
Example #11
Source File: BigqueryPollJobActionTest.java    From nomulus with Apache License 2.0
@Test
public void testJobFailed() throws Exception {
  when(bigqueryJobsGet.execute()).thenReturn(new Job().setStatus(
      new JobStatus()
          .setState("DONE")
          .setErrorResult(new ErrorProto().setMessage("Job failed"))));
  action.run();
  assertLogMessage(
      logHandler, SEVERE, String.format("Bigquery job failed - %s:%s", PROJECT_ID, JOB_ID));
  assertNoTasksEnqueued(CHAINED_QUEUE_NAME);
}
 
Example #12
Source File: InsertRetryPolicyTest.java    From beam with Apache License 2.0
@Override
public boolean shouldRetry(Context context) {
  if (context.getInsertErrors().getErrors() != null) {
    for (ErrorProto error : context.getInsertErrors().getErrors()) {
      if (error.getReason() != null && error.getReason().equals("invalidQuery")) {
        return false;
      }
    }
  }
  return true;
}
 
Example #13
Source File: InsertRetryPolicyTest.java    From beam with Apache License 2.0
private TableDataInsertAllResponse.InsertErrors generateErrorAmongMany(
    int numErrors, String baseReason, String exceptionalReason) {
  // The retry policies are expected to search through the entire list of ErrorProtos to determine
  // whether to retry. Stick the exceptionalReason in a random position to exercise this.
  List<ErrorProto> errorProtos = Lists.newArrayListWithExpectedSize(numErrors);
  int exceptionalPosition = ThreadLocalRandom.current().nextInt(numErrors);
  for (int i = 0; i < numErrors; ++i) {
    ErrorProto error = new ErrorProto();
    error.setReason((i == exceptionalPosition) ? exceptionalReason : baseReason);
    errorProtos.add(error);
  }
  TableDataInsertAllResponse.InsertErrors errors = new TableDataInsertAllResponse.InsertErrors();
  errors.setErrors(errorProtos);
  return errors;
}
 
Example #14
Source File: BigQueryIOWriteTest.java    From beam with Apache License 2.0
@Test
public void testFailuresNoRetryPolicy() throws Exception {
  TableRow row1 = new TableRow().set("name", "a").set("number", "1");
  TableRow row2 = new TableRow().set("name", "b").set("number", "2");
  TableRow row3 = new TableRow().set("name", "c").set("number", "3");

  TableDataInsertAllResponse.InsertErrors ephemeralError =
      new TableDataInsertAllResponse.InsertErrors()
          .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout")));

  fakeDatasetService.failOnInsert(
      ImmutableMap.of(
          row1, ImmutableList.of(ephemeralError, ephemeralError),
          row2, ImmutableList.of(ephemeralError, ephemeralError)));

  p.apply(Create.of(row1, row2, row3))
      .apply(
          BigQueryIO.writeTableRows()
              .to("project-id:dataset-id.table-id")
              .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
              .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS)
              .withSchema(
                  new TableSchema()
                      .setFields(
                          ImmutableList.of(
                              new TableFieldSchema().setName("name").setType("STRING"),
                              new TableFieldSchema().setName("number").setType("INTEGER"))))
              .withTestServices(fakeBqServices)
              .withoutValidation());
  p.run();

  assertThat(
      fakeDatasetService.getAllRows("project-id", "dataset-id", "table-id"),
      containsInAnyOrder(row1, row2, row3));
}
 
Example #15
Source File: BigQueryIOWriteTest.java    From beam with Apache License 2.0
@Test
public void testRetryPolicy() throws Exception {
  TableRow row1 = new TableRow().set("name", "a").set("number", "1");
  TableRow row2 = new TableRow().set("name", "b").set("number", "2");
  TableRow row3 = new TableRow().set("name", "c").set("number", "3");

  TableDataInsertAllResponse.InsertErrors ephemeralError =
      new TableDataInsertAllResponse.InsertErrors()
          .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout")));
  TableDataInsertAllResponse.InsertErrors persistentError =
      new TableDataInsertAllResponse.InsertErrors()
          .setErrors(ImmutableList.of(new ErrorProto().setReason("invalidQuery")));

  fakeDatasetService.failOnInsert(
      ImmutableMap.of(
          row1, ImmutableList.of(ephemeralError, ephemeralError),
          row2, ImmutableList.of(ephemeralError, ephemeralError, persistentError)));

  PCollection<TableRow> failedRows =
      p.apply(Create.of(row1, row2, row3))
          .apply(
              BigQueryIO.writeTableRows()
                  .to("project-id:dataset-id.table-id")
                  .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
                  .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS)
                  .withSchema(
                      new TableSchema()
                          .setFields(
                              ImmutableList.of(
                                  new TableFieldSchema().setName("name").setType("STRING"),
                                  new TableFieldSchema().setName("number").setType("INTEGER"))))
                  .withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors())
                  .withTestServices(fakeBqServices)
                  .withoutValidation())
          .getFailedInserts();
  // row2 finally fails with a non-retryable error, so we expect to see it in the collection of
  // failed rows.
  PAssert.that(failedRows).containsInAnyOrder(row2);
  p.run();

  // Only row1 and row3 were successfully inserted.
  assertThat(
      fakeDatasetService.getAllRows("project-id", "dataset-id", "table-id"),
      containsInAnyOrder(row1, row3));
}
 
Example #16
Source File: BigqueryJobFailureException.java    From nomulus with Apache License 2.0
private static String describeError(ErrorProto error) {
  return String.format("%s: %s", error.getReason(), error.getMessage());
}
 
Example #17
Source File: FakeJobService.java    From beam with Apache License 2.0
private JobStatus runLoadJob(JobReference jobRef, JobConfigurationLoad load)
    throws InterruptedException, IOException {
  TableReference destination = load.getDestinationTable();
  TableSchema schema = load.getSchema();
  checkArgument(schema != null, "No schema specified");
  List<ResourceId> sourceFiles = filesForLoadJobs.get(jobRef.getProjectId(), jobRef.getJobId());
  WriteDisposition writeDisposition = WriteDisposition.valueOf(load.getWriteDisposition());
  CreateDisposition createDisposition = CreateDisposition.valueOf(load.getCreateDisposition());

  Table existingTable = datasetService.getTable(destination);
  if (!validateDispositions(existingTable, createDisposition, writeDisposition)) {
    return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
  }
  if (existingTable == null) {
    TableReference strippedDestination =
        destination
            .clone()
            .setTableId(BigQueryHelpers.stripPartitionDecorator(destination.getTableId()));
    existingTable = new Table().setTableReference(strippedDestination).setSchema(schema);
    if (load.getTimePartitioning() != null) {
      existingTable = existingTable.setTimePartitioning(load.getTimePartitioning());
    }
    if (load.getClustering() != null) {
      existingTable = existingTable.setClustering(load.getClustering());
    }
    datasetService.createTable(existingTable);
  }

  List<TableRow> rows = Lists.newArrayList();
  for (ResourceId filename : sourceFiles) {
    if (load.getSourceFormat().equals("NEWLINE_DELIMITED_JSON")) {
      rows.addAll(readJsonTableRows(filename.toString()));
    } else if (load.getSourceFormat().equals("AVRO")) {
      rows.addAll(readAvroTableRows(filename.toString(), schema));
    }
  }

  datasetService.insertAll(destination, rows, null);
  FileSystems.delete(sourceFiles);
  return new JobStatus().setState("DONE");
}
 
Example #18
Source File: FakeJobService.java    From beam with Apache License 2.0
private JobStatus runCopyJob(JobConfigurationTableCopy copy)
    throws InterruptedException, IOException {
  List<TableReference> sources = copy.getSourceTables();
  TableReference destination = copy.getDestinationTable();
  WriteDisposition writeDisposition = WriteDisposition.valueOf(copy.getWriteDisposition());
  CreateDisposition createDisposition = CreateDisposition.valueOf(copy.getCreateDisposition());
  Table existingTable = datasetService.getTable(destination);
  if (!validateDispositions(existingTable, createDisposition, writeDisposition)) {
    return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
  }
  TimePartitioning partitioning = null;
  Clustering clustering = null;
  TableSchema schema = null;
  boolean first = true;
  List<TableRow> allRows = Lists.newArrayList();
  for (TableReference source : sources) {
    Table table = checkNotNull(datasetService.getTable(source));
    if (!first) {
      if (!Objects.equals(partitioning, table.getTimePartitioning())) {
        return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
      }
      if (!Objects.equals(clustering, table.getClustering())) {
        return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
      }
      if (!Objects.equals(schema, table.getSchema())) {
        return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
      }
    }
    partitioning = table.getTimePartitioning();
    clustering = table.getClustering();
    schema = table.getSchema();
    first = false;
    allRows.addAll(
        datasetService.getAllRows(
            source.getProjectId(), source.getDatasetId(), source.getTableId()));
  }
  datasetService.createTable(
      new Table()
          .setTableReference(destination)
          .setSchema(schema)
          .setTimePartitioning(partitioning)
          .setClustering(clustering)
          .setEncryptionConfiguration(copy.getDestinationEncryptionConfiguration()));
  datasetService.insertAll(destination, allRows, null);
  return new JobStatus().setState("DONE");
}
 
Example #19
Source File: BigQueryIOWriteTest.java    From beam with Apache License 2.0
@Test
public void testExtendedErrorRetrieval() throws Exception {
  TableRow row1 = new TableRow().set("name", "a").set("number", "1");
  TableRow row2 = new TableRow().set("name", "b").set("number", "2");
  TableRow row3 = new TableRow().set("name", "c").set("number", "3");
  String tableSpec = "project-id:dataset-id.table-id";

  TableDataInsertAllResponse.InsertErrors ephemeralError =
      new TableDataInsertAllResponse.InsertErrors()
          .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout")));
  TableDataInsertAllResponse.InsertErrors persistentError =
      new TableDataInsertAllResponse.InsertErrors()
          .setErrors(Lists.newArrayList(new ErrorProto().setReason("invalidQuery")));

  fakeDatasetService.failOnInsert(
      ImmutableMap.of(
          row1, ImmutableList.of(ephemeralError, ephemeralError),
          row2, ImmutableList.of(ephemeralError, ephemeralError, persistentError)));

  PCollection<BigQueryInsertError> failedRows =
      p.apply(Create.of(row1, row2, row3))
          .apply(
              BigQueryIO.writeTableRows()
                  .to(tableSpec)
                  .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
                  .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS)
                  .withSchema(
                      new TableSchema()
                          .setFields(
                              ImmutableList.of(
                                  new TableFieldSchema().setName("name").setType("STRING"),
                                  new TableFieldSchema().setName("number").setType("INTEGER"))))
                  .withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors())
                  .withTestServices(fakeBqServices)
                  .withoutValidation()
                  .withExtendedErrorInfo())
          .getFailedInsertsWithErr();

  // row2 finally fails with a non-retryable error, so we expect to see it in the collection of
  // failed rows.
  PAssert.that(failedRows)
      .containsInAnyOrder(
          new BigQueryInsertError(
              row2, persistentError, BigQueryHelpers.parseTableSpec(tableSpec)));
  p.run();

  // Only row1 and row3 were successfully inserted.
  assertThat(
      fakeDatasetService.getAllRows("project-id", "dataset-id", "table-id"),
      containsInAnyOrder(row1, row3));
}
 
Example #20
Source File: BigQueryHelpersTest.java    From beam with Apache License 2.0
@Test
public void testPendingJobManager() throws Exception {
  PendingJobManager jobManager =
      new PendingJobManager(
          BackOffAdapter.toGcpBackOff(
              FluentBackoff.DEFAULT
                  .withMaxRetries(Integer.MAX_VALUE)
                  .withInitialBackoff(Duration.millis(10))
                  .withMaxBackoff(Duration.millis(10))
                  .backoff()));

  Set<String> succeeded = Sets.newHashSet();
  for (int i = 0; i < 5; i++) {
    Job currentJob = new Job();
    currentJob.setKind(" bigquery#job");
    PendingJob pendingJob =
        new PendingJob(
            retryId -> {
              if (new Random().nextInt(2) == 0) {
                throw new RuntimeException("Failing to start.");
              }
              currentJob.setJobReference(
                  new JobReference()
                      .setProjectId("")
                      .setLocation("")
                      .setJobId(retryId.getJobId()));
              return null;
            },
            retryId -> {
              if (retryId.getRetryIndex() < 5) {
                currentJob.setStatus(new JobStatus().setErrorResult(new ErrorProto()));
              } else {
                currentJob.setStatus(new JobStatus().setErrorResult(null));
              }
              return currentJob;
            },
            retryId -> {
              if (retryId.getJobId().equals(currentJob.getJobReference().getJobId())) {
                return currentJob;
              } else {
                return null;
              }
            },
            100,
            "JOB_" + i);
    jobManager.addPendingJob(
        pendingJob,
        j -> {
          succeeded.add(j.currentJobId.getJobId());
          return null;
        });
  }

  jobManager.waitForDone();
  Set<String> expectedJobs =
      ImmutableSet.of("JOB_0-5", "JOB_1-5", "JOB_2-5", "JOB_3-5", "JOB_4-5");
  assertEquals(expectedJobs, succeeded);
}
 
Example #21
Source File: BigQueryServicesImplTest.java    From beam with Apache License 2.0
/** Tests that {@link DatasetServiceImpl#insertAll} uses the supplied {@link ErrorContainer}. */
@Test
public void testExtendedErrorRetrieval() throws InterruptedException, IOException {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<ValueInSingleWindow<TableRow>> rows =
      ImmutableList.of(
          wrapValue(new TableRow().set("a", 1)), wrapValue(new TableRow().set("b", 2)));

  final TableDataInsertAllResponse failures =
      new TableDataInsertAllResponse()
          .setInsertErrors(
              ImmutableList.of(
                  new InsertErrors()
                      .setIndex(0L)
                      .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout"))),
                  new InsertErrors()
                      .setIndex(1L)
                      .setErrors(ImmutableList.of(new ErrorProto().setReason("invalid")))));

  final List<ValueInSingleWindow<BigQueryInsertError>> expected =
      ImmutableList.of(
          wrapValue(
              new BigQueryInsertError(
                  rows.get(0).getValue(), failures.getInsertErrors().get(0), ref)),
          wrapValue(
              new BigQueryInsertError(
                  rows.get(1).getValue(), failures.getInsertErrors().get(1), ref)));

  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200);
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);

  when(response.getContent()).thenReturn(toStream(failures));

  DatasetServiceImpl dataService =
      new DatasetServiceImpl(bigquery, PipelineOptionsFactory.create());

  List<ValueInSingleWindow<BigQueryInsertError>> failedInserts = Lists.newArrayList();
  dataService.insertAll(
      ref,
      rows,
      null,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
      new MockSleeper(),
      InsertRetryPolicy.neverRetry(),
      failedInserts,
      ErrorContainer.BIG_QUERY_INSERT_ERROR_ERROR_CONTAINER,
      false,
      false,
      false);

  assertThat(failedInserts, is(expected));
}
 
Example #22
Source File: BigQueryServicesImplTest.java    From beam with Apache License 2.0
/** Tests that {@link DatasetServiceImpl#insertAll} uses the supplied {@link ErrorContainer}. */
@Test
public void testSimpleErrorRetrieval() throws InterruptedException, IOException {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<ValueInSingleWindow<TableRow>> rows =
      ImmutableList.of(
          wrapValue(new TableRow().set("a", 1)), wrapValue(new TableRow().set("b", 2)));

  final TableDataInsertAllResponse failures =
      new TableDataInsertAllResponse()
          .setInsertErrors(
              ImmutableList.of(
                  new InsertErrors()
                      .setIndex(0L)
                      .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout"))),
                  new InsertErrors()
                      .setIndex(1L)
                      .setErrors(ImmutableList.of(new ErrorProto().setReason("invalid")))));

  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200);
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);

  when(response.getContent()).thenReturn(toStream(failures));

  DatasetServiceImpl dataService =
      new DatasetServiceImpl(bigquery, PipelineOptionsFactory.create());

  List<ValueInSingleWindow<TableRow>> failedInserts = Lists.newArrayList();
  dataService.insertAll(
      ref,
      rows,
      null,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
      new MockSleeper(),
      InsertRetryPolicy.neverRetry(),
      failedInserts,
      ErrorContainer.TABLE_ROW_ERROR_CONTAINER,
      false,
      false,
      false);

  assertThat(failedInserts, is(rows));
}
 
Example #23
Source File: BigQueryServicesImplTest.java    From beam with Apache License 2.0
/**
 * Tests that {@link DatasetServiceImpl#insertAll} uses the supplied {@link InsertRetryPolicy},
 * and returns the list of rows not retried.
 */
@Test
public void testInsertRetryPolicy() throws InterruptedException, IOException {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<ValueInSingleWindow<TableRow>> rows =
      ImmutableList.of(wrapValue(new TableRow()), wrapValue(new TableRow()));

  // First time row0 fails with a retryable error, and row1 fails with a persistent error.
  final TableDataInsertAllResponse firstFailure =
      new TableDataInsertAllResponse()
          .setInsertErrors(
              ImmutableList.of(
                  new InsertErrors()
                      .setIndex(0L)
                      .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout"))),
                  new InsertErrors()
                      .setIndex(1L)
                      .setErrors(ImmutableList.of(new ErrorProto().setReason("invalid")))));

  // Second time there is only one row, which fails with a retryable error.
  final TableDataInsertAllResponse secondFailure =
      new TableDataInsertAllResponse()
          .setInsertErrors(
              ImmutableList.of(
                  new InsertErrors()
                      .setIndex(0L)
                      .setErrors(ImmutableList.of(new ErrorProto().setReason("timeout")))));

  // On the final attempt, no failures are returned.
  final TableDataInsertAllResponse allRowsSucceeded = new TableDataInsertAllResponse();

  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  // Always return 200.
  when(response.getStatusCode()).thenReturn(200);
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200).thenReturn(200);

  // Fail on the first two attempts, then succeed on the third.
  when(response.getContent())
      .thenReturn(toStream(firstFailure))
      .thenReturn(toStream(secondFailure))
      .thenReturn(toStream(allRowsSucceeded));

  DatasetServiceImpl dataService =
      new DatasetServiceImpl(bigquery, PipelineOptionsFactory.create());

  List<ValueInSingleWindow<TableRow>> failedInserts = Lists.newArrayList();
  dataService.insertAll(
      ref,
      rows,
      null,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
      new MockSleeper(),
      InsertRetryPolicy.retryTransientErrors(),
      failedInserts,
      ErrorContainer.TABLE_ROW_ERROR_CONTAINER,
      false,
      false,
      false);
  assertEquals(1, failedInserts.size());
  expectedLogs.verifyInfo("Retrying 1 failed inserts to BigQuery");
}
 
Example #24
Source File: BigQueryServicesImplTest.java    From beam with Apache License 2.0
/** Tests that {@link DatasetServiceImpl#insertAll} retries selected rows on failure. */
@Test
public void testInsertRetrySelectRows() throws Exception {
  TableReference ref =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  List<ValueInSingleWindow<TableRow>> rows =
      ImmutableList.of(
          wrapValue(new TableRow().set("row", "a")), wrapValue(new TableRow().set("row", "b")));
  List<String> insertIds = ImmutableList.of("a", "b");

  final TableDataInsertAllResponse bFailed =
      new TableDataInsertAllResponse()
          .setInsertErrors(
              ImmutableList.of(
                  new InsertErrors().setIndex(1L).setErrors(ImmutableList.of(new ErrorProto()))));

  final TableDataInsertAllResponse allRowsSucceeded = new TableDataInsertAllResponse();

  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200).thenReturn(200);
  when(response.getContent())
      .thenReturn(toStream(bFailed))
      .thenReturn(toStream(allRowsSucceeded));

  DatasetServiceImpl dataService =
      new DatasetServiceImpl(bigquery, PipelineOptionsFactory.create());
  dataService.insertAll(
      ref,
      rows,
      insertIds,
      BackOffAdapter.toGcpBackOff(TEST_BACKOFF.backoff()),
      new MockSleeper(),
      InsertRetryPolicy.alwaysRetry(),
      null,
      null,
      false,
      false,
      false);
  verify(response, times(2)).getStatusCode();
  verify(response, times(2)).getContent();
  verify(response, times(2)).getContentType();
}