Java Code Examples for com.streamsets.pipeline.api.StageException#getErrorCode()

The following examples show how to use com.streamsets.pipeline.api.StageException#getErrorCode(). All of them come from the StreamSets Data Collector project; the original source file is listed above each example.
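All of these examples share one pattern: catch a StageException and rebuild a stage-specific exception from its error code and message parameters, so the original error code survives the translation. As a quick orientation, here is a minimal sketch of that pattern; MyStageException and doStageWork() are hypothetical stand-ins, while StageException, getErrorCode() and getParams() are the actual Data Collector API used throughout the examples below.

try {
  doStageWork();
} catch (StageException e) {
  // getErrorCode() returns the stage's ErrorCode and getParams() the message
  // parameters, so the rethrown exception renders the same error message.
  throw new MyStageException(e.getErrorCode(), e.getParams());
}
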
Example 1
Source File: HiveMetastoreUtil.java    From datacollector with Apache License 2.0
/**
 *  Generates an Avro schema from column name and type information. The typeInfo
 *  map in the first parameter must carry precision and scale information in its
 *  values (HiveTypeInfo). The second parameter, qualifiedName, becomes the name
 *  of the generated Avro schema.
 * @param typeInfo  record structure
 * @param qualifiedName qualified name that becomes the name of the generated Avro schema
 * @return String Avro schema
 * @throws HiveStageCheckedException if the schema cannot be generated
 */
public static String generateAvroSchema(Map<String, HiveTypeInfo> typeInfo, String qualifiedName)
    throws HiveStageCheckedException {
  Utils.checkNotNull(typeInfo, "Error TypeInfo cannot be null");
  // Avro doesn't allow "`" in names, so drop backticks from the qualified name
  AvroHiveSchemaGenerator gen = new AvroHiveSchemaGenerator(qualifiedName.replace("`", ""));
  try {
    return gen.inferSchema(typeInfo);
  } catch (StageException e) {
    // Rethrow as a checked exception so that any error generating the Avro schema
    // results in an OnRecordErrorException and the record is routed to the error lane.
    throw new HiveStageCheckedException(e.getErrorCode(), e.getParams());
  }
}
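Note that the error code and message parameters are carried over unchanged, so the rethrown HiveStageCheckedException reports exactly the error that the schema generator raised.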
 
Example 2
Source File: AvroMessageGenerator.java    From datacollector with Apache License 2.0
@Override
public void writeRecord(Record record) throws IOException, DataGeneratorException {
  try {
    datumWriter.write(
        AvroTypeUtil.sdcRecordToAvro(record, schema, defaultValueMap),
        binaryEncoder
    );
  } catch (StageException e) {
    throw new DataGeneratorException(e.getErrorCode(), e.getParams()); // params include the cause
  }
}
 
Example 3
Source File: AvroDataOutputStreamGenerator.java    From datacollector with Apache License 2.0
@Override
protected void writeRecord(Record record) throws IOException, DataGeneratorException {
  try {
    dataFileWriter.append((GenericRecord)AvroTypeUtil.sdcRecordToAvro(record, schema, defaultValueMap));
  } catch (StageException e) {
    throw new DataGeneratorException(e.getErrorCode(), e.getParams()); // params include the cause
  }
}
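Examples 2 and 3 are nearly identical: both generators delegate record conversion to AvroTypeUtil.sdcRecordToAvro and translate any StageException into a DataGeneratorException that keeps the original error code and parameters.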
 
Example 4
Source File: WrapperDataParserFactory.java    From datacollector with Apache License 2.0
Throwable normalizeException(Throwable ex) {
  if (!(ex instanceof IOException) && !(ex instanceof DataParserException)) {
    if (ex.getCause() != null) {
      ex = ex.getCause();
      if (ex instanceof IOException || ex instanceof DataParserException) {
        return ex;
      }
      if (ex instanceof StageException) {
        // Preserve the stage's error code and message parameters instead of
        // wrapping them under the generic parser error below.
        StageException seCause = (StageException) ex;
        return new DataParserException(seCause.getErrorCode(), seCause.getParams());
      }
    }
    ex = new DataParserException(Errors.DATA_PARSER_02, ex.toString(), ex);
  }
  return ex;
}
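The early returns are what make the conversion worthwhile: without them, the DataParserException built from the StageException's error code would immediately be wrapped again under the generic DATA_PARSER_02 code.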
 
Example 5
Source File: JdbcUtil.java    From datacollector with Apache License 2.0
/**
 * Writes records to a JDBC destination using the record writer selected by key,
 * and routes any failed records to the error record handler.
 *
 * @param recordIterator iterator of SDC records
 * @param key key used to select the record writer
 * @param recordWriters JDBC record writer cache
 * @param errorRecordHandler error record handler
 * @param perRecord whether to write record-by-record instead of in batches
 * @throws StageException if writing fails with a stage-level error
 */
public <T> void write(
    Iterator<Record> recordIterator,
    T key,
    LoadingCache<T, JdbcRecordWriter> recordWriters,
    ErrorRecordHandler errorRecordHandler,
    boolean perRecord
) throws StageException {
  final JdbcRecordWriter jdbcRecordWriter;
  try {
    jdbcRecordWriter = recordWriters.getUnchecked(key);
  } catch (UncheckedExecutionException ex) {
    final Throwable throwable = ex.getCause();
    final ErrorCode errorCode;
    final Object[] messageParams;
    if (throwable instanceof StageException) {
      StageException stageEx = (StageException) throwable;
      errorCode = stageEx.getErrorCode();
      messageParams = stageEx.getParams();
    } else {
      errorCode = JdbcErrors.JDBC_301;
      messageParams = new Object[] {ex.getMessage(), ex.getCause()};
    }
    // Failed to create RecordWriter, report all as error records.
    while (recordIterator.hasNext()) {
      Record record = recordIterator.next();
      errorRecordHandler.onError(new OnRecordErrorException(record, errorCode, messageParams));
    }
    return;
  }
  List<OnRecordErrorException> errors = perRecord
      ? jdbcRecordWriter.writePerRecord(recordIterator)
      : jdbcRecordWriter.writeBatch(recordIterator);

  for (OnRecordErrorException error : errors) {
    errorRecordHandler.onError(error);
  }
}
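If the record writer cannot be created at all, the method does not fail the whole batch with a thrown exception; instead each record is passed to the error record handler, tagged with the StageException's original error code (or JDBC_301 for other failures), so the records follow the stage's configured error handling.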
 
Example 6
Source File: TestBigQueryDelegate.java    From datacollector with Apache License 2.0
@Test(expected = StageException.class)
public void runQueryTimeout() throws Exception {
  QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder("SELECT * FROM [sample:table] LIMIT 1000")
      .setUseQueryCache(true)
      .setUseLegacySql(useLegacySql)
      .build();

  TableResult mockQueryResponse = mock(TableResult.class);
  Job mockJob = mock(Job.class);
  JobStatus mockJobStatus = mock(JobStatus.class);

  // First pretend the query hasn't finished running; the second time around it's completed.
  when(mockJob.isDone()).thenReturn(false).thenReturn(true);
  when(mockJob.getJobId()).thenReturn(jobId);
  when(mockJobStatus.getError()).thenReturn(null);
  when(mockJob.getStatus()).thenReturn(mockJobStatus);

  when(mockBigquery.create((JobInfo)any())).thenReturn(mockJob);
  when(mockBigquery.cancel(jobId)).thenReturn(true);
  when(mockJob.getQueryResults()).thenReturn(mockQueryResponse);

  BigQueryDelegate delegate = new BigQueryDelegate(
      mockBigquery,
      useLegacySql,
      Clock.offset(Clock.systemDefaultZone(), Duration.ofSeconds(2))
  );

  ErrorCode code = null;
  try {
    delegate.runQuery(queryConfig, 1000, 1000);
  } catch (StageException e) {
    code = e.getErrorCode();
    throw e;
  } finally {
    assertEquals(Errors.BIGQUERY_00, code);
  }
}
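Capturing the error code in the catch block and rethrowing lets the finally block assert the specific ErrorCode (Errors.BIGQUERY_00 here) while @Test(expected = StageException.class) still verifies the exception type. Example 7 uses the same pattern for the query-error case.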
 
Example 7
Source File: TestBigQueryDelegate.java    From datacollector with Apache License 2.0
@Test(expected = StageException.class)
public void runQueryHasErrors() throws Exception {
  QueryJobConfiguration queryRequest = QueryJobConfiguration.newBuilder("SELECT * FROM [sample:table] LIMIT 1000")
      .setUseQueryCache(true)
      .setUseLegacySql(useLegacySql)
      .build();

  TableResult mockQueryResponse = mock(TableResult.class);
  Job mockJob = mock(Job.class);
  JobStatus mockJobStatus = mock(JobStatus.class);

  // Here the job reports done immediately; the failure comes from the error in the mocked job status below.
  when(mockJob.isDone()).thenReturn(true);
  when(mockJob.getJobId()).thenReturn(jobId);

  when(mockJob.getQueryResults()).thenReturn(mockQueryResponse);
  when(mockJobStatus.getError()).thenReturn(new BigQueryError(
      "Some Error",
      "Some Location",
      "Some Error Message"
  ));
  when(mockJob.getStatus()).thenReturn(mockJobStatus);

  when(mockBigquery.create((JobInfo)any())).thenReturn(mockJob);
  when(mockBigquery.cancel(jobId)).thenReturn(true);

  BigQueryDelegate delegate = new BigQueryDelegate(mockBigquery, useLegacySql);

  ErrorCode code = null;
  try {
    delegate.runQuery(queryRequest, 1000, 1000);
  } catch (StageException e) {
    code = e.getErrorCode();
    throw e;
  } finally {
    assertEquals(Errors.BIGQUERY_02, code);
  }
}