Java Code Examples for com.streamsets.pipeline.api.StageException

The following examples show how to use com.streamsets.pipeline.api.StageException. They are extracted from open source projects; the source project and file are noted above each example so you can look it up in its original context.
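
As background for the examples, here is a minimal, self-contained sketch of the core pattern: a stage defines an enum implementing com.streamsets.pipeline.api.ErrorCode and throws StageException with that code plus message parameters. The Errors enum, message text, and class name below are illustrative only, not taken from the projects that follow.

import com.streamsets.pipeline.api.ErrorCode;
import com.streamsets.pipeline.api.StageException;

public class StageExceptionSketch {

  // Illustrative error code; real stages define enums such as Errors.HTTP_01 in the examples below.
  enum Errors implements ErrorCode {
    DEMO_01("Failed to process input '{}'");

    private final String msg;

    Errors(String msg) {
      this.msg = msg;
    }

    @Override
    public String getCode() {
      return name();
    }

    @Override
    public String getMessage() {
      return msg;
    }
  }

  static void process(String input) throws StageException {
    if (input == null) {
      // StageException(ErrorCode, Object... params) formats the params into the message.
      throw new StageException(Errors.DEMO_01, "null input");
    }
  }

  public static void main(String[] args) {
    try {
      process(null);
    } catch (StageException ex) {
      System.out.println(ex.getErrorCode().getCode() + ": " + ex.getMessage());
    }
  }
}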
Example 1
Source Project: datacollector   Source File: TestForceSourceUpgrader.java    License: Apache License 2.0
@Test
public void testUpgradeV1toV2() throws StageException {
  StageUpgrader.Context context = Mockito.mock(StageUpgrader.Context.class);
  Mockito.doReturn(1).when(context).getFromVersion();
  Mockito.doReturn(2).when(context).getToVersion();

  List<Config> configs = new ArrayList<>();

  ForceSourceUpgrader forceSourceUpgrader = new ForceSourceUpgrader();
  forceSourceUpgrader.upgrade(configs, context);

  Assert.assertEquals(1, configs.size());
  Config config = configs.get(0);
  Assert.assertEquals("forceConfig.subscriptionType", config.getName());
  Assert.assertEquals(SubscriptionType.PUSH_TOPIC, config.getValue());
}
 
Example 2
Source Project: datacollector   Source File: ScriptingProcessorTestUtil.java    License: Apache License 2.0
public static <C extends Processor> void verifyErrorRecordStopPipeline(
    Class<C> clazz,
    Processor processor
) throws StageException {
  ProcessorRunner runner = new ProcessorRunner.Builder(clazz, processor)
    .setOnRecordError(OnRecordError.STOP_PIPELINE)
    .addOutputLane("lane")
    .build();

  Record record = RecordCreator.create();
  record.set(Field.create("Not Important"));

  runner.runInit();
  try {
    runner.runProcess(Collections.singletonList(record));
    Assert.fail("Expected exception");
  } catch(Exception e) {
    Assert.assertTrue(e.toString(), e.toString().contains("Script sent record to error"));
  } finally {
    runner.runDestroy();
  }
}
 
Example 3
Source Project: datacollector   Source File: HttpTarget.java    License: Apache License 2.0
public List<SDCMetricsJson> getRecordsToWrite() throws StageException {
  List<SDCMetricsJson> sdcMetricsJsonList = new ArrayList<>();
  Record tempRecord = null;
  try {
    for (Record currentRecord : sdcIdToRecordMap.values()) {
      tempRecord = currentRecord;
      SDCMetricsJson sdcMetricsJson = createSdcMetricJson(currentRecord);
      sdcMetricsJsonList.add(sdcMetricsJson);
    }
  } catch (IOException e) {
    errorRecordHandler.onError(
        new OnRecordErrorException(
            tempRecord,
            Errors.HTTP_01,
            tempRecord.getHeader().getSourceId(),
            e.toString(),
            e
        )
    );
  }
  return sdcMetricsJsonList;
}
 
Example 4
Source Project: datacollector   Source File: HttpProcessor.java    License: Apache License 2.0
/**
 * Creates a field of HTTP response headers to set on the SDC Record at the configured field path.
 *
 * @param record Record to populate with response headers.
 * @param response HTTP response
 * @throws StageException if the field path already exists
 */
private Field createResponseHeaderField(Record record, Response response) throws StageException {
  if (record.has(conf.headerOutputField) || conf.headerOutputField.equals(conf.outputField)) {
    throw new StageException(Errors.HTTP_11, getResponseStatus(response), conf.headerOutputField);
  }
  Map<String, Field> headers = new HashMap<>(response.getStringHeaders().size());

  for (Map.Entry<String, List<String>> entry : response.getStringHeaders().entrySet()) {
    if (!entry.getValue().isEmpty()) {
      String firstValue = entry.getValue().get(0);
      headers.put(entry.getKey(), Field.create(firstValue));
    }
  }

  return Field.create(headers);
}
 
Example 5
Source Project: datacollector   Source File: TestWaveAnalyticsUpgrader.java    License: Apache License 2.0
@Test
@Ignore
public void testUpgradeV1toV2AppendTimestampTrue() throws StageException {
  List<Config> configs = new ArrayList<>();
  StageUpgrader.Context context = Mockito.mock(StageUpgrader.Context.class);
  Mockito.doReturn(1).when(context).getFromVersion();
  Mockito.doReturn(2).when(context).getToVersion();

  configs.add(new Config(WAVE_ANALYTICS_APPEND_TIMESTAMP, true));

  WaveAnalyticsUpgrader waveAnalyticsUpgrader = new WaveAnalyticsUpgrader();
  waveAnalyticsUpgrader.upgrade(configs, context);

  Assert.assertEquals(1, configs.size());
  Config config = configs.get(0);
  Assert.assertEquals(WAVE_ANALYTICS_APPEND_TIMESTAMP, config.getName());
  Assert.assertEquals(true, config.getValue());
}
 
Example 6
Source Project: datacollector   Source File: StartJobSource.java    License: Apache License 2.0
@Override
public String produce(String s, int i, BatchMaker batchMaker) throws StageException {
  Executor executor = Executors.newCachedThreadPool();
  List<CompletableFuture<Field>> startJobFutures = startJobCommon.getStartJobFutures(executor, null);
  try {
    LinkedHashMap<String, Field> outputField = startJobCommon.startJobInParallel(startJobFutures);
    Record outputRecord = CommonUtil.createOrchestratorTaskRecord(
        null,
        getContext(),
        conf.taskName,
        outputField
    );
    batchMaker.addRecord(outputRecord);
  } catch (Exception ex) {
    LOG.error(ex.toString(), ex);
    errorRecordHandler.onError(StartJobErrors.START_JOB_08, ex.toString(), ex);
  }
  return null;
}
 
Example 7
Source Project: datacollector   Source File: TestCouchbaseTarget.java    License: Apache License 2.0
@Test
public void shouldUseDefaultOperationForUnsupportedCdcOperation() throws StageException {
  CouchbaseTargetConfig config = getDefaultConfig();
  config.defaultWriteOperation = WriteOperationType.REPLACE;
  config.unsupportedOperation = UnsupportedOperationType.DEFAULT;

  TargetRunner runner = getMockedTargetRunner(config, ReplaceResponse.class);
  if(runner == null) return;

  runner.runInit();
  runner.runWrite(getTestRecord("999"));
  runner.runDestroy();

  assertTrue(runner.getErrorRecords().isEmpty());
  assertTrue(runner.getErrors().isEmpty());
}
 
Example 8
Source Project: datacollector   Source File: HttpClientSource.java    License: Apache License 2.0
/**
 * Used only for HEAD requests.  Sets up a record for output based on headers only
 * with an empty body.
 *
 * @param batchMaker batch to add records to.
 * @return the next source offset to commit
 * @throws StageException if an unhandled error is encountered
 */
String parseHeadersOnly(BatchMaker batchMaker) throws StageException {
  HttpSourceOffset sourceOffset = new HttpSourceOffset(
          getResolvedUrl(),
          currentParameterHash,
          System.currentTimeMillis(),
          getCurrentPage()
  );

  Record record = getContext().createRecord(sourceOffset + "::0");
  addResponseHeaders(record.getHeader());
  record.set(Field.create(new HashMap()));

  batchMaker.addRecord(record);
  recordCount++;
  incrementSourceOffset(sourceOffset, 1);
  lastRequestCompletedTime = System.currentTimeMillis();
  return sourceOffset.toString();
}
 
Example 9
Source Project: datacollector   Source File: ProtobufDataParserFactory.java    License: Apache License 2.0
public ProtobufDataParserFactory(Settings settings) throws StageException {
  super(settings);
  this.protoDescriptorFile = settings.getConfig(ProtobufConstants.PROTO_DESCRIPTOR_FILE_KEY);
  this.messageType = settings.getConfig(ProtobufConstants.MESSAGE_TYPE_KEY);
  this.isDelimited = settings.getConfig(ProtobufConstants.DELIMITED_KEY);
  messageTypeToExtensionMap = new HashMap<>();
  defaultValueMap = new HashMap<>();
  // Get the descriptor for the expected message type
  descriptor = ProtobufTypeUtil.getDescriptor(
    settings.getContext(),
    protoDescriptorFile,
    messageType,
    messageTypeToExtensionMap,
    defaultValueMap
  );

  // Build the extension registry based on the cached extension map
  extensionRegistry = ExtensionRegistry.newInstance();
  for(Map.Entry<String, Set<Descriptors.FieldDescriptor>> e : messageTypeToExtensionMap.entrySet()) {
    Set<Descriptors.FieldDescriptor> value = e.getValue();
    for (Descriptors.FieldDescriptor f : value) {
      extensionRegistry.add(f);
    }
  }
}
 
Example 10
Source Project: datacollector   Source File: ActiveRecordWriters.java    License: Apache License 2.0
public synchronized void closeAll() throws StageException {
  if (IS_TRACE_ENABLED) {
    LOG.trace("Close all '{}'", toString());
  }
  if(writers != null) {
    for (RecordWriter writer : writers.values()) {
      writer.closeLock();
      try {
        if (!writer.isClosed()) {
          manager.commitWriter(writer);
        }
      } catch (IOException ex) {
        String msg = Utils.format("Error closing writer {} : {}", writer, ex);
        LOG.warn(msg, ex);
      } finally {
        writer.closeUnlock();
      }
    }
  }
  writers = null;
  cutOffQueue = null;
}
 
Example 11
Source Project: datacollector   Source File: HdfsTarget.java    License: Apache License 2.0
protected void emptyBatch() throws StageException {
  setBatchTime();
  try {
    hdfsTargetConfigBean.getUGI().doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        hdfsTargetConfigBean.getCurrentWriters().purge();
        if (hdfsTargetConfigBean.getLateWriters() != null) {
          hdfsTargetConfigBean.getLateWriters().purge();
        }
        return null;
      }
    });
  } catch (Exception ex) {
    throw throwStageException(ex);
  }
}
 
Example 12
Source Project: datacollector   Source File: SourcePipe.java    License: Apache License 2.0
protected Map<String, Object> finishBatchAndCalculateMetrics(
  long startTimeInStage,
  PipeBatch pipeBatch,
  BatchMakerImpl batchMaker,
  BatchImpl batchImpl,
  ErrorSink errorSink,
  EventSink eventSink,
  String newOffset
) throws StageException {
  statsCollector.incrementRecordCount(batchMaker.getSize());

  return super.finishBatchAndCalculateMetrics(
    startTimeInStage,
    pipeBatch,
    batchMaker,
    batchImpl,
    errorSink,
    eventSink,
    newOffset
  );
}
 
Example 13
Source Project: datacollector   Source File: RedisDTargetUpgrader.java    License: Apache License 2.0
@Override
public List<Config> upgrade(
    String library,
    String stageName,
    String stageInstance,
    int fromVersion,
    int toVersion,
    List<Config> configs
) throws StageException {
  switch(fromVersion) {
    case 1:
      upgradeV1ToV2(configs);
      break;
    default:
      throw new IllegalStateException(Utils.format("Unexpected fromVersion {}", fromVersion));
  }
  return configs;
}
 
Example 14
Source Project: datacollector   Source File: ElasticsearchSource.java    License: Apache License 2.0
private JsonObject getResults(String scrollId) throws StageException {
  HttpEntity entity = new StringEntity(
      String.format("{\"scroll\":\"%s\",\"scroll_id\":\"%s\"}", conf.cursorTimeout, scrollId),
      ContentType.APPLICATION_JSON
  );

  try {
    Response response = delegate.performRequest("POST",
        "/_search/scroll",
        conf.params,
        entity,
        delegate.getAuthenticationHeader(conf.securityConfig.securityUser.get())
    );

    return parseEntity(response.getEntity());
  } catch (IOException e) {
    LOG.debug("Expired scroll_id: '{}'", scrollId);
    LOG.error(Errors.ELASTICSEARCH_23.getMessage(), e);
    throw new StageException(Errors.ELASTICSEARCH_23);
  }
}
 
Example 15
Source Project: datacollector   Source File: TestDatabricksJobExecutor.java    License: Apache License 2.0
@Test
public void testRunError() throws Exception {
  Map<Object, Object> mapWithJob =
      ImmutableMap.of("settings", ImmutableMap.of("spark_jar_task", 1000));
  doReturn(mapWithJob).when(listResponse).readEntity(Map.class);
  doReturn(200).when(listResponse).getStatus();

  doReturn(500).when(runResponse).getStatus();
  executor = new FakeDatabricksJobExecutor(configBean);
  ExecutorRunner runner = new ExecutorRunner.Builder(DatabricksJobLauncherDExecutor.class, executor)
      .setOnRecordError(OnRecordError.TO_ERROR)
      .build();
  runner.runInit();
  try {
    runner.runWrite(ImmutableList.of(RecordCreator.create()));
    Assert.fail();
  } catch (StageException ex) {
    Assert.assertEquals(Errors.DATABRICKS_06, ex.getErrorCode());
  }
  runner.runDestroy();
}
 
Example 16
Source Project: datacollector   Source File: HttpProcessor.java    License: Apache License 2.0
/**
 * Populates HTTP response headers to the configured location
 *
 * @param record current record to populate
 * @param response HTTP response
 * @throws StageException when writing headers to a field path that already exists
 */
private Field createResponseHeaders(Record record, Response response) throws StageException {
  if (conf.headerOutputLocation == HeaderOutputLocation.NONE) {
    return null;
  }

  Record.Header header = record.getHeader();
  header.setAttribute(REQUEST_STATUS_CONFIG_NAME, String.format("%d", response.getStatus()));

  if (conf.headerOutputLocation == HeaderOutputLocation.FIELD) {
    return createResponseHeaderField(record, response);
  } else if (conf.headerOutputLocation == HeaderOutputLocation.HEADER) {
    createResponseHeaderToRecordHeader(response, header);
    return null;
  }
  return null;
}
 
Example 17
Source Project: datacollector   Source File: JmsMessageConsumerImpl.java    License: Apache License 2.0
@Override
public int take(BatchMaker batchMaker, Source.Context context, int batchSize, long messageIndex)
throws StageException {
  long start = System.currentTimeMillis();
  int numMessagesConsumed = 0;
  while (System.currentTimeMillis() - start < basicConfig.maxWaitTime && numMessagesConsumed < batchSize) {
    if (IS_TRACE_ENABLED) {
      LOG.trace("Attempting to take up to '{}' messages", (batchSize - numMessagesConsumed));
    }
    try {
      Message message = messageConsumer.receive(POLL_INTERVAL);
      if (message != null) {
        if (IS_TRACE_ENABLED) {
          LOG.trace("Got message: {}", message);
        }
        String messageId = jmsConfig.destinationName + "::" + messageIndex;
        int consumed = jmsMessageConverter.convert(batchMaker, context, messageId, message);
        messageIndex += consumed;
        numMessagesConsumed += consumed;
      }
    } catch (JMSException ex) {
      throw new StageException(JmsErrors.JMS_07, ex.toString(), ex);
    }
  }
  return numMessagesConsumed;
}
 
Example 18
Source Project: datacollector   Source File: HiveMetastoreUtil.java    License: Apache License 2.0
public static Connection getHiveConnection(
    final String jdbcUrl,
    final UserGroupInformation loginUgi,
    final List<ConnectionPropertyBean> driverProperties
) throws StageException {

  Properties resolvedDriverProperties = new Properties();
  for(ConnectionPropertyBean bean : driverProperties) {
    resolvedDriverProperties.setProperty(bean.property, bean.value.get());
  }

  try {
    return loginUgi.doAs((PrivilegedExceptionAction<Connection>) () -> DriverManager.getConnection(jdbcUrl, resolvedDriverProperties));
  } catch (Exception e) {
    LOG.error("Failed to connect to Hive with JDBC URL:" + jdbcUrl, e);
    throw new StageException(Errors.HIVE_22, jdbcUrl, e.getMessage());
  }
}
 
Example 19
Source Project: datacollector   Source File: TestSpoolDirSourceUpgrader.java    License: Apache License 2.0
@Test
public void testSpoolDirSourceUpgrader() throws StageException {
  SpoolDirSourceUpgrader spoolDirSourceUpgrader = new SpoolDirSourceUpgrader();

  List<Config> upgrade = spoolDirSourceUpgrader.upgrade("x", "y", "z", 1, 7, new ArrayList<Config>());
  assertEquals(9, upgrade.size());
  assertEquals("conf.dataFormatConfig.compression", upgrade.get(0).getName());
  assertEquals("NONE", upgrade.get(0).getValue());
  assertEquals("conf.dataFormatConfig.csvCustomDelimiter", upgrade.get(1).getName());
  assertEquals('|', upgrade.get(1).getValue());
  assertEquals("conf.dataFormatConfig.csvCustomEscape", upgrade.get(2).getName());
  assertEquals('\\', upgrade.get(2).getValue());
  assertEquals("conf.dataFormatConfig.csvCustomQuote", upgrade.get(3).getName());
  assertEquals('\"', upgrade.get(3).getValue());
  assertEquals("conf.dataFormatConfig.csvRecordType", upgrade.get(4).getName());
  assertEquals("LIST", upgrade.get(4).getValue());
  assertEquals("conf.dataFormatConfig.filePatternInArchive", upgrade.get(5).getName());
  assertEquals("*", upgrade.get(5).getValue());
  assertEquals("conf.dataFormatConfig.csvSkipStartLines", upgrade.get(6).getName());
  assertEquals(0, upgrade.get(6).getValue());
  assertEquals("conf.allowLateDirectory", upgrade.get(7).getName());
  assertEquals(false, upgrade.get(7).getValue());
  assertEquals("conf.useLastModified", upgrade.get(8).getName());
  assertEquals(FileOrdering.LEXICOGRAPHICAL.name(), upgrade.get(8).getValue());
}
 
Example 20
Source Project: datacollector   Source File: AvroHiveSchemaGenerator.java    License: Apache License 2.0
/**
 * Takes a record structure as a Map<String, HiveTypeInfo>, generates an Avro schema,
 * and returns it as a String.
 * @param record record structure
 * @return String representation of the Avro schema
 * @throws StageException if the record contains an unsupported type
 */
@Override
public String inferSchema(Map<String, HiveTypeInfo> record)
    throws StageException
{
  Map<String, Schema> fields = new LinkedHashMap<>();
  for(Map.Entry<String, HiveTypeInfo> pair:  record.entrySet()) {
    if(!HiveMetastoreUtil.validateObjectName(pair.getKey())) {
      throw new HiveStageCheckedException(Errors.HIVE_30, pair.getKey());
    }
    Schema columnSchema = Schema.createUnion(ImmutableList.of(Schema.create(Schema.Type.NULL), traverse(pair)));
    // We always set default value to null
    columnSchema.addProp("default", NullNode.getInstance());
    fields.put(pair.getKey(), columnSchema);
  }
  Schema schema =  buildSchema(fields);
  return schema.toString();
}
 
Example 21
protected String persistPem(String pem) throws StageException {
  validatePem(pem);
  try {
    File pemFile = TempFile.createFile(".pem");
    try (Writer writer = new FileWriter(pemFile)) {
      writer.write(pem);
    }
    return pemFile.getAbsolutePath();
  } catch (IOException ex) {
    throw new StageException(Errors.POSTGRES_03, ex.getMessage());
  }
}
 
Example 22
@Override
public List<Config> upgrade(String library, String stageName, String stageInstance, int fromVersion, int toVersion,
                            List<Config> configs) throws StageException {
  switch(fromVersion) {
    case 1:
      upgradeV1ToV2(configs);
      if (toVersion == 2) {
        break;
      }
      // fall through
    case 2:
      upgradeV2ToV3(configs);
      if (toVersion == 3) {
        break;
      }
      // fall through
    case 3:
      upgradeV3ToV4(configs);
      if (toVersion == 4) {
        break;
      }
      // fall through
    case 4:
      upgradeV4ToV5(configs);
      break;
    default:
      throw new IllegalStateException(Utils.format("Unexpected fromVersion {}", fromVersion));
  }
  return configs;
}
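
The fall-through switch above applies upgrades cumulatively: entering at fromVersion and falling through successive cases runs every intermediate step until toVersion is reached. A standalone sketch of the same pattern, with purely illustrative version steps and no StreamSets dependencies:

public class FallThroughUpgradeSketch {

  // Hypothetical upgrade: each case applies one version step, then falls through to the next.
  static int upgrade(int fromVersion, int toVersion, int value) {
    switch (fromVersion) {
      case 1:
        value += 10;   // V1 -> V2
        if (toVersion == 2) {
          break;
        }
        // fall through
      case 2:
        value *= 2;    // V2 -> V3
        break;
      default:
        throw new IllegalStateException("Unexpected fromVersion " + fromVersion);
    }
    return value;
  }

  public static void main(String[] args) {
    // Upgrading from V1 to V3 applies both steps: (1 + 10) * 2 = 22.
    System.out.println(upgrade(1, 3, 1));
  }
}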
 
Example 23
private void testV11toV12(String expectedCredentialsMode) throws StageException {
  Mockito.doReturn(11).when(context).getFromVersion();
  Mockito.doReturn(12).when(context).getToVersion();

  configs = upgrader.upgrade(configs, context);

  UpgraderTestUtils.assertExists(configs, "s3TargetConfigBean.s3Config.usePathAddressModel", true);
  UpgraderTestUtils.assertExists(
      configs,
      "s3TargetConfigBean.s3Config.awsConfig.credentialMode",
      expectedCredentialsMode
  );
}
 
Example 24
Source Project: datacollector   Source File: TestDateTimeColumnHandler.java    License: Apache License 2.0
@Test
public void testTimestampWithFieldAttribute() throws StageException {
  DateTimeColumnHandler handler = new DateTimeColumnHandler(ZoneId.of("GMT"), false);
  Field field = handler.getDateTimeStampField(
      "dt",
      "TO_TIMESTAMP('2018-04-10 02:15:10.654321')",
      Types.TIMESTAMP
  );
  Assert.assertEquals(Field.Type.DATETIME, field.getType());
  Assert.assertEquals("2018-04-10 02:15:10", dateFormat.format(field.getValueAsDatetime()));
  Assert.assertEquals("321000", field.getAttribute("nanoSeconds"));
}
 
Example 25
Source Project: datacollector   Source File: AWSUtil.java    License: Apache License 2.0
public static ClientConfiguration getClientConfiguration(ProxyConfig config) throws StageException {
  ClientConfiguration clientConfig = new ClientConfiguration();

  clientConfig.setConnectionTimeout(config.connectionTimeout * MILLIS);
  clientConfig.setSocketTimeout(config.socketTimeout * MILLIS);
  clientConfig.withMaxErrorRetry(config.retryCount);

  // Optional proxy settings
  if (config.useProxy) {
    if (config.proxyHost != null && !config.proxyHost.isEmpty()) {
      clientConfig.setProxyHost(config.proxyHost);
      clientConfig.setProxyPort(config.proxyPort);

      if (config.proxyUser != null && !config.proxyUser.get().isEmpty()) {
        clientConfig.setProxyUsername(config.proxyUser.get());
      }

      if (config.proxyPassword != null && !config.proxyPassword.get().isEmpty()) {
        clientConfig.setProxyPassword(config.proxyPassword.get());
      }

      if (config.proxyDomain != null && !config.proxyDomain.isEmpty()) {
        clientConfig.setProxyDomain(config.proxyDomain);
      }

      if (config.proxyWorkstation != null && !config.proxyWorkstation.isEmpty()) {
        clientConfig.setProxyWorkstation(config.proxyWorkstation);
      }
    }
  }
  return clientConfig;
}
 
Example 26
@Override
public List<Config> upgrade(String library, String stageName, String stageInstance, int fromVersion, int toVersion,
                            List<Config> configs) throws StageException {
  switch(fromVersion) {
    case 1:
      upgradeV1ToV2(configs);
      break;
    default:
      throw new IllegalStateException(Utils.format("Unexpected fromVersion {}", fromVersion));
  }
  return configs;
}
 
Example 27
Source Project: datacollector   Source File: JdbcQueryExecutor.java    License: Apache License 2.0
private void processSerially(ErrorRecordHandler errorRecordHandler, Batch batch) throws StageException {
  ELVars variables = getContext().createELVars();
  ELEval eval = getContext().createELEval(config.getQueriesVariableName());

  Iterator<Record> it = batch.getRecords();
  try (Connection connection = config.getConnection()) {
    while (it.hasNext()) {
      Record record = it.next();
      RecordEL.setRecordInContext(variables, record);

      for (String query : config.getQueries()) {
        if (!query.trim().isEmpty()) {
          processARecord(connection, errorRecordHandler, eval.eval(variables, query, String.class), record);
        }
      }
    }

    if (config.isBatchCommit()) {
      connection.commit();
    }

  } catch (SQLException ex) {
    LOG.error("Can't get connection", ex);
    throw new StageException(QueryExecErrors.QUERY_EXECUTOR_002, ex.getMessage());
  }

}
 
Example 28
Source Project: datacollector   Source File: RecordWriterManager.java    License: Apache License 2.0
public RecordWriter getWriter(Date now, Date recordDate, Record record) throws StageException, IOException {
  RecordWriter writer = null;
  long writerTimeToLive = getTimeToLiveMillis(now, recordDate);
  Path tempPath = getPath(recordDate, record);
  if (writerTimeToLive >= 0) {
    if (fs.exists(tempPath)) {
      fsHelper.handleAlreadyExistingFile(fs, tempPath);
    }
    LOG.debug("Path[{}] - Create writer,  time to live '{}ms'", tempPath, writerTimeToLive);
    writer = createWriter(fs, tempPath, writerTimeToLive);
  } else {
    LOG.warn("Path[{}] - Cannot create writer, requested date already cut off", tempPath);
  }
  return writer;
}
 
Example 29
Source Project: datacollector   Source File: TestCouchbaseTarget.java    License: Apache License 2.0
@Test
public void shouldFailNegativeDisconnectTimeout() throws StageException {
  CouchbaseTargetConfig config = getDefaultConfig();
  config.couchbase.disconnectTimeout = -1;

  TargetRunner runner = getTargetRunner(config);
  List<Target.ConfigIssue> issues = runner.runValidateConfigs();
  runner.runDestroy();

  assertTrue(issues.stream().anyMatch(configIssue -> configIssue.toString().contains("COUCHBASE_32")));
}
 
Example 30
Source Project: datacollector   Source File: TableContextUtil.java    License: Apache License 2.0
public Map<String, TableContext> listCTTablesForConfig(
    Connection connection,
    CTTableConfigBean tableConfigBean
) throws SQLException, StageException {
  Map<String, TableContext> tableContextMap = new LinkedHashMap<>();
  Pattern p =
      StringUtils.isEmpty(tableConfigBean.tableExclusionPattern)?
          null : Pattern.compile(tableConfigBean.tableExclusionPattern);

  long currentVersion = getCurrentVersion(connection);

  try (ResultSet rs = jdbcUtil.getTableMetadata(connection, tableConfigBean.schema, tableConfigBean.tablePattern)) {
    while (rs.next()) {
      String schemaName = rs.getString(TABLE_METADATA_TABLE_SCHEMA_CONSTANT);
      String tableName = rs.getString(TABLE_METADATA_TABLE_NAME_CONSTANT);
      if (p == null || !p.matcher(tableName).matches()) {
        // validate table is change tracking enabled
        try {
          long min_valid_version = validateTable(connection, schemaName, tableName);
          if (min_valid_version <= currentVersion) {
            tableContextMap.put(
                getQualifiedTableName(schemaName, tableName),
                createCTTableContext(connection, schemaName, tableName, tableConfigBean, currentVersion)
            );
          } else {
            LOG.debug(JdbcErrors.JDBC_200.getMessage(), schemaName + "." + tableName);
          }
        } catch (SQLException | StageException e) {
          LOG.debug(JdbcErrors.JDBC_200.getMessage(), schemaName + "." + tableName);
        }
      }
    }
  }

  return tableContextMap;
}