Java Code Examples for com.streamsets.pipeline.api.Record#getHeader()

The following examples show how to use com.streamsets.pipeline.api.Record#getHeader(). They are drawn from open source projects; the source file and license are noted above each example.
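As a quick orientation before the project examples, here is a minimal sketch of the core pattern (the helper class and method names are hypothetical, not taken from any project below): getHeader() returns the record's Record.Header, whose string attributes are read with getAttribute(String) and written with setAttribute(String, String).

import com.streamsets.pipeline.api.Record;

// Hypothetical helper; it uses only Record.Header methods demonstrated in the examples on this page.
public final class HeaderExample {
  private HeaderExample() {}

  // Copies a header attribute to a new name, if the source attribute is present.
  public static void copyAttribute(Record record, String from, String to) {
    Record.Header header = record.getHeader();
    String value = header.getAttribute(from); // null when the attribute is absent
    if (value != null) {
      header.setAttribute(to, value);
    }
  }
}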
Example 1
Source File: SqsConsumerWorkerCallable.java    From datacollector with Apache License 2.0
private void setSqsAttributesOnRecord(Message message, Record record, String queueUrl, String queueNamePrefix) {
  final Record.Header header = record.getHeader();

  switch (sqsAttributesOption) {
    case ALL:
      header.setAttribute(SQS_QUEUE_URL_ATTRIBUTE, queueUrl);
      Optional.of(message.getMessageAttributes()).ifPresent(attrs -> {
        attrs.forEach((name, val) -> {
          final String stringValue = val.getStringValue();
          if (stringValue != null) {
            header.setAttribute(SQS_MESSAGE_ATTRIBUTE_PREFIX + name, stringValue);
          }
        });
      });
      final String body = message.getBody();
      if (body != null) {
        header.setAttribute(SQS_MESSAGE_BODY_ATTRIBUTE, body);
      }
      final String bodyMd5 = message.getMD5OfBody();
      if (bodyMd5 != null) {
        header.setAttribute(SQS_MESSAGE_BODY_MD5_ATTRIBUTE, bodyMd5);
      }
      final String attrsMd5 = message.getMD5OfMessageAttributes();
      if (attrsMd5 != null) {
        header.setAttribute(SQS_MESSAGE_ATTRIBUTE_MD5_ATTRIBUTE, attrsMd5);
      }
      // fall through: ALL intentionally also sets the BASIC attributes below
    case BASIC:
      header.setAttribute(SQS_MESSAGE_ID_ATTRIBUTE, message.getMessageId());
      header.setAttribute(SQS_QUEUE_NAME_PREFIX_ATTRIBUTE, queueNamePrefix);
      header.setAttribute(SQS_REGION_ATTRIBUTE, awsRegionLabel);
      break;
    case NONE:
      // empty block
      break;
  }
}
 
Example 2
Source File: TestJdbcMetadata.java    From datacollector with Apache License 2.0
@NotNull
private Record makeListRecord(Map<String, Pair<Field.Type, Object>> fieldMap) {
  Record record = RecordCreator.create();
  Record.Header header = record.getHeader();
  ArrayList<Field> fields = new ArrayList<>();
  for (Map.Entry<String, Pair<Field.Type, Object>> entry : fieldMap.entrySet()) {
    String fieldName = entry.getKey();
    Field.Type fieldType = entry.getValue().getLeft();
    Field field = Field.create(fieldType, entry.getValue().getRight());
    if (fieldType == Field.Type.DECIMAL) {
      field.setAttribute(HeaderAttributeConstants.ATTR_SCALE, SCALE);
      field.setAttribute(HeaderAttributeConstants.ATTR_PRECISION, PRECISION);
    }
    fields.add(field);
  }
  record.set(Field.create(fields));
  header.setAttribute("table", tableName);
  return record;
}
 
Example 3
Source File: TestCompressionInputBuilder.java    From datacollector with Apache License 2.0
void checkHeader(CompressionDataParser.CompressionInput input, String fileName, String offset) throws Exception {
  Record record = RecordCreator.create();
  Record.Header header = record.getHeader();
  input.wrapRecordHeaders(header, offset);
  Assert.assertNotNull(
      record.getHeader().getAttribute(CompressionDataParser.CompressionInputBuilder.ArchiveInput.FILE_PATH_INSIDE_ARCHIVE)
  );
  Assert.assertNotNull(
      record.getHeader().getAttribute(CompressionDataParser.CompressionInputBuilder.ArchiveInput.FILE_NAME_INSIDE_ARCHIVE)
  );
  Assert.assertEquals(
      record.getHeader().getAttribute(CompressionDataParser.CompressionInputBuilder.ArchiveInput.FILE_NAME_INSIDE_ARCHIVE),
      fileName
  );
  Assert.assertNotNull(
      record.getHeader().getAttribute(CompressionDataParser.CompressionInputBuilder.ArchiveInput.FILE_OFFSET_INSIDER_ARCHIVE)
  );
  Assert.assertEquals(
      record.getHeader().getAttribute(CompressionDataParser.CompressionInputBuilder.ArchiveInput.FILE_OFFSET_INSIDER_ARCHIVE),
      offset
  );
}
 
Example 4
Source File: TestJdbcMetadata.java    From datacollector with Apache License 2.0
@NotNull
private Record makeRecord(Map<String, Pair<Field.Type, Object>> fieldMap) {
  Record record = RecordCreator.create();
  Record.Header header = record.getHeader();
  LinkedHashMap<String, Field> fields = new LinkedHashMap<>();
  for (Map.Entry<String, Pair<Field.Type, Object>> entry : fieldMap.entrySet()) {
    String fieldName = entry.getKey();
    Field.Type fieldType = entry.getValue().getLeft();
    Field field = Field.create(fieldType, entry.getValue().getRight());
    if (fieldType == Field.Type.DECIMAL) {
      field.setAttribute(HeaderAttributeConstants.ATTR_SCALE, SCALE);
      field.setAttribute(HeaderAttributeConstants.ATTR_PRECISION, PRECISION);
    }
    fields.put(fieldName, field);
  }
  record.set(Field.create(fields));
  header.setAttribute("table", tableName);
  return record;
}
 
Example 5
Source File: PubSubTarget.java    From datacollector with Apache License 2.0
private void publish(Record record) throws StageException {
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  try (DataGenerator generator = generatorFactory.getGenerator(os)) {
    generator.write(record);
  } catch (IOException | DataGeneratorException e) {
    errorRecordHandler.onError(new OnRecordErrorException(record, Errors.PUBSUB_06, e.toString(), e));
    return;
  }

  ByteString data = ByteString.copyFrom(os.toByteArray());

  Map<String, String> attributes = new HashMap<>();
  Record.Header header = record.getHeader();
  header.getAttributeNames().forEach(k -> attributes.put(k, header.getAttribute(k)));

  PubsubMessage message = PubsubMessage.newBuilder().setData(data).putAllAttributes(attributes).build();

  ApiFuture<String> messageIdFuture = publisher.publish(message);
  pendingMessages.add(new PendingMessage(record, messageIdFuture));
}
 
Example 6
Source File: TestDataLakeGeneratorManager.java    From datacollector with Apache License 2.0
@Test
public void testShouldRollWithRollHeader() throws Exception {
  final String rollHeaderName = "roll";
  final boolean rollIfHeader = true;
  final String dirPath = "";
  Record record = RecordCreator.create();
  Record.Header header = record.getHeader();
  header.setAttribute(rollHeaderName, rollHeaderName);

  DataLakeGeneratorManager dataLakeGeneratorManager = new DataLakeGeneratorManagerTestBuilder()
      .rollHeaderName(rollHeaderName)
      .rollIfHeader(rollIfHeader)
      .build();

  Assert.assertTrue(dataLakeGeneratorManager.shouldRoll(record, dirPath));
}
 
Example 7
Source File: HttpProcessor.java    From datacollector with Apache License 2.0
/**
 * Populates HTTP response headers to the configured location
 *
 * @param record current record to populate
 * @param response HTTP response
 * @throws StageException when writing headers to a field path that already exists
 */
private Field createResponseHeaders(Record record, Response response) throws StageException {
  if (conf.headerOutputLocation == HeaderOutputLocation.NONE) {
    return null;
  }

  Record.Header header = record.getHeader();
  header.setAttribute(REQUEST_STATUS_CONFIG_NAME, String.format("%d", response.getStatus()));

  if (conf.headerOutputLocation == HeaderOutputLocation.FIELD) {
    return createResponseHeaderField(record, response);
  } else if (conf.headerOutputLocation == HeaderOutputLocation.HEADER) {
    createResponseHeaderToRecordHeader(response, header);
    return null;
  }
  return null;
}
 
Example 8
Source File: RecordCloner.java    From datacollector with Apache License 2.0
/**
 * Kryo loads the RecordImpl in Spark's classloader. So this one clones it to this stage's classloader.
 *
 * @param record Record to be cloned
 * @param context The context of the {@linkplain Processor} to use to clone the record
 * @return Cloned record
 */
@SuppressWarnings("unchecked")
public static Record clone(Object record, Processor.Context context) {
  Record newRecord = context.createRecord("dummyId");
  try {
    Object origHeaders = record.getClass().getMethod("getHeader").invoke(record);
    Map<String, Object> headers =
        (Map<String, Object>) origHeaders.getClass().getMethod("getAllAttributes").invoke(origHeaders);
    Record.Header newHeaders = newRecord.getHeader();
    newHeaders.getClass().getMethod("overrideUserAndSystemAttributes", Map.class).invoke(newHeaders, headers);
    newRecord.set(RecordCloner.cloneField(record.getClass().getMethod("get").invoke(record)));
    return newRecord;
  } catch(Exception ex) {
    throw new RuntimeException(ex);
  }
}
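
For contrast, a hypothetical non-reflective variant: if the record were guaranteed to come from this stage's own classloader, the same copy could be written directly. Note that this sketch carries over only the user-visible attributes via getAttributeNames(), whereas the reflective call to overrideUserAndSystemAttributes above also copies system attributes.

public static Record cloneSameClassloader(Record record, Processor.Context context) {
  // Only valid when both sides share a classloader; otherwise use the reflective clone above.
  Record newRecord = context.createRecord("dummyId");
  Record.Header newHeader = newRecord.getHeader();
  record.getHeader().getAttributeNames().forEach(
      name -> newHeader.setAttribute(name, record.getHeader().getAttribute(name)));
  newRecord.set(record.get());
  return newRecord;
}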
 
Example 9
Source File: ChangeDataCaptureRecordCreator.java    From datacollector with Apache License 2.0
@Override
Record createRecord(String sourceId, LinkedHashMap<String, Field> map, Map<String, Object> payload) {
  Record record = context.createRecord(sourceId);

  // Process ChangeEventHeader since we need the object type
  String objectType = null;
  Record.Header recordHeader = record.getHeader();
  Map<String, Object> headers = (Map<String, Object>) payload.get(CHANGE_EVENT_HEADER);
  if (headers == null) {
    throw new StageException(Errors.FORCE_40);
  }

  // event data becomes header attributes
  // of the form salesforce.cdc.createdDate,
  // salesforce.cdc.type
  for (Map.Entry<String, Object> header : headers.entrySet()) {
    if ("recordIds".equals(header.getKey())) {
      // Turn list of record IDs into a comma-separated list
      recordHeader.setAttribute(HEADER_ATTRIBUTE_PREFIX + header.getKey(),
          String.join(",", (List<String>)header.getValue()));
    } else {
      recordHeader.setAttribute(HEADER_ATTRIBUTE_PREFIX + header.getKey(), header.getValue().toString());
      if ("changeType".equals(header.getKey())) {
        int operationCode = SFDC_TO_SDC_OPERATION.get(header.getValue().toString());
        recordHeader.setAttribute(OperationType.SDC_OPERATION_TYPE, String.valueOf(operationCode));
      } else if ("entityName".equals(header.getKey())) {
        objectType = header.getValue().toString();
        recordHeader.setAttribute(SOBJECT_TYPE_ATTRIBUTE, objectType);
      }
    }
  }
  payload.remove(CHANGE_EVENT_HEADER);

  createRecordFields(record, map, payload);

  return record;
}
 
Example 10
Source File: TestExpressionProcessor.java    From datacollector with Apache License 2.0
@Test
public void testHeaderExpression() throws StageException {

  HeaderAttributeConfig headerAttributeConfig1 = new HeaderAttributeConfig();
  headerAttributeConfig1.attributeToSet = "OPERATION";
  headerAttributeConfig1.headerAttributeExpression = "INSERT";

  HeaderAttributeConfig headerAttributeConfig2 = new HeaderAttributeConfig();
  headerAttributeConfig2.attributeToSet = "USER";
  headerAttributeConfig2.headerAttributeExpression = "HK";

  ProcessorRunner runner = new ProcessorRunner.Builder(ExpressionDProcessor.class)
    .addConfiguration("expressionProcessorConfigs", new ArrayList<>())
    .addConfiguration("headerAttributeConfigs", ImmutableList.of(headerAttributeConfig1, headerAttributeConfig2))
    .addOutputLane("a").build();
  runner.runInit();

  try {
    Map<String, Field> map = new LinkedHashMap<>();
    map.put("baseSalary", Field.create(Field.Type.DOUBLE, 100000.25));
    map.put("bonus", Field.create(Field.Type.INTEGER, 2000));
    map.put("perks", Field.create(Field.Type.SHORT, 200));
    Record record = RecordCreator.create("s", "s:1");
    record.getHeader().setAttribute("USER", "SS");
    record.set(Field.create(map));

    StageRunner.Output output = runner.runProcess(ImmutableList.of(record));
    Assert.assertEquals(1, output.getRecords().get("a").size());

    Record record1 = output.getRecords().get("a").get(0);
    Record.Header header = record1.getHeader();

    Assert.assertEquals("HK", header.getAttribute("USER"));
    Assert.assertEquals("INSERT", header.getAttribute("OPERATION"));

  } finally {
    runner.runDestroy();
  }
}
 
Example 11
Source File: HttpClientSourceIT.java    From datacollector with Apache License 2.0
/**
 * Grizzly might lower-case the header attribute names on some platforms/versions. That is,
 * however, correct, as RFC 2616 clearly states that header names are case-insensitive.
 *
 * @param record the record whose header attributes are read
 * @return the header attribute map with lower-cased names
 */
private Map<String, String> getLowerCaseHeaders(Record record) {
  Map<String, String> lowerCased = new HashMap<>();
  Record.Header header = record.getHeader();
  header.getAttributeNames().forEach(name -> lowerCased.put(
      name.toLowerCase(), header.getAttribute(name)));
  return lowerCased;
}
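
A hedged usage sketch (the attribute name and expected value are illustrative, not taken from the test above): once an origin has stored HTTP response headers as record header attributes, lookups against the lower-cased map succeed regardless of how the platform cased the names.

// Illustrative only; RecordCreator is the same test SDK factory used elsewhere on this page.
Record record = RecordCreator.create();
record.getHeader().setAttribute("Content-Type", "application/json");

Map<String, String> headers = getLowerCaseHeaders(record);
Assert.assertEquals("application/json", headers.get("content-type"));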
 
Example 12
Source File: HeaderImpl.java    From datacollector with Apache License 2.0
public void copyErrorFrom(Record record) {
  Record.Header header = record.getHeader();
  setError(
    header.getErrorStage(),
    header.getErrorStageLabel(),
    header.getErrorCode(),
    header.getErrorMessage(),
    header.getErrorTimestamp(),
    header.getErrorStackTrace()
  );
}
 
Example 13
Source File: JdbcUtil.java    From datacollector with Apache License 2.0
public void setColumnSpecificHeaders(
    Record record,
    Set<String> knownTableNames,
    ResultSetMetaData metaData,
    String jdbcNameSpacePrefix
) throws SQLException {
  Record.Header header = record.getHeader();
  Set<String> tableNames = new HashSet<>();

  for (int i=1; i<=metaData.getColumnCount(); i++) {
    header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".jdbcType", String.valueOf(metaData.getColumnType(i)));

    // Additional headers per various types
    switch(metaData.getColumnType(i)) {
      case Types.DECIMAL:
      case Types.NUMERIC:
        header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".scale", String.valueOf(metaData.getScale(i)));
        header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".precision", String.valueOf(metaData.getPrecision(i)));
        break;
    }

    String tableName = metaData.getTableName(i);

    // Store the column's table name (if not empty)
    if (StringUtils.isNotEmpty(tableName)) {
      tableNames.add(tableName);
    }
  }

  if (tableNames.isEmpty()) {
    tableNames.addAll(knownTableNames);
  }

  header.setAttribute(jdbcNameSpacePrefix + "tables", Joiner.on(",").join(tableNames));
}
 
Example 14
Source File: TestXmlCharDataParser.java    From datacollector with Apache License 2.0
private static void assertNamespacedBookRecordHeaders(Record record) {
  Record.Header header = record.getHeader();
  Assert.assertEquals(
      NAMESPACE1_URI,
      header.getAttribute(XmlCharDataParser.RECORD_ATTRIBUTE_NAMESPACE_PREFIX+NAMESPACE1_OUTPUT_PREFIX)
  );
  Assert.assertEquals(
      NAMESPACE2_URI,
      header.getAttribute(XmlCharDataParser.RECORD_ATTRIBUTE_NAMESPACE_PREFIX+NAMESPACE2_OUTPUT_PREFIX)
  );
  Assert.assertEquals(
      NAMESPACE3_URI,
      header.getAttribute(XmlCharDataParser.RECORD_ATTRIBUTE_NAMESPACE_PREFIX+NAMESPACE3_OUTPUT_PREFIX)
  );
}
 
Example 15
Source File: RabbitSource.java    From datacollector with Apache License 2.0
@Override
public String produce(String lastSourceOffset, int maxBatchSize, BatchMaker batchMaker) throws StageException {
  if (!isConnected() && !conf.advanced.automaticRecoveryEnabled) {
    // If we don't have automatic recovery enabled and the connection is closed, we should stop the pipeline.
    throw new StageException(Errors.RABBITMQ_05);
  }

  long maxTime = System.currentTimeMillis() + conf.basicConfig.maxWaitTime;
  int maxRecords = Math.min(maxBatchSize, conf.basicConfig.maxBatchSize);
  if (!getContext().isPreview() && checkBatchSize && conf.basicConfig.maxBatchSize > maxBatchSize) {
    getContext().reportError(Errors.RABBITMQ_11, maxBatchSize);
    checkBatchSize = false;
  }

  int numRecords = 0;
  String nextSourceOffset = lastSourceOffset;
  while (System.currentTimeMillis() < maxTime && numRecords < maxRecords) {
    try {
      RabbitMessage message = messages.poll(conf.basicConfig.maxWaitTime, TimeUnit.MILLISECONDS);
      if (message == null) {
        continue;
      }
      String recordId = message.getEnvelope().toString();
      List<Record> records = parseRabbitMessage(recordId, message.getBody());
      Envelope envelope = message.getEnvelope();
      for (Record record : records){
        BasicProperties properties = message.getProperties();
        Record.Header outHeader = record.getHeader();
        if (envelope != null) {
          setHeaderIfNotNull(outHeader, "deliveryTag", envelope.getDeliveryTag());
          setHeaderIfNotNull(outHeader, "exchange", envelope.getExchange());
          setHeaderIfNotNull(outHeader, "routingKey", envelope.getRoutingKey());
          setHeaderIfNotNull(outHeader, "redelivered", envelope.isRedeliver());
        }
        setHeaderIfNotNull(outHeader, "contentType", properties.getContentType());
        setHeaderIfNotNull(outHeader, "contentEncoding", properties.getContentEncoding());
        setHeaderIfNotNull(outHeader, "deliveryMode", properties.getDeliveryMode());
        setHeaderIfNotNull(outHeader, "priority", properties.getPriority());
        setHeaderIfNotNull(outHeader, "correlationId", properties.getCorrelationId());
        setHeaderIfNotNull(outHeader, "replyTo", properties.getReplyTo());
        setHeaderIfNotNull(outHeader, "expiration", properties.getExpiration());
        setHeaderIfNotNull(outHeader, "messageId", properties.getMessageId());
        setHeaderIfNotNull(outHeader, "timestamp", properties.getTimestamp());
        setHeaderIfNotNull(outHeader, "messageType", properties.getType());
        setHeaderIfNotNull(outHeader, "userId", properties.getUserId());
        setHeaderIfNotNull(outHeader, "appId", properties.getAppId());
        Map<String, Object> inHeaders = properties.getHeaders();
        if (inHeaders != null) {
          for (Map.Entry<String, Object> pair : inHeaders.entrySet()) {
            // These keys could collide with the headers set above, but that seems unlikely.
            // Copying them in without a custom prefix matches how the JMS origin behaves.
            setHeaderIfNotNull(outHeader, pair.getKey(), pair.getValue());
          }
        }
        batchMaker.addRecord(record);
        numRecords++;
      }
      if (envelope != null) {
        nextSourceOffset = String.valueOf(envelope.getDeliveryTag());
      } else {
        nextSourceOffset = null;
        LOG.warn("Message received with no envelope" );
      }
    } catch (InterruptedException e) {
      LOG.warn("Pipeline is shutting down.");
    }
  }
  return nextSourceOffset;
}
 
Example 16
Source File: WorkbookParser.java    From datacollector with Apache License 2.0
private void updateRecordWithCellValues(Row row, Record record) throws DataParserException {
  LinkedHashMap<String, Field> output = new LinkedHashMap<>();
  String sheetName = row.getSheet().getSheetName();
  String columnHeader;
  Set<String> unsupportedCellTypes = new HashSet<>();
  for (int columnNum = row.getFirstCellNum(); columnNum < row.getLastCellNum(); columnNum++) {
    if (headers.isEmpty()) {
      columnHeader = String.valueOf(columnNum);
    } else {
      if (columnNum >= headers.get(sheetName).size() || headers.get(sheetName).get(columnNum) == null) {
        // The current cell doesn't have an associated header, so we conditionally skip it
        if(settings.shouldSkipCellsWithNoHeader()) {
          continue;
        }

        columnHeader = String.valueOf(columnNum);
      } else {
        columnHeader = headers.get(sheetName).get(columnNum).getValueAsString();
      }
    }

    Cell cell = row.getCell(columnNum, Row.MissingCellPolicy.CREATE_NULL_AS_BLANK);
    try {
      output.put(columnHeader, Cells.parseCell(cell, this.evaluator));
    } catch (ExcelUnsupportedCellTypeException e) {
      output.put(columnHeader, Cells.parseCellAsString(cell));
      unsupportedCellTypes.add(e.getCellType().name());
    }
  }


  // Set interesting metadata about the row
  Record.Header hdr = record.getHeader();
  hdr.setAttribute("worksheet", row.getSheet().getSheetName());
  hdr.setAttribute("row",  Integer.toString(row.getRowNum()));
  hdr.setAttribute("firstCol", Integer.toString(row.getFirstCellNum()));
  hdr.setAttribute("lastCol", Integer.toString(row.getLastCellNum()));
  record.set(Field.createListMap(output));
  if (unsupportedCellTypes.size() > 0) {
    throw new RecoverableDataParserException(record, Errors.EXCEL_PARSER_05, StringUtils.join(unsupportedCellTypes, ", "));
  }
}
 
Example 17
Source File: MessageProcessorImpl.java    From datacollector with Apache License 2.0
private static void setHeaders(PubsubMessage message, Record r) {
  // Populate attributes
  Record.Header header = r.getHeader();
  message.getAttributesMap().forEach(header::setAttribute);
}
 
Example 18
Source File: PostgresCDCSource.java    From datacollector with Apache License 2.0
@Override
public String produce(String lastSourceOffset, int maxBatchSize, final BatchMaker batchMaker) throws StageException {
  // the offset given by data collector is ignored as the external system (postgres) keeps track of it.

  PostgresWalRecord postgresWalRecord = null;
  maxBatchSize = Math.min(configBean.baseConfigBean.maxBatchSize, maxBatchSize);
  int currentBatchSize = 0;

  long startTime = System.currentTimeMillis();

  while (
      !isBatchDone(
          currentBatchSize,
          maxBatchSize,
          startTime,
          postgresWalRecord == null
      )
  ) {

    postgresWalRecord = getWalReceiver().read();

    if (postgresWalRecord == null) {
      LOG.debug("Received null postgresWalRecord");
      ThreadUtil.sleep((long) configBean.pollInterval * 1000);
    } else {
      // filter out non data records or old data records
      PostgresWalRecord dataRecord = WalRecordFilteringUtils.filterRecord(postgresWalRecord, this);
      if (dataRecord == null) {
        LOG.debug("Received CDC with LSN {} from stream value filtered out", postgresWalRecord.getLsn().asString());
      } else {
        String recordLsn = dataRecord.getLsn().asString();
        LOG.debug("Received CDC with LSN {} from stream value - {}", recordLsn, dataRecord.getChanges());

        if (LOG.isTraceEnabled()) {
          LOG.trace("Valid CDC: {} ", dataRecord);
        }

        final Record record = processWalRecord(dataRecord);

        Record.Header header = record.getHeader();

        header.setAttribute(LSN, recordLsn);
        header.setAttribute(XID, dataRecord.getXid());
        header.setAttribute(TIMESTAMP_HEADER, dataRecord.getTimestamp());

        batchMaker.addRecord(record);
        currentBatchSize++;
      }
    }
  }
  // we report the current position of the WAL reader.
  return "dummy-not-used";
}