Java Code Examples for org.apache.kafka.connect.data.Struct#get()

The following examples show how to use org.apache.kafka.connect.data.Struct#get(). Each example is taken from an open-source project; the source file and project are noted above each snippet.
Example 1
Source File: StructHelper.java    From connect-utils with Apache License 2.0
public static Map<String, Object> asMap(Struct struct) {
  Preconditions.checkNotNull(struct, "struct cannot be null.");
  Map<String, Object> result = new LinkedHashMap<>(struct.schema().fields().size());

  for (Field field : struct.schema().fields()) {
    final Object value;
    if (Schema.Type.STRUCT == field.schema().type()) {
      Struct s = struct.getStruct(field.name());
      value = asMap(s);
    } else {
      value = struct.get(field);
    }
    result.put(field.name(), value);
  }

  return result;
}
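
A minimal usage sketch (the schemas and values below are illustrative, not from the project). Note that Struct#get(Field) returns the field's schema default when no value was set, so defaults survive the flattening:

// Schema, SchemaBuilder, and Struct come from org.apache.kafka.connect.data.
Schema addressSchema = SchemaBuilder.struct()
    .field("city", Schema.STRING_SCHEMA)
    .build();
Schema personSchema = SchemaBuilder.struct()
    .field("name", Schema.STRING_SCHEMA)
    .field("address", addressSchema)
    .build();

Struct person = new Struct(personSchema)
    .put("name", "Ada")
    .put("address", new Struct(addressSchema).put("city", "London"));

Map<String, Object> map = StructHelper.asMap(person);
// map -> {name=Ada, address={city=London}}; nested structs become nested maps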
 
Example 2
Source File: ObjectMapperFactory.java    From kafka-connect-splunk with Apache License 2.0
void handleStruct(Event event) {
  final Struct input = (Struct) event.event;
  List<Field> fields = input.schema().fields();
  final Map<Object, Object> result = new LinkedHashMap<>(fields.size());

  for (Field field : fields) {
    Object key = field.name();
    Object value = input.get(field);

    if (null == value) {
      continue;
    }

    if (!event.setValue(key, value)) {
      result.put(key, value);
    }
  }

  event.event = result.isEmpty() ? null : result;
}
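
Two details worth noting: null field values are dropped outright rather than forwarded, and event.setValue(key, value) gets first claim on each field, so only fields the Event does not treat as top-level metadata land in the leftover map that replaces event.event (cleared to null when nothing remains).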
 
Example 3
Source File: EventConverterTest.java    From kafka-connect-splunk with Apache License 2.0
void assertSourceRecord(final Map<String, ?> expected, final ConnectRecord record, final String topic) throws JsonProcessingException {
  assertNotNull(record, "record should not be null.");
  assertNotNull(record.value(), "record.value() should not be null.");
  assertEquals(topic, record.topic(), "topic does not match.");
  assertTrue(record.key() instanceof Struct, "record.key() should be a struct");
  assertTrue(record.value() instanceof Struct, "record.value() should be a struct");

  Struct keyStruct = (Struct) record.key();
  keyStruct.validate();

  Struct valueStruct = (Struct) record.value();
  valueStruct.validate();

  for (Map.Entry<String, ?> entry : expected.entrySet()) {
    Object structValue = valueStruct.get(entry.getKey());

    if (entry.getValue() instanceof Map) {
      String text = ObjectMapperFactory.INSTANCE.writeValueAsString(entry.getValue());
      String structText = (String) structValue;
      assertEquals(text, structText, entry.getKey() + " should match.");
    } else {
      assertEquals(entry.getValue(), structValue, entry.getKey() + " should match.");
    }
  }
}
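
Struct#validate() throws a DataException when any non-optional field without a default is unset, so the field-by-field assertions that follow can assume structurally complete key and value structs.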
 
Example 4
Source File: AvroJsonSchemafulRecordConverter.java    From mongo-kafka with Apache License 2.0
private BsonValue processField(final Struct struct, final Field field) {
  LOGGER.trace("processing field '{}'", field.name());

  if (struct.get(field.name()) == null) {
    LOGGER.trace("no field in struct -> adding null");
    return BsonNull.VALUE;
  }

  if (isSupportedLogicalType(field.schema())) {
    return getConverter(field.schema()).toBson(struct.get(field), field.schema());
  }

  try {
    switch (field.schema().type()) {
      case BOOLEAN:
      case FLOAT32:
      case FLOAT64:
      case INT8:
      case INT16:
      case INT32:
      case INT64:
      case STRING:
      case BYTES:
        return handlePrimitiveField(struct, field);
      case STRUCT:
      case MAP:
        return toBsonDoc(field.schema(), struct.get(field));
      case ARRAY:
        return toBsonArray(field.schema(), struct.get(field));
      default:
        throw new DataException("unexpected / unsupported schema type " + field.schema().type());
    }
  } catch (Exception exc) {
    throw new DataException("error while processing field " + field.name(), exc);
  }
}
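
Note that the null check uses struct.get(field.name()) while the conversions below use struct.get(field); both resolve the same field, and both fall back to the schema's default value when one is declared, so BsonNull.VALUE is produced only for genuinely absent data.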
 
Example 5
Source File: ExtractTimestamp.java    From kafka-connect-transform-common with Apache License 2.0
private long processStruct(SchemaAndValue schemaAndValue) {
  final Struct inputStruct = (Struct) schemaAndValue.value();
  final Field inputField = schemaAndValue.schema().field(this.config.fieldName);

  if (null == inputField) {
    throw new DataException(
        String.format("Schema does not have field '{}'", this.config.fieldName)
    );
  }

  final Schema fieldSchema = inputField.schema();
  final long result;
  if (Schema.Type.INT64 == fieldSchema.type()) {
    final Object fieldValue = inputStruct.get(inputField);

    if (null == fieldValue) {
      throw new DataException(
          String.format("Field '%s' cannot be null.", this.config.fieldName)
      );
    }

    if (Timestamp.LOGICAL_NAME.equals(fieldSchema.name())) {
      final Date date = (Date) fieldValue;
      result = date.getTime();
    } else {
      final long timestamp = (long) fieldValue;
      result = timestamp;
    }
  } else {
    throw new DataException(
        String.format("Schema '{}' is not supported.", inputField.schema())
    );
  }

  return result;
}
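
A sketch of a record this transform accepts, assuming the configured fieldName is "ts" (a hypothetical name). The field must be INT64: either the Timestamp logical type carrying a java.util.Date, or a plain epoch-millis long:

Schema schema = SchemaBuilder.struct()
    .field("ts", Timestamp.SCHEMA)  // INT64 with logical name org.apache.kafka.connect.data.Timestamp
    .build();
Struct value = new Struct(schema)
    .put("ts", new java.util.Date(1500000000000L));
// processStruct(new SchemaAndValue(schema, value)) would return 1500000000000L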
 
Example 6
Source File: PatternRename.java    From kafka-connect-transform-common with Apache License 2.0
@Override
protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct inputStruct) {
  final SchemaBuilder outputSchemaBuilder = SchemaBuilder.struct();
  outputSchemaBuilder.name(inputSchema.name());
  outputSchemaBuilder.doc(inputSchema.doc());
  if (null != inputSchema.defaultValue()) {
    outputSchemaBuilder.defaultValue(inputSchema.defaultValue());
  }
  if (null != inputSchema.parameters() && !inputSchema.parameters().isEmpty()) {
    outputSchemaBuilder.parameters(inputSchema.parameters());
  }
  if (inputSchema.isOptional()) {
    outputSchemaBuilder.optional();
  }
  Map<String, String> fieldMappings = new HashMap<>(inputSchema.fields().size());
  for (final Field inputField : inputSchema.fields()) {
    log.trace("process() - Processing field '{}'", inputField.name());
    final Matcher fieldMatcher = this.config.pattern.matcher(inputField.name());
    final String outputFieldName;
    if (fieldMatcher.find()) {
      outputFieldName = fieldMatcher.replaceAll(this.config.replacement);
    } else {
      outputFieldName = inputField.name();
    }
    log.trace("process() - Mapping field '{}' to '{}'", inputField.name(), outputFieldName);
    fieldMappings.put(inputField.name(), outputFieldName);
    outputSchemaBuilder.field(outputFieldName, inputField.schema());
  }
  final Schema outputSchema = outputSchemaBuilder.build();
  final Struct outputStruct = new Struct(outputSchema);
  for (Map.Entry<String, String> entry : fieldMappings.entrySet()) {
    final String inputField = entry.getKey(), outputField = entry.getValue();
    log.trace("process() - Copying '{}' to '{}'", inputField, outputField);
    final Object value = inputStruct.get(inputField);
    outputStruct.put(outputField, value);
  }
  return new SchemaAndValue(outputSchema, outputStruct);
}
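
The rename step is plain java.util.regex. A standalone sketch of the mapping logic, assuming a pattern of "^prefix_" and an empty replacement (both hypothetical config values):

Pattern pattern = Pattern.compile("^prefix_");
Matcher fieldMatcher = pattern.matcher("prefix_firstName");
String outputFieldName = fieldMatcher.find()
    ? fieldMatcher.replaceAll("")
    : "prefix_firstName";
// outputFieldName == "firstName"; names that do not match pass through unchanged

Building the output schema first and then copying values through the name mapping keeps field order and per-field schemas intact.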
 
Example 7
Source File: HeaderToField.java    From kafka-connect-transform-common with Apache License 2.0
public SchemaAndValue apply(ConnectRecord record, Struct input) {
  Struct result = new Struct(this.newSchema);
  for (Field field : input.schema().fields()) {
    String fieldName = field.name();
    Object fieldValue = input.get(field);
    result.put(fieldName, fieldValue);
  }
  for (ConversionHandler handler : this.conversionHandlers) {
    handler.convert(record, result);
  }
  return new SchemaAndValue(this.newSchema, result);
}
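
This is the standard pattern for widening a Struct: copy every existing field into a Struct built on the precomputed newSchema (which already includes the header-derived fields), then let each ConversionHandler fill in its field from the record's headers.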
 
Example 8
Source File: ChangeCase.java    From kafka-connect-transform-common with Apache License 2.0
@Override
protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct input) {
  final State state = this.schemaState.computeIfAbsent(inputSchema, schema -> {
    final SchemaBuilder builder = SchemaBuilder.struct();
    if (!Strings.isNullOrEmpty(schema.name())) {
      builder.name(schema.name());
    }
    if (schema.isOptional()) {
      builder.optional();
    }

    final Map<String, String> columnMapping = new LinkedHashMap<>();

    for (Field field : schema.fields()) {
      final String newFieldName = this.config.from.to(this.config.to, field.name());
      log.trace("processStruct() - Mapped '{}' to '{}'", field.name(), newFieldName);
      columnMapping.put(field.name(), newFieldName);
      builder.field(newFieldName, field.schema());
    }

    return new State(columnMapping, builder.build());
  });

  final Struct outputStruct = new Struct(state.schema);

  for (Map.Entry<String, String> kvp : state.columnMapping.entrySet()) {
    final Object value = input.get(kvp.getKey());
    outputStruct.put(kvp.getValue(), value);
  }

  return new SchemaAndValue(state.schema, outputStruct);
}
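
The from.to(to, name) call matches Guava's CaseFormat API, so the mapping itself reduces to a one-liner; a sketch assuming a LOWER_UNDERSCORE to LOWER_CAMEL configuration (hypothetical):

// com.google.common.base.CaseFormat
String newFieldName = CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, "first_name");
// newFieldName == "firstName"

Caching the rebuilt schema per input schema through computeIfAbsent keeps the transform cheap on the hot path, since records from one topic normally share a schema instance.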
 
Example 9
Source File: AssertStruct.java    From connect-utils with Apache License 2.0
static <T> T castAndVerify(Class<T> cls, Struct struct, Field field, boolean expected) {
  final Object value = struct.get(field.name());

  final String prefix = String.format(
      "%s('%s') ",
      expected ? "expected" : "actual",
      field.name()
  );

  if (!field.schema().isOptional()) {
    assertNotNull(
        value,
        prefix + "has a require schema. Should not be null."
    );
  }

  if (null == value) {
    return null;
  }

  assertTrue(
      cls.isInstance(value),
      String.format(
          prefix + "should be a %s",
          cls.getSimpleName()
      )
  );

  return cls.cast(value);
}
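
The cls.isInstance/cls.cast pair is the reflective, null-safe equivalent of an instanceof check plus cast; together with the optionality check above, it turns a schema violation into a readable assertion failure rather than a ClassCastException.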
 
Example 10
Source File: AbstractSpoolDirSourceTask.java    From kafka-connect-spooldir with Apache License 2.0
protected void addRecord(List<SourceRecord> records, SchemaAndValue key, SchemaAndValue value) {
  final Long timestamp;

  switch (this.config.timestampMode) {
    case FIELD:
      Struct valueStruct = (Struct) value.value();
      log.trace("addRecord() - Reading date from timestamp field '{}'", this.config.timestampField);
      final java.util.Date date = (java.util.Date) valueStruct.get(this.config.timestampField);
      timestamp = date.getTime();
      break;
    case FILE_TIME:
      timestamp = this.inputFile.lastModified();
      break;
    case PROCESS_TIME:
      timestamp = null;
      break;
    default:
      throw new UnsupportedOperationException(
          String.format("Unsupported timestamp mode. %s", this.config.timestampMode)
      );
  }

  SourceRecord sourceRecord = record(
      key,
      value,
      timestamp
  );
  recordCount++;
  records.add(sourceRecord);
}
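
In FIELD mode the value is cast directly to java.util.Date, which assumes the configured timestampField uses one of Connect's date/time logical types; a plain INT64 field would fail this cast with a ClassCastException.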
 
Example 11
Source File: FieldPartitioner.java    From streamx with Apache License 2.0
@Override
public String encodePartition(SinkRecord sinkRecord) {
  Object value = sinkRecord.value();
  Schema valueSchema = sinkRecord.valueSchema();
  if (value instanceof Struct) {
    Struct struct = (Struct) value;
    Object partitionKey = struct.get(fieldName);
    Type type = valueSchema.field(fieldName).schema().type();
    switch (type) {
      case INT8:
      case INT16:
      case INT32:
      case INT64:
        Number record = (Number) partitionKey;
        return fieldName + "=" + record.toString();
      case STRING:
        return fieldName + "=" + (String) partitionKey;
      case BOOLEAN:
        boolean booleanRecord = (boolean) partitionKey;
        return fieldName + "=" + Boolean.toString(booleanRecord);
      default:
        log.error("Type {} is not supported as a partition key.", type.getName());
        throw new PartitionException("Error encoding partition.");
    }
  } else {
    log.error("Value is not Struct type.");
    throw new PartitionException("Error encoding partition.");
  }
}
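
A sketch of the encoding for a struct-valued record, assuming fieldName is configured as "country" (hypothetical):

Schema schema = SchemaBuilder.struct()
    .field("country", Schema.STRING_SCHEMA)
    .build();
Struct value = new Struct(schema).put("country", "DE");
// encodePartition(...) on a SinkRecord carrying this value returns "country=DE",
// which the connector can use as a Hive-style path segment.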
 
Example 12
Source File: AssertStruct.java    From connect-utils with Apache License 2.0
public static void assertStruct(final Struct expected, final Struct actual, String message) {
  String prefix = Strings.isNullOrEmpty(message) ? "" : message + ": ";

  if (null == expected) {
    assertNull(actual, prefix + "actual should be null.");
    return;
  }

  assertSchema(expected.schema(), actual.schema(), "schema does not match.");
  for (Field expectedField : expected.schema().fields()) {
    log.trace("assertStruct() - testing field '{}'", expectedField.name());
    final Object expectedValue = expected.get(expectedField.name());
    final Object actualValue = actual.get(expectedField.name());

    if (Decimal.LOGICAL_NAME.equals(expectedField.schema().name())) {
      final BigDecimal expectedDecimal = castAndVerify(BigDecimal.class, expected, expectedField, true);
      final BigDecimal actualDecimal = castAndVerify(BigDecimal.class, actual, expectedField, false);
      assertEquals(expectedDecimal, actualDecimal, prefix + expectedField.name() + " does not match.");
    } else if (Timestamp.LOGICAL_NAME.equals(expectedField.schema().name())
        || Date.LOGICAL_NAME.equals(expectedField.schema().name())
        || Time.LOGICAL_NAME.equals(expectedField.schema().name())) {
      final java.util.Date expectedDate = castAndVerify(java.util.Date.class, expected, expectedField, true);
      final java.util.Date actualDate = castAndVerify(java.util.Date.class, actual, expectedField, false);
      assertEquals(expectedDate, actualDate, prefix + expectedField.name() + " does not match.");
    } else {
      switch (expectedField.schema().type()) {
        case ARRAY:
          final List<Object> expectedArray = castAndVerify(List.class, expected, expectedField, true);
          final List<Object> actualArray = castAndVerify(List.class, actual, expectedField, false);
          assertEquals(expectedArray, actualArray, prefix + expectedField.name() + " does not match.");
          break;
        case MAP:
          final Map<Object, Object> expectedMap = castAndVerify(Map.class, expected, expectedField, true);
          final Map<Object, Object> actualMap = castAndVerify(Map.class, actual, expectedField, false);
          assertEquals(expectedMap, actualMap, prefix + expectedField.name() + " does not match.");
          break;
        case STRUCT:
          final Struct expectedStruct = castAndVerify(Struct.class, expected, expectedField, true);
          final Struct actualStruct = castAndVerify(Struct.class, actual, expectedField, false);
          assertStruct(expectedStruct, actualStruct, prefix + expectedField.name() + " does not match.");
          break;
        case BYTES:
          final byte[] expectedByteArray = castAndVerify(byte[].class, expected, expectedField, true);
          final byte[] actualByteArray = castAndVerify(byte[].class, actual, expectedField, false);
          assertEquals(
              null == expectedByteArray ? "" : BaseEncoding.base32Hex().encode(expectedByteArray).toString(),
              null == actualByteArray ? "" : BaseEncoding.base32Hex().encode(actualByteArray).toString(),
              prefix + expectedField.name() + " does not match."
          );
          break;
        default:
          assertEquals(expectedValue, actualValue, prefix + expectedField.name() + " does not match.");
          break;
      }
    }
  }
}
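
This revision delegates to the castAndVerify helper shown in Example 9, so each branch receives a typed value or a descriptive assertion failure, and BYTES fields are compared through a base32hex rendering to keep failure messages printable.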
 
Example 13
Source File: AbstractConverter.java    From connect-utils with Apache License 2.0
void convertStruct(final T result, Struct struct) {
  final Schema schema = struct.schema();

  for (final Field field : schema.fields()) {
    final String fieldName = field.name();
    log.trace("convertStruct() - Processing '{}'", field.name());
    final Object fieldValue = struct.get(field);

    try {
      if (null == fieldValue) {
        log.trace("convertStruct() - Setting '{}' to null.", fieldName);
        setNullField(result, fieldName);
        continue;
      }

      log.trace("convertStruct() - Field '{}'.field().schema().type() = '{}'", fieldName, field.schema().type());
      switch (field.schema().type()) {
        case STRING:
          log.trace("convertStruct() - Processing '{}' as string.", fieldName);
          setStringField(result, fieldName, (String) fieldValue);
          break;
        case INT8:
          log.trace("convertStruct() - Processing '{}' as int8.", fieldName);
          setInt8Field(result, fieldName, (Byte) fieldValue);
          break;
        case INT16:
          log.trace("convertStruct() - Processing '{}' as int16.", fieldName);
          setInt16Field(result, fieldName, (Short) fieldValue);
          break;
        case INT32:
          if (org.apache.kafka.connect.data.Date.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as date.", fieldName);
            setDateField(result, fieldName, (Date) fieldValue);
          } else if (org.apache.kafka.connect.data.Time.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as time.", fieldName);
            setTimeField(result, fieldName, (Date) fieldValue);
          } else {
            Integer int32Value = (Integer) fieldValue;
            log.trace("convertStruct() - Processing '{}' as int32.", fieldName);
            setInt32Field(result, fieldName, int32Value);
          }
          break;
        case INT64:

          if (Timestamp.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as timestamp.", fieldName);
            setTimestampField(result, fieldName, (Date) fieldValue);
          } else {
            Long int64Value = (Long) fieldValue;
            log.trace("convertStruct() - Processing '{}' as int64.", fieldName);
            setInt64Field(result, fieldName, int64Value);
          }
          break;
        case BYTES:

          if (Decimal.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as decimal.", fieldName);
            setDecimalField(result, fieldName, (BigDecimal) fieldValue);
          } else {
            byte[] bytes = (byte[]) fieldValue;
            log.trace("convertStruct() - Processing '{}' as bytes.", fieldName);
            setBytesField(result, fieldName, bytes);
          }
          break;
        case FLOAT32:
          log.trace("convertStruct() - Processing '{}' as float32.", fieldName);
          setFloat32Field(result, fieldName, (Float) fieldValue);
          break;
        case FLOAT64:
          log.trace("convertStruct() - Processing '{}' as float64.", fieldName);
          setFloat64Field(result, fieldName, (Double) fieldValue);
          break;
        case BOOLEAN:
          log.trace("convertStruct() - Processing '{}' as boolean.", fieldName);
          setBooleanField(result, fieldName, (Boolean) fieldValue);
          break;
        case STRUCT:
          log.trace("convertStruct() - Processing '{}' as struct.", fieldName);
          setStructField(result, fieldName, (Struct) fieldValue);
          break;
        case ARRAY:
          log.trace("convertStruct() - Processing '{}' as array.", fieldName);
          setArray(result, fieldName, schema, (List) fieldValue);
          break;
        case MAP:
          log.trace("convertStruct() - Processing '{}' as map.", fieldName);
          setMap(result, fieldName, schema, (Map) fieldValue);
          break;
        default:
          throw new DataException("Unsupported schema.type(): " + schema.type());
      }
    } catch (Exception ex) {
      throw new DataException(
          String.format("Exception thrown while processing field '%s'", fieldName),
          ex
      );
    }
  }
}
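
The INT32, INT64, and BYTES branches above exist because Connect's logical types ride on primitive wire types and are distinguished only by the schema name. The standard schema constants, for reference:

Schema dateSchema = org.apache.kafka.connect.data.Date.SCHEMA; // INT32, days since epoch
Schema timeSchema = Time.SCHEMA;                               // INT32, millis past midnight
Schema tsSchema = Timestamp.SCHEMA;                            // INT64, millis since epoch
Schema decimalSchema = Decimal.schema(2);                      // BYTES, unscaled value with scale 2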
 
Example 14
Source File: AssertStruct.java    From connect-utils with Apache License 2.0
public static void assertStruct(final Struct expected, final Struct actual, String message) {
  String prefix = Strings.isNullOrEmpty(message) ? "" : message + ": ";

  if (null == expected) {
    assertNull(actual, prefix + "actual should be null.");
    return;
  }

  assertSchema(expected.schema(), actual.schema(), "schema does not match.");

  final Set<String> logicalTypes = ImmutableSet.of(
      Timestamp.LOGICAL_NAME,
      Date.LOGICAL_NAME,
      Time.LOGICAL_NAME,
      Decimal.LOGICAL_NAME
  );

  for (Field field : expected.schema().fields()) {
    log.trace("assertStruct() - testing field '{}'", field.name());
    final Object expectedValue = expected.get(field.name());
    final Object actualValue = actual.get(field.name());

    if (!Strings.isNullOrEmpty(field.schema().name()) && logicalTypes.contains(field.schema().name())) {
      assertEquals(expectedValue, actualValue, prefix + field.name() + " does not match.");
    } else {
      switch (field.schema().type()) {
        case ARRAY:
          assertTrue(null == expectedValue || expectedValue instanceof List);
          assertTrue(null == actualValue || actualValue instanceof List);
          List<Object> expectedArray = (List<Object>) expectedValue;
          List<Object> actualArray = (List<Object>) actualValue;
          assertEquals(expectedArray, actualArray, prefix + field.name() + " does not match.");
          break;
        case MAP:
          assertTrue(null == expectedValue || expectedValue instanceof Map);
          assertTrue(null == actualValue || actualValue instanceof Map);
          Map<Object, Object> expectedMap = (Map<Object, Object>) expectedValue;
          Map<Object, Object> actualMap = (Map<Object, Object>) actualValue;
          assertEquals(expectedMap, actualMap, prefix + field.name() + " does not match.");
          break;
        case STRUCT:
          assertTrue(null == expectedValue || expectedValue instanceof Struct);
          assertTrue(null == actualValue || actualValue instanceof Struct);
          Struct expectedStruct = (Struct) expectedValue;
          Struct actualStruct = (Struct) actualValue;
          assertStruct(expectedStruct, actualStruct, prefix + field.name() + " does not match.");
          break;
        case BYTES:
          assertTrue(null == expectedValue || expectedValue instanceof byte[]);
          assertTrue(null == actualValue || actualValue instanceof byte[]);
          byte[] expectedByteArray = (byte[]) expectedValue;
          byte[] actualByteArray = (byte[]) actualValue;
          assertEquals(
              null == expectedByteArray ? "" : BaseEncoding.base32Hex().encode(expectedByteArray),
              null == actualByteArray ? "" : BaseEncoding.base32Hex().encode(actualByteArray),
              prefix + field.name() + " does not match."
          );
          break;
        default:
          assertEquals(expectedValue, actualValue, prefix + field.name() + " does not match.");
          break;
      }
    }
  }
}
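
Compare with Example 12 from the same file: this variant routes all four logical types through a plain assertEquals and uses instanceof assertions with unchecked casts instead of the castAndVerify helper, trading some failure-message specificity for brevity.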
 
Example 15
Source File: EventConverter.java    From kafka-connect-splunk with Apache License 2.0
public SourceRecord convert(JsonNode messageNode, String remoteHost) {
  Preconditions.checkNotNull(messageNode, "messageNode cannot be null.");
  Preconditions.checkState(messageNode.isObject(), "messageNode must be an object.");

  Struct keyStruct = new Struct(KEY_SCHEMA);
  Struct valueStruct = new Struct(VALUE_SCHEMA);

  setFieldValue(messageNode, valueStruct, "time", Date.class);
  setFieldValue(messageNode, valueStruct, "host", String.class);
  setFieldValue(messageNode, valueStruct, "source", String.class);
  setFieldValue(messageNode, valueStruct, "sourcetype", String.class);
  setFieldValue(messageNode, valueStruct, "index", String.class);
  setFieldValue(messageNode, valueStruct, "event", String.class);

  if (null == valueStruct.get("time")) {
    valueStruct.put("time", new Date(this.time.milliseconds()));
  }

  String host = valueStruct.getString("host");

  if (null == host) {
    host = remoteHost;
    valueStruct.put("host", host);
  }

  keyStruct.put("host", valueStruct.get("host"));

  String index = valueStruct.getString("index");

  if (null == index) {
    index = this.defaultIndex;
    valueStruct.put("index", index);
  }

  String topic = this.topic;

  if (null == topic) {
    topic = this.indexToTopicLookup.get(index);

    if (null == topic) {
      topic = this.topicPrefix + index.toLowerCase();
      this.indexToTopicLookup.put(index, topic);
    }
  }

  SourceRecord sourceRecord = new SourceRecord(
      EMPTY_MAP,
      EMPTY_MAP,
      topic,
      KEY_SCHEMA,
      keyStruct,
      VALUE_SCHEMA,
      valueStruct
  );

  return sourceRecord;
}
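
Note that the key struct carries only host, copied from the value after defaults are applied; keying on host means records from one host map to the same partition, and the earlier backfilling of time and index is what makes the later struct.get and getString calls safe.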