Java Code Examples for com.streamsets.pipeline.api.Field#getValue()

The following examples show how to use com.streamsets.pipeline.api.Field#getValue(). They are taken from the StreamSets Data Collector (datacollector) project; the source file reference above each example identifies where the original code lives.
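Most of the examples below share the same defensive pattern: resolve the Field by path, guard against a missing field, and only then call getValue(), which can itself return null when the field holds a null value. A minimal sketch of that pattern (the method name is illustrative and assumes com.streamsets.pipeline.api.Field and Record are imported):

// Returns the raw value at 'fieldPath', or null if the path is absent
// or the field holds a null value.
public static Object readFieldValue(Record record, String fieldPath) {
  if (record == null || !record.has(fieldPath)) {
    return null;                        // field path not present on the record
  }
  Field field = record.get(fieldPath);  // non-null once has() returned true
  return field.getValue();              // may still be null for a null-valued field
}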
Example 1
Source File: RecordEL.java    From datacollector with Apache License 2.0
@ElFunction(
    prefix = RECORD_EL_PREFIX,
    name = "value",
    description = "Returns the value of the field represented by path 'fieldPath' for the record in context")
@SuppressWarnings("unchecked")
public static Object getValue(
    @ElParam("fieldPath") String fieldPath) {
  Object value = null;
  Record record = getRecordInContext();
  if (record != null) {
    Field field = record.get(fieldPath);
    if (field != null) {
      value = field.getValue();
    }
  }
  return value;
}
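In a pipeline configuration this function is referenced through the record EL prefix (RECORD_EL_PREFIX), so an expression such as ${record:value('/id')} would return the raw value of /id, or null when the field is absent, which is exactly the behavior the null guards above provide.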
 
Example 2
Source File: JsonGeneratorProcessor.java    From datacollector with Apache License 2.0
@Override
protected void process(Record record, SingleLaneBatchMaker batchMaker) throws StageException {
  if (!record.has(fieldPathToSerialize)) {
    throw new OnRecordErrorException(Errors.JSON_00, fieldPathToSerialize, record.getHeader().getSourceId());
  }

  Field field = record.get(fieldPathToSerialize);
  if (field.getValue() == null) {
    throw new OnRecordErrorException(Errors.JSON_01, fieldPathToSerialize);
  }
  if (!supportedFieldTypes.contains(field.getType())) {
    throw new OnRecordErrorException(Errors.JSON_02, fieldPathToSerialize, field.getType());
  }

  Record tempRecord = getContext().createRecord(record.getHeader().getSourceId());
  tempRecord.set(field);
  Writer writer = new StringWriter();
  try (JsonCharDataGenerator generator = new JsonCharDataGenerator(getContext(), writer, Mode.MULTIPLE_OBJECTS)) {
    generator.write(tempRecord);
  } catch (IOException e) {
    throw new OnRecordErrorException(Errors.JSON_03, e.toString(), e);
  }

  record.set(outputFieldPath, Field.create(writer.toString().trim()));
  batchMaker.addRecord(record);
}
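For reference, a record that satisfies the three guards above (field present, value non-null, type supported) could be assembled along these lines in a test; RecordCreator is the record factory from the StreamSets SDK test utilities, and the field names are invented for illustration:

Record record = RecordCreator.create();
LinkedHashMap<String, Field> row = new LinkedHashMap<>();
row.put("id", Field.create(1));
row.put("name", Field.create("alice"));
record.set(Field.createListMap(row));   // root field is a LIST_MAP with a non-null value
// With fieldPathToSerialize pointing at the root path ("/"), and assuming LIST_MAP is among
// the supported types, the checks pass and the processor serializes the map as JSON.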
 
Example 3
Source File: TestFieldHasherProcessor.java    From datacollector with Apache License 2.0
private String hashForRecordsWithFieldsAndHeaderAttr(
    Record record,
    Collection<String> fieldsToHash,
    HashType hashType,
    boolean includeRecordHeaderForHashing,
    boolean useSeparator
) {
  HashFunction hasher = HashingUtil.getHasher(hashType.getHashType());
  Set<String> validFieldsToHash = new HashSet<>();
  for (String fieldPath : fieldsToHash) {
    Field field = record.get(fieldPath);
    Field.Type type = field.getType();
    if (!(FieldHasherProcessor.UNSUPPORTED_FIELD_TYPES.contains(type) || field.getValue() == null)) {
      validFieldsToHash.add(fieldPath);
    }
  }
  HashingUtil.RecordFunnel recordFunnel =
      HashingUtil.getRecordFunnel(
          validFieldsToHash,
          includeRecordHeaderForHashing,
          useSeparator,
              '\u0000'
      );
  return hasher.hashObject(record, recordFunnel).toString();
}
 
Example 4
Source File: EncryptFieldProtector.java    From datacollector with Apache License 2.0
@Override
public void process(FieldBatch batch) throws StageException {
  Map<String, String> encryptionContext = new HashMap<>(conf.context);

  while(batch.next()) {
    Field field = batch.getField();
    // reviewer requested no use of Java 8 streams
    if (field != null && field.getValue() != null) { // process if field is present and non-null

      Optional<Field> input = encrypter.checkInputEncrypt(field);

      if (input.isPresent()) {
        byte[] bytes = prepare.apply(input.get(), encryptionContext);
        CryptoResult<byte[], ?> result = encrypter.process(bytes, encryptionContext);
        Field encryptedField = createResultField.apply(result);
        batch.replace(encryptedField);
      } else {
        return; // record sent to error, done with this record.
      }
    }
  }
}
 
Example 5
Source File: BigQueryTarget.java    From datacollector with Apache License 2.0
/**
 * Convert the root field to a Java map, implicitly mapping each field to a column (non-nested fields only)
 * @param record record to be converted
 * @return Java row representation for the record
 */
private Map<String, Object> convertToRowObjectFromRecord(Record record) throws OnRecordErrorException {
  Field rootField = record.get();
  Map<String, Object> rowObject = new LinkedHashMap<>();
  if (rootField.getType().isOneOf(Field.Type.MAP, Field.Type.LIST_MAP)) {
    Map<String, Field> fieldMap = rootField.getValueAsMap();
    for (Map.Entry<String, Field> fieldEntry : fieldMap.entrySet()) {
      Field field = fieldEntry.getValue();
      //Skip null value fields
      if (field.getValue() != null){
        try {
          rowObject.put(fieldEntry.getKey(), getValueFromField("/" + fieldEntry.getKey(), field));
        } catch (IllegalArgumentException e) {
          throw new OnRecordErrorException(record, Errors.BIGQUERY_13, e.getMessage());
        }
      }
    }
  } else {
    throw new OnRecordErrorException(record,  Errors.BIGQUERY_16);
  }
  return rowObject;
}
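A quick illustration of the null-skipping behaviour, again using the SDK's RecordCreator and invented field names: the null-valued "comment" field below would be left out of the resulting row object.

Record record = RecordCreator.create();
LinkedHashMap<String, Field> root = new LinkedHashMap<>();
root.put("id", Field.create(42));
root.put("comment", Field.create(Field.Type.STRING, null)); // null value, skipped by the check above
record.set(Field.createListMap(root));
// convertToRowObjectFromRecord(record) would return a map containing only the "id" column.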
 
Example 6
Source File: FieldHasherProcessor.java    From datacollector with Apache License 2.0
private Set<String> validateAndExtractFieldsToHash(
    Record record,
    Set<String> fieldsDontExist,
    Set<String> fieldsWithListOrMapType,
    Set<String> fieldsWithNull,
    Collection<String> matchingFieldsPath
) {
  Set<String> validFieldsToHashForThisConfig = new HashSet<String>();
  for (String matchingFieldPath : matchingFieldsPath) {
    if (record.has(matchingFieldPath)) {
      Field field = record.get(matchingFieldPath);
      if (UNSUPPORTED_FIELD_TYPES.contains(field.getType())) {
        fieldsWithListOrMapType.add(matchingFieldPath);
      } else if (field.getValue() == null) {
        fieldsWithNull.add(matchingFieldPath);
      } else {
        validFieldsToHashForThisConfig.add(matchingFieldPath);
      }
    } else {
      fieldsDontExist.add(matchingFieldPath);
    }
  }
  return validFieldsToHashForThisConfig;
}
 
Example 7
Source File: JdbcTypeSupport.java    From datacollector with Apache License 2.0
/**
 * Generate a {@link JdbcTypeInfo} from the metadata record field.<br>
 * (Reverse of {@link #generateJdbcTypeInfoFieldForMetadataRecord(JdbcTypeInfo)})
 *
 * @param jdbcTypeInfoField the metadata field carrying the type information
 * @return {@link JdbcTypeInfo}
 * @throws StageException if the metadata field is not valid.
 */
@SuppressWarnings("unchecked")
public JdbcTypeInfo generateJdbcTypeInfoFromMetadataField(Field jdbcTypeInfoField, JdbcSchemaWriter schemaWriter) throws StageException {
  if (jdbcTypeInfoField.getType() == Field.Type.MAP) {
    Map<String, Field> fields = (Map<String, Field>) jdbcTypeInfoField.getValue();
    if (!fields.containsKey(TYPE)
        || !fields.containsKey(EXTRA_INFO)) {
      throw new StageException(JdbcErrors.JDBC_308, TYPE_INFO);
    }
    JdbcType jdbcType = JdbcType.getJdbcTypeFromString(fields.get(TYPE).getValueAsString());
    return generateJdbcTypeInfoFromMetadataField(jdbcType, fields.get(EXTRA_INFO), schemaWriter);
  } else {
    throw new StageException(JdbcErrors.JDBC_308, TYPE_INFO);
  }
}
 
Example 8
Source File: FieldEncryptProcessor.java    From datacollector with Apache License 2.0
@Override
protected void process(Record record, SingleLaneBatchMaker singleLaneBatchMaker) throws StageException {
  Map<String, String> encryptionContext = new HashMap<>(conf.context);

  try {
    for (String fieldPath : conf.fieldPaths) {

      Field field = record.get(fieldPath);

      // reviewer requested no use of Java 8 streams
      if (field != null && field.getValue() != null) { // process if field is present and non-null

        Optional<Field> input = checkInput.apply(record, field);

        if (input.isPresent()) {
          byte[] bytes = prepare.apply(input.get(), encryptionContext);
          CryptoResult<byte[], ?> result = encrypter.process(bytes, encryptionContext);
          field = createResultField.apply(result);
          record.set(fieldPath, field);
        } else {
          return; // record sent to error, done with this record.
        }
      }
    }
  // The encryption process can throw a lot of unchecked exceptions that, if not caught, would terminate the pipeline
  } catch (Exception e) {
    Throwables.propagateIfPossible(e, StageException.class);
    throw new StageException(CryptoErrors.CRYPTO_07, e.toString(), e);
  }

  singleLaneBatchMaker.addRecord(record);
}
 
Example 9
Source File: RecordEL.java    From datacollector with Apache License 2.0
@ElFunction(
    prefix = RECORD_EL_PREFIX,
    name = "dValue",
    description = "Returns the value of the specified header name")
public static String getDelimitedValue(
    @ElParam("header") String header) {
  String value = null;
  Record record = getRecordInContext();
  if (record != null) {
    Field root = record.get();
    if (root != null && root.getType() == Field.Type.LIST && root.getValue() != null) {
      List<Field> list = root.getValueAsList();
      for (Field element : list) {
        if (element.getType() == Field.Type.MAP && element.getValue() != null) {
          Map<String, Field> map = element.getValueAsMap();
          if (map.containsKey("header")) {
            Field headerField = map.get("header");
            if (headerField.getType() == Field.Type.STRING && headerField.getValue() != null) {
              if (headerField.getValueAsString().equals(header)) {
                if (map.containsKey("value")) {
                  Field valueField = map.get("value");
                  if (valueField.getType() == Field.Type.STRING && valueField.getValue() != null) {
                    value = valueField.getValueAsString();
                    break;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  return value;
}
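The function above assumes the delimited record layout: a LIST root whose elements are MAPs carrying "header" and "value" entries. A sketch of such a record (contents invented for illustration):

Map<String, Field> column = new LinkedHashMap<>();
column.put("header", Field.create("first_name"));
column.put("value", Field.create("Jane"));
List<Field> columns = new ArrayList<>();
columns.add(Field.create(column));       // MAP element holding "header" and "value"
Record record = RecordCreator.create();
record.set(Field.create(columns));       // LIST root field
// record:dValue('first_name') would return "Jane" for this layout.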
 
Example 10
Source File: OffsetQueryUtil.java    From datacollector with Apache License 2.0
public static Map<String, String> getOffsetsFromColumns(TableRuntimeContext tableContext, Map<String, Field> fields) throws StageException {
  final Map<String, String> offsets = new HashMap<>();
  for (String offsetColumn : tableContext.getSourceTableContext().getOffsetColumns()) {
    Field field = fields.get(offsetColumn);
    String value;

    if(field.getValue() == null) {
      throw new StageException(JdbcErrors.JDBC_408, offsetColumn);
    }

    if (field.getType().isOneOf(Field.Type.DATE, Field.Type.TIME)) {
      //For DATE/TIME fields store the long in string format and convert back to date when using offset
      //in query
      value = String.valueOf(field.getValueAsDatetime().getTime());
    } else if (field.getType() == Field.Type.DATETIME) {
      //DATETIME is similar to above, but there may also be a nanosecond portion (stored in field attr)
      String nanosAttr = field.getAttribute(JdbcUtil.FIELD_ATTRIBUTE_NANOSECONDS);
      int nanos;
      if (StringUtils.isNotBlank(nanosAttr) && StringUtils.isNumeric(nanosAttr)) {
        nanos = Integer.parseInt(nanosAttr);
      } else {
        nanos = 0;
      }
      value = TableContextUtil.getOffsetValueForTimestampParts(field.getValueAsDatetime().getTime(), nanos);
    } else if(field.getType() == Field.Type.ZONED_DATETIME) {
      value = field.getValueAsZonedDateTime().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
    } else {
      value = field.getValueAsString();
    }
    offsets.put(offsetColumn, value);
  }
  return offsets;
}
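As a concrete illustration of the DATE/TIME branch, a date offset is stored as its epoch-millisecond value rendered as a string (the timestamp below is arbitrary):

Field dateField = Field.createDate(new Date(1546300800000L));
String offsetValue = String.valueOf(dateField.getValueAsDatetime().getTime());
// offsetValue is "1546300800000"; it is converted back to a date when the offset is used in a query.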
 
Example 11
Source File: Matchers.java    From datacollector with Apache License 2.0
public static Matcher<Field> stringFieldWithNullValue() {
  return new FieldMatcher(Field.Type.STRING, null) {
    @Override
    protected Object getValueFromField(Field field) {
      return field.getValue();
    }
  };
}
 
Example 12
Source File: FieldTypeConverterProcessor.java    From datacollector with Apache License 2.0
private Field processByType(String matchingPath, Field rootField) throws StageException {
  switch (rootField.getType()) {
    case MAP:
    case LIST_MAP:
      if(rootField.getValue() == null) {
        return rootField;
      }
      for (Map.Entry<String, Field> entry : rootField.getValueAsMap().entrySet()) {
        entry.setValue(processByType(matchingPath + "/" + entry.getKey(), entry.getValue()));
      }
      break;
    case LIST:
      if(rootField.getValue() == null) {
        return rootField;
      }
      List<Field> fields = rootField.getValueAsList();
      for(int i = 0; i < fields.size(); i++) {
        fields.set(i, processByType(matchingPath + "[" + i + "]", fields.get(i)));
      }
      break;
    default:
      for(WholeTypeConverterConfig converterConfig : wholeTypeConverterConfigs) {
        if(converterConfig.sourceType == rootField.getType()) {
          rootField = convertField(matchingPath, rootField, converterConfig);
        }
      }
  }

  // Return original field
  return rootField;
}
 
Example 13
Source File: FieldMaskProcessor.java    From datacollector with Apache License 2.0
@Override
protected void process(Record record, SingleLaneBatchMaker batchMaker) throws StageException {
  Set<String> fieldPaths = record.getEscapedFieldPaths();
  List<String> nonStringFields = new ArrayList<>();
  // For each individual configuration entry
  for(FieldMaskConfig fieldMaskConfig : activeFieldMaskConfigs) {
    // For each configured field expression
    for (String toMask : fieldMaskConfig.fields) {
      // Find all actual fields that matches given configured expression
      for (String matchingFieldPath : FieldPathExpressionUtil.evaluateMatchingFieldPaths(
          toMask,
          fieldPathEval,
          fieldPathVars,
          record,
          fieldPaths
      )) {
        if (record.has(matchingFieldPath)) {
          Field field = record.get(matchingFieldPath);
          if (field.getType() != Field.Type.STRING) {
            nonStringFields.add(matchingFieldPath);
          } else {
            if (field.getValue() != null) {
              Field newField = Field.create(maskField(field, fieldMaskConfig));
              record.set(matchingFieldPath, newField);
            }
          }
        }
      }
    }
  }
  if (nonStringFields.isEmpty()) {
    batchMaker.addRecord(record);
  } else {
    throw new OnRecordErrorException(Errors.MASK_00, StringUtils.join(nonStringFields, ", "), record.getHeader().getSourceId());
  }
}
 
Example 14
Source File: MapRJsonDocumentLoader.java    From datacollector with Apache License 2.0
/**
 * List mode
 */
private void writeFieldToDocumentList(Field field, List list) throws IOException {
  if (field.getValue() == null) {
    list.add(null);
  } else {
    switch (field.getType()) {
      case FILE_REF:
        throw new IOException("Cannot serialize FileRef fields.");
      case MAP:
      case LIST_MAP:
        Document newDoc = loader.createNewEmptyDocument();
        Map<String, Field> map = field.getValueAsMap();
        for (Map.Entry<String, Field> fieldEntry : map.entrySet()) {
          String fieldName = fieldEntry.getKey();
          Field newField = fieldEntry.getValue();
          // recursive call in map mode.
          writeFieldToDocumentMap(newDoc, newField, fieldName);
        }
        // List mode
        list.add(newDoc);
        break;
      case LIST:
        List<Field> listOfFields = field.getValueAsList();
        List<Object> objsList = new ArrayList<>();
        for (Field f : listOfFields) {
          // recursive call in a list mode.
          writeFieldToDocumentList(f, objsList);
        }
        list.add(objsList);
        break;
      case BOOLEAN:
        list.add(field.getValueAsBoolean());
        break;
      case CHAR:
        list.add(String.valueOf(field.getValueAsChar()));
        break;
      case BYTE:
        list.add(new byte[]{field.getValueAsByte()});
        break;
      case SHORT:
        list.add(field.getValueAsShort());
        break;
      case INTEGER:
        list.add(field.getValueAsInteger());
        break;
      case LONG:
        list.add(field.getValueAsLong());
        break;
      case FLOAT:
        list.add(field.getValueAsFloat());
        break;
      case DOUBLE:
        list.add(field.getValueAsDouble());
        break;
      case DATE:
        list.add(field.getValueAsDate().getTime());
        break;
      case DATETIME:
        list.add(field.getValueAsDatetime().getTime());
        break;
      case TIME:
        list.add(field.getValueAsTime().getTime());
        break;
      case DECIMAL:
        list.add(field.getValueAsDecimal());
        break;
      case STRING:
      case ZONED_DATETIME:
        list.add(field.getValueAsString());
        break;
      case BYTE_ARRAY:
        list.add(field.getValueAsByteArray());
        break;
      default:
        throw new IllegalStateException(String.format(
            "Unrecognized field type (%s) in field: %s",
            field.getType().name(),
            field.toString())
        );
    }
  }
}
 
Example 15
Source File: MapRJsonDocumentLoader.java    From datacollector with Apache License 2.0
/**
 * Map mode
 */
private void writeFieldToDocumentMap(Document doc, Field field, String name) throws IOException {
  if (field.getValue() == null) {
    // On the Map mode just set the null on the document using the name.
    doc.setNull(name);
  } else {
    switch (field.getType()) {
      case FILE_REF:
        throw new IOException("Cannot serialize FileRef fields.");
      case MAP:
      case LIST_MAP:
        Document newDoc = loader.createNewEmptyDocument();
        Map<String, Field> map = field.getValueAsMap();
        for (Map.Entry<String, Field> fieldEntry : map.entrySet()) {
          String fieldName = fieldEntry.getKey();
          Field newField = fieldEntry.getValue();
          // recursive call in map mode.
          writeFieldToDocumentMap(newDoc, newField, fieldName);
        }
        // Map the new doc
        doc.set(name, newDoc);
        break;
      case LIST:
        List<Field> listOfFields = field.getValueAsList();
        List<Object> objsList = new ArrayList<>();
        for (Field f : listOfFields) {
          // recursive call in a list mode.
          writeFieldToDocumentList(f, objsList);
        }
        doc.setArray(name, objsList.toArray());
        break;
      case BOOLEAN:
        doc.set(name, field.getValueAsBoolean());
        break;
      case CHAR:
        doc.set(name, String.valueOf(field.getValueAsChar()));
        break;
      case BYTE:
        doc.set(name, new byte[]{field.getValueAsByte()});
        break;
      case SHORT:
        doc.set(name, field.getValueAsShort());
        break;
      case INTEGER:
        doc.set(name, field.getValueAsInteger());
        break;
      case LONG:
        doc.set(name, field.getValueAsLong());
        break;
      case FLOAT:
        doc.set(name, field.getValueAsFloat());
        break;
      case DOUBLE:
        doc.set(name, field.getValueAsDouble());
        break;
      case DATE:
        doc.set(name, field.getValueAsDate().getTime());
        break;
      case DATETIME:
        doc.set(name, field.getValueAsDatetime().getTime());
        break;
      case TIME:
        doc.set(name, field.getValueAsTime().getTime());
        break;
      case DECIMAL:
        doc.set(name, field.getValueAsDecimal());
        break;
      case STRING:
      case ZONED_DATETIME:
        doc.set(name, field.getValueAsString());
        break;
      case BYTE_ARRAY:
        doc.set(name, field.getValueAsByteArray());
        break;
      default:
        throw new IllegalStateException(String.format("Unrecognized field type (%s) in field: %s",
            field.getType().name(),
            field.toString()
        ));
    }
  }
}
 
Example 16
Source File: BigQueryTarget.java    From datacollector with Apache License 2.0
/**
 * Convert the sdc Field to an object for row content
 */
private Object getValueFromField(String fieldPath, Field field) {
  LOG.trace("Visiting Field Path '{}' of type '{}'", fieldPath, field.getType());
  switch (field.getType()) {
    case LIST:
      //REPEATED
      List<Field> listField = field.getValueAsList();
      //Convert the list to map with indices as key and Field as value (Map<Integer, Field>)
      Map<Integer, Field> fields =
          IntStream.range(0, listField.size()).boxed()
              .collect(Collectors.toMap(Function.identity(), listField::get));
      //filter map to remove fields with null value
      fields = fields.entrySet().stream()
          .filter(e -> e.getValue().getValue() != null)
          .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
      //now use the map index to generate field path and generate object for big query write
      return fields.entrySet().stream()
          .map(e -> getValueFromField(fieldPath + "[" + e.getKey() + "]", e.getValue()))
          .collect(Collectors.toList());
    case MAP:
    case LIST_MAP:
      //RECORD
      return field.getValueAsMap().entrySet().stream()
          .filter(me -> me.getValue().getValue() != null)
          .collect(
              Collectors.toMap(
                  Map.Entry::getKey,
                  e -> getValueFromField(fieldPath + "/" + e.getKey(), e.getValue())
              )
          );
    case DATE:
      return dateFormat.format(field.getValueAsDate());
    case TIME:
      return timeFormat.format(field.getValueAsTime());
    case DATETIME:
      return dateTimeFormat.format(field.getValueAsDatetime());
    case BYTE_ARRAY:
      return Base64.getEncoder().encodeToString(field.getValueAsByteArray());
    case DECIMAL:
    case BYTE:
    case CHAR:
    case FILE_REF:
      throw new IllegalArgumentException(Utils.format(Errors.BIGQUERY_12.getMessage(), fieldPath, field.getType()));
    default:
      //Boolean -> Map to Boolean in big query
      //Float, Double -> Map to Float in big query
      //String -> maps to String in big query
      //Short, Integer, Long -> Map to integer in big query
      return field.getValue();
  }
}
 
Example 17
Source File: DriftRuleEL.java    From datacollector with Apache License 2.0
@SuppressWarnings("unchecked")
public boolean detect(String fieldPath, boolean ignoreWhenMissing) {
  boolean drifted = false;
  T valueInRecord = null;
  boolean missing = false;
  Field field = getRecord().get(fieldPath);
  if (field != null) {
    // Check that the field has a supported type
    if(supportedTypes() != null && !supportedTypes().contains(field.getType())) {
      AlertInfoEL.setInfo(composeTypeAlert(fieldPath, field.getType(), supportedTypes()));
      return drifted;
    }

    if (field.getValue() != null) {
      valueInRecord = getValue(field);
    } else {
      missing = true;
    }
  } else {
    missing = true;
  }
  String key = getContextPrefix() + ":" + fieldPath;
  Map<String, Object> pipelineContext = getContext();
  boolean stored = pipelineContext.containsKey(key);
  T storedValue = (T) pipelineContext.get(key);
  if (missing) {
    if (!ignoreWhenMissing) {
      drifted = stored && storedValue != null;
    }
  } else {
    if (stored) {
      drifted = (storedValue == null && valueInRecord != null) || !storedValue.equals(valueInRecord);
    }
  }
  if (drifted) {
    pipelineContext.put(key, valueInRecord);
    AlertInfoEL.setInfo(composeAlert(fieldPath, storedValue, valueInRecord));
  }
  if (!stored) {
    if (ignoreWhenMissing) {
      if (!missing) {
        pipelineContext.put(key, valueInRecord);
      }
    } else {
      pipelineContext.put(key, valueInRecord);
    }
  }
  return drifted;
}
 
Example 18
Source File: HashingUtil.java    From datacollector with Apache License 2.0
@Override
public void funnel(Record record, PrimitiveSink sink) {
  for (String path : getFieldsToHash(record)) {
    Field field = record.get(path);
    if (field == null) {
      throw new IllegalArgumentException(
          Utils.format("Field Path {}  does not exist in the record", path)
      );
    }
    if (field.getValue() != null) {
      switch (field.getType()) {
        case BOOLEAN:
          sink.putBoolean(field.getValueAsBoolean());
          break;
        case CHAR:
          sink.putChar(field.getValueAsChar());
          break;
        case BYTE:
          sink.putByte(field.getValueAsByte());
          break;
        case SHORT:
          sink.putShort(field.getValueAsShort());
          break;
        case INTEGER:
          sink.putInt(field.getValueAsInteger());
          break;
        case LONG:
          sink.putLong(field.getValueAsLong());
          break;
        case FLOAT:
          sink.putFloat(field.getValueAsFloat());
          break;
        case DOUBLE:
          sink.putDouble(field.getValueAsDouble());
          break;
        case DATE:
          sink.putLong(field.getValueAsDate().getTime());
          break;
        case TIME:
          sink.putLong(field.getValueAsTime().getTime());
          break;
        case DATETIME:
          sink.putLong(field.getValueAsDatetime().getTime());
          break;

        case DECIMAL:
        case STRING:
          sink.putString(field.getValueAsString(), Charset.defaultCharset());
          break;

        case BYTE_ARRAY:
          sink.putBytes(field.getValueAsByteArray());
          break;
        case FILE_REF:
          throw new IllegalStateException(
              Utils.format(
                  "Hashing not supported for field: {} of type {}",
                  path,
                  field.getType()
              )
          );
        default:
          break;
      }
    } else {
      sink.putBoolean(true);
    }
    if(useSeparators) {
      sink.putString(java.nio.CharBuffer.wrap(new char[] {separator}), Charset.forName("UTF-8"));
    }
  }

  if (this.includeRecordHeader) {
    for (String attrName : record.getHeader().getAttributeNames()) {
      String headerAttr = record.getHeader().getAttribute(attrName);
      if (headerAttr != null) {
        sink.putString(headerAttr, Charset.defaultCharset());
      } else {
        sink.putBoolean(true);
      }

      if(useSeparators) {
        sink.putString(java.nio.CharBuffer.wrap(new char[] {separator}), Charset.forName("UTF-8"));
      }
    }
  }
}
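Note that a field which exists but carries a null value still contributes to the hash: the funnel feeds a boolean marker into the sink (sink.putBoolean(true)), whereas a field path that is missing from the record is rejected outright with an IllegalArgumentException.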
 
Example 19
Source File: MapRJsonDocumentLoader.java    From datacollector with Apache License 2.0
/**
 * Root mode
 */
private void writeFieldToDocumentRoot(Document doc, Field field) throws IOException {
  if (field.getValue() != null) {
    switch (field.getType()) {
      case FILE_REF:
        throw new IOException("Cannot serialize FileRef fields.");
      case MAP:
      case LIST_MAP:
        Map<String, Field> map = field.getValueAsMap();
        for (Map.Entry<String, Field> fieldEntry : map.entrySet()) {
          String fieldName = fieldEntry.getKey();
          Field newField = fieldEntry.getValue();
          // recursive call in map mode.
          writeFieldToDocumentMap(doc, newField, fieldName);
        }
        break;
      case LIST:
        // Root mode
        throw new IllegalStateException("Wrong record format. The input record must be a MAP or LIST_MAP in order to be inserted in a MAPR JSON table.");
      case BOOLEAN:
      case CHAR:
      case BYTE:
      case SHORT:
      case INTEGER:
      case LONG:
      case FLOAT:
      case DOUBLE:
      case DATE:
      case DATETIME:
      case TIME:
      case DECIMAL:
      case STRING:
      case BYTE_ARRAY:
      case ZONED_DATETIME:
        throw new IllegalStateException("Root record value must be a MAP or a LIST_MAP.");
      default:
        throw new IllegalStateException(String.format(
            "Unrecognized field type (%s) in field: %s",
            field.getType().name(),
            field.toString())
        );
    }
  }
}
 
Example 20
Source File: JsonRecordWriterImpl.java    From datacollector with Apache License 2.0
private void writeFieldToJsonObject(Field field) throws IOException {
  if (field == null || field.getValue() == null) {
    generator.writeNull();
    return;
  }
  switch (field.getType()) {
    case FILE_REF:
      throw new IOException("Cannot serialize FileRef fields.");
    case MAP:
    case LIST_MAP:
      generator.writeStartObject();
      Map<String, Field> map = field.getValueAsMap();
      for (Map.Entry<String, Field> fieldEntry : map.entrySet()) {
        generator.writeFieldName(fieldEntry.getKey());
        writeFieldToJsonObject(fieldEntry.getValue());
      }
      generator.writeEndObject();
      break;
    case LIST:
      generator.writeStartArray();
      List<Field> list = field.getValueAsList();
      for (Field f : list) {
        writeFieldToJsonObject(f);
      }
      generator.writeEndArray();
      break;
    case BOOLEAN:
      generator.writeBoolean(field.getValueAsBoolean());
      break;
    case CHAR:
      generator.writeString(String.valueOf(field.getValueAsChar()));
      break;
    case BYTE:
      generator.writeBinary(new byte[] {field.getValueAsByte()});
      break;
    case SHORT:
      generator.writeNumber(field.getValueAsShort());
      break;
    case INTEGER:
      generator.writeNumber(field.getValueAsInteger());
      break;
    case LONG:
      generator.writeNumber(field.getValueAsLong());
      break;
    case FLOAT:
      generator.writeNumber(field.getValueAsFloat());
      break;
    case DOUBLE:
      generator.writeNumber(field.getValueAsDouble());
      break;
    case DATE:
      generator.writeNumber(field.getValueAsDate().getTime());
      break;
    case DATETIME:
      generator.writeNumber(field.getValueAsDatetime().getTime());
      break;
    case TIME:
      generator.writeNumber(field.getValueAsTime().getTime());
      break;
    case DECIMAL:
      generator.writeNumber(field.getValueAsDecimal());
      break;
    case STRING:
      generator.writeString(field.getValueAsString());
      break;
    case BYTE_ARRAY:
      generator.writeBinary(field.getValueAsByteArray());
      break;
    case ZONED_DATETIME:
      generator.writeString(field.getValueAsString());
      break;
    default:
      throw new IllegalStateException(String.format(
          "Unrecognized field type (%s) in field: %s",
          field.getType().name(),
          field.toString())
      );
  }
}
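Under this writer, a LIST_MAP root such as {"name": "Ada", "age": 36} is emitted as the JSON object {"name":"Ada","age":36}, and any field whose getValue() is null is written out as a JSON null.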