Java Code Examples for com.streamsets.pipeline.api.Field#createListMap()

The following examples show how to use com.streamsets.pipeline.api.Field#createListMap(). They are drawn from open source projects; the source file and project license are noted above each example.
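All of the examples below share one basic pattern: build a LinkedHashMap<String, Field> (insertion order is what distinguishes a list-map from a plain map field), wrap it with Field.createListMap(), and set the result on a Record. Here is a minimal, self-contained sketch of that pattern; like the test examples below, it uses the SDK's RecordCreator helper to obtain a Record.

import java.util.LinkedHashMap;

import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.sdk.RecordCreator;

public class ListMapSketch {
  public static void main(String[] args) {
    // Entries keep their insertion order, so each one is addressable both
    // by name ("/first") and by position ("[0]").
    LinkedHashMap<String, Field> entries = new LinkedHashMap<>();
    entries.put("first", Field.create("alpha"));
    entries.put("second", Field.create(42));

    Record record = RecordCreator.create();
    record.set(Field.createListMap(entries));

    System.out.println(record.get("/first").getValue());  // alpha
    System.out.println(record.get("[1]").getValue());     // 42
  }
}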
Example 1
Source File: TestDelimitedDataGenerator.java    From datacollector with Apache License 2.0
@Test
public void testGeneratorListMapIgnoreHeader() throws Exception {
  StringWriter writer = new StringWriter();
  DataGenerator gen = new DelimitedCharDataGenerator(writer, CsvMode.CSV.getFormat(), CsvHeader.IGNORE_HEADER, "h", "d", null);

  LinkedHashMap<String, Field> linkedHashMap = new LinkedHashMap<>();
  linkedHashMap.put("firstField", Field.create("sampleValue"));
  linkedHashMap.put("secondField", Field.create(20));
  Field listMapField = Field.createListMap(linkedHashMap);
  Record record = RecordCreator.create();
  record.set(listMapField);

  gen.write(record);
  gen.close();
  Assert.assertEquals("sampleValue,20\r\n", writer.toString());
}
 
Example 2
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseHandlesMultipleSheets() throws IOException, InvalidFormatException, DataParserException {
  Workbook workbook = createWorkbook("/excel/TestMultipleSheets.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Sheet1::0");

  // column header prefix, row value multiplier
  List<Pair<String, Integer>> sheetParameters = Arrays.asList(
      Pair.of("column", 1),
      Pair.of("header", 10)
  );
  for (int sheet = 1; sheet <= sheetParameters.size(); sheet++) {
    for (int row = 1; row <= 2; row++) {
      Record parsedRow = parser.parse();
      LinkedHashMap<String, Field> contentMap = new LinkedHashMap<>();
      String columnPrefix = sheetParameters.get(sheet - 1).getLeft();
      Integer valueMultiplier = sheetParameters.get(sheet - 1).getRight();
      for (int column = 1; column <= 5; column++) {
        contentMap.put(columnPrefix + column, Field.create(BigDecimal.valueOf(column * valueMultiplier)));
      }
      Field expectedRow = Field.createListMap(contentMap);
      assertEquals(String.format("Parsed value for sheet %1d, row %2d did not match expected value", sheet, row), expectedRow, parsedRow.get());
    }
  }
}
 
Example 3
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseHandlesStartingFromANonZeroOffset() throws IOException, InvalidFormatException, DataParserException {
  InputStream file = getFile("/excel/TestOffset.xlsx");
  Workbook workbook = WorkbookFactory.create(file);
  WorkbookParserSettings settings = WorkbookParserSettings.builder()
      .withHeader(ExcelHeader.IGNORE_HEADER)
      .build();

  WorkbookParser parser = new WorkbookParser(settings, getContext(), workbook, "Sheet2::2");

  Record firstContentRow = parser.parse();

  LinkedHashMap<String, Field> contentMap = new LinkedHashMap<>();
  for (int i = 0; i <= 2; i++) {
    contentMap.put(String.valueOf(i), Field.create(new BigDecimal(i + 4)));
  }
  Field expected = Field.createListMap(contentMap);

  assertEquals(expected, firstContentRow.get());
}
 
Example 4
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseCorrectlyHandlesFileWithNoHeaders() throws IOException, InvalidFormatException, DataParserException {
  Workbook workbook = createWorkbook("/excel/TestExcel.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsNoHeader, getContext(), workbook, "Sheet1::0");

  Record firstContentRow = parser.parse();

  LinkedHashMap<String, Field> contentMap = new LinkedHashMap<>();
  for (int i = 0; i <= 4; i++) {
    contentMap.put(String.valueOf(i), Field.create("column" + (i + 1)));
  }
  Field expected = Field.createListMap(contentMap);

  assertEquals(expected, firstContentRow.get());
}
 
Example 5
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseCorrectlyHandlesFileThatIgnoresHeaders() throws IOException, DataParserException, InvalidFormatException {
  Workbook workbook = createWorkbook("/excel/TestExcel.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsIgnoreHeader, getContext(), workbook, "Sheet1::0");

  Record firstContentRow = parser.parse();

  LinkedHashMap<String, Field> contentMap = new LinkedHashMap<>();
  for (int i = 0; i <= 4; i++) {
    contentMap.put(String.valueOf(i), Field.create(new BigDecimal(i + 1)));
  }
  Field expected = Field.createListMap(contentMap);

  assertEquals(expected, firstContentRow.get());
}
 
Example 6
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseCorrectlyEmptyLeadingRowsAndColumns() throws IOException, InvalidFormatException, DataParserException {
  Workbook workbook = createWorkbook("/excel/TestExcelEmptyRowsCols.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Sheet1::0");

  // column header prefix, row value multiplier
  List<Pair<String, Integer>> sheetParameters = Arrays.asList(
          Pair.of("column", 1),
          Pair.of("header", 10)
  );

  for (int sheet = 1; sheet <= sheetParameters.size(); sheet++) {
    for (int row = 1; row <= 2; row++) {
      Record parsedRow = parser.parse();
      LinkedHashMap<String, Field> contentMap = new LinkedHashMap<>();
      String columnPrefix = sheetParameters.get(sheet - 1).getLeft();
      Integer valueMultiplier = sheetParameters.get(sheet - 1).getRight();
      for (int column = 1; column <= 3 + sheet; column++) {
        contentMap.put(columnPrefix + column, Field.create(BigDecimal.valueOf(column * valueMultiplier)));
      }
      Field expectedRow = Field.createListMap(contentMap);
      assertEquals(String.format("Parsed value for sheet %1d, row %2d did not match expected value", sheet, row), expectedRow, parsedRow.get());
    }
  }
}
 
Example 7
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseCorrectlyHandlesFilesWithHeaders() throws IOException, InvalidFormatException, DataParserException {
  Workbook workbook = createWorkbook("/excel/TestExcel.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Sheet1::0");

  Record firstContentRow = parser.parse();

  LinkedHashMap<String, Field> contentMap = new LinkedHashMap<>();
  for (int i = 1; i <= 5; i++) {
    contentMap.put("column" + i, Field.create(new BigDecimal(i)));
  }
  Field expected = Field.createListMap(contentMap);

  assertEquals(expected, firstContentRow.get());
  assertEquals("Sheet1", firstContentRow.getHeader().getAttribute("worksheet"));

}
 
Example 8
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseCorrectlyReturnsCachedValueOfFormula() throws IOException, InvalidFormatException, DataParserException {
  Workbook workbook = createWorkbook("/excel/TestFormulas.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsNoHeader, getContext(), workbook, "Sheet1::0");

  Record recordFirstRow = parser.parse();
  Record recordSecondRow = parser.parse();

  LinkedHashMap<String, Field> firstMap = new LinkedHashMap<>();
  firstMap.put("0", Field.create("Addition"));
  firstMap.put("1", Field.create("Division"));
  firstMap.put("2", Field.create("Neighbor Multiplication"));
  Field expectedFirstRow = Field.createListMap(firstMap);

  LinkedHashMap<String, Field> secondMap = new LinkedHashMap<>();
  secondMap.put("0", Field.create(new BigDecimal(8.0).setScale(1)));
  secondMap.put("1", Field.create(new BigDecimal(9.0).setScale(1)));
  secondMap.put("2", Field.create(new BigDecimal(72.0).setScale(1)));
  Field expectedSecondRow = Field.createListMap(secondMap);

  assertEquals(expectedFirstRow, recordFirstRow.get());
  assertEquals(expectedSecondRow, recordSecondRow.get());
}
 
Example 9
Source File: TestDelimitedDataGenerator.java    From datacollector with Apache License 2.0
@Test
public void testGeneratorListMapNoHeader() throws Exception {
  StringWriter writer = new StringWriter();
  DataGenerator gen = new DelimitedCharDataGenerator(writer, CsvMode.CSV.getFormat(), CsvHeader.NO_HEADER, "h", "d", null);

  LinkedHashMap<String, Field> linkedHashMap = new LinkedHashMap<>();
  linkedHashMap.put("firstField", Field.create("sampleValue"));
  linkedHashMap.put("secondField", Field.create(20));
  Field listMapField = Field.createListMap(linkedHashMap);
  Record record = RecordCreator.create();
  record.set(listMapField);

  gen.write(record);
  gen.close();
  Assert.assertEquals("sampleValue,20\r\n", writer.toString());
}
 
Example 10
Source File: TestDelimitedDataGenerator.java    From datacollector with Apache License 2.0
@Test
public void testGeneratorListMapWithHeader() throws Exception {
  StringWriter writer = new StringWriter();
  DataGenerator gen = new DelimitedCharDataGenerator(writer, CsvMode.CSV.getFormat(), CsvHeader.WITH_HEADER, "h", "d", null);

  LinkedHashMap<String, Field> linkedHashMap = new LinkedHashMap<>();
  linkedHashMap.put("firstField", Field.create("sampleValue"));
  linkedHashMap.put("secondField", Field.create(20));
  Field listMapField = Field.createListMap(linkedHashMap);
  Record record = RecordCreator.create();
  record.set(listMapField);

  gen.write(record);
  gen.close();
  Assert.assertEquals("firstField,secondField\r\nsampleValue,20\r\n", writer.toString());
}
 
Example 11
Source File: TestRecordImpl.java    From datacollector with Apache License 2.0
@Test
public void testListMapRecord() {
  RecordImpl r = new RecordImpl("stage", "source", null, null);
  LinkedHashMap<String, Field> listMap = new LinkedHashMap<>();
  listMap.put("A", Field.create("ALPHA"));
  listMap.put("B", Field.create("BETA"));
  listMap.put("G", Field.create("GAMMA"));
  Field listMapField = Field.createListMap(listMap);
  r.set(listMapField);

  Assert.assertEquals("ALPHA", r.get("/A").getValue());
  Assert.assertEquals("ALPHA", r.get("[0]").getValue());
  Assert.assertEquals("BETA", r.get("/B").getValue());
  Assert.assertEquals("BETA", r.get("[1]").getValue());
  Assert.assertEquals("GAMMA", r.get("/G").getValue());
  Assert.assertEquals("GAMMA", r.get("[2]").getValue());
}
 
Example 12
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testParseHandlesBlanksCells() throws IOException, InvalidFormatException, DataParserException {
  Workbook workbook = createWorkbook("/excel/TestBlankCells.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Sheet1::0");

  Record recordFirstRow = parser.parse();

  LinkedHashMap<String, Field> firstContentMap = new LinkedHashMap<>();
  firstContentMap.put("column1", Field.create(BigDecimal.valueOf(11)));
  firstContentMap.put("column2", Field.create(""));
  firstContentMap.put("column3", Field.create(""));
  firstContentMap.put("column4", Field.create(BigDecimal.valueOf(44)));

  Field expectedFirstRow = Field.createListMap(firstContentMap);

  assertEquals(expectedFirstRow, recordFirstRow.get());
}
 
Example 13
Source File: HiveMetastoreUtil.java    From datacollector with Apache License 2.0
/**
 * Fills in metadata for a Record. This is used when creating a new partition.
 * Use the {@code customLocation} flag to mark whether {@code location} is a custom location or the default
 * location used by Hive.
 */
public static Field newPartitionMetadataFieldBuilder(
    String database,
    String tableName,
    LinkedHashMap<String, String> partitionList,
    String location,
    boolean customLocation,
    HMPDataFormat dataFormat) throws HiveStageCheckedException {
  LinkedHashMap<String, Field> metadata = new LinkedHashMap<>();
  metadata.put(VERSION, Field.create(PARTITION_ADDITION_METADATA_RECORD_VERSION));
  metadata.put(METADATA_RECORD_TYPE, Field.create(MetadataRecordType.PARTITION.name()));
  metadata.put(DATABASE_FIELD, Field.create(database));
  metadata.put(TABLE_FIELD, Field.create(tableName));
  metadata.put(LOCATION_FIELD, Field.create(location));
  metadata.put(CUSTOM_LOCATION, Field.create(customLocation));
  metadata.put(DATA_FORMAT, Field.create(dataFormat.name()));

  //fill in the partition list here
  metadata.put(
      PARTITION_FIELD,
      generateInnerFieldFromTheList(
          partitionList,
          PARTITION_NAME,
          PARTITION_VALUE,
          false
      )
  );
  return Field.createListMap(metadata);
}
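Since this builder returns the assembled list-map, a caller sets the result as the record's root field. The fragment below is a hypothetical usage sketch: the database, table, and location values are invented, HMPDataFormat.AVRO is assumed to be one of the enum's constants, and imports for the Hive stage library classes are omitted.

// Hypothetical usage sketch -- the values below are illustrative, not from the source above.
// (The caller must handle HiveStageCheckedException.)
LinkedHashMap<String, String> partitionValues = new LinkedHashMap<>();
partitionValues.put("dt", "2020-01-01");          // partition name -> value, in order

Record record = RecordCreator.create();
record.set(HiveMetastoreUtil.newPartitionMetadataFieldBuilder(
    "default",                                    // database
    "orders",                                     // table
    partitionValues,
    "/user/hive/warehouse/orders",                // location
    false,                                        // not a custom location
    HMPDataFormat.AVRO                            // assumed enum constant
));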
 
Example 14
Source File: TestAvroDataGenerator.java    From datacollector with Apache License 2.0
@Test
public void testAvroGeneratorListMapType() throws Exception {
  LinkedHashMap<String, Field> linkedHashMap = new LinkedHashMap<>();
  linkedHashMap.put("name", Field.create("Jon Natkins"));
  linkedHashMap.put("age", Field.create(29));
  linkedHashMap.put("emails", Field.create(ImmutableList.of(Field.create("[email protected]"))));
  linkedHashMap.put("boss", Field.create(Field.Type.MAP, null));
  Field listMapField = Field.createListMap(linkedHashMap);
  Record record = RecordCreator.create();
  record.set(listMapField);

  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataGenerator gen = new AvroDataOutputStreamGenerator(
      false,
      baos,
      COMPRESSION_CODEC_DEFAULT,
      SCHEMA,
      new HashMap<String, Object>(),
      null,
      null,
      0
  );
  gen.write(record);
  gen.close();

  //reader schema must be extracted from the data file
  GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(null);
  DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(
      new SeekableByteArrayInput(baos.toByteArray()), reader);
  Assert.assertTrue(dataFileReader.hasNext());
  GenericRecord readRecord = dataFileReader.next();

  Assert.assertEquals("Jon Natkins", readRecord.get("name").toString());
  Assert.assertEquals(29, readRecord.get("age"));
  Assert.assertFalse(dataFileReader.hasNext());
}
 
Example 15
Source File: StartJobTemplateSupplier.java    From datacollector with Apache License 2.0
private void generateField(List<Map<String, Object>> jobStatusList) {
  LinkedHashMap<String, Field> jobTemplateOutput = new LinkedHashMap<>();
  List<Field> templateJobInstances = new ArrayList<>();
  boolean jobTemplateSuccess = true;
  for (Map<String, Object> jobStatus : jobStatusList) {
    String status = jobStatus.containsKey("status") ? (String) jobStatus.get("status") : null;
    String statusColor = jobStatus.containsKey("color") ? (String) jobStatus.get("color") : null;
    String errorMessage = jobStatus.containsKey("errorMessage") ? (String) jobStatus.get("errorMessage") : null;
    String jobId = (String)jobStatus.get("jobId");
    boolean success = ControlHubApiUtil.determineJobSuccess(status, statusColor);
    LinkedHashMap<String, Field> startOutput = new LinkedHashMap<>();
    startOutput.put(Constants.JOB_ID_FIELD, Field.create(jobId));
    startOutput.put(Constants.STARTED_SUCCESSFULLY_FIELD, Field.create(true));
    if (!conf.runInBackground) {
      startOutput.put(Constants.FINISHED_SUCCESSFULLY_FIELD, Field.create(success));
      MetricRegistryJson jobMetrics = ControlHubApiUtil.getJobMetrics(
          clientBuilder,
          conf.baseUrl,
          jobId,
          userAuthToken
      );
      startOutput.put(Constants.JOB_METRICS_FIELD, CommonUtil.getMetricsField(jobMetrics));
    }
    startOutput.put(Constants.JOB_STATUS_FIELD, Field.create(status));
    startOutput.put(Constants.JOB_STATUS_COLOR_FIELD, Field.create(statusColor));
    startOutput.put(Constants.ERROR_MESSAGE_FIELD, Field.create(errorMessage));
    templateJobInstances.add(Field.createListMap(startOutput));
    jobTemplateSuccess &= success;
  }
  jobTemplateOutput.put(Constants.TEMPLATE_JOB_ID_FIELD, Field.create(templateJobId));
  jobTemplateOutput.put(Constants.TEMPLATE_JOB_INSTANCES_FIELD, Field.create(templateJobInstances));
  jobTemplateOutput.put(Constants.STARTED_SUCCESSFULLY_FIELD, Field.create(true));
  if (!conf.runInBackground) {
    jobTemplateOutput.put(Constants.FINISHED_SUCCESSFULLY_FIELD, Field.create(jobTemplateSuccess));
  }
  responseField = Field.createListMap(jobTemplateOutput);
}
 
Example 16
Source File: ScriptObjectFactory.java    From datacollector with Apache License 2.0
@SuppressWarnings("unchecked")
protected Field scriptToField(Object scriptObject, Record record, String path) {
  Field field;
  if (scriptObject != null) {
    if (scriptObject instanceof Map) {
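      // Recursively convert each entry of the script map, preserving insertion order.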
      Map<String, Object> scriptMap = (Map<String, Object>) scriptObject;
      LinkedHashMap<String, Field> fieldMap = new LinkedHashMap<>();
      for (Map.Entry<String, Object> entry : scriptMap.entrySet()) {
        fieldMap.put(entry.getKey(), scriptToField(entry.getValue(), record, composeMapPath(path, entry.getKey())));
      }
      boolean isListMap = (scriptObject instanceof MapInfo) && ((MapInfo) scriptObject).isListMap();
      field = (isListMap) ? Field.createListMap(fieldMap) : Field.create(fieldMap);
    } else if (scriptObject instanceof List) {
      List scriptArray = (List) scriptObject;
      List<Field> fieldArray = new ArrayList<>(scriptArray.size());
      for (int i = 0; i < scriptArray.size(); i++) {
        Object element = scriptArray.get(i);
        fieldArray.add(scriptToField(element, record, composeArrayPath(path, i)));
      }
      field = Field.create(fieldArray);
    } else {
      field = convertPrimitiveObject(scriptObject);
    }
  } else {
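    // A null script object becomes a null field, typed like the original field at this path when possible.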
    Field originalField = record.get(path);
    if (originalField != null) {
      field = Field.create(originalField.getType(), null);
    } else {
      field = Field.create((String) null);
    }
  }
  return field;
}
 
Example 17
Source File: TestXmlCharDataGenerator.java    From datacollector with Apache License 2.0
@Test
public void testFieldToXmlElementsFieldListMap() throws IOException, DataGeneratorException {
  Map<String, Field> map = ImmutableMap.of("entry", Field.create("data"));
  Field field = Field.createListMap(new LinkedHashMap<>(map));
  testFieldToXmlElementsFieldMap(field);
}
 
Example 18
Source File: TestWorkbookParser.java    From datacollector with Apache License 2.0
@Test
public void testARealSpreadsheetWithMultipleSheets() throws IOException, InvalidFormatException, DataParserException, ParseException {
  Workbook workbook = createWorkbook("/excel/TestRealSheet.xlsx");

  WorkbookParser parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Orders::0");

  // column header prefix, row value multiplier
  List<Pair<String, Integer>> sheetParameters = Arrays.asList(
          Pair.of("column", 1),
          Pair.of("header", 10)
  );

  // TEST 1 - verify first non-header record of first sheet
  LinkedHashMap<String, Field> sheet1Headers = new LinkedHashMap<>();
  String[] sheet1HdrList = { "Row ID","Order ID","Order Date","Ship Date","Ship Mode","Customer ID","Customer Name","Segment","Country","City","State","Postal Code","Region","Product ID","Category","Sub-Category","Product Name","Sales","Quantity","Discount","Profit" };
  for (int i = 0; i < sheet1HdrList.length; i++) {
    sheet1Headers.put(sheet1HdrList[i], Field.create(sheet1HdrList[i]));
  }
  LinkedHashMap<String, Field> row1Expected = new LinkedHashMap<>();
  DateFormat df = new SimpleDateFormat("MM-dd-yyyy");

  row1Expected.put("Row ID", Field.create(new BigDecimal(1.0)));
  row1Expected.put("Order ID", Field.create("CA-2016-152156"));
  row1Expected.put("Order Date", Field.createDate(df.parse("11-08-2016")));
  row1Expected.put("Ship Date", Field.createDate(df.parse("11-11-2016")));
  row1Expected.put("Ship Mode", Field.create("Second Class"));
  row1Expected.put("Customer ID", Field.create("CG-12520"));
  row1Expected.put("Customer Name", Field.create("Claire Gute"));
  row1Expected.put("Segment", Field.create("Consumer"));
  row1Expected.put("Country", Field.create("United States"));
  row1Expected.put("City", Field.create("Henderson"));
  row1Expected.put("State", Field.create("Kentucky"));
  row1Expected.put("Postal Code", Field.create(new BigDecimal("42420")));
  row1Expected.put("Region", Field.create("South"));
  row1Expected.put("Product ID", Field.create("FUR-BO-10001798"));
  row1Expected.put("Category", Field.create("Furniture"));
  row1Expected.put("Sub-Category", Field.create("Bookcases"));
  row1Expected.put("Product Name", Field.create("Bush Somerset Collection Bookcase"));
  row1Expected.put("Sales", Field.create(new BigDecimal("261.96")));
  row1Expected.put("Quantity", Field.create(new BigDecimal("2")));
  row1Expected.put("Discount", Field.create(new BigDecimal("0")));
  row1Expected.put("Profit", Field.create(new BigDecimal("41.9136")));

  Record parsedRow = parser.parse();
  Field expectedRow = Field.createListMap(row1Expected);
  assertEquals("Parsed value for sheet Orders, row 1 did not match expected value", expectedRow, parsedRow.get());

  // TEST 2 - Verify first non-header record on second sheet
  LinkedHashMap<String, Field> sheet2Expected = new LinkedHashMap<>();
  sheet2Expected.put("Returned",Field.create("Yes"));
  sheet2Expected.put("Order ID", Field.create("CA-2017-153822"));
  expectedRow = Field.createListMap(sheet2Expected);
  parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Returns::0");
  parsedRow = parser.parse();
  assertEquals("Parsed value for sheet Returns, row 1 did not match expected value", expectedRow, parsedRow.get());

  // TEST 3 - Verify total rows processed is what is in the sheet (minus header rows)
  parser = new WorkbookParser(settingsWithHeader, getContext(), workbook, "Orders::0");
  int numRows = 0;
  while (!"-1".equals(parser.getOffset())) {
    parser.parse();
    ++numRows;
  }
  --numRows; // remove last increment because that round would have generated EOF
  assertEquals("Total record count mismatch", 10294, numRows);


}
 
Example 19
Source File: SoapRecordCreator.java    From datacollector with Apache License 2.0
public LinkedHashMap<String, Field> addFields(
    XmlObject parent,
    Map<String, DataType> columnsToTypes
) throws StageException {
  LinkedHashMap<String, Field> map = new LinkedHashMap<>();

  Iterator<XmlObject> iter = parent.getChildren();
  String type = null;
  while (iter.hasNext()) {
    XmlObject obj = iter.next();

    String key = obj.getName().getLocalPart();
    if ("type".equals(key)) {
      // Housekeeping field
      type = obj.getValue().toString().toLowerCase();
      continue;
    }

    if (obj.hasChildren()) {
      if (QUERY_RESULT.equals(obj.getXmlType().getLocalPart())) {
        // Nested subquery - need to make an array
        Iterator<XmlObject> records = obj.getChildren(RECORDS);
        List<Field> recordList = new ArrayList<>();
        while (records.hasNext()) {
          XmlObject record = records.next();
          recordList.add(Field.createListMap(addFields(record, columnsToTypes)));
        }
        map.put(key, Field.create(recordList));
      } else {
        // Following a relationship
        map.put(key, Field.createListMap(addFields(obj, columnsToTypes)));
      }
    } else {
      Object val = obj.getValue();
      if ("Id".equalsIgnoreCase(key) && null == val) {
        // Get a null Id if you don't include it in the SELECT
        continue;
      }
      if (type == null) {
        throw new StageException(
            Errors.FORCE_04,
            "No type information for " + obj.getName().getLocalPart() +
                ". Specify component fields of compound fields, e.g. Location__Latitude__s or BillingStreet"
        );
      }

      DataType dataType = (columnsToTypes != null)
          ? columnsToTypes.get(key.toLowerCase())
          : DataType.USE_SALESFORCE_TYPE;
      if (dataType == null) {
        dataType = DataType.USE_SALESFORCE_TYPE;
      }

      Field field;
      XmlType xmlType = obj.getXmlType() != null ? XmlType.fromString(obj.getXmlType().getLocalPart()) : null;
      if (AGGREGATE_RESULT.equals(type)) {
        field = getField(xmlType, val, dataType);
      } else {
        com.sforce.soap.partner.Field sfdcField = getFieldMetadata(type, key);
        if (sfdcField == null) {
          // null relationship
          field = Field.createListMap(new LinkedHashMap<>());
        } else {
          field = createField(xmlType, val, dataType, sfdcField);
        }
        if (conf.createSalesforceNsHeaders) {
          setHeadersOnField(field, sfdcField);
        }
      }
      map.put(key, field);
    }
  }

  return map;
}
 
Example 20
Source File: HiveMetastoreUtil.java    From datacollector with Apache License 2.0
/**
 * Fills in metadata for a Record. This is used when creating a new schema.
 */
public static Field newSchemaMetadataFieldBuilder(
    String database,
    String tableName,
    LinkedHashMap<String, HiveTypeInfo> columnList,
    LinkedHashMap<String, HiveTypeInfo> partitionTypeList,
    boolean internal,
    String location,
    String avroSchema,
    HMPDataFormat dataFormat
) throws HiveStageCheckedException {
  LinkedHashMap<String, Field> metadata = new LinkedHashMap<>();
  metadata.put(VERSION, Field.create(SCHEMA_CHANGE_METADATA_RECORD_VERSION));
  metadata.put(METADATA_RECORD_TYPE, Field.create(MetadataRecordType.TABLE.name()));
  metadata.put(DATABASE_FIELD, Field.create(database));
  metadata.put(TABLE_FIELD, Field.create(tableName));
  metadata.put(LOCATION_FIELD, Field.create(location));
  metadata.put(DATA_FORMAT, Field.create(dataFormat.name()));

  //fill in column type list here
  metadata.put(
      COLUMNS_FIELD,
      generateInnerFieldFromTheList(
          columnList,
          COLUMN_NAME,
          TYPE_INFO,
          true
      )
  );
  //fill in partition type list here
  if (partitionTypeList != null && !partitionTypeList.isEmpty()) {
    metadata.put(
        PARTITION_FIELD,
        generateInnerFieldFromTheList(
            partitionTypeList,
            PARTITION_NAME,
            TYPE_INFO,
            true
        )
    );
  }
  metadata.put(INTERNAL_FIELD, Field.create(internal));
  metadata.put(AVRO_SCHEMA, Field.create(avroSchema));
  return Field.createListMap(metadata);
}