org.apache.commons.csv.CSVFormat Java Examples

The following examples show how to use org.apache.commons.csv.CSVFormat. Each example is an excerpt from an open-source project; the source file, project, and license are noted above each snippet.
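
Before the project examples, here is a minimal, self-contained sketch of the two most common CSVFormat operations that appear below: parsing with CSVParser and writing with CSVPrinter. The class name, sample data, and column names are invented for illustration; the API calls are the same ones used in the examples that follow.

import java.io.StringReader;
import java.io.StringWriter;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;

public class CsvFormatQuickStart {
    public static void main(String[] args) throws Exception {
        // Parse: treat the first record as the header row.
        String input = "name,age\r\nalice,30\r\nbob,25\r\n";
        try (CSVParser parser = CSVFormat.DEFAULT.withFirstRecordAsHeader()
                .parse(new StringReader(input))) {
            for (CSVRecord record : parser) {
                System.out.println(record.get("name") + " is " + record.get("age"));
            }
        }

        // Print: write records with an explicit header; DEFAULT terminates each record with CRLF.
        StringWriter out = new StringWriter();
        try (CSVPrinter printer = new CSVPrinter(out, CSVFormat.DEFAULT.withHeader("name", "age"))) {
            printer.printRecord("alice", 30);
            printer.printRecord("bob", 25);
        }
        System.out.print(out);
    }
}
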
Example #1
Source File: KsqlDelimitedSerializer.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public byte[] serialize(final String topic, final GenericRow genericRow) {
  if (genericRow == null) {
    return null;
  }
  try {
    StringWriter stringWriter = new StringWriter();
    CSVPrinter csvPrinter = new CSVPrinter(stringWriter, CSVFormat.DEFAULT);
    csvPrinter.printRecord(genericRow.getColumns());
    String result = stringWriter.toString();
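    // CSVFormat.DEFAULT terminates each record with a CRLF separator; strip those two characters before encoding.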
    return result.substring(0, result.length() - 2).getBytes(StandardCharsets.UTF_8);
  } catch (Exception e) {
    throw new SerializationException("Error serializing CSV message", e);
  }
}
 
Example #2
Source File: CsvUtils.java    From webtau with Apache License 2.0
private static CSVParser readCsvRecords(List<String> header, String content) {
    try {
        CSVFormat csvFormat = CSVFormat.RFC4180;
        if (header.isEmpty()) {
            csvFormat = csvFormat.withFirstRecordAsHeader();
        }

        return csvFormat.
                withIgnoreSurroundingSpaces().
                withIgnoreEmptyLines().
                withTrim().
                withDelimiter(',').
                parse(new StringReader(content));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
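
Note that in recent Commons CSV releases (1.9.0 and later) the withX() mutator methods used throughout these examples are deprecated in favor of CSVFormat.Builder. A rough equivalent of the format configured above using the builder API, assuming such a recent version, might look like this:

// Builder-based equivalent of the withX() chain above (Commons CSV 1.9.0+).
// withFirstRecordAsHeader() roughly corresponds to .setHeader().setSkipHeaderRecord(true) in the builder API.
CSVFormat csvFormat = CSVFormat.RFC4180.builder()
        .setIgnoreSurroundingSpaces(true)
        .setIgnoreEmptyLines(true)
        .setTrim(true)
        .setDelimiter(',')
        .build();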
 
Example #3
Source File: CSVImportFileParser.java    From constellation with Apache License 2.0
@Override
public List<String[]> preview(final InputSource input, final PluginParameters parameters, final int limit) throws IOException {
    // Leave the header on, as the importer expects this as the first entry.
    final ArrayList<String[]> results = new ArrayList<>();
    try (final CSVParser csvFileParser = CSVFormat.RFC4180.parse(new InputStreamReader(input.getInputStream(), StandardCharsets.UTF_8.name()))) {
        int count = 0;
        final List<CSVRecord> records = csvFileParser.getRecords();
        for (final CSVRecord record : records) {
            final String[] line = new String[record.size()];
            for (int i = 0; i < record.size(); i++) {
                line[i] = record.get(i);
            }
            results.add(line);
            count += 1;
            if (count >= limit) {
                return results;
            }
        }
    }
    return results;
}
 
Example #4
Source File: SymSpellSearchBenchMark.java    From customized-symspell with MIT License
private List<String> readQueries(String queryFile) {
  List<String> queries = new ArrayList<>();
  try {
    URL queryResourceUrl = this.getClass().getClassLoader().getResource(queryFile);
    CSVParser qparser = CSVParser
        .parse(queryResourceUrl, Charset.forName("UTF-8"),
            CSVFormat.DEFAULT.withDelimiter(' '));
    java.util.Iterator<CSVRecord> csvIterator = qparser.iterator();
    while (csvIterator.hasNext()) {
      CSVRecord csvRecord = csvIterator.next();
      queries.add(csvRecord.get(0));
    }
  } catch (IOException ex) {
    System.err.println("Error occurred " + ex);
  }
  return queries;
}
 
Example #5
Source File: UIPParser.java    From rival with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public DataModelIF<Long, Long> parseData(final File f) throws IOException {
    DataModelIF<Long, Long> dataset = new DataModel<>();
    Reader in = new InputStreamReader(new FileInputStream(f), "UTF-8");

    Iterable<CSVRecord> records;
    if (isHasHeader()) {
        records = CSVFormat.EXCEL.withDelimiter(getDelimiter()).withHeader().parse(in);
    } else {
        records = CSVFormat.EXCEL.withDelimiter(getDelimiter()).parse(in);
    }
    for (CSVRecord record : records) {
        long userID = Long.parseLong(record.get(getUserTok()));
        long itemID = Long.parseLong(record.get(getItemTok()));
        double preference = Double.parseDouble(record.get(getPrefTok()));
        dataset.addPreference(userID, itemID, preference);
    }
    in.close();
    return dataset;
}
 
Example #6
Source File: HashmodCSVImportFileParser.java    From constellation with Apache License 2.0
public List<String[]> parse(final HashmodInputSource input, final PluginParameters parameters) throws IOException {
    final ArrayList<String[]> results = new ArrayList<>();
    try (final CSVParser csvFileParser = CSVFormat.RFC4180.parse(new InputStreamReader(input.getInputStream(), StandardCharsets.UTF_8.name()))) {
        final List<CSVRecord> records = csvFileParser.getRecords();
        for (final CSVRecord record : records) {
            final String[] line = new String[record.size()];
            for (int i = 0; i < record.size(); i++) {
                line[i] = record.get(i);
            }
            results.add(line);
        }
    }
    return results;
}
 
Example #7
Source File: CsvLogEventLayout.java    From logging-log4j2 with Apache License 2.0
@PluginFactory
public static CsvLogEventLayout createLayout(
        // @formatter:off
        @PluginConfiguration final Configuration config,
        @PluginAttribute(defaultString = DEFAULT_FORMAT) final String format,
        @PluginAttribute final Character delimiter,
        @PluginAttribute final Character escape,
        @PluginAttribute final Character quote,
        @PluginAttribute final QuoteMode quoteMode,
        @PluginAttribute final String nullString,
        @PluginAttribute final String recordSeparator,
        @PluginAttribute(defaultString = DEFAULT_CHARSET) final Charset charset,
        @PluginAttribute final String header,
        @PluginAttribute final String footer)
        // @formatter:on
{

    final CSVFormat csvFormat = createFormat(format, delimiter, escape, quote, quoteMode, nullString, recordSeparator);
    return new CsvLogEventLayout(config, charset, csvFormat, header, footer);
}
 
Example #8
Source File: QueueReader.java    From maestro-java with Apache License 2.0
@Override
protected QueueDataSet readReader(Reader reader) throws IOException {
    Iterable<CSVRecord> records = CSVFormat.RFC4180
            .withCommentMarker('#')
            .withFirstRecordAsHeader()
            .withRecordSeparator(';')
            .withQuote('"')
            .withQuoteMode(QuoteMode.NON_NUMERIC)
            .parse(reader);

    for (CSVRecord record : records) {
        try {
            queueProcessor.process(record.get(0), record.get(1), record.get(2), record.get(3), record.get(4),
                    record.get(5), record.get(6));
        } catch (Throwable t) {
            logger.warn("Unable to parse record: {}", t.getMessage(), t);
        }
    }

    return queueProcessor.getQueueDataSet();
}
 
Example #9
Source File: ReadCSVStep.java    From pipeline-utility-steps-plugin with MIT License
@Override
public boolean permitsStaticMethod(@Nonnull Method method, @Nonnull Object[] args) {
    final Class<?> aClass = method.getDeclaringClass();
    final Package aPackage = aClass.getPackage();

    if (aPackage == null) {
        return false;
    }

    if (!aPackage.getName().equals(ORG_APACHE_COMMONS_CSV)) {
        return false;
    }

    if (aClass == CSVFormat.class) {
        return (method.getName().equals("newFormat") || method.getName().equals("valueOf"));
    }

    return false;
}
 
Example #10
Source File: CSVUtilsTest.java    From nifi with Apache License 2.0
@Test
public void testCustomFormatWithELInvalidValues() {
    PropertyContext context = createContext("${csv.delimiter}", "${csv.quote}", "${csv.escape}", "${csv.comment}");

    Map<String, String> attributes = new HashMap<>();
    attributes.put("csv.delimiter", "invalid");
    attributes.put("csv.quote", "invalid");
    attributes.put("csv.escape", "invalid");
    attributes.put("csv.comment", "invalid");

    CSVFormat csvFormat = CSVUtils.createCSVFormat(context, attributes);

    assertEquals(',', csvFormat.getDelimiter());
    assertEquals('"', (char) csvFormat.getQuoteCharacter());
    assertEquals('\\', (char) csvFormat.getEscapeCharacter());
    assertNull(csvFormat.getCommentMarker());
}
 
Example #11
Source File: GeneralInfoReader.java    From maestro-java with Apache License 2.0
/**
 * Reads general-info records from a CSV file.
 * @param reader the reader supplying the CSV content
 * @return the parsed data set
 * @throws IOException implementation specific
 */
@Override
protected GeneralInfoDataSet readReader(Reader reader) throws IOException {
    Iterable<CSVRecord> records = CSVFormat.RFC4180
            .withCommentMarker('#')
            .withFirstRecordAsHeader()
            .withRecordSeparator(';')
            .withQuote('"')
            .withQuoteMode(QuoteMode.NON_NUMERIC)
            .parse(reader);

    for (CSVRecord record : records) {
        try {
            generalInfoProcessor.process(record.get(0), record.get(1), record.get(2), record.get(3), record.get(4),
                    record.get(5), record.get(6), record.get(7), record.get(8), record.get(9));
        } catch (Throwable t) {
            logger.warn("Unable to parse record: {}", t.getMessage(), t);
        }
    }

    return generalInfoProcessor.getGeneralInfoDataSet();
}
 
Example #12
Source File: DictionarySample.java    From cloud-search-samples with Apache License 2.0
/**
 * Parses the dictionary file (CSV) into a list of DictionaryEntry items
 *
 * @param dictionaryFilePath path to CSV file containing the dictionary
 */
List<DictionaryEntry> loadEntries(String dictionaryFilePath) throws IOException {
  try (BufferedReader br = new BufferedReader(new FileReader(dictionaryFilePath))) {
    CSVParser parser = new CSVParser(br, CSVFormat.DEFAULT);
    return StreamSupport.stream(parser.spliterator(), false)
        .filter(record -> !record.get(0).startsWith("#")) // Treat any row starting with # as comment
        .map(record -> { // Convert records
          String term = record.get(0);
          // Collect remaining columns as list of synonyms for the term
          List<String> synonyms = StreamSupport.stream(record.spliterator(), false)
              .skip(1) // Skip term
              .collect(Collectors.toList());
          return new DictionaryEntry(term, synonyms);
        })
        .collect(Collectors.toList());
  }
}
 
Example #13
Source File: QDMemoryInfoWriter.java    From maestro-java with Apache License 2.0
public QDMemoryInfoWriter(final File logDir, final String name) throws IOException {
    File outputFile = new File(logDir, name + ".csv");

    writer = Files.newBufferedWriter(Paths.get(outputFile.getPath()), Charset.defaultCharset());
    csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT
            .withHeader("Timestamp", "Name", "Size", "Batch", "Thread-max", "Total", "In-threads",
                    "Rebal-in", "Rebal-out", "totalFreeToHeap", "globalFreeListMax"));
}
 
Example #14
Source File: TestDelimitedCharDataParser.java    From datacollector with Apache License 2.0
@Test
public void testParseNoHeaderWithListMap() throws Exception {
  OverrunReader reader = new OverrunReader(new StringReader("A,B\na,b"), 1000, true, false);
  DelimitedDataParserSettings settings = DelimitedDataParserSettings.builder()
      .withSkipStartLines(0)
      .withFormat(CSVFormat.DEFAULT)
      .withHeader(CsvHeader.NO_HEADER)
      .withRecordType(CsvRecordType.LIST_MAP)
      .withMaxObjectLen(-1)
      .withParseNull(false)
      .withNullConstant(null)
      .withAllowExtraColumns(false)
      .build();
  DataParser parser = new DelimitedCharDataParser(getContext(), "id", reader, 0, settings);

  Assert.assertEquals("0", parser.getOffset());
  Record record = parser.parse();
  Assert.assertNotNull(record);
  Assert.assertEquals("id::0", record.getHeader().getSourceId());
  Assert.assertEquals("A", record.get().getValueAsListMap().get("0").getValueAsString());
  Assert.assertEquals("B", record.get().getValueAsListMap().get("1").getValueAsString());
  Assert.assertEquals("4", parser.getOffset());
  record = parser.parse();
  Assert.assertNotNull(record);
  Assert.assertEquals("id::4", record.getHeader().getSourceId());
  Assert.assertEquals("a", record.get().getValueAsListMap().get("0").getValueAsString());
  Assert.assertEquals("b", record.get().getValueAsListMap().get("1").getValueAsString());
  Assert.assertEquals("7", parser.getOffset());
  record = parser.parse();
  Assert.assertNull(record);
  Assert.assertEquals("-1", parser.getOffset());
  parser.close();
}
 
Example #15
Source File: PatientTumorLocation.java    From hmftools with GNU General Public License v3.0
public static void writeRecords(@NotNull String outputPath, @NotNull List<PatientTumorLocation> patientTumorLocations)
        throws IOException {
    CSVFormat format = CSVFormat.DEFAULT.withNullString(Strings.EMPTY).withHeader(Header.class);
    CSVPrinter printer = new CSVPrinter(new FileWriter(outputPath), format);
    printer.printRecords(patientTumorLocations.stream().map(PatientTumorLocation::csvRecord).collect(Collectors.toList()));
    printer.close();
}
 
Example #16
Source File: CSVIngester.java    From macrobase with Apache License 2.0
@Override
public RowSet getRows(String baseQuery,
                      Map<String, String> preds,
                      int limit,
                      int offset) throws Exception{

    filename = conf.getString(MacroBaseConf.CSV_INPUT_FILE);
    Compression compression = conf.getCsvCompression();

    if (compression == Compression.GZIP) {
        InputStream fileStream = new FileInputStream(filename);
        InputStream gzipStream = new GZIPInputStream(fileStream);
        Reader decoder = new InputStreamReader(gzipStream);
        csvParser = new CSVParser(decoder, CSVFormat.DEFAULT.withHeader());
    } else {
        File csvFile = new File(conf.getString(MacroBaseConf.CSV_INPUT_FILE));
        csvParser = CSVParser.parse(csvFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
    }
    schema = csvParser.getHeaderMap();
    Iterator<CSVRecord> rawIterator = csvParser.iterator();
    int rowCount = 0;

    List<RowSet.Row> rows = Lists.newArrayList();
    while (rawIterator.hasNext() && rowCount < limit) {
        CSVRecord record = rawIterator.next();
        List<ColumnValue> columnValues = Lists.newArrayList();

        if (includeRow(record, preds)) {
            for (Map.Entry<String, Integer> se : schema.entrySet()) {
                columnValues.add(new ColumnValue(se.getKey(),record.get(se.getValue())));
            }

            rows.add(new RowSet.Row(columnValues));
            rowCount++;
        }
    }
    return new RowSet(rows);
}
 
Example #17
Source File: TumorLocationCurator.java    From hmftools with GNU General Public License v3.0
@VisibleForTesting
TumorLocationCurator(@NotNull InputStream mappingInputStream) throws IOException {
    CSVParser parser = CSVParser.parse(mappingInputStream, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
    for (CSVRecord record : parser) {
        String searchTerm = record.get("searchTerm");
        String primaryTumorLocation = record.get("primaryTumorLocation");
        String subType = record.get("subType");
        tumorLocationMap.put(searchTerm.toLowerCase(),
                ImmutableCuratedTumorLocation.of(Utils.capitalize(primaryTumorLocation), Utils.capitalize(subType), searchTerm));
    }
    // Need to create a copy of the key set so that we can remove elements from it without affecting the curation.
    unusedSearchTerms = Sets.newHashSet(tumorLocationMap.keySet());
}
 
Example #18
Source File: TestCsvParser.java    From datacollector with Apache License 2.0
@Test
public void testParserHeaders() throws Exception {
  CsvParser parser = new CsvParser(getReader("TestCsvParser-default.csv"),
                                   CSVFormat.DEFAULT.withHeader((String[])null).withSkipHeaderRecord(true), -1);
  try {
    Assert.assertArrayEquals(new String[]{"h1", "h2", "h3", "h4"}, parser.getHeaders());
  } finally {
    parser.close();
  }
}
 
Example #19
Source File: ImportFileHelperTest.java    From PolyGlot with MIT License
@Test
public void testImportFileIgnoreDupes() {
    System.out.println("ImportFileHelperTest.testImportFileIgnoreDupes");
    
    int expectedWordCount = 4;
    int expectedWordmapSize = 4;
    int expectedMatchCount = 1;
    
    ImportFileHelper helper = new ImportFileHelper(core);
    
    try {
        helper.setOptions("0", 
                "1", 
                "2", 
                "3", 
                "4", 
                "5", 
                CSVFormat.DEFAULT, 
                true, 
                true, 
                "\"", 
                ImportFileHelper.DuplicateOption.IGNORE_DUPES);
        
        helper.importFile(TEST_FILE, 0);
        
        assertEquals(expectedWordCount, core.getWordCollection().getWordCount());
        Map<String, List<ConWord>> wordMap = core.getWordCollection().getValueMapping();
        assertEquals(expectedWordmapSize, wordMap.size());
        assertTrue(wordMap.containsKey(ORIGIN_VAL));
        List<ConWord> matchWords = wordMap.get(ORIGIN_VAL);
        assertEquals(expectedMatchCount, matchWords.size());
        assertEquals(ORIGIN_DEF, matchWords.get(0).getDefinition());
    } catch (Exception e) {
        fail(e);
    }
}
 
Example #20
Source File: RouteLinkInfoWriter.java    From maestro-java with Apache License 2.0
public RouteLinkInfoWriter(final File logDir, final String name) throws IOException {
    File outputFile = new File(logDir, name + ".csv");

    writer = Files.newBufferedWriter(Paths.get(outputFile.getPath()), Charset.defaultCharset());
    csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT
            .withHeader("Timestamp", "Name", "LinkDir", "OperStatus", "Identity",
                    "DeliveryCount", "UndeliveredCount", "PresettledCount", "UnsettledCount",
                    "ReleasedCount","ModifiedCount", "AcceptedCount", "RejectedCount", "Capacity"));
}
 
Example #21
Source File: CommandLineInterface.java    From utah-parser with Apache License 2.0
/**
 * Collects the union of keys across all parsed records as CSV headers, prints the header row,
 * then prints one CSV record for each parsed entry.
 * If an entry has no value for a given header, an empty string is written in its place.
 *
 * @param parser the parser supplying records as key/value maps
 * @param target the stream the CSV output is written to
 * @throws IOException if writing the CSV output fails
 */
void printToCSV(Parser parser, PrintStream target) throws IOException {
    Map<String, String> curr = parser.next();

    List<Map<String, String>> mapList = new ArrayList<Map<String, String>>();

    Set<String> CSV_HEADERS = new TreeSet<>();
    // Get headers and create list
    while (curr != null) {
        mapList.add(curr);
        for (String str : curr.keySet()) {
            CSV_HEADERS.add(str);
        }
        curr = parser.next();
    }

    CSVFormat csvFormat = CSVFormat.DEFAULT;
    CSVPrinter printer = new CSVPrinter(target, csvFormat);
    printer.printRecord(CSV_HEADERS);

    // Cycle through array of maps printing value for each header
    for (Map<String, String> map : mapList) {
        List<String> values = new ArrayList<String>();
        for (String header : CSV_HEADERS) {
            String value = StringUtils.trimToEmpty(map.get(header));
            values.add(value);
        }
        printer.printRecord(values.toArray());
    }
}
 
Example #22
Source File: TestDelimitedCharDataParser.java    From datacollector with Apache License 2.0
@Test
public void testParseNoHeader() throws Exception {
  OverrunReader reader = new OverrunReader(new StringReader("A,B\na,b"), 1000, true, false);
  DelimitedDataParserSettings settings = DelimitedDataParserSettings.builder()
      .withSkipStartLines(0)
      .withFormat(CSVFormat.DEFAULT)
      .withHeader(CsvHeader.NO_HEADER)
      .withMaxObjectLen(-1)
      .withRecordType(CsvRecordType.LIST)
      .withParseNull(false)
      .withNullConstant(null)
      .withAllowExtraColumns(false)
      .build();
  DataParser parser = new DelimitedCharDataParser(getContext(), "id", reader, 0, settings);

  Assert.assertEquals("0", parser.getOffset());
  Record record = parser.parse();
  Assert.assertNotNull(record);
  Assert.assertEquals("id::0", record.getHeader().getSourceId());
  Assert.assertEquals("A", record.get().getValueAsList().get(0).getValueAsMap().get("value").getValueAsString());
  Assert.assertFalse(record.has("[0]/header"));
  Assert.assertEquals("B", record.get().getValueAsList().get(1).getValueAsMap().get("value").getValueAsString());
  Assert.assertFalse(record.has("[1]/header"));
  Assert.assertEquals("4", parser.getOffset());
  record = parser.parse();
  Assert.assertNotNull(record);
  Assert.assertEquals("id::4", record.getHeader().getSourceId());
  Assert.assertEquals("a", record.get().getValueAsList().get(0).getValueAsMap().get("value").getValueAsString());
  Assert.assertFalse(record.has("[0]/header"));
  Assert.assertEquals("b", record.get().getValueAsList().get(1).getValueAsMap().get("value").getValueAsString());
  Assert.assertFalse(record.has("[1]/header"));
  Assert.assertEquals("7", parser.getOffset());
  record = parser.parse();
  Assert.assertNull(record);
  Assert.assertEquals("-1", parser.getOffset());
  parser.close();
}
 
Example #23
Source File: DataDstUECsv.java    From xresloader with MIT License
@Override
protected Object buildForUEOnInit() throws IOException {
    UEBuildObject ret = new UEBuildObject();
    ret.sb = new StringBuffer();
    ret.csv = new CSVPrinter(ret.sb, CSVFormat.INFORMIX_UNLOAD_CSV.withQuoteMode(QuoteMode.ALL));

    appendCommonHeader(ret.csv);
    ret.csv.printComment(String.format("%s=%s", "xres_ver", ProgramOptions.getInstance().getVersion()));
    ret.csv.printComment(String.format("%s=%s", "data_ver", ProgramOptions.getInstance().getDataVersion()));
    ret.csv.printComment(String.format("%s=%d", "count", DataSrcImpl.getOurInstance().getRecordNumber()));
    ret.csv.printComment(String.format("%s=%s", "hash_code", "no hash code"));

    return ret;
}
 
Example #24
Source File: MigrateV14.java    From yanagishima with Apache License 2.0
private static int getRowNumbers(Path resultFilePath) throws IOException {
    int rowNumber = 0;
    try (BufferedReader reader = Files.newBufferedReader(resultFilePath, StandardCharsets.UTF_8)) {
        CSVParser parser = CSVFormat.EXCEL.withDelimiter('\t').withNullString("\\N").parse(reader);
        for (CSVRecord record : parser) {
            rowNumber++;
        }
    }
    return rowNumber;
}
 
Example #25
Source File: ImportFileHelperTest.java    From PolyGlot with MIT License
@Test
public void testImportFileOverwriteDupes() {
    System.out.println("ImportFileHelperTest.testImportFileOverwriteDupes");
    
    int expectedWordCount = 4;
    int expectedWordmapSize = 4;
    int expectedMatchCount = 1;
    String expectedImportDef = "male";
    
    ImportFileHelper helper = new ImportFileHelper(core);
    
    try {
        helper.setOptions("0", 
                "1", 
                "2", 
                "3", 
                "4", 
                "5", 
                CSVFormat.DEFAULT, 
                true, 
                true, 
                "\"", 
                ImportFileHelper.DuplicateOption.OVERWRITE_DUPES);
        
        helper.importFile(TEST_FILE, 0);
        
        assertEquals(expectedWordCount, core.getWordCollection().getWordCount());
        Map<String, List<ConWord>> wordMap = core.getWordCollection().getValueMapping();
        assertEquals(expectedWordmapSize, wordMap.size());
        assertTrue(wordMap.containsKey(ORIGIN_VAL));
        List<ConWord> matchWords = wordMap.get(ORIGIN_VAL);
        assertEquals(expectedMatchCount, matchWords.size());
        assertEquals(expectedImportDef, matchWords.get(0).getDefinition());
    } catch (Exception e) {
        fail(e);
    }
}
 
Example #26
Source File: TextUtils.java    From oryx with Apache License 2.0
private static String[] doParseDelimited(String delimited, CSVFormat format) {
  try (CSVParser parser = CSVParser.parse(delimited, format)) {
    Iterator<CSVRecord> records = parser.iterator();
    return records.hasNext() ?
        StreamSupport.stream(records.next().spliterator(), false).toArray(String[]::new) :
        EMPTY_STRING;
  } catch (IOException e) {
    throw new IllegalStateException(e); // Can't happen
  }
}
 
Example #27
Source File: DelimitedCharDataGenerator.java    From datacollector with Apache License 2.0
public DelimitedCharDataGenerator(Writer writer, CSVFormat format, CsvHeader header, String headerKey, String valueKey,
                                  String replaceNewLines)
    throws IOException {
  format = format.withHeader((String[])null);
  this.format = format;
  this.headerKey = headerKey;
  this.valueKey = valueKey;
  printer = new CSVPrinter(writer, format);
  this.header = header;
  firstRecord = true;
  this.replaceNewLines = replaceNewLines;
}
 
Example #28
Source File: TestCsvParser.java    From datacollector with Apache License 2.0
@Test
public void testSkipLines() throws Exception {
  CsvParser parser = new CsvParser(
      new CountingReader(new StringReader("foo\nbar\r\na,b,c\naa,bb,cc\ne,f,g\n")),
      CSVFormat.DEFAULT.withHeader((String[])null).withSkipHeaderRecord(false),
      -1,
      0,
      2
  );
  try {
    Assert.assertEquals(9, parser.getReaderPosition());

    String[] record = parser.read();
    Assert.assertEquals(15, parser.getReaderPosition());
    Assert.assertNotNull(record);
    Assert.assertArrayEquals(new String[]{"a", "b", "c"}, record);
    record = parser.read();
    Assert.assertNotNull(record);
    Assert.assertArrayEquals(new String[]{"aa", "bb", "cc"}, record);
    Assert.assertEquals(24, parser.getReaderPosition());
    record = parser.read();
    Assert.assertNotNull(record);
    Assert.assertArrayEquals(new String[]{"e", "f", "g"}, record);
    Assert.assertEquals(30, parser.getReaderPosition());
    record = parser.read();
    Assert.assertNull(record);
    Assert.assertEquals(30, parser.getReaderPosition());
  } finally {
    parser.close();
  }
}
 
Example #29
Source File: BeamKafkaCSVTableTest.java    From beam with Apache License 2.0
@Test
public void testCsvRecorderDecoder() {
  PCollection<Row> result =
      pipeline
          .apply(Create.of("1,\"1\",1.0", "2,2,2.0"))
          .apply(ParDo.of(new String2KvBytes()))
          .apply(new BeamKafkaCSVTable.CsvRecorderDecoder(genSchema(), CSVFormat.DEFAULT));

  PAssert.that(result).containsInAnyOrder(ROW1, ROW2);

  pipeline.run();
}
 
Example #30
Source File: CSVPrinterWrapper.java    From CineLog with GNU General Public License v3.0
public CSVPrinterWrapper(Appendable out, Class<? extends Enum<?>> headers) throws IOException {
    this.csvPrinter = new CSVPrinter(
            out,
            CSVFormat.DEFAULT
                    .withHeader(headers)
                    .withQuote('`')
                    .withDelimiter('ยง'));
}