org.supercsv.io.CsvListWriter Java Examples

The following examples show how to use org.supercsv.io.CsvListWriter. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: FlowSummaryWithTypesAndPhysicalsExport.java    From waltz with Apache License 2.0 6 votes vote down vote up
/**
 * Opens {@code /temp/flows.csv} and writes the report header row.
 *
 * @return an open writer positioned after the header row; the caller is
 *         responsible for flushing/closing it
 * @throws IOException if the file cannot be opened or the header written
 */
private static CsvListWriter setupCSVWriter() throws IOException {
    // Explicit charset: the no-arg OutputStreamWriter constructor uses the
    // platform default encoding, producing inconsistent files across hosts.
    CsvListWriter csvWriter = new CsvListWriter(
            new OutputStreamWriter(
                    new FileOutputStream("/temp/flows.csv"),
                    java.nio.charset.StandardCharsets.UTF_8),
            CsvPreference.EXCEL_PREFERENCE);
    csvWriter.write(
            "Source App",
            "Source Asset Code",
            "Source App Kind",
            "Source App Status",
            "Source App Org Unit", // src OU
            "Source In Scope",
            "Target App",
            "Target Asset Code",
            "Target App Kind",
            "Target App Status",
            "Target App Org Unit", // trg OU (was mislabelled "src OU")
            "Target In Scope",
            "Data Types",
            "Physical Name",
            "Physical ExtId",
            "Physical Transport",
            "Physical Format",
            "Physical Frequency",
            "Criticality");
    return csvWriter;
}
 
Example #2
Source File: Serialization.java    From joinery with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Serializes the given data frame as CSV to {@code output}.
 * Column headers come from {@code df.columns()} (falling back to the column
 * index when a name is missing); null cells are written as empty strings and
 * date columns are formatted as ISO-8601 timestamps.
 */
public static <V> void writeCsv(final DataFrame<V> df, final OutputStream output)
throws IOException {
    try (CsvListWriter csv = new CsvListWriter(new OutputStreamWriter(output), CsvPreference.STANDARD_PREFERENCE)) {
        final int width = df.size();

        // Header row: use the column name where present, otherwise the index.
        final String[] header = new String[width];
        final Iterator<Object> names = df.columns().iterator();
        for (int col = 0; col < width; col++) {
            header[col] = String.valueOf(names.hasNext() ? names.next() : col);
        }
        csv.writeHeader(header);

        // One processor per column: dates get ISO formatting, everything else
        // only needs null -> "".
        final List<Class<?>> columnTypes = df.types();
        final CellProcessor[] processors = new CellProcessor[width];
        for (int col = 0; col < width; col++) {
            processors[col] = Date.class.isAssignableFrom(columnTypes.get(col))
                    ? new ConvertNullTo("", new FmtDate("yyyy-MM-dd'T'HH:mm:ssXXX"))
                    : new ConvertNullTo("");
        }

        for (final List<V> row : df) {
            csv.write(row, processors);
        }
    }
}
 
Example #3
Source File: SparseDataCreator.java    From super-csv with Apache License 2.0 6 votes vote down vote up
/**
 * Convenience generator (normally {@code @Ignore}d): writes one million rows of
 * pseudo-random numeric data to {@code SparseNumbersOnly.csv}.
 */
@Ignore("This is a test only for convenience")
@Test
public void CreateSparseNumericData() throws IOException
{
	// try-with-resources guarantees the file handle is released even when a
	// write throws; the original leaked the FileWriter on failure.
	try (CsvListWriter ff = new CsvListWriter(new FileWriter("SparseNumbersOnly.csv"),
			CsvPreference.STANDARD_PREFERENCE)) {
		final int rowsToProduce = 1000000;
		int j = 33;
		for (int i = 0; i < rowsToProduce; i++) {
			if (j == 0) {
				j = 2; // keep the multiplier non-zero: j is a divisor below
			}
			ff.write(i, i * j, i / (double) j);
			j = i * j % 1843;
		}
	}
}
 
Example #4
Source File: ExportExecRowWriterTest.java    From spliceengine with GNU Affero General Public License v3.0 6 votes vote down vote up
/**
 * Verifies that a null column value is rendered as an empty CSV field (row 2,
 * third column) and that the surrounding rows are unaffected. The expected
 * literal also pins the double formatting the descriptors produce: 2 decimal
 * places for the first double column, 7 for the second.
 */
@Test
public void writeRow_withNullValue() throws IOException, StandardException {

    // given
    StringWriter writer = new StringWriter(100);
    CsvListWriter csvWriter = new CsvListWriter(writer, CsvPreference.EXCEL_PREFERENCE);
    ExportExecRowWriter execRowWriter = new ExportExecRowWriter(csvWriter);
    ResultColumnDescriptor[] columnDescriptors = columnDescriptors();

    // when
    execRowWriter.writeRow(build("AAA", "BBB", "CCC", "DDD", "EEE", 111.123456789, 222.123456789), columnDescriptors);
    execRowWriter.writeRow(build("AAA", "BBB", null, "DDD", "EEE", 111.123456789, 222.123456789), columnDescriptors);   // null!
    execRowWriter.writeRow(build("AAA", "BBB", "CCC", "DDD", "EEE", 111.123456789, 222.123456789), columnDescriptors);
    execRowWriter.close();

    // then
    assertEquals("" +
            "AAA,BBB,CCC,DDD,EEE,111.12,222.1234568\n" +
            "AAA,BBB,,DDD,EEE,111.12,222.1234568\n" +
            "AAA,BBB,CCC,DDD,EEE,111.12,222.1234568\n" +
            "", writer.toString());
}
 
Example #5
Source File: OnlineDbMVStore.java    From ipst with Mozilla Public License 2.0 5 votes vote down vote up
/**
 * Writes one semicolon-delimited CSV file per stored workflow, containing the
 * network attribute values of every state of that workflow. The header row is
 * derived from the attributes of the first state encountered.
 */
private void serializeStoredWorkflowsStates() {
    LOGGER.info("Serializing stored workflows states");
    for (String workflowId : workflowsStates.keySet()) {
        if (workflowStatesFolderExists(workflowId)) {
            LOGGER.info("Serializing network data of workflow {}", workflowId);
            ConcurrentHashMap<Integer, Map<HistoDbAttributeId, Object>> workflowStates = workflowsStates.get(workflowId);
            Path workflowStatesFolder = getWorkflowStatesFolder(workflowId);
            Path csvFile = Paths.get(workflowStatesFolder.toString(), SERIALIZED_STATES_FILENAME);
            try (FileWriter fileWriter = new FileWriter(csvFile.toFile());
                 CsvListWriter csvWriter = new CsvListWriter(fileWriter, new CsvPreference.Builder('"', ';', "\r\n").build())) {
                boolean printHeaders = true;
                for (Map.Entry<Integer, Map<HistoDbAttributeId, Object>> state : workflowStates.entrySet()) {
                    Map<HistoDbAttributeId, Object> networkValues = state.getValue();
                    if (printHeaders) {
                        // Header: fixed "workflow"/"state" columns, then one column
                        // per attribute of the first state, in map iteration order.
                        // NOTE(review): assumes all states share the same attribute
                        // set/order as the first one — confirm upstream guarantee.
                        List<String> headers = new ArrayList<>(networkValues.size() + 2);
                        headers.add("workflow");
                        headers.add("state");
                        for (HistoDbAttributeId attrId : networkValues.keySet()) {
                            headers.add(attrId.toString());
                        }
                        csvWriter.writeHeader(headers.toArray(new String[0]));
                        printHeaders = false;
                    }
                    List<Object> valuesList = new ArrayList<>(networkValues.size() + 2);
                    valuesList.add(workflowId);
                    valuesList.add(state.getKey());
                    valuesList.addAll(networkValues.values());
                    csvWriter.write(valuesList.toArray());
                }
            } catch (IOException e) {
                // Pass the exception so the stack trace is not silently dropped.
                LOGGER.error("Error serializing network data for workflow {}", workflowId, e);
            }
        }
    }
}
 
Example #6
Source File: SurveyInstanceExtractor.java    From waltz with Apache License 2.0 5 votes vote down vote up
/**
 * Renders the survey report (header derived from {@code questions}, then one
 * CSV row per entry of {@code reportRows}) and returns it as UTF-8 bytes.
 *
 * @throws IOException if a row cannot be written
 */
private byte[] mkCSVReport(List<SurveyQuestion> questions,
                           List<List<Object>> reportRows) throws IOException {
    List<String> headers = mkHeaderStrings(questions);

    StringWriter writer = new StringWriter();
    // try-with-resources closes (and therefore flushes) the CSV writer even if
    // a row fails to serialize; the original only flushed on the happy path.
    try (CsvListWriter csvWriter = new CsvListWriter(writer, CsvPreference.EXCEL_PREFERENCE)) {
        csvWriter.write(headers);
        reportRows.forEach(unchecked(row -> csvWriter.write(simplify(row))));
    }

    // Explicit charset: the no-arg getBytes() depends on the platform default.
    return writer.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
}
 
Example #7
Source File: WritingFeaturesTest.java    From super-csv with Apache License 2.0 5 votes vote down vote up
/**
 * Smoke test: every writer variant must be constructible over a plain
 * {@link java.io.Writer} without throwing.
 */
@Test
public void testWriteToWriter() throws IOException {
	final StringWriter sink = new StringWriter();
	new CsvListWriter(sink, STANDARD_PREFERENCE);
	new CsvMapWriter(sink, STANDARD_PREFERENCE);
	new CsvBeanWriter(sink, STANDARD_PREFERENCE);
}
 
Example #8
Source File: WritingFeaturesTest.java    From super-csv with Apache License 2.0 5 votes vote down vote up
/**
 * Writes a single row of {@code data} through the given {@code processors}
 * using the supplied preference, and returns the resulting CSV text.
 *
 * @throws IOException if writing fails
 */
private String writeToCsv(List<String> data, CellProcessor[] processors, CsvPreference preference)
	throws IOException {
	StringWriter writer = new StringWriter();
	// try-with-resources: the original leaked the writer when write() threw,
	// because close() was not in a finally block.
	try (CsvListWriter listWriter = new CsvListWriter(writer, preference)) {
		listWriter.write(data, processors);
	}
	return writer.toString();
}
 
Example #9
Source File: ExportCSVWriterBuilder.java    From spliceengine with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Builds a {@link CsvListWriter} over {@code outputStream} configured from the
 * export parameters (encoding, quote char, field and record delimiters), with
 * column-selective quoting and a buffered sink.
 *
 * @throws IOException if the character encoding is unsupported
 */
public CsvListWriter build(OutputStream outputStream, ExportParams exportParams) throws IOException {
    // Buffer the encoded stream so row writes do not hit the OS a field at a time.
    Writer bufferedOut = new BufferedWriter(
            new OutputStreamWriter(outputStream, exportParams.getCharacterEncoding()),
            WRITE_BUFFER_SIZE_BYTES);

    // Quote only the columns that require it, using the caller's delimiters.
    CsvPreference preference = new CsvPreference.Builder(
            exportParams.getQuoteChar(),
            exportParams.getFieldDelimiter(),
            exportParams.getRecordDelimiter())
            .useQuoteMode(new ColumnQuoteMode())
            .build();

    return new CsvListWriter(bufferedOut, preference);
}
 
Example #10
Source File: ExportCSVWriterBuilderTest.java    From spliceengine with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * End-to-end check of the writer produced by {@code csvWriterBuilder}: plain
 * fields pass through unquoted, while fields containing the delimiter, a
 * newline, or a quote are quoted (with embedded quotes doubled), and
 * multi-byte characters survive the UTF-8 round trip.
 */
@Test
public void buildCVSWriter() throws IOException {

    // given
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
    ExportParams exportParams = ExportParams.withDirectory("/tmp");

    // when
    CsvListWriter csvWriter = csvWriterBuilder.build(byteStream, exportParams);
    csvWriter.write(new String[]{"a1", "b1", "c1", "d1"});
    csvWriter.write(new String[]{"a2", "b 2", "c2", "d2"});      // space in field
    csvWriter.write(new String[]{"a3", "b3", "c3", "d,3"});      // comma in field
    csvWriter.write(new String[]{"a\n4", "b4", "c4", "d4"});     // newline in field
    csvWriter.write(new String[]{"a5", "b\"5", "c5", "d5"});     // quote in field
    csvWriter.write(new String[]{"a5", "b5", "c5\u1272", "d5"}); // multi-byte unicode char in field
    csvWriter.close();

    // then
    assertEquals("" +
                    "a1,b1,c1,d1\n" +
                    "a2,b 2,c2,d2\n" +
                    "a3,b3,c3,\"d,3\"\n" +
                    "\"a\n" +
                    "4\",b4,c4,d4\n" +
                    "a5,\"b\"\"5\",c5,d5\n" +
                    "a5,b5,c5ቲ,d5\n",
            new String(byteStream.toByteArray(), "UTF-8"));

}
 
Example #11
Source File: HistoDbClientImpl.java    From ipst with Mozilla Public License 2.0 4 votes vote down vote up
/**
 * Builds a one-record CSV (header row + value row) and POSTs it to the histo
 * DB under {@code data/<id>.csv}, returning the server's response body.
 *
 * <p>The {@code _id} is deliberately NOT added as a column: it is carried in
 * the URL. A null {@code id} falls back to the server-side auto-increment
 * endpoint ({@code data/autoIncrement.csv}).
 *
 * @throws RuntimeException wrapping any {@link IOException} from CSV
 *         serialization or the HTTP call
 */
public String updateRecord(String id, String[] headers, Object[] values) {
    StringWriter sw = new StringWriter();
    String idNonNull = id;
    try {
        // Close the CSV writer (try-with-resources) before reading sw, so the
        // record is fully flushed; the original close() was not exception-safe.
        try (CsvListWriter writer = new CsvListWriter(sw, new CsvPreference.Builder('"', ',', "\r\n").build())) {
            writer.writeHeader(headers);
            writer.write(values);
        }

        // if no id is provided, rely on server-side auto-increment mechanism
        if (idNonNull == null) {
            idNonNull = "autoIncrement";
        }

        try (InputStream is = httpClient.postHttpRequest(new HistoDbUrl(config,
                                                                        "data/" + idNonNull + ".csv", // WARN here one must NOT use the itesla suffix (not supporting POST of new data)
                                                                        Collections.emptyMap()),
                                                         sw.toString().getBytes(StandardCharsets.UTF_8))) {
            return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
        }
    } catch (IOException e) {
        throw new RuntimeException("Failed to store network values for id " + idNonNull, e);
    }
}
 
Example #12
Source File: FlowSummaryWithTypesAndPhysicalsExport.java    From waltz with Apache License 2.0 4 votes vote down vote up
/**
 * Exports a flat CSV of logical flows (app-to-app only), enriched with the
 * apps' org units, data-type decorators and any associated physical flows.
 * Intricate stream pipeline left as-is; the fix is that the CSV writer is now
 * flushed/closed (and the Spring context closed) so buffered output is not
 * lost when the JVM exits.
 */
public static void main(String[] args) throws IOException {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);
    ApplicationIdSelectorFactory appIdSelectorFactory = new ApplicationIdSelectorFactory();
    ApplicationDao applicationDao = ctx.getBean(ApplicationDao.class);
    OrganisationalUnitDao organisationalUnitDao = ctx.getBean(OrganisationalUnitDao.class);
    LogicalFlowDao logicalFlowDao = ctx.getBean(LogicalFlowDao.class);
    LogicalFlowDecoratorDao decoratorDao = ctx.getBean(LogicalFlowDecoratorDao.class);
    DataTypeDao dataTypeDao = ctx.getBean(DataTypeDao.class);

    Select<Record1<Long>> appSelector = mkAppIdSelector(appIdSelectorFactory);
    Select<Record1<Long>> logicalFlowSelector = mkLogicalFlowSelectorFromAppSelector(appSelector);

    System.out.println("Loading apps");
    Set<Application> allApps = fromCollection(applicationDao.findAll());
    System.out.println("Loading in scope apps");
    Set<Long> inScopeAppIds = toIds(applicationDao.findByAppIdSelector(appSelector));
    System.out.println("Loading OUs");
    List<OrganisationalUnit> allOUs = organisationalUnitDao.findAll();
    System.out.println("Loading DTs");
    List<DataType> allDataTypes = dataTypeDao.findAll();

    System.out.println("Loading Logical Flows");
    List<LogicalFlow> logicalFlows = logicalFlowDao.findBySelector(logicalFlowSelector);
    System.out.println("Loading decorators");
    List<DataTypeDecorator> decorators = decoratorDao.findByAppIdSelector(appSelector);
    System.out.println("Loading phys flows");
    Map<Long, Collection<Tuple7<Long, String, String, String, String, String, String>>> physicalsByLogical = loadPhysicalsByLogical(dsl, logicalFlowSelector);

    System.out.println("Indexing");
    Map<Optional<Long>, Application> appsById = indexByOptId(allApps);
    Map<Optional<Long>, DataType> dataTypesById = indexByOptId(allDataTypes);
    Map<Optional<Long>, OrganisationalUnit> ousById = indexByOptId(allOUs);

    Map<Long, Collection<DataTypeDecorator>> decoratorsByLogicalFlowId = groupBy(DataTypeDecorator::dataFlowId, decorators);

    System.out.println("Processing");
    CsvListWriter csvWriter = setupCSVWriter();

    try {
        logicalFlows
                .stream()
                .filter(lf -> lf.source().kind() == EntityKind.APPLICATION && lf.target().kind() == EntityKind.APPLICATION)
                .map(Tuple::tuple)
                .map(t -> t.concat(appsById.get(Optional.of(t.v1.source().id()))))
                .map(t -> t.concat(appsById.get(Optional.of(t.v1.target().id()))))
                .filter(t -> t.v2 != null && t.v3 != null)
                .map(t -> t.concat(ousById.get(Optional.of(t.v2.organisationalUnitId()))))
                .map(t -> t.concat(ousById.get(Optional.of(t.v3.organisationalUnitId()))))
                .map(t -> t.concat(decoratorsByLogicalFlowId
                        .getOrDefault(
                                t.v1.id().orElse(-1L),
                                emptyList())
                        .stream()
                        .filter(d -> d.decoratorEntity().kind() == EntityKind.DATA_TYPE)
                        .map(d -> dataTypesById.get(Optional.of(d.decoratorEntity().id())))
                        .sorted(Comparator.comparing(NameProvider::name))
                        .collect(Collectors.toList())))
                .map(t -> t.concat(inScopeAppIds.contains(t.v2.id().get())))
                .map(t -> t.concat(inScopeAppIds.contains(t.v3.id().get())))
                // (lf:1, src:2, trg:3, srcOu:4, trgOU:5, dataType[]:6, srcInScope: 7, trgInScope: 8)
                .flatMap(t -> physicalsByLogical
                            .getOrDefault(
                                    t.v1.id().orElse(-1L),
                                    newArrayList(tuple(-1L, "-", "-", "-", "-", "-", "-")))
                            .stream()
                            .map(p -> t.concat(p.skip1())))
                .map(t -> newArrayList(
                        t.v2.name(),  // src
                        t.v2.assetCode().orElse(""),
                        t.v2.applicationKind().name(),
                        t.v2.entityLifecycleStatus().name(),
                        Optional.ofNullable(t.v4).map(NameProvider::name).orElse("?"), // src OU
                        t.v7.toString(),
                        t.v3.name(),  // trg
                        t.v3.assetCode().orElse(""),
                        t.v3.applicationKind().name(),
                        t.v3.entityLifecycleStatus().name(),
                        Optional.ofNullable(t.v5).map(NameProvider::name).orElse("?"), // trg OU
                        t.v8.toString(),
                        StringUtilities.joinUsing(t.v6, NameProvider::name, ","),
                        t.v9,
                        t.v10,
                        t.v11,
                        t.v12,
                        t.v13,
                        t.v14))
                .forEach(Unchecked.consumer(csvWriter::write));
    } finally {
        // Without this close the buffered rows were never flushed to flows.csv.
        csvWriter.close();
        ctx.close();
    }
}
 
Example #13
Source File: ExportFunction.java    From spliceengine with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Creates an {@link ExportExecRowWriter} over a CSV writer configured from the
 * given export parameters.
 *
 * @throws IOException if the underlying writer cannot be built
 */
public static ExportExecRowWriter initializeRowWriter(OutputStream outputStream, ExportParams exportParams) throws IOException {
    return new ExportExecRowWriter(new ExportCSVWriterBuilder().build(outputStream, exportParams));
}
 
Example #14
Source File: ExportExecRowWriter.java    From spliceengine with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Wraps the given CSV writer; rows written through this object are delegated
 * to it.
 *
 * @param csvWriter the destination writer; must not be null
 */
public ExportExecRowWriter(CsvListWriter csvWriter) {
    checkNotNull(csvWriter);
    this.csvWriter = csvWriter;
}
 
Example #15
Source File: CSVSerializer.java    From waltz with Apache License 2.0 votes vote down vote up
/**
 * Callback invoked with an open {@link CsvListWriter}; implementations may
 * throw any exception, which the caller is expected to handle or wrap.
 *
 * @param csvWriter the writer to serialize content to
 */
void accept(CsvListWriter csvWriter) throws Exception;