Java Code Examples for au.com.bytecode.opencsv.CSVWriter#writeNext()

The following examples show how to use au.com.bytecode.opencsv.CSVWriter#writeNext(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
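For orientation before the project examples, here is a minimal, self-contained sketch of the basic call pattern that all of them follow: wrap any java.io.Writer in a CSVWriter, pass one String[] per CSV row to writeNext(), and close the writer when finished. The file name, class name, and row values below are illustrative placeholders, not taken from any of the projects listed here.

import java.io.FileWriter;
import java.io.IOException;

import au.com.bytecode.opencsv.CSVWriter;

public class WriteNextSketch {
    public static void main(String[] args) throws IOException {
        // "example.csv" and the rows below are placeholders for illustration.
        CSVWriter writer = new CSVWriter(new FileWriter("example.csv"));
        writer.writeNext(new String[]{"id", "name"});   // header row
        writer.writeNext(new String[]{"1", "Alice"});   // one data row per writeNext() call
        writer.close();                                 // flushes and closes the underlying FileWriter
    }
}
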
Example 1
Source File: QueryService.java    From frostmourne with MIT License
@Override
public void exportToCsv(CSVWriter csvWriter, String dataName, DateTime startTime, DateTime endTime, String esQuery,
                        String scrollId, String sortOrder) {
    DataNameContract dataNameContract = dataAdminService.findDataNameByName(dataName);
    DataSourceContract dataSourceContract = dataAdminService.findDatasourceById(dataNameContract.getData_source_id());
    ElasticsearchDataResult elasticsearchDataResult = elasticsearchDataQuery.query(dataNameContract, dataSourceContract,
            startTime, endTime, esQuery, scrollId, sortOrder, null);
    String[] heads = elasticsearchDataResult.getFields().toArray(new String[0]);
    csvWriter.writeNext(heads);
    while (true) {
        if (elasticsearchDataResult.getTotal() > 10 * 10000) {
            throw new ProtocolException(500, "Total count exceeds 100,000; download is not allowed");
        }
        if (elasticsearchDataResult.getLogs().size() == 0) {
            break;
        }
        for (Map<String, Object> log : elasticsearchDataResult.getLogs()) {
            String[] data = Arrays.stream(heads).map(h -> log.get(h) == null ? null : log.get(h).toString()).toArray(String[]::new);
            csvWriter.writeNext(data);
        }
        scrollId = elasticsearchDataResult.getScrollId();
        elasticsearchDataResult = elasticsearchDataQuery.query(dataNameContract, dataSourceContract,
                startTime, endTime, esQuery, scrollId, sortOrder, null);
    }
}
 
Example 2
Source File: ReplicateInteractions.java    From systemsgenetics with GNU General Public License v3.0
private static CSVWriter writeHeader(File file, String[] row) throws IOException {
	CSVWriter replicatedSameDirectionWriter = new CSVWriter(new BufferedWriter(new FileWriter(file)), '\t', '\0', '\0');
	int c = 0;
	row[c++] = "Variant";
	row[c++] = "Gene";
	row[c++] = "Covariate";
	row[c++] = "Variant_chr";
	row[c++] = "Variant_pos";
	row[c++] = "Variant alleles";
	row[c++] = "Assessed_allele";
	row[c++] = "Discovery_meta_QTL";
	row[c++] = "Discovery_meta_SNP";
	row[c++] = "Discovery_meta_covariate";
	row[c++] = "Discovery_meta_interaction";
	row[c++] = "Replication_meta_QTL";
	row[c++] = "Replication_meta_SNP";
	row[c++] = "Replication_meta_covariate";
	row[c++] = "Replication_meta_interaction";
	replicatedSameDirectionWriter.writeNext(row);
	return replicatedSameDirectionWriter;
}
 
Example 3
Source File: DataSet.java    From aifh with Apache License 2.0
/**
 * Save the specified data to an output stream.
 *
 * @param os The output stream.
 * @param ds The data set.
 */
public static void save(final OutputStream os, final DataSet ds) {
    try {
        final Writer writer = new OutputStreamWriter(os);
        final CSVWriter csv = new CSVWriter(writer);

        csv.writeNext(ds.getHeaders());
        final String[] items2 = new String[ds.getHeaderCount()];

        for (final Object[] item : ds.getData()) {
            for (int i = 0; i < ds.getHeaderCount(); i++) {
                items2[i] = item[i].toString();
            }
            csv.writeNext(items2);
        }
        csv.close();
    } catch (IOException ex) {
        throw new AIFHError(ex);
    }
}
 
Example 4
Source File: ThreeThingsDatabase.java    From three-things-today with Apache License 2.0
public synchronized String exportDatabaseToCsvString() {
    StringWriter stringWriter = new StringWriter();
    CSVWriter csvWriter = new CSVWriter(stringWriter);
    csvWriter.writeNext(ThreeThingsEntry.COLUMNS);

    SQLiteDatabase db = mDbHelper.getReadableDatabase();
    Cursor cursor = db.rawQuery("SELECT * FROM " + ThreeThingsEntry.TABLE_NAME, null);
    while (cursor.moveToNext()) {
        List<String> data = new ArrayList<>(ThreeThingsEntry.COLUMNS.length);
        for (String column : ThreeThingsEntry.COLUMNS) {
            data.add(cursor.getString(cursor.getColumnIndexOrThrow(column)));
        }
        csvWriter.writeNext(data.toArray(new String[0]));
    }

    try {
        csvWriter.close();
    } catch (IOException e) {
        // Ignore.
    }

    return stringWriter.toString();
}
 
Example 5
Source File: ImportExportTask.java    From Passbook with Apache License 2.0
private String exportCSV() {
    String result;
    try{
        AccountManager am = Application.getInstance().getAccountManager();
        File file = new File(Environment.getExternalStorageDirectory(), "pb.csv");
        FileWriter fw = new FileWriter(file, false);
        CSVWriter csvWriter = new CSVWriter(fw);
        csvWriter.writeNext(am.getCategoryNames());
        List<AccountManager.Account> accounts = am.getAllAccounts(false);
        for(AccountManager.Account a: accounts) {
            csvWriter.writeNext(a.getStringList(am));
        }
        csvWriter.close();
        result = file.getPath();
    } catch(Exception ex) {
        result = null;
    }
    return result;
}
 
Example 6
Source File: DataMiningUtils.java    From Knowage-Server with GNU Affero General Public License v3.0
public static void writeFields(DataStore dataStore, CSVWriter writer) {
	logger.debug("IN");
	Iterator records = dataStore.iterator();
	while (records.hasNext()) {
		IRecord record = (IRecord) records.next();
		String row = "";
		for (int i = 0; i < dataStore.getMetaData().getFieldCount(); i++) {
			IField field = record.getFieldAt(i);
			String value = String.valueOf(field.getValue());
			if (!value.equals("null")) {
				value = value.replaceAll(DataMiningConstants.CSV_SEPARATOR, "");
				row += value + DataMiningConstants.CSV_SEPARATOR;
			} else {
				row += "" + DataMiningConstants.CSV_SEPARATOR;
			}
		}

		writer.writeNext(row.split(DataMiningConstants.CSV_SEPARATOR));
	}
	logger.debug("OUT");
}
 
Example 7
Source File: GetFusionCsv.java    From collect-earth with MIT License
private void processFile() throws IOException {
	final CSVReader csvReader = new CSVReader(new FileReader(new File("ullaan.csv")), ';');
	final CSVWriter csvWriter = new CSVWriter(new FileWriter(new File("resultFusion.csv")), ';');
	String[] nextRow;
	final String[] writeRow = new String[4];
	writeRow[0] = "Coordinates";
	writeRow[1] = "Land Use ID";
	writeRow[2] = "Land Use name";
	writeRow[3] = "Placemark ID";
	csvWriter.writeNext(writeRow);
	while ((nextRow = csvReader.readNext()) != null) {

		writeRow[0] = "<Point><coordinates>" + replaceComma(nextRow[2]) + "," + replaceComma(nextRow[3]) + ",0.0</coordinates></Point>";
		final String landUse = nextRow[5];
		final int classId = getId(landUse);
		writeRow[1] = classId + "";
		writeRow[2] = landUse;
		writeRow[3] = nextRow[0];
		csvWriter.writeNext(writeRow);
	}
	csvWriter.close();
	csvReader.close();
}
 
Example 8
Source File: CSVDataSetFormatter.java    From winter with Apache License 2.0
/**
 * Writes the data set to a CSV file
 * 
 * @param file
 * @param dataset
 * @param orderedHeader
 * @throws IOException
 */
public void writeCSV(File file, DataSet<RecordType, SchemaElementType> dataset, List<SchemaElementType> orderedHeader)
		throws IOException {
	CSVWriter writer = new CSVWriter(new FileWriter(file));
	
	String[] headers = null;
	if(orderedHeader != null){
		headers = getHeader(orderedHeader);
	}
	else{
		headers = getHeader(sortAttributesAlphabetically(dataset));
	}
	

	if (headers != null) {
		writer.writeNext(headers);
	}

	for (RecordType record : dataset.get()) {
		String[] values = format(record, dataset, orderedHeader);

		writer.writeNext(values);
	}

	writer.close();
}
 
Example 9
Source File: YahooIntraDayHistoMessageConverter.java    From cloudstreetmarket.com with GNU General Public License v3.0
@Override
protected void writeInternal(QuoteWrapper quotes, HttpOutputMessage httpOutputMessage) throws IOException, HttpMessageNotWritableException {
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(httpOutputMessage.getBody()));
    for (YahooQuote quote : quotes) {
        writer.writeNext(
        		new String[]{	quote.getId(),
        						quote.getName(),
        						String.valueOf(quote.getOpen()),
        						String.valueOf(quote.getPreviousClose()),
                				String.valueOf(quote.getLast()),
                        		String.valueOf(quote.getLastChange()),
                        		String.valueOf(quote.getLastChangePercent()),
                        		String.valueOf(quote.getHigh()),
                        		String.valueOf(quote.getLow()),
                        		String.valueOf(quote.getBid()),
                        		String.valueOf(quote.getAsk()),
                        		String.valueOf(quote.getVolume()),
        						quote.getExchange(),
        						quote.getCurrency()
        		});
    }

    writer.close();
}
 
Example 10
Source File: YahooQuoteMessageConverter.java    From cloudstreetmarket.com with GNU General Public License v3.0
@Override
protected void writeInternal(QuoteWrapper quotes, HttpOutputMessage httpOutputMessage) throws IOException, HttpMessageNotWritableException {
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(httpOutputMessage.getBody()));
    for (YahooQuote quote : quotes) {
        writer.writeNext(
        		new String[]{	quote.getId(),
        						quote.getName(),
        						String.valueOf(quote.getOpen()),
        						String.valueOf(quote.getPreviousClose()),
                				String.valueOf(quote.getLast()),
                        		String.valueOf(quote.getLastChange()),
                        		String.valueOf(quote.getLastChangePercent()),
                        		String.valueOf(quote.getHigh()),
                        		String.valueOf(quote.getLow()),
                        		String.valueOf(quote.getBid()),
                        		String.valueOf(quote.getAsk()),
                        		String.valueOf(quote.getVolume()),
        						quote.getExchange(),
        						quote.getCurrency()
        		});
    }

    writer.close();
}
 
Example 11
Source File: YahooHistoMessageConverter.java    From cloudstreetmarket.com with GNU General Public License v3.0
@Override
protected void writeInternal(QuoteWrapper quotes, HttpOutputMessage httpOutputMessage) throws IOException, HttpMessageNotWritableException {
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(httpOutputMessage.getBody()));
    for (YahooQuote quote : quotes) {
        writer.writeNext(
        		new String[]{	quote.getId(),
        						quote.getName(),
        						String.valueOf(quote.getOpen()),
        						String.valueOf(quote.getPreviousClose()),
                				String.valueOf(quote.getLast()),
                        		String.valueOf(quote.getLastChange()),
                        		String.valueOf(quote.getLastChangePercent()),
                        		String.valueOf(quote.getHigh()),
                        		String.valueOf(quote.getLow()),
                        		String.valueOf(quote.getBid()),
                        		String.valueOf(quote.getAsk()),
                        		String.valueOf(quote.getVolume()),
        						quote.getExchange(),
        						quote.getCurrency()
        		});
    }

    writer.close();
}
 
Example 12
Source File: CVSRemoteFileFormatter.java    From AIDR with GNU Affero General Public License v3.0
public CSVWriter instanceToOutput(String fileName) throws Exception{
    File file = new File(fileName);
    //file.getAbsolutePath();
    CSVWriter writer = new CSVWriter(new FileWriter(fileName, true));
    //  public MicromapperOuput(String tweetID, String tweet, String author, String lat, String lng, String url, String created, String answer){

    String[] header = {"tweetID", "tweet","author", "lat", "lng", "url", "created", "answer"};
    writer.writeNext(header);

    return writer;

}
 
Example 13
Source File: KaggleOtto.java    From aifh with Apache License 2.0
public void createSubmission(BasicNetwork network) throws IOException {
    System.out.println("Building submission file.");
    FileInputStream istream = new FileInputStream(KAGGLE_TEST);
    final DataSet ds = DataSet.load(istream);
    istream.close();
    int columnCount = ds.getHeaderCount();

    List<String> ids = ds.columnAsList(0);
    ds.deleteColumn(0);

    for(int i=0;i<columnCount-1;i++) {
        ds.normalizeZScore(i);
    }

    final List<BasicData> data = ds.extractSupervised(0, columnCount-1, 0, 0);

    CSVWriter writer = new CSVWriter(new FileWriter(KAGGLE_SUBMIT));
    for(int i = 0; i<data.size(); i++) {
        double[] output = network.computeRegression(data.get(i).getInput());
        String[] line = new String[10];
        line[0] = ids.get(i);
        for(int j=0;j<output.length;j++) {
            line[j+1] = String.format(Locale.ENGLISH, "%f", output[j]);
        }
        writer.writeNext(line);

    }
    writer.close();
}
 
Example 14
Source File: DataMiningUtils.java    From Knowage-Server with GNU Affero General Public License v3.0
public static void writeColumns(DataStore dataStore, CSVWriter writer) {
	logger.debug("IN");
	String col = "";

	for (int j = 0; j < dataStore.getMetaData().getFieldCount(); j++) {
		IFieldMetaData fieldMetaData = dataStore.getMetaData().getFieldMeta(j);
		String fieldHeader = fieldMetaData.getAlias() != null ? fieldMetaData.getAlias() : fieldMetaData.getName();
		col += fieldHeader + DataMiningConstants.CSV_SEPARATOR;
	}
	writer.writeNext(col.split(DataMiningConstants.CSV_SEPARATOR));
	logger.debug("OUT");
}
 
Example 15
Source File: DataUtil.java    From aifh with Apache License 2.0
/**
 * Dump a dataset as a CSV.
 * @param file The file to dump to.
 * @param dataset The dataset.
 * @throws IOException If an IO error occurs.
 */
public static void dumpCSV(File file, List<BasicData> dataset) throws IOException {
    CSVWriter writer = new CSVWriter(new FileWriter(file));
    int inputCount = dataset.get(0).getInput().length;
    int outputCount = dataset.get(0).getIdeal().length;
    int totalCount = inputCount + outputCount;

    String[] headers = new String[totalCount];
    int idx = 0;
    for(int i=0;i<inputCount;i++) {
        headers[idx++] = "x"+i;
    }
    for(int i=0;i<outputCount;i++) {
        headers[idx++] = "y"+i;
    }
    writer.writeNext(headers);

    String[] line = new String[totalCount];
    for(int i = 0; i<dataset.size(); i++) {
        BasicData item = dataset.get(i);

        idx = 0;
        for(int j=0;j<inputCount;j++) {
            line[idx++] = String.format(Locale.ENGLISH, "%.2f", item.getInput()[j]);
        }
        for(int j=0;j<outputCount;j++) {
            line[idx++] = String.format(Locale.ENGLISH, "%.2f", item.getIdeal()[j]);
        }
        writer.writeNext(line);

    }
    writer.close();
}
 
Example 16
Source File: NormalizeTitanic.java    From aifh with Apache License 2.0
/**
 * The main method.
 *
 * @param args The arguments.
 */
public static void main(String[] args) {
    String filename;

    if (args.length != 1) {
        filename = System.getProperty("FILENAME");
        if( filename==null ) {
            System.out.println("Please call this program with a single parameter that specifies your data directory.\n" +
                    "If you are calling with gradle, consider:\n" +
            "gradle runCapstoneTitanic1 -Pdata_path=[path to your data directory]\n");
            System.exit(0);
        }
    } else {
        filename = args[0];
    }

    File dataPath = new File(filename);
    File trainingPath = new File(dataPath, TitanicConfig.TrainingFilename);
    File testPath = new File(dataPath, TitanicConfig.TestFilename);
    File normalizePath = new File(dataPath, TitanicConfig.NormDumpFilename);


    try {
        TitanicStats stats = new TitanicStats();
        analyze(stats, trainingPath);
        analyze(stats, testPath);
        stats.dump();

        List<String> ids = new ArrayList<String>();
        List<BasicData> training = normalize(stats, trainingPath, ids,
                TitanicConfig.InputNormalizeLow,
                TitanicConfig.InputNormalizeHigh,
                TitanicConfig.PredictSurvive,
                TitanicConfig.PredictPerish);

        // Write out the normalized file, mainly so that you can examine it.
        // This file is not actually used by the program.
        FileOutputStream fos = new FileOutputStream(normalizePath);
        CSVWriter csv = new CSVWriter(new OutputStreamWriter(fos));

        csv.writeNext(new String[]{
                "id",
                "age", "sex-male", "pclass", "sibsp", "parch", "fare",
                "embarked-c", "embarked-q", "embarked-s", "name-mil", "name-nobility", "name-dr", "name-clergy"
        });

        int idx = 0;
        for (BasicData data : training) {
            String[] line = {
                    ids.get(idx++),
                    FormatNumeric.formatDouble(data.getInput()[0], 5),
                    FormatNumeric.formatDouble(data.getInput()[1], 5),
                    FormatNumeric.formatDouble(data.getInput()[2], 5),
                    FormatNumeric.formatDouble(data.getInput()[3], 5),
                    FormatNumeric.formatDouble(data.getInput()[4], 5),
                    FormatNumeric.formatDouble(data.getInput()[5], 5),
                    FormatNumeric.formatDouble(data.getInput()[6], 5),
                    FormatNumeric.formatDouble(data.getInput()[7], 5),
                    FormatNumeric.formatDouble(data.getInput()[8], 5),
                    FormatNumeric.formatDouble(data.getInput()[9], 5),
                    FormatNumeric.formatDouble(data.getInput()[10], 5),
                    FormatNumeric.formatDouble(data.getInput()[11], 5),
                    FormatNumeric.formatDouble(data.getInput()[12], 5),
                    FormatNumeric.formatDouble(data.getIdeal()[0], 5)

            };

            csv.writeNext(line);
        }

        csv.close();
        fos.close();

    } catch (IOException ex) {
        ex.printStackTrace();
    }


}
 
Example 17
Source File: CreateTableStatistics.java    From winter with Apache License 2.0
public void run() throws FileNotFoundException, IOException {
	
	CSVWriter resultStatisticsWriter = new CSVWriter(new FileWriter(new File(new File(resultsLocation), "table_statistics.csv"), true));
	
	for(File f : new File(tablesLocation).listFiles()) {
        Gson gson = new Gson();
        
        String json = IOUtils.toString(new FileInputStream(f));
        
        // get the data from the JSON source
        JsonTableSchema data = gson.fromJson(json, JsonTableSchema.class);
        
        // check if any data was parsed ... if the file used the schema with mappings, data will not have any contents
        // but as no exception is thrown, we have to check attributes of data for null ...
        if(data.getRelation()==null) {
        	
        	JsonTableWithMappingSchema moreData = gson.fromJson(json, JsonTableWithMappingSchema.class);
        	
        	data = moreData.getTable();
        }
        
        if(data.getRelation()!=null) {
        	
        	int rows = 0;
        	int cols = data.getRelation().length;
        	
        	for(String[] values : data.getRelation()) {
        		rows = Math.max(values.length, rows);
        	}
        	
        	rows -= data.getNumberOfHeaderRows();
        	
   			resultStatisticsWriter.writeNext(new String[] {
   					new File(tablesLocation).getName(),
   					f.getName(),
   					Integer.toString(rows),
   					Integer.toString(cols)
   			});
    		
        }
	}
	
	resultStatisticsWriter.close();

	
}
 
Example 18
Source File: OrganizationExport.java    From usergrid with Apache License 2.0
@Override
public void runTool( CommandLine line ) throws Exception {
    startSpring();

    setVerbose( line );

    prepareBaseOutputFileName( line );

    outputDir = createOutputParentDir();

    String queryString = line.getOptionValue( QUERY_ARG );

    Query query = Query.fromQL( queryString );

    logger.info( "Export directory: {}", outputDir.getAbsolutePath() );

    CSVWriter writer = new CSVWriter( new FileWriter( outputDir.getAbsolutePath() + "/admins.csv" ), ',' );

    writer.writeNext( new String[] { "Org uuid", "Org Name", "Admin uuid", "Admin Name", "Admin Email", "Admin Created Date" } );

    Results organizations = null;

    do {

        organizations = getOrganizations( query );

        for ( Entity organization : organizations.getEntities() ) {
            final String orgName = organization.getProperty( "path" ).toString();
            final UUID orgId = organization.getUuid();

            logger.info( "Org Name: {} key: {}", orgName, orgId );

            for ( UserInfo user : managementService.getAdminUsersForOrganization( organization.getUuid() ) ) {

                Entity admin = managementService.getAdminUserEntityByUuid( user.getUuid() );

                Long createdDate = ( Long ) admin.getProperties().get( "created" );

                writer.writeNext( new String[] { orgId.toString(),
                        orgName, user.getUuid().toString(), user.getName(), user.getEmail(),
                        createdDate == null ? "Unknown" : sdf.format( new Date( createdDate ) )
                } );
            }
        }

        query.setCursor( organizations.getCursor() );
    }
    while ( organizations != null && organizations.hasCursor() );

    logger.info( "Completed export" );

    writer.flush();
    writer.close();
}
 
Example 19
Source File: CVSRemoteFileFormatter.java    From AIDR with GNU Affero General Public License v3.0
public void addToCVSOuputFile(String[] data, CSVWriter writer) throws Exception {
    writer.writeNext(data);
}