Java Code Examples for au.com.bytecode.opencsv.CSVWriter#close()

The following examples show how to use au.com.bytecode.opencsv.CSVWriter#close(). These examples are extracted from open-source projects. You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also want to check out the right sidebar, which shows related API usage.
Example 1
Source Project: pxf   File: CsvUtils.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Write {@link Table} to a CSV file.
 *
 * @param table {@link Table} contains required data list to write to CSV file
 * @param targetCsvFile path of the CSV file to write the data Table to
 * @throws IOException if the file cannot be created or a row cannot be written
 */
public static void writeTableToCsvFile(Table table, String targetCsvFile)
		throws IOException {

	// try-with-resources guarantees the writer (and its underlying FileWriter)
	// is closed even when a write fails. CSVWriter.close() flushes first, so
	// the explicit flush()-then-close() of the old finally block is unnecessary
	// (and was unsafe: a throwing flush() would have skipped close()).
	try (CSVWriter csvWriter = new CSVWriter(new FileWriter(new File(targetCsvFile)))) {

		// go over list and write each inner list to csv file
		for (List<String> currentList : table.getData()) {
			// writeNext() requires a String[]; copy the row directly
			csvWriter.writeNext(currentList.toArray(new String[currentList.size()]));
		}
	}
}
 
Example 2
Source Project: celos   File: HiveTableDeployer.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Writes the fixture table as a tab-separated, unquoted file under
 * {@code <hdfsPrefix>/.hive/<uuid>} and opens its permissions so other
 * components can read it.
 *
 * @param fixTable rows/columns to serialize
 * @param testRun  supplies the HDFS prefix and file system
 * @return the HDFS path of the written file
 * @throws Exception if HDFS access or writing fails
 */
private Path createTempHdfsFileForInsertion(FixTable fixTable, TestRun testRun) throws Exception {

        Path pathToParent = new Path(testRun.getHdfsPrefix(), ".hive");
        Path pathTo = new Path(pathToParent, UUID.randomUUID().toString());
        FileSystem fileSystem = testRun.getCiContext().getFileSystem();
        fileSystem.mkdirs(pathTo.getParent());
        FSDataOutputStream outputStream = fileSystem.create(pathTo);

        CSVWriter writer = new CSVWriter(new OutputStreamWriter(outputStream), '\t', CSVWriter.NO_QUOTE_CHARACTER);
        try {
            for (FixTable.FixRow fixRow : fixTable.getRows()) {
                // emit cells in the table's declared column order; absent cells become null
                List<String> rowData = Lists.newArrayList();
                for (String colName : fixTable.getColumnNames()) {
                    rowData.add(fixRow.getCells().get(colName));
                }
                writer.writeNext(rowData.toArray(new String[rowData.size()]));
            }
        } finally {
            // close() flushes the CSV buffer and the HDFS stream even when a row write fails
            writer.close();
        }

        fileSystem.setPermission(pathToParent, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
        fileSystem.setPermission(pathTo, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
        return pathTo;
    }
 
Example 3
Source Project: collect-earth   File: GetFusionCsv.java    License: MIT License 6 votes vote down vote up
/**
 * Converts "ullaan.csv" (semicolon-separated) into "resultFusion.csv",
 * emitting per row a KML point built from columns 2/3, the land-use id/name
 * derived from column 5, and the placemark id from column 0.
 *
 * @throws IOException if either file cannot be opened, read or written
 */
private void processFile() throws IOException {
	// try-with-resources closes both the reader and the writer even when a
	// row fails to parse or write (the old code leaked both on exception)
	try (CSVReader csvReader = new CSVReader(new FileReader(new File("ullaan.csv")), ';');
			CSVWriter csvWriter = new CSVWriter(new FileWriter(new File("resultFusion.csv")), ';')) {
		String[] nextRow;
		final String[] writeRow = new String[4];
		// header row
		writeRow[0] = "Coordinates";
		writeRow[1] = "Land Use ID";
		writeRow[2] = "Land Use name";
		writeRow[3] = "Placemark ID";
		csvWriter.writeNext(writeRow);
		while ((nextRow = csvReader.readNext()) != null) {
			// columns 2/3 hold the coordinates; replaceComma normalizes decimal separators
			writeRow[0] = "<Point><coordinates>" + replaceComma(nextRow[2]) + "," + replaceComma(nextRow[3]) + ",0.0</coordinates></Point>";
			final String landUse = nextRow[5];
			final int classId = getId(landUse);
			writeRow[1] = classId + "";
			writeRow[2] = landUse;
			writeRow[3] = nextRow[0];
			csvWriter.writeNext(writeRow);
		}
	}
}
 
Example 4
/**
 * Serializes each quote in the wrapper as one CSV row on the HTTP response body.
 * Numeric fields are rendered via String.valueOf.
 */
@Override
protected void writeInternal(QuoteWrapper quotes, HttpOutputMessage httpOutputMessage) throws IOException, HttpMessageNotWritableException {
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(httpOutputMessage.getBody()));
    try {
        for (YahooQuote quote : quotes) {
            writer.writeNext(new String[]{
                    quote.getId(),
                    quote.getName(),
                    String.valueOf(quote.getOpen()),
                    String.valueOf(quote.getPreviousClose()),
                    String.valueOf(quote.getLast()),
                    String.valueOf(quote.getLastChange()),
                    String.valueOf(quote.getLastChangePercent()),
                    String.valueOf(quote.getHigh()),
                    String.valueOf(quote.getLow()),
                    String.valueOf(quote.getBid()),
                    String.valueOf(quote.getAsk()),
                    String.valueOf(quote.getVolume()),
                    quote.getExchange(),
                    quote.getCurrency()
            });
        }
    } finally {
        // close() flushes buffered rows and releases the writer even when a
        // getter or write throws (previously the writer leaked on exception)
        writer.close();
    }
}
 
Example 5
/**
 * Serializes each quote in the wrapper as one CSV row on the HTTP response body.
 * Numeric fields are rendered via String.valueOf.
 */
@Override
protected void writeInternal(QuoteWrapper quotes, HttpOutputMessage httpOutputMessage) throws IOException, HttpMessageNotWritableException {
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(httpOutputMessage.getBody()));
    try {
        for (YahooQuote quote : quotes) {
            writer.writeNext(new String[]{
                    quote.getId(),
                    quote.getName(),
                    String.valueOf(quote.getOpen()),
                    String.valueOf(quote.getPreviousClose()),
                    String.valueOf(quote.getLast()),
                    String.valueOf(quote.getLastChange()),
                    String.valueOf(quote.getLastChangePercent()),
                    String.valueOf(quote.getHigh()),
                    String.valueOf(quote.getLow()),
                    String.valueOf(quote.getBid()),
                    String.valueOf(quote.getAsk()),
                    String.valueOf(quote.getVolume()),
                    quote.getExchange(),
                    quote.getCurrency()
            });
        }
    } finally {
        // close() flushes buffered rows and releases the writer even when a
        // getter or write throws (previously the writer leaked on exception)
        writer.close();
    }
}
 
Example 6
Source Project: Passbook   File: ImportExportTask.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Exports all accounts to "pb.csv" on external storage: one header row of
 * category names followed by one row per account.
 *
 * @return the exported file's path on success, or {@code null} on any failure
 */
private String exportCSV() {
    String result;
    try{
        AccountManager am = Application.getInstance().getAccountManager();
        File file = new File(Environment.getExternalStorageDirectory(), "pb.csv");
        // try-with-resources closes the writer even when a row write fails
        // (the old code leaked the writer on exception)
        try (CSVWriter csvWriter = new CSVWriter(new FileWriter(file, false))) {
            // first row: category names act as the CSV header
            csvWriter.writeNext(am.getCategoryNames());
            List<AccountManager.Account> accounts = am.getAllAccounts(false);
            for(AccountManager.Account a: accounts) {
                csvWriter.writeNext(a.getStringList(am));
            }
        }
        result = file.getPath();
    } catch(Exception ex) {
        // deliberate best-effort: a null result signals failure to the caller
        result = null;
    }
    return result;
}
 
Example 7
Source Project: mts   File: DerivedCounter.java    License: GNU General Public License v3.0 5 votes vote down vote up
/**
 * Dumps the counter's graph dataset to a CSV file as (timestamp, value)
 * rows, stopping once a sample's timestamp passes the report end timestamp.
 *
 * @param path destination file path
 * @throws IOException if the file cannot be created or written
 */
public void writeTimeCSV(String path) throws IOException
{
    // try-with-resources closes the writer even if a row write fails
    // (previously the writer leaked on exception)
    try (CSVWriter csvWriter = new CSVWriter(new FileWriter(new File(path)), this.csvSeparator))
    {
        double[] graphTable = this.counter.graphDataset.getGraphArray();

        // header row
        csvWriter.writeNext(new String[]
                {
                    "timestamp", "value"
                });

        for (int i = 0; i < graphTable.length; i++)
        {
            // samples are evenly spaced by the configured graph period
            long timestamp = i * this.counter.graphDataset.graphParameters.graphPeriod;

            if (timestamp > this.reportEndTimestamp)
            {
                break;
            }

            csvWriter.writeNext(new String[]
                    {
                        Long.toString(timestamp), Double.toString(graphTable[i])
                    });
        }
    }
}
 
Example 8
Source Project: aifh   File: DataUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Dump a dataset as a CSV: a header row ("x0".."xN", "y0".."yM") followed by
 * one row per item with inputs then ideals, each formatted to two decimals.
 *
 * @param file The file to dump to.
 * @param dataset The dataset; must contain at least one element.
 * @throws IOException If an IO error occurs.
 * @throws IllegalArgumentException if the dataset is null or empty.
 */
public static void dumpCSV(File file, List<BasicData> dataset) throws IOException {
    // guard: the column counts are derived from the first element
    if (dataset == null || dataset.isEmpty()) {
        throw new IllegalArgumentException("dataset must contain at least one element");
    }

    int inputCount = dataset.get(0).getInput().length;
    int outputCount = dataset.get(0).getIdeal().length;
    int totalCount = inputCount + outputCount;

    // try-with-resources closes the writer even when a row write fails
    try (CSVWriter writer = new CSVWriter(new FileWriter(file))) {
        String[] headers = new String[totalCount];
        int idx = 0;
        for(int i=0;i<inputCount;i++) {
            headers[idx++] = "x"+i;
        }
        for(int i=0;i<outputCount;i++) {
            headers[idx++] = "y"+i;
        }
        writer.writeNext(headers);

        // the line buffer is reused across rows; writeNext copies the values
        String[] line = new String[totalCount];
        for (BasicData item : dataset) {
            idx = 0;
            for(int j=0;j<inputCount;j++) {
                line[idx++] = String.format(Locale.ENGLISH, "%.2f", item.getInput()[j]);
            }
            for(int j=0;j<outputCount;j++) {
                line[idx++] = String.format(Locale.ENGLISH, "%.2f", item.getIdeal()[j]);
            }
            writer.writeNext(line);
        }
    }
}
 
Example 9
Source Project: aifh   File: KaggleOtto.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds the Kaggle Otto submission file: loads the test set, z-score
 * normalizes its feature columns, runs the network on each row and writes
 * one CSV line of (id, 9 class outputs) per row.
 *
 * @param network the trained network used to compute the outputs
 * @throws IOException if the test set cannot be read or the submission written
 */
public void createSubmission(BasicNetwork network) throws IOException {
    System.out.println("Building submission file.");
    // try-with-resources closes the input stream even if DataSet.load throws
    final DataSet ds;
    try (FileInputStream istream = new FileInputStream(KAGGLE_TEST)) {
        ds = DataSet.load(istream);
    }
    int columnCount = ds.getHeaderCount();

    // column 0 holds the row ids; pull it out before normalizing features
    List<String> ids = ds.columnAsList(0);
    ds.deleteColumn(0);

    for(int i=0;i<columnCount-1;i++) {
        ds.normalizeZScore(i);
    }

    final List<BasicData> data = ds.extractSupervised(0, columnCount-1, 0, 0);

    // try-with-resources flushes and closes the writer even when a row fails
    try (CSVWriter writer = new CSVWriter(new FileWriter(KAGGLE_SUBMIT))) {
        for(int i = 0; i<data.size(); i++) {
            double[] output = network.computeRegression(data.get(i).getInput());
            String[] line = new String[10];
            line[0] = ids.get(i);
            for(int j=0;j<output.length;j++) {
                line[j+1] = String.format(Locale.ENGLISH, "%f", output[j]);
            }
            writer.writeNext(line);
        }
    }
}
 
Example 10
/**
 * Writes one tab-separated line per covariate with its replication counts,
 * preceded by a header row. The '\0' quote and escape characters disable
 * quoting/escaping entirely.
 *
 * @param file            destination file
 * @param covariateCounts counts per covariate, written in map iteration order
 * @throws IOException if the file cannot be created or written
 */
private static void writeCovaraiteCounts(File file, LinkedHashMap<String, CovariateCount> covariateCounts) throws IOException {

		// try-with-resources closes the writer even when a row write fails
		// (previously the writer leaked on exception)
		try (CSVWriter covariateCountWriter = new CSVWriter(new BufferedWriter(new FileWriter(file)), '\t', '\0', '\0')) {
			int c = 0;
			// the row buffer is reused across lines; writeNext copies the values
			String[] row2 = new String[6];
			row2[c++] = "Covariate";
			row2[c++] = "Significant";
			row2[c++] = "ReplicatedSameDirection";
			row2[c++] = "ReplicatedOppositeDirection";
			row2[c++] = "NotReplicateSameDirection";
			row2[c++] = "NotReplicatedOppositeDirection";
			covariateCountWriter.writeNext(row2);

			for (Map.Entry<String, CovariateCount> covariateEntry : covariateCounts.entrySet()) {

				CovariateCount thisCounts = covariateEntry.getValue();

				c = 0;
				row2[c++] = covariateEntry.getKey();
				row2[c++] = String.valueOf(thisCounts.getCovariateSignificant());
				row2[c++] = String.valueOf(thisCounts.getReplicatedSameDirection());
				row2[c++] = String.valueOf(thisCounts.getReplicatedOppositeDirection());
				row2[c++] = String.valueOf(thisCounts.getNotReplicatedSameDirection());
				row2[c++] = String.valueOf(thisCounts.getNotReplicatedOppositeDirection());
				covariateCountWriter.writeNext(row2);

			}
		}
	}
 
Example 11
/**
 * Writes the matrix to a file as tab-separated, unquoted text, one row per line.
 *
 * @param m              matrix to serialize
 * @param outputFilename destination file
 * @throws IOException if the file cannot be created or written
 */
private void writeMatrix(final RealMatrix m, final File outputFilename) throws IOException {
    // render every row as an array of Double.toString values
    final List<String []> textTable = new ArrayList<>();
    for (int i = 0; i < m.getRowDimension(); i ++){
        textTable.add(Arrays.stream(m.getRow(i)).mapToObj(Double::toString).toArray(String[]::new));
    }
    // try-with-resources closes the writer (and underlying FileWriter) even on
    // failure; close() flushes, so the old explicit flush() was redundant
    try (CSVWriter csvWriter = new CSVWriter(new FileWriter(outputFilename), '\t', CSVWriter.NO_QUOTE_CHARACTER)) {
        csvWriter.writeAll(textTable);
    }
}
 
Example 12
Source Project: sofa-acts   File: ContainerUtils.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Replaces the first "java.lang.Object" typed row of the parent CSV with a
 * row describing {@code clsChild}, then rewrites the parent CSV in place.
 * Any failure is reported through {@code sbfWarn} rather than thrown.
 *
 * @param clsChild  concrete class to substitute for java.lang.Object; no-op when null
 * @param cvsChild  path of the child's own csv file (used when clsChild is not a wrapper type)
 * @param csvParent path of the parent csv file to rewrite
 * @param sbfWarn   collector for human-readable warnings on failure
 */
private static void csvReplaceObj(Class<?> clsChild, String cvsChild, String csvParent,
                                  Set<String> sbfWarn) {
    try {
        if (null != clsChild) {
            // read the whole parent csv first, closing the reader before the
            // same path is reopened for writing below (the old code leaked
            // the reader/writer when readAll/writeAll threw)
            List<String[]> readLine;
            try (CSVReader csvReader = new CSVReader(new FileReader(csvParent))) {
                readLine = csvReader.readAll();
            }

            List<String> addLine = new ArrayList<String>(6);
            int i = 0;
            for (String[] readDetail : readLine) {
                if (StringUtils.equals(readDetail[2], "java.lang.Object")) {
                    // keep the first two columns, swap in the concrete type
                    addLine.add(readDetail[0]);
                    addLine.add(readDetail[1]);
                    addLine.add(clsChild.getName());
                    addLine.add(readDetail[3]);

                    if (CSVApisUtil.isWrapClass(clsChild)) {
                        if (StringUtils.equals(clsChild.getName(), "java.lang.String")) {
                            addLine.add("Y");
                            addLine.add("1");
                        } else {
                            CSVApisUtil.addSimpleValue("", clsChild, addLine);
                        }
                    } else if (StringUtils.isNotBlank(cvsChild)) {
                        // link to the child's csv only when it actually exists
                        File openFile = new File(cvsChild);
                        if (openFile.exists()) {
                            addLine.add("Y");
                            addLine.add(CSVApisUtil.cutCsvName(cvsChild) + "@1");
                        } else {
                            addLine.add("N");
                            addLine.add("");
                        }
                    } else {
                        addLine.add("N");
                        addLine.add("");
                    }
                    break;
                }
                i++;
            }

            // NOTE: if no "java.lang.Object" row exists, i == readLine.size()
            // and set() throws IndexOutOfBoundsException, which is reported
            // via the warning below (preserved from the original behavior)
            readLine.set(i, addLine.toArray(new String[addLine.size()]));

            try (CSVWriter csvWriter = new CSVWriter(new FileWriter(csvParent))) {
                csvWriter.writeAll(readLine);
            }
        }
    } catch (Exception e) {
        sbfWarn.add("failed to add sub-file [" + CSVApisUtil.cutCsvName(cvsChild)
                    + "] to file [" + CSVApisUtil.cutCsvName(csvParent) + "]");
    }
}
 
Example 13
Source Project: aifh   File: SubmitTitanic.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Prepare a Kaggle submission for Titanic: writes a text file summarizing the
 * cross-validation folds, then a CSV of (PassengerId, Survived) predictions.
 *
 * @param dataPath    The data path.
 * @param bestNetwork The best network.
 * @param cross       The cross validated data.
 */
public void submit(File dataPath, RBFNetwork bestNetwork, CrossValidate cross) {
    try {
        String now = new SimpleDateFormat("yyyyMMddhhmm").format(new Date());
        File trainingPath = new File(dataPath, TitanicConfig.TrainingFilename);
        File testPath = new File(dataPath, TitanicConfig.TestFilename);
        // score is embedded in the submission file name for easy comparison
        int score = (int) (cross.getScore() * 10000);
        File submitPath = new File(dataPath, "submit-" + now + "_" + score + ".csv");
        File submitInfoPath = new File(dataPath, "submit-" + now + ".txt");

        // human-readable summary; try-with-resources closes the writer even
        // when a println fails (the old code leaked pw on exception)
        try (PrintWriter pw = new PrintWriter(new FileWriter(submitInfoPath))) {
            pw.println("Crossvalidation stats:");
            for (int i = 0; i < cross.size(); i++) {
                CrossValidateFold fold = cross.getFolds().get(i);
                pw.println("Fold #" + (i + 1) + " : Score: " + fold.getScore());
            }
            pw.println("Average Score: " + cross.getScore());
            pw.println();
            pw.println(Arrays.toString(bestNetwork.getLongTermMemory()));
        }

        TitanicStats stats = new TitanicStats();
        NormalizeTitanic.analyze(stats, trainingPath);
        NormalizeTitanic.analyze(stats, testPath);

        List<String> ids = new ArrayList<String>();
        List<BasicData> training = NormalizeTitanic.normalize(stats, testPath, ids,
                TitanicConfig.InputNormalizeLow,
                TitanicConfig.InputNormalizeHigh,
                TitanicConfig.PredictSurvive,
                TitanicConfig.PredictPerish);

        // submission csv; closing the CSVWriter also closes the file stream
        try (CSVWriter csv = new CSVWriter(new OutputStreamWriter(new FileOutputStream(submitPath)))) {
            csv.writeNext(new String[]{"PassengerId", "Survived"});

            int idx = 0;
            for (BasicData data : training) {
                double[] output = bestNetwork.computeRegression(data.getInput());
                // threshold the single regression output at 0.5
                int survived = output[0] > 0.5 ? 1 : 0;

                String[] line = {ids.get(idx), "" + survived};
                csv.writeNext(line);
                idx++;
            }
        }
    } catch (IOException ex) {
        ex.printStackTrace();
    }

}
 
Example 14
Source Project: usergrid   File: WarehouseExport.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Exports entities whose "modified" timestamp falls within the configured
 * [startTime, endTime] window to a dated CSV file under the output
 * directory, then optionally uploads the file to S3 when the "upload"
 * command-line option is present.
 *
 * NOTE(review): the CSVWriter assigned to the {@code writer} field is closed
 * in the finally block; the FileWriter it wraps would only leak if the
 * CSVWriter constructor itself threw — presumably acceptable for a CLI tool.
 */
@Override
public void runTool( CommandLine line ) throws Exception {

    // keep it light and fast
    System.setProperty( "cassandra.readcl", "ONE" );

    startSpring();
    setVerbose( line );

    applyOrgId( line );
    prepareBaseOutputFileName( line );
    outputDir = createOutputParentDir();
    logger.info( "Export directory: {}", outputDir.getAbsolutePath() );

    // create writer
    applyStartTime( line );
    applyEndTime( line );
    logger.error( "startTime: {}, endTime: {}", startTime, endTime );
    // an inverted/empty window is a usage error — abort the tool
    if ( startTime.getTime() >= endTime.getTime() ) {
        logger.error( "startTime must be before endTime. exiting." );
        System.exit( 1 );
    }

    // create "modified" query to select data
    StringBuilder builder = new StringBuilder();
    builder.append( "modified >= " ).append( startTime.getTime() ).append( " and " );
    builder.append( "modified <= " ).append( endTime.getTime() );
    String queryString = builder.toString();

    // create writer
    // output file is named after the current date, e.g. <outputDir>/<date>.csv
    String dateString = DATE_FORMAT.format( new Date() );
    String fileName = outputDir.getAbsolutePath() + "/" + dateString + ".csv";
    FileWriter fw = new FileWriter( fileName );
    writer = new CSVWriter( fw, SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER, '\'' );

    try {
        writeMetadata();
        writeHeaders();

        // Loop through the organizations
        Map<UUID, String> organizations = getOrganizations();
        for ( Entry<UUID, String> orgIdAndName : organizations.entrySet() ) {
            exportApplicationsForOrg( orgIdAndName, queryString );
        }
    }
    finally {
        // close() flushes buffered rows and releases the file handle even
        // when the export fails part-way through
        writer.close();
    }

    // now that file is written, copy it to S3
    if ( line.hasOption( "upload" ) ) {
        logger.info( "Copy to S3" );
        copyToS3( fileName );
    }
}
 
Example 15
/**
 * Recalculates ASE statistics for an existing results file.
 * Usage: {@code <oldResultsFile> <newResultsFile> <threadCount>} — reads the
 * old tab-separated results (skipping its header), recalculates each variant
 * on the given number of threads, and writes a new tab-separated report.
 *
 * @param args command-line arguments as described above
 * @throws Exception on any read, parse, calculation or write failure
 */
public static void main(String[] args) throws Exception {

		File oldResultsFile = new File(args[0]);
		File newResultsFile = new File(args[1]);
		int threads = Integer.parseInt(args[2]);

		System.out.println("Old: " + oldResultsFile.getAbsolutePath());
		System.out.println("New: " + newResultsFile.getAbsolutePath());
		System.out.println("Threads: " + threads);

		ArrayList<AseVariantBean> inputAse = new ArrayList<AseVariantBean>();

		// read the old results; try-with-resources closes the reader even when
		// a line fails to parse (the old code leaked the reader on exception)
		try (BufferedReader aseReader = new BufferedReader(new FileReader(oldResultsFile))) {
			String line;
			String[] elements;
			//Header
			aseReader.readLine();
			while ((line = aseReader.readLine()) != null) {
				elements = TAB_PATTERN.split(line);
				inputAse.add(new AseVariantBean(elements));
			}
		}

		AseVariantRecalculate[] aseVariants = new AseVariantRecalculate[inputAse.size()];
		{
			int i = 0;
			for (AseVariantBean aseVariant : inputAse) {
				aseVariants[i] = new AseVariantRecalculate(aseVariant);
				++i;
			}
		}

		AseCalculator.startAseCalculators(aseVariants, threads);

		System.out.println("Completed ASE calculations");

		// tab-separated, unquoted output; try-with-resources flushes and closes
		// the writer even when a row write fails
		try (CSVWriter mappingReportWriter = new CSVWriter(new FileWriter(newResultsFile), '\t', CSVWriter.NO_QUOTE_CHARACTER)) {

			// the row buffer is reused for the header and every data row;
			// writeNext copies the values
			final String[] newResults = new String[17];
			int c = 0;
			newResults[c++] = "OldPvalue";
			newResults[c++] = "OldRatioD";
			newResults[c++] = "OldP";
			newResults[c++] = "NewPvalue";
			newResults[c++] = "NewRatioD";
			newResults[c++] = "NewP";
			newResults[c++] = "NewTheta";
			newResults[c++] = "SampleCount";
			newResults[c++] = "Chr";
			newResults[c++] = "Pos";
			newResults[c++] = "Id";
			newResults[c++] = "Ref_Allele";
			newResults[c++] = "Alt_Allele";
			newResults[c++] = "Id";
			newResults[c++] = "Genes";
			newResults[c++] = "Ref_Counts";
			newResults[c++] = "Alt_Counts";
			mappingReportWriter.writeNext(newResults);

			for (AseVariantRecalculate ase : aseVariants) {

				c = 0;
				newResults[c++] = String.valueOf(ase.getOriginalLikelihoodRatioP());
				newResults[c++] = String.valueOf(ase.getOriginalLikelihoodRatioD());
				newResults[c++] = String.valueOf(ase.getOriginalEffect());
				newResults[c++] = String.valueOf(ase.getLikelihoodRatioP());
				newResults[c++] = String.valueOf(ase.getLikelihoodRatioD());
				newResults[c++] = String.valueOf(ase.getEffect());
				newResults[c++] = String.valueOf(ase.getMle().getMaxLogLikelihoodTheta());
				newResults[c++] = String.valueOf(ase.getSampleCount());
				newResults[c++] = ase.getChr();
				newResults[c++] = String.valueOf(ase.getPos());
				newResults[c++] = ase.getId().getPrimairyId();
				newResults[c++] = ase.getA1().getAlleleAsString();
				newResults[c++] = ase.getA2().getAlleleAsString();
				newResults[c++] = String.valueOf(ase.getPos());
				newResults[c++] = ase.getGenes();
				newResults[c++] = createCountString(ase.getA1Counts());
				newResults[c++] = createCountString(ase.getA2Counts());
				mappingReportWriter.writeNext(newResults);

			}
		}
	}
 
Example 16
/**
 * Exports the dataset to a temporary CSV file (column ids as the header row,
 * then one row per data row) and returns the VFS path of the written file.
 * Any failure is translated through the exception manager.
 *
 * @param dataSet dataset to export; must not be null
 * @return VFS path of the generated CSV file
 */
public org.uberfire.backend.vfs.Path exportDataSetCSV(DataSet dataSet) {
    try {
        if (dataSet == null) {
            throw new IllegalArgumentException("Null dataSet specified!");
        }
        int columnCount = dataSet.getColumns().size();
        int rowCount = dataSet.getRowCount();

        // +1 for the header row
        List<String[]> lines = new ArrayList<>(rowCount+1);

        // header row: column ids
        String[] line = new String[columnCount];
        for (int cc = 0; cc < columnCount; cc++) {
            DataColumn dc = dataSet.getColumnByIndex(cc);
            line[cc] = dc.getId();
        }
        lines.add(line);

        for (int rc = 0; rc < rowCount; rc++) {
            line = new String[columnCount];
            for (int cc = 0; cc < columnCount; cc++) {
                line[cc] = formatAsString(dataSet.getValueAt(rc, cc));
            }
            lines.add(line);
        }

        String tempCsvFile = uuidGenerator.newUuid() + ".csv";
        Path tempCsvPath = gitStorage.createTempFile(tempCsvFile);

        // try-with-resources closes (and flushes) the writer exactly once;
        // the old code called flush() AFTER close() and then close() again
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(Files.newOutputStream(tempCsvPath)));
        try (CSVWriter writer = new CSVWriter(bw,
                DEFAULT_SEPARATOR_CHAR.charAt(0),
                DEFAULT_QUOTE_CHAR.charAt(0),
                DEFAULT_ESCAPE_CHAR.charAt(0))) {
            writer.writeAll(lines);
        }
        return Paths.convert(tempCsvPath);
    }
    catch (Exception e) {
        throw exceptionManager.handleException(e);
    }
}
 
Example 17
/**
 * Renders the given licences as a CSV string: a fixed header row followed by
 * one row per licence with product, owner, outlet and financial details.
 *
 * @param licences licences to render, one CSV row each
 * @return the complete CSV document as a string
 * @throws IOException if the CSV writer fails
 */
private String buildCsv(List<Licence> licences) throws IOException {
	List<String[]> rows = new ArrayList<String[]>();
	String[] header = { "RightsholderName", "Owner", "OwnerEmailAddress", "TransactionDate", "Title", "Subtitle", "Edition", "Author", "Identifier", "SubjectArea", "OutletName", "OutletId", "OutletCity", "OutletCountry", "OutletCurrency", "OutletServiceCharge", "NumberOfCopies", "Format", "DoubleSidedSheets", "CreditsReserved", "DownloadedYN", "CreditsCharged", "DollarEquivalent", "PaperightFee", "AmountOwingToRightsholder" };
	rows.add(header);
	for (Licence licence : licences) {
		Product product = licence.getProduct();
		Address address = licence.getCompany().getAddressContextByType(AddressContextType.DEFAULT_PRIMARY).getAddress();
		Company ownerCompany = licence.getOwnerCompany();
		String ownerName = ownerCompany != null ? ownerCompany.getName() : null;
		String ownerEmail = ownerCompany != null ? ownerCompany.getEmail() : null;

		String[] row = new String[header.length];
		row[0] = product.getPublisher();                                    // RightsholderName
		row[1] = StringUtils.isBlank(ownerName) ? "" : ownerName;           // Owner
		row[2] = StringUtils.isBlank(ownerEmail) ? "" : ownerEmail;         // OwnerEmailAddress
		row[3] = licence.getCreatedDate().toString();                       // TransactionDate
		row[4] = product.getTitle();                                        // Title
		row[5] = product.getSubTitle();                                     // Subtitle
		row[6] = product.getEdition();                                      // Edition
		row[7] = product.getPrimaryCreators();                              // Author
		row[8] = product.getIdentifier();                                   // Identifier
		row[9] = product.getSubjectArea();                                  // SubjectArea
		row[10] = licence.getCompany().getName();                           // OutletName
		row[11] = licence.getCompany().getId().toString();                  // OutletId
		row[12] = address.getAddressLine4();                                // OutletCity
		row[13] = address.getCountry().getName();                           // OutletCountry
		row[14] = licence.getCurrencyCode();                                // OutletCurrency
		row[15] = licence.getOutletCharge().toString();                     // OutletServiceCharge
		row[16] = licence.getNumberOfCopies() + "";                         // NumberOfCopies
		row[17] = licence.getPageLayout().toString();                       // Format
		row[18] = licence.getPageExtent() + "";                             // DoubleSidedSheets
		row[19] = licence.getCostInCredits().toString();                    // CreditsReserved
		boolean downloaded = licence.isDownloaded();
		row[20] = downloaded ? "Y" : "N";                                   // DownloadedYN
		row[21] = downloaded ? licence.getCostInCredits().toString() : ""; // CreditsCharged
		// fee is 20% of the dollar equivalent; rightsholder gets the remainder
		BigDecimal dollarEquivalent = licence.getCostInCredits().multiply(licence.getPaperightCreditToBaseCurrencyRate()).setScale(2, RoundingMode.UP);
		BigDecimal paperightFee = dollarEquivalent.multiply(BigDecimal.valueOf(0.2)).setScale(2, RoundingMode.HALF_UP);
		row[22] = dollarEquivalent.toString();                              // DollarEquivalent
		row[23] = paperightFee.toString();                                  // PaperightFee
		row[24] = dollarEquivalent.subtract(paperightFee).toString();       // AmountOwingToRightsholder
		rows.add(row);
	}
	StringWriter out = new StringWriter();
	CSVWriter csvWriter = new CSVWriter(out);
	try {
		csvWriter.writeAll(rows);
		return out.toString();
	} finally {
		csvWriter.close();
	}
}
 
Example 18
Source Project: winter   File: MatchingGoldStandard.java    License: Apache License 2.0 3 votes vote down vote up
/**
 * Writes this gold standard to the given file in CSV format.
 *
 * @param file destination file
 * @throws IOException if the file cannot be created or written
 */
public void writeToCSVFile(File file) throws IOException {
	CSVWriter writer = new CSVWriter(new FileWriter(file));
	try {
		writeAllLines(writer);
	} finally {
		// release the file handle even if writing a line fails
		// (previously the writer leaked on exception)
		writer.close();
	}
}
 
Example 19
/**
 * Finalizes the CSV output by closing the given writer, which flushes any
 * buffered rows to the underlying stream.
 *
 * @param writer the CSV writer to close; must not be null
 * @throws Exception if closing the writer or its underlying stream fails
 */
public void finalizeCVSOutputFile(CSVWriter writer) throws Exception{
    writer.close();

}
 
Example 20
Source Project: usergrid   File: OrganizationExport.java    License: Apache License 2.0 2 votes vote down vote up
/**
 * Exports every organization's admin users to {@code <outputDir>/admins.csv}:
 * a header row followed by one row per admin per organization, paging
 * through the organization results via the query cursor.
 */
@Override
public void runTool( CommandLine line ) throws Exception {
    startSpring();

    setVerbose( line );

    prepareBaseOutputFileName( line );

    outputDir = createOutputParentDir();

    String queryString = line.getOptionValue( QUERY_ARG );

    Query query = Query.fromQL( queryString );

    logger.info( "Export directory: {}", outputDir.getAbsolutePath() );

    CSVWriter writer = new CSVWriter( new FileWriter( outputDir.getAbsolutePath() + "/admins.csv" ), ',' );

    try {
        writer.writeNext( new String[] { "Org uuid", "Org Name", "Admin uuid", "Admin Name", "Admin Email", "Admin Created Date" } );

        Results organizations = null;

        // page through organizations using the query cursor
        do {

            organizations = getOrganizations( query );

            for ( Entity organization : organizations.getEntities() ) {
                final String orgName = organization.getProperty( "path" ).toString();
                final UUID orgId = organization.getUuid();

                logger.info( "Org Name: {} key: {}", orgName, orgId );

                for ( UserInfo user : managementService.getAdminUsersForOrganization( organization.getUuid() ) ) {

                    Entity admin = managementService.getAdminUserEntityByUuid( user.getUuid() );

                    Long createdDate = ( Long ) admin.getProperties().get( "created" );

                    writer.writeNext( new String[] { orgId.toString(),
                            orgName, user.getUuid().toString(), user.getName(), user.getEmail(),
                            createdDate == null ? "Unknown" : sdf.format( new Date( createdDate ) )
                    } );
                }
            }

            query.setCursor( organizations.getCursor() );
        }
        while ( organizations != null && organizations.hasCursor() );

        logger.info( "Completed export" );
    } finally {
        // close() flushes buffered rows (no separate flush() needed) and
        // releases the file even when the export fails mid-way — the old
        // code leaked the writer on any exception in the loop
        writer.close();
    }
}