Java Code Examples for au.com.bytecode.opencsv.CSVWriter#NO_QUOTE_CHARACTER

The following examples show how to use au.com.bytecode.opencsv.CSVWriter#NO_QUOTE_CHARACTER. They are taken from open-source projects; each example's header names the project and source file it comes from, along with the project's license.
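As a quick orientation before the project examples, here is a minimal, self-contained sketch of what NO_QUOTE_CHARACTER does (the class name NoQuoteDemo is ours, and it assumes opencsv 2.x on the classpath): passing it as the quote character disables field quoting entirely, so a separator embedded in a field is written verbatim rather than being protected by quotes.

import java.io.StringWriter;

import au.com.bytecode.opencsv.CSVWriter;

public class NoQuoteDemo {
    public static void main(String[] args) throws Exception {
        StringWriter out = new StringWriter();
        // NO_QUOTE_CHARACTER ('\u0000') disables field quoting entirely
        CSVWriter writer = new CSVWriter(out, CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER);
        writer.writeNext(new String[] {"a", "b,c", "d"});
        writer.close();
        // Prints "a,b,c,d": the embedded comma is neither quoted nor escaped,
        // which is why this constant is typically combined with a separator
        // (such as tab) that cannot occur in the data itself.
        System.out.print(out);
    }
}

This is the pattern in most of the examples below: tab-separated output where quoting is unnecessary.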
Example 1
Source File: PoNTestUtils.java    From gatk-protected with BSD 3-Clause "New" or "Revised" License
/**
 * Reads a very basic tsv (numbers separated by tabs) into a RealMatrix.
 * <p>Very little error checking happens in this method</p>
 *
 * @param inputFile readable file.  Not {@code null}
 * @return never {@code null}
 */
public static RealMatrix readTsvIntoMatrix(final File inputFile) {
    IOUtils.canReadFile(inputFile);
    final List<double[]> allData = new ArrayList<>();
    // try-with-resources ensures the reader is closed even if parsing fails
    try (final CSVReader reader = new CSVReader(new FileReader(inputFile), '\t', CSVWriter.NO_QUOTE_CHARACTER)) {
        String[] nextLine;
        while ((nextLine = reader.readNext()) != null) {
            allData.add(Arrays.stream(nextLine)
                    .filter(s -> StringUtils.trim(s).length() > 0)
                    .mapToDouble(s -> Double.parseDouble(StringUtils.trim(s)))
                    .toArray());
        }
    } catch (final IOException ioe) {
        Assert.fail("Could not open test file: " + inputFile, ioe);
    }
    final RealMatrix result = new Array2DRowRealMatrix(allData.size(), allData.get(0).length);
    for (int i = 0; i < result.getRowDimension(); i++) {
        result.setRow(i, allData.get(i));
    }
    return result;
}
 
Example 2
Source File: HiveTableDeployer.java    From celos with Apache License 2.0
private Path createTempHdfsFileForInsertion(FixTable fixTable, TestRun testRun) throws Exception {

    Path pathToParent = new Path(testRun.getHdfsPrefix(), ".hive");
    Path pathTo = new Path(pathToParent, UUID.randomUUID().toString());
    FileSystem fileSystem = testRun.getCiContext().getFileSystem();
    fileSystem.mkdirs(pathTo.getParent());
    FSDataOutputStream outputStream = fileSystem.create(pathTo);

    CSVWriter writer = new CSVWriter(new OutputStreamWriter(outputStream), '\t', CSVWriter.NO_QUOTE_CHARACTER);

    for (FixTable.FixRow fixRow : fixTable.getRows()) {
        List<String> rowData = Lists.newArrayList();
        for (String colName : fixTable.getColumnNames()) {
            rowData.add(fixRow.getCells().get(colName));
        }
        String[] dataArray = rowData.toArray(new String[rowData.size()]);
        writer.writeNext(dataArray);
    }

    writer.close();

    fileSystem.setPermission(pathToParent, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
    fileSystem.setPermission(pathTo, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
    return pathTo;
}
 
Example 3
Source File: CsvPrinter.java    From presto with Apache License 2.0
public CsvPrinter(List<String> fieldNames, Writer writer, CsvOutputFormat csvOutputFormat)
{
    requireNonNull(fieldNames, "fieldNames is null");
    requireNonNull(writer, "writer is null");
    this.fieldNames = ImmutableList.copyOf(fieldNames);
    this.writer = csvOutputFormat.isQuoted()
            ? new CSVWriter(writer)
            : new CSVWriter(writer, CSVWriter.DEFAULT_SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER);
    this.needHeader = csvOutputFormat.showHeader();
}
 
Example 4
Source File: DecomposeSingularValues.java    From gatk-protected with BSD 3-Clause "New" or "Revised" License
private void writeMatrix(final RealMatrix m, final File outputFilename) throws IOException {
    final List<String[]> textTable = new ArrayList<>();
    for (int i = 0; i < m.getRowDimension(); i++) {
        textTable.add(Arrays.stream(m.getRow(i)).mapToObj(Double::toString).toArray(String[]::new));
    }
    // try-with-resources flushes and closes the writer even if writeAll fails
    try (final CSVWriter csvWriter = new CSVWriter(new FileWriter(outputFilename), '\t', CSVWriter.NO_QUOTE_CHARACTER)) {
        csvWriter.writeAll(textTable);
    }
}
 
Example 5
Source File: SkippedInteractionWriter.java    From systemsgenetics with GNU General Public License v3.0
public SkippedInteractionWriter(File skippedInteractionsFile) throws IOException {
    // writer, c, and row are instance fields of this class
    writer = new CSVWriter(new FileWriter(skippedInteractionsFile), '\t', CSVWriter.NO_QUOTE_CHARACTER);

    c = 0;
    row[c++] = "Covariate";
    row[c++] = "CountSingular";
    row[c++] = "CountSharedQtl";
    row[c++] = "SingularQtls";
    row[c++] = "SharedQtls";

    writer.writeNext(row);
}
 
Example 6
Source File: WarehouseExport.java    From usergrid with Apache License 2.0
private long readEndTime( File file ) throws Exception {
    CSVReader reader = new CSVReader( new FileReader( file ), SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER, '\'' );
    try {
        String[] firstLine = reader.readNext();
        if ( "start".equals( firstLine[0] ) && "end".equals( firstLine[2] ) ) {
            return Long.parseLong( firstLine[3] );
        }
    }
    finally {
        reader.close();
    }
    return 0;
}
 
Example 7
Source File: AseRecalculate.java    From systemsgenetics with GNU General Public License v3.0
public static void main(String[] args) throws Exception {

    File oldResultsFile = new File(args[0]);
    File newResultsFile = new File(args[1]);
    int threads = Integer.parseInt(args[2]);

    System.out.println("Old: " + oldResultsFile.getAbsolutePath());
    System.out.println("New: " + newResultsFile.getAbsolutePath());
    System.out.println("Threads: " + threads);

    BufferedReader aseReader = new BufferedReader(new FileReader(oldResultsFile));

    ArrayList<AseVariantBean> inputAse = new ArrayList<AseVariantBean>();

    String line;
    String[] elements;
    // Skip the header line
    aseReader.readLine();
    while ((line = aseReader.readLine()) != null) {
        elements = TAB_PATTERN.split(line);
        inputAse.add(new AseVariantBean(elements));
    }
    aseReader.close();

    AseVariantRecalculate[] aseVariants = new AseVariantRecalculate[inputAse.size()];
    {
        int i = 0;
        for (AseVariantBean aseVariant : inputAse) {
            aseVariants[i] = new AseVariantRecalculate(aseVariant);
            ++i;
        }
    }

    AseCalculator.startAseCalculators(aseVariants, threads);

    System.out.println("Completed ASE calculations");

    CSVWriter mappingReportWriter = new CSVWriter(new FileWriter(newResultsFile), '\t', CSVWriter.NO_QUOTE_CHARACTER);

    final String[] newResults = new String[17];
    int c = 0;
    newResults[c++] = "OldPvalue";
    newResults[c++] = "OldRatioD";
    newResults[c++] = "OldP";
    newResults[c++] = "NewPvalue";
    newResults[c++] = "NewRatioD";
    newResults[c++] = "NewP";
    newResults[c++] = "NewTheta";
    newResults[c++] = "SampleCount";
    newResults[c++] = "Chr";
    newResults[c++] = "Pos";
    newResults[c++] = "Id";
    newResults[c++] = "Ref_Allele";
    newResults[c++] = "Alt_Allele";
    newResults[c++] = "Id";
    newResults[c++] = "Genes";
    newResults[c++] = "Ref_Counts";
    newResults[c++] = "Alt_Counts";
    mappingReportWriter.writeNext(newResults);

    for (AseVariantRecalculate ase : aseVariants) {

        c = 0;
        newResults[c++] = String.valueOf(ase.getOriginalLikelihoodRatioP());
        newResults[c++] = String.valueOf(ase.getOriginalLikelihoodRatioD());
        newResults[c++] = String.valueOf(ase.getOriginalEffect());
        newResults[c++] = String.valueOf(ase.getLikelihoodRatioP());
        newResults[c++] = String.valueOf(ase.getLikelihoodRatioD());
        newResults[c++] = String.valueOf(ase.getEffect());
        newResults[c++] = String.valueOf(ase.getMle().getMaxLogLikelihoodTheta());
        newResults[c++] = String.valueOf(ase.getSampleCount());
        newResults[c++] = ase.getChr();
        newResults[c++] = String.valueOf(ase.getPos());
        newResults[c++] = ase.getId().getPrimairyId();
        newResults[c++] = ase.getA1().getAlleleAsString();
        newResults[c++] = ase.getA2().getAlleleAsString();
        newResults[c++] = String.valueOf(ase.getPos());
        newResults[c++] = ase.getGenes();
        newResults[c++] = createCountString(ase.getA1Counts());
        newResults[c++] = createCountString(ase.getA2Counts());
        mappingReportWriter.writeNext(newResults);
    }

    mappingReportWriter.close();
}
 
Example 8
Source File: WarehouseExport.java    From usergrid with Apache License 2.0
@Override
public void runTool( CommandLine line ) throws Exception {

    // keep it light and fast
    System.setProperty( "cassandra.readcl", "ONE" );

    startSpring();
    setVerbose( line );

    applyOrgId( line );
    prepareBaseOutputFileName( line );
    outputDir = createOutputParentDir();
    logger.info( "Export directory: {}", outputDir.getAbsolutePath() );

    // apply and validate the requested time window
    applyStartTime( line );
    applyEndTime( line );
    logger.error( "startTime: {}, endTime: {}", startTime, endTime );
    if ( startTime.getTime() >= endTime.getTime() ) {
        logger.error( "startTime must be before endTime. exiting." );
        System.exit( 1 );
    }

    // create "modified" query to select data
    StringBuilder builder = new StringBuilder();
    builder.append( "modified >= " ).append( startTime.getTime() ).append( " and " );
    builder.append( "modified <= " ).append( endTime.getTime() );
    String queryString = builder.toString();

    // create writer
    String dateString = DATE_FORMAT.format( new Date() );
    String fileName = outputDir.getAbsolutePath() + "/" + dateString + ".csv";
    FileWriter fw = new FileWriter( fileName );
    writer = new CSVWriter( fw, SEPARATOR, CSVWriter.NO_QUOTE_CHARACTER, '\'' );

    try {
        writeMetadata();
        writeHeaders();

        // Loop through the organizations
        Map<UUID, String> organizations = getOrganizations();
        for ( Entry<UUID, String> orgIdAndName : organizations.entrySet() ) {
            exportApplicationsForOrg( orgIdAndName, queryString );
        }
    }
    finally {
        writer.close();
    }

    // now that file is written, copy it to S3
    if ( line.hasOption( "upload" ) ) {
        logger.info( "Copy to S3" );
        copyToS3( fileName );
    }
}