Java Code Examples for com.opencsv.CSVWriter#close()

The following examples show how to use com.opencsv.CSVWriter#close(). Each example is taken from an open source project; the source file, project, and license are noted above each snippet.
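Before the project examples, here is a minimal sketch of the usual pattern, assuming nothing beyond the opencsv API itself: CSVWriter implements Closeable, so a try-with-resources statement guarantees that close() runs (and flushes buffered output) even if a write fails. The file name and row values are placeholders for illustration only.

import com.opencsv.CSVWriter;

import java.io.FileWriter;
import java.io.IOException;

public class CsvWriterCloseSketch {
    public static void main(String[] args) throws IOException {
        // try-with-resources invokes writer.close() automatically, even when writeNext throws;
        // "example.csv" is only a placeholder output path
        try (CSVWriter writer = new CSVWriter(new FileWriter("example.csv"))) {
            writer.writeNext(new String[]{"id", "name"});
            writer.writeNext(new String[]{"1", "Alice"});
        }
    }
}

The examples below call close() explicitly instead; the trade-off is that an exception thrown before the call can leave the underlying writer open and its buffer unflushed.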
Example 1
Source File: MatchServiceTest.java    From fuzzy-matcher with Apache License 2.0
public static void writeOutput(Set<Set<Match<Document>>> result) throws IOException {
    CSVWriter writer = new CSVWriter(new FileWriter("src/test/resources/output.csv"));
    writer.writeNext(new String[]{"Key", "Matched Key", "Score", "Name", "Address", "Email", "Phone"});

    result.forEach(matches -> {
        String[] arr = {"Group"};
        writer.writeNext(arr);

        matches.stream().forEach(match -> {
            Document md = match.getMatchedWith();
            String[] matchArrs = Stream.concat(Stream.of("", md.getKey(), Double.toString(match.getResult())),
                    getOrderedElements(md.getElements()).map(e -> e.getValue())).toArray(String[]::new);
            writer.writeNext(matchArrs);
        });
    });
    writer.close();
}
 
Example 2
Source File: CsvTool.java    From axelor-open-suite with GNU Affero General Public License v3.0
public static void csvWriter(
    String filePath, String fileName, char separator, String[] headers, List<String[]> dataList)
    throws IOException {
  CSVWriter reconWriter = setCsvFile(filePath, fileName, separator);
  if (headers != null) {
    reconWriter.writeNext(headers);
  }
  reconWriter.writeAll(dataList);
  reconWriter.flush();
  try {
    reconWriter.close();
  } catch (IOException e) {
    // a failure on close is swallowed here; the writer reference is simply dropped
    reconWriter = null;
  }
}
 
Example 3
Source File: ObjectDataExportServiceImpl.java    From axelor-open-suite with GNU Affero General Public License v3.0
private MetaFile writeCSV(Map<String, List<String[]>> data) throws IOException {

    File zipFile = MetaFiles.createTempFile("Data", ".zip").toFile();
    try (ZipOutputStream zout = new ZipOutputStream(new FileOutputStream(zipFile))) {

      for (String model : data.keySet()) {
        File modelFile = MetaFiles.createTempFile(model, ".csv").toFile();
        CSVWriter writer = new CSVWriter(new FileWriter(modelFile), ';');
        writer.writeAll(data.get(model));
        writer.close();
        zout.putNextEntry(new ZipEntry(model + ".csv"));
        zout.write(IOUtils.toByteArray(new FileInputStream(modelFile)));
        zout.closeEntry();
      }
      zout.close();
    }
    return metaFiles.upload(zipFile);
  }
 
Example 4
Source File: CsvFormatter.java    From yarg with Apache License 2.0
protected void writeCsvDocument(BandData rootBand, OutputStream outputStream) {
    try {
        List<BandData> actualData = getActualData(rootBand);
        CSVWriter writer = new CSVWriter(new OutputStreamWriter(outputStream), separator, CSVWriter.DEFAULT_QUOTE_CHARACTER);

        writer.writeNext(header);

        for (BandData row : actualData) {
            String[] entries = new String[parametersToInsert.size()];
            for (int i = 0; i < parametersToInsert.size(); i++) {
                String parameterName = parametersToInsert.get(i);
                String fullParameterName = row.getName() + "." + parameterName;
                entries[i] = formatValue(row.getData().get(parameterName), parameterName, fullParameterName);
            }
            writer.writeNext(entries);
        }

        writer.close();
    } catch (IOException e) {
        throw new ReportFormattingException("Error while writing a csv document", e);
    }
}
 
Example 5
Source File: CsvTool.java    From axelor-open-suite with GNU Affero General Public License v3.0
public static void csvWriter(
    String filePath,
    String fileName,
    char separator,
    char quoteChar,
    String[] headers,
    List<String[]> dataList)
    throws IOException {
  CSVWriter reconWriter = setCsvFile(filePath, fileName, separator, quoteChar);
  if (headers != null) {
    reconWriter.writeNext(headers);
  }
  reconWriter.writeAll(dataList);
  reconWriter.flush();
  try {
    reconWriter.close();
  } catch (IOException e) {
    // a failure on close is swallowed here; the writer reference is simply dropped
    reconWriter = null;
  }
}
 
Example 6
Source File: GenePvalueCalculator.java    From systemsgenetics with GNU General Public License v3.0
private static void saveEigenValues(double[] eigenValues, File file) throws IOException {

		// the two '\0' arguments disable quoting and escaping (they equal CSVWriter.NO_QUOTE_CHARACTER
		// and CSVWriter.NO_ESCAPE_CHARACTER), so the output is plain tab-separated text
		final CSVWriter eigenWriter = new CSVWriter(new FileWriter(file), '\t', '\0', '\0', "\n");
		final String[] outputLine = new String[2];
		int c = 0;
		outputLine[c++] = "Component";
		outputLine[c++] = "EigenValue";
		eigenWriter.writeNext(outputLine);

		for (int i = 0; i < eigenValues.length; ++i) {

			c = 0;
			outputLine[c++] = "PC" + (i + 1);
			outputLine[c++] = String.valueOf(eigenValues[i]);
			eigenWriter.writeNext(outputLine);

		}

		eigenWriter.close();

	}
 
Example 7
Source File: HostFileMapDao.java    From burp_data_collector with Apache License 2.0
public void exportFile(String dirName, int fileCount) throws SQLException, IOException {
    String sql = "SELECT stat.filename, sum(fileCount) AS allCount\n" +
            "FROM ((SELECT hfm.filename, count(*) AS fileCount FROM host_file_map hfm GROUP BY hfm.filename)\n" +
            "      UNION ALL\n" +
            "      (SELECT filename, count AS fileCount FROM file)) stat\n" +
            "GROUP BY stat.filename\n" +
            "HAVING allCount >= ?\n" +
            "ORDER BY allCount DESC";
    PreparedStatement preparedStatement = getPreparedStatement(sql);
    preparedStatement.setInt(1, fileCount);
    ResultSet resultSet = preparedStatement.executeQuery();

    File file = new File(dirName + FILE_FILE);
    File fileImportFile = new File(dirName + FILE_IMPORT_FILE);
    FileOutputStream fileOutputStream = new FileOutputStream(file);
    FileWriter fileWriter = new FileWriter(fileImportFile);
    CSVWriter csvWriter = new CSVWriter(fileWriter);
    String[] fileHead = new String[]{"file", "count"};
    csvWriter.writeNext(fileHead);
    while (resultSet.next()) {
        String fileName = resultSet.getString(1);
        String row = fileName + "\n";
        int count = resultSet.getInt(2);
        fileOutputStream.write(row.getBytes());
        csvWriter.writeNext(new String[]{fileName, String.valueOf(count)}, true);
    }
    fileOutputStream.close();
    csvWriter.close();
}
 
Example 8
Source File: HostDirMapDao.java    From burp_data_collector with Apache License 2.0
public void exportDir(String dirName, int dirCount) throws SQLException, IOException {
    String sql = "SELECT stat.dir, sum(dirCount) AS allCount\n" +
            "FROM ((SELECT hdm.dir, count(*) AS dirCount FROM host_dir_map hdm GROUP BY hdm.dir)\n" +
            "      UNION ALL\n" +
            "      (SELECT dir, count AS dirCount FROM dir)) stat\n" +
            "GROUP BY stat.dir\n" +
            "HAVING allCount >= ?\n" +
            "ORDER BY allCount DESC";
    PreparedStatement preparedStatement = getPreparedStatement(sql);
    preparedStatement.setInt(1, dirCount);
    ResultSet resultSet = preparedStatement.executeQuery();

    File dirFile = new File(dirName + DIR_FILE);
    File dirImportFile = new File(dirName + DIR_IMPORT_FILE);
    FileOutputStream dirFileOutputStream = new FileOutputStream(dirFile);
    FileWriter fileWriter = new FileWriter(dirImportFile);
    CSVWriter csvWriter = new CSVWriter(fileWriter);
    String[] fileHead = new String[]{"dir", "count"};
    csvWriter.writeNext(fileHead);
    while (resultSet.next()) {
        String dir = resultSet.getString(1);
        String row = dir + "\n";
        int count = resultSet.getInt(2);
        dirFileOutputStream.write(row.getBytes());
        csvWriter.writeNext(new String[]{dir, String.valueOf(count)}, true);

    }
    dirFileOutputStream.close();
    csvWriter.close();
}
 
Example 9
Source File: HostPathMapDao.java    From burp_data_collector with Apache License 2.0
public void exportPath(String dirName, int pathCount) throws SQLException, IOException {
    String sql = "SELECT stat.path, sum(pathCount) AS allCount\n" +
            "FROM ((SELECT hpm.path, count(*) AS pathCount FROM host_path_map hpm GROUP BY hpm.path)\n" +
            "      UNION ALL\n" +
            "      (SELECT path, count AS pathCount FROM path)) stat\n" +
            "GROUP BY stat.path\n" +
            "HAVING allCount >= ?\n" +
            "ORDER BY allCount DESC";
    PreparedStatement preparedStatement = getPreparedStatement(sql);
    preparedStatement.setInt(1, pathCount);
    ResultSet resultSet = preparedStatement.executeQuery();

    File pathFile = new File(dirName + PATH_FILE);
    File pathImportFile = new File(dirName + PATH_IMPORT_FILE);
    FileOutputStream pathOutputStream = new FileOutputStream(pathFile);
    FileWriter fileWriter = new FileWriter(pathImportFile);
    CSVWriter csvWriter = new CSVWriter(fileWriter);
    String[] fileHead = {"path", "count"};
    csvWriter.writeNext(fileHead);
    while (resultSet.next()) {
        String path = resultSet.getString(1);
        String row = path + "\n";
        int count = resultSet.getInt(2);
        pathOutputStream.write(row.getBytes());
        csvWriter.writeNext(new String[]{path, String.valueOf(count)}, true);
    }
    pathOutputStream.close();
    csvWriter.close();
}
 
Example 10
Source File: CsvWriterExamples.java    From tutorials with MIT License
public static String csvWriterAll(List<String[]> stringArray, Path path) {
    try {
        CSVWriter writer = new CSVWriter(new FileWriter(path.toString()));
        writer.writeAll(stringArray);
        writer.close();
    } catch (Exception ex) {
        Helpers.err(ex);
    }
    return Helpers.readFile(path);
}
 
Example 11
Source File: XlsxFormatter.java    From yarg with Apache License 2.0
protected void saveXlsxAsCsv(Document document, OutputStream outputStream) throws IOException, Docx4JException {
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(outputStream), ';', CSVWriter.DEFAULT_QUOTE_CHARACTER);

    for (Document.SheetWrapper sheetWrapper : document.getWorksheets()) {
        Worksheet worksheet = sheetWrapper.getWorksheet().getContents();
        for (Row row : worksheet.getSheetData().getRow()) {
            String[] rows = new String[row.getC().size()];
            List<Cell> cells = row.getC();

            boolean emptyRow = true;
            for (int i = 0; i < cells.size(); i++) {
                checkThreadInterrupted();
                Cell cell = cells.get(i);
                String value = cell.getV();
                rows[i] = value;
                if (value != null && !value.isEmpty())
                    emptyRow = false;
            }

            if (!emptyRow)
                writer.writeNext(rows);
        }
    }
    writer.close();
}
 
Example 12
Source File: HostFullPathMapDao.java    From burp_data_collector with Apache License 2.0
public void exportFullPath(String dirName, int fullPathCount) throws SQLException, IOException {
    String sql = "SELECT stat.full_path, sum(fullPathCount) AS allCount\n" +
            "FROM ((SELECT hfpm.full_path, count(*) AS fullPathCount FROM host_full_path_map hfpm GROUP BY hfpm.full_path)\n" +
            "      UNION ALL\n" +
            "      (SELECT full_path, count AS fullPathCount FROM full_path)) stat\n" +
            "GROUP BY stat.full_path\n" +
            "HAVING allCount >= ?\n" +
            "ORDER BY allCount DESC";
    PreparedStatement preparedStatement = getPreparedStatement(sql);
    preparedStatement.setInt(1, fullPathCount);
    ResultSet resultSet = preparedStatement.executeQuery();

    File fullPathFile = new File(dirName + FULL_PATH_FILE);
    File fullPathImportFile = new File(dirName + FULL_PATH_IMPORT_FILE);
    FileOutputStream fullPathOutputStream = new FileOutputStream(fullPathFile);
    FileWriter fileWriter = new FileWriter(fullPathImportFile);
    CSVWriter csvWriter = new CSVWriter(fileWriter);
    String[] fileHead = new String[]{"full_path", "count"};
    csvWriter.writeNext(fileHead);
    while (resultSet.next()) {
        String fullPath = resultSet.getString(1);
        String row = fullPath + "\n";
        int count = resultSet.getInt(2);
        fullPathOutputStream.write(row.getBytes());
        csvWriter.writeNext(new String[]{fullPath, String.valueOf(count)}, true);
    }
    fullPathOutputStream.close();
    csvWriter.close();
}
 
Example 13
Source File: DownloadEventBean.java    From sakai with Educational Community License v2.0
private void csvSpreadsheet(OutputStream os, List<SignupMeetingWrapper> meetingWrappers) throws IOException {
	
	CSVExport export = new CSVExport(meetingWrappers, getSakaiFacade());

	
	CSVWriter writer = new CSVWriter(new OutputStreamWriter(os), ',', CSVWriter.DEFAULT_QUOTE_CHARACTER, CSVWriter.DEFAULT_ESCAPE_CHARACTER, downloadVersion);
	//header
	List<String> header = export.getHeaderRow();
	
	int cols = header.size(); //total number of columns is based on header row
	
	String[] headerRow = new String[cols];
	headerRow = header.toArray(headerRow);
	writer.writeNext(headerRow);
	
	//data rows
	List<List<String>> data = export.getDataRows();
	Iterator<List<String>> iter = data.iterator();
	while(iter.hasNext()) {
		List<String> row = iter.next();
		String[] dataRow = new String[cols];
		dataRow = row.toArray(dataRow);
		writer.writeNext(dataRow);
	}
	
	writer.close();
	
}
 
Example 14
Source File: AddressServiceImpl.java    From axelor-open-suite with GNU Affero General Public License v3.0
@Override
public int export(String path) throws IOException {
  List<Address> addresses = addressRepo.all().filter("self.certifiedOk IS FALSE").fetch();

  CSVWriter csv =
      new CSVWriter(new java.io.FileWriter(path), "|".charAt(0), CSVWriter.NO_QUOTE_CHARACTER);
  List<String> header = new ArrayList<>();
  header.add("Id");
  header.add("AddressL1");
  header.add("AddressL2");
  header.add("AddressL3");
  header.add("AddressL4");
  header.add("AddressL5");
  header.add("AddressL6");
  header.add("CodeINSEE");

  csv.writeNext(header.toArray(new String[header.size()]));
  List<String> items = new ArrayList<>();
  for (Address a : addresses) {

    // note: the header declares an AddressL1 column, but no AddressL1 value is added below,
    // so each data row ends up one cell shorter than the header
    items.add(a.getId() != null ? a.getId().toString() : "");
    items.add(a.getAddressL2() != null ? a.getAddressL2() : "");
    items.add(a.getAddressL3() != null ? a.getAddressL3() : "");
    items.add(a.getAddressL4() != null ? a.getAddressL4() : "");
    items.add(a.getAddressL5() != null ? a.getAddressL5() : "");
    items.add(a.getAddressL6() != null ? a.getAddressL6() : "");
    items.add(a.getInseeCode() != null ? a.getInseeCode() : "");

    csv.writeNext(items.toArray(new String[items.size()]));
    items.clear();
  }
  csv.close();
  LOG.info("{} exported", path);

  return addresses.size();
}
 
Example 15
Source File: HostParameterMapDao.java    From burp_data_collector with Apache License 2.0
public void exportParameter(String dirName, int parameterCount) throws SQLException, IOException {
    String sql = "SELECT stat.parameter, sum(parameterPathCount) AS allCount\n" +
            "FROM ((SELECT hpm.parameter, count(*) AS parameterPathCount FROM host_parameter_map hpm GROUP BY hpm.parameter)\n" +
            "      UNION ALL\n" +
            "      (SELECT parameter, count AS parameterPathCount FROM parameter)) stat\n" +
            "GROUP BY stat.parameter\n" +
            "HAVING allCount >= ?\n" +
            "ORDER BY allCount DESC";
    PreparedStatement preparedStatement = getPreparedStatement(sql);
    preparedStatement.setInt(1, parameterCount);
    ResultSet resultSet = preparedStatement.executeQuery();

    File parameterFile = new File(dirName + PARAMETER_FILE);
    File parameterImportFile = new File(dirName + PARAMETER_IMPORT_FILE);
    FileOutputStream parameterOutputStream = new FileOutputStream(parameterFile);
    FileWriter fileWriter = new FileWriter(parameterImportFile);
    CSVWriter csvWriter = new CSVWriter(fileWriter);
    String[] fileHead = new String[]{"parameter", "count"};
    csvWriter.writeNext(fileHead);
    while (resultSet.next()) {
        String parameter = resultSet.getString(1);
        String row = parameter + "\n";
        int count = resultSet.getInt(2);
        parameterOutputStream.write(row.getBytes());
        csvWriter.writeNext(new String[]{parameter, String.valueOf(count)}, true);
    }
    parameterOutputStream.close();
    csvWriter.close();
}
 
Example 16
Source File: FilterPrioBasedOnMutatedGenes.java    From systemsgenetics with GNU General Public License v3.0
/**
	 * @param args the command line arguments
	 */
	public static void main(String[] args) throws FileNotFoundException, IOException {
//
//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\samplesWithGeno.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\gavinRes\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\rankingCandidateGenes");

//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations3\\samplesWithGeno.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\gavinRes\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations3");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations3\\rankingCandidateGenes");
		
		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\extraUnsolved\\samplesWithGeno.txt");
		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\extraUnsolved\\gavinRes\\");
		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\extraUnsolved\\Prioritisations");
		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\extraUnsolved\\rankingCandidateGenes");

//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsDcm\\samplesWithGeno.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsDcm\\gavinRes\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsDcm");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsDcm\\rankingCandidateGenes");
//
//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations3\\samplesWithGeno.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\gavinRes\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsSpiked");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsSpiked\\rankingCandidateGenes");
//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioMieke\\samplesWithGeno.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioMieke\\Gavin\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioMieke\\Prioritisations");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioMieke\\");
//		
//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioEdgar\\Prioritisations\\samples.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioEdgar\\CandidateGenes\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioEdgar\\Prioritisations");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\PrioritisationsCardioEdgar\\");
//
//		
		resultFolder.mkdirs();

		final CSVParser parser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
		final CSVReader sampleFileReader = new CSVReaderBuilder(new BufferedReader(new FileReader(sampleFile))).withSkipLines(0).withCSVParser(parser).build();

		String[] nextLine;
		while ((nextLine = sampleFileReader.readNext()) != null) {

			String sample = nextLine[0];

			String genoSampleName = new File(nextLine[1]).getName();
			if (!genoSampleName.endsWith(".txt")) {
				genoSampleName += ".txt";
			}

			File genoFile = new File(genoFolder, genoSampleName);
			File prioFile = new File(prioFolder, sample + ".txt");
			File rankingFile = new File(resultFolder, sample + ".txt");

			System.out.println("------------------------------------------------------------------");
			System.out.println("Sample: " + sample);
			System.out.println("Geno: " + genoFile.getAbsolutePath());
			System.out.println("Prio: " + prioFile.getAbsolutePath());
			System.out.println("Ranking: " + rankingFile.getAbsolutePath());

			HashSet<String> genesWithMutation = getMutatedGenes(genoFile);

			final CSVReader prioFileReader = new CSVReaderBuilder(new BufferedReader(new FileReader(prioFile))).withSkipLines(0).withCSVParser(parser).build();

			CSVWriter writer = new CSVWriter(new FileWriter(rankingFile), '\t', '\0', '\0', "\n");

			String[] outputLine = prioFileReader.readNext();
			writer.writeNext(outputLine);

			while ((outputLine = prioFileReader.readNext()) != null) {

				if (genesWithMutation.contains(outputLine[1])) {
					writer.writeNext(outputLine);
				}

			}

			writer.close();
			prioFileReader.close();

		}

	}
 
Example 17
Source File: ImproveHpoPredictionBasedOnChildTerms.java    From systemsgenetics with GNU General Public License v3.0
/**
	 * @param args the command line arguments
	 * @throws java.lang.Exception
	 */
	public static void main(String[] args) throws Exception {

		final File predictionMatrixFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions.txt.gz");
		final File annotationMatrixFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\ALL_SOURCES_ALL_FREQUENCIES_phenotype_to_genes.txt_matrix.txt.gz");
		final File predictedHpoTermFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_auc_bonferroni.txt");
//		final File predictionMatrixFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_testSet.txt");
//		final File annotationMatrixFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\hpo_annotation_testSet.txt");
//		final File predictedHpoTermFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_testSet_auc_bonferroni.txt");
		final File hpoOboFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\hp.obo");
		final File outputLogFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_improved.log");
		final File updatedPredictionMatrixFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_improved.txt.gz");

		LinkedHashSet<String> predictedHpoTerms = readPredictedHpoTermFile(predictedHpoTermFile);

		DoubleMatrixDataset<String, String> predictionMatrixFull = DoubleMatrixDataset.loadDoubleData(predictionMatrixFile.getAbsolutePath());
		DoubleMatrixDataset<String, String> annotationMatrixFull = DoubleMatrixDataset.loadDoubleData(annotationMatrixFile.getAbsolutePath());

		DoubleMatrixDataset<String, String> predictionMatrixPredicted = predictionMatrixFull.viewColSelection(predictedHpoTerms);
		DoubleMatrixDataset<String, String> annotationMatrixPredicted = annotationMatrixFull.viewColSelection(predictedHpoTerms);

		Ontology hpoOntology = HpoFinder.loadHpoOntology(hpoOboFile);

		ImproveHpoPredictionBasedOnChildTerms improver = new ImproveHpoPredictionBasedOnChildTerms(predictionMatrixPredicted, annotationMatrixPredicted, hpoOntology);

		HashMap<String, UpdatedPredictionInfo> checkedHpoInfo = improver.run();

		System.out.println("Done with improving");

		CSVWriter writer = new CSVWriter(new FileWriter(outputLogFile), '\t', '\0', '\0', "\n");

		String[] outputLine = new String[11];
		int c = 0;
		outputLine[c++] = "HPO";
		outputLine[c++] = "Gene_count";
		outputLine[c++] = "Origanl_AUC";
		outputLine[c++] = "Orignal_Pvalue";
		outputLine[c++] = "Updated_AUC";
		outputLine[c++] = "Updated_Pvalue";
		outputLine[c++] = "Is_significant";
		outputLine[c++] = "Distance_to_top";
		outputLine[c++] = "Number_of_child_terms";
		outputLine[c++] = "Number_of_child_terms_used";
		outputLine[c++] = "Child_terms_used";
		writer.writeNext(outputLine);

		for (UpdatedPredictionInfo pi : checkedHpoInfo.values()) {
			c = 0;
			outputLine[c++] = pi.getHpo();
			outputLine[c++] = String.valueOf(pi.getGeneCount());
			outputLine[c++] = String.valueOf(pi.getOriginalAuc());
			outputLine[c++] = String.valueOf(pi.getOriginalPvalue());
			outputLine[c++] = String.valueOf(pi.getUpdatedAuc());
			outputLine[c++] = String.valueOf(pi.getUpdatedPvalue());
			outputLine[c++] = String.valueOf(pi.isIsSignificant());
			outputLine[c++] = "-";
			outputLine[c++] = String.valueOf(pi.getChildTermCount());
			outputLine[c++] = String.valueOf(pi.getUsedChildTerms().size());
			outputLine[c++] = String.join(";", pi.getUsedChildTerms());
			writer.writeNext(outputLine);
		}
		writer.close();

		improver.writeUpdatedMatrix(updatedPredictionMatrixFile);
	}
 
Example 18
Source File: HpoFinder.java    From systemsgenetics with GNU General Public License v3.0
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, ParseException {

	final File hpoOboFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\hp.obo");
	final File hpoPredictionInfoFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_auc_bonferroni.txt");
	final File queryFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\originalHpo.txt");
	final File outputFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\selectedHpo.txt");
	final double correctedPCutoff = 0.05;

	Map<String, PredictionInfo> predictionInfo = HpoFinder.loadPredictionInfo(hpoPredictionInfoFile);

	Ontology hpoOntology = HpoFinder.loadHpoOntology(hpoOboFile);

	HpoFinder hpoFinder = new HpoFinder(hpoOntology, predictionInfo);

	CSVWriter writer = new CSVWriter(new FileWriter(outputFile), '\t', '\0', '\0', "\n");

	int c = 0;
	String[] outputLine = new String[7];
	outputLine[c++] = "originalHPO";
	outputLine[c++] = "originalDescription";
	outputLine[c++] = "matchHPO";
	outputLine[c++] = "matchDescription";
	outputLine[c++] = "matchPvalue";
	outputLine[c++] = "matchauc";
	outputLine[c++] = "multiMatch";

	writer.writeNext(outputLine);

	BufferedReader queryReader = new BufferedReader(new FileReader(queryFile));
	String queryHpo;

	while ((queryHpo = queryReader.readLine()) != null) {

		if (hpoOntology.containsTerm(queryHpo)) {
			Term queryHpoTerm = hpoOntology.getTerm(queryHpo);

			List<Term> alternativeTerms = hpoFinder.getPredictableTerms(queryHpoTerm, correctedPCutoff);

			for (Term alternativeTerm : alternativeTerms) {

				PredictionInfo info = predictionInfo.get(alternativeTerm.getName());

				c = 0;
				outputLine[c++] = queryHpo;
				outputLine[c++] = queryHpoTerm.getDescription();
				outputLine[c++] = alternativeTerm.getName();
				outputLine[c++] = alternativeTerm.getDescription();
				outputLine[c++] = String.valueOf(info.getpValue());
				outputLine[c++] = String.valueOf(info.getAuc());
				outputLine[c++] = alternativeTerms.size() > 1 ? "x" : "-";
				writer.writeNext(outputLine);

				//System.out.println(alternativeTerm.getName() + " P-value: " + info.getpValue() + " AUC: " + info.getAuc() + " " + alternativeTerm.getDescription());
			}

			if (alternativeTerms.isEmpty()) {
				c = 0;
				outputLine[c++] = queryHpo;
				outputLine[c++] = queryHpoTerm.getDescription();
				outputLine[c++] = "NA";
				outputLine[c++] = "NA";
				outputLine[c++] = "NA";
				outputLine[c++] = "NA";
				outputLine[c++] = "NA";
				writer.writeNext(outputLine);
			}

		} else {
			c = 0;
			outputLine[c++] = queryHpo;
			outputLine[c++] = "NA";
			outputLine[c++] = "NA";
			outputLine[c++] = "NA";
			outputLine[c++] = "NA";
			outputLine[c++] = "NA";
			outputLine[c++] = "NA";
			writer.writeNext(outputLine);
		}

	}

	writer.close();

}
 
Example 19
Source File: FilterPrioBasedOnMutatedGenes2.java    From systemsgenetics with GNU General Public License v3.0
/**
	 * @param args the command line arguments
	 */
	public static void main(String[] args) throws FileNotFoundException, IOException {

//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\samplesWithGeno.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\gavinRes\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\Prioritisations\\rankingCandidateGenes");


//		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\PrioritizeRequests\\Prioritisations\\samples.txt");
//		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\PrioritizeRequests\\CandidateGenes\\");
//		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\PrioritizeRequests\\Prioritisations");
//		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\PrioritizeRequests\\rankingCandidateGenes");

		final File sampleFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\New5gpm\\hpo5gpm.txt");
		final File genoFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\New5gpm\\Genes\\");
		final File prioFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\New5gpm\\Prioritisations\\");
		final File resultFolder = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\BenchmarkSamples\\New5gpm\\RankingCandidateGenes\\");

		final CSVParser parser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
		final CSVReader sampleFileReader = new CSVReaderBuilder(new BufferedReader(new FileReader(sampleFile))).withSkipLines(0).withCSVParser(parser).build();

		String[] nextLine;
		while ((nextLine = sampleFileReader.readNext()) != null) {

			String sample = nextLine[0];

			String genoSampleName = sample + ".txt";

			File genoFile = new File(genoFolder, genoSampleName);
			File prioFile = new File(prioFolder, sample + ".txt");
			File rankingFile = new File(resultFolder, sample + ".txt");

			System.out.println("------------------------------------------------------------------");
			System.out.println("Sample: " + sample);
			System.out.println("Geno: " + genoFile.getAbsolutePath());
			System.out.println("Prio: " + prioFile.getAbsolutePath());
			System.out.println("Ranking: " + rankingFile.getAbsolutePath());

			HashSet<String> genesWithMutation = getMutatedGenes(genoFile, 0, 0);

			final CSVReader prioFileReader = new CSVReaderBuilder(new BufferedReader(new FileReader(prioFile))).withSkipLines(0).withCSVParser(parser).build();

			CSVWriter writer = new CSVWriter(new FileWriter(rankingFile), '\t', '\0', '\0', "\n");

			String[] outputLine = prioFileReader.readNext();
			writer.writeNext(outputLine);

			while ((outputLine = prioFileReader.readNext()) != null) {

				if (genesWithMutation.contains(outputLine[1])) {
					writer.writeNext(outputLine);
				}

			}

			writer.close();
			prioFileReader.close();

		}

	}
 
Example 20
Source File: KIISEMain.java    From SIMVA-SoS with Apache License 2.0
public static void main(String[] args) throws IOException{
        Constituent cs1 = new Constituent("CS1", 120);
        Constituent cs2 = new Constituent("CS2", 120);
        Constituent cs3 = new Constituent("CS3", 120);

        Action a1 = new Action("Action1", 2, 1);
        a1.setActionType(Action.TYPE.NORMAL);
        Action a2 = new Action("Action2", 2, 2);
        a2.setActionType(Action.TYPE.NORMAL);
        Action a3 = new Action("Action3", 3, 3);
        a3.setActionType(Action.TYPE.NORMAL);

        cs1.addCapability(a1, 1);
        cs1.addCapability(a2, 2);

        cs2.addCapability(a2, 2);
        cs2.addCapability(a3, 3);

        cs3.addCapability(a1, 1);
        cs3.addCapability(a3, 3);

        Constituent[] CSs = {cs1, cs2, cs3};
        Action[] actions = {a1, a2, a3};
        SoS sos = new SoS("SoS Manager", CSs, actions);
        Environment env = new Environment(CSs, actions);

        Simulator sim = new Simulator(CSs, sos, env);
        sim.setEndTick(300);

        int[] boundArr = {120, 125, 130, 135, 140, 145, 150};
        for(int bound: boundArr){

            String outputName = "SIM_" + bound + ".csv";
            CSVWriter cw = new CSVWriter(new OutputStreamWriter(new FileOutputStream(outputName), "UTF-8"), ',', '"');
            cw.writeNext(new String[] {"prob", "num_of_samples", "execution_time", "min_tick", "max_tick", "result"});
            ArrayList<SMCResult> resList = new ArrayList<SMCResult>();

            System.out.println("----------------------------------------------------");
            System.out.println("SoS-level benefit is greater than "+bound + ".");
            BaseChecker checker = new BaseChecker();
            checker.init(10000, bound, BaseChecker.comparisonType.GREATER_THAN_AND_EQUAL_TO);
            SPRTMethod sprt = new SPRTMethod(0.01, 0.01, 0.005);

            for(int i=1; i<100; i++){
                double theta = 0.01 * i; // theta
                long start = System.currentTimeMillis();
                sprt.setExpression(theta);

                while(!sprt.checkStopCondition())
                {
                    sim.execute();
                    SIMResult res = sim.getResult();
                    int checkResult = checker.evaluateSample(res);
                    sprt.updateResult(checkResult);
                }


                boolean h0 = sprt.getResult(); // Result
                int numSamples = sprt.getNumSamples();
//                System.out.print("SMC decides that your hypothesis is ");
//                if(h0)
//                {
//                    System.out.println("accepted at " + theta + " / number of samples: " + numSamples);
//                }
//                else
//                {
//                    System.out.println("not accepted at " + theta + " / number of samples: " + numSamples);
//                }

                long exec_time = System.currentTimeMillis() - start; //exec time
                int minTick = checker.getMinTick();
                int maxTick = checker.getMaxTick();
                sprt.reset();
                resList.add(new SMCResult(theta, numSamples, exec_time, minTick, maxTick, h0));
                if(h0) System.out.print("T");
                else System.out.print("F");
            }
            System.out.println();
            System.out.print("w");
            for(SMCResult r : resList){
                System.out.print(".");
                cw.writeNext(r.getArr());
            }
            cw.close();
            resList.clear();
            System.out.println();
        }
        System.out.println("Finished");

    }