Java Code Examples for org.apache.commons.csv.CSVParser#close()

The following examples show how to use org.apache.commons.csv.CSVParser#close(). Each example is taken from an open source project and lists its source file and license.
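Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all share: build a CSVParser over a Reader, consume its records, and then release the underlying reader with close(). The class name CsvParserCloseSketch, the sample CSV string, and its column names are made up for illustration; since CSVParser implements Closeable, the second half of the sketch shows try-with-resources as an alternative to the explicit finally-block close() used in several of the examples below.

import java.io.IOException;
import java.io.StringReader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvParserCloseSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical input; any Reader (FileReader, BufferedReader, ...) works the same way.
        String csv = "name,city\nAlice,Berlin\nBob,Lisbon\n";

        // Explicit close() in a finally block, mirroring the style of the examples below.
        CSVParser parser = null;
        try {
            parser = new CSVParser(new StringReader(csv), CSVFormat.DEFAULT.withHeader());
            for (CSVRecord record : parser) {
                System.out.println(record.get("name") + " lives in " + record.get("city"));
            }
        } finally {
            if (parser != null) {
                parser.close(); // also closes the underlying Reader
            }
        }

        // Equivalent with try-with-resources: close() is invoked automatically.
        try (CSVParser autoClosed = CSVFormat.DEFAULT.withHeader()
                .parse(new StringReader(csv))) {
            System.out.println(autoClosed.getRecords().size() + " records parsed");
        }
    }
}

Calling close() matters because CSVParser keeps the supplied Reader open until then; every example below ends with exactly that call, either inline or in a finally block.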
Example 1
Source File: FrameworkUtils.java    From data-polygamy with BSD 3-Clause "New" or "Revised" License
/**
 * String Parsing
 */
public static String[] splitStr(String val, Integer len) throws IOException {
    
    String[] input;
    
    try {
        CSVParser parser = new CSVParser(new StringReader(val), CSVFormat.DEFAULT);
        CSVRecord record = parser.getRecords().get(0);
        input = new String[len];
        Iterator<String> valuesIt = record.iterator();
        int i = 0;
        while (valuesIt.hasNext()) {
            input[i] = valuesIt.next().trim();
            i++;
        }
        parser.close();
    } catch (ArrayIndexOutOfBoundsException e) {
        input = val.split(",", len);
        for (int i = 0; i < input.length; i++)
            input[i] = input[i].trim();
    }
    
    return input;
}
 
Example 2
Source File: FileSourceUserGroupBuilder.java    From ranger with Apache License 2.0
public Map<String, List<String>> readTextFile(File textFile) throws Exception {
	
	Map<String, List<String>> ret = new HashMap<String, List<String>>();
	
	String delimiter = config.getUserSyncFileSourceDelimiter();
	
	CSVFormat csvFormat = CSVFormat.newFormat(delimiter.charAt(0));
	
	CSVParser csvParser = new CSVParser(new BufferedReader(new FileReader(textFile)), csvFormat);
	
	List<CSVRecord> csvRecordList = csvParser.getRecords();
	
	if ( csvRecordList != null) {
		for(CSVRecord csvRecord : csvRecordList) {
			List<String> groups = new ArrayList<String>();
			String user = csvRecord.get(0);
			
			user = user.replaceAll("^\"|\"$", "");
				
			int i = csvRecord.size();
			
			for (int j = 1; j < i; j ++) {
				String group = csvRecord.get(j);
				if ( group != null && !group.isEmpty()) {
					 group = group.replaceAll("^\"|\"$", "");
					 groups.add(group);
				}
			}
			ret.put(user,groups);
		 }
	}

	csvParser.close();

	return ret;
}
 
Example 3
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVCommonsUpsert_WithTimestamp() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS TS_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, TS TIMESTAMP);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "TS_TABLE",
                ImmutableList.<String>of(), true, ',', '"', null, "!");
        csvUtil.upsert(
                new StringReader("ID,TS\n"
                        + "1,1970-01-01 00:00:10\n"
                        + "2,1970-01-01 00:00:10.123\n"));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT ID, TS FROM TS_TABLE ORDER BY ID");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(10000L, phoenixResultSet.getTimestamp(2).getTime());
        assertTrue(phoenixResultSet.next());
        assertEquals(2L, phoenixResultSet.getLong(1));
        assertEquals(10123L, phoenixResultSet.getTimestamp(2).getTime());
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 4
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVCommonsUpsert_WithArray() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS ARRAY_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, VALARRAY INTEGER ARRAY);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE",
                ImmutableList.<String>of(), true, ',', '"', null, "!");
        csvUtil.upsert(
                new StringReader("ID,VALARRAY\n"
                        + "1,2!3!4\n"));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT ID, VALARRAY FROM ARRAY_TABLE");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(
                PArrayDataType.instantiatePhoenixArray(PInteger.INSTANCE, new Integer[]{2, 3, 4}),
                phoenixResultSet.getArray(2));
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 5
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVCommonsUpsertBadEncapsulatedControlChars()
        throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS "
                + ENCAPSULATED_CHARS_TABLE
                + "(MYKEY VARCHAR NOT NULL PRIMARY KEY, MYVALUE VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn,
                ENCAPSULATED_CHARS_TABLE, Collections.<String> emptyList(),
                true);
        try {
            csvUtil.upsert(new StringReader(
                    CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER));
            fail();
        } catch (RuntimeException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "invalid char between encapsulated token and delimiter"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 6
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVUpsertWithBogusColumnStrict() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName,
                Arrays.asList(STOCK_COLUMNS_WITH_BOGUS), true);
        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail();
        } catch (SQLException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "ERROR 504 (42703): Undefined column. columnName=" + stockTableName + ".BOGUS"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 7
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVUpsertWithAllColumn() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName,
                Arrays.asList("FOO", "BAR"), false);

        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail();
        } catch (SQLException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "ERROR 504 (42703): Undefined column. columnName=" + stockTableName + ".[FOO, BAR]"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 8
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVUpsertWithInvalidNumericalData_StrictMode() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY_ID BIGINT);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file in strict mode
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName,
                Arrays.asList("SYMBOL", "COMPANY_ID"), true);
        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail("Running an upsert with data that can't be upserted in strict mode "
                    + "should throw an exception");
        } catch (IllegalDataException e) {
            // Expected
        }

    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 9
Source File: CSVCommonsLoader.java    From phoenix with Apache License 2.0
/**
 * Data is batched up based on connection batch size.
 * Column PDataType is read from metadata and is used to convert
 * column value to correct type before upsert.
 *
 * The format is determined by the supplied csvParser.
 *
 * @param csvParser
 *            CSVParser instance
 * @throws Exception
 */
public void upsert(CSVParser csvParser) throws Exception {
    List<ColumnInfo> columnInfoList = buildColumnInfoList(csvParser);

    boolean wasAutoCommit = conn.getAutoCommit();
    try {
        conn.setAutoCommit(false);
        long start = EnvironmentEdgeManager.currentTimeMillis();
        CsvUpsertListener upsertListener = new CsvUpsertListener(conn,
                conn.getMutateBatchSize(), isStrict);
        CsvUpsertExecutor csvUpsertExecutor = new CsvUpsertExecutor(conn,
            SchemaUtil.getEscapedFullTableName(tableName),
                columnInfoList, upsertListener, arrayElementSeparator);

        csvUpsertExecutor.execute(csvParser);
        csvUpsertExecutor.close();

        conn.commit();
        double elapsedDuration = ((EnvironmentEdgeManager.currentTimeMillis() - start) / 1000.0);
        System.out.println("CSV Upsert complete. " + upsertListener.getTotalUpsertCount()
                + " rows upserted");
        System.out.println("Time: " + elapsedDuration + " sec(s)\n");

    } finally {

        // release reader resources.
        if (csvParser != null) {
            csvParser.close();
        }
        if (wasAutoCommit) {
            conn.setAutoCommit(true);
        }
    }
}
 
Example 10
Source File: GoogleChartGenerator.java    From phoenix with Apache License 2.0
/**
 * Reads the aggregate file and converts it to a DataNode.
 * @param label
 * @throws Exception
 */
private void read(String label) throws Exception {
    String resultFileName = resultDir
            + PherfConstants.PATH_SEPARATOR
            + PherfConstants.RESULT_PREFIX
            + label
            + ResultFileDetails.CSV_AGGREGATE_PERFORMANCE.getExtension();

    FileReader in = new FileReader(resultFileName);
    final CSVParser parser = new CSVParser(in, CSVFormat.DEFAULT.withHeader());

    for (CSVRecord record : parser) {
        String group = record.get("QUERY_GROUP");
        String query = record.get("QUERY");
        String explain = record.get("EXPLAIN_PLAN");
        String tenantId = record.get("TENANT_ID");
        long avgTime = Long.parseLong(record.get("AVG_TIME_MS"));
        long minTime = Long.parseLong(record.get("AVG_MIN_TIME_MS"));
        long numRuns = Long.parseLong(record.get("RUN_COUNT"));
        long rowCount = Long.parseLong(record.get("RESULT_ROW_COUNT"));
        Node node = new Node(minTime, avgTime, numRuns, explain, query, tenantId, label, rowCount);

        if (datanodes.containsKey(group)) {
            datanodes.get(group).getDataSet().put(label, node);
        } else {
            datanodes.put(group, new DataNode(label, node));
        }
    }
    parser.close();
}
 
Example 11
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVCommonsUpsert_WithTimestamp() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS TS_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, TS TIMESTAMP);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "TS_TABLE",
                null, true, ',', '"', null, "!");
        csvUtil.upsert(
                new StringReader("ID,TS\n"
                        + "1,1970-01-01 00:00:10\n"
                        + "2,1970-01-01 00:00:10.123\n"));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT ID, TS FROM TS_TABLE ORDER BY ID");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(10000L, phoenixResultSet.getTimestamp(2).getTime());
        assertTrue(phoenixResultSet.next());
        assertEquals(2L, phoenixResultSet.getLong(1));
        assertEquals(10123L, phoenixResultSet.getTimestamp(2).getTime());
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 12
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVCommonsUpsert_WithArray() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS ARRAY_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, VALARRAY INTEGER ARRAY);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE",
                null, true, ',', '"', null, "!");
        csvUtil.upsert(
                new StringReader("ID,VALARRAY\n"
                        + "1,2!3!4\n"));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT ID, VALARRAY FROM ARRAY_TABLE");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(
                PArrayDataType.instantiatePhoenixArray(PInteger.INSTANCE, new Integer[]{2, 3, 4}),
                phoenixResultSet.getArray(2));
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 13
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVCommonsUpsertBadEncapsulatedControlChars()
        throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS "
                + ENCAPSULATED_CHARS_TABLE
                + "(MYKEY VARCHAR NOT NULL PRIMARY KEY, MYVALUE VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn,
                ENCAPSULATED_CHARS_TABLE, Collections.<String> emptyList(),
                true);
        try {
            csvUtil.upsert(new StringReader(
                    CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER));
            fail();
        } catch (RuntimeException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "invalid char between encapsulated token and delimiter"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 14
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVUpsertWithBogusColumnStrict() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                Arrays.asList(STOCK_COLUMNS_WITH_BOGUS), true);
        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail();
        } catch (SQLException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "ERROR 504 (42703): Undefined column. columnName=STOCK_SYMBOL.BOGUS"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 15
Source File: CSVCommonsLoaderIT.java    From phoenix with Apache License 2.0
@Test
public void testCSVUpsertWithAllColumn() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                Arrays.asList("FOO", "BAR"), false);

        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail();
        } catch (SQLException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "ERROR 504 (42703): Undefined column. columnName=STOCK_SYMBOL.[FOO, BAR]"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
 
Example 16
Source File: CSVCommonsLoader.java    From phoenix with Apache License 2.0
/**
 * Data is batched up based on connection batch size.
 * Column PDataType is read from metadata and is used to convert
 * column value to correct type before upsert.
 *
 * The format is determined by the supplied csvParser.
 *
 * @param csvParser
 *            CSVParser instance
 * @throws Exception
 */
public void upsert(CSVParser csvParser) throws Exception {
    List<ColumnInfo> columnInfoList = buildColumnInfoList(csvParser);

    boolean wasAutoCommit = conn.getAutoCommit();
    try {
        conn.setAutoCommit(false);
        long start = System.currentTimeMillis();
        CsvUpsertListener upsertListener = new CsvUpsertListener(conn, conn.getMutateBatchSize());
        CsvUpsertExecutor csvUpsertExecutor = CsvUpsertExecutor.create(conn, tableName,
                columnInfoList, upsertListener, arrayElementSeparator);

        csvUpsertExecutor.execute(csvParser);
        csvUpsertExecutor.close();

        conn.commit();
        double elapsedDuration = ((System.currentTimeMillis() - start) / 1000.0);
        System.out.println("CSV Upsert complete. " + upsertListener.getTotalUpsertCount()
                + " rows upserted");
        System.out.println("Time: " + elapsedDuration + " sec(s)\n");

    } finally {

        // release reader resources.
        if (csvParser != null) {
            csvParser.close();
        }
        if (wasAutoCommit) {
            conn.setAutoCommit(true);
        }
    }
}
 
Example 17
Source File: FrameworkUtils.java    From data-polygamy with BSD 3-Clause "New" or "Revised" License
public static String[] splitStr(String val) throws IOException {
    
    CSVParser parser = new CSVParser(new StringReader(val), CSVFormat.DEFAULT);
    CSVRecord record = parser.getRecords().get(0);
    Iterator<String> valuesIt = record.iterator();
    String[] input = new String[record.size()];
    int i = 0;
    while (valuesIt.hasNext()) {
        input[i] = valuesIt.next();
        i++;
    }
    parser.close();
    return input;
}
 
Example 18
Source File: Utils.java    From systemsgenetics with GNU General Public License v3.0
/**
 * Reads a tab delimited file and returns it as a list of lists, with [x] =
 * column and [x][y] = value in column. Needed for reading the counts
 * file, as there the rows are the samples, as opposed to the expression and
 * genotype files where the columns are the samples. The file needs to be read into
 * memory, so the minimal memory requirement is larger than the size of the
 * counts file.
 *
 * @param filepath The path to a tab delimited file to read
 *
 * @return A 2D array with each array being one column from filepath except the first column,
 *         and a 1D array with the first column (without header)
 *
 * @throws IOException If the file at filepath can not be read
 */
public static Object[] readTabDelimitedColumns(String filepath) throws IOException {
	List<List<String>> allColumns = new ArrayList<List<String>>();
	// parses file on tabs
	CSVParser parser = new CSVParser(new FileReader(filepath), CSVFormat.newFormat('\t'));
	Boolean header = true;
	int rowNumber = 0;
	int columnIndexHeader = 0;
	List<String> firstColumn = new ArrayList<String>();
	for (CSVRecord row : parser) {
		rowNumber++;
		// starts at 1 because 1st element of column is the samplename, unless its the header row
		int columnStart = 1;
		if(header){
			columnStart = 0;
		}
		for (int columnIndex = columnStart; columnIndex < row.size(); columnIndex++) {
			// header can start from 0 if it is R styled, so check if element 0 has a value
			// R style is e.g.
			// colNameA	colNameB
			// rowNameA	AAValue	AAvalue
			// rownameB ABValue BAvalue
			// while csv style has a tab before colNameA
			if(header){
				String columnValue = row.get(columnIndex);
				if(columnValue.length() == 0){
					continue;
				}
				allColumns = addSingleValueTo2DArray(allColumns, columnIndexHeader,columnValue);
				columnIndexHeader++;
				continue;
			}
			else{
				// This changes the allColumns list of list in place, e.g. for example loop -> [[]] -> [[1]] -> [[1,2]] -> [[1,2],[3]] -> etc
				allColumns = addSingleValueTo2DArray(allColumns, columnIndex - 1, row.get(columnIndex));
			}
		}
		if(!header){
			firstColumn.add(row.get(0));
			if(row.size()-1 != columnIndexHeader){
				DeconvolutionLogger.log.info(String.format("Table %s does not have the same number of columns as there are in the header at row %d",filepath,rowNumber));
				DeconvolutionLogger.log.info(String.format("Number of header columns: %d",columnIndexHeader));
				DeconvolutionLogger.log.info(String.format("Number of columns at row %d: %d", rowNumber, row.size()-1));
				DeconvolutionLogger.log.info(row.toString());
				parser.close();
				throw new RuntimeException(String.format("Cellcount percentage table does not have the same number of columns as there are celltypes at row %d",rowNumber));
			}
		}
		if(header){
			header = false;
		}

	}
	parser.close();
	return new Object[] {firstColumn, allColumns};
}
 
Example 19
Source File: App.java    From C4CODATAAPIDEVGUIDE with Apache License 2.0
public static void main(String[] args) throws Exception {
	// Create Consumer instance and get csrfToken
	AccountAttachmentODataConsumer consumer = new AccountAttachmentODataConsumer();

	//
	if (!initApp(args))
		return;

	logger.info(">>>>>>>>>> Uploading of attachments has started. <<<<<<<<<<<<<");

	// Open the CSV file for reading
	String inputFileName = AccountAttachmentODataConsumer.getAttachmentsFolder()
			+ "/manifest.csv";

	File inputFile = new File(inputFileName);

	if (!inputFile.canRead())
		throw new RuntimeException("Can't read input: " + inputFileName);

	CSVParser inParser = CSVParser.parse(inputFile, StandardCharsets.UTF_8,
			CSVFormat.EXCEL.withHeader());

	Attachment newAttachment = null;
	int count = 0;
	int failed = 0;
	// Process the entries in the CSV file
	for (CSVRecord record : inParser) {
		newAttachment = new Attachment();

		newAttachment.set_Name(record.get("AttachmentName"));
		newAttachment.set_ParentObjectID(record
				.get("ParentObjectID"));
		newAttachment.set_MimeType(record.get("MimeType"));
		newAttachment.set_TypeCode(record.get("TypeCode"));
		newAttachment.set_Binary(readAttachmentFile(
				AccountAttachmentODataConsumer.getAttachmentsFolder() +
				"/" + record.get("Path")));
		newAttachment.setFileName(record.get("Path"));

		Optional<String> errorMessage = consumer
				.createAttachment(newAttachment);

		if (errorMessage.isPresent()) {
			logger.severe(errorMessage.get());
			failed++;
		}
		else {
			logger.info(
					"[ParentObjectID:" + record.get("ParentObjectID") + "|" +
					"MimeType:" + record.get("MimeType") + "|" +
					"TypeCode:" + record.get("TypeCode") + "|" +
					"AttachmentName:" +	record.get("AttachmentName") + "|" +
					"Filename:" +record.get("Path") + "] uploaded successfully."
					);
		}

		count++;
	}
	inParser.close();

	logger.info(">>>>>>>>>>> A total of " + count
			+ " attachments have been processed. <<<<<<<<<");
	if (failed > 0) {
		logger.info(failed
				+ " attachment"
				+ (failed > 1 ? "s" : "")
				+ " failed to upload. See details, above. (or in the log file: "
				+ logFilename + ")");
	}

}
 
Example 20
Source File: ExCommentCSVReader.java    From repositoryminer with Apache License 2.0
private List<CSVRecord> readCSV(String[] header, String filename) throws IOException {
	FileReader fileReader = new FileReader(filename);

	CSVFormat format = CSVFormat.DEFAULT.withDelimiter(config.getDelimiter()).withHeader(header)
			.withSkipHeaderRecord();

	CSVParser csvParser = new CSVParser(fileReader, format);

	List<CSVRecord> records = csvParser.getRecords();

	fileReader.close();
	csvParser.close();

	return records;
}