com.opencsv.CSVReader Java Examples
The following examples show how to use com.opencsv.CSVReader.
Each example is taken from an open-source project; the project, source file, and license are noted above each snippet.
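All of the snippets below share one core pattern: wrap a java.io.Reader in a CSVReader (directly or through CSVReaderBuilder) and call readNext() until it returns null. The sketch below is a minimal, self-contained illustration of that pattern, not taken from any of the projects; the data.csv path is a placeholder, and with opencsv 5.x readNext() additionally declares CsvValidationException, which the broad throws clause absorbs.

    import com.opencsv.CSVReader;
    import java.io.FileReader;

    public class CsvReaderQuickStart {
        public static void main(String[] args) throws Exception {
            // "data.csv" is a placeholder path; readNext() returns null once the input is exhausted.
            try (CSVReader reader = new CSVReader(new FileReader("data.csv"))) {
                String[] line;
                while ((line = reader.readNext()) != null) {
                    System.out.println(String.join(" | ", line));
                }
            }
        }
    }

The project examples that follow vary this pattern: some build the reader with CSVReaderBuilder and a custom CSVParser, some read everything at once with readAll(), and some iterate via iterator().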
Example #1
Source File: BaseCsvFileProcessor.java From sakai with Educational Community License v2.0 | 6 votes |
public void processFormattedFile(BufferedReader fr, FileProcessorState state) throws Exception {
    CSVReader csvr = new CSVReader(fr);
    String[] line = null;
    while ((line = csvr.readNext()) != null) {
        state.setRecordCnt(state.getRecordCnt() + 1);
        boolean headerPresent = state.isHeaderRowPresent();
        if (state.getColumns() != line.length) {
            state.appendError("Wrong Number Columns Row:, " + state.getRecordCnt() + "Saw:" + line.length
                    + ", Expecting: " + state.getColumns());
            state.setErrorCnt(state.getErrorCnt() + 1);
        } else if ((headerPresent && state.getRecordCnt() > 1) || !headerPresent) {
            try {
                line = trimLine(line);
                processRow(line, state);
                state.setProcessedCnt(state.getProcessedCnt() + 1);
            } catch (Exception e) {
                log.debug(e.getMessage(), e);
                state.appendError("Row " + state.getRecordCnt() + " " + e.getMessage());
                state.setErrorCnt(state.getErrorCnt() + 1);
            }
        }
    }
    fr.close();
}
Example #2
Source File: CsvFileReader.java From herd-mdl with Apache License 2.0 | 6 votes |
/**
 * Loads content of the csv file into a List of rows in the CSV. Each row is represented by a Map of
 * columnName->columnValue. This assumes that first row is the header with column names.
 *
 * @param filePath file path to be read from
 * @return List of map for columns key/value pairs
 */
public static List<Map<String, String>> readCSVFile(String filePath) {
    InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath);
    List<Map<String, String>> contents = new ArrayList<>();
    int lineNumber = 1;
    String[] row;
    try (CSVReader csvReader = new CSVReader(new InputStreamReader(inputStream))) {
        String[] header = csvReader.readNext();
        while ((row = csvReader.readNext()) != null) {
            lineNumber++;
            Map<String, String> rowMap = new HashMap<>();
            for (int i = 0; i < row.length; i++) {
                rowMap.put(header[i].trim(), row[i].trim());
            }
            contents.add(rowMap);
        }
    } catch (IOException | RuntimeException e) {
        throw new RuntimeException("Error reading CSV file:" + filePath + " at line " + lineNumber, e);
    }
    return contents;
}
Example #3
Source File: LoadIO.java From hbase-tools with Apache License 2.0 | 6 votes |
public void load(Args args, String input) {
    String fileName = getSavedFileName(input);
    if (fileName == null) return;

    try (CSVReader reader = new CSVReader(new FileReader(fileName), SEPARATOR)) {
        List<LoadEntry> savedLoadEntryList = new ArrayList<>();
        readHeader(reader, savedLoadEntryList);

        Map<Level, LoadRecord> loadedLoadMap = new TreeMap<>();
        readBody(reader, loadedLoadMap, savedLoadEntryList);

        load.setLoadMapStart(loadedLoadMap, getTimestamp(args, fileName));
        System.out.println(fileName + " is loaded.");
    } catch (Throwable e) {
        e.printStackTrace();
    }
}
Example #4
Source File: StlDemoRestServer.java From stl-decomp-4j with Apache License 2.0 | 6 votes |
@SuppressWarnings("Duplicates")
public static TimeSeries getTimeSeries(String fileName) throws IOException {
    CSVReaderBuilder builder = new CSVReaderBuilder(new FileReader(fileName));

    TimeSeries ts = new TimeSeries();
    try (CSVReader reader = builder.withSkipLines(1).build()) {
        String[] nextLine;
        while ((nextLine = reader.readNext()) != null) {
            double dateAsYear = Double.parseDouble(nextLine[1]);
            long time = (long) ((dateAsYear - 1970.0) * 365.25 * 24 * 60 * 60 * 1000);
            ts.times.add(time);

            double value = Double.parseDouble(nextLine[2]);
            ts.values.add(value);
        }
    }
    return ts;
}
Example #5
Source File: RetentionExpirationDestroyerController.java From herd with Apache License 2.0 | 6 votes |
/**
 * Get business object data keys from the input CSV file. This method also validates the input file format.
 *
 * @param inputCsvFile the input CSV file
 *
 * @return the list of business object data keys
 * @throws IOException if any problems were encountered
 */
protected List<BusinessObjectDataKey> getBusinessObjectDataKeys(File inputCsvFile) throws IOException {
    List<BusinessObjectDataKey> businessObjectDataKeyList = new ArrayList<>();

    // Read the input CSV file and populate business object data key list.
    try (CSVReader csvReader = new CSVReader(new InputStreamReader(new FileInputStream(inputCsvFile), StandardCharsets.UTF_8))) {
        String[] line;

        // Validate required header of the CSV input file.
        if ((line = csvReader.readNext()) == null || !Arrays.equals(line, CSV_FILE_HEADER_COLUMNS)) {
            throw new IllegalArgumentException(String.format("Input file \"%s\" does not contain the expected CSV file header.", inputCsvFile.toString()));
        }

        // Process the input CSV file line by line.
        int lineCount = 2;
        while ((line = csvReader.readNext()) != null) {
            businessObjectDataKeyList.add(getBusinessObjectDataKey(line, lineCount++, inputCsvFile));
        }
    }

    return businessObjectDataKeyList;
}
Example #6
Source File: CsvDataProvider.java From NoraUi with GNU Affero General Public License v3.0 | 6 votes |
/**
 * {@inheritDoc}
 */
@Override
public String[] readLine(int line, boolean readResult) {
    log.debug("readLine at line {}", line);
    try {
        final CSVReader reader = openInputData();
        final List<String[]> a = reader.readAll();
        if (line >= a.size()) {
            return null;
        }
        final String[] row = a.get(line);
        if ("".equals(row[0])) {
            return null;
        } else {
            final String[] ret = readResult ? new String[columns.size()] : new String[columns.size() - 1];
            System.arraycopy(row, 0, ret, 0, ret.length);
            return ret;
        }
    } catch (final IOException e) {
        log.error("error CsvDataProvider.readLine()", e);
        return null;
    }
}
Example #7
Source File: StlPerfTest.java From stl-decomp-4j with Apache License 2.0 | 6 votes |
@SuppressWarnings("Duplicates")
private static TimeSeries getCo2Data() throws IOException {
    final String path = "../StlDemoRestServer/co2.csv";
    CSVReaderBuilder builder = new CSVReaderBuilder(new FileReader(path));

    TimeSeries ts = new TimeSeries();
    try (CSVReader reader = builder.withSkipLines(1).build()) {
        String[] nextLine;
        while ((nextLine = reader.readNext()) != null) {
            double dateAsYear = Double.parseDouble(nextLine[1]);
            long time = (long) ((dateAsYear - 1970.0) * 365.25 * 24 * 60 * 60 * 1000);
            ts.times.add(time);

            double value = Double.parseDouble(nextLine[2]);
            ts.values.add(value);
        }
    }
    return ts;
}
Example #8
Source File: StlPerfTest.java From stl-decomp-4j with Apache License 2.0 | 6 votes |
private static TimeSeries getHourlyData() throws IOException {
    final String path = "./fortran_benchmark/hourly_stl_test.csv";
    CSVReaderBuilder builder = new CSVReaderBuilder(new FileReader(path));

    TimeSeries ts = new TimeSeries();
    try (CSVReader reader = builder.build()) {
        String[] nextLine;
        long time = 1492457959000L;
        while ((nextLine = reader.readNext()) != null) {
            ts.times.add(time);
            time += 3600 * 1000;

            double value = Double.parseDouble(nextLine[0]);
            ts.values.add(value);
        }
    }
    return ts;
}
Example #9
Source File: LoadIO.java From hbase-tools with Apache License 2.0 | 6 votes |
private void readBody(CSVReader reader, Map<Level, LoadRecord> loadedLoadMap, List<LoadEntry> savedLoadEntryList) throws IOException {
    String[] nextLine;
    while ((nextLine = reader.readNext()) != null) {
        Level level = null;
        for (Level levelEntry : load.getLoadMap().keySet()) {
            if (levelEntry.equalsName(nextLine[0])) {
                level = levelEntry;
            }
        }
        if (level != null) {
            LoadRecord loadRecord = loadedLoadMap.get(level);
            if (loadRecord == null) {
                loadRecord = new LoadRecord();
                loadedLoadMap.put(level, loadRecord);
            }

            int i = 2;
            for (LoadEntry loadEntry : savedLoadEntryList) {
                String string = nextLine[i++];
                if (string.length() > 0)
                    loadRecord.put(loadEntry, loadEntry.toNumber(string));
            }
        }
    }
}
Example #10
Source File: CsvReaderExamples.java From tutorials with MIT License | 6 votes |
public static List<String[]> oneByOne(Reader reader) {
    List<String[]> list = new ArrayList<>();
    try {
        CSVParser parser = new CSVParserBuilder()
            .withSeparator(',')
            .withIgnoreQuotations(true)
            .build();

        CSVReader csvReader = new CSVReaderBuilder(reader)
            .withSkipLines(0)
            .withCSVParser(parser)
            .build();

        String[] line;
        while ((line = csvReader.readNext()) != null) {
            list.add(line);
        }
        reader.close();
        csvReader.close();
    } catch (Exception ex) {
        Helpers.err(ex);
    }
    return list;
}
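For illustration, a hypothetical caller of the oneByOne helper above might look like the sketch below; the OneByOneDemo class and the book.csv path are assumptions for this page, not part of the tutorial project.

    import java.io.Reader;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Arrays;
    import java.util.List;

    public class OneByOneDemo {
        public static void main(String[] args) throws Exception {
            // "book.csv" is a placeholder; oneByOne closes the reader it is given when it finishes.
            try (Reader in = Files.newBufferedReader(Paths.get("book.csv"))) {
                List<String[]> rows = CsvReaderExamples.oneByOne(in);
                rows.forEach(row -> System.out.println(Arrays.toString(row)));
            }
        }
    }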
Example #11
Source File: DefaultAirportProvider.java From MetarParser with MIT License | 6 votes |
/**
 * Initiate airports map.
 */
private void initAirports() {
    Objects.requireNonNull(airportsFile);
    airports = new HashMap<>();
    String[] line;
    try (CSVReader reader = new CSVReaderBuilder(new InputStreamReader(airportsFile, StandardCharsets.UTF_8))
            .withCSVParser(new CSVParser()).withSkipLines(0).build()) {
        while ((line = reader.readNext()) != null) {
            Airport airport = new Airport();
            airport.setName(line[1]);
            airport.setCity(line[2]);
            airport.setCountry(countries.get(line[3]));
            airport.setIata(line[4]);
            airport.setIcao(line[5]);
            airport.setLatitude(Double.parseDouble(line[6]));
            airport.setLongitude(Double.parseDouble(line[7]));
            airport.setAltitude(Integer.parseInt(line[8]));
            airport.setTimezone(line[9]);
            airport.setDst(line[10]);
            airports.put(airport.getIcao(), airport);
        }
    } catch (IOException | CsvValidationException exception) {
        throw new IllegalStateException(exception.getMessage());
    }
}
Example #12
Source File: OurAirportsAirportProvider.java From MetarParser with MIT License | 6 votes |
/**
 * Connects to the airports list and build a map of {@link Airport} with the name as key.
 *
 * @throws CsvValidationException when the parsing of the file fails
 * @throws IOException when network error
 * @throws URISyntaxException when the URI is invalid
 */
public void buildAirport() throws URISyntaxException, IOException, CsvValidationException {
    URI airportsURI = new URI(AIRPORT_URI);
    airports = new HashMap<>();
    try (InputStream airportStream = airportsURI.toURL().openStream();
         CSVReader reader = new CSVReaderBuilder(new InputStreamReader(airportStream, StandardCharsets.UTF_8))
                 .withCSVParser(new CSVParser()).withSkipLines(1).build()) {
        String[] line;
        while ((line = reader.readNext()) != null) {
            Airport airport = new Airport();
            airport.setIcao(line[1]);
            airport.setName(line[3]);
            airport.setLatitude(NumberUtils.toDouble(line[4], 0));
            airport.setLongitude(NumberUtils.toDouble(line[5], 0));
            airport.setAltitude(NumberUtils.toInt(line[6], 0));
            airport.setCountry(countries.get(line[8]));
            airport.setCity(line[10]);
            airport.setIata(line[13]);
            airports.put(airport.getIcao(), airport);
        }
    }
}
Example #13
Source File: StockDataSetIterator.java From StockPrediction with MIT License | 6 votes |
private List<StockData> readStockDataFromFile(String filename, String symbol) {
    List<StockData> stockDataList = new ArrayList<>();
    try {
        for (int i = 0; i < maxArray.length; i++) { // initialize max and min arrays
            maxArray[i] = Double.MIN_VALUE;
            minArray[i] = Double.MAX_VALUE;
        }
        List<String[]> list = new CSVReader(new FileReader(filename)).readAll(); // load all elements in a list
        for (String[] arr : list) {
            if (!arr[1].equals(symbol)) continue;
            double[] nums = new double[VECTOR_SIZE];
            for (int i = 0; i < arr.length - 2; i++) {
                nums[i] = Double.valueOf(arr[i + 2]);
                if (nums[i] > maxArray[i]) maxArray[i] = nums[i];
                if (nums[i] < minArray[i]) minArray[i] = nums[i];
            }
            stockDataList.add(new StockData(arr[0], arr[1], nums[0], nums[1], nums[2], nums[3], nums[4]));
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return stockDataList;
}
Example #14
Source File: CsvContentExtractor.java From baleen with Apache License 2.0 | 6 votes |
@Override
public void doProcessStream(InputStream stream, String source, JCas jCas) throws IOException {
    super.doProcessStream(stream, source, jCas);

    CSVParser parser = new CSVParserBuilder().withSeparator(separator.charAt(0)).build();
    try (CSVReader reader =
            new CSVReaderBuilder(new InputStreamReader(stream, StandardCharsets.UTF_8))
                .withCSVParser(parser)
                .build()) {

        String[] cols = reader.readNext();
        if (cols == null || cols.length < contentColumn) {
            throw new IOException("Not enough columns");
        }

        for (int i = 0; i < cols.length; i++) {
            if (i == (contentColumn - 1)) {
                jCas.setDocumentText(cols[i]);
            } else {
                addMetadata(jCas, i, cols[i]);
            }
        }
    }
}
Example #15
Source File: TraitFileClean.java From systemsgenetics with GNU General Public License v3.0 | 6 votes |
public static void main(String[] args) throws FileNotFoundException, IOException {
    // TODO code application logic here

    File phase3File = new File("C:\\Users\\Sophie Mulc\\Documents\\DEPICT2\\phase3_corrected.psam");
    File traitFile = new File("C:\\Users\\Sophie Mulc\\Documents\\DEPICT2\\TraitFile.txt");
    File probeAnnotationFile = new File("C:\\Users\\Sophie Mulc\\Documents\\DEPICT2\\ProbeAnnotationFile.txt");
    File couplingFile = new File("C:\\Users\\Sophie Mulc\\Documents\\DEPICT2\\CouplingFile.txt");

    //FileReader(String phase3_corrected)
    final CSVParser gmtParser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
    final CSVReader gmtReader = new CSVReaderBuilder(new BufferedReader(new FileReader(phase3File)))
            .withSkipLines(1).withCSVParser(gmtParser).build();

    List<String> iids = new ArrayList<>();
    String[] inputLine;
    while ((inputLine = gmtReader.readNext()) != null) {
        String iid = inputLine[0];
        iids.add(iid);
    }

    trait(iids, traitFile);
    probeAnnotation(probeAnnotationFile);
    coupling(iids, couplingFile);
}
Example #16
Source File: DelimitedRest.java From mobi with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Converts the specified number rows of a CSV file into JSON and returns
 * them as a String.
 *
 * @param input the CSV file to convert into JSON
 * @param numRows the number of rows from the CSV file to convert
 * @param separator a character with the character to separate the columns by
 * @return a string with the JSON of the CSV rows
 * @throws IOException csv file could not be read
 */
private String convertCSVRows(File input, int numRows, char separator) throws IOException {
    Charset charset = getCharset(Files.readAllBytes(input.toPath()));
    try (CSVReader reader = new CSVReader(new InputStreamReader(new FileInputStream(input), charset.name()), separator)) {
        List<String[]> csvRows = reader.readAll();
        JSONArray returnRows = new JSONArray();
        for (int i = 0; i <= numRows && i < csvRows.size(); i++) {
            returnRows.add(i, csvRows.get(i));
        }
        return returnRows.toString();
    }
}
Example #17
Source File: Loader.java From TAcharting with GNU Lesser General Public License v2.1 | 5 votes |
public static BarSeries getDailyBarSeries(String fileName) {
    // load a BarSeries
    InputStream inputStream = Loader.class.getClassLoader().getResourceAsStream(fileName);
    BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
    List<Bar> ticks = new ArrayList<>();
    CSVReader reader;
    String nameInCSV = "";
    try {
        reader = new CSVReaderBuilder(bufferedReader).withSkipLines(1).build();
        String[] line;
        nameInCSV = reader.readNext()[0];
        if (nameInCSV == null || nameInCSV.equals("")) {
            nameInCSV = fileName;
        }
        while ((line = reader.readNext()) != null) {
            ZonedDateTime date = LocalDate.parse(line[0], DATE_FORMAT_Daily).atStartOfDay(ZoneId.systemDefault());
            double close = Double.parseDouble(line[1]);
            double volume = Double.parseDouble(line[2]);
            double open = Double.parseDouble(line[3]);
            double high = Double.parseDouble(line[4]);
            double low = Double.parseDouble(line[5]);
            ticks.add(new BaseBar(Duration.ZERO, date, open, high, low, close, volume, 0, 0, Parameter.numFunction));
        }
        bufferedReader.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (ticks.get(0).getEndTime().isAfter(ticks.get(ticks.size() - 1).getEndTime()))
        Collections.reverse(ticks);
    return new BaseBarSeries(nameInCSV, ticks);
}
Example #18
Source File: SPARQLResultsTSVMappingStrategy.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 5 votes |
@Override
public void captureHeader(CSVReader reader) throws IOException {
    // header is mandatory in SPARQL TSV
    bindingNames = Stream.of(reader.readNext())
            .map(s -> StringUtils.removeStart(s, "?"))
            .collect(Collectors.toList());
}
Example #19
Source File: GtfsFeedImpl.java From pt2matsim with GNU General Public License v2.0 | 5 votes |
/**
 * Basically just reads all routeIds and their corresponding names and types and puts them in {@link #routes}.
 * <p/>
 * routes.txt <i>[https://developers.google.com/transit/gtfs/reference]</i><br/>
 * Transit routes. A route is a group of trips that are displayed to riders as a single service.
 *
 * @throws IOException
 */
protected void loadRoutes() throws IOException {
    log.info("Loading routes.txt");
    int l = 1;
    try {
        CSVReader reader = createCSVReader(root + GtfsDefinitions.Files.ROUTES.fileName);
        String[] header = reader.readNext();
        Map<String, Integer> col = getIndices(header, GtfsDefinitions.Files.ROUTES.columns, GtfsDefinitions.Files.ROUTES.optionalColumns);

        String[] line = reader.readNext();
        while (line != null) {
            l++;
            int routeTypeNr = Integer.parseInt(line[col.get(GtfsDefinitions.ROUTE_TYPE)]);
            ExtendedRouteType extendedRouteType = RouteType.getExtendedRouteType(routeTypeNr);
            if (extendedRouteType == null) {
                log.warn("Route " + line[col.get(GtfsDefinitions.ROUTE_ID)] + " of type " + routeTypeNr + " will be ignored");
                ignoredRoutes.add(line[col.get(GtfsDefinitions.ROUTE_ID)]);
            } else {
                String routeId = line[col.get(GtfsDefinitions.ROUTE_ID)];
                String shortName = line[col.get(GtfsDefinitions.ROUTE_SHORT_NAME)];
                String longName = line[col.get(GtfsDefinitions.ROUTE_LONG_NAME)];
                Route newGtfsRoute = new RouteImpl(routeId, shortName, longName, extendedRouteType);
                routes.put(line[col.get(GtfsDefinitions.ROUTE_ID)], newGtfsRoute);
            }
            line = reader.readNext();
        }
        reader.close();
    } catch (ArrayIndexOutOfBoundsException i) {
        throw new RuntimeException("Line " + l + " in routes.txt is empty or malformed.");
    }
    log.info("... routes.txt loaded");
}
Example #20
Source File: Loader.java From TAcharting with GNU Lesser General Public License v2.1 | 5 votes |
public static BarSeries getHourlyBarSeries(URL file, String name) {
    List<Bar> ticks = new ArrayList<>();
    CSVReader reader;
    String nameInCSV = "";
    try {
        reader = new CSVReaderBuilder(new FileReader(file.getFile())).withSkipLines(1).build();
        String[] line;
        nameInCSV = reader.readNext()[0];
        if (nameInCSV == null || nameInCSV.equals("")) {
            nameInCSV = name;
        }
        while ((line = reader.readNext()) != null) {
            ZonedDateTime date = ZonedDateTime.parse(line[0] + " " + line[1] + " PST", DATE_FORMAT_HOURLY_MINUTE);
            double open = Double.parseDouble(line[2]);
            double high = Double.parseDouble(line[3]);
            double low = Double.parseDouble(line[4]);
            double close = Double.parseDouble(line[5]);
            double volume = Double.parseDouble(line[6]);
            ticks.add(new BaseBar(Duration.ZERO, date, open, high, low, close, volume, 0, 0, Parameter.numFunction));
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (ticks.get(0).getEndTime().isAfter(ticks.get(ticks.size() - 1).getEndTime()))
        Collections.reverse(ticks);
    return new BaseBarSeries(nameInCSV, ticks);
}
Example #21
Source File: FileUtils.java From tutorials with MIT License | 5 votes |
public Line readLine() {
    try {
        if (CSVReader == null)
            initReader();
        String[] line = CSVReader.readNext();
        if (line == null)
            return null;
        return new Line(line[0], LocalDate.parse(line[1], DateTimeFormatter.ofPattern("MM/dd/yyyy")));
    } catch (Exception e) {
        logger.error("Error while reading line in file: " + this.fileName);
        return null;
    }
}
Example #22
Source File: OpenCSVParser.java From quick-csv-streamer with GNU General Public License v2.0 | 5 votes |
public Stream<City> parse(InputStream is) {
    Reader reader = new InputStreamReader(is);
    CSVReader csvReader = new CSVReader(reader);
    Iterator<City> iterator = new Iterator<City>() {
        private boolean isEndReached = false;

        @Override
        public boolean hasNext() {
            return !isEndReached;
        }

        @Override
        public City next() {
            try {
                String[] values = csvReader.readNext();
                if (values == null) {
                    isEndReached = true;
                    return null;
                } else {
                    return toCity(values);
                }
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    };

    Spliterator<City> spliterator = Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED);
    return StreamSupport.stream(spliterator, false).onClose(new Runnable() {
        @Override
        public void run() {
            IOUtils.closeQuietly(csvReader);
        }
    });
}
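As an aside (not part of the quick-csv-streamer project), CSVReader also implements Iterable<String[]>, so a similar lazy stream of raw rows can be sketched directly from reader.iterator() without a hand-written Iterator. The sketch below is only a rough alternative under the assumption that the caller maps each String[] row to its own type and closes the stream; CSVReader's iterator wraps checked parse/IO exceptions in unchecked ones.

    import com.opencsv.CSVReader;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.util.Spliterator;
    import java.util.Spliterators;
    import java.util.stream.Stream;
    import java.util.stream.StreamSupport;

    public class CsvStreamSketch {
        // Streams raw rows lazily; callers map String[] to their own type and must close the stream.
        public static Stream<String[]> rows(InputStream is) {
            CSVReader csvReader = new CSVReader(new InputStreamReader(is));
            Spliterator<String[]> spliterator =
                    Spliterators.spliteratorUnknownSize(csvReader.iterator(), Spliterator.ORDERED);
            return StreamSupport.stream(spliterator, false).onClose(() -> {
                try {
                    csvReader.close();
                } catch (Exception e) {
                    // Ignore close failures in this sketch.
                }
            });
        }
    }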
Example #23
Source File: OurAirportsAirportProvider.java From MetarParser with MIT License | 5 votes |
/**
 * Connects to the countries list and build a map of {@link Country} with the name as key.
 *
 * @throws CsvValidationException when the parsing of the file fails
 * @throws IOException when network error
 * @throws URISyntaxException when the URI is invalid
 */
public void buildCountries() throws URISyntaxException, IOException, CsvValidationException {
    countries = new HashMap<>();
    URI countriesUri = new URI(COUNTRIES_URI);
    try (InputStream countriesStream = countriesUri.toURL().openStream();
         CSVReader reader = new CSVReaderBuilder(new InputStreamReader(countriesStream, StandardCharsets.UTF_8))
                 .withCSVParser(new CSVParser()).withSkipLines(1).build()) {
        String[] line;
        while ((line = reader.readNext()) != null) {
            Country c = new Country();
            c.setName(line[2]);
            countries.put(line[1], c);
        }
    }
}
Example #24
Source File: SearchTestWithCSVDataProvider.java From Selenium-WebDriver-3-Practical-Guide-Second-Edition with MIT License | 5 votes |
@DataProvider(name = "searchWords")
public Iterator<Object[]> provider() throws Exception {
    CSVReader reader = new CSVReader(
            new FileReader("./src/test/resources/data/data.csv"), ',', '\'', 1);
    List<Object[]> myEntries = new ArrayList<Object[]>();
    String[] nextLine;
    while ((nextLine = reader.readNext()) != null) {
        myEntries.add(nextLine);
    }
    reader.close();
    return myEntries.iterator();
}
Example #25
Source File: DefaultAirportProvider.java From MetarParser with MIT License | 5 votes |
/**
 * Initiate countries map.
 */
private void initCountries() {
    Objects.requireNonNull(countriesFile);
    countries = new HashMap<>();
    String[] line;
    try (CSVReader reader = new CSVReaderBuilder(new InputStreamReader(countriesFile, StandardCharsets.UTF_8))
            .withCSVParser(new CSVParser()).withSkipLines(0).build()) {
        while ((line = reader.readNext()) != null) {
            Country country = new Country();
            country.setName(line[0]);
            countries.put(country.getName(), country);
        }
    } catch (IOException | CsvValidationException exception) {
        throw new IllegalStateException(exception.getMessage());
    }
}
Example #26
Source File: SparseDataSet.java From hlta with GNU General Public License v3.0 | 5 votes |
/**
 * Converts the csv input data, where rows represent datacases and columns represent variables, to
 * the form of tuples. For example, in the original dataset, if each datacase is a document, then we would
 * have tuples like (doc,word1), (doc,word2)...
 * Here word1 represents the name of the corresponding variable in the .csv file.
 * We then write this converted dataset to a file for future use.
 * @param DataSetNameCsv .csv datafile name
 * @param OutputDataSetPath the path where the converted input data format will be saved
 * @throws IOException
 */
public static void convertCSVtoTuples(String DataSetNameCsv, String OutputDataSetPath) throws IOException {
    PrintWriter out = new PrintWriter(OutputDataSetPath + File.separator + "SparseDataInputFormat.txt");

    // get the reader, split char = , and quotation char = "
    // We start from line 0
    CSVReader reader = new CSVReader(new FileReader(DataSetNameCsv), ',', '"', 0);
    Iterator<String[]> iter = reader.iterator();

    // Line 0 should contain the variable names so read them first
    String[] varName = iter.next();

    int row_id = 1;
    while (iter.hasNext()) {
        String[] datacase = iter.next();
        for (int i = 0; i < datacase.length; i++) {
            // For each datacase get the variables which are 1
            if (Integer.parseInt(datacase[i]) == 1) {
                out.println(Integer.toString(row_id) + "," + varName[i]); // write a (doc,varName) tuple
            }
        }
        row_id++;
    }
    out.close();
    reader.close();
}
Example #27
Source File: TabularResultResponseProcessor.java From quandl4j with Apache License 2.0 | 5 votes |
/**
 * {@inheritDoc}
 */
public TabularResult process(final InputStream inputStream, final Request request) {
    CSVReader reader = new CSVReader(new InputStreamReader(inputStream));
    try {
        String[] headerRow = reader.readNext();
        if (headerRow != null) {
            HeaderDefinition headerDef = HeaderDefinition.of(Arrays.asList(headerRow));
            List<Row> rows = new ArrayList<Row>();
            String[] next = reader.readNext();
            while (next != null) {
                if (next.length > headerRow.length) {
                    // This row is not the same length as the header row, record how long it is so we can patch in a longer header afterwards.
                    String[] stretchedHeaderRow = new String[next.length];
                    System.arraycopy(headerRow, 0, stretchedHeaderRow, 0, headerRow.length);
                    for (int i = headerRow.length; i < next.length; i++) {
                        stretchedHeaderRow[i] = "Column " + i;
                    }
                    headerRow = stretchedHeaderRow;
                    // create a new header with the extended column labels.
                    headerDef = HeaderDefinition.of(Arrays.asList(headerRow));
                    // NOTE: we DON'T go back and patch rows that we've already created. This is because the only case the header is used is
                    // to look up rows by name, and given those rows don't contain data for those columns, the logic in Row now just returns
                    // null in that case (the case where you ask for a row that isn't present).
                }
                Row row = Row.of(headerDef, next);
                rows.add(row);
                next = reader.readNext();
            }
            reader.close();
            return TabularResult.of(headerDef, rows);
        } else {
            reader.close();
            throw new QuandlRuntimeException("No data returned");
        }
    } catch (IOException | CsvValidationException e) {
        throw new QuandlRuntimeException("Error reading input stream", e);
    }
}