Java Code Examples for com.csvreader.CsvReader#setSafetySwitch()

The following examples show how to use com.csvreader.CsvReader#setSafetySwitch(). Each example is taken from an open source project; the source file, project, and license are noted above the snippet.
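All of the snippets follow the same basic pattern: construct a CsvReader, call setSafetySwitch(...) before reading, then iterate the records. The safety switch is enabled by default and makes the parser abort when a single column grows past the library's built-in length cap, so projects that expect very wide or free-form columns turn it off. Below is a minimal sketch of that pattern; the file name and the "payload" column are placeholders, not taken from any example on this page.

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import com.csvreader.CsvReader;

public class SafetySwitchDemo {
    public static void main(String[] args) throws IOException {
        // "data.csv" and the "payload" header are placeholders for this sketch.
        CsvReader reader = new CsvReader("data.csv", ',', StandardCharsets.UTF_8);
        try {
            // Default is true; disabling it allows columns longer than the
            // parser's internal limit, at the cost of less protection against
            // malformed or mis-encoded input.
            reader.setSafetySwitch(false);
            reader.readHeaders();

            while (reader.readRecord()) {
                System.out.println(reader.get("payload"));
            }
        } finally {
            reader.close();
        }
    }
}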
Example 1
Source File: BulkResultSetTest.java    From components with Apache License 2.0
private int prepareSafetySwitchTest(boolean safetySwitchParameter, int columnLength) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    CsvWriter csvWriter = new CsvWriter(new BufferedOutputStream(out), ',', Charset.forName("UTF-8"));

    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < columnLength; i++) {
        sb.append("a");
    }
    String[] data = new String[] { "fieldValueA", "fieldValueB", sb.toString() };
    csvWriter.writeRecord(data);
    csvWriter.close();

    CsvReader csvReader = new CsvReader(new BufferedInputStream(new ByteArrayInputStream(out.toByteArray())), ',',
            Charset.forName("UTF-8"));
    csvReader.setSafetySwitch(safetySwitchParameter);
    BulkResultSet resultSet = new BulkResultSet(csvReader, Arrays.asList("fieldA", "fieldB", "fieldC"));
    BulkResult result = resultSet.next();
    return ((String) result.getValue("fieldC")).length();
}
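The helper above builds an in-memory CSV whose third column is columnLength characters long and returns the length that CsvReader hands back through BulkResultSet. A hedged sketch of how it might be exercised follows (hypothetical test names; the 200,000-character length is simply assumed to exceed the parser's default column cap):

@Test
public void readsOversizedColumnWhenSafetySwitchIsOff() throws IOException {
    // With the safety switch disabled, the full column is expected back.
    assertEquals(200_000, prepareSafetySwitchTest(false, 200_000));
}

@Test(expected = IOException.class)
public void rejectsOversizedColumnWhenSafetySwitchIsOn() throws IOException {
    // With the safety switch enabled, reading the oversized column is
    // expected to fail with an IOException from CsvReader.
    prepareSafetySwitchTest(true, 200_000);
}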
 
Example 2
Source File: RulesDbClientImpl.java    From ipst with Mozilla Public License 2.0
@Override
public Collection<RuleId> listRules(String workflowId, RuleAttributeSet attributeSet) {
    Objects.requireNonNull(workflowId);

    String path = RULES_PREFIX + workflowId + "/data.csv";
    Map<String, String> query = ImmutableMap.of("start", "0",
                                                "count", "-1",
                                                "headers", "true",
                                                "cols", "algoType,contingencyId,indexType");

    try {
        CsvReader csvReader = new CsvReader(httpClient.getHttpRequest(new HistoDbUrl(config, path, query)), ',', StandardCharsets.UTF_8);
        try {
            csvReader.setSafetySwitch(false);
            csvReader.readHeaders();

            List<RuleId> ruleIds = new ArrayList<>();

            while (csvReader.readRecord()) {
                String[] values = csvReader.getValues();
                ruleIds.add(new RuleId(RuleAttributeSet.valueOf(values[0]),
                            new SecurityIndexId(values[1], SecurityIndexType.fromLabel(values[2]))));
            }

            return ruleIds;
        } finally {
            csvReader.close();
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
 
Example 3
Source File: Utils.java    From sailfish-core with Apache License 2.0
public static long determineStartSecFromParsedFile(String parsedFileName) throws IOException
{
	CsvReader parsedReader = new CsvReader(parsedFileName);

	parsedReader.setSafetySwitch(false);

	parsedReader.readHeaders();

	parsedReader.readRecord();

	long startSec = Long.parseLong(parsedReader.getValues()[0]);

	parsedReader.close();

	return startSec;
}
 
Example 4
Source File: Utils.java    From sailfish-core with Apache License 2.0
public static void generateInputFile(String folderName, long startTimeInSec, long finishTimeInSec, String generatingFile) throws IOException
{
    File folder = new File(folderName);

    if ( folder.exists() )
    {
        File[] files = folder.listFiles();

        Arrays.sort(files, new FileSpecialComparator(true, false));

        if(files.length == 0) {
            return;
        }

        int index = files.length - 1;
        for ( ; index >= 0; --index )
        {
            long startInterval = determineStartInterval(files[index].getName());

            if(startInterval < startTimeInSec * 1000000) {
                break;
            }
        }

        if(index < 0) {
            index = 0;
        }

        BufferedWriter writer = new BufferedWriter(new FileWriter(generatingFile));

        for ( int i = index; i < files.length; ++i )
        {
            if ( isDebugged )
            {
                logger.debug(files[i].getName());
            }

            CsvReader reader = new CsvReader(folderName + File.separator + files[i].getName());
            reader.setSafetySwitch(false);

            reader.readRecord();

            if ( i == index )
            {
                writer.write(reader.getRawRecord());
                writer.newLine();
            }

            while ( reader.readRecord() )
            {
                long timestamp = Long.parseLong(reader.getValues()[0]);

                if(timestamp >= startTimeInSec * 1000000 && (timestamp < finishTimeInSec * 1000000 || finishTimeInSec == Long.MAX_VALUE)) {
                    writer.write(reader.getRawRecord());
                    writer.newLine();
                } else if(timestamp >= startTimeInSec * 1000000) {
                    break;
                }
            }

            reader.close();
        }

        writer.close();

    } else {
        throw new EPSCommonException("Could not find [" + folderName + "] folder with files");
    }
}
 
Example 5
Source File: Utils.java    From sailfish-core with Apache License 2.0
public static void generateMDLatFile(String folderName, long startTimeInSec, long finishTimeInSec, String generatingFile, SimpleDateFormat format, String analyzedField, long startSec) throws IOException
{
    File folder = new File(folderName);

    if ( folder.exists() )
    {
        File[] files = folder.listFiles();

        Arrays.sort(files, new FileSpecialComparator(true, false));

        if(files.length == 0) {
            return;
        }

        int index = files.length - 1;
        for ( ; index >= 0; --index )
        {
            long startInterval = determineStartInterval(files[index].getName());

            if(startInterval < startTimeInSec * 1000000) {
                break;
            }
        }

        if(index < 0) {
            index = 0;
        }

        try(BufferedWriter writer = new BufferedWriter(new FileWriter(generatingFile))) {
            int fieldIndex = -1;

            for(int i = index; i < files.length; ++i) {
                if(isDebugged) {
                    logger.debug(files[i].getName());
                }

                CsvReader reader = new CsvReader(folderName + File.separator + files[i].getName());
                reader.setSafetySwitch(false);

                reader.readRecord();

                if(i == index) {

                    String[] headers = reader.getValues();

                    for(int j = 0; j < headers.length; ++j) {
                        if(headers[j].equals(analyzedField)) {
                            fieldIndex = j;
                            break;
                        }
                    }

                    writer.write("Timestamp,Latency");

                    for(int j = 3; j < headers.length; ++j) {
                        writer.write("," + headers[j]);
                    }

                    writer.newLine();

                    if(fieldIndex == -1) {
                        throw new IllegalArgumentException("Could not find fieldName = [" + analyzedField + "] in inputFile = [" + files[i].getName() + "]");
                    }
                }

                while(reader.readRecord()) {
                    String[] values = reader.getValues();

                    long timestamp = Long.parseLong(values[0]);

                    if(timestamp >= startTimeInSec * 1000000 && (timestamp < finishTimeInSec * 1000000 || finishTimeInSec == Long.MAX_VALUE)) {
                        try {
                            String sendingTime = values[fieldIndex];

                            long sendingTimestamp = format.parse(sendingTime + " +0000").getTime();

                            sendingTimestamp = (sendingTimestamp - startSec * 1000) * 1000;

                            writer.write(values[0] + ',' + (timestamp - sendingTimestamp));

                            for(int j = 3; j < values.length; ++j) {
                                writer.write("," + values[j]);
                            }

                            writer.newLine();
                        } catch(ParseException e) {
                            // records whose sending time cannot be parsed are skipped
                        }
                    } else if(timestamp >= startTimeInSec * 1000000) {
                        break;
                    }
                }

                reader.close();
            }
        }
    } else {
        throw new EPSCommonException("Could not find [" + folderName + "] folder with files");
    }
}
 
Example 6
Source File: Utils.java    From sailfish-core with Apache License 2.0
public static void generateFakeInputFile(String folderName, long startTimeInSec, long finishTimeInSec, String generatingFile) throws IOException
{
    File folder = new File(folderName);

    if ( folder.exists() )
    {
        File[] files = folder.listFiles();

        Arrays.sort(files, new FileSpecialComparator(true, false));

        if(files.length == 0) {
            return;
        }

        int index = files.length - 1;
        for ( ; index >= 0; --index )
        {
            long startInterval = determineStartInterval(files[index].getName());

            if(startInterval < startTimeInSec * 1000000) {
                break;
            }
        }

        if(index < 0) {
            index = 0;
        }

        BufferedWriter writer = new BufferedWriter(new FileWriter(generatingFile));

        boolean firstRecordWritten = false;

        for ( int i = index; i < files.length; ++i )
        {
            if ( isDebugged )
            {
                logger.debug(files[i].getName());
            }

            CsvReader reader = new CsvReader(folderName + File.separator + files[i].getName());
            reader.setSafetySwitch(false);

            reader.readRecord();

            if ( i == index )
            {
                writer.write(reader.getRawRecord());
                writer.newLine();
            }

            if ( !firstRecordWritten || i == files.length - 1)
            {
                String lastRecord = "";

                while ( reader.readRecord() )
                {
                    long timestamp = Long.parseLong(reader.getValues()[0]);

                    lastRecord = reader.getRawRecord();

                    if ( !firstRecordWritten )
                    {
                        if(timestamp >= startTimeInSec * 1000000 && (timestamp < finishTimeInSec * 1000000 || finishTimeInSec == Long.MAX_VALUE)) {
                            writer.write(reader.getRawRecord());
                            writer.newLine();
                            firstRecordWritten = true;
                        } else if(timestamp >= startTimeInSec * 1000000) {
                            break;
                        }
                    }
                }

                writer.write(lastRecord);
            }

            reader.close();
        }

        writer.close();

    } else {
        throw new EPSCommonException("Could not find [" + folderName + "] folder with files");
    }
}
 
Example 7
Source File: Utils.java    From sailfish-core with Apache License 2.0
public static void mergeFiles(String folderName, String generatingFile) throws IOException
{
    File folder = new File(folderName);

    if ( folder.exists() )
    {
        File[] files = folder.listFiles();

        Arrays.sort(files, new FileComparator(CompType.TIME, "", false));

        if(files.length == 0) {
            return;
        }

        BufferedWriter writer = new BufferedWriter(new FileWriter(generatingFile));

        for ( int i = 0; i < files.length; ++i )
        {
            CsvReader reader = new CsvReader(folderName + File.separator + files[i].getName());
            reader.setSafetySwitch(false);

            reader.readRecord();

            if ( i == 0 )
            {
                writer.write(reader.getRawRecord());
                writer.newLine();
            }

            while ( reader.readRecord() )
            {
                writer.write(reader.getRawRecord());
                writer.newLine();
            }

            reader.close();
        }

        writer.close();

    } else {
        throw new EPSCommonException("Could not find [" + folderName + "] folder with files");
    }
}
 
Example 8
Source File: DefaultCsvImportService.java    From dhis2-core with BSD 3-Clause "New" or "Revised" License
@Override
public Metadata fromCsv( InputStream input, CsvImportOptions options )
    throws IOException
{
    CsvReader reader = CsvUtils.getReader( input );
    reader.setSafetySwitch( false ); // Disabled due to large geometry values for org units

    if ( options.isFirstRowIsHeader() )
    {
        reader.readRecord(); // Ignore first row
    }

    Metadata metadata = new Metadata();

    switch ( options.getImportClass() )
    {
        case DATA_ELEMENT:
            metadata.setDataElements( dataElementsFromCsv( reader ) );
            break;
        case DATA_ELEMENT_GROUP:
            metadata.setDataElementGroups( dataElementGroupsFromCsv( reader ) );
            break;
        case DATA_ELEMENT_GROUP_MEMBERSHIP:
            metadata.setDataElementGroups( dataElementGroupMembersFromCsv( reader ) );
            break;
        case INDICATOR_GROUP_MEMBERSHIP:
            metadata.setIndicatorGroups( indicatorGroupMembersFromCsv( reader ) );
            break;
        case CATEGORY_OPTION:
            metadata.setCategoryOptions( categoryOptionsFromCsv( reader ) );
            break;
        case CATEGORY:
            metadata.setCategories( categoriesFromCsv( reader ) );
            break;
        case CATEGORY_COMBO:
            metadata.setCategoryCombos( categoryCombosFromCsv( reader ) );
            break;
        case CATEGORY_OPTION_GROUP:
            metadata.setCategoryOptionGroups( categoryOptionGroupsFromCsv( reader ) );
            break;
        case ORGANISATION_UNIT:
            metadata.setOrganisationUnits( orgUnitsFromCsv( reader ) );
            break;
        case ORGANISATION_UNIT_GROUP:
            metadata.setOrganisationUnitGroups( orgUnitGroupsFromCsv( reader ) );
            break;
        case ORGANISATION_UNIT_GROUP_MEMBERSHIP:
            metadata.setOrganisationUnitGroups( orgUnitGroupMembersFromCsv( reader ) );
            break;
        case VALIDATION_RULE:
            metadata.setValidationRules( validationRulesFromCsv( reader ) );
            break;
        case OPTION_SET:
            setOptionSetsFromCsv( reader, metadata );
            break;
        case OPTION_GROUP:
            setOptionGroupsFromCsv( reader, metadata );
            break;
        case OPTION_GROUP_SET:
            metadata.setOptionGroupSets( setOptionGroupSetFromCsv( reader ) );
            break;
        case OPTION_GROUP_SET_MEMBERSHIP:
            metadata.setOptionGroupSets( optionGroupSetMembersFromCsv( reader ) );
            break;
        default:
            break;
    }

    return metadata;
}
 
Example 9
Source File: CSVMappingGenerator.java    From GeoTriples with Apache License 2.0
public void run() throws IOException {
	CsvReader reader = new CsvReader(new FileInputStream(pathToShapefile), Charset.defaultCharset());
	reader.setDelimiter(separator);
	reader.setSafetySwitch(false);

	reader.readHeaders();
	// Iterate the rows

	Path p = Paths.get(pathToShapefile);
	String tmp = p.getFileName().toString();
	String typeName = tmp.substring(0, tmp.lastIndexOf('.'));
	triplesMaps.put(typeName, "");
	triplesMaps.put(typeName, triplesMaps.get(typeName) + printTriplesMap(typeName));
	triplesMaps.put(typeName, triplesMaps.get(typeName) + printLogicalSource(typeName));
	triplesMaps.put(typeName, triplesMaps.get(typeName) + printSubjectMap(baseURI, typeName));

	boolean hasgeometry = false;
	String typeNameGeo = typeName + "_Geometry";
	WKTReader wktReader = new WKTReader();
	String geometry_identifier = null;
	boolean has_record = reader.readRecord();
	for (String header : reader.getHeaders()) {
		String identifier = header;
		if (identifier.equalsIgnoreCase("the_geom")) {
			geometry_identifier = identifier;
			hasgeometry = true;
			continue;
		}
		if (has_record) {
			try {
				if (wktReader.read(reader.get(identifier)) != null) {
					hasgeometry = true;
					geometry_identifier = identifier;
					continue;
				}
			} catch (ParseException e1) {
				System.err.println(reader.get(identifier));
			}
		}

		String datatype = TranslateDataTypeToXSD("String");
		triplesMaps.put(typeName,
				triplesMaps.get(typeName) + printPredicateObjectMap(identifier, identifier, datatype, typeName));

	}

	if (hasgeometry) {
		triplesMaps
				.put(typeName,
						triplesMaps.get(typeName) + printPredicateObjectMap(true, "hasGeometry",
								baseURI + (baseURI.endsWith("/") ? "" : "/") + typeName
										+ "/Geometry/{GeoTriplesID}",
								null, null, "ogc", null, typeName, true, false));
		triplesMaps.put(typeNameGeo, "");
		triplesMaps.put(typeNameGeo, triplesMaps.get(typeNameGeo) + printTriplesMap(typeNameGeo));
		triplesMaps.put(typeNameGeo, triplesMaps.get(typeNameGeo) + printLogicalSource(typeName));
		triplesMaps.put(typeNameGeo,
				triplesMaps.get(typeNameGeo) + printSubjectMap(baseURI, typeName, "ogc", true));
		triplesMaps.put(typeNameGeo,
				triplesMaps.get(typeNameGeo) + printGEOPredicateObjectMaps(geometry_identifier));
	}
	printmapping();
	printontology();
}