org.supercsv.cellprocessor.ParseDate Java Examples

The following examples show how to use org.supercsv.cellprocessor.ParseDate. Each example is taken from an open source project; the source file, project, and license are noted above the code.
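ParseDate is a cell processor that converts a String column into a java.util.Date using a SimpleDateFormat pattern (for example "dd/MM/yyyy"). As a quick orientation before the project examples, here is a minimal, self-contained sketch; the CSV value and class name are made up for illustration, and the date pattern is just one common choice.

import java.io.StringReader;
import java.util.Date;
import java.util.List;

import org.supercsv.cellprocessor.ParseDate;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.io.CsvListReader;
import org.supercsv.io.ICsvListReader;
import org.supercsv.prefs.CsvPreference;

public class ParseDateSketch {

	public static void main(String[] args) throws Exception {
		// A single hypothetical CSV row containing one date column.
		String csv = "13/06/1945";
		ICsvListReader listReader = new CsvListReader(new StringReader(csv),
			CsvPreference.STANDARD_PREFERENCE);
		try {
			// ParseDate converts the String cell into a java.util.Date
			// using the given SimpleDateFormat pattern.
			CellProcessor[] processors = { new ParseDate("dd/MM/yyyy") };
			List<Object> row = listReader.read(processors);
			Date birthDate = (Date) row.get(0);
			System.out.println(birthDate); // the parsed java.util.Date
		} finally {
			listReader.close();
		}
	}
}

In practice ParseDate is placed in a CellProcessor[] array with one entry per CSV column, which is exactly what the project examples below do.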
Example #1
Source File: Reading.java    From super-csv with Apache License 2.0
/**
 * Sets up the processors used for the examples. There are 11 CSV columns, so 11 processors are defined. Empty
 * columns are read as null (hence the NotNull() for mandatory columns).
 * 
 * @return the cell processors
 */
private static CellProcessor[] getProcessors() {
	
	final String emailRegex = "[a-z0-9\\._]+@[a-z0-9\\.]+"; // just an example, not very robust!
	StrRegEx.registerMessage(emailRegex, "must be a valid email address");
	
	final CellProcessor[] processors = new CellProcessor[] { new UniqueHashCode(), // customerNo (must be unique)
		new NotNull(), // firstName
		new NotNull(), // lastName
		new ParseDate("dd/MM/yyyy"), // birthDate
	new ParseSqlTime("HH:mm:ss"), // birthTime
		new NotNull(), // mailingAddress
		new Optional(new ParseBool()), // married
		new Optional(new ParseInt()), // numberOfKids
		new NotNull(), // favouriteQuote
		new StrRegEx(emailRegex), // email
		new LMinMax(0L, LMinMax.MAX_LONG) // loyaltyPoints
	};
	
	return processors;
}
 
Example #2
Source File: SuperCsvBOMTest.java    From super-csv with Apache License 2.0
public void ReadTestCSVFile(Reader reader) throws IOException {
	ICsvBeanReader beanReader = new CsvBeanReader(reader, CsvPreference.STANDARD_PREFERENCE);
	final String[] header = beanReader.getHeader(true);
	assertEquals("customerNo", header[0]);
	CustomerBean customer = null;
	final String emailRegex = "[a-z0-9\\._]+@[a-z0-9\\.]+"; // just an example, not very robust!
	StrRegEx.registerMessage(emailRegex, "must be a valid email address");
	final CellProcessor[] processors = new CellProcessor[]{new UniqueHashCode(), // customerNo (must be unique)
			new NotNull(), // firstName
			new NotNull(), // lastName
			new ParseDate("dd/MM/yyyy"), // birthDate
			new ParseSqlTime("HH:mm:ss"), // birthTime
			new NotNull(), // mailingAddress
			new Optional(new ParseBool()), // married
			new Optional(new ParseInt()), // numberOfKids
			new NotNull(), // favouriteQuote
			new StrRegEx(emailRegex), // email
			new LMinMax(0L, LMinMax.MAX_LONG) // loyaltyPoints
	};
	customer = beanReader.read(CustomerBean.class, header, processors);
	assertEquals("1", customer.getCustomerNo());
	assertEquals("John", customer.getFirstName());
	assertEquals("[email protected]", customer.getEmail());
	assertEquals(0, customer.getLoyaltyPoints());
	beanReader.close();
}
 
Example #3
Source File: ReadingFeaturesTest.java    From super-csv with Apache License 2.0
@Test
public void testConvertsToBasicObjects() throws Exception {
	String csv = "Connor|John|16|1999-07-12|6" + decimalFormatSymbols.getDecimalSeparator() + "65\r\n";
	String[] mapping = { "lastName", "firstName", "age", "birthDate", "savings" };
	CellProcessor[] processors = { new NotNull(), new NotNull(), new ParseInt(), new ParseDate("yyyy-MM-dd"),
		new ParseBigDecimal(decimalFormatSymbols) };
	
	CsvPreference customPreference = new Builder('"', '|', "\r\n").build();
	CsvBeanReader beanReader = new CsvBeanReader(new StringReader(csv), customPreference);
	FeatureBean character = beanReader.read(FeatureBean.class, mapping, processors);
	
	Assert.assertNotNull(character);
	Assert.assertEquals("John", character.getFirstName());
	Assert.assertEquals("Connor", character.getLastName());
	Assert.assertEquals(16, character.getAge());
	Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd").parse("1999-07-12"), character.getBirthDate());
	Assert.assertEquals(new BigDecimal(6.65, new MathContext(3)), character.getSavings());
}
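Note that this test (and the identical testConverterSupport in Example #4) uses a decimalFormatSymbols field declared elsewhere in ReadingFeaturesTest and not shown in this excerpt. A plausible declaration, offered only as an assumption about the missing context, would be:

	// Hypothetical declaration; the actual test class may construct it differently
	// (requires java.text.DecimalFormatSymbols and java.util.Locale).
	private final DecimalFormatSymbols decimalFormatSymbols = new DecimalFormatSymbols(Locale.getDefault());

Splicing decimalFormatSymbols.getDecimalSeparator() into the CSV string keeps the test locale-independent, and passing the same symbols to ParseBigDecimal means the savings column is parsed with a matching decimal separator.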
 
Example #4
Source File: ReadingFeaturesTest.java    From super-csv with Apache License 2.0
@Test
public void testConverterSupport() throws Exception {
	String csv = "Connor|John|16|1999-07-12|6" + decimalFormatSymbols.getDecimalSeparator() + "65\r\n";
	String[] mapping = { "lastName", "firstName", "age", "birthDate", "savings" };
	CellProcessor[] processors = { new NotNull(), new NotNull(), new ParseInt(), new ParseDate("yyyy-MM-dd"),
		new ParseBigDecimal(decimalFormatSymbols) };
	
	CsvPreference customPreference = new Builder('"', '|', "\r\n").build();
	CsvBeanReader beanReader = new CsvBeanReader(new StringReader(csv), customPreference);
	FeatureBean character = beanReader.read(FeatureBean.class, mapping, processors);
	
	Assert.assertNotNull(character);
	Assert.assertEquals("John", character.getFirstName());
	Assert.assertEquals("Connor", character.getLastName());
	Assert.assertEquals(16, character.getAge());
	Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd").parse("1999-07-12"), character.getBirthDate());
	Assert.assertEquals(new BigDecimal(6.65, new MathContext(3)), character.getSavings());
}
 
Example #5
Source File: AbstractCsvParser.java    From attic-apex-malhar with Apache License 2.0
public void initialise(String[] properties, CellProcessor[] processors)
{
  for (int i = 0; i < getFields().size(); i++) {
    FIELD_TYPE type = getFields().get(i).type;
    properties[i] = getFields().get(i).name;
    if (type == FIELD_TYPE.DOUBLE) {
      processors[i] = new Optional(new ParseDouble());
    } else if (type == FIELD_TYPE.INTEGER) {
      processors[i] = new Optional(new ParseInt());
    } else if (type == FIELD_TYPE.FLOAT) {
      processors[i] = new Optional(new ParseDouble());
    } else if (type == FIELD_TYPE.LONG) {
      processors[i] = new Optional(new ParseLong());
    } else if (type == FIELD_TYPE.SHORT) {
      processors[i] = new Optional(new ParseInt());
    } else if (type == FIELD_TYPE.STRING) {
      processors[i] = new Optional();
    } else if (type == FIELD_TYPE.CHARACTER) {
      processors[i] = new Optional(new ParseChar());
    } else if (type == FIELD_TYPE.BOOLEAN) {
      processors[i] = new Optional(new ParseBool());
    } else if (type == FIELD_TYPE.DATE) {
      processors[i] = new Optional(new ParseDate("dd/MM/yyyy"));
    }
  }

}
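Every processor in this mapping is wrapped in Optional, so an empty column passes through as null instead of causing a parse failure. The following standalone sketch (hypothetical CSV data, not from the Malhar project) shows that behaviour for Optional(new ParseDate(...)):

import java.io.StringReader;
import java.util.List;

import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ParseDate;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.io.CsvListReader;
import org.supercsv.io.ICsvListReader;
import org.supercsv.prefs.CsvPreference;

public class OptionalParseDateSketch {

	public static void main(String[] args) throws Exception {
		// Hypothetical row: the second column is empty, so it is read as null.
		String csv = "12/07/1999,\n";
		ICsvListReader reader = new CsvListReader(new StringReader(csv),
			CsvPreference.STANDARD_PREFERENCE);
		try {
			CellProcessor[] processors = {
				new Optional(new ParseDate("dd/MM/yyyy")), // populated -> java.util.Date
				new Optional(new ParseDate("dd/MM/yyyy"))  // empty -> null, ParseDate is skipped
			};
			List<Object> row = reader.read(processors);
			System.out.println(row.get(0)); // the parsed Date
			System.out.println(row.get(1)); // null
		} finally {
			reader.close();
		}
	}
}

This is why the parser above tolerates sparse input rows while still converting populated date columns.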
 
Example #6
Source File: ReadingFeaturesTest.java    From super-csv with Apache License 2.0
@Test
public void testDateSupport() throws Exception {
	String csv = "1999-07-12";
	String[] mapping = { "birthDate" };
	CellProcessor[] processors = { new ParseDate("yyyy-MM-dd") };
	
	CsvBeanReader beanReader = new CsvBeanReader(new StringReader(csv), STANDARD_PREFERENCE);
	FeatureBean character = beanReader.read(FeatureBean.class, mapping, processors);
	
	Assert.assertNotNull(character);
	Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd").parse("1999-07-12"), character.getBirthDate());
}
 
Example #7
Source File: CacheLoader.java    From geode-demo-application with Apache License 2.0
/**
 * Call this to load the Data, it uses the CSV files in its classpath
 */
public void loadData() {
	
	startTime = new Date().getTime();
	
	//load the customers
	String[] nameMapping = new String[]{"city","birthday","id","name", "emailAddress"};
	CellProcessor[] processors = new CellProcessor[] {
		new NotNull(), //city
		new ParseDate("dd-MM-yyyy"), //birthday
		new NotNull(), //id
		new NotNull(), //name
		new NotNull() //email
	};
	loadCustomers("customer.csv",nameMapping,processors);

	//load the products
	nameMapping = new String[]{"stockOnHand","wholeSalePrice","brand","type", "color", "size", "gender", "id"};
	processors = new CellProcessor[] {
		new ParseInt(),//stockOnHand
		new ParseDouble(),//wholeSalePrice
		new NotNull(),//brand
		new NotNull(),//type
		new NotNull(),//color
		new ParseDouble(),//size
		new NotNull(),//gender
		new NotNull()//productId
	};
	loadProducts("products.csv",nameMapping,processors);

	//load the historic transactions - these are just randomly generated and do not respect stock quantity
	nameMapping = new String[]{"customerId","transactionDate","productId","quantity", "retailPrice", "id", "markUp", "orderStatus"};
	processors = new CellProcessor[] {
		new NotNull(),//customerId
		new ParseDate("dd-MM-yyyy"),//transactionDate
		new NotNull(),//productId
		new ParseInt(),//quantity
		new ParseDouble(),//retailPrice
		new NotNull(),//transactionId
		new ParseDouble(),//markUp
		new NotNull()//order status
	};
	loadTransactions("transactions.csv",nameMapping,processors);

	//load the mark ups
	nameMapping = new String[]{"id", "rate","levelName","qualifyingTransactionCountFloor","qualifyingTransactionCountCeiling"};
	processors = new CellProcessor[] {
		new NotNull(),//id
		new ParseDouble(),//rate
		new NotNull(),//levelName
		new ParseInt(),//qualifyingTransactionCountFloor
		new ParseInt()//qualifyingTransactionCountCeiling
	};
	loadMarkUps("markUps.csv",nameMapping,processors);

	//clean out the alerts
	for (Alert alert : alertRepository.findAll()) {
		alertRepository.delete(alert);
	}
	long endTime = new Date().getTime();
	long timeToLoad = endTime - startTime;
	activityLog.add("Total Loading Time: " + timeToLoad/1000 + " seconds");
	closeBeanReader();
	writeOutLogs();
}
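The loadCustomers, loadProducts, loadTransactions and loadMarkUps helpers are not shown in this excerpt. As a rough, standalone sketch of how the customer nameMapping and processors could drive a read loop (hypothetical CSV data, and a CsvMapReader rather than whatever reader the project actually uses):

import java.io.StringReader;
import java.util.Map;

import org.supercsv.cellprocessor.ParseDate;
import org.supercsv.cellprocessor.constraint.NotNull;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.io.CsvMapReader;
import org.supercsv.io.ICsvMapReader;
import org.supercsv.prefs.CsvPreference;

public class CustomerLoadSketch {

	public static void main(String[] args) throws Exception {
		// Hypothetical data in the same column order as the customer nameMapping above.
		String csv = "city,birthday,id,name,emailAddress\n"
			+ "London,13-06-1945,1,John Dunbar,jdunbar@example.com\n";
		String[] nameMapping = { "city", "birthday", "id", "name", "emailAddress" };
		CellProcessor[] processors = {
			new NotNull(),               // city
			new ParseDate("dd-MM-yyyy"), // birthday -> java.util.Date
			new NotNull(),               // id
			new NotNull(),               // name
			new NotNull()                // emailAddress
		};
		ICsvMapReader mapReader = new CsvMapReader(new StringReader(csv),
			CsvPreference.STANDARD_PREFERENCE);
		try {
			mapReader.getHeader(true); // skip the header row
			Map<String, Object> row;
			while ((row = mapReader.read(nameMapping, processors)) != null) {
				System.out.println(row.get("name") + " born " + row.get("birthday"));
			}
		} finally {
			mapReader.close();
		}
	}
}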