Usage of au.com.bytecode.opencsv.CSVReader in the hale project by halestudio: the loadFromSource method of the CSVSchemaReader class.
@Override
protected Schema loadFromSource(ProgressIndicator progress, IOReporter reporter) throws IOProviderConfigurationException, IOException {
// $NON-NLS-1$
progress.begin("Load CSV schema", ProgressIndicator.UNKNOWN);
String namespace = CSVFileIO.CSVFILE_NS;
DefaultSchema schema = new DefaultSchema(namespace, getSource().getLocation());
CSVReader reader = CSVUtil.readFirst(this);
try {
// initializes the first line of the table (names of the columns)
firstLine = reader.readNext();
// create type definition
String typename = getParameter(CommonSchemaConstants.PARAM_TYPENAME).as(String.class);
if (typename == null || typename.isEmpty()) {
reporter.setSuccess(false);
reporter.error(new IOMessageImpl("No Typename was set", null));
return null;
}
DefaultTypeDefinition type = new DefaultTypeDefinition(new QName(typename));
// constraints on main type
type.setConstraint(MappingRelevantFlag.ENABLED);
type.setConstraint(MappableFlag.ENABLED);
type.setConstraint(HasValueFlag.DISABLED);
type.setConstraint(AbstractFlag.DISABLED);
// set metadata for main type
type.setLocation(getSource().getLocation());
StringBuffer defaultPropertyTypeBuffer = new StringBuffer();
String[] comboSelections;
if (getParameter(PARAM_PROPERTYTYPE).isEmpty()) {
for (int i = 0; i < firstLine.length; i++) {
defaultPropertyTypeBuffer.append("java.lang.String");
defaultPropertyTypeBuffer.append(",");
}
defaultPropertyTypeBuffer.deleteCharAt(defaultPropertyTypeBuffer.lastIndexOf(","));
String combs = defaultPropertyTypeBuffer.toString();
comboSelections = combs.split(",");
} else {
comboSelections = getParameter(PARAM_PROPERTYTYPE).as(String.class).split(",");
}
String[] properties;
if (getParameter(PARAM_PROPERTY).isEmpty()) {
properties = firstLine;
} else {
properties = getParameter(PARAM_PROPERTY).as(String.class).split(",");
}
// than the entries in the first line
if ((firstLine.length != properties.length && properties.length != 0) || (firstLine.length != comboSelections.length && comboSelections.length != 0)) {
fail("Not the same number of entries for property names, property types and words in the first line of the file");
}
for (int i = 0; i < comboSelections.length; i++) {
PropertyType propertyType;
propertyType = PropertyTypeExtension.getInstance().getFactory(comboSelections[i]).createExtensionObject();
DefaultPropertyDefinition property = new DefaultPropertyDefinition(new QName(properties[i]), type, propertyType.getTypeDefinition());
// set constraints on property
// property.setConstraint(NillableFlag.DISABLED); // nillable
// nillable FIXME
property.setConstraint(NillableFlag.ENABLED);
// should be
// configurable
// per field
// (see also
// CSVInstanceReader)
// cardinality
property.setConstraint(Cardinality.CC_EXACTLY_ONCE);
// set metadata for property
property.setLocation(getSource().getLocation());
}
boolean skip = Arrays.equals(properties, firstLine);
type.setConstraint(new CSVConfiguration(CSVUtil.getSep(this), CSVUtil.getQuote(this), CSVUtil.getEscape(this), skip ? 1 : 0));
schema.addType(type);
} catch (Exception ex) {
reporter.error(new IOMessageImpl("Cannot load csv schema", ex));
reporter.setSuccess(false);
return null;
}
reporter.setSuccess(true);
return schema;
}
Usage of au.com.bytecode.opencsv.CSVReader in the hale project by halestudio: the onShowPage method of the CSVSchemaTypePage class.
/**
* @see HaleWizardPage#onShowPage(boolean)
*/
@Override
protected void onShowPage(boolean firstShow) {
try {
CSVReader reader = CSVUtil.readFirst(getWizard().getProvider());
setHeader(reader.readNext());
setSecondRow(reader.readNext());
super.onShowPage(firstShow);
} catch (IOException e) {
setHeader(new String[0]);
setSecondRow(new String[0]);
super.onShowPage(firstShow);
setMessage("File cannot be loaded!", WARNING);
setPageComplete(false);
}
setPageComplete(super.isValid());
}
Usage of au.com.bytecode.opencsv.CSVReader in the hale project by halestudio: the testWriteSimpleSchema method of the CSVInstanceWriterTest class.
/**
* Test - write simple data, without nested properties
*
* @throws Exception , if an error occurs
*/
@Test
public void testWriteSimpleSchema() throws Exception {
TransformationExample example = TransformationExamples.getExample(TransformationExamples.SIMPLE_ASSIGN);
// alternative the data could be generated by iterating through the
// exempleproject's source data
String propertyNames = "id,a1,b1,c1";
String firstDataRow = "id0,a10,b10,c10";
// header size
int numberOfEntries = 4;
int numberOfRows = 3;
char sep = ',';
File tmpFile = tmpFolder.newFile("csvTestWriteSimpleSchema.csv");
assertTrue("Csv Export was not successful.", writeCsvToFile(tmpFile, true, Value.of(sep), null, null, example.getSourceInstances()));
CSVReader reader = new CSVReader(new FileReader(tmpFile), sep);
List<String[]> rows = reader.readAll();
//
reader.close();
assertEquals("Not enough rows.", numberOfRows, rows.size());
// Check header ###
Iterator<String[]> row = rows.iterator();
String[] header = row.next();
assertEquals("There are not enough entries.", numberOfEntries, header.length);
for (int i = 0; i < header.length; i++) {
assertTrue("The header of the csv file do not contain all properties.", propertyNames.contains(header[i]));
}
String[] dataRow = row.next();
for (int i = 0; i < dataRow.length; i++) {
assertTrue("The first data row of the csv file do not contain all properties.", firstDataRow.contains(dataRow[i]));
}
}
Usage of au.com.bytecode.opencsv.CSVReader in the hale project by halestudio: the testWriteSimpleSchemaDelimiter method of the CSVInstanceWriterTest class.
/**
* Test - write simple data, without nested properties
*
* @throws Exception , if an error occurs
*/
@Test
public void testWriteSimpleSchemaDelimiter() throws Exception {
TransformationExample example = TransformationExamples.getExample(TransformationExamples.SIMPLE_ASSIGN);
// alternative the data could be generated by iterating through the
// exempleproject's source data
String propertyNames = "id,a1,b1,c1";
String firstDataRow = "id0,a10,b10,c10";
// header size
int numberOfEntries = 4;
int numberOfRows = 3;
char sep = '\t';
char quo = '\'';
char esc = '"';
File tmpFile = tmpFolder.newFile("csvTestWriteSimpleSchemaDelimiter.csv");
assertTrue("Csv Export was not successful.", writeCsvToFile(tmpFile, true, Value.of(sep), Value.of(quo), Value.of(esc), example.getSourceInstances()));
CSVReader reader = new CSVReader(new FileReader(tmpFile), sep, quo, esc);
List<String[]> rows = reader.readAll();
//
reader.close();
assertEquals("Not enough rows.", numberOfRows, rows.size());
// Check header ###
Iterator<String[]> row = rows.iterator();
String[] header = row.next();
assertEquals("There are not enough entries.", numberOfEntries, header.length);
for (int i = 0; i < header.length; i++) {
assertTrue("The header of the csv file do not contain all properties.", propertyNames.contains(header[i]));
}
String[] dataRow = row.next();
for (int i = 0; i < dataRow.length; i++) {
assertTrue("The first data row of the csv file do not contain all properties. Miss on : " + dataRow[i], firstDataRow.contains(dataRow[i]));
}
}
Usage of au.com.bytecode.opencsv.CSVReader in the hale project by halestudio: the testWriteComplexSchema method of the CSVInstanceWriterTest class.
/**
* Test - write data of complex schema and analyze result
*
* @throws Exception , if an error occurs
*/
@Test
public void testWriteComplexSchema() throws Exception {
TransformationExample example = TransformationExamples.getExample(TransformationExamples.SIMPLE_COMPLEX);
// alternative the data could be generated by iterating through the
// exempleproject's source data
String propertyNames = "id,name,details.age,details.income,details.address.street,details.address.city";
String firstDataRow = "id0,name0,age0,income0,street0,city0,street1,city1";
// String secondDataRow =
// "id1,name1,age1,income1,street2,city2,street3,city3";
int numberOfEntries = 6;
int numberOfRows = 3;
char sep = ',';
File tmpFile = tmpFolder.newFile("csvTestWriteComplexSchema.csv");
assertTrue("Csv Export was not successful.", writeCsvToFile(tmpFile, true, Value.of(sep), null, null, example.getSourceInstances()));
CSVReader reader = new CSVReader(new FileReader(tmpFile), sep);
List<String[]> rows = reader.readAll();
//
reader.close();
assertEquals("Not enough rows.", numberOfRows, rows.size());
// Check header ###
Iterator<String[]> row = rows.iterator();
String[] header = row.next();
assertEquals("There are not enough entries.", numberOfEntries, header.length);
for (int i = 0; i < header.length; i++) {
assertTrue("The header of the csv file do not contain all properties.", propertyNames.contains(header[i]));
// This is for debug purposes to check which properties are missing
// propertyNames = propertyNames.replaceFirst(header[i], "");
}
String[] dataRow = row.next();
for (int i = 0; i < dataRow.length; i++) {
assertTrue("The first data row of the csv file do not contain all properties.", firstDataRow.contains(dataRow[i]));
// This is for debug purposes ...
// firstDataRow = firstDataRow.replaceFirst(dataRow[i], "");
}
}
Aggregations