Example usage of io.atlasmap.csv.v2.CsvField in the atlasmap project: class CsvFieldReader, method readSchema.
/**
 * Reads only the first row of the document and builds a schema {@link Document} from it.
 *
 * If firstRecordAsHeader is set to true it uses column names for field names, otherwise it uses an index
 * starting from 0.
 *
 * The underlying stream is marked before parsing and reset afterwards so the caller can re-read it.
 *
 * @return {@link Document} built from CSV
 * @throws AtlasException if parsing fails or the stream cannot be marked/reset
 */
public Document readSchema() throws AtlasException {
    CSVFormat csvFormat = csvConfig.newCsvFormat();
    CSVParser parser;
    try {
        // Mark the whole stream so it can be rewound after schema inspection.
        document.mark(Integer.MAX_VALUE);
        // NOTE(review): the reader uses the platform default charset here; consider an
        // explicit UTF-8 reader if the document encoding is known — TODO confirm.
        parser = csvFormat.parse(new InputStreamReader(document));
    } catch (IOException e) {
        throw new AtlasException(e);
    }
    List<CsvField> fields = new ArrayList<>();
    if (csvConfig.isFirstRecordAsHeader()) {
        // Field names come straight from the parsed header record.
        for (String headerName : parser.getHeaderNames()) {
            CsvField field = new CsvField();
            field.setName(headerName);
            field.setPath("/<>/" + headerName);
            field.setFieldType(FieldType.STRING);
            fields.add(field);
        }
    } else {
        // Inspect the first record only. Iterating with a guarded loop instead of
        // parser.iterator().next() means an empty document yields no fields rather
        // than throwing NoSuchElementException.
        for (CSVRecord record : parser) {
            for (int i = 0; i < record.size(); i++) {
                CsvField field = new CsvField();
                if (parser.getHeaderNames() != null && parser.getHeaderNames().size() > i) {
                    // Headers were configured explicitly; use the configured name.
                    field.setName(parser.getHeaderNames().get(i));
                } else {
                    // No header available: fall back to the zero-based column index.
                    field.setColumn(i);
                    field.setName(String.valueOf(i));
                }
                field.setPath("/<>/" + field.getName());
                field.setFieldType(FieldType.STRING);
                fields.add(field);
            }
            break; // only the first record is needed for the schema
        }
    }
    try {
        // Rewind so subsequent reads see the full document again.
        document.reset();
    } catch (IOException e) {
        throw new AtlasException(e);
    }
    CsvFields csvFields = new CsvFields();
    csvFields.getCsvField().addAll(fields);
    CsvComplexType csvComplexType = new CsvComplexType();
    csvComplexType.setFieldType(FieldType.COMPLEX);
    csvComplexType.setCollectionType(CollectionType.LIST);
    csvComplexType.setPath("/<>");
    csvComplexType.setName("");
    csvComplexType.setCsvFields(csvFields);
    Fields documentFields = new Fields();
    documentFields.getField().add(csvComplexType);
    // Renamed local: the original "document" shadowed the InputStream field of the
    // same name used above.
    Document schemaDocument = new Document();
    schemaDocument.setFields(documentFields);
    return schemaDocument;
}
Example usage of io.atlasmap.csv.v2.CsvField in the atlasmap project: class CsvFieldReaderTest, method testWithNullDocument.
@Test
public void testWithNullDocument() throws Exception {
    // A null document must not blow up the reader; it should surface exactly one
    // ERROR audit on the session instead.
    CsvFieldReader reader = new CsvFieldReader(new CsvConfig());
    reader.setDocument(null);

    AtlasInternalSession.Head head = mock(AtlasInternalSession.Head.class);
    when(head.getSourceField()).thenReturn(new CsvField());

    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(head);

    Audits audits = new Audits();
    when(session.getAudits()).thenReturn(audits);

    reader.read(session);

    assertEquals(1, audits.getAudit().size());
    assertEquals(AuditStatus.ERROR, audits.getAudit().get(0).getStatus());
}
Example usage of io.atlasmap.csv.v2.CsvField in the atlasmap project: class CsvFieldReaderTest, method testWithSimpleDocumentWithHeaderSpecified.
@Test
public void testWithSimpleDocumentWithHeaderSpecified() throws Exception {
    CsvConfig csvConfig = new CsvConfig();
    csvConfig.setHeaders("givenName,familyName");
    CsvFieldReader csvFieldReader = new CsvFieldReader(csvConfig);
    // Encode the fixture explicitly as UTF-8 instead of relying on the platform
    // default charset (getBytes() with no argument is platform-dependent).
    csvFieldReader.setDocument(new ByteArrayInputStream(
            "Bob,Smith\nAndrew,Johnson".getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(mock(AtlasInternalSession.Head.class));
    CsvField csvField = new CsvField();
    csvField.setName("familyName");
    csvField.setPath("/<>/familyName");
    when(session.head().getSourceField()).thenReturn(csvField);
    Audits audits = new Audits();
    when(session.getAudits()).thenReturn(audits);
    FieldGroup field = (FieldGroup) csvFieldReader.read(session);
    assertEquals(0, audits.getAudit().size());
    assertEquals("Smith", field.getField().get(0).getValue());
    assertEquals("Johnson", field.getField().get(1).getValue());
    // Also pin the indexed collection paths, for consistency with the
    // delimiter variant of this test.
    assertEquals("/<0>/familyName", field.getField().get(0).getPath());
    assertEquals("/<1>/familyName", field.getField().get(1).getPath());
}
Example usage of io.atlasmap.csv.v2.CsvField in the atlasmap project: class CsvFieldReaderTest, method testWithSimpleDocumentWithHeaderAndDelimiterSpecified.
@Test
public void testWithSimpleDocumentWithHeaderAndDelimiterSpecified() throws Exception {
    CsvConfig csvConfig = new CsvConfig();
    csvConfig.setDelimiter(';');
    csvConfig.setHeaders("givenName;familyName");
    CsvFieldReader csvFieldReader = new CsvFieldReader(csvConfig);
    // Encode the fixture explicitly as UTF-8 instead of relying on the platform
    // default charset (getBytes() with no argument is platform-dependent).
    csvFieldReader.setDocument(new ByteArrayInputStream(
            "Bob;Smith\nAndrew;Johnson".getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(mock(AtlasInternalSession.Head.class));
    CsvField csvField = new CsvField();
    csvField.setName("familyName");
    csvField.setPath("/<>/familyName");
    when(session.head().getSourceField()).thenReturn(csvField);
    Audits audits = new Audits();
    when(session.getAudits()).thenReturn(audits);
    FieldGroup field = (FieldGroup) csvFieldReader.read(session);
    assertEquals(0, audits.getAudit().size());
    assertEquals("Smith", field.getField().get(0).getValue());
    assertEquals("/<0>/familyName", field.getField().get(0).getPath());
    assertEquals("Johnson", field.getField().get(1).getValue());
    assertEquals("/<1>/familyName", field.getField().get(1).getPath());
}
Example usage of io.atlasmap.csv.v2.CsvField in the atlasmap project: class CsvFieldWriter, method toCsv.
/**
 * Exports the document as CSV text.
 *
 * The header record is printed unless skipHeaderRecord is set or explicit headers
 * were configured. Field values are placed by explicit column index when present,
 * otherwise matched against the configured headers, otherwise appended in order.
 *
 * @return the exported CSV text (empty string when the document has no fields)
 * @throws AtlasException on any I/O failure while printing
 */
public String toCsv() throws AtlasException {
    CSVFormat csvFormat = csvConfig.newCsvFormat();
    String[] headers = csvConfig.getParsedHeaders();
    boolean ignoreHeaderCase = Boolean.TRUE.equals(csvConfig.getIgnoreHeaderCase());
    if (headers != null && ignoreHeaderCase) {
        // NOTE(review): this lowercases with the default locale and mutates the array
        // returned by getParsedHeaders(); confirm the config hands out a copy rather
        // than its internal array, and whether Locale.ROOT should be used here to
        // stay consistent with findColumn().
        for (int j = 0; j < headers.length; j++) {
            headers[j] = headers[j].toLowerCase();
        }
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8");
        CSVPrinter printer = new CSVPrinter(writer, csvFormat);
        List<Field> fields = document.getFields().getField();
        if (fields.isEmpty()) {
            return "";
        }
        if (!Boolean.TRUE.equals(csvConfig.getSkipHeaderRecord())) {
            if (csvConfig.getHeaders() == null) {
                // Derive the header record from the field names, honoring explicit
                // column indexes where present.
                String[] headerRecords = new String[fields.size()];
                int i = 0;
                for (Field field : fields) {
                    CsvField csvField;
                    if (field instanceof FieldGroup) {
                        FieldGroup fieldGroup = (FieldGroup) field;
                        // NOTE(review): indexes into the group with the running field
                        // counter i; presumably every element of the group shares the
                        // same name, so get(0) would be equivalent and safer — verify.
                        csvField = (CsvField) fieldGroup.getField().get(i);
                    } else {
                        csvField = (CsvField) field;
                    }
                    if (csvField.getColumn() != null) {
                        headerRecords[csvField.getColumn()] = csvField.getName();
                    } else {
                        headerRecords[i] = csvField.getName();
                    }
                    i++;
                }
                printer.printRecord(headerRecords);
            }
        }
        // A FieldGroup carries one entry per record; a plain field means one record.
        int recordsCount;
        if (fields.get(0) instanceof FieldGroup) {
            recordsCount = ((FieldGroup) fields.get(0)).getField().size();
        } else {
            recordsCount = 1;
        }
        for (int i = 0; i < recordsCount; i++) {
            List<String> values = new ArrayList<>();
            for (Field field : fields) {
                CsvField csvField;
                if (field instanceof FieldGroup) {
                    FieldGroup fieldGroup = (FieldGroup) field;
                    csvField = (CsvField) fieldGroup.getField().get(i);
                } else {
                    csvField = (CsvField) field;
                }
                if (csvField.getColumn() != null) {
                    // Pad with nulls so the explicit column index is addressable.
                    for (int j = values.size(); j < csvField.getColumn() + 1; j++) {
                        values.add(null);
                    }
                    values.set(csvField.getColumn(), csvField.getValue().toString());
                } else if (headers != null) {
                    // Pad to the full header width, then place by header match.
                    for (int j = values.size(); j < headers.length; j++) {
                        values.add(null);
                    }
                    int column = findColumn(headers, ignoreHeaderCase, csvField);
                    if (column != -1) {
                        values.set(column, csvField.getValue().toString());
                    }
                } else {
                    values.add(csvField.getValue().toString());
                }
            }
            printer.printRecord(values);
        }
        writer.flush();
        // BUGFIX: decode with the same charset the writer encoded with. The no-arg
        // toString() uses the platform default charset and could garble non-ASCII
        // values on platforms whose default is not UTF-8.
        String csv = out.toString("UTF-8");
        return csv;
    } catch (IOException e) {
        throw new AtlasException(e);
    }
}
Aggregations