Use of io.atlasmap.csv.v2.CsvComplexType in project atlasmap by atlasmap.
From the class CsvFieldReaderTest, method testReadSchemaWithHeaderSpecified.
@Test
public void testReadSchemaWithHeaderSpecified() throws Exception {
    CsvConfig csvConfig = new CsvConfig();
    csvConfig.setHeaders("givenName,familyName");
    CsvFieldReader csvFieldReader = new CsvFieldReader(csvConfig);
    csvFieldReader.setDocument(new ByteArrayInputStream("Bob,Smith\nAndrew,Johnson".getBytes()));
    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(mock(AtlasInternalSession.Head.class));
    Document document = csvFieldReader.readSchema();
    CsvComplexType list = (CsvComplexType) document.getFields().getField().get(0);
    assertEquals("givenName", list.getCsvFields().getCsvField().get(0).getName());
    assertEquals("familyName", list.getCsvFields().getCsvField().get(1).getName());
}
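Every snippet on this page navigates the inspected Document the same way: the first top-level field is the CsvComplexType, and its CsvFields child holds one CsvField per column. A minimal traversal helper, sketched here for illustration (printCsvFields is not part of the project, and it reuses the same imports as the test above), would look like:

// Sketch: walk the Document returned by CsvFieldReader.readSchema() and print each column.
// Assumes, as in the test above, that the first top-level field is the CsvComplexType.
static void printCsvFields(Document document) {
    CsvComplexType complexType = (CsvComplexType) document.getFields().getField().get(0);
    for (CsvField field : complexType.getCsvFields().getCsvField()) {
        System.out.println(field.getName() + " -> " + field.getPath());
    }
}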
Use of io.atlasmap.csv.v2.CsvComplexType in project atlasmap by atlasmap.
From the class CsvServiceTest, method testSchemaNoParametersSpecified.
@Test
public void testSchemaNoParametersSpecified() throws Exception {
    final String source = "l1r1,l1r2,l1r3\n" + "l2r1,l2r2,l2r3\n" + "l3r1,l3r2,l3r3\n";
    InputStream inputStream = new ByteArrayInputStream(source.getBytes());
    Response res = csvService.inspect(inputStream, null, null, null, null, null, null, null, null, null, null, null, null, null, null);
    Object entity = res.getEntity();
    assertEquals(byte[].class, entity.getClass());
    CsvInspectionResponse csvInspectionResponse = Json.mapper().readValue((byte[]) entity, CsvInspectionResponse.class);
    CsvComplexType complexType = (CsvComplexType) csvInspectionResponse.getCsvDocument().getFields().getField().get(0);
    List<CsvField> fields = complexType.getCsvFields().getCsvField();
    assertEquals("0", fields.get(0).getName());
    assertEquals("1", fields.get(1).getName());
    assertEquals("2", fields.get(2).getName());
}
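With every parameter left null, the service falls back to index-based column names. The same behavior can be reproduced directly with CsvFieldReader and a default CsvConfig; this is a minimal sketch (assuming firstRecordAsHeader is unset by default), not part of the test class:

// Sketch: a default CsvConfig (no headers configured, firstRecordAsHeader unset) yields
// fields named by column index, matching the "0", "1", "2" assertions above.
CsvConfig csvConfig = new CsvConfig();
CsvFieldReader reader = new CsvFieldReader(csvConfig);
reader.setDocument(new ByteArrayInputStream("l1r1,l1r2,l1r3\n".getBytes()));
Document document = reader.readSchema();
CsvComplexType complexType = (CsvComplexType) document.getFields().getField().get(0);
assertEquals("0", complexType.getCsvFields().getCsvField().get(0).getName());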
Use of io.atlasmap.csv.v2.CsvComplexType in project atlasmap by atlasmap.
From the class CsvServiceTest, method testSchemaFile.
@Test
public void testSchemaFile() throws Exception {
    InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("test.csv");
    Response res = csvService.inspect(inputStream, null, ",", true, null, null, null, null, null, null, null, null, null, null, null);
    Object entity = res.getEntity();
    assertEquals(byte[].class, entity.getClass());
    CsvInspectionResponse csvInspectionResponse = Json.mapper().readValue((byte[]) entity, CsvInspectionResponse.class);
    CsvComplexType complexType = (CsvComplexType) csvInspectionResponse.getCsvDocument().getFields().getField().get(0);
    List<CsvField> fields = complexType.getCsvFields().getCsvField();
    assertEquals(5, fields.size());
    assertEquals("sourceCsvString", fields.get(0).getName());
    assertEquals("sourceCsvNumber", fields.get(1).getName());
    assertEquals("sourceCsvDecimal", fields.get(2).getName());
    assertEquals("sourceCsvDate", fields.get(3).getName());
    assertEquals("sourceCsvBoolean", fields.get(4).getName());
}
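The test.csv fixture itself is not reproduced in this excerpt. Based on the assertions, an equivalent in-memory input (with hypothetical data values) would be:

// Hypothetical stand-in for test.csv: the header row matches the five names asserted above;
// the data row is illustrative only.
String source = "sourceCsvString,sourceCsvNumber,sourceCsvDecimal,sourceCsvDate,sourceCsvBoolean\n"
        + "foo,1,1.5,2001-01-01,true\n";
InputStream inputStream = new ByteArrayInputStream(source.getBytes());
Response res = csvService.inspect(inputStream, null, ",", true, null, null, null, null, null, null, null, null, null, null, null);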
Use of io.atlasmap.csv.v2.CsvComplexType in project atlasmap by atlasmap.
From the class CsvServiceTest, method testSchema.
@Test
public void testSchema() throws Exception {
    final String source = "header1,header2,header3\n" + "l1r1,l1r2,l1r3\n" + "l2r1,l2r2,l2r3\n" + "l3r1,l3r2,l3r3\n";
    InputStream inputStream = new ByteArrayInputStream(source.getBytes());
    Response res = csvService.inspect(inputStream, null, ",", true, null, null, null, null, null, null, null, null, null, null, null);
    Object entity = res.getEntity();
    assertEquals(byte[].class, entity.getClass());
    CsvInspectionResponse csvInspectionResponse = Json.mapper().readValue((byte[]) entity, CsvInspectionResponse.class);
    CsvComplexType complexType = (CsvComplexType) csvInspectionResponse.getCsvDocument().getFields().getField().get(0);
    List<CsvField> fields = complexType.getCsvFields().getCsvField();
    assertEquals("header1", fields.get(0).getName());
    assertEquals("header2", fields.get(1).getName());
    assertEquals("header3", fields.get(2).getName());
}
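In all of the inspections above, every column is reported as a plain string field with a path of the form /<>/{name} (see readSchema() below). A quick additional check on the deserialized response, sketched here rather than taken from the project, would be:

// Sketch: every column discovered by inspection is a STRING field whose path follows
// the "/<>/{name}" pattern built in CsvFieldReader.readSchema() below.
for (CsvField field : complexType.getCsvFields().getCsvField()) {
    assertEquals(FieldType.STRING, field.getFieldType());
    assertEquals("/<>/" + field.getName(), field.getPath());
}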
Use of io.atlasmap.csv.v2.CsvComplexType in project atlasmap by atlasmap.
From the class CsvFieldReader, method readSchema.
/**
 * Reads only the first row of the document.
 *
 * If firstRecordAsHeader is set to true, the column names from the header row are used as
 * field names; otherwise a zero-based column index is used as the name.
 *
 * @return {@link Document} built from CSV
 * @throws AtlasException if it fails
 */
public Document readSchema() throws AtlasException {
    CSVFormat csvFormat = csvConfig.newCsvFormat();
    CSVParser parser;
    try {
        // Mark the stream so it can be rewound after the schema has been read.
        document.mark(Integer.MAX_VALUE);
        parser = csvFormat.parse(new InputStreamReader(document));
    } catch (IOException e) {
        throw new AtlasException(e);
    }
    List<CsvField> fields = new ArrayList<>();
    if (csvConfig.isFirstRecordAsHeader()) {
        // The header row drives the field names.
        for (String headerName : parser.getHeaderNames()) {
            CsvField field = new CsvField();
            field.setName(headerName);
            field.setPath("/<>/" + headerName);
            field.setFieldType(FieldType.STRING);
            fields.add(field);
        }
    } else {
        // No header row: inspect the first record and name fields by the configured headers
        // (if any) or by column index.
        CSVRecord record = parser.iterator().next();
        for (int i = 0; i < record.size(); i++) {
            CsvField field = new CsvField();
            if (parser.getHeaderNames() != null && parser.getHeaderNames().size() > i) {
                field.setName(parser.getHeaderNames().get(i));
            } else {
                field.setColumn(i);
                field.setName(String.valueOf(i));
            }
            field.setPath("/<>/" + field.getName());
            field.setFieldType(FieldType.STRING);
            fields.add(field);
        }
    }
    try {
        document.reset();
    } catch (IOException e) {
        throw new AtlasException(e);
    }
    CsvFields csvFields = new CsvFields();
    csvFields.getCsvField().addAll(fields);
    CsvComplexType csvComplexType = new CsvComplexType();
    csvComplexType.setFieldType(FieldType.COMPLEX);
    csvComplexType.setCollectionType(CollectionType.LIST);
    csvComplexType.setPath("/<>");
    csvComplexType.setName("");
    csvComplexType.setCsvFields(csvFields);
    Fields documentFields = new Fields();
    documentFields.getField().add(csvComplexType);
    Document document = new Document();
    document.setFields(documentFields);
    return document;
}
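The shape of the returned Document is the same for every inspection on this page: a single CsvComplexType rooted at path /<>, typed COMPLEX, and marked as a LIST collection, so each CSV record maps to one element of that list. A minimal sketch of assertions describing that shape (not taken from the project tests):

// Sketch: the root complex type built at the end of readSchema().
Document schema = csvFieldReader.readSchema();
CsvComplexType root = (CsvComplexType) schema.getFields().getField().get(0);
assertEquals("/<>", root.getPath());
assertEquals(FieldType.COMPLEX, root.getFieldType());
assertEquals(CollectionType.LIST, root.getCollectionType());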