Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class XmlFieldReaderTest, method testReadParentCollection:
@Test
public void testReadParentCollection() throws Exception {
    final Document document = getDocumentFromFile("src/test/resources/complex-repeated.xml", false);
    reader.setDocument(document);
    // Parent collection: /orders/order[] is an unindexed array of complex fields
    FieldGroup orders = new FieldGroup();
    orders.setFieldType(FieldType.COMPLEX);
    orders.setDocId("xml");
    orders.setPath("/orders/order[]");
    orders.setCollectionType(CollectionType.ARRAY);
    FieldGroup address = new FieldGroup();
    address.setFieldType(FieldType.COMPLEX);
    address.setDocId("xml");
    address.setPath("/orders/order[]/address");
    orders.getField().add(address);
    XmlField addressLine1 = AtlasXmlModelFactory.createXmlField();
    addressLine1.setFieldType(FieldType.STRING);
    addressLine1.setDocId("xml");
    addressLine1.setPath("/orders/order[]/address/addressLine1");
    address.getField().add(addressLine1);
    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(mock(Head.class));
    when(session.head().getSourceField()).thenReturn(address);
    Field readField = reader.read(session);
    assertNotNull(readField);
    assertEquals(FieldGroup.class, readField.getClass());
    FieldGroup readGroup = FieldGroup.class.cast(readField);
    // One child group per order index, each holding the resolved addressLine1 field
    assertEquals(5, readGroup.getField().size());
    for (int i = 0; i < 5; i++) {
        FieldGroup addr = (FieldGroup) readGroup.getField().get(i);
        assertEquals("/orders/order[" + i + "]/address", addr.getPath());
        assertEquals(1, addr.getField().size());
        Field addressLine = (Field) addr.getField().get(0);
        assertEquals("/orders/order[" + i + "]/address/addressLine1", addressLine.getPath());
        assertEquals("123 Main St (" + (i + 1) + ")", addressLine.getValue());
    }
}
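The assertions above imply the shape of the complex-repeated.xml test resource. A minimal sketch of what that file presumably contains, with element names and values taken from the asserted paths and values; any attributes or additional elements in the real resource are omitted here:

<orders>
    <order>
        <address>
            <addressLine1>123 Main St (1)</addressLine1>
        </address>
    </order>
    <!-- ...repeated for orders 2 through 5, with addressLine1 values "123 Main St (2)" to "123 Main St (5)" -->
</orders>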
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class XmlFieldReader, method read:
@Override
public Field read(AtlasInternalSession session) throws AtlasException {
    Field field = session.head().getSourceField();
    if (document == null) {
        AtlasUtil.addAudit(session, field, String.format(
                "Cannot read field '%s' of document '%s', document is null",
                field.getPath(), field.getDocId()), AuditStatus.ERROR, null);
        return field;
    }
    if (field == null) {
        throw new AtlasException(new IllegalArgumentException("Argument 'field' cannot be null"));
    }
    if (!(field instanceof XmlField) && !(field instanceof FieldGroup) && !(field instanceof XmlEnumField)) {
        throw new AtlasException(String.format("Unsupported field type '%s'", field.getClass()));
    }
    seedDocumentNamespaces(document);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Reading source value for field: " + field.getPath());
    }
    Optional<XmlNamespaces> xmlNamespaces = getSourceNamespaces(session, field);
    XmlPath path = new XmlPath(field.getPath());
    List<Field> fields = getFieldsForPath(session, xmlNamespaces, document.getDocumentElement(), field, path, 0);
    if (path.hasCollection() && !path.isIndexedCollection()) {
        // Unindexed collection path: wrap all resolved fields in a FieldGroup
        FieldGroup fieldGroup = AtlasModelFactory.createFieldGroupFrom(field, true);
        fieldGroup.getField().addAll(fields);
        session.head().setSourceField(fieldGroup);
        return fieldGroup;
    } else if (fields.size() == 1) {
        field.setValue(fields.get(0).getValue());
        return field;
    } else {
        return field;
    }
}
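A minimal caller-side sketch of what that contract means in practice: when the source path contains an unindexed collection segment, read() returns a FieldGroup whose children carry indexed paths, so a consumer can flatten the leaf values by walking the group recursively. The helper name collectLeafValues below is hypothetical and not part of AtlasMap; it only relies on the Field and FieldGroup accessors already used above.

// Hypothetical helper: flatten a read result into its leaf values.
static List<Object> collectLeafValues(Field field) {
    List<Object> values = new ArrayList<>();
    if (field instanceof FieldGroup) {
        for (Field child : ((FieldGroup) field).getField()) {
            values.addAll(collectLeafValues(child));
        }
    } else {
        values.add(field.getValue());
    }
    return values;
}

// For the test above, this would yield the five addressLine1 values in document order.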
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class AtlasKafkaConnectModelFactory, method cloneFieldGroup:
/**
 * Clones the FieldGroup, recursively copying nested groups and Kafka Connect fields.
 * @param group the FieldGroup to clone
 * @return the cloned FieldGroup
 */
public static FieldGroup cloneFieldGroup(FieldGroup group) {
    FieldGroup clone = AtlasModelFactory.copyFieldGroup(group);
    List<Field> newChildren = new ArrayList<>();
    for (Field child : group.getField()) {
        if (child instanceof FieldGroup) {
            newChildren.add(cloneFieldGroup((FieldGroup) child));
        } else {
            newChildren.add(cloneField((KafkaConnectField) child, true));
        }
    }
    clone.getField().addAll(newChildren);
    return clone;
}
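A brief usage sketch: the clone is a deep copy, so mutating a child of the clone leaves the original untouched. It assumes the factory exposes a createKafkaConnectField() helper analogous to AtlasXmlModelFactory.createXmlField() used in the XML test above; that helper name is an assumption, not confirmed by this excerpt.

// createKafkaConnectField() is assumed; any factory that yields a KafkaConnectField would do.
KafkaConnectField name = AtlasKafkaConnectModelFactory.createKafkaConnectField();
name.setPath("/name");
name.setValue("original");

FieldGroup group = new FieldGroup();
group.getField().add(name);

FieldGroup clone = AtlasKafkaConnectModelFactory.cloneFieldGroup(group);
clone.getField().get(0).setValue("changed");

// group's child still holds "original"; only the cloned child was modified.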
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class CsvFieldReaderTest, method testWithSimpleDocumentWithHeaderSpecified:
@Test
public void testWithSimpleDocumentWithHeaderSpecified() throws Exception {
    CsvConfig csvConfig = new CsvConfig();
    csvConfig.setHeaders("givenName,familyName");
    CsvFieldReader csvFieldReader = new CsvFieldReader(csvConfig);
    csvFieldReader.setDocument(new ByteArrayInputStream("Bob,Smith\nAndrew,Johnson".getBytes()));
    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(mock(AtlasInternalSession.Head.class));
    CsvField csvField = new CsvField();
    csvField.setName("familyName");
    csvField.setPath("/<>/familyName");
    when(session.head().getSourceField()).thenReturn(csvField);
    Audits audits = new Audits();
    when(session.getAudits()).thenReturn(audits);
    // Reading an unindexed collection path returns a FieldGroup with one child per CSV record
    FieldGroup field = (FieldGroup) csvFieldReader.read(session);
    assertEquals(0, audits.getAudit().size());
    assertEquals("Smith", field.getField().get(0).getValue());
    assertEquals("Johnson", field.getField().get(1).getValue());
}
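The same pattern applies to any column: the source field's path selects which column is read across all records. A minimal sketch, reusing the mock-session scaffolding above and a fresh reader over the same two-record document; the expected values follow by analogy with the assertions on familyName:

CsvFieldReader reader2 = new CsvFieldReader(csvConfig);
reader2.setDocument(new ByteArrayInputStream("Bob,Smith\nAndrew,Johnson".getBytes()));

CsvField givenName = new CsvField();
givenName.setName("givenName");
givenName.setPath("/<>/givenName");
when(session.head().getSourceField()).thenReturn(givenName);

FieldGroup givenNames = (FieldGroup) reader2.read(session);
// Expected: "Bob" at index 0 and "Andrew" at index 1.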
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class CsvFieldWriter, method toCsv:
/**
 * Exports the document as CSV.
 * @return the exported CSV
 * @throws AtlasException unexpected error
 */
public String toCsv() throws AtlasException {
    CSVFormat csvFormat = csvConfig.newCsvFormat();
    String[] headers = csvConfig.getParsedHeaders();
    boolean ignoreHeaderCase = Boolean.TRUE.equals(csvConfig.getIgnoreHeaderCase());
    if (headers != null && ignoreHeaderCase) {
        for (int j = 0; j < headers.length; j++) {
            headers[j] = headers[j].toLowerCase();
        }
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8");
        CSVPrinter printer = new CSVPrinter(writer, csvFormat);
        List<Field> fields = document.getFields().getField();
        if (fields.isEmpty()) {
            return "";
        }
        // Print a header record derived from the field names, unless headers were configured
        // explicitly or the header record is skipped
        if (!Boolean.TRUE.equals(csvConfig.getSkipHeaderRecord())) {
            if (csvConfig.getHeaders() == null) {
                String[] headerRecords = new String[fields.size()];
                int i = 0;
                for (Field field : fields) {
                    CsvField csvField;
                    if (field instanceof FieldGroup) {
                        FieldGroup fieldGroup = (FieldGroup) field;
                        csvField = (CsvField) fieldGroup.getField().get(i);
                    } else {
                        csvField = (CsvField) field;
                    }
                    if (csvField.getColumn() != null) {
                        headerRecords[csvField.getColumn()] = csvField.getName();
                    } else {
                        headerRecords[i] = csvField.getName();
                    }
                    i++;
                }
                printer.printRecord(headerRecords);
            }
        }
        // Each top-level FieldGroup holds one child per record; a plain field means a single record
        int recordsCount;
        if (fields.get(0) instanceof FieldGroup) {
            recordsCount = ((FieldGroup) fields.get(0)).getField().size();
        } else {
            recordsCount = 1;
        }
        for (int i = 0; i < recordsCount; i++) {
            List<String> values = new ArrayList<>();
            for (Field field : fields) {
                CsvField csvField;
                if (field instanceof FieldGroup) {
                    FieldGroup fieldGroup = (FieldGroup) field;
                    csvField = (CsvField) fieldGroup.getField().get(i);
                } else {
                    csvField = (CsvField) field;
                }
                if (csvField.getColumn() != null) {
                    // Add missing values
                    for (int j = values.size(); j < csvField.getColumn() + 1; j++) {
                        values.add(null);
                    }
                    values.set(csvField.getColumn(), csvField.getValue().toString());
                } else if (headers != null) {
                    for (int j = values.size(); j < headers.length; j++) {
                        values.add(null);
                    }
                    int column = findColumn(headers, ignoreHeaderCase, csvField);
                    if (column != -1) {
                        values.set(column, csvField.getValue().toString());
                    }
                } else {
                    values.add(csvField.getValue().toString());
                }
            }
            printer.printRecord(values);
        }
        writer.flush();
        String csv = out.toString();
        return csv;
    } catch (IOException e) {
        throw new AtlasException(e);
    }
}
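findColumn() is referenced above but not shown in this excerpt. A minimal sketch of what such a helper presumably does, matching the field name against the (possibly lower-cased) header array; the actual AtlasMap implementation may differ:

// Hypothetical reconstruction of the helper used above, not the verbatim AtlasMap source.
private static int findColumn(String[] headers, boolean ignoreHeaderCase, CsvField csvField) {
    String name = csvField.getName();
    if (ignoreHeaderCase && name != null) {
        // The caller lower-cases the headers array when ignoreHeaderCase is set
        name = name.toLowerCase();
    }
    for (int i = 0; i < headers.length; i++) {
        if (headers[i] != null && headers[i].equals(name)) {
            return i;
        }
    }
    // Matches the caller's contract: -1 means no matching header column
    return -1;
}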