Example 91 with MapRecord

Use of org.apache.nifi.serialization.record.MapRecord in project nifi by apache.

From class JsonPathRowRecordReader, method convertJsonNodeToRecord.

@Override
protected Record convertJsonNodeToRecord(final JsonNode jsonNode, final RecordSchema schema, final boolean coerceTypes, final boolean dropUnknownFields) throws IOException {
    if (jsonNode == null) {
        return null;
    }
    final DocumentContext ctx = JsonPath.using(STRICT_PROVIDER_CONFIGURATION).parse(jsonNode.toString());
    final Map<String, Object> values = new HashMap<>(schema.getFieldCount());
    for (final Map.Entry<String, JsonPath> entry : jsonPaths.entrySet()) {
        final String fieldName = entry.getKey();
        final DataType desiredType = schema.getDataType(fieldName).orElse(null);
        if (desiredType == null && dropUnknownFields) {
            continue;
        }
        final JsonPath jsonPath = entry.getValue();
        Object value;
        try {
            value = ctx.read(jsonPath);
        } catch (final PathNotFoundException pnfe) {
            logger.debug("Evaluated JSONPath Expression {} but the path was not found; will use a null value", new Object[] { entry.getValue() });
            value = null;
        }
        final Optional<RecordField> field = schema.getField(fieldName);
        final Object defaultValue = field.isPresent() ? field.get().getDefaultValue() : null;
        // Coerce to the schema's declared type when requested and known; otherwise
        // apply the reader's default conversion using the field's own type, if any.
        if (coerceTypes && desiredType != null) {
            value = convert(value, desiredType, fieldName, defaultValue);
        } else {
            final DataType dataType = field.isPresent() ? field.get().getDataType() : null;
            value = convert(value, dataType);
        }
        values.put(fieldName, value);
    }
    return new MapRecord(schema, values);
}
Also used : MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) JsonPath(com.jayway.jsonpath.JsonPath) DataType(org.apache.nifi.serialization.record.DataType) RecordDataType(org.apache.nifi.serialization.record.type.RecordDataType) ArrayDataType(org.apache.nifi.serialization.record.type.ArrayDataType) PathNotFoundException(com.jayway.jsonpath.PathNotFoundException) DocumentContext(com.jayway.jsonpath.DocumentContext) Map(java.util.Map)
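
For orientation, here is a minimal standalone sketch of the construction the reader above ends with: declare a RecordSchema, fill an ordinary Map with values, wrap it in a MapRecord, and read fields back through the typed accessors. The field names and values are illustrative, not taken from the NiFi sources.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;

public class MapRecordSketch {

    public static void main(final String[] args) {
        // Declare the schema: field names plus NiFi record types.
        final List<RecordField> fields = Arrays.asList(
            new RecordField("id", RecordFieldType.LONG.getDataType()),
            new RecordField("name", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        // Back the record with an ordinary Map, exactly as the reader above does.
        final Map<String, Object> values = new HashMap<>();
        values.put("id", 42L);
        values.put("name", "example");
        final Record record = new MapRecord(schema, values);

        // Read values back by field name; the getAs* accessors coerce where possible.
        System.out.println(record.getAsLong("id"));     // 42
        System.out.println(record.getAsString("name")); // example
    }
}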

Example 92 with MapRecord

Use of org.apache.nifi.serialization.record.MapRecord in project nifi by apache.

From class JsonTreeRowRecordReader, method convertJsonNodeToRecord.

private Record convertJsonNodeToRecord(final JsonNode jsonNode, final RecordSchema schema, final String fieldNamePrefix, final boolean coerceTypes, final boolean dropUnknown) throws IOException, MalformedRecordException {
    final Map<String, Object> values = new HashMap<>(schema.getFieldCount() * 2);
    if (dropUnknown) {
        // Iterate the schema's fields so JSON properties that are not in the schema are ignored.
        for (final RecordField recordField : schema.getFields()) {
            final JsonNode childNode = getChildNode(jsonNode, recordField);
            if (childNode == null) {
                continue;
            }
            final String fieldName = recordField.getFieldName();
            final Object value;
            if (coerceTypes) {
                final DataType desiredType = recordField.getDataType();
                final String fullFieldName = fieldNamePrefix == null ? fieldName : fieldNamePrefix + fieldName;
                value = convertField(childNode, fullFieldName, desiredType, dropUnknown);
            } else {
                value = getRawNodeValue(childNode, recordField == null ? null : recordField.getDataType());
            }
            values.put(fieldName, value);
        }
    } else {
        // Iterate the JSON node's own field names so properties absent from the schema are kept as raw values.
        final Iterator<String> fieldNames = jsonNode.getFieldNames();
        while (fieldNames.hasNext()) {
            final String fieldName = fieldNames.next();
            final JsonNode childNode = jsonNode.get(fieldName);
            final RecordField recordField = schema.getField(fieldName).orElse(null);
            final Object value;
            if (coerceTypes && recordField != null) {
                final DataType desiredType = recordField.getDataType();
                final String fullFieldName = fieldNamePrefix == null ? fieldName : fieldNamePrefix + fieldName;
                value = convertField(childNode, fullFieldName, desiredType, dropUnknown);
            } else {
                value = getRawNodeValue(childNode, recordField == null ? null : recordField.getDataType());
            }
            values.put(fieldName, value);
        }
    }
    final Supplier<String> supplier = () -> jsonNode.toString();
    return new MapRecord(schema, values, SerializedForm.of(supplier, "application/json"), false, dropUnknown);
}
Also used : MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) DataType(org.apache.nifi.serialization.record.DataType) RecordDataType(org.apache.nifi.serialization.record.type.RecordDataType) MapDataType(org.apache.nifi.serialization.record.type.MapDataType) ArrayDataType(org.apache.nifi.serialization.record.type.ArrayDataType) JsonNode(org.codehaus.jackson.JsonNode)
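
The five-argument MapRecord constructor used above also attaches a SerializedForm, so a downstream writer can emit the original JSON text rather than re-serializing the record. The fragment below is a hedged sketch of that idea: it relies only on the calls shown above plus the SerializedForm/Record accessors, the import path for SerializedForm is an assumption that may differ across NiFi versions, and the field name and JSON are illustrative.

import java.util.Collections;
import java.util.Map;

import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.SerializedForm; // assumed package; adjust for your NiFi version

public class SerializedFormSketch {

    public static void main(final String[] args) {
        final RecordSchema schema = new SimpleRecordSchema(
            Collections.singletonList(new RecordField("name", RecordFieldType.STRING.getDataType())));
        final Map<String, Object> values = Collections.singletonMap("name", "John");
        final String originalJson = "{\"name\":\"John\"}";

        // Same constructor shape as the reader above: schema, values, serialized form,
        // check-types flag, drop-unknown-fields flag.
        final Record record = new MapRecord(schema, values,
            SerializedForm.of(() -> originalJson, "application/json"), false, false);

        // A writer can ask for the original serialized text instead of rebuilding it.
        record.getSerializedForm().ifPresent(form ->
            System.out.println(form.getMimeType() + ": " + form.getSerialized()));
    }
}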

Example 93 with MapRecord

Use of org.apache.nifi.serialization.record.MapRecord in project nifi by apache.

From class TestAvroReaderWithEmbeddedSchema, method testDataTypes.

@Test
public void testDataTypes() throws IOException, MalformedRecordException, SchemaNotFoundException {
    final List<Field> accountFields = new ArrayList<>();
    accountFields.add(new Field("accountId", Schema.create(Type.LONG), null, (Object) null));
    accountFields.add(new Field("accountName", Schema.create(Type.STRING), null, (Object) null));
    final Schema accountSchema = Schema.createRecord("account", null, null, false);
    accountSchema.setFields(accountFields);
    final List<Field> catFields = new ArrayList<>();
    catFields.add(new Field("catTailLength", Schema.create(Type.INT), null, (Object) null));
    catFields.add(new Field("catName", Schema.create(Type.STRING), null, (Object) null));
    final Schema catSchema = Schema.createRecord("cat", null, null, false);
    catSchema.setFields(catFields);
    final List<Field> dogFields = new ArrayList<>();
    dogFields.add(new Field("dogTailLength", Schema.create(Type.INT), null, (Object) null));
    dogFields.add(new Field("dogName", Schema.create(Type.STRING), null, (Object) null));
    final Schema dogSchema = Schema.createRecord("dog", null, null, false);
    dogSchema.setFields(dogFields);
    final List<Field> fields = new ArrayList<>();
    fields.add(new Field("name", Schema.create(Type.STRING), null, (Object) null));
    fields.add(new Field("age", Schema.create(Type.INT), null, (Object) null));
    fields.add(new Field("balance", Schema.create(Type.DOUBLE), null, (Object) null));
    fields.add(new Field("rate", Schema.create(Type.FLOAT), null, (Object) null));
    fields.add(new Field("debt", Schema.create(Type.BOOLEAN), null, (Object) null));
    fields.add(new Field("nickname", Schema.create(Type.NULL), null, (Object) null));
    fields.add(new Field("binary", Schema.create(Type.BYTES), null, (Object) null));
    fields.add(new Field("fixed", Schema.createFixed("fixed", null, null, 5), null, (Object) null));
    fields.add(new Field("map", Schema.createMap(Schema.create(Type.STRING)), null, (Object) null));
    fields.add(new Field("array", Schema.createArray(Schema.create(Type.LONG)), null, (Object) null));
    fields.add(new Field("account", accountSchema, null, (Object) null));
    fields.add(new Field("desiredbalance", // test union of NULL and other type with no value
    Schema.createUnion(Arrays.asList(Schema.create(Type.NULL), Schema.create(Type.DOUBLE))), null, (Object) null));
    fields.add(new Field("dreambalance", // test union of NULL and other type with a value
    Schema.createUnion(Arrays.asList(Schema.create(Type.NULL), Schema.create(Type.DOUBLE))), null, (Object) null));
    fields.add(new Field("favAnimal", Schema.createUnion(Arrays.asList(catSchema, dogSchema)), null, (Object) null));
    fields.add(new Field("otherFavAnimal", Schema.createUnion(Arrays.asList(catSchema, dogSchema)), null, (Object) null));
    final Schema schema = Schema.createRecord("record", null, null, false);
    schema.setFields(fields);
    final byte[] source;
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final Map<String, String> map = new HashMap<>();
    map.put("greeting", "hello");
    map.put("salutation", "good-bye");
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter);
        final DataFileWriter<GenericRecord> writer = dataFileWriter.create(schema, baos)) {
        final GenericRecord record = new GenericData.Record(schema);
        record.put("name", "John");
        record.put("age", 33);
        record.put("balance", 1234.56D);
        record.put("rate", 0.045F);
        record.put("debt", false);
        record.put("binary", ByteBuffer.wrap("binary".getBytes(StandardCharsets.UTF_8)));
        record.put("fixed", new GenericData.Fixed(Schema.create(Type.BYTES), "fixed".getBytes(StandardCharsets.UTF_8)));
        record.put("map", map);
        record.put("array", Arrays.asList(1L, 2L));
        record.put("dreambalance", 10_000_000.00D);
        final GenericRecord accountRecord = new GenericData.Record(accountSchema);
        accountRecord.put("accountId", 83L);
        accountRecord.put("accountName", "Checking");
        record.put("account", accountRecord);
        final GenericRecord catRecord = new GenericData.Record(catSchema);
        catRecord.put("catTailLength", 1);
        catRecord.put("catName", "Meow");
        record.put("otherFavAnimal", catRecord);
        final GenericRecord dogRecord = new GenericData.Record(dogSchema);
        dogRecord.put("dogTailLength", 14);
        dogRecord.put("dogName", "Fido");
        record.put("favAnimal", dogRecord);
        writer.append(record);
    }
    source = baos.toByteArray();
    try (final InputStream in = new ByteArrayInputStream(source)) {
        final AvroRecordReader reader = new AvroReaderWithEmbeddedSchema(in);
        final RecordSchema recordSchema = reader.getSchema();
        assertEquals(15, recordSchema.getFieldCount());
        assertEquals(RecordFieldType.STRING, recordSchema.getDataType("name").get().getFieldType());
        assertEquals(RecordFieldType.INT, recordSchema.getDataType("age").get().getFieldType());
        assertEquals(RecordFieldType.DOUBLE, recordSchema.getDataType("balance").get().getFieldType());
        assertEquals(RecordFieldType.FLOAT, recordSchema.getDataType("rate").get().getFieldType());
        assertEquals(RecordFieldType.BOOLEAN, recordSchema.getDataType("debt").get().getFieldType());
        assertEquals(RecordFieldType.STRING, recordSchema.getDataType("nickname").get().getFieldType());
        assertEquals(RecordFieldType.ARRAY, recordSchema.getDataType("binary").get().getFieldType());
        assertEquals(RecordFieldType.ARRAY, recordSchema.getDataType("fixed").get().getFieldType());
        assertEquals(RecordFieldType.MAP, recordSchema.getDataType("map").get().getFieldType());
        assertEquals(RecordFieldType.ARRAY, recordSchema.getDataType("array").get().getFieldType());
        assertEquals(RecordFieldType.RECORD, recordSchema.getDataType("account").get().getFieldType());
        assertEquals(RecordFieldType.DOUBLE, recordSchema.getDataType("desiredbalance").get().getFieldType());
        assertEquals(RecordFieldType.DOUBLE, recordSchema.getDataType("dreambalance").get().getFieldType());
        assertEquals(RecordFieldType.CHOICE, recordSchema.getDataType("favAnimal").get().getFieldType());
        assertEquals(RecordFieldType.CHOICE, recordSchema.getDataType("otherFavAnimal").get().getFieldType());
        final Object[] values = reader.nextRecord().getValues();
        assertEquals(15, values.length);
        assertEquals("John", values[0]);
        assertEquals(33, values[1]);
        assertEquals(1234.56D, values[2]);
        assertEquals(0.045F, values[3]);
        assertEquals(false, values[4]);
        assertEquals(null, values[5]);
        assertArrayEquals(toObjectArray("binary".getBytes(StandardCharsets.UTF_8)), (Object[]) values[6]);
        assertArrayEquals(toObjectArray("fixed".getBytes(StandardCharsets.UTF_8)), (Object[]) values[7]);
        assertEquals(map, values[8]);
        assertArrayEquals(new Object[] { 1L, 2L }, (Object[]) values[9]);
        final Map<String, Object> accountValues = new HashMap<>();
        accountValues.put("accountName", "Checking");
        accountValues.put("accountId", 83L);
        final List<RecordField> accountRecordFields = new ArrayList<>();
        accountRecordFields.add(new RecordField("accountId", RecordFieldType.LONG.getDataType(), false));
        accountRecordFields.add(new RecordField("accountName", RecordFieldType.STRING.getDataType(), false));
        final RecordSchema accountRecordSchema = new SimpleRecordSchema(accountRecordFields);
        final Record mapRecord = new MapRecord(accountRecordSchema, accountValues);
        assertEquals(mapRecord, values[10]);
        assertNull(values[11]);
        assertEquals(10_000_000.0D, values[12]);
        final Map<String, Object> dogMap = new HashMap<>();
        dogMap.put("dogName", "Fido");
        dogMap.put("dogTailLength", 14);
        final List<RecordField> dogRecordFields = new ArrayList<>();
        dogRecordFields.add(new RecordField("dogTailLength", RecordFieldType.INT.getDataType(), false));
        dogRecordFields.add(new RecordField("dogName", RecordFieldType.STRING.getDataType(), false));
        final RecordSchema dogRecordSchema = new SimpleRecordSchema(dogRecordFields);
        final Record dogRecord = new MapRecord(dogRecordSchema, dogMap);
        assertEquals(dogRecord, values[13]);
        final Map<String, Object> catMap = new HashMap<>();
        catMap.put("catName", "Meow");
        catMap.put("catTailLength", 1);
        final List<RecordField> catRecordFields = new ArrayList<>();
        catRecordFields.add(new RecordField("catTailLength", RecordFieldType.INT.getDataType(), false));
        catRecordFields.add(new RecordField("catName", RecordFieldType.STRING.getDataType(), false));
        final RecordSchema catRecordSchema = new SimpleRecordSchema(catRecordFields);
        final Record catRecord = new MapRecord(catRecordSchema, catMap);
        assertEquals(catRecord, values[14]);
    }
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) Schema(org.apache.avro.Schema) ArrayList(java.util.ArrayList) Field(org.apache.avro.Schema.Field) Record(org.apache.nifi.serialization.record.Record) MapRecord(org.apache.nifi.serialization.record.MapRecord) GenericRecord(org.apache.avro.generic.GenericRecord) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) DataFileWriter(org.apache.avro.file.DataFileWriter) ByteArrayOutputStream(java.io.ByteArrayOutputStream) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) GenericData(org.apache.avro.generic.GenericData) Test(org.junit.Test)
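
The assertEquals calls above work because MapRecord compares by value: two records with the same schema and the same field values are equal, so the hand-built account, dog, and cat records match the ones the reader produced. Below is a minimal sketch of that property, using only constructors already shown in this test; the field names are illustrative.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;

public class MapRecordEqualitySketch {

    public static void main(final String[] args) {
        final List<RecordField> fields = Arrays.asList(
            new RecordField("accountId", RecordFieldType.LONG.getDataType()),
            new RecordField("accountName", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        // Two independently built value maps with the same entries...
        final Map<String, Object> first = new HashMap<>();
        first.put("accountId", 83L);
        first.put("accountName", "Checking");

        final Map<String, Object> second = new HashMap<>();
        second.put("accountName", "Checking");
        second.put("accountId", 83L);

        // ...produce records that compare equal, which is what the assertions above depend on.
        final Record a = new MapRecord(schema, first);
        final Record b = new MapRecord(schema, second);
        System.out.println(a.equals(b)); // true
    }
}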

Example 94 with MapRecord

Use of org.apache.nifi.serialization.record.MapRecord in project nifi by apache.

From class TestWriteAvroResult, method testDataTypes.

@Test
public void testDataTypes() throws IOException {
    final Schema schema = new Schema.Parser().parse(new File("src/test/resources/avro/datatypes.avsc"));
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final List<RecordField> subRecordFields = Collections.singletonList(new RecordField("field1", RecordFieldType.STRING.getDataType()));
    final RecordSchema subRecordSchema = new SimpleRecordSchema(subRecordFields);
    final DataType subRecordDataType = RecordFieldType.RECORD.getRecordDataType(subRecordSchema);
    final List<RecordField> fields = new ArrayList<>();
    fields.add(new RecordField("string", RecordFieldType.STRING.getDataType()));
    fields.add(new RecordField("int", RecordFieldType.INT.getDataType()));
    fields.add(new RecordField("long", RecordFieldType.LONG.getDataType()));
    fields.add(new RecordField("double", RecordFieldType.DOUBLE.getDataType()));
    fields.add(new RecordField("float", RecordFieldType.FLOAT.getDataType()));
    fields.add(new RecordField("boolean", RecordFieldType.BOOLEAN.getDataType()));
    fields.add(new RecordField("bytes", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType())));
    fields.add(new RecordField("nullOrLong", RecordFieldType.LONG.getDataType()));
    fields.add(new RecordField("array", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.INT.getDataType())));
    fields.add(new RecordField("record", subRecordDataType));
    fields.add(new RecordField("map", RecordFieldType.MAP.getMapDataType(subRecordDataType)));
    final RecordSchema recordSchema = new SimpleRecordSchema(fields);
    final Record innerRecord = new MapRecord(subRecordSchema, Collections.singletonMap("field1", "hello"));
    final Map<String, Object> innerMap = new HashMap<>();
    innerMap.put("key1", innerRecord);
    final Map<String, Object> values = new HashMap<>();
    values.put("string", "hello");
    values.put("int", 8);
    values.put("long", 42L);
    values.put("double", 3.14159D);
    values.put("float", 1.23456F);
    values.put("boolean", true);
    values.put("bytes", AvroTypeUtil.convertByteArray("hello".getBytes()));
    values.put("nullOrLong", null);
    values.put("array", new Integer[] { 1, 2, 3 });
    values.put("record", innerRecord);
    values.put("map", innerMap);
    final Record record = new MapRecord(recordSchema, values);
    final WriteResult writeResult;
    try (final RecordSetWriter writer = createWriter(schema, baos)) {
        writeResult = writer.write(RecordSet.of(record.getSchema(), record));
    }
    verify(writeResult);
    final byte[] data = baos.toByteArray();
    try (final InputStream in = new ByteArrayInputStream(data)) {
        final GenericRecord avroRecord = readRecord(in, schema);
        assertMatch(record, avroRecord);
    }
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) Schema(org.apache.avro.Schema) ArrayList(java.util.ArrayList) ByteArrayOutputStream(java.io.ByteArrayOutputStream) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) WriteResult(org.apache.nifi.serialization.WriteResult) DataType(org.apache.nifi.serialization.record.DataType) Record(org.apache.nifi.serialization.record.Record) GenericRecord(org.apache.avro.generic.GenericRecord) File(java.io.File) Test(org.junit.Test)
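
The verify(writeResult) call above is an abstract hook of this test class, so its body is not shown here. As a hedged illustration of what a WriteResult exposes (not the actual verify implementation), a check might look like the following; the method name is hypothetical.

import java.util.Map;

import org.apache.nifi.serialization.WriteResult;

public class WriteResultSketch {

    // Illustrative check: the record count and the attributes the writer wants added to the FlowFile.
    static void inspect(final WriteResult writeResult) {
        final int recordCount = writeResult.getRecordCount();
        final Map<String, String> attributes = writeResult.getAttributes();
        System.out.println("wrote " + recordCount + " record(s), attributes: " + attributes);
    }
}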

Example 95 with MapRecord

Use of org.apache.nifi.serialization.record.MapRecord in project nifi by apache.

From class TestWriteAvroResult, method testLogicalTypes.

private void testLogicalTypes(Schema schema) throws ParseException, IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final List<RecordField> fields = new ArrayList<>();
    fields.add(new RecordField("timeMillis", RecordFieldType.TIME.getDataType()));
    fields.add(new RecordField("timeMicros", RecordFieldType.TIME.getDataType()));
    fields.add(new RecordField("timestampMillis", RecordFieldType.TIMESTAMP.getDataType()));
    fields.add(new RecordField("timestampMicros", RecordFieldType.TIMESTAMP.getDataType()));
    fields.add(new RecordField("date", RecordFieldType.DATE.getDataType()));
    // Avro decimal is represented as double in NiFi type system.
    fields.add(new RecordField("decimal", RecordFieldType.DOUBLE.getDataType()));
    final RecordSchema recordSchema = new SimpleRecordSchema(fields);
    final String expectedTime = "2017-04-04 14:20:33.789";
    final DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    df.setTimeZone(TimeZone.getTimeZone("gmt"));
    final long timeLong = df.parse(expectedTime).getTime();
    final Map<String, Object> values = new HashMap<>();
    values.put("timeMillis", new Time(timeLong));
    values.put("timeMicros", new Time(timeLong));
    values.put("timestampMillis", new Timestamp(timeLong));
    values.put("timestampMicros", new Timestamp(timeLong));
    values.put("date", new Date(timeLong));
    // Avro decimal is represented as double in NiFi type system.
    final BigDecimal expectedDecimal = new BigDecimal("123.45");
    values.put("decimal", expectedDecimal.doubleValue());
    final Record record = new MapRecord(recordSchema, values);
    try (final RecordSetWriter writer = createWriter(schema, baos)) {
        writer.write(RecordSet.of(record.getSchema(), record));
    }
    final byte[] data = baos.toByteArray();
    try (final InputStream in = new ByteArrayInputStream(data)) {
        final GenericRecord avroRecord = readRecord(in, schema);
        final long secondsSinceMidnight = 33 + (20 * 60) + (14 * 60 * 60);
        final long millisSinceMidnight = (secondsSinceMidnight * 1000L) + 789;
        assertEquals((int) millisSinceMidnight, avroRecord.get("timeMillis"));
        assertEquals(millisSinceMidnight * 1000L, avroRecord.get("timeMicros"));
        assertEquals(timeLong, avroRecord.get("timestampMillis"));
        assertEquals(timeLong * 1000L, avroRecord.get("timestampMicros"));
        assertEquals(17260, avroRecord.get("date"));
        // Double value will be converted into logical decimal if Avro schema is defined as logical decimal.
        final Schema decimalSchema = schema.getField("decimal").schema();
        // A union type doesn't return a logical type directly; find the first logical type defined within the union.
        final LogicalType logicalType = decimalSchema.getLogicalType() != null
            ? decimalSchema.getLogicalType()
            : decimalSchema.getTypes().stream().map(s -> s.getLogicalType()).filter(Objects::nonNull).findFirst().get();
        final BigDecimal decimal = new Conversions.DecimalConversion().fromBytes((ByteBuffer) avroRecord.get("decimal"), decimalSchema, logicalType);
        assertEquals(expectedDecimal, decimal);
    }
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) Schema(org.apache.avro.Schema) ArrayList(java.util.ArrayList) LogicalType(org.apache.avro.LogicalType) Time(java.sql.Time) Timestamp(java.sql.Timestamp) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) Record(org.apache.nifi.serialization.record.Record) MapRecord(org.apache.nifi.serialization.record.MapRecord) GenericRecord(org.apache.avro.generic.GenericRecord) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Date(java.sql.Date) BigDecimal(java.math.BigDecimal) Conversions(org.apache.avro.Conversions) SimpleDateFormat(java.text.SimpleDateFormat) DateFormat(java.text.DateFormat)
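
As a sanity check on the expected values asserted above (not part of the NiFi tests): the date assertion relies on 2017-04-04 being 17,260 days after the Unix epoch, and the time assertions on 14:20:33.789 GMT being 51,633,789 milliseconds after midnight. The short java.time sketch below reproduces both numbers.

import java.time.LocalDate;
import java.time.LocalTime;

public class LogicalTypeMath {

    public static void main(final String[] args) {
        // Avro's 'date' logical type counts days since 1970-01-01.
        final long epochDay = LocalDate.of(2017, 4, 4).toEpochDay(); // 17260

        // 'time-millis' counts milliseconds since midnight.
        final long millisOfDay = LocalTime.of(14, 20, 33, 789_000_000).toNanoOfDay() / 1_000_000L; // 51633789

        System.out.println(epochDay + " " + millisOfDay);
    }
}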

Aggregations

MapRecord (org.apache.nifi.serialization.record.MapRecord): 101
RecordSchema (org.apache.nifi.serialization.record.RecordSchema): 89
SimpleRecordSchema (org.apache.nifi.serialization.SimpleRecordSchema): 88
Record (org.apache.nifi.serialization.record.Record): 82
Test (org.junit.Test): 79
HashMap (java.util.HashMap): 73
RecordField (org.apache.nifi.serialization.record.RecordField): 71
ArrayList (java.util.ArrayList): 59
LinkedHashMap (java.util.LinkedHashMap): 29
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 20
SchemaNameAsAttribute (org.apache.nifi.schema.access.SchemaNameAsAttribute): 17
DataType (org.apache.nifi.serialization.record.DataType): 12
ComponentLog (org.apache.nifi.logging.ComponentLog): 11
Date (java.sql.Date): 7
CSVFormat (org.apache.commons.csv.CSVFormat): 7
SchemaValidationResult (org.apache.nifi.serialization.record.validation.SchemaValidationResult): 7
Time (java.sql.Time): 6
Timestamp (java.sql.Timestamp): 6
DateFormat (java.text.DateFormat): 6
Map (java.util.Map): 6