Example usage of org.apache.nifi.serialization.WriteResult in the Apache NiFi project:
the completeFlowFile method of the ValidateRecord class.
/**
 * Finishes the record set being written, closes the writer, and transfers the
 * FlowFile to the given relationship with updated attributes.
 *
 * @param session      the ProcessSession used to update and transfer the FlowFile
 * @param flowFile     the FlowFile whose content has been written by {@code writer}
 * @param writer       the RecordSetWriter to finalize and close
 * @param relationship the Relationship to which the FlowFile is transferred
 * @param details      provenance routing details
 * @throws IOException if finishing the record set or closing the writer fails
 */
private void completeFlowFile(final ProcessSession session, final FlowFile flowFile, final RecordSetWriter writer, final Relationship relationship, final String details) throws IOException {
    // Finish the record set before closing so that any footer/summary data is flushed
    // and the writer can report the final record count and attributes.
    final WriteResult writeResult = writer.finishRecordSet();
    writer.close();

    final Map<String, String> attributes = new HashMap<>(writeResult.getAttributes());
    attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
    attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());

    // FlowFiles are immutable: putAllAttributes returns a NEW FlowFile reference.
    // The original snippet discarded that return value and transferred the stale
    // reference; the updated FlowFile must be used for transfer and provenance.
    final FlowFile updatedFlowFile = session.putAllAttributes(flowFile, attributes);
    session.transfer(updatedFlowFile, relationship);
    session.getProvenanceReporter().route(updatedFlowFile, relationship, details);
}
Example usage of org.apache.nifi.serialization.WriteResult in the Apache NiFi project:
the testDataTypes method of the TestWriteAvroResult class.
/**
 * Round-trip test covering every supported data type: builds a Record whose
 * schema mixes scalars, bytes, a nullable long, an array, a nested record,
 * and a map of records; writes it through the Avro writer; then reads the
 * serialized bytes back as a GenericRecord and asserts field-by-field equality.
 */
@Test
public void testDataTypes() throws IOException {
    final Schema avroSchema = new Schema.Parser().parse(new File("src/test/resources/avro/datatypes.avsc"));
    final ByteArrayOutputStream serialized = new ByteArrayOutputStream();

    // Nested record type with a single String field; reused both for the
    // "record" field and as the value type of the "map" field.
    final List<RecordField> nestedFields = Collections.singletonList(new RecordField("field1", RecordFieldType.STRING.getDataType()));
    final RecordSchema nestedSchema = new SimpleRecordSchema(nestedFields);
    final DataType nestedRecordType = RecordFieldType.RECORD.getRecordDataType(nestedSchema);

    // Top-level schema exercising one field per data type of interest.
    final List<RecordField> topLevelFields = new ArrayList<>();
    topLevelFields.add(new RecordField("string", RecordFieldType.STRING.getDataType()));
    topLevelFields.add(new RecordField("int", RecordFieldType.INT.getDataType()));
    topLevelFields.add(new RecordField("long", RecordFieldType.LONG.getDataType()));
    topLevelFields.add(new RecordField("double", RecordFieldType.DOUBLE.getDataType()));
    topLevelFields.add(new RecordField("float", RecordFieldType.FLOAT.getDataType()));
    topLevelFields.add(new RecordField("boolean", RecordFieldType.BOOLEAN.getDataType()));
    topLevelFields.add(new RecordField("bytes", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType())));
    topLevelFields.add(new RecordField("nullOrLong", RecordFieldType.LONG.getDataType()));
    topLevelFields.add(new RecordField("array", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.INT.getDataType())));
    topLevelFields.add(new RecordField("record", nestedRecordType));
    topLevelFields.add(new RecordField("map", RecordFieldType.MAP.getMapDataType(nestedRecordType)));
    final RecordSchema topLevelSchema = new SimpleRecordSchema(topLevelFields);

    final Record nestedRecord = new MapRecord(nestedSchema, Collections.singletonMap("field1", "hello"));
    final Map<String, Object> recordMap = new HashMap<>();
    recordMap.put("key1", nestedRecord);

    // Populate one value per field, including a null for the nullable long.
    final Map<String, Object> fieldValues = new HashMap<>();
    fieldValues.put("string", "hello");
    fieldValues.put("int", 8);
    fieldValues.put("long", 42L);
    fieldValues.put("double", 3.14159D);
    fieldValues.put("float", 1.23456F);
    fieldValues.put("boolean", true);
    fieldValues.put("bytes", AvroTypeUtil.convertByteArray("hello".getBytes()));
    fieldValues.put("nullOrLong", null);
    fieldValues.put("array", new Integer[] { 1, 2, 3 });
    fieldValues.put("record", nestedRecord);
    fieldValues.put("map", recordMap);
    final Record record = new MapRecord(topLevelSchema, fieldValues);

    // Serialize the record; try-with-resources guarantees the writer is closed.
    final WriteResult writeResult;
    try (final RecordSetWriter writer = createWriter(avroSchema, serialized)) {
        writeResult = writer.write(RecordSet.of(record.getSchema(), record));
    }
    verify(writeResult);

    // Read the bytes back with the plain Avro reader and compare to the input.
    try (final InputStream in = new ByteArrayInputStream(serialized.toByteArray())) {
        final GenericRecord avroRecord = readRecord(in, avroSchema);
        assertMatch(record, avroRecord);
    }
}
Aggregations