Usage example of org.apache.nifi.repository.schema.SimpleRecordField from the Apache NiFi project, taken from the class TestSchemaRecordReaderWriter, method testFieldRemovedFromSchema.
@Test
public void testFieldRemovedFromSchema() throws IOException {
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    try {
        // Start from the V1 provenance event schema and strip out both attribute-map
        // fields, producing a schema that is missing fields a reader normally expects.
        final RecordSchema fullSchema = ProvenanceEventSchema.PROVENANCE_EVENT_SCHEMA_V1;
        final List<RecordField> reducedFields = new ArrayList<>(fullSchema.getFields());
        reducedFields.remove(new SimpleRecordField(EventFieldNames.UPDATED_ATTRIBUTES, FieldType.STRING, Repetition.EXACTLY_ONE));
        reducedFields.remove(new SimpleRecordField(EventFieldNames.PREVIOUS_ATTRIBUTES, FieldType.STRING, Repetition.EXACTLY_ONE));
        final RecordSchema reducedSchema = new RecordSchema(reducedFields);

        // The writer must both advertise the reduced schema in its header and build its
        // records against it, so that neither attribute map is ever serialized.
        final ByteArraySchemaRecordWriter writer = new ByteArraySchemaRecordWriter(journalFile, idGenerator, tocWriter, false, 0) {
            @Override
            public void writeHeader(final long firstEventId, final DataOutputStream out) throws IOException {
                // Length-prefixed schema bytes, matching the superclass header layout.
                final ByteArrayOutputStream headerBytes = new ByteArrayOutputStream();
                reducedSchema.writeTo(headerBytes);
                out.writeInt(headerBytes.size());
                headerBytes.writeTo(out);
            }

            @Override
            protected Record createRecord(final ProvenanceEventRecord event, final long eventId) {
                final RecordSchema contentClaimSchema = new RecordSchema(reducedSchema.getField(EventFieldNames.CONTENT_CLAIM).getSubFields());
                return new EventRecord(event, eventId, reducedSchema, contentClaimSchema);
            }
        };

        try {
            writer.writeHeader(1L);
            writer.writeRecord(createEvent());
            writer.writeRecord(createEvent());
        } finally {
            writer.close();
        }
    } finally {
        tocWriter.close();
    }

    // Read the two events back and verify they carry the expected information.
    try (final InputStream in = new FileInputStream(journalFile);
         final TocReader tocReader = new StandardTocReader(tocFile);
         final RecordReader reader = createReader(in, journalFile.getName(), tocReader, 10000)) {

        for (int i = 0; i < 2; i++) {
            final StandardProvenanceEventRecord event = reader.nextRecord();
            assertNotNull(event);
            assertEquals(ProvenanceEventType.RECEIVE, event.getEventType());
            // Even though no updated-attributes map was persisted, the Provenance Event
            // Builder supplies an empty map, so the getter must not return null.
            assertNotNull(event.getUpdatedAttributes());
            assertTrue(event.getUpdatedAttributes().isEmpty());
        }
    }
}
Usage example of org.apache.nifi.repository.schema.SimpleRecordField from the Apache NiFi project, taken from the class TestSchemaRecordReaderWriter, method testAddOneRecordReadTwice.
@Test
public void testAddOneRecordReadTwice() throws IOException {
    // Extend the writer's schema with one extra string field and supply a value for it.
    final RecordField extraField = new SimpleRecordField("Unit Test Field", FieldType.STRING, Repetition.EXACTLY_ONE);
    final Map<RecordField, Object> extraValues = new HashMap<>();
    extraValues.put(extraField, "hello");

    try (final ByteArraySchemaRecordWriter writer = createSchemaWriter(fields -> fields.add(extraField), extraValues)) {
        writer.writeHeader(1L);
        writer.writeRecord(createEvent());
    }

    try (final InputStream in = new FileInputStream(journalFile);
         final TocReader tocReader = new StandardTocReader(tocFile);
         final RecordReader reader = createReader(in, journalFile.getName(), tocReader, 10000)) {
        // Exactly one event was written: the first read yields it, the second yields null.
        final ProvenanceEventRecord firstRead = reader.nextRecord();
        assertNotNull(firstRead);
        final ProvenanceEventRecord secondRead = reader.nextRecord();
        assertNull(secondRead);
    }
}
Usage example of org.apache.nifi.repository.schema.SimpleRecordField from the Apache NiFi project, taken from the class EventIdFirstHeaderSchema, method buildSchema.
private static RecordSchema buildSchema() {
    // Header schema: two mandatory long fields followed by four repeatable string lists.
    final List<RecordField> headerFields = new ArrayList<>();
    headerFields.add(new SimpleRecordField(FieldNames.FIRST_EVENT_ID, FieldType.LONG, Repetition.EXACTLY_ONE));
    headerFields.add(new SimpleRecordField(FieldNames.TIMESTAMP_OFFSET, FieldType.LONG, Repetition.EXACTLY_ONE));
    // Field order matters for the serialized layout, so the array preserves it.
    for (final String repeatedName : new String[] {FieldNames.COMPONENT_IDS, FieldNames.COMPONENT_TYPES, FieldNames.QUEUE_IDS, FieldNames.EVENT_TYPES}) {
        headerFields.add(new SimpleRecordField(repeatedName, FieldType.STRING, Repetition.ZERO_OR_MORE));
    }
    return new RecordSchema(headerFields);
}
Usage example of org.apache.nifi.repository.schema.SimpleRecordField from the Apache NiFi project, taken from the class SchemaSwapSerializer, method serializeFlowFiles.
@Override
public void serializeFlowFiles(final List<FlowFileRecord> toSwap, final FlowFileQueue queue, final String swapLocation, final OutputStream out) throws IOException {
    // The schema goes first so a reader can decode everything that follows.
    schema.writeTo(out);

    // Summarize the swapped FlowFiles: total content size, highest FlowFile id,
    // and the resource claims backing any non-null content claims.
    final long totalBytes = toSwap.stream().mapToLong(FlowFileRecord::getSize).sum();
    final long largestId = toSwap.stream().mapToLong(FlowFileRecord::getId).max().orElse(-1L);
    final List<ResourceClaim> claims = toSwap.stream()
        .map(FlowFileRecord::getContentClaim)
        .filter(claim -> claim != null)
        .map(ContentClaim::getResourceClaim)
        .collect(Collectors.toList());

    final QueueSize queueSize = new QueueSize(toSwap.size(), totalBytes);
    final SwapSummary swapSummary = new StandardSwapSummary(queueSize, largestId, claims);
    final Record summaryRecord = new SwapSummaryFieldMap(swapSummary, queue.getIdentifier(), SwapSchema.SWAP_SUMMARY_SCHEMA_V1);

    final List<Record> flowFileRecords = toSwap.stream()
        .map(flowFile -> new FlowFileRecordFieldMap(flowFile, flowFileSchema))
        .collect(Collectors.toList());

    // Wrap the summary and the FlowFile contents in a simple two-field record.
    final RecordField summaryField = new SimpleRecordField(SwapSchema.SWAP_SUMMARY, FieldType.COMPLEX, Repetition.EXACTLY_ONE);
    final RecordField contentsField = new ComplexRecordField(SwapSchema.FLOWFILE_CONTENTS, Repetition.ZERO_OR_MORE, FlowFileSchema.FLOWFILE_SCHEMA_V2.getFields());

    final List<RecordField> recordFields = new ArrayList<>(2);
    recordFields.add(summaryField);
    recordFields.add(contentsField);

    // LinkedHashMap keeps the summary/contents ordering stable in the output record.
    final Map<RecordField, Object> recordValues = new LinkedHashMap<>();
    recordValues.put(summaryField, summaryRecord);
    recordValues.put(contentsField, flowFileRecords);

    final Record swapFileRecord = new FieldMapRecord(recordValues, new RecordSchema(recordFields));
    new SchemaRecordWriter().writeRecord(swapFileRecord, out);
    out.flush();
}
Usage example of org.apache.nifi.repository.schema.SimpleRecordField from the Apache NiFi project, taken from the class SchemaRepositoryRecordSerde, method swapOutRecord.
private RepositoryRecord swapOutRecord(final Record record) {
    // Look up the queue identifier and swap location with throwaway SimpleRecordField
    // keys — presumably field equality is name-based, so these match the stored fields
    // (TODO confirm against SimpleRecordField.equals).
    final Long recordId = (Long) record.getFieldValue(RepositoryRecordSchema.RECORD_ID_FIELD);
    final String queueId = (String) record.getFieldValue(
        new SimpleRecordField(RepositoryRecordSchema.QUEUE_IDENTIFIER, FieldType.STRING, Repetition.EXACTLY_ONE));
    final String swapLocation = (String) record.getFieldValue(
        new SimpleRecordField(RepositoryRecordSchema.SWAP_LOCATION, FieldType.STRING, Repetition.EXACTLY_ONE));

    final FlowFileQueue queue = getFlowFileQueue(queueId);
    // Only the id is reconstructed here; the remaining FlowFile state lives in the swap file.
    final FlowFileRecord flowFile = new StandardFlowFileRecord.Builder().id(recordId).build();
    return new StandardRepositoryRecord(queue, flowFile, swapLocation);
}
Aggregations