Use of org.apache.nifi.repository.schema.RecordSchema in project nifi by apache.
The class SchemaSwapDeserializer, method getSwapSummary:
@Override
public SwapSummary getSwapSummary(final DataInputStream in, final String swapLocation, final ResourceClaimManager claimManager) throws IOException {
    final RecordSchema schema = RecordSchema.readFrom(in);

    // Read only the swap summary portion of the swap file, using a schema that
    // contains just the SWAP_SUMMARY field pulled from the full schema.
    final List<RecordField> summaryFields = schema.getField(SwapSchema.SWAP_SUMMARY).getSubFields();
    final RecordField summaryRecordField = new ComplexRecordField(SwapSchema.SWAP_SUMMARY, Repetition.EXACTLY_ONE, summaryFields);
    final RecordSchema summarySchema = new RecordSchema(Collections.singletonList(summaryRecordField));

    final Record summaryRecordParent = SchemaRecordReader.fromSchema(summarySchema).readRecord(in);
    final Record summaryRecord = (Record) summaryRecordParent.getFieldValue(SwapSchema.SWAP_SUMMARY);
    return SwapSummaryFieldMap.getSwapSummary(summaryRecord, claimManager);
}
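For orientation, a minimal sketch of how this method might be driven. The deserializer construction, the file name, and the claimManager instance are assumptions rather than anything shown above, and any format header that the surrounding swap manager consumes before delegating is omitted here:

final SwapDeserializer deserializer = new SchemaSwapDeserializer();
try (final DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream("queue-1.swap")))) {
    // Reads just the schema and the summary record; the FlowFile contents are not deserialized
    final SwapSummary summary = deserializer.getSwapSummary(in, "queue-1.swap", claimManager);
    System.out.println(summary.getQueueSize().getObjectCount() + " FlowFiles swapped out");
}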
Use of org.apache.nifi.repository.schema.RecordSchema in project nifi by apache.
The class SchemaSwapSerializer, method serializeFlowFiles:
@Override
public void serializeFlowFiles(final List<FlowFileRecord> toSwap, final FlowFileQueue queue, final String swapLocation, final OutputStream out) throws IOException {
    schema.writeTo(out);

    // Tally content size, max FlowFile ID, and Resource Claims so that a
    // summary record can be written alongside the FlowFile contents.
    long contentSize = 0L;
    long maxFlowFileId = -1L;
    final List<ResourceClaim> resourceClaims = new ArrayList<>();
    for (final FlowFileRecord flowFile : toSwap) {
        contentSize += flowFile.getSize();
        if (flowFile.getId() > maxFlowFileId) {
            maxFlowFileId = flowFile.getId();
        }

        final ContentClaim contentClaim = flowFile.getContentClaim();
        if (contentClaim != null) {
            resourceClaims.add(contentClaim.getResourceClaim());
        }
    }

    final QueueSize queueSize = new QueueSize(toSwap.size(), contentSize);
    final SwapSummary swapSummary = new StandardSwapSummary(queueSize, maxFlowFileId, resourceClaims);
    final Record summaryRecord = new SwapSummaryFieldMap(swapSummary, queue.getIdentifier(), SwapSchema.SWAP_SUMMARY_SCHEMA_V1);

    final List<Record> flowFileRecords = toSwap.stream()
        .map(flowFile -> new FlowFileRecordFieldMap(flowFile, flowFileSchema))
        .collect(Collectors.toList());

    // Create a simple record to hold the summary and the FlowFile contents
    final RecordField summaryField = new SimpleRecordField(SwapSchema.SWAP_SUMMARY, FieldType.COMPLEX, Repetition.EXACTLY_ONE);
    final RecordField contentsField = new ComplexRecordField(SwapSchema.FLOWFILE_CONTENTS, Repetition.ZERO_OR_MORE, FlowFileSchema.FLOWFILE_SCHEMA_V2.getFields());
    final List<RecordField> fields = new ArrayList<>(2);
    fields.add(summaryField);
    fields.add(contentsField);

    final Map<RecordField, Object> swapFileMap = new LinkedHashMap<>();
    swapFileMap.put(summaryField, summaryRecord);
    swapFileMap.put(contentsField, flowFileRecords);
    final Record swapFileRecord = new FieldMapRecord(swapFileMap, new RecordSchema(fields));

    final SchemaRecordWriter writer = new SchemaRecordWriter();
    writer.writeRecord(swapFileRecord, out);
    out.flush();
}
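As a rough usage sketch: the queue, the list of FlowFiles, and the target file below are assumptions supplied by the caller, and any header the surrounding swap manager writes before delegating is omitted:

final SwapSerializer serializer = new SchemaSwapSerializer();
try (final OutputStream out = new BufferedOutputStream(new FileOutputStream(swapFile))) {
    // Writes the schema, then a single parent record holding the summary plus all FlowFiles
    serializer.serializeFlowFiles(flowFilesToSwap, queue, swapFile.getAbsolutePath(), out);
}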
Use of org.apache.nifi.repository.schema.RecordSchema in project nifi by apache.
The class SchemaSwapDeserializer, method deserializeFlowFiles:
@Override
@SuppressWarnings("unchecked")
public SwapContents deserializeFlowFiles(final DataInputStream in, final String swapLocation, final FlowFileQueue queue, final ResourceClaimManager claimManager) throws IOException {
    final RecordSchema schema = RecordSchema.readFrom(in);
    final SchemaRecordReader reader = SchemaRecordReader.fromSchema(schema);
    final Record parentRecord = reader.readRecord(in);

    final List<Record> flowFileRecords = (List<Record>) parentRecord.getFieldValue(SwapSchema.FLOWFILE_CONTENTS);
    final List<FlowFileRecord> flowFiles = new ArrayList<>(flowFileRecords.size());
    for (final Record record : flowFileRecords) {
        flowFiles.add(FlowFileRecordFieldMap.getFlowFile(record, claimManager));
    }

    final Record summaryRecord = (Record) parentRecord.getFieldValue(SwapSchema.SWAP_SUMMARY);
    final SwapSummary swapSummary = SwapSummaryFieldMap.getSwapSummary(summaryRecord, claimManager);
    return new StandardSwapContents(swapSummary, flowFiles);
}
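Together with serializeFlowFiles above, this completes a round trip. A minimal sketch, again assuming the swap file, queue, and claimManager come from the surrounding code:

try (final DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(swapFile)))) {
    final SwapContents contents = new SchemaSwapDeserializer()
        .deserializeFlowFiles(in, swapFile.getAbsolutePath(), queue, claimManager);
    final List<FlowFileRecord> restored = contents.getFlowFiles();
    final SwapSummary summary = contents.getSummary();
}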
Use of org.apache.nifi.repository.schema.RecordSchema in project nifi by apache.
The class SchemaRepositoryRecordSerde, method serializeRecord:
@Override
public void serializeRecord(final RepositoryRecord record, final DataOutputStream out) throws IOException {
    // Choose the V2 schema that matches the record's update type before delegating.
    final RecordSchema schema;
    switch (record.getType()) {
        case CREATE:
        case UPDATE:
            schema = RepositoryRecordSchema.CREATE_OR_UPDATE_SCHEMA_V2;
            break;
        case CONTENTMISSING:
        case DELETE:
            schema = RepositoryRecordSchema.DELETE_SCHEMA_V2;
            break;
        case SWAP_IN:
            schema = RepositoryRecordSchema.SWAP_IN_SCHEMA_V2;
            break;
        case SWAP_OUT:
            schema = RepositoryRecordSchema.SWAP_OUT_SCHEMA_V2;
            break;
        default:
            // All update types are covered above; this guards against future additions.
            throw new IllegalArgumentException("Received Repository Record with unknown Update Type: " + record.getType());
    }

    serializeRecord(record, out, schema, RepositoryRecordSchema.REPOSITORY_RECORD_SCHEMA_V2);
}
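A rough sketch of exercising the dispatch above; the serde instance and the repository record are assumptions about the surrounding code, not anything shown in the snippet:

final ByteArrayOutputStream baos = new ByteArrayOutputStream();
try (final DataOutputStream out = new DataOutputStream(baos)) {
    // Picks CREATE_OR_UPDATE, DELETE, SWAP_IN, or SWAP_OUT schema from the record's type
    serde.serializeRecord(repositoryRecord, out);
}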
Use of org.apache.nifi.repository.schema.RecordSchema in project nifi by apache.
The class TestSchemaRecordReaderWriter, method createSchemaWriter:
/**
 * Creates a SchemaRecordWriter that uses a modified schema
 *
 * @param fieldModifier the callback for modifying the schema
 * @param fieldsToAdd additional field/value pairs to include in every record that is written
 * @return a SchemaRecordWriter that uses the modified schema
 * @throws IOException if unable to create the writer
 */
private ByteArraySchemaRecordWriter createSchemaWriter(final Consumer<List<RecordField>> fieldModifier, final Map<RecordField, Object> fieldsToAdd) throws IOException {
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);

    // Create a schema that has the fields modified
    final RecordSchema schemaV1 = ProvenanceEventSchema.PROVENANCE_EVENT_SCHEMA_V1;
    final List<RecordField> fields = new ArrayList<>(schemaV1.getFields());
    fieldModifier.accept(fields);

    final RecordSchema recordSchema = new RecordSchema(fields);
    final RecordSchema contentClaimSchema = new RecordSchema(recordSchema.getField(EventFieldNames.CONTENT_CLAIM).getSubFields());

    final ByteArraySchemaRecordWriter writer = new ByteArraySchemaRecordWriter(journalFile, idGenerator, tocWriter, false, 0) {
        @Override
        public void writeHeader(long firstEventId, DataOutputStream out) throws IOException {
            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            recordSchema.writeTo(baos);
            out.writeInt(baos.size());
            baos.writeTo(out);
        }

        @Override
        protected Record createRecord(final ProvenanceEventRecord event, final long eventId) {
            final Map<RecordField, Object> values = new HashMap<>();
            final EventRecord eventRecord = new EventRecord(event, eventId, recordSchema, contentClaimSchema);
            for (final RecordField field : recordSchema.getFields()) {
                final Object value = eventRecord.getFieldValue(field);
                values.put(field, value);
            }

            values.putAll(fieldsToAdd);
            return new FieldMapRecord(values, recordSchema);
        }
    };

    return writer;
}
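A plausible way a test might use this helper, adding one extra field to the schema and supplying its value for every record written; the field name and value are illustrative, not taken from the test above:

final RecordField extraField = new SimpleRecordField("Extra Field", FieldType.STRING, Repetition.ZERO_OR_ONE);
final ByteArraySchemaRecordWriter writer = createSchemaWriter(
    fields -> fields.add(extraField),
    Collections.singletonMap(extraField, "extra value"));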