Use of org.apache.nifi.repository.schema.FieldMapRecord in the Apache NiFi project.
Example from class EventIdFirstSchemaRecordWriter, method writeHeader.
@Override
protected synchronized void writeHeader(final long firstEventId, final DataOutputStream out) throws IOException {
    // Each schema is serialized into a scratch buffer first so that its byte length
    // can be written ahead of its content (length-prefixed layout).
    final ByteArrayOutputStream scratch = new ByteArrayOutputStream();

    // Length-prefixed event schema.
    eventSchema.writeTo(scratch);
    out.writeInt(scratch.size());
    scratch.writeTo(out);

    // Length-prefixed header schema, reusing the same buffer.
    scratch.reset();
    headerSchema.writeTo(scratch);
    out.writeInt(scratch.size());
    scratch.writeTo(out);

    // Record the base values that are also embedded in the header record below.
    // NOTE(review): these presumably serve as offsets for compact per-event
    // encoding elsewhere in the writer — confirm against the rest of the class.
    this.firstEventId = firstEventId;
    this.systemTimeOffset = System.currentTimeMillis();

    // Assemble the header record's field values and serialize it.
    final Map<String, Object> headerValues = new HashMap<>();
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.FIRST_EVENT_ID, firstEventId);
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.TIMESTAMP_OFFSET, systemTimeOffset);
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.COMPONENT_IDS, idLookup.getComponentIdentifiers());
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.COMPONENT_TYPES, idLookup.getComponentTypes());
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.QUEUE_IDS, idLookup.getQueueIdentifiers());
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.EVENT_TYPES, eventTypeNames);

    schemaRecordWriter.writeRecord(new FieldMapRecord(headerSchema, headerValues), out);
}
Use of org.apache.nifi.repository.schema.FieldMapRecord in the Apache NiFi project.
Example from class LookupTableEventRecord, method createLookupValue.
/**
 * Encodes a string value for the lookup-table schema: a null value becomes a
 * NO_VALUE marker record, a value present in the lookup map is stored as its
 * compact integer index, and anything else is written out as the literal string.
 */
private NamedValue createLookupValue(final String literalValue, final Map<String, Integer> lookup) {
    if (literalValue == null) {
        // No value at all: emit the explicit NO_VALUE marker.
        final Map<RecordField, Object> values = Collections.singletonMap(LookupTableEventRecordFields.NO_VALUE, EventFieldNames.NO_VALUE);
        return new NamedValue(EventFieldNames.NO_VALUE, new FieldMapRecord(values, LookupTableEventSchema.NO_VALUE_SCHEMA));
    }

    final Integer index = lookup.get(literalValue);
    if (index != null) {
        // Known value: reference it by its table index instead of repeating the string.
        final Map<RecordField, Object> values = Collections.singletonMap(LookupTableEventRecordFields.LOOKUP_VALUE, index);
        return new NamedValue(EventFieldNames.LOOKUP_VALUE, new FieldMapRecord(values, LookupTableEventSchema.LOOKUP_VALUE_SCHEMA));
    }

    // Unknown value: fall back to storing the literal string itself.
    final Map<RecordField, Object> values = Collections.singletonMap(LookupTableEventRecordFields.EXPLICIT_STRING, literalValue);
    return new NamedValue(EventFieldNames.EXPLICIT_VALUE, new FieldMapRecord(values, LookupTableEventSchema.EXPLICIT_STRING_SCHEMA));
}
Use of org.apache.nifi.repository.schema.FieldMapRecord in the Apache NiFi project.
Example from class SchemaSwapSerializer, method serializeFlowFiles.
@Override
public void serializeFlowFiles(final List<FlowFileRecord> toSwap, final FlowFileQueue queue, final String swapLocation, final OutputStream out) throws IOException {
    // The schema is written first so that a reader can interpret everything that follows.
    schema.writeTo(out);

    // Tally the total content size, the largest FlowFile id, and the resource
    // claims referenced by the FlowFiles being swapped out.
    long totalBytes = 0L;
    long largestId = -1L;
    final List<ResourceClaim> claims = new ArrayList<>();
    for (final FlowFileRecord flowFile : toSwap) {
        totalBytes += flowFile.getSize();
        largestId = Math.max(largestId, flowFile.getId());

        final ContentClaim contentClaim = flowFile.getContentClaim();
        if (contentClaim != null) {
            claims.add(contentClaim.getResourceClaim());
        }
    }

    final SwapSummary swapSummary = new StandardSwapSummary(new QueueSize(toSwap.size(), totalBytes), largestId, claims);
    final Record summaryRecord = new SwapSummaryFieldMap(swapSummary, queue.getIdentifier(), SwapSchema.SWAP_SUMMARY_SCHEMA_V1);

    final List<Record> flowFileRecords = toSwap.stream()
            .map(flowFile -> new FlowFileRecordFieldMap(flowFile, flowFileSchema))
            .collect(Collectors.toList());

    // Wrap the summary and the per-FlowFile records in a single top-level record.
    // NOTE(review): contentsField is declared against FLOWFILE_SCHEMA_V2 while the
    // records are built from the flowFileSchema field — presumably the same schema;
    // confirm against the rest of the class.
    final RecordField summaryField = new SimpleRecordField(SwapSchema.SWAP_SUMMARY, FieldType.COMPLEX, Repetition.EXACTLY_ONE);
    final RecordField contentsField = new ComplexRecordField(SwapSchema.FLOWFILE_CONTENTS, Repetition.ZERO_OR_MORE, FlowFileSchema.FLOWFILE_SCHEMA_V2.getFields());

    final List<RecordField> topLevelFields = new ArrayList<>(2);
    topLevelFields.add(summaryField);
    topLevelFields.add(contentsField);

    // Insertion order matters for serialization, so a LinkedHashMap is used.
    final Map<RecordField, Object> topLevelValues = new LinkedHashMap<>();
    topLevelValues.put(summaryField, summaryRecord);
    topLevelValues.put(contentsField, flowFileRecords);

    final Record swapFileRecord = new FieldMapRecord(topLevelValues, new RecordSchema(topLevelFields));
    new SchemaRecordWriter().writeRecord(swapFileRecord, out);
    out.flush();
}
Use of org.apache.nifi.repository.schema.FieldMapRecord in the Apache NiFi project.
Example from class TestSchemaRecordReaderWriter, method createSchemaWriter.
/**
 * Creates a SchemaRecordWriter that uses a modified schema.
 *
 * <p>Test helper: it copies the V1 provenance-event schema, lets the caller mutate the
 * field list, and returns a writer whose header and records are built from that modified
 * schema, with {@code fieldsToAdd} overlaid onto every record's field values.
 *
 * @param fieldModifier the callback for modifying the schema's field list in place
 * @param fieldsToAdd extra field/value pairs merged into every record (overriding
 *        any value the event itself produced for the same field)
 * @return a SchemaRecordWriter that uses the modified schema
 * @throws IOException if unable to create the writer
 */
private ByteArraySchemaRecordWriter createSchemaWriter(final Consumer<List<RecordField>> fieldModifier, final Map<RecordField, Object> fieldsToAdd) throws IOException {
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);

    // Create a schema that has the fields modified: start from V1, apply the callback.
    final RecordSchema schemaV1 = ProvenanceEventSchema.PROVENANCE_EVENT_SCHEMA_V1;
    final List<RecordField> fields = new ArrayList<>(schemaV1.getFields());
    fieldModifier.accept(fields);

    final RecordSchema recordSchema = new RecordSchema(fields);
    // Sub-schema for the CONTENT_CLAIM field, needed to build each EventRecord below.
    final RecordSchema contentClaimSchema = new RecordSchema(recordSchema.getField(EventFieldNames.CONTENT_CLAIM).getSubFields());

    final ByteArraySchemaRecordWriter writer = new ByteArraySchemaRecordWriter(journalFile, idGenerator, tocWriter, false, 0) {
        @Override
        public void writeHeader(long firstEventId, DataOutputStream out) throws IOException {
            // Write only the modified schema, length-prefixed, as the file header
            // (replaces whatever header the superclass would normally write).
            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            recordSchema.writeTo(baos);
            out.writeInt(baos.size());
            baos.writeTo(out);
        }

        @Override
        protected Record createRecord(final ProvenanceEventRecord event, final long eventId) {
            // Pull each modified-schema field's value out of the event, then overlay
            // the caller-supplied extras (putAll last, so they win on collisions).
            final Map<RecordField, Object> values = new HashMap<>();
            final EventRecord eventRecord = new EventRecord(event, eventId, recordSchema, contentClaimSchema);
            for (final RecordField field : recordSchema.getFields()) {
                final Object value = eventRecord.getFieldValue(field);
                values.put(field, value);
            }
            values.putAll(fieldsToAdd);
            return new FieldMapRecord(values, recordSchema);
        }
    };
    return writer;
}
Use of org.apache.nifi.repository.schema.FieldMapRecord in the Apache NiFi project.
Example from class LookupTableEventRecord, method createContentClaimRecord.
/**
 * Builds the record for a content claim. If any of the claim's coordinates
 * (container, section, identifier) is missing, a NO_VALUE marker record is
 * produced instead of an explicit claim record.
 */
private static Record createContentClaimRecord(final RecordSchema contentClaimSchema, final String container, final String section, final String identifier, final Long offset, final Long size) {
    final boolean claimMissing = container == null || section == null || identifier == null;
    if (claimMissing) {
        // Incomplete claim: emit the single-field NO_VALUE marker record.
        final RecordField noValueField = contentClaimSchema.getField(EventFieldNames.NO_VALUE);
        final Map<RecordField, Object> values = Collections.singletonMap(LookupTableEventRecordFields.NO_VALUE, EventFieldNames.NO_VALUE);
        return new FieldMapRecord(values, new RecordSchema(Collections.singletonList(noValueField)));
    }

    // All coordinates present: write the claim explicitly, field by field.
    final Map<RecordField, Object> claimValues = new HashMap<>();
    claimValues.put(EventRecordFields.CONTENT_CLAIM_CONTAINER, container);
    claimValues.put(EventRecordFields.CONTENT_CLAIM_SECTION, section);
    claimValues.put(EventRecordFields.CONTENT_CLAIM_IDENTIFIER, identifier);
    claimValues.put(EventRecordFields.CONTENT_CLAIM_OFFSET, offset);
    claimValues.put(EventRecordFields.CONTENT_CLAIM_SIZE, size);

    final List<RecordField> explicitFields = contentClaimSchema.getField(EventFieldNames.EXPLICIT_VALUE).getSubFields();
    return new FieldMapRecord(claimValues, new RecordSchema(explicitFields));
}
Aggregations