Usage of org.apache.nifi.provenance.toc.StandardTocWriter in the Apache NiFi project — from class TestEventIdFirstSchemaRecordReaderWriter, method testContentClaimChanged:
/**
 * Verifies that an event whose current content claim differs from its previous claim
 * round-trips through the writer/reader with every claim field (container, section,
 * identifier, offset, size) preserved on both the "previous" and "current" sides.
 */
@Test
public void testContentClaimChanged() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("filename", "1.txt");
    attributes.put("uuid", UUID.randomUUID().toString());

    // Build an event whose previous and current content claims differ in every field.
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    builder.fromFlowFile(TestUtil.createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    builder.setPreviousContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    builder.setCurrentContentClaim("container-2", "section-2", "identifier-2", 2L, 2L);
    final ProvenanceEventRecord record = builder.build();

    writer.writeHeader(1L);
    writer.writeRecord(record);
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        assertEquals(0, reader.getBlockIndex());
        reader.skipToBlock(0);
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        assertNotNull(recovered);

        assertEquals("nifi://unit-test", recovered.getTransitUri());
        assertEquals("container-1", recovered.getPreviousContentClaimContainer());
        assertEquals("container-2", recovered.getContentClaimContainer());
        assertEquals("section-1", recovered.getPreviousContentClaimSection());
        assertEquals("section-2", recovered.getContentClaimSection());
        assertEquals("identifier-1", recovered.getPreviousContentClaimIdentifier());
        assertEquals("identifier-2", recovered.getContentClaimIdentifier());
        assertEquals(1L, recovered.getPreviousContentClaimOffset().longValue());
        assertEquals(2L, recovered.getContentClaimOffset().longValue());
        assertEquals(1L, recovered.getPreviousFileSize().longValue());
        // BUG FIX: the original re-asserted getContentClaimOffset() here (already checked
        // above), so the current file size — set to 2L via setCurrentContentClaim — was
        // never verified. Assert getFileSize() to mirror the getPreviousFileSize() check.
        assertEquals(2L, recovered.getFileSize());

        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
Usage of org.apache.nifi.provenance.toc.StandardTocWriter in the Apache NiFi project — from class TestEventIdFirstSchemaRecordReaderWriter, method testContentClaimUnchanged:
/**
 * Verifies that an event whose current content claim is identical to its previous claim
 * round-trips through the writer/reader with all claim fields intact on both sides.
 */
@Test
public void testContentClaimUnchanged() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("filename", "1.txt");
    attributes.put("uuid", UUID.randomUUID().toString());

    // Build an event whose previous and current content claims are the same claim.
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    builder.fromFlowFile(TestUtil.createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    builder.setPreviousContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    builder.setCurrentContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    final ProvenanceEventRecord record = builder.build();

    writer.writeHeader(1L);
    writer.writeRecord(record);
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        assertEquals(0, reader.getBlockIndex());
        reader.skipToBlock(0);
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        assertNotNull(recovered);

        assertEquals("nifi://unit-test", recovered.getTransitUri());
        assertEquals("container-1", recovered.getPreviousContentClaimContainer());
        assertEquals("container-1", recovered.getContentClaimContainer());
        assertEquals("section-1", recovered.getPreviousContentClaimSection());
        assertEquals("section-1", recovered.getContentClaimSection());
        assertEquals("identifier-1", recovered.getPreviousContentClaimIdentifier());
        assertEquals("identifier-1", recovered.getContentClaimIdentifier());
        assertEquals(1L, recovered.getPreviousContentClaimOffset().longValue());
        assertEquals(1L, recovered.getContentClaimOffset().longValue());
        assertEquals(1L, recovered.getPreviousFileSize().longValue());
        // BUG FIX: the original re-asserted getContentClaimOffset() here (already checked
        // above), so the current file size — set to 1L via setCurrentContentClaim — was
        // never verified. Assert getFileSize() to mirror the getPreviousFileSize() check.
        assertEquals(1L, recovered.getFileSize());

        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
Usage of org.apache.nifi.provenance.toc.StandardTocWriter in the Apache NiFi project — from class TestSchemaRecordReaderWriter, method createSchemaWriter:
/**
 * Builds a {@link ByteArraySchemaRecordWriter} whose record schema is a copy of
 * {@code PROVENANCE_EVENT_SCHEMA_V1} after the supplied callback has altered its fields.
 *
 * @param fieldModifier callback that mutates the copied field list in place
 * @param fieldsToAdd   extra field/value pairs overlaid onto every written record
 * @return a writer that serializes records against the modified schema
 * @throws IOException if unable to create the writer
 */
private ByteArraySchemaRecordWriter createSchemaWriter(final Consumer<List<RecordField>> fieldModifier, final Map<RecordField, Object> fieldsToAdd) throws IOException {
    final TocWriter toc = new StandardTocWriter(tocFile, false, false);

    // Copy the V1 schema's fields, let the caller tweak them, then rebuild the schemas.
    final List<RecordField> modifiableFields = new ArrayList<>(ProvenanceEventSchema.PROVENANCE_EVENT_SCHEMA_V1.getFields());
    fieldModifier.accept(modifiableFields);

    final RecordSchema modifiedSchema = new RecordSchema(modifiableFields);
    final RecordSchema claimSchema = new RecordSchema(modifiedSchema.getField(EventFieldNames.CONTENT_CLAIM).getSubFields());

    return new ByteArraySchemaRecordWriter(journalFile, idGenerator, toc, false, 0) {
        @Override
        public void writeHeader(final long firstEventId, final DataOutputStream out) throws IOException {
            // Length-prefix the serialized schema so the reader knows how many bytes to consume.
            final ByteArrayOutputStream schemaBytes = new ByteArrayOutputStream();
            modifiedSchema.writeTo(schemaBytes);
            out.writeInt(schemaBytes.size());
            schemaBytes.writeTo(out);
        }

        @Override
        protected Record createRecord(final ProvenanceEventRecord event, final long eventId) {
            // Pull each field's value from the event, then overlay the forced values.
            final EventRecord eventRecord = new EventRecord(event, eventId, modifiedSchema, claimSchema);
            final Map<RecordField, Object> values = new HashMap<>();
            for (final RecordField field : modifiedSchema.getFields()) {
                values.put(field, eventRecord.getFieldValue(field));
            }
            values.putAll(fieldsToAdd);
            return new FieldMapRecord(values, modifiedSchema);
        }
    };
}
Usage of org.apache.nifi.provenance.toc.StandardTocWriter in the Apache NiFi project — from class WriteAheadProvenanceRepository, method initialize:
/**
 * Initializes this repository by wiring up the writer/reader factories and delegating
 * to the shared {@code init} implementation.
 */
@Override
public synchronized void initialize(final EventReporter eventReporter, final Authorizer authorizer, final ProvenanceAuthorizableFactory resourceFactory, final IdentifierLookup idLookup) throws IOException {
    // Writer factory: pairs each event file with a TOC writer only when one was requested.
    final RecordWriterFactory writerFactory = (file, idGenerator, compressed, createToc) -> {
        TocWriter toc = null;
        if (createToc) {
            toc = new StandardTocWriter(TocUtil.getTocFile(file), false, false);
        }
        return new EventIdFirstSchemaRecordWriter(file, idGenerator, toc, compressed, BLOCK_SIZE, idLookup);
    };

    // Reader factory: holds the per-file read lock while the reader is being constructed.
    final EventFileManager eventFileManager = new EventFileManager();
    final RecordReaderFactory readerFactory = (file, logs, maxChars) -> {
        eventFileManager.obtainReadLock(file);
        try {
            return RecordReaders.newRecordReader(file, logs, maxChars);
        } finally {
            eventFileManager.releaseReadLock(file);
        }
    };

    init(writerFactory, readerFactory, eventReporter, authorizer, resourceFactory);
}
Usage of org.apache.nifi.provenance.toc.StandardTocWriter in the Apache NiFi project — from class EncryptedWriteAheadProvenanceRepository, method initialize:
/**
 * This method initializes the repository. It first builds the key provider and event encryptor
 * from the config values, then creates the encrypted record writer and reader, then delegates
 * back to the superclass for the common implementation.
 *
 * @param eventReporter the event reporter
 * @param authorizer the authorizer
 * @param resourceFactory the authorizable factory
 * @param idLookup the lookup provider
 * @throws IOException if there is an error initializing this repository
 */
@Override
public synchronized void initialize(final EventReporter eventReporter, final Authorizer authorizer, final ProvenanceAuthorizableFactory resourceFactory, final IdentifierLookup idLookup) throws IOException {
    // Initialize the encryption-specific fields
    ProvenanceEventEncryptor provenanceEventEncryptor;
    if (getConfig().supportsEncryption()) {
        try {
            // Some key provider implementations need the master key to unwrap their material.
            KeyProvider keyProvider;
            if (KeyProviderFactory.requiresMasterKey(getConfig().getKeyProviderImplementation())) {
                SecretKey masterKey = getMasterKey();
                keyProvider = buildKeyProvider(masterKey);
            } else {
                keyProvider = buildKeyProvider();
            }
            provenanceEventEncryptor = new AESProvenanceEventEncryptor();
            provenanceEventEncryptor.initialize(keyProvider);
        } catch (KeyManagementException e) {
            String msg = "Encountered an error building the key provider";
            logger.error(msg, e);
            // Preserve the cause so callers see the underlying key-management failure
            throw new IOException(msg, e);
        }
    } else {
        // BUG FIX: corrected grammar in the error message ("a encrypted" -> "an encrypted")
        throw new IOException("The provided configuration does not support an encrypted repository");
    }

    // Build a writer factory using a lambda which injects the encryptor
    final RecordWriterFactory recordWriterFactory = (file, idGenerator, compressed, createToc) -> {
        try {
            final TocWriter tocWriter = createToc ? new StandardTocWriter(TocUtil.getTocFile(file), false, false) : null;
            return new EncryptedSchemaRecordWriter(file, idGenerator, tocWriter, compressed, BLOCK_SIZE, idLookup, provenanceEventEncryptor, getConfig().getDebugFrequency());
        } catch (EncryptionException e) {
            logger.error("Encountered an error building the schema record writer factory: ", e);
            throw new IOException(e);
        }
    };

    // Build a reader factory using a lambda which injects the encryptor
    final EventFileManager fileManager = new EventFileManager();
    final RecordReaderFactory recordReaderFactory = (file, logs, maxChars) -> {
        // Hold the per-file read lock only while the reader is constructed and configured
        fileManager.obtainReadLock(file);
        try {
            EncryptedSchemaRecordReader tempReader = (EncryptedSchemaRecordReader) RecordReaders.newRecordReader(file, logs, maxChars);
            tempReader.setProvenanceEventEncryptor(provenanceEventEncryptor);
            return tempReader;
        } finally {
            fileManager.releaseReadLock(file);
        }
    };

    // Delegate the init to the parent impl
    super.init(recordWriterFactory, recordReaderFactory, eventReporter, authorizer, resourceFactory);
}
Aggregations