Use of org.apache.nifi.provenance.serialization.RecordWriter in the Apache nifi-minifi project: class MiNiFiPersistentProvenanceRepository, method close.
/**
 * Closes this repository: marks it closed, stops all background executors,
 * closes the index manager, and closes every open record writer. The write
 * lock excludes concurrent rollover while teardown is in progress.
 *
 * <p>All writers are attempted even if an earlier close fails; the first
 * {@link IOException} is rethrown with later failures attached as suppressed
 * exceptions, so a single bad writer no longer leaks the remaining ones.
 *
 * @throws IOException if the index manager or any record writer fails to close
 */
@Override
public synchronized void close() throws IOException {
    this.closed.set(true);
    writeLock.lock();
    try {
        logger.debug("Obtained write lock for close");

        // Stop background work before tearing down the resources it may be using.
        scheduledExecService.shutdownNow();
        rolloverExecutor.shutdownNow();
        queryExecService.shutdownNow();

        IOException firstFailure = null;
        try {
            getIndexManager().close();
        } catch (final IOException e) {
            // Remember the failure but still close the writers below.
            firstFailure = e;
        }

        if (writers != null) {
            for (final RecordWriter writer : writers) {
                try {
                    writer.close();
                } catch (final IOException e) {
                    // Keep closing the rest; surface the first error at the end.
                    if (firstFailure == null) {
                        firstFailure = e;
                    } else {
                        firstFailure.addSuppressed(e);
                    }
                }
            }
        }

        if (firstFailure != null) {
            throw firstFailure;
        }
    } finally {
        writeLock.unlock();
    }
}
Use of org.apache.nifi.provenance.serialization.RecordWriter in the Apache nifi-minifi project: class MiNiFiPersistentProvenanceRepositoryTest, method findJournalSizes.
/**
 * Determines the on-disk size of a journal header and of two representative
 * provenance records by writing them to a temp file and measuring file growth
 * after each flush. Results are stored in the static {@code headerSize},
 * {@code recordSize}, and {@code recordSize2} fields for use by the tests.
 *
 * @throws IOException if the temp file cannot be created or written
 */
@BeforeClass
public static void findJournalSizes() throws IOException {
    // determine header and record size
    final Map<String, String> attributes = new HashMap<>();
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
    builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    final ProvenanceEventRecord record = builder.build();

    // Second record differs only in component id, so its size may differ.
    builder.setComponentId("2345");
    final ProvenanceEventRecord record2 = builder.build();

    final File tempRecordFile = tempFolder.newFile("record.tmp");
    System.out.println("findJournalSizes position 0 = " + tempRecordFile.length());

    final AtomicLong idGenerator = new AtomicLong(0L);
    // try-with-resources guarantees the writer is closed even if a write fails
    try (final RecordWriter writer = RecordWriters.newSchemaRecordWriter(tempRecordFile, idGenerator, false, false)) {
        writer.writeHeader(12345L);
        writer.flush();
        // Math.toIntExact fails loudly on overflow instead of silently truncating
        headerSize = Math.toIntExact(tempRecordFile.length());
        writer.writeRecord(record);
        writer.flush();
        recordSize = Math.toIntExact(tempRecordFile.length()) - headerSize;
        writer.writeRecord(record2);
        writer.flush();
        recordSize2 = Math.toIntExact(tempRecordFile.length()) - headerSize - recordSize;
    }

    System.out.println("headerSize =" + headerSize);
    System.out.println("recordSize =" + recordSize);
    System.out.println("recordSize2=" + recordSize2);
}
Use of org.apache.nifi.provenance.serialization.RecordWriter in the Apache nifi-minifi project: class MiNiFiPersistentProvenanceRepositoryTest, method testMergeJournalsBadRecordAfterFirst.
/**
 * Verifies that when the first journal file is corrupted after its first
 * record, journal recovery still merges the remaining journals but drops the
 * unreadable events, so fewer than the 10000 registered events survive.
 */
@Test
public void testMergeJournalsBadRecordAfterFirst() throws IOException, InterruptedException {
    // skip if on windows
    assumeFalse(isWindowsEnvironment());
    final RepositoryConfiguration config = createConfiguration();
    config.setMaxEventFileLife(3, TimeUnit.SECONDS);
    TestableMiNiFiPersistentProvenanceRepository testRepo = new TestableMiNiFiPersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
    testRepo.initialize(getEventReporter(), null, null, null);

    final Map<String, String> attributes = new HashMap<>();
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
    builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    final ProvenanceEventRecord record = builder.build();

    // Register 10000 copies of the event concurrently from 10 threads.
    final ExecutorService exec = Executors.newFixedThreadPool(10);
    final List<Future<?>> futures = new ArrayList<>();
    for (int i = 0; i < 10000; i++) {
        futures.add(exec.submit(() -> testRepo.registerEvent(record)));
    }
    // No further submissions; let the pool's threads exit when the work drains.
    exec.shutdown();

    // Block on each future instead of polling isDone() with sleeps; this also
    // surfaces task failures that the old busy-wait loop silently ignored.
    for (final Future<?> future : futures) {
        try {
            future.get();
        } catch (final ExecutionException e) {
            throw new AssertionError("registerEvent failed", e.getCause());
        }
    }

    // corrupt the first record of the first journal file
    RecordWriter firstWriter = testRepo.getWriters()[0];
    corruptJournalFile(firstWriter.getFile(), headerSize + 15 + recordSize, "RECEIVE", "BADTYPE");
    testRepo.recoverJournalFiles();

    final File storageDir = config.getStorageDirectories().values().iterator().next();
    assertTrue(checkJournalRecords(storageDir, false) < 10000);
}
Use of org.apache.nifi.provenance.serialization.RecordWriter in the Apache nifi project: class AbstractTestRecordReaderWriter, method testMultipleRecordsSameBlockCompressed.
/**
 * Writes 10 events into a single compressed block (block boundary set to 1 MB
 * of uncompressed data, far more than 10 events produce) and verifies every
 * record reads back from block 0, both with and without an explicit
 * {@code skipToBlock(0)} beforehand.
 */
@Test
public void testMultipleRecordsSameBlockCompressed() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);

    // new record each 1 MB of uncompressed data; try-with-resources ensures
    // the journal is closed even if a write fails mid-loop
    try (final RecordWriter writer = createWriter(journalFile, tocWriter, true, 1024 * 1024)) {
        writer.writeHeader(1L);
        for (int i = 0; i < 10; i++) {
            writer.writeRecord(createEvent());
        }
    }

    final TocReader tocReader = new StandardTocReader(tocFile);
    try (final FileInputStream fis = new FileInputStream(journalFile);
         final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        for (int i = 0; i < 10; i++) {
            // every record was written into the first (and only) block
            assertEquals(0, reader.getBlockIndex());

            // explicitly skip to the block about half of the time, and read
            // without skipping the other half of the time to ensure that it's okay.
            if (i <= 5) {
                reader.skipToBlock(0);
            }

            final StandardProvenanceEventRecord recovered = reader.nextRecord();
            assertNotNull(recovered);
            assertEquals("nifi://unit-test", recovered.getTransitUri());
        }
        assertNull(reader.nextRecord());
    }
    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
Use of org.apache.nifi.provenance.serialization.RecordWriter in the Apache nifi project: class AbstractTestRecordReaderWriter, method testSingleRecordCompressed.
/**
 * Writes a single event to a compressed journal and verifies it can be
 * recovered from block 0 with the expected transit URI.
 */
@Test
public void testSingleRecordCompressed() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);

    // try-with-resources ensures the journal is closed even if a write fails
    try (final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192)) {
        writer.writeHeader(1L);
        writer.writeRecord(createEvent());
    }

    final TocReader tocReader = new StandardTocReader(tocFile);
    assertRecoveredRecord(journalFile, tocReader, "nifi://unit-test", 0);
    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
Aggregations