Use of org.apache.nifi.provenance.toc.StandardTocReader in project nifi by apache.
From the class TestEventIdFirstSchemaRecordReaderWriter, method testContentClaimUnchanged.
@Test
public void testContentClaimUnchanged() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("filename", "1.txt");
    attributes.put("uuid", UUID.randomUUID().toString());

    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    builder.fromFlowFile(TestUtil.createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    builder.setPreviousContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    builder.setCurrentContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    final ProvenanceEventRecord record = builder.build();

    writer.writeHeader(1L);
    writer.writeRecord(record);
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);
    try (final FileInputStream fis = new FileInputStream(journalFile);
         final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        assertEquals(0, reader.getBlockIndex());
        reader.skipToBlock(0);
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        assertNotNull(recovered);
        assertEquals("nifi://unit-test", recovered.getTransitUri());
        assertEquals("container-1", recovered.getPreviousContentClaimContainer());
        assertEquals("container-1", recovered.getContentClaimContainer());
        assertEquals("section-1", recovered.getPreviousContentClaimSection());
        assertEquals("section-1", recovered.getContentClaimSection());
        assertEquals("identifier-1", recovered.getPreviousContentClaimIdentifier());
        assertEquals("identifier-1", recovered.getContentClaimIdentifier());
        assertEquals(1L, recovered.getPreviousContentClaimOffset().longValue());
        assertEquals(1L, recovered.getContentClaimOffset().longValue());
        assertEquals(1L, recovered.getPreviousFileSize().longValue());
        assertEquals(1L, recovered.getFileSize());
        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
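The pattern above is the core use of StandardTocReader: each provenance journal is paired with a TOC file located via TocUtil.getTocFile, and the reader uses the TOC's block offsets to position itself before calling nextRecord(). A condensed sketch of reading an entire journal that way, reusing the test class's createReader helper (the readAllRecords method and its 2048-character attribute limit are illustrative, not part of the NiFi code shown here):

// Minimal sketch, assuming the same createReader(...) helper as the test above.
// TocUtil, StandardTocReader and RecordReader are the NiFi classes already used in this section.
private List<ProvenanceEventRecord> readAllRecords(final File journalFile) throws IOException {
    final File tocFile = TocUtil.getTocFile(journalFile);        // the TOC file sits next to the journal
    final List<ProvenanceEventRecord> records = new ArrayList<>();

    final TocReader tocReader = new StandardTocReader(tocFile);  // exposes the block offsets written by StandardTocWriter
    try (final FileInputStream fis = new FileInputStream(journalFile);
         final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        StandardProvenanceEventRecord record;
        while ((record = reader.nextRecord()) != null) {         // nextRecord() returns null once the journal is exhausted
            records.add(record);
        }
    }
    return records;
}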
Use of org.apache.nifi.provenance.toc.StandardTocReader in project nifi by apache.
From the class TestSchemaRecordReaderWriter, method testFieldAddedToSchema.
@Test
public void testFieldAddedToSchema() throws IOException {
    final RecordField unitTestField = new SimpleRecordField("Unit Test Field", FieldType.STRING, Repetition.EXACTLY_ONE);
    final Consumer<List<RecordField>> schemaModifier = fields -> fields.add(unitTestField);

    final Map<RecordField, Object> toAdd = new HashMap<>();
    toAdd.put(unitTestField, "hello");

    try (final ByteArraySchemaRecordWriter writer = createSchemaWriter(schemaModifier, toAdd)) {
        writer.writeHeader(1L);
        writer.writeRecord(createEvent());
        writer.writeRecord(createEvent());
    }

    try (final InputStream in = new FileInputStream(journalFile);
         final TocReader tocReader = new StandardTocReader(tocFile);
         final RecordReader reader = createReader(in, journalFile.getName(), tocReader, 10000)) {
        for (int i = 0; i < 2; i++) {
            final StandardProvenanceEventRecord event = reader.nextRecord();
            assertNotNull(event);
            assertEquals("1234", event.getComponentId());
            assertEquals(ProvenanceEventType.RECEIVE, event.getEventType());
            assertNotNull(event.getUpdatedAttributes());
            assertFalse(event.getUpdatedAttributes().isEmpty());
        }
    }
}
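Here the schemaModifier Consumer appends an extra field to the writer's schema and the toAdd map supplies that field's value for every record the test's createSchemaWriter helper serializes; the reader, which knows nothing about the extra field, must still recover the standard event fields. A hypothetical variant that registers an optional integer field instead (the field name and value are illustrative, and FieldType.INT / Repetition.ZERO_OR_ONE are assumed to be available in the repository-schema classes used above):

// Hypothetical variant: add an optional INT field rather than a mandatory STRING one.
// createSchemaWriter and createEvent are the test class's own helpers, as in the snippet above.
final RecordField countField = new SimpleRecordField("Unit Test Count", FieldType.INT, Repetition.ZERO_OR_ONE);
final Consumer<List<RecordField>> schemaModifier = fields -> fields.add(countField);

final Map<RecordField, Object> toAdd = new HashMap<>();
toAdd.put(countField, 42);

try (final ByteArraySchemaRecordWriter writer = createSchemaWriter(schemaModifier, toAdd)) {
    writer.writeHeader(1L);
    writer.writeRecord(createEvent());
}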
Use of org.apache.nifi.provenance.toc.StandardTocReader in project nifi by apache.
From the class EventFileCompressor, method run.
@Override
public void run() {
    while (!shutdown) {
        File uncompressedEventFile = null;
        try {
            final long start = System.nanoTime();
            uncompressedEventFile = filesToCompress.poll(1, TimeUnit.SECONDS);
            if (uncompressedEventFile == null || shutdown) {
                continue;
            }

            File outputFile = null;
            long bytesBefore = 0L;
            StandardTocReader tocReader = null;
            File tmpTocFile = null;

            // Hold a read lock so the uncompressed event file cannot be removed while it is being compressed.
            eventFileManager.obtainReadLock(uncompressedEventFile);
            try {
                StandardTocWriter tocWriter = null;
                final File tocFile = TocUtil.getTocFile(uncompressedEventFile);
                try {
                    tocReader = new StandardTocReader(tocFile);
                } catch (final IOException e) {
                    logger.error("Failed to read TOC File {}", tocFile, e);
                    continue;
                }

                bytesBefore = uncompressedEventFile.length();

                try {
                    outputFile = new File(uncompressedEventFile.getParentFile(), uncompressedEventFile.getName() + ".gz");
                    try {
                        // Write a temporary TOC for the compressed file; it replaces the original TOC below.
                        tmpTocFile = new File(tocFile.getParentFile(), tocFile.getName() + ".tmp");
                        tocWriter = new StandardTocWriter(tmpTocFile, true, false);
                        compress(uncompressedEventFile, tocReader, outputFile, tocWriter);
                        tocWriter.close();
                    } catch (final IOException ioe) {
                        logger.error("Failed to compress {} on rollover", uncompressedEventFile, ioe);
                    }
                } finally {
                    CloseableUtil.closeQuietly(tocReader, tocWriter);
                }
            } finally {
                eventFileManager.releaseReadLock(uncompressedEventFile);
            }

            // Take a write lock before deleting the original file and renaming the temporary TOC into place.
            eventFileManager.obtainWriteLock(uncompressedEventFile);
            try {
                // Attempt to delete the input file and associated toc file
                if (uncompressedEventFile.delete()) {
                    if (tocReader != null) {
                        final File tocFile = tocReader.getFile();
                        if (!tocFile.delete()) {
                            logger.warn("Failed to delete {}; this file should be cleaned up manually", tocFile);
                        }
                        if (tmpTocFile != null) {
                            tmpTocFile.renameTo(tocFile);
                        }
                    }
                } else {
                    logger.warn("Failed to delete {}; this file should be cleaned up manually", uncompressedEventFile);
                }
            } finally {
                eventFileManager.releaseWriteLock(uncompressedEventFile);
            }

            final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
            final long bytesAfter = outputFile.length();
            final double reduction = 100 * (1 - (double) bytesAfter / (double) bytesBefore);
            final String reductionTwoDecimals = String.format("%.2f", reduction);
            logger.debug("Successfully compressed Provenance Event File {} in {} millis from {} to {}, a reduction of {}%",
                uncompressedEventFile, millis, FormatUtils.formatDataSize(bytesBefore), FormatUtils.formatDataSize(bytesAfter), reductionTwoDecimals);
        } catch (final InterruptedException e) {
            Thread.currentThread().interrupt();
            return;
        } catch (final Exception e) {
            logger.error("Failed to compress {}", uncompressedEventFile, e);
        }
    }
}
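EventFileCompressor is a Runnable that drains a queue of rolled-over event files, compressing each journal and its TOC in the background. A hedged sketch of how it might be wired up (the two-argument constructor, the shutdown() method, and the no-argument EventFileManager constructor are assumptions based on the fields used in run() above; rolledOverEventFile is illustrative):

// Hypothetical wiring of the compressor onto a background thread.
final BlockingQueue<File> filesToCompress = new LinkedBlockingQueue<>();
final EventFileManager eventFileManager = new EventFileManager();
final EventFileCompressor compressor = new EventFileCompressor(filesToCompress, eventFileManager);

final Thread compressionThread = new Thread(compressor, "Compress Provenance Logs");
compressionThread.setDaemon(true);
compressionThread.start();

// run() polls the queue, writes <journal>.gz plus a temporary TOC, then deletes the original under a write lock.
filesToCompress.add(rolledOverEventFile);

// Later, during repository shutdown, stop the polling loop.
compressor.shutdown();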