Example usage of org.apache.nifi.controller.repository.claim.ContentClaim in the Apache NiFi project: class TestFileSystemRepository, method testReadWithNoContent.
@Test(expected = ContentNotFoundException.class)
public void testReadWithNoContent() throws IOException {
    // Build a claim that points at a resource which was never written to the
    // repository; reading it must fail with ContentNotFoundException.
    final StandardResourceClaim resourceClaim = new StandardResourceClaim(claimManager, "container1", "section 1", "1", false);
    final ContentClaim missingClaim = new StandardContentClaim(resourceClaim, 0L);
    // close() is unreachable when read() throws as expected; it only runs if
    // the read unexpectedly succeeds, so no stream is ever leaked.
    final InputStream stream = repository.read(missingClaim);
    stream.close();
}
Example usage of org.apache.nifi.controller.repository.claim.ContentClaim in the Apache NiFi project: class TestFileSystemRepository, method testWriteWithNoContent.
/**
 * Verifies that writing zero bytes to a claim is valid: three claims share the
 * same backing resource claim, the middle one is left empty, and each claim
 * reads back exactly the bytes (possibly none) that were written to it.
 * <p>
 * Fix: the original used {@code String.getBytes()} and {@code baos.toString()}
 * with no charset, which depend on the platform default encoding; the test now
 * pins UTF-8 so it behaves identically on every JVM.
 */
@Test
public void testWriteWithNoContent() throws IOException {
    final ContentClaim claim1 = repository.create(false);
    try (final OutputStream out = repository.write(claim1)) {
        out.write("Hello".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
    // claim2 receives no content at all; it must still be created against the
    // same appendable resource claim as claim1.
    final ContentClaim claim2 = repository.create(false);
    assertEquals(claim1.getResourceClaim(), claim2.getResourceClaim());
    try (final OutputStream out = repository.write(claim2)) {
        // intentionally write nothing
    }
    final ContentClaim claim3 = repository.create(false);
    assertEquals(claim1.getResourceClaim(), claim3.getResourceClaim());
    try (final OutputStream out = repository.write(claim3)) {
        out.write(" World".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
    // Each claim must read back only its own slice of the shared resource.
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (final InputStream in = repository.read(claim1)) {
        StreamUtils.copy(in, baos);
    }
    assertEquals("Hello", new String(baos.toByteArray(), java.nio.charset.StandardCharsets.UTF_8));
    baos.reset();
    try (final InputStream in = repository.read(claim2)) {
        StreamUtils.copy(in, baos);
    }
    // The empty claim yields an empty stream, not the neighbors' bytes.
    assertEquals("", new String(baos.toByteArray(), java.nio.charset.StandardCharsets.UTF_8));
    assertEquals(0, baos.size());
    baos.reset();
    try (final InputStream in = repository.read(claim3)) {
        StreamUtils.copy(in, baos);
    }
    assertEquals(" World", new String(baos.toByteArray(), java.nio.charset.StandardCharsets.UTF_8));
}
Example usage of org.apache.nifi.controller.repository.claim.ContentClaim in the Apache NiFi project: class TestFileSystemRepository, method testRemoveDeletesFileIfNoClaimants.
/**
 * Verifies claimant-count bookkeeping: a claim's backing file survives while
 * any claimant remains and is deleted only once the count reaches zero and
 * remove() is called. Also checks that remove(null) is a silent no-op.
 */
@Test
public void testRemoveDeletesFileIfNoClaimants() throws IOException {
    final ContentClaim contentClaim = repository.create(true);
    assertNotNull(contentClaim);
    assertEquals(1, repository.getClaimantCount(contentClaim));
    // Note: "Claimaint" is the (misspelled) name of the actual repository API.
    repository.incrementClaimaintCount(contentClaim);
    final Path backingFile = getPath(contentClaim);
    final String maxSizeProperty = nifiProperties.getMaxAppendableClaimSize();
    final int claimLengthBytes = DataUnit.parseDataSize(maxSizeProperty, DataUnit.B).intValue();
    // Fill the claim to its maximum appendable size so the backing file exists.
    try (final OutputStream out = repository.write(contentClaim)) {
        out.write(new byte[claimLengthBytes]);
    }
    int claimants = repository.decrementClaimantCount(contentClaim);
    assertEquals(1, claimants);
    assertTrue(Files.exists(backingFile));
    // Removing a null claim must neither throw nor touch any file.
    repository.remove(null);
    assertTrue(Files.exists(backingFile));
    claimants = repository.decrementClaimantCount(contentClaim);
    assertEquals(0, claimants);
    // With zero claimants, remove() must delete the file from disk.
    repository.remove(contentClaim);
    assertFalse(Files.exists(backingFile));
}
Example usage of org.apache.nifi.controller.repository.claim.ContentClaim in the Apache NiFi project: class TestStandardProcessSession, method testExportTo.
/**
 * Verifies exportTo(): content is copied verbatim to a healthy stream, and an
 * IOException raised by the target stream surfaces as a ProcessException.
 */
@Test
public void testExportTo() throws IOException {
    final ContentClaim claim = contentRepo.create(false);
    final FlowFileRecord enqueued = new StandardFlowFileRecord.Builder()
            .contentClaim(claim)
            .addAttribute("uuid", "12345678-1234-1234-1234-123456789012")
            .entryDate(System.currentTimeMillis())
            .build();
    flowFileQueue.put(enqueued);
    FlowFile flowFile = session.get();
    assertNotNull(flowFile);
    // Give the FlowFile some content to export.
    flowFile = session.append(flowFile, out -> out.write("Hello World".getBytes()));
    // Exporting into a working stream must yield the exact content.
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    session.exportTo(flowFile, sink);
    assertEquals("Hello World", new String(sink.toByteArray()));
    sink.close();
    // A stream whose write() fails must cause exportTo() to wrap the
    // IOException in a ProcessException.
    FileOutputStream failingStream = Mockito.mock(FileOutputStream.class);
    doThrow(new IOException()).when(failingStream).write((byte[]) notNull(), any(Integer.class), any(Integer.class));
    try {
        session.exportTo(flowFile, failingStream);
        Assert.fail("Expected ProcessException");
    } catch (ProcessException e) {
        // expected: the IOException from the mock was translated
    }
}
Example usage of org.apache.nifi.controller.repository.claim.ContentClaim in the Apache NiFi project: class StandardProcessSession, method merge.
/**
 * Merges the content of all source FlowFiles into the destination FlowFile,
 * writing an optional header first, an optional demarcator between each pair
 * of source payloads, and an optional footer last. The destination's content
 * is replaced by a newly created ContentClaim; the sources are unchanged.
 *
 * @param sources the FlowFiles whose content is concatenated
 * @param destination the FlowFile that receives the merged content; must not be in sources
 * @param header bytes written before any source content, or null/empty for none
 * @param footer bytes written after all source content, or null/empty for none
 * @param demarcator bytes written between consecutive source payloads, or null/empty for none
 * @return the updated destination FlowFile record backed by the new claim
 * @throws IllegalArgumentException if destination is contained in sources
 * @throws FlowFileAccessException if a source cannot be read or the new claim cannot be created or written
 */
@Override
public FlowFile merge(Collection<FlowFile> sources, FlowFile destination, final byte[] header, final byte[] footer, final byte[] demarcator) {
verifyTaskActive();
sources = validateRecordState(sources);
destination = validateRecordState(destination);
if (sources.contains(destination)) {
throw new IllegalArgumentException("Destination cannot be within sources");
}
// Flush any in-flight appends for every source so its current claim holds
// complete, readable content before we copy from it.
final Collection<StandardRepositoryRecord> sourceRecords = new ArrayList<>();
for (final FlowFile source : sources) {
final StandardRepositoryRecord record = records.get(source);
sourceRecords.add(record);
try {
ensureNotAppending(record.getCurrentClaim());
claimCache.flush(record.getCurrentClaim());
} catch (final IOException e) {
throw new FlowFileAccessException("Unable to read from source " + source + " due to " + e.toString(), e);
}
}
final StandardRepositoryRecord destinationRecord = records.get(destination);
final ContentRepository contentRepo = context.getContentRepository();
// The merged output goes into a brand-new claim; the destination's previous
// claim is released later via removeTemporaryClaim().
final ContentClaim newClaim;
try {
newClaim = contentRepo.create(context.getConnectable().isLossTolerant());
claimLog.debug("Creating ContentClaim {} for 'merge' for {}", newClaim, destinationRecord.getCurrent());
} catch (final IOException e) {
throw new FlowFileAccessException("Unable to create ContentClaim due to " + e.toString(), e);
}
long readCount = 0L;
long writtenCount = 0L;
try {
try (final OutputStream rawOut = contentRepo.write(newClaim);
final OutputStream out = new BufferedOutputStream(rawOut)) {
if (header != null && header.length > 0) {
out.write(header);
writtenCount += header.length;
}
int objectIndex = 0;
final boolean useDemarcator = demarcator != null && demarcator.length > 0;
final int numSources = sources.size();
for (final FlowFile source : sources) {
final StandardRepositoryRecord sourceRecord = records.get(source);
// Copy exactly this source's slice of its claim (offset + size).
final long copied = contentRepo.exportTo(sourceRecord.getCurrentClaim(), out, sourceRecord.getCurrentClaimOffset(), source.getSize());
writtenCount += copied;
readCount += copied;
// don't add demarcator after the last claim
if (useDemarcator && ++objectIndex < numSources) {
out.write(demarcator);
writtenCount += demarcator.length;
}
}
if (footer != null && footer.length > 0) {
out.write(footer);
writtenCount += footer.length;
}
} finally {
// Account for the bytes processed even if the copy failed part-way.
bytesWritten += writtenCount;
bytesRead += readCount;
}
} catch (final ContentNotFoundException nfe) {
// A claim disappeared mid-merge: discard the partial output and route
// the affected records through the standard missing-content handling.
destroyContent(newClaim);
handleContentNotFound(nfe, destinationRecord);
handleContentNotFound(nfe, sourceRecords);
} catch (final IOException ioe) {
destroyContent(newClaim);
throw new FlowFileAccessException("Failed to merge " + sources.size() + " into " + destination + " due to " + ioe.toString(), ioe);
} catch (final Throwable t) {
// Never leak the new claim on any failure path.
destroyContent(newClaim);
throw t;
}
// Release the destination's previous working claim, then point the
// destination record at the freshly written merged content.
removeTemporaryClaim(destinationRecord);
final FlowFileRecord newFile = new StandardFlowFileRecord.Builder().fromFlowFile(destinationRecord.getCurrent()).contentClaim(newClaim).contentClaimOffset(0L).size(writtenCount).build();
destinationRecord.setWorking(newFile);
records.put(newFile, destinationRecord);
return newFile;
}
Aggregations