Example usage of org.apache.nifi.logging.ComponentLog in the Apache NiFi project.
From the class ParseEvtxTest, method testGetBasenameExtension:
@Test
public void testGetBasenameExtension() {
    // A filename with an unexpected extension should be returned as-is,
    // while the processor emits a warning about the extension.
    final String filename = "basename.wrongextension";
    final FlowFile mockFlowFile = mock(FlowFile.class);
    final ComponentLog mockLog = mock(ComponentLog.class);

    when(mockFlowFile.getAttribute(CoreAttributes.FILENAME.key())).thenReturn(filename);

    assertEquals(filename, parseEvtx.getBasename(mockFlowFile, mockLog));
    verify(mockLog).warn(anyString(), isA(Object[].class));
}
Example usage of org.apache.nifi.logging.ComponentLog in the Apache NiFi project.
From the class ResultProcessorTest, method testProcessResultFileSuccess:
@Test
public void testProcessResultFileSuccess() {
    // On success (no exception) the processor should rename the flow file,
    // set the XML MIME type, route to success, and never touch the logger.
    final ProcessSession mockSession = mock(ProcessSession.class);
    final ComponentLog mockLog = mock(ComponentLog.class);
    final FlowFile mockFlowFile = mock(FlowFile.class);
    final String name = "basename";

    when(mockSession.putAttribute(eq(mockFlowFile), anyString(), anyString())).thenReturn(mockFlowFile);

    resultProcessor.process(mockSession, mockLog, mockFlowFile, null, name);

    verify(mockSession).putAttribute(mockFlowFile, CoreAttributes.FILENAME.key(), name);
    verify(mockSession).putAttribute(mockFlowFile, CoreAttributes.MIME_TYPE.key(), MediaType.APPLICATION_XML_UTF_8.toString());
    verify(mockSession).transfer(mockFlowFile, successRelationship);
    verifyNoMoreInteractions(mockLog);
}
Example usage of org.apache.nifi.logging.ComponentLog in the Apache NiFi project.
From the class TestKerberosProperties, method testValidatePrincipalAndKeytab:
@Test
public void testValidatePrincipalAndKeytab() {
    final ComponentLog mockLog = Mockito.mock(ComponentLog.class);
    final Configuration config = new Configuration();

    // With security disabled in the config, any principal/keytab combination is valid.
    List<ValidationResult> results = KerberosProperties.validatePrincipalAndKeytab("test", config, null, null, mockLog);
    Assert.assertEquals(0, results.size());

    results = KerberosProperties.validatePrincipalAndKeytab("test", config, "principal", null, mockLog);
    Assert.assertEquals(0, results.size());

    results = KerberosProperties.validatePrincipalAndKeytab("test", config, "principal", "keytab", mockLog);
    Assert.assertEquals(0, results.size());

    // Enable kerberos; now a missing principal and keytab yield one error each.
    config.set("hadoop.security.authentication", "kerberos");
    config.set("hadoop.security.authorization", "true");

    results = KerberosProperties.validatePrincipalAndKeytab("test", config, null, null, mockLog);
    Assert.assertEquals(2, results.size());
}
Example usage of org.apache.nifi.logging.ComponentLog in the Apache NiFi project.
From the class BinFiles, method processBins:
/**
 * Drains the queue of ready bins and processes each one, returning the
 * number of bins handled. A bin whose processing throws a ProcessException
 * has its flow files routed to failure and its session committed; any other
 * exception rolls the bin's session back. Bins whose session was not already
 * committed by processBin have their contents transferred to the original
 * relationship and committed here.
 */
private int processBins(final ProcessContext context) {
    final ComponentLog logger = getLogger();
    int processedBins = 0;

    for (Bin bin = readyBins.poll(); bin != null; bin = readyBins.poll()) {
        final boolean alreadyCommitted;
        try {
            alreadyCommitted = this.processBin(bin, context);
        } catch (final ProcessException e) {
            // Known processing failure: route every flow file in the bin to failure.
            logger.error("Failed to process bundle of {} files due to {}", new Object[] { bin.getContents().size(), e });
            final ProcessSession failureSession = bin.getSession();
            for (final FlowFile flowFile : bin.getContents()) {
                failureSession.transfer(flowFile, REL_FAILURE);
            }
            failureSession.commit();
            continue;
        } catch (final Exception e) {
            // Unexpected failure: roll back so the flow files are not lost.
            logger.error("Failed to process bundle of {} files due to {}; rolling back sessions", new Object[] { bin.getContents().size(), e });
            bin.getSession().rollback();
            continue;
        }

        // Only commit here if processBin did not already commit the session itself.
        if (!alreadyCommitted) {
            final ProcessSession binSession = bin.getSession();
            binSession.transfer(bin.getContents(), REL_ORIGINAL);
            binSession.commit();
        }
        processedBins++;
    }

    return processedBins;
}
Example usage of org.apache.nifi.logging.ComponentLog in the Apache NiFi project.
From the class ExtractImageMetadata, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowfile = session.get();
    if (flowfile == null) {
        return;
    }

    final ComponentLog logger = this.getLogger();
    final AtomicReference<Metadata> metadataHolder = new AtomicReference<>(null);
    final Integer maxAttributes = context.getProperty(MAX_NUMBER_OF_ATTRIBUTES).asInteger();

    try {
        // Read the image content and extract its metadata; a parser failure is
        // wrapped in a ProcessException so it is handled by the catch below.
        session.read(flowfile, new InputStreamCallback() {
            @Override
            public void process(InputStream in) throws IOException {
                try {
                    metadataHolder.set(ImageMetadataReader.readMetadata(in));
                } catch (ImageProcessingException ex) {
                    throw new ProcessException(ex);
                }
            }
        });

        // Copy up to maxAttributes extracted tags onto the flow file as attributes.
        final Map<String, String> results = getTags(maxAttributes, metadataHolder.get());
        if (!results.isEmpty()) {
            flowfile = session.putAllAttributes(flowfile, results);
        }
        session.transfer(flowfile, SUCCESS);
    } catch (ProcessException e) {
        logger.error("Failed to extract image metadata from {} due to {}", new Object[] { flowfile, e });
        session.transfer(flowfile, FAILURE);
    }
}
Aggregations