Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
The class ITConsumeKafka, method validateGetAllMessages.
@Test
public void validateGetAllMessages() throws Exception {
    String groupName = "validateGetAllMessages";
    when(mockConsumerPool.obtainConsumer(anyObject(), anyObject())).thenReturn(mockLease);
    when(mockLease.continuePolling()).thenReturn(Boolean.TRUE, Boolean.TRUE, Boolean.FALSE);
    when(mockLease.commit()).thenReturn(Boolean.TRUE);
    ConsumeKafka_1_0 proc = new ConsumeKafka_1_0() {
        @Override
        protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
            return mockConsumerPool;
        }
    };
    final TestRunner runner = TestRunners.newTestRunner(proc);
    runner.setProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS, "0.0.0.0:1234");
    runner.setProperty(ConsumeKafka_1_0.TOPICS, "foo,bar");
    runner.setProperty(ConsumeKafka_1_0.GROUP_ID, groupName);
    runner.setProperty(ConsumeKafka_1_0.AUTO_OFFSET_RESET, ConsumeKafka_1_0.OFFSET_EARLIEST);
    runner.run(1, false);
    verify(mockConsumerPool, times(1)).obtainConsumer(anyObject(), anyObject());
    verify(mockLease, times(3)).continuePolling();
    verify(mockLease, times(2)).poll();
    verify(mockLease, times(1)).commit();
    verify(mockLease, times(1)).close();
    verifyNoMoreInteractions(mockConsumerPool);
    verifyNoMoreInteractions(mockLease);
}
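The snippet references mockConsumerPool and mockLease without showing where they come from; in the full integration test they are Mockito mocks rebuilt before each test method so the verify(...) interaction counts start from zero. A plausible sketch of that setup (the @Before body is an assumption, not verbatim source):

import static org.mockito.Mockito.mock;

import org.junit.Before;

public class ITConsumeKafka {

    ConsumerPool mockConsumerPool = null;
    ConsumerLease mockLease = null;

    @Before
    public void setup() {
        // Fresh mocks per test so interaction counts do not leak between methods.
        mockConsumerPool = mock(ConsumerPool.class);
        mockLease = mock(ConsumerLease.class);
    }
}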
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
The class TestConfigurationProperty, method testConfigurationCanary.
@Test
public void testConfigurationCanary() throws IOException {
    TestRunner runner = TestRunners.newTestRunner(StoreInKiteDataset.class);
    runner.setProperty(AbstractKiteProcessor.CONF_XML_FILES, confLocation.toString());
    Assert.assertFalse("Should not contain canary value", DefaultConfiguration.get().getBoolean("nifi.config.canary", false));
    AbstractKiteProcessor processor = new StoreInKiteDataset();
    ProcessContext context = runner.getProcessContext();
    processor.setDefaultConfiguration(context);
    Assert.assertTrue("Should contain canary value", DefaultConfiguration.get().getBoolean("nifi.config.canary", false));
}
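For the canary assertion to flip, confLocation must point at a Hadoop-style XML configuration file that sets nifi.config.canary. A sketch of producing such a file in a setup method (the temporary-folder rule and file name are assumptions):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;

public class TestConfigurationProperty {

    @Rule
    public final TemporaryFolder temp = new TemporaryFolder();

    private File confLocation;

    @Before
    public void saveConfiguration() throws IOException {
        // Write a minimal Hadoop configuration containing only the canary flag.
        final Configuration conf = new Configuration(false);
        conf.setBoolean("nifi.config.canary", true);
        confLocation = temp.newFile("nifi-conf.xml");
        try (FileOutputStream out = new FileOutputStream(confLocation)) {
            conf.writeXml(out);
        }
    }
}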
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
The class TestExtractMediaMetadata, method testProperties.
@Test
public void testProperties() {
    final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
    ProcessContext context = runner.getProcessContext();
    Map<PropertyDescriptor, String> propertyValues = context.getProperties();
    assertEquals(4, propertyValues.size());
}
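The same ProcessContext#getProperties map can do more than a size check: each key is a PropertyDescriptor, and each value is the configured String (typically null for unset properties in the mock context). A short illustrative sketch, with the test method name our own:

import java.util.Map;

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

@Test
public void listSupportedProperties() {
    final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
    final ProcessContext context = runner.getProcessContext();
    // Every supported descriptor appears as a key, whether or not a value was set.
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        System.out.println(entry.getKey().getName() + " = " + entry.getValue());
    }
}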
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
The class GetMongo, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile input = null;
    if (context.hasIncomingConnection()) {
        input = session.get();
        if (input == null && context.hasNonLoopConnection()) {
            return;
        }
    }
    final ComponentLog logger = getLogger();
    Map<String, String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.MIME_TYPE.key(), "application/json");
    final Document query;
    String queryStr;
    if (context.getProperty(QUERY).isSet()) {
        queryStr = context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue();
        query = Document.parse(queryStr);
    } else if (!context.getProperty(QUERY).isSet() && input == null) {
        // No query property and no incoming FlowFile: match every document.
        queryStr = "{}";
        query = Document.parse("{}");
    } else {
        // Fall back to reading the query from the incoming FlowFile's content.
        try {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            session.exportTo(input, out);
            out.close();
            queryStr = new String(out.toByteArray());
            query = Document.parse(queryStr);
        } catch (Exception ex) {
            getLogger().error("Error reading flowfile", ex);
            if (input != null) {
                // Likely culprit is a bad query
                session.transfer(input, REL_FAILURE);
                return;
            } else {
                throw new ProcessException(ex);
            }
        }
    }
    if (context.getProperty(QUERY_ATTRIBUTE).isSet()) {
        final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue();
        attributes.put(queryAttr, queryStr);
    }
    final Document projection = context.getProperty(PROJECTION).isSet() ? Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue()) : null;
    final Document sort = context.getProperty(SORT).isSet() ? Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue()) : null;
    final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
    final String usePrettyPrint = context.getProperty(USE_PRETTY_PRINTING).getValue();
    configureMapper(jsonTypeSetting);
    final MongoCollection<Document> collection = getCollection(context);
    try {
        final FindIterable<Document> it = query != null ? collection.find(query) : collection.find();
        if (projection != null) {
            it.projection(projection);
        }
        if (sort != null) {
            it.sort(sort);
        }
        if (context.getProperty(LIMIT).isSet()) {
            it.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger());
        }
        if (context.getProperty(BATCH_SIZE).isSet()) {
            it.batchSize(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions(input).asInteger());
        }
        final MongoCursor<Document> cursor = it.iterator();
        ComponentLog log = getLogger();
        try {
            FlowFile flowFile = null;
            if (context.getProperty(RESULTS_PER_FLOWFILE).isSet()) {
                // Batch mode: pack up to RESULTS_PER_FLOWFILE documents into each FlowFile.
                int ceiling = context.getProperty(RESULTS_PER_FLOWFILE).evaluateAttributeExpressions(input).asInteger();
                List<Document> batch = new ArrayList<>();
                while (cursor.hasNext()) {
                    batch.add(cursor.next());
                    if (batch.size() == ceiling) {
                        try {
                            if (log.isDebugEnabled()) {
                                log.debug("Writing batch...");
                            }
                            String payload = buildBatch(batch, jsonTypeSetting, usePrettyPrint);
                            writeBatch(payload, null, context, session, attributes, REL_SUCCESS);
                            batch = new ArrayList<>();
                        } catch (Exception ex) {
                            getLogger().error("Error building batch", ex);
                        }
                    }
                }
                if (batch.size() > 0) {
                    try {
                        writeBatch(buildBatch(batch, jsonTypeSetting, usePrettyPrint), null, context, session, attributes, REL_SUCCESS);
                    } catch (Exception ex) {
                        getLogger().error("Error sending remainder of batch", ex);
                    }
                }
            } else {
                // Default mode: one FlowFile per result document.
                while (cursor.hasNext()) {
                    flowFile = session.create();
                    flowFile = session.write(flowFile, out -> {
                        String json;
                        if (jsonTypeSetting.equals(JSON_TYPE_STANDARD)) {
                            json = getObjectWriter(mapper, usePrettyPrint).writeValueAsString(cursor.next());
                        } else {
                            json = cursor.next().toJson();
                        }
                        out.write(json.getBytes(context.getProperty(CHARSET).evaluateAttributeExpressions().getValue()));
                    });
                    flowFile = session.putAllAttributes(flowFile, attributes);
                    session.getProvenanceReporter().receive(flowFile, getURI(context));
                    session.transfer(flowFile, REL_SUCCESS);
                }
            }
            if (input != null) {
                session.transfer(input, REL_ORIGINAL);
            }
        } finally {
            cursor.close();
        }
    } catch (final RuntimeException e) {
        if (input != null) {
            session.transfer(input, REL_FAILURE);
        }
        context.yield();
        logger.error("Failed to execute query {} due to {}", new Object[] { query, e }, e);
    }
}
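A minimal TestRunner harness driving the onTrigger path above with a literal query; the URI, database, and collection values are placeholders, and a reachable MongoDB with matching data is assumed:

import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

@Test
public void runLiteralQuery() {
    final TestRunner runner = TestRunners.newTestRunner(GetMongo.class);
    runner.setProperty(AbstractMongoProcessor.URI, "mongodb://localhost:27017");
    runner.setProperty(AbstractMongoProcessor.DATABASE_NAME, "test");
    runner.setProperty(AbstractMongoProcessor.COLLECTION_NAME, "documents");
    runner.setProperty(GetMongo.QUERY, "{\"status\": \"active\"}");
    runner.run();
    // With QUERY set, onTrigger parses the property value, so no incoming
    // FlowFile is required; each result document lands on REL_SUCCESS.
    System.out.println(runner.getFlowFilesForRelationship(GetMongo.REL_SUCCESS).size() + " documents fetched");
}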
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
The class GetMongoIT, method testValidators.
@Test
public void testValidators() {
    TestRunner runner = TestRunners.newTestRunner(GetMongo.class);
    Collection<ValidationResult> results;
    ProcessContext pc;
    // missing uri, db, collection
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    results = new HashSet<>();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(3, results.size());
    Iterator<ValidationResult> it = results.iterator();
    Assert.assertTrue(it.next().toString().contains("is invalid because Mongo URI is required"));
    Assert.assertTrue(it.next().toString().contains("is invalid because Mongo Database Name is required"));
    Assert.assertTrue(it.next().toString().contains("is invalid because Mongo Collection Name is required"));
    // missing query - is ok
    runner.setProperty(AbstractMongoProcessor.URI, MONGO_URI);
    runner.setProperty(AbstractMongoProcessor.DATABASE_NAME, DB_NAME);
    runner.setProperty(AbstractMongoProcessor.COLLECTION_NAME, COLLECTION_NAME);
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    results = new HashSet<>();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(0, results.size());
    // invalid query
    runner.setProperty(GetMongo.QUERY, "{a: x,y,z}");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    results = new HashSet<>();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    Assert.assertTrue(results.iterator().next().toString().contains("is invalid because"));
    // invalid projection
    runner.setVariable("projection", "{a: x,y,z}");
    runner.setProperty(GetMongo.QUERY, "{a: 1}");
    runner.setProperty(GetMongo.PROJECTION, "{a: z}");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    results = new HashSet<>();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    Assert.assertTrue(results.iterator().next().toString().contains("is invalid"));
    // invalid sort
    runner.removeProperty(GetMongo.PROJECTION);
    runner.setProperty(GetMongo.SORT, "{a: x,y,z}");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    results = new HashSet<>();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    Assert.assertTrue(results.iterator().next().toString().contains("is invalid"));
}
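The cast-and-validate boilerplate repeated in each block above can be factored into a helper; a small sketch (the helper name is our own):

import java.util.Collection;
import java.util.Collections;

import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.util.MockProcessContext;
import org.apache.nifi.util.TestRunner;

// Returns the validation results for the runner's current property state.
private static Collection<ValidationResult> validate(final TestRunner runner) {
    final ProcessContext pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        return ((MockProcessContext) pc).validate();
    }
    return Collections.emptySet();
}

Each assertion block then reduces to results = validate(runner) followed by the Assert calls.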