Usage example of org.apache.nifi.util.MockProcessContext in the Apache NiFi project.
From the class PutHDFSTest, method testValidators:
/**
 * Verifies PutHDFS property validation: each misconfiguration scenario must
 * produce exactly one validation failure containing the expected message
 * fragment, and a fully valid configuration must produce none.
 */
@Test
public void testValidators() {
    // Scenario 1: no Directory set -> "Directory is required".
    PutHDFS proc = new TestablePutHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    runner.enqueue(new byte[0]);
    assertSingleValidationError(runner, "is invalid because Directory is required");

    // Scenario 2: relative directory is acceptable -> no validation errors.
    runner.setProperty(PutHDFS.DIRECTORY, "target");
    runner.enqueue(new byte[0]);
    Assert.assertEquals(0, validate(runner).size());

    // Scenario 3: negative replication factor is rejected.
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "-1");
    runner.enqueue(new byte[0]);
    assertSingleValidationError(runner, "is invalid because short integer must be greater than zero");

    // Scenario 4: zero replication factor is rejected (fresh runner so the
    // previous invalid value does not linger).
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "0");
    runner.enqueue(new byte[0]);
    assertSingleValidationError(runner, "is invalid because short integer must be greater than zero");

    // Scenario 5: negative umask is rejected.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "-1");
    runner.enqueue(new byte[0]);
    assertSingleValidationError(runner, "is invalid because octal umask [-1] cannot be negative");

    // Scenario 6: "18" contains the digit 8, which is not valid octal.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "18");
    runner.enqueue(new byte[0]);
    assertSingleValidationError(runner, "is invalid because [18] is not a valid short octal number");

    // Scenario 7: octal 2000 parses but exceeds the legal umask range.
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "2000");
    runner.enqueue(new byte[0]);
    assertSingleValidationError(runner, "is invalid because octal umask [2000] is not a valid umask");

    // Scenario 8: the abstract CompressionCodec class name is not in the
    // allowed set of concrete codecs.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.COMPRESSION_CODEC, CompressionCodec.class.getName());
    runner.enqueue(new byte[0]);
    assertSingleValidationError(runner, "is invalid because Given value not found in allowed set");
}

/**
 * Runs validation on the runner's process context.
 *
 * @param runner the configured test runner
 * @return the validation results, or an empty collection if the context is
 *         not a MockProcessContext (mirrors the original defensive check)
 */
private static Collection<ValidationResult> validate(final TestRunner runner) {
    final ProcessContext pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        return ((MockProcessContext) pc).validate();
    }
    return new HashSet<>();
}

/**
 * Asserts that validation yields exactly one result whose string form
 * contains the expected message fragment.
 *
 * @param runner           the configured test runner
 * @param expectedFragment substring the single validation failure must contain
 */
private static void assertSingleValidationError(final TestRunner runner, final String expectedFragment) {
    final Collection<ValidationResult> results = validate(runner);
    Assert.assertEquals(1, results.size());
    for (final ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains(expectedFragment));
    }
}
Usage example of org.apache.nifi.util.MockProcessContext in the Kylo project by Teradata.
From the class SparkJobserverServiceTest, method testValidators:
/**
 * Verify property validators.
 */
@Test
public void testValidators() {
    // Scenario 1: clear both properties; validation must report exactly one
    // failure complaining about the missing Jobserver URL.
    runner.disableControllerService(sparkJobserverService);
    runner.setProperty(sparkJobserverService, SparkJobserverService.JOBSERVER_URL, (String) null);
    runner.setProperty(sparkJobserverService, SparkJobserverService.SYNC_TIMEOUT, (String) null);
    runner.enableControllerService(sparkJobserverService);
    runner.enqueue(new byte[0]);

    Collection<ValidationResult> validationResults = ((MockProcessContext) runner.getProcessContext()).validate();
    Assert.assertEquals(1, validationResults.size());
    final String expectedMessage = "'Spark Jobserver Service' validated against 'sparkJobServerService' is invalid because Controller Service is not valid: 'Jobserver URL' is invalid because Jobserver URL is" + " required";
    Assert.assertEquals(expectedMessage, Iterables.getOnlyElement(validationResults).toString());

    // Scenario 2: supply valid values; validation must pass cleanly.
    runner.disableControllerService(sparkJobserverService);
    runner.setProperty(sparkJobserverService, SparkJobserverService.JOBSERVER_URL, sparkJobserverUrl);
    runner.setProperty(sparkJobserverService, SparkJobserverService.SYNC_TIMEOUT, syncTimeout);
    runner.enableControllerService(sparkJobserverService);
    runner.enqueue(new byte[0]);

    validationResults = ((MockProcessContext) runner.getProcessContext()).validate();
    Assert.assertEquals(0, validationResults.size());
}
Usage example of org.apache.nifi.util.MockProcessContext in the Apache NiFi project.
From the class TestHttpFlowFileServerProtocol, method setupMockProcessSession:
/**
 * Initializes the shared session state, mock process session, and mock
 * process context around a mocked Processor standing in for a RootGroupPort,
 * so the NiFi mock library can be reused in these tests.
 */
private void setupMockProcessSession() {
    // A mocked Processor plays the role of a RootGroupPort.
    final Processor rootGroupPort = mock(Processor.class);
    when(rootGroupPort.getIdentifier()).thenReturn("root-group-port-id");

    final Set<Relationship> relationships = new HashSet<>();
    relationships.add(Relationship.ANONYMOUS);
    when(rootGroupPort.getRelationships()).thenReturn(relationships);

    sessionState = new SharedSessionState(rootGroupPort, new AtomicLong(0));
    processSession = new MockProcessSession(sessionState, rootGroupPort);
    processContext = new MockProcessContext(rootGroupPort);
}
Usage example of org.apache.nifi.util.MockProcessContext in the Apache NiFi project.
From the class TestFetchElasticsearch5, method testCreateElasticsearch5ClientWithException:
/**
 * Verifies that a failure while building the transport client is surfaced
 * to the caller as a ProcessException.
 */
@Test(expected = ProcessException.class)
public void testCreateElasticsearch5ClientWithException() throws ProcessException {
    // Processor whose transport-client factory always throws.
    final FetchElasticsearch5TestProcessor failingProcessor = new FetchElasticsearch5TestProcessor(true) {
        @Override
        protected Client getTransportClient(final Settings.Builder settingsBuilder, final String xPackPath,
                final String username, final String password, final List<InetSocketAddress> esHosts,
                final ComponentLog log) throws MalformedURLException {
            throw new MalformedURLException();
        }
    };

    final MockProcessContext mockContext = new MockProcessContext(failingProcessor);
    failingProcessor.initialize(new MockProcessorInitializationContext(failingProcessor, mockContext));
    failingProcessor.callCreateElasticsearchClient(mockContext);
}
Usage example of org.apache.nifi.util.MockProcessContext in the Apache NiFi project.
From the class TestFetchElasticsearch, method testCreateElasticsearchClientWithException:
/**
 * Verifies that a failure while building the transport client is surfaced
 * to the caller as a ProcessException.
 */
@Test(expected = ProcessException.class)
public void testCreateElasticsearchClientWithException() throws ProcessException {
    // Processor whose transport-client factory always throws.
    final FetchElasticsearchTestProcessor failingProcessor = new FetchElasticsearchTestProcessor(true) {
        @Override
        protected TransportClient getTransportClient(final Settings.Builder settingsBuilder, final String shieldUrl,
                final String username, final String password) throws MalformedURLException {
            throw new MalformedURLException();
        }
    };

    final MockProcessContext mockContext = new MockProcessContext(failingProcessor);
    failingProcessor.initialize(new MockProcessorInitializationContext(failingProcessor, mockContext));
    failingProcessor.callCreateElasticsearchClient(mockContext);
}
Aggregations