Use of org.apache.nifi.components.ValidationResult in project nifi by apache:
class GetHDFSTest, method testValidators.
/**
 * Verifies GetHDFS property validation: a missing Directory property,
 * a valid directory, and a minimum/maximum file-age conflict.
 */
@Test
public void testValidators() {
    GetHDFS proc = new TestableGetHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    Collection<ValidationResult> results;
    ProcessContext pc;

    // No Directory set: exactly one validation error naming the required property.
    results = new HashSet<>();
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains("is invalid because Directory is required"));
    }

    // A valid relative directory produces no validation errors.
    results = new HashSet<>();
    // Fixed: use GetHDFS.DIRECTORY — this test exercises GetHDFS, not PutHDFS.
    runner.setProperty(GetHDFS.DIRECTORY, "target");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(0, results.size());

    // MIN_AGE greater than MAX_AGE: exactly one error about the age conflict.
    results = new HashSet<>();
    runner.setProperty(GetHDFS.DIRECTORY, "/target");
    runner.setProperty(GetHDFS.MIN_AGE, "10 secs");
    runner.setProperty(GetHDFS.MAX_AGE, "5 secs");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains("is invalid because Minimum File Age cannot be greater than Maximum File Age"));
    }
}
Use of org.apache.nifi.components.ValidationResult in project nifi by apache:
class PutHDFSTest, method testValidators.
/**
 * Verifies PutHDFS property validation: a missing Directory property, a valid
 * directory, replication-factor bounds, umask formats, and the compression-codec
 * allowed-value set.
 */
@Test
public void testValidators() {
    PutHDFS proc = new TestablePutHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    Collection<ValidationResult> results;

    // No Directory set: exactly one error naming the required property.
    results = validateContext(runner);
    assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because Directory is required"));
    }

    // A valid relative directory produces no validation errors.
    runner.setProperty(PutHDFS.DIRECTORY, "target");
    results = validateContext(runner);
    assertEquals(0, results.size());

    // A negative replication factor is rejected.
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "-1");
    results = validateContext(runner);
    assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
    }

    // A zero replication factor is also rejected (fresh processor/runner resets state).
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "0");
    results = validateContext(runner);
    assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
    }

    // A negative umask is rejected.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "-1");
    results = validateContext(runner);
    assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because octal umask [-1] cannot be negative"));
    }

    // A non-octal umask is rejected.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "18");
    results = validateContext(runner);
    assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because [18] is not a valid short octal number"));
    }

    // An out-of-range octal umask is rejected (same runner as the previous case).
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "2000");
    results = validateContext(runner);
    assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because octal umask [2000] is not a valid umask"));
    }

    // CompressionCodec's own class name is not in the allowed set of codec values.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.COMPRESSION_CODEC, CompressionCodec.class.getName());
    results = validateContext(runner);
    assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because Given value not found in allowed set"));
    }
}

/**
 * Enqueues an empty FlowFile and runs validation through the runner's
 * MockProcessContext, returning the collected results (empty when the
 * context is not a MockProcessContext).
 */
private Collection<ValidationResult> validateContext(final TestRunner runner) {
    runner.enqueue(new byte[0]);
    final ProcessContext pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        return ((MockProcessContext) pc).validate();
    }
    return new HashSet<>();
}
Use of org.apache.nifi.components.ValidationResult in project nifi by apache:
class TestEventTypeValidator, method inputWithMultipleInvalidEventTypeShouldProperlyDisplayEventsInExplanation.
@Test
public void inputWithMultipleInvalidEventTypeShouldProperlyDisplayEventsInExplanation() throws Exception {
    // Mixed-case valid types with two unknown values interleaved among them.
    final String subject = "subject";
    final String input = "append, CREATE, invalidValue1, rename, metadata, unlink, invalidValue2";

    final ValidationResult result = eventTypeValidator.validate(subject, input, context);

    // Subject and input are echoed back unchanged; both unknown values are
    // listed in the explanation and the result is marked invalid.
    assertEquals(subject, result.getSubject());
    assertEquals(input, result.getInput());
    assertEquals("The following are not valid event types: [invalidValue1, invalidValue2]", result.getExplanation());
    assertFalse(result.isValid());
}
Use of org.apache.nifi.components.ValidationResult in project nifi by apache:
class TestEventTypeValidator, method emptyInputShouldProperlyFail.
@Test
public void emptyInputShouldProperlyFail() throws Exception {
    // An empty event-type list must be rejected with a dedicated explanation.
    final String subject = "subject";
    final String input = "";

    final ValidationResult result = eventTypeValidator.validate(subject, input, context);

    // Subject and input are echoed back unchanged; the result is invalid.
    assertEquals(subject, result.getSubject());
    assertEquals(input, result.getInput());
    assertEquals("Empty event types are not allowed.", result.getExplanation());
    assertFalse(result.isValid());
}
Use of org.apache.nifi.components.ValidationResult in project nifi by apache:
class StandardProcessorNode, method isValid.
/**
 * Returns whether this processor node is currently valid: every component
 * validation result passes, every undefined relationship is auto-terminated,
 * and the processor's declared input requirement is satisfied. Any exception
 * raised during validation is logged and treated as invalid.
 */
@Override
public boolean isValid() {
    try {
        final ValidationContext validationContext = getValidationContext();

        // Component-level validation: every result must pass.
        if (!super.validate(validationContext).stream().allMatch(ValidationResult::isValid)) {
            return false;
        }

        // Every relationship with no outgoing connection must be auto-terminated.
        if (!getUndefinedRelationships().stream().allMatch(this::isAutoTerminated)) {
            return false;
        }

        // Enforce the processor's declared input requirement.
        switch (getInputRequirement()) {
            case INPUT_FORBIDDEN:
                if (!getIncomingNonLoopConnections().isEmpty()) {
                    return false;
                }
                break;
            case INPUT_REQUIRED:
                if (getIncomingNonLoopConnections().isEmpty()) {
                    return false;
                }
                break;
            case INPUT_ALLOWED:
            default:
                break;
        }
    } catch (final Throwable t) {
        // Validation must never propagate; log and report the node as invalid.
        LOG.warn("Failed during validation", t);
        return false;
    }
    return true;
}
Aggregations