Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
From the class GetHDFSTest, method testValidators:
@Test
public void testValidators() {
    GetHDFS proc = new TestableGetHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    Collection<ValidationResult> results;
    ProcessContext pc;

    // No directory configured: exactly one validation error is expected.
    results = new HashSet<>();
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains("is invalid because Directory is required"));
    }

    // A valid directory: no validation errors.
    results = new HashSet<>();
    runner.setProperty(GetHDFS.DIRECTORY, "target");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(0, results.size());

    // Minimum file age greater than maximum file age: one validation error.
    results = new HashSet<>();
    runner.setProperty(GetHDFS.DIRECTORY, "/target");
    runner.setProperty(GetHDFS.MIN_AGE, "10 secs");
    runner.setProperty(GetHDFS.MAX_AGE, "5 secs");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains("is invalid because Minimum File Age cannot be greater than Maximum File Age"));
    }
}
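The enqueue/getProcessContext/validate sequence repeated above is the standard nifi-mock idiom for exercising a processor's custom validators without ever triggering it. A minimal helper capturing that idiom might look like the sketch below; the method name is hypothetical, and the body assumes the same imports as the test above.

private static Collection<ValidationResult> validate(final TestRunner runner) {
    // MockProcessContext.validate() runs the same property- and processor-level
    // validators the framework would run before scheduling the processor.
    final ProcessContext pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        return ((MockProcessContext) pc).validate();
    }
    return new HashSet<>();
}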
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
From the class PutHDFSTest, method testPutFileWithException:
@Test
public void testPutFileWithException() throws IOException {
    // Refer to the comment in the BeforeClass method for an explanation
    assumeTrue(isNotWindows());

    String dirName = "target/testPutFileWrongPermissions";
    File file = new File(dirName);
    file.mkdirs();
    Configuration config = new Configuration();
    FileSystem fs = FileSystem.get(config);
    Path p = new Path(dirName).makeQualified(fs.getUri(), fs.getWorkingDirectory());

    // Override changeOwner so the write path fails deterministically.
    final KerberosProperties testKerberosProperties = kerberosProperties;
    TestRunner runner = TestRunners.newTestRunner(new PutHDFS() {
        @Override
        protected void changeOwner(ProcessContext context, FileSystem hdfs, Path name, FlowFile flowFile) {
            throw new ProcessException("Forcing Exception to get thrown in order to verify proper handling");
        }

        @Override
        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
            return testKerberosProperties;
        }
    });
    runner.setProperty(PutHDFS.DIRECTORY, dirName);
    runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");

    try (FileInputStream fis = new FileInputStream("src/test/resources/testdata/randombytes-1")) {
        Map<String, String> attributes = new HashMap<>();
        attributes.put(CoreAttributes.FILENAME.key(), "randombytes-1");
        runner.enqueue(fis, attributes);
        runner.run();
    }

    // The FlowFile must be routed to failure and penalized.
    List<MockFlowFile> failedFlowFiles = runner.getFlowFilesForRelationship(new Relationship.Builder().name("failure").build());
    assertFalse(failedFlowFiles.isEmpty());
    assertTrue(failedFlowFiles.get(0).isPenalized());

    fs.delete(p, true);
}
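Overriding a protected hook in an anonymous subclass, as the test does with changeOwner, is a lightweight way to inject a failure without a mocking framework: only the write path changes, and everything else is the real PutHDFS. The closing assertions could be collected into a reusable helper; the sketch below is hypothetical (the name is assumed) and relies on the same imports as the test.

private static void assertPenalizedFailure(final TestRunner runner) {
    // On a ProcessException from the write path, PutHDFS is expected to
    // penalize the FlowFile and route it to the "failure" relationship.
    final List<MockFlowFile> failedFlowFiles =
            runner.getFlowFilesForRelationship(new Relationship.Builder().name("failure").build());
    assertFalse(failedFlowFiles.isEmpty());
    assertTrue(failedFlowFiles.get(0).isPenalized());
}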
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
From the class PutHDFSTest, method testValidators:
@Test
public void testValidators() {
    PutHDFS proc = new TestablePutHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    Collection<ValidationResult> results;
    ProcessContext pc;

    // No directory configured: one validation error.
    results = new HashSet<>();
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because Directory is required"));
    }

    // A valid directory: no validation errors.
    results = new HashSet<>();
    runner.setProperty(PutHDFS.DIRECTORY, "target");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    assertEquals(0, results.size());

    // Negative replication factor: one error.
    results = new HashSet<>();
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "-1");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
    }

    // Zero replication factor: one error.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    results = new HashSet<>();
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "0");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
    }

    // Negative umask: one error.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    results = new HashSet<>();
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "-1");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because octal umask [-1] cannot be negative"));
    }

    // Umask containing a non-octal digit: one error.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    results = new HashSet<>();
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "18");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because [18] is not a valid short octal number"));
    }

    // Octal value outside the valid umask range: one error.
    results = new HashSet<>();
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "2000");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        assertTrue(vr.toString().contains("is invalid because octal umask [2000] is not a valid umask"));
    }

    // Compression codec value not in the allowed set: one error.
    results = new HashSet<>();
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.COMPRESSION_CODEC, CompressionCodec.class.getName());
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains("is invalid because Given value not found in allowed set"));
    }
}
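Each case above repeats the same five steps: fresh processor, one property under test, enqueue, validate, assert a single message. A hypothetical parameterized helper could condense this; the sketch assumes the same TestablePutHDFS fixture and imports as the test, plus org.apache.nifi.components.PropertyDescriptor.

private void assertSingleValidationError(final PropertyDescriptor property, final String value, final String expectedMessage) {
    // Build a fresh runner so property values cannot leak between cases.
    final TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(kerberosProperties));
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(property, value);
    runner.enqueue(new byte[0]);
    Collection<ValidationResult> results = new HashSet<>();
    final ProcessContext pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains(expectedMessage));
    }
}

With it, each umask case would shrink to a one-liner such as assertSingleValidationError(PutHDFS.UMASK, "-1", "cannot be negative").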
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
From the class TestPutInfluxDB, method testWriteThrowsSocketTimeoutException:
@Test
public void testWriteThrowsSocketTimeoutException() {
    // Simulate a socket timeout raised by the InfluxDB client during the write.
    mockPutInfluxDB = new PutInfluxDB() {
        @Override
        protected void writeToInfluxDB(ProcessContext context, String consistencyLevel, String database, String retentionPolicy, String records) {
            throw new InfluxDBIOException(new SocketTimeoutException("SocketTimeoutException"));
        }
    };
    runner = TestRunners.newTestRunner(mockPutInfluxDB);
    runner.setProperty(PutInfluxDB.DB_NAME, "test");
    runner.setProperty(PutInfluxDB.USERNAME, "u1");
    runner.setProperty(PutInfluxDB.PASSWORD, "p1");
    runner.setProperty(PutInfluxDB.CHARSET, "UTF-8");
    runner.setProperty(PutInfluxDB.INFLUX_DB_URL, "http://dbUrl");
    runner.setProperty(PutInfluxDB.CONSISTENCY_LEVEL, PutInfluxDB.CONSISTENCY_LEVEL_ONE.getValue());
    runner.setProperty(PutInfluxDB.RETENTION_POLICY, "autogen");
    runner.setProperty(PutInfluxDB.MAX_RECORDS_SIZE, "1 KB");
    runner.assertValid();

    byte[] bytes = "test".getBytes();
    runner.enqueue(bytes);
    runner.run(1, true, true);

    // A timeout is transient, so the FlowFile is routed to retry with the error recorded.
    runner.assertAllFlowFilesTransferred(PutInfluxDB.REL_RETRY, 1);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutInfluxDB.REL_RETRY);
    assertEquals("java.net.SocketTimeoutException: SocketTimeoutException", flowFiles.get(0).getAttribute(PutInfluxDB.INFLUX_DB_ERROR_MESSAGE));
}
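Both InfluxDB tests configure the runner identically; only the injected cause and the asserted relationship differ. The shared setup could be hoisted into a factory such as this hypothetical helper (the name is assumed; imports are the same as in the tests).

private TestRunner newConfiguredRunner(final PutInfluxDB processor) {
    // Common, fully valid configuration shared by the error-path tests.
    final TestRunner runner = TestRunners.newTestRunner(processor);
    runner.setProperty(PutInfluxDB.DB_NAME, "test");
    runner.setProperty(PutInfluxDB.USERNAME, "u1");
    runner.setProperty(PutInfluxDB.PASSWORD, "p1");
    runner.setProperty(PutInfluxDB.CHARSET, "UTF-8");
    runner.setProperty(PutInfluxDB.INFLUX_DB_URL, "http://dbUrl");
    runner.setProperty(PutInfluxDB.CONSISTENCY_LEVEL, PutInfluxDB.CONSISTENCY_LEVEL_ONE.getValue());
    runner.setProperty(PutInfluxDB.RETENTION_POLICY, "autogen");
    runner.setProperty(PutInfluxDB.MAX_RECORDS_SIZE, "1 KB");
    return runner;
}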
Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.
From the class TestPutInfluxDB, method testWriteThrowsIOException:
@Test
public void testWriteThrowsIOException() {
    // Simulate a non-retryable I/O failure from the InfluxDB client.
    mockPutInfluxDB = new PutInfluxDB() {
        @Override
        protected void writeToInfluxDB(ProcessContext context, String consistencyLevel, String database, String retentionPolicy, String records) {
            throw new InfluxDBIOException(new EOFException("EOFException"));
        }
    };
    runner = TestRunners.newTestRunner(mockPutInfluxDB);
    runner.setProperty(PutInfluxDB.DB_NAME, "test");
    runner.setProperty(PutInfluxDB.USERNAME, "u1");
    runner.setProperty(PutInfluxDB.PASSWORD, "p1");
    runner.setProperty(PutInfluxDB.CHARSET, "UTF-8");
    runner.setProperty(PutInfluxDB.INFLUX_DB_URL, "http://dbUrl");
    runner.setProperty(PutInfluxDB.CONSISTENCY_LEVEL, PutInfluxDB.CONSISTENCY_LEVEL_ONE.getValue());
    runner.setProperty(PutInfluxDB.RETENTION_POLICY, "autogen");
    runner.setProperty(PutInfluxDB.MAX_RECORDS_SIZE, "1 KB");
    runner.assertValid();

    byte[] bytes = "test".getBytes();
    runner.enqueue(bytes);
    runner.run(1, true, true);

    // Any other I/O failure is terminal, so the FlowFile is routed to failure with the error recorded.
    runner.assertAllFlowFilesTransferred(PutInfluxDB.REL_FAILURE, 1);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutInfluxDB.REL_FAILURE);
    assertEquals("java.io.EOFException: EOFException", flowFiles.get(0).getAttribute(PutInfluxDB.INFLUX_DB_ERROR_MESSAGE));
}
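Taken together, the two tests pin down a simple dispatch rule: a SocketTimeoutException cause is transient and retryable, while any other wrapped I/O failure is terminal. The sketch below is inferred from the assertions above, not taken from the PutInfluxDB source, and the method name is assumed.

private Relationship relationshipFor(final InfluxDBIOException e) {
    // Timeouts may succeed on a later attempt; other I/O failures will not.
    return e.getCause() instanceof SocketTimeoutException
            ? PutInfluxDB.REL_RETRY
            : PutInfluxDB.REL_FAILURE;
}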