Search in sources :

Example 56 with ProcessContext

use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

The following example is taken from the class GetHDFSTest, method testValidators.

@Test
public void testValidators() {
    // Exercises GetHDFS property validation: a missing Directory must fail,
    // a valid Directory must pass, and MIN_AGE > MAX_AGE must fail.
    GetHDFS proc = new TestableGetHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    Collection<ValidationResult> results;
    ProcessContext pc;

    // Case 1: no Directory configured -> exactly one "required" validation error.
    results = new HashSet<>();
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains("is invalid because Directory is required"));
    }

    // Case 2: a valid directory -> no validation errors.
    // Fix: this is a GetHDFS test, so use GetHDFS.DIRECTORY (the original used
    // PutHDFS.DIRECTORY, which only worked because both descriptors share the
    // same property name).
    results = new HashSet<>();
    runner.setProperty(GetHDFS.DIRECTORY, "target");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(0, results.size());

    // Case 3: minimum file age greater than maximum file age -> one error.
    results = new HashSet<>();
    runner.setProperty(GetHDFS.DIRECTORY, "/target");
    runner.setProperty(GetHDFS.MIN_AGE, "10 secs");
    runner.setProperty(GetHDFS.MAX_AGE, "5 secs");
    runner.enqueue(new byte[0]);
    pc = runner.getProcessContext();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains("is invalid because Minimum File Age cannot be greater than Maximum File Age"));
    }
}
Also used : TestRunner(org.apache.nifi.util.TestRunner) ValidationResult(org.apache.nifi.components.ValidationResult) MockProcessContext(org.apache.nifi.util.MockProcessContext) ProcessContext(org.apache.nifi.processor.ProcessContext) MockProcessContext(org.apache.nifi.util.MockProcessContext) Test(org.junit.Test)

Example 57 with ProcessContext

use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

The following example is taken from the class PutHDFSTest, method testPutFileWithException.

@Test
public void testPutFileWithException() throws IOException {
    // Verifies that when changeOwner throws a ProcessException, the flow file
    // is routed to the "failure" relationship and is penalized.
    // Refer to comment in the BeforeClass method for an explanation
    assumeTrue(isNotWindows());
    String dirName = "target/testPutFileWrongPermissions";
    File file = new File(dirName);
    // Return value intentionally ignored: the directory may already exist
    // from a previous run, which is fine for this test.
    file.mkdirs();
    Configuration config = new Configuration();
    FileSystem fs = FileSystem.get(config);
    Path p = new Path(dirName).makeQualified(fs.getUri(), fs.getWorkingDirectory());
    final KerberosProperties testKerberosProperties = kerberosProperties;
    // Anonymous subclass forces changeOwner to fail so the failure path is exercised.
    TestRunner runner = TestRunners.newTestRunner(new PutHDFS() {

        @Override
        protected void changeOwner(ProcessContext context, FileSystem hdfs, Path name, FlowFile flowFile) {
            throw new ProcessException("Forcing Exception to get thrown in order to verify proper handling");
        }

        @Override
        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
            return testKerberosProperties;
        }
    });
    runner.setProperty(PutHDFS.DIRECTORY, dirName);
    runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
    try (FileInputStream fis = new FileInputStream("src/test/resources/testdata/randombytes-1")) {
        // Diamond operator: the type arguments are inferred from the declaration.
        Map<String, String> attributes = new HashMap<>();
        attributes.put(CoreAttributes.FILENAME.key(), "randombytes-1");
        runner.enqueue(fis, attributes);
        runner.run();
    }
    List<MockFlowFile> failedFlowFiles = runner.getFlowFilesForRelationship(new Relationship.Builder().name("failure").build());
    assertFalse(failedFlowFiles.isEmpty());
    // A failed put must be penalized so the framework delays retries.
    assertTrue(failedFlowFiles.get(0).isPenalized());
    // Clean up the test directory.
    fs.delete(p, true);
}
Also used : Path(org.apache.hadoop.fs.Path) FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) TestRunner(org.apache.nifi.util.TestRunner) MockProcessContext(org.apache.nifi.util.MockProcessContext) ProcessContext(org.apache.nifi.processor.ProcessContext) FileInputStream(java.io.FileInputStream) MockFlowFile(org.apache.nifi.util.MockFlowFile) ProcessException(org.apache.nifi.processor.exception.ProcessException) FileSystem(org.apache.hadoop.fs.FileSystem) Relationship(org.apache.nifi.processor.Relationship) FlowFile(org.apache.nifi.flowfile.FlowFile) File(java.io.File) MockFlowFile(org.apache.nifi.util.MockFlowFile) KerberosProperties(org.apache.nifi.hadoop.KerberosProperties) Test(org.junit.Test)

Example 58 with ProcessContext

use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

The following example is taken from the class PutHDFSTest, method testValidators.

@Test
public void testValidators() {
    // Exercises PutHDFS property validation for: missing Directory, valid
    // Directory, bad replication factors, and several invalid umask values.
    PutHDFS proc = new TestablePutHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);

    // No Directory configured -> the required-property error.
    assertSingleValidationError(runner, "is invalid because Directory is required");

    // A valid directory -> no validation errors.
    runner.setProperty(PutHDFS.DIRECTORY, "target");
    runner.enqueue(new byte[0]);
    ProcessContext pc = runner.getProcessContext();
    Collection<ValidationResult> results = new HashSet<>();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(0, results.size());

    // Negative replication factor is rejected.
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "-1");
    assertSingleValidationError(runner, "is invalid because short integer must be greater than zero");

    // Zero replication factor is rejected (fresh runner to reset state).
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.REPLICATION_FACTOR, "0");
    assertSingleValidationError(runner, "is invalid because short integer must be greater than zero");

    // Negative umask is rejected.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "-1");
    assertSingleValidationError(runner, "is invalid because octal umask [-1] cannot be negative");

    // Non-octal umask is rejected (18 contains the digit 8).
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "18");
    assertSingleValidationError(runner, "is invalid because [18] is not a valid short octal number");

    // Out-of-range umask is rejected.
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.UMASK, "2000");
    assertSingleValidationError(runner, "is invalid because octal umask [2000] is not a valid umask");

    // A compression codec class not in the allowed set is rejected.
    proc = new TestablePutHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "/target");
    runner.setProperty(PutHDFS.COMPRESSION_CODEC, CompressionCodec.class.getName());
    assertSingleValidationError(runner, "is invalid because Given value not found in allowed set");
}

/**
 * Validates the runner's current configuration and asserts that it produces
 * exactly one validation error whose message contains the given fragment.
 */
private static void assertSingleValidationError(TestRunner runner, String expectedFragment) {
    runner.enqueue(new byte[0]);
    ProcessContext pc = runner.getProcessContext();
    Collection<ValidationResult> results = new HashSet<>();
    if (pc instanceof MockProcessContext) {
        results = ((MockProcessContext) pc).validate();
    }
    Assert.assertEquals(1, results.size());
    for (ValidationResult vr : results) {
        Assert.assertTrue(vr.toString().contains(expectedFragment));
    }
}
Also used : TestRunner(org.apache.nifi.util.TestRunner) CompressionCodec(org.apache.hadoop.io.compress.CompressionCodec) ValidationResult(org.apache.nifi.components.ValidationResult) MockProcessContext(org.apache.nifi.util.MockProcessContext) ProcessContext(org.apache.nifi.processor.ProcessContext) MockProcessContext(org.apache.nifi.util.MockProcessContext) Test(org.junit.Test)

Example 59 with ProcessContext

use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

The following example is taken from the class TestPutInfluxDB, method testWriteThrowsSocketTimeoutException.

@Test
public void testWriteThrowsSocketTimeoutException() {
    // A SocketTimeoutException wrapped in InfluxDBIOException must route the
    // flow file to REL_RETRY (transient failure) with the error message attribute set.
    mockPutInfluxDB = new PutInfluxDB() {

        @Override
        protected void writeToInfluxDB(ProcessContext context, String consistencyLevel, String database, String retentionPolicy, String records) {
            throw new InfluxDBIOException(new SocketTimeoutException("SocketTimeoutException"));
        }
    };
    runner = TestRunners.newTestRunner(mockPutInfluxDB);
    runner.setProperty(PutInfluxDB.DB_NAME, "test");
    runner.setProperty(PutInfluxDB.USERNAME, "u1");
    runner.setProperty(PutInfluxDB.PASSWORD, "p1");
    runner.setProperty(PutInfluxDB.CHARSET, "UTF-8");
    runner.setProperty(PutInfluxDB.INFLUX_DB_URL, "http://dbUrl");
    runner.setProperty(PutInfluxDB.CONSISTENCY_LEVEL, PutInfluxDB.CONSISTENCY_LEVEL_ONE.getValue());
    runner.setProperty(PutInfluxDB.RETENTION_POLICY, "autogen");
    runner.setProperty(PutInfluxDB.MAX_RECORDS_SIZE, "1 KB");
    runner.assertValid();
    byte[] bytes = "test".getBytes();
    runner.enqueue(bytes);
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(PutInfluxDB.REL_RETRY, 1);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutInfluxDB.REL_RETRY);
    // Fix: JUnit's assertEquals takes (expected, actual) — the original passed
    // them reversed, which produces misleading failure messages.
    assertEquals("java.net.SocketTimeoutException: SocketTimeoutException", flowFiles.get(0).getAttribute(PutInfluxDB.INFLUX_DB_ERROR_MESSAGE));
}
Also used : MockFlowFile(org.apache.nifi.util.MockFlowFile) SocketTimeoutException(java.net.SocketTimeoutException) InfluxDBIOException(org.influxdb.InfluxDBIOException) ProcessContext(org.apache.nifi.processor.ProcessContext) Test(org.junit.Test)

Example 60 with ProcessContext

use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

The following example is taken from the class TestPutInfluxDB, method testWriteThrowsIOException.

@Test
public void testWriteThrowsIOException() {
    // An EOFException wrapped in InfluxDBIOException must route the flow file
    // to REL_FAILURE (permanent failure) with the error message attribute set.
    mockPutInfluxDB = new PutInfluxDB() {

        @Override
        protected void writeToInfluxDB(ProcessContext context, String consistencyLevel, String database, String retentionPolicy, String records) {
            throw new InfluxDBIOException(new EOFException("EOFException"));
        }
    };
    runner = TestRunners.newTestRunner(mockPutInfluxDB);
    runner.setProperty(PutInfluxDB.DB_NAME, "test");
    runner.setProperty(PutInfluxDB.USERNAME, "u1");
    runner.setProperty(PutInfluxDB.PASSWORD, "p1");
    runner.setProperty(PutInfluxDB.CHARSET, "UTF-8");
    runner.setProperty(PutInfluxDB.INFLUX_DB_URL, "http://dbUrl");
    runner.setProperty(PutInfluxDB.CONSISTENCY_LEVEL, PutInfluxDB.CONSISTENCY_LEVEL_ONE.getValue());
    runner.setProperty(PutInfluxDB.RETENTION_POLICY, "autogen");
    runner.setProperty(PutInfluxDB.MAX_RECORDS_SIZE, "1 KB");
    runner.assertValid();
    byte[] bytes = "test".getBytes();
    runner.enqueue(bytes);
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(PutInfluxDB.REL_FAILURE, 1);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutInfluxDB.REL_FAILURE);
    // Fix: JUnit's assertEquals takes (expected, actual) — the original passed
    // them reversed, which produces misleading failure messages.
    assertEquals("java.io.EOFException: EOFException", flowFiles.get(0).getAttribute(PutInfluxDB.INFLUX_DB_ERROR_MESSAGE));
}
Also used : MockFlowFile(org.apache.nifi.util.MockFlowFile) InfluxDBIOException(org.influxdb.InfluxDBIOException) EOFException(java.io.EOFException) ProcessContext(org.apache.nifi.processor.ProcessContext) Test(org.junit.Test)

Aggregations

ProcessContext (org.apache.nifi.processor.ProcessContext)115 Test (org.junit.Test)67 TestRunner (org.apache.nifi.util.TestRunner)56 ProcessSession (org.apache.nifi.processor.ProcessSession)49 FlowFile (org.apache.nifi.flowfile.FlowFile)40 MockFlowFile (org.apache.nifi.util.MockFlowFile)39 HashSet (java.util.HashSet)35 Relationship (org.apache.nifi.processor.Relationship)35 List (java.util.List)34 PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor)34 ArrayList (java.util.ArrayList)33 Set (java.util.Set)33 Tags (org.apache.nifi.annotation.documentation.Tags)31 IOException (java.io.IOException)30 HashMap (java.util.HashMap)30 CapabilityDescription (org.apache.nifi.annotation.documentation.CapabilityDescription)30 ProcessException (org.apache.nifi.processor.exception.ProcessException)30 Collections (java.util.Collections)29 InputRequirement (org.apache.nifi.annotation.behavior.InputRequirement)29 ProcessSessionFactory (org.apache.nifi.processor.ProcessSessionFactory)29