Use of org.apache.nifi.hadoop.KerberosProperties in project nifi by apache: class PutHDFSTest, method testPutFileWithException.
@Test
public void testPutFileWithException() throws IOException {
    // Refer to comment in the BeforeClass method for an explanation
    assumeTrue(isNotWindows());

    String dirName = "target/testPutFileWrongPermissions";
    File file = new File(dirName);
    file.mkdirs();
    Configuration config = new Configuration();
    FileSystem fs = FileSystem.get(config);
    Path p = new Path(dirName).makeQualified(fs.getUri(), fs.getWorkingDirectory());

    final KerberosProperties testKerberosProperties = kerberosProperties;
    TestRunner runner = TestRunners.newTestRunner(new PutHDFS() {
        @Override
        protected void changeOwner(ProcessContext context, FileSystem hdfs, Path name, FlowFile flowFile) {
            throw new ProcessException("Forcing Exception to get thrown in order to verify proper handling");
        }

        @Override
        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
            return testKerberosProperties;
        }
    });
    runner.setProperty(PutHDFS.DIRECTORY, dirName);
    runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");

    try (FileInputStream fis = new FileInputStream("src/test/resources/testdata/randombytes-1")) {
        Map<String, String> attributes = new HashMap<>();
        attributes.put(CoreAttributes.FILENAME.key(), "randombytes-1");
        runner.enqueue(fis, attributes);
        runner.run();
    }

    List<MockFlowFile> failedFlowFiles = runner.getFlowFilesForRelationship(new Relationship.Builder().name("failure").build());
    assertFalse(failedFlowFiles.isEmpty());
    assertTrue(failedFlowFiles.get(0).isPenalized());

    fs.delete(p, true);
}
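The assertion above rebuilds the failure relationship from its name. A shorter, equivalent check is possible if the test can reference the processor's own relationship constant; this is a sketch, assuming PutHDFS.REL_FAILURE is accessible from the test:

// Sketch only: assert the single flow file was routed to failure and penalized.
runner.assertAllFlowFilesTransferred(PutHDFS.REL_FAILURE, 1);
MockFlowFile failed = runner.getFlowFilesForRelationship(PutHDFS.REL_FAILURE).get(0);
assertTrue(failed.isPenalized());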
Use of org.apache.nifi.hadoop.KerberosProperties in project nifi by apache: class PutHDFSTest, method setup.
@Before
public void setup() {
    mockNiFiProperties = mock(NiFiProperties.class);
    when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
    kerberosProperties = new KerberosProperties(null);
}
Use of org.apache.nifi.hadoop.KerberosProperties in project nifi by apache: class TestDeleteHDFS, method setup.
@Before
public void setup() throws Exception {
    mockNiFiProperties = mock(NiFiProperties.class);
    when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
    kerberosProperties = new KerberosProperties(null);
    mockFileSystem = mock(FileSystem.class);
}
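The mocked FileSystem and the KerberosProperties built here are typically handed to the processor under test through a test-only subclass. A minimal sketch of such a helper, assuming DeleteHDFS exposes the usual protected hooks; the actual TestableDeleteHDFS inner class used by the NiFi tests is not shown in this snippet and may differ:

private static class TestableDeleteHDFS extends DeleteHDFS {
    private final KerberosProperties testKerberosProperties;
    private final FileSystem testFileSystem;

    TestableDeleteHDFS(KerberosProperties testKerberosProperties, FileSystem testFileSystem) {
        this.testKerberosProperties = testKerberosProperties;
        this.testFileSystem = testFileSystem;
    }

    @Override
    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
        // Return the KerberosProperties created in setup() instead of reading a config file.
        return testKerberosProperties;
    }

    @Override
    protected FileSystem getFileSystem() {
        // Return the Mockito mock so no real HDFS connection is made.
        return testFileSystem;
    }
}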
Use of org.apache.nifi.hadoop.KerberosProperties in project nifi by apache: class TestFetchHDFS, method setup.
@Before
public void setup() {
    mockNiFiProperties = mock(NiFiProperties.class);
    when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
    kerberosProperties = new KerberosProperties(null);
    proc = new TestableFetchHDFS(kerberosProperties);
    runner = TestRunners.newTestRunner(proc);
}
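setup() constructs a TestableFetchHDFS that is not part of this snippet. A minimal sketch of what such a helper typically looks like, assuming it only injects the KerberosProperties built above:

private static class TestableFetchHDFS extends FetchHDFS {
    private final KerberosProperties testKerberosProperties;

    TestableFetchHDFS(KerberosProperties testKerberosProperties) {
        this.testKerberosProperties = testKerberosProperties;
    }

    @Override
    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
        // Bypass the kerberos config file and return the injected test instance.
        return testKerberosProperties;
    }
}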
Use of org.apache.nifi.hadoop.KerberosProperties in project nifi by apache: class PutHiveStreaming, method init.
@Override
protected void init(ProcessorInitializationContext context) {
    List<PropertyDescriptor> props = new ArrayList<>();
    props.add(METASTORE_URI);
    props.add(HIVE_CONFIGURATION_RESOURCES);
    props.add(DB_NAME);
    props.add(TABLE_NAME);
    props.add(PARTITION_COLUMNS);
    props.add(AUTOCREATE_PARTITIONS);
    props.add(MAX_OPEN_CONNECTIONS);
    props.add(HEARTBEAT_INTERVAL);
    props.add(TXNS_PER_BATCH);
    props.add(RECORDS_PER_TXN);
    props.add(CALL_TIMEOUT);
    props.add(ROLLBACK_ON_FAILURE);
    props.add(KERBEROS_CREDENTIALS_SERVICE);

    kerberosConfigFile = context.getKerberosConfigurationFile();
    kerberosProperties = new KerberosProperties(kerberosConfigFile);
    props.add(kerberosProperties.getKerberosPrincipal());
    props.add(kerberosProperties.getKerberosKeytab());
    propertyDescriptors = Collections.unmodifiableList(props);

    Set<Relationship> _relationships = new HashSet<>();
    _relationships.add(REL_SUCCESS);
    _relationships.add(REL_FAILURE);
    _relationships.add(REL_RETRY);
    relationships = Collections.unmodifiableSet(_relationships);
}
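init() only builds the two collections; NiFi processors usually expose them through the standard override points. A sketch of the accompanying overrides, assuming the propertyDescriptors and relationships fields referenced above and not copied from PutHiveStreaming itself:

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    // Descriptors assembled once in init(), including the Kerberos principal/keytab properties.
    return propertyDescriptors;
}

@Override
public Set<Relationship> getRelationships() {
    // Success, failure and retry relationships registered in init().
    return relationships;
}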