Use of org.apache.nifi.components.PropertyDescriptor in project kylo by Teradata.
The init method of the ThriftConnectionPool class:
@Override
protected void init(@Nonnull final ControllerServiceInitializationContext config) throws InitializationException {
    // Create Kerberos properties
    final SpringSecurityContextLoader securityContextLoader = SpringSecurityContextLoader.create(config);
    final KerberosProperties kerberosProperties = securityContextLoader.getKerberosProperties();
    kerberosKeytab = kerberosProperties.createKerberosKeytabProperty();
    kerberosPrincipal = kerberosProperties.createKerberosPrincipalProperty();

    // Create list of properties
    final List<PropertyDescriptor> props = new ArrayList<>();
    props.add(DATABASE_URL);
    props.add(DB_DRIVERNAME);
    props.add(DB_DRIVER_JAR_URL);
    props.add(DB_USER);
    props.add(DB_PASSWORD);
    props.add(DB_VALIDATION_QUERY);
    props.add(DB_VALIDATION_QUERY_TIMEOUT);
    props.add(MAX_WAIT_TIME);
    props.add(MAX_TOTAL_CONNECTIONS);
    props.add(HADOOP_CONFIGURATION_RESOURCES);
    props.add(kerberosPrincipal);
    props.add(kerberosKeytab);
    properties = Collections.unmodifiableList(props);
}
Use of org.apache.nifi.components.PropertyDescriptor in project kylo by Teradata.
The init method of the ImportSqoop class:
@Override
protected void init(@Nonnull final ProcessorInitializationContext context) {
    super.init(context);

    /* Create Kerberos properties */
    final SpringSecurityContextLoader securityContextLoader = SpringSecurityContextLoader.create(context);
    final KerberosProperties kerberosProperties = securityContextLoader.getKerberosProperties();
    KERBEROS_KEYTAB = kerberosProperties.createKerberosKeytabProperty();
    KERBEROS_PRINCIPAL = kerberosProperties.createKerberosPrincipalProperty();

    /* Create list of properties */
    final List<PropertyDescriptor> properties = new ArrayList<>();
    properties.add(KERBEROS_PRINCIPAL);
    properties.add(KERBEROS_KEYTAB);
    properties.add(SQOOP_CONNECTION_SERVICE);
    properties.add(SOURCE_TABLE_NAME);
    properties.add(SOURCE_TABLE_FIELDS);
    properties.add(SOURCE_TABLE_WHERE_CLAUSE);
    properties.add(SOURCE_LOAD_STRATEGY);
    properties.add(SOURCE_CHECK_COLUMN_NAME);
    properties.add(SOURCE_CHECK_COLUMN_LAST_VALUE);
    properties.add(SOURCE_PROPERTY_WATERMARK);
    properties.add(SOURCE_SPLIT_BY_FIELD);
    properties.add(SOURCE_BOUNDARY_QUERY);
    properties.add(CLUSTER_MAP_TASKS);
    properties.add(CLUSTER_UI_JOB_NAME);
    properties.add(TARGET_HDFS_DIRECTORY);
    properties.add(TARGET_HDFS_DIRECTORY_EXISTS_STRATEGY);
    properties.add(TARGET_EXTRACT_DATA_FORMAT);
    properties.add(TARGET_HDFS_FILE_FIELD_DELIMITER);
    properties.add(TARGET_HDFS_FILE_RECORD_DELIMITER);
    properties.add(TARGET_HIVE_DELIM_STRATEGY);
    properties.add(TARGET_HIVE_REPLACE_DELIM);
    properties.add(TARGET_COMPRESSION_ALGORITHM);
    properties.add(TARGET_COLUMN_TYPE_MAPPING);
    properties.add(SQOOP_CODEGEN_DIR);
    properties.add(SOURCESPECIFIC_SQLSERVER_SCHEMA);
    properties.add(SQOOP_SYSTEM_PROPERTIES);
    properties.add(SQOOP_ADDITIONAL_ARGUMENTS);
    this.properties = Collections.unmodifiableList(properties);

    /* Create list of relationships */
    final Set<Relationship> relationships = new HashSet<>();
    relationships.add(REL_SUCCESS);
    relationships.add(REL_FAILURE);
    this.relationships = Collections.unmodifiableSet(relationships);
}
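Once init has cached the immutable list and set, the NiFi framework retrieves them through the standard accessor overrides. A sketch of the usual idiom (ImportSqoop's actual overrides are assumed to follow it):

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    return properties;      // the immutable list built in init()
}

@Override
public Set<Relationship> getRelationships() {
    return relationships;   // the immutable set built in init()
}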
Use of org.apache.nifi.components.PropertyDescriptor in project kylo by Teradata.
The onTrigger method of the PutFeedMetadata class:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final ComponentLog logger = getLog();
    FlowFile flowFile = null;
    try {
        if (context.hasIncomingConnection()) {
            flowFile = session.get();
            // We should run only if we have a FlowFile, or if all incoming connections are self-loops
            if (flowFile == null && context.hasNonLoopConnection()) {
                return;
            }
        }
        final FlowFile incoming = flowFile;

        // Read the category, feed, and namespace property values
        String category = context.getProperty(CATEGORY_NAME).evaluateAttributeExpressions(flowFile).getValue();
        String feed = context.getProperty(FEED_NAME).evaluateAttributeExpressions(flowFile).getValue();
        String namespace = context.getProperty(NAMESPACE).evaluateAttributeExpressions(flowFile).getValue();
        logger.debug("The category is: " + category + " and the feed is: " + feed);

        MetadataProvider metadataProvider = getMetadataService(context).getProvider();

        // Ignore the three required properties and send the rest to the metadata server
        Map<PropertyDescriptor, String> properties = context.getProperties();
        Set<PropertyDescriptor> propertyKeys = properties.keySet();
        Properties metadataProperties = new Properties();
        for (PropertyDescriptor property : propertyKeys) {
            String propertyName = property.getName();
            String value = context.getProperty(propertyName).evaluateAttributeExpressions(flowFile).getValue();
            if (!PROPERTY_LIST_TO_IGNORE.contains(propertyName)) {
                metadataProperties.setProperty(METADATA_FIELD_PREFIX + ":" + namespace + ":" + propertyName, value);
            }
        }

        String feedId = metadataProvider.getFeedId(category, feed);
        metadataProvider.updateFeedProperties(feedId, metadataProperties);
        session.transfer(flowFile, REL_SUCCESS);
    } catch (Exception e) {
        logger.error("Error processing custom feed metadata", e);
        session.transfer(flowFile, REL_FAILURE);
    }
}
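For the loop over context.getProperties() to see user-added entries at all, the processor must accept dynamic properties. A minimal sketch of the standard NiFi override for that (assumed here for illustration, not copied from PutFeedMetadata):

@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    // Accept any unrecognized property name as a dynamic, expression-language-enabled property.
    return new PropertyDescriptor.Builder()
            .name(propertyDescriptorName)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(true)
            .dynamic(true)
            .build();
}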
Use of org.apache.nifi.components.PropertyDescriptor in project nifi by apache.
The getControllerServicePropertyDescriptor method of the StandardNiFiServiceFacade class:
@Override
public PropertyDescriptorDTO getControllerServicePropertyDescriptor(final String id, final String property) {
    final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(id);
    PropertyDescriptor descriptor = controllerService.getControllerServiceImplementation().getPropertyDescriptor(property);

    // Return an invalid descriptor if the controller service doesn't support this property
    if (descriptor == null) {
        descriptor = new PropertyDescriptor.Builder().name(property).addValidator(Validator.INVALID).dynamic(true).build();
    }

    final String groupId = controllerService.getProcessGroup() == null ? null : controllerService.getProcessGroup().getIdentifier();
    return dtoFactory.createPropertyDescriptorDto(descriptor, groupId);
}
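The fallback branch is the notable part: Validator.INVALID fails validation for every input, so a property the service does not declare surfaces as invalid in the UI rather than being silently accepted. The same pattern in isolation (the property name is hypothetical):

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.Validator;

// A descriptor that can never validate, marking the property as unsupported.
final PropertyDescriptor unsupported = new PropertyDescriptor.Builder()
        .name("some-unknown-property")     // hypothetical name
        .addValidator(Validator.INVALID)   // rejects every value
        .dynamic(true)                     // displayed as a user-supplied property
        .build();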
Use of org.apache.nifi.components.PropertyDescriptor in project nifi by apache.
The testSynchronizeFlowWithReportingTaskAndProcessorReferencingControllerService test of the TestFlowController class:
@Test
public void testSynchronizeFlowWithReportingTaskAndProcessorReferencingControllerService() throws IOException {
    final FlowSynchronizer standardFlowSynchronizer = new StandardFlowSynchronizer(StringEncryptor.createEncryptor(nifiProperties), nifiProperties);

    // Create a mock proposed data flow with the same auth fingerprint as the current authorizer
    final String authFingerprint = authorizer.getFingerprint();
    final DataFlow proposedDataFlow = Mockito.mock(DataFlow.class);
    when(proposedDataFlow.getAuthorizerFingerprint()).thenReturn(authFingerprint.getBytes(StandardCharsets.UTF_8));
    final File flowFile = new File("src/test/resources/conf/reporting-task-with-cs-flow-0.7.0.xml");
    final String flow = IOUtils.toString(new FileInputStream(flowFile));
    when(proposedDataFlow.getFlow()).thenReturn(flow.getBytes(StandardCharsets.UTF_8));

    controller.synchronize(standardFlowSynchronizer, proposedDataFlow);

    // There should be two controller services
    final Set<ControllerServiceNode> controllerServiceNodes = controller.getAllControllerServices();
    assertNotNull(controllerServiceNodes);
    assertEquals(2, controllerServiceNodes.size());

    // Find the controller service that was moved to the root group
    final ControllerServiceNode rootGroupCs = controllerServiceNodes.stream().filter(c -> c.getProcessGroup() != null).findFirst().get();
    assertNotNull(rootGroupCs);

    // Find the controller service that was not moved to the root group
    final ControllerServiceNode controllerCs = controllerServiceNodes.stream().filter(c -> c.getProcessGroup() == null).findFirst().get();
    assertNotNull(controllerCs);

    // Should be the same class (not ghosted), with different ids and the same properties
    assertEquals(rootGroupCs.getCanonicalClassName(), controllerCs.getCanonicalClassName());
    assertFalse(rootGroupCs.getCanonicalClassName().contains("Ghost"));
    assertNotEquals(rootGroupCs.getIdentifier(), controllerCs.getIdentifier());
    assertEquals(rootGroupCs.getProperties(), controllerCs.getProperties());

    // There should be one processor
    final Set<ProcessorNode> processorNodes = controller.getGroup(controller.getRootGroupId()).getProcessors();
    assertNotNull(processorNodes);
    assertEquals(1, processorNodes.size());

    // Verify the processor is still pointing at the controller service that got moved to the root group
    final ProcessorNode processorNode = processorNodes.stream().findFirst().get();
    final PropertyDescriptor procControllerServiceProp = processorNode.getProperties().entrySet().stream()
            .filter(e -> e.getValue().equals(rootGroupCs.getIdentifier()))
            .map(e -> e.getKey())
            .findFirst()
            .get();
    assertNotNull(procControllerServiceProp);

    // There should be one reporting task
    final Set<ReportingTaskNode> reportingTaskNodes = controller.getAllReportingTasks();
    assertNotNull(reportingTaskNodes);
    assertEquals(1, reportingTaskNodes.size());

    // Verify that the reporting task is pointing at the controller service at the controller level
    final ReportingTaskNode reportingTaskNode = reportingTaskNodes.stream().findFirst().get();
    final PropertyDescriptor reportingTaskControllerServiceProp = reportingTaskNode.getProperties().entrySet().stream()
            .filter(e -> e.getValue().equals(controllerCs.getIdentifier()))
            .map(e -> e.getKey())
            .findFirst()
            .get();
    assertNotNull(reportingTaskControllerServiceProp);
}
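The two stream lookups above share one shape: find the PropertyDescriptor whose configured value is a given controller service id. A small helper capturing that pattern (hypothetical, not part of the test):

import java.util.Map;
import java.util.Optional;
import org.apache.nifi.components.PropertyDescriptor;

static Optional<PropertyDescriptor> findPropertyReferencing(final Map<PropertyDescriptor, String> props, final String serviceId) {
    return props.entrySet().stream()
            .filter(e -> serviceId.equals(e.getValue()))
            .map(Map.Entry::getKey)
            .findFirst();
}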