Use of com.thinkbiganalytics.nifi.v2.ingest.IngestProperties.FIELD_SPECIFICATION in project kylo by Teradata.
From the class CreateElasticsearchBackedHiveTable, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // Verify flow file exists
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    // Read the processor properties, evaluating expression language against the flow file
    String jarUrl = context.getProperty(JAR_URL).evaluateAttributeExpressions(flowFile).getValue();
    String useWan = context.getProperty(USE_WAN).getValue();
    String autoIndex = context.getProperty(AUTO_CREATE_INDEX).getValue();
    String idField = context.getProperty(ID_FIELD).evaluateAttributeExpressions(flowFile).getValue();

    // Parse the field specification into column specs, defaulting to an empty array
    final ColumnSpec[] columnSpecs = Optional.ofNullable(context.getProperty(FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    validateArrayProperty(FIELD_SPECIFICATION.getDisplayName(), columnSpecs, session, flowFile);

    final String feedName = context.getProperty(IngestProperties.FEED_NAME).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FEED_NAME.getDisplayName(), feedName, session, flowFile);

    final String categoryName = context.getProperty(IngestProperties.FEED_CATEGORY).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FEED_CATEGORY.getDisplayName(), categoryName, session, flowFile);

    final String nodes = context.getProperty(NODES).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(NODES.getDisplayName(), nodes, session, flowFile);

    final String indexString = context.getProperty(FIELD_INDEX_STRING).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FIELD_INDEX_STRING.getDisplayName(), indexString, session, flowFile);

    final String feedRoot = context.getProperty(FEED_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FEED_ROOT.getDisplayName(), feedRoot, session, flowFile);
    List<String> hiveStatements = getHQLStatements(columnSpecs, nodes, feedRoot, feedName, categoryName, useWan, autoIndex, idField, jarUrl, indexString);
    final ThriftService thriftService = context.getProperty(THRIFT_SERVICE).asControllerService(ThriftService.class);
    executeStatements(context, session, flowFile, hiveStatements.toArray(new String[hiveStatements.size()]), thriftService);
}
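
The FIELD_SPECIFICATION read above uses a null-safe pattern that recurs throughout these processors: wrap the possibly-null property value in an Optional, discard empty strings, parse with ColumnSpec.createFromString, and fall back to an empty array. Below is a minimal standalone sketch of that shape; the class FieldSpecPattern and its parseSpec and columnsFrom methods are hypothetical stand-ins for illustration, with parseSpec playing the role of ColumnSpec.createFromString (whose exact spec format is not shown here).

import java.util.Arrays;
import java.util.Optional;

public class FieldSpecPattern {

    // Hypothetical stand-in for ColumnSpec.createFromString: here each
    // newline-delimited entry simply becomes one element of the result.
    static String[] parseSpec(String spec) {
        return spec.split("\n");
    }

    // Same shape as the processor code: a null or empty property value
    // collapses to an empty array instead of a NullPointerException.
    static String[] columnsFrom(String propertyValue) {
        return Optional.ofNullable(propertyValue)
            .filter(s -> !s.isEmpty())
            .map(FieldSpecPattern::parseSpec)
            .orElse(new String[0]);
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(columnsFrom("id|bigint\nname|string"))); // [id|bigint, name|string]
        System.out.println(Arrays.toString(columnsFrom(null)));                     // []
        System.out.println(Arrays.toString(columnsFrom("")));                       // []
    }
}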
Use of com.thinkbiganalytics.nifi.v2.ingest.IngestProperties.FIELD_SPECIFICATION in project kylo by Teradata.
From the class RegisterFeedTables, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // Verify flow file exists
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    // Verify properties and attributes
    final String feedFormatOptions = Optional.ofNullable(context.getProperty(FEED_FORMAT_SPECS).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .orElse(DEFAULT_FEED_FORMAT_OPTIONS);
    final String targetFormatOptions = Optional.ofNullable(context.getProperty(TARGET_FORMAT_SPECS).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .orElse(DEFAULT_STORAGE_FORMAT);
    final String targetTableProperties = context.getProperty(TARGET_TBLPROPERTIES).evaluateAttributeExpressions(flowFile).getValue();
    final ColumnSpec[] partitions = Optional.ofNullable(context.getProperty(PARTITION_SPECS).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    final String tableType = context.getProperty(TABLE_TYPE).getValue();
    final ColumnSpec[] columnSpecs = Optional.ofNullable(context.getProperty(FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    if (columnSpecs == null || columnSpecs.length == 0) {
        getLog().error("Missing field specification");
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    ColumnSpec[] feedColumnSpecs = Optional.ofNullable(context.getProperty(FEED_FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    if (feedColumnSpecs == null || feedColumnSpecs.length == 0) {
        // Backwards compatibility with older templates: fall back to using the target specs for the feed table
        feedColumnSpecs = columnSpecs;
    }

    final String entity = context.getProperty(IngestProperties.FEED_NAME).evaluateAttributeExpressions(flowFile).getValue();
    if (entity == null || entity.isEmpty()) {
        getLog().error("Missing feed name");
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    final String source = context.getProperty(IngestProperties.FEED_CATEGORY).evaluateAttributeExpressions(flowFile).getValue();
    if (source == null || source.isEmpty()) {
        getLog().error("Missing category name");
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    final String feedRoot = context.getProperty(FEED_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    final String profileRoot = context.getProperty(PROFILE_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    final String masterRoot = context.getProperty(MASTER_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    final TableRegisterConfiguration config = new TableRegisterConfiguration(feedRoot, profileRoot, masterRoot);

    // Register the tables
    final ThriftService thriftService = context.getProperty(THRIFT_SERVICE).asControllerService(ThriftService.class);
    try (final Connection conn = thriftService.getConnection()) {
        final TableRegisterSupport register = new TableRegisterSupport(conn, config);
        final boolean result;
        if (ALL_TABLES.equals(tableType)) {
            result = register.registerStandardTables(source, entity, feedColumnSpecs, feedFormatOptions, targetFormatOptions, partitions, columnSpecs, targetTableProperties);
        } else {
            result = register.registerTable(source, entity, feedColumnSpecs, feedFormatOptions, targetFormatOptions, partitions, columnSpecs, targetTableProperties, TableType.valueOf(tableType), true);
        }
        session.transfer(flowFile, result ? REL_SUCCESS : REL_FAILURE);
    } catch (final ProcessException | SQLException e) {
        getLog().error("Unable to obtain connection for {} due to {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
    }
}
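
The registration block above also shows the routing idiom used across these processors: obtain a connection in try-with-resources so it is always closed, let a boolean result choose between REL_SUCCESS and REL_FAILURE, and route any exception to failure. Below is a compilable sketch of that idiom under simplified assumptions; the ConnectionProvider interface and register method are hypothetical stand-ins for ThriftService and TableRegisterSupport, not the real Kylo APIs.

import java.sql.Connection;
import java.sql.SQLException;

public class RoutingPattern {

    enum Relationship { SUCCESS, FAILURE }

    // Hypothetical stand-in for ThriftService.getConnection()
    interface ConnectionProvider {
        Connection getConnection() throws SQLException;
    }

    // Hypothetical stand-in for the registration step; here it merely
    // reports whether the connection is healthy within a 5-second timeout.
    static boolean register(Connection conn) throws SQLException {
        return conn.isValid(5);
    }

    // Mirrors the processor: try-with-resources closes the connection,
    // the boolean result picks the relationship, and any SQLException
    // (like the ProcessException in the original) routes to FAILURE.
    static Relationship route(ConnectionProvider provider) {
        try (Connection conn = provider.getConnection()) {
            return register(conn) ? Relationship.SUCCESS : Relationship.FAILURE;
        } catch (SQLException e) {
            return Relationship.FAILURE;
        }
    }
}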