Use of org.apache.commons.lang3.StringUtils in project hub-alert by blackducksoftware.
The class UpdateEmailService, method sendUpdateEmail.
public void sendUpdateEmail(UpdateModel updateModel) {
    String updateVersion = updateModel.getDockerTagVersion();
    if (wasEmailAlreadySentForVersion(updateVersion)) {
        return;
    }
    String username = "sysadmin";
    Optional<String> optionalEmailAddress = userAccessor.getUser(username)
        .map(UserModel::getEmailAddress)
        .filter(StringUtils::isNotBlank);
    if (optionalEmailAddress.isPresent()) {
        try {
            EmailGlobalConfigModel emailServerConfiguration = emailGlobalConfigAccessor.getConfiguration()
                .orElseThrow(() -> new AlertException("No global email configuration found"));
            String alertServerUrl = alertProperties.getServerURL();
            Map<String, Object> templateFields = new HashMap<>();
            templateFields.put(EmailPropertyKeys.TEMPLATE_KEY_SUBJECT_LINE.getPropertyKey(), SUBJECT_LINE);
            templateFields.put("newVersionName", updateVersion);
            templateFields.put("repositoryUrl", updateModel.getRepositoryUrl());
            templateFields.put(FreemarkerTemplatingService.KEY_ALERT_SERVER_URL, alertServerUrl);
            handleSend(
                javamailPropertiesFactory.createJavaMailProperties(emailServerConfiguration),
                emailServerConfiguration.getSmtpFrom().orElse(StringUtils.EMPTY),
                emailServerConfiguration.getSmtpHost().orElse(StringUtils.EMPTY),
                emailServerConfiguration.getSmtpPort().orElse(0),
                emailServerConfiguration.getSmtpAuth().orElse(false),
                emailServerConfiguration.getSmtpUsername().orElse(StringUtils.EMPTY),
                emailServerConfiguration.getSmtpPassword().orElse(StringUtils.EMPTY),
                templateFields,
                optionalEmailAddress.get());
            settingsKeyAccessor.saveSettingsKey(SETTINGS_KEY_VERSION_FOR_UPDATE_EMAIL, updateVersion);
        } catch (AlertException e) {
            logger.debug("Problem sending version update email.", e);
        }
    } else {
        logger.debug("No email address configured for user: {}", username);
    }
}
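The StringUtils idiom to note here is chaining .filter(StringUtils::isNotBlank) onto an Optional, so a null, empty, or whitespace-only email address all collapse to the same absent case. A minimal, runnable sketch of that pattern, with a plain Map standing in for the userAccessor/UserModel lookup (the map and its sample data are hypothetical):

import java.util.Map;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

public class BlankSafeLookup {
    // Hypothetical stand-in for the userAccessor/UserModel pair in the snippet above.
    private static final Map<String, String> EMAIL_BY_USER = Map.of("sysadmin", "   ");

    public static void main(String[] args) {
        Optional<String> email = Optional.ofNullable(EMAIL_BY_USER.get("sysadmin"))
            .filter(StringUtils::isNotBlank); // whitespace-only collapses to Optional.empty()
        System.out.println(email.isPresent() ? "send to " + email.get() : "no usable address");
    }
}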
Use of org.apache.commons.lang3.StringUtils in project hub-alert by blackducksoftware.
The class JobConfigActions, method testWithoutChecks.
@Override
protected ValidationActionResponse testWithoutChecks(JobFieldModel resource) {
    ValidationResponseModel responseModel;
    String jobIdString = resource.getJobId();
    UUID jobId = Optional.ofNullable(jobIdString)
        .filter(StringUtils::isNotBlank)
        .map(UUID::fromString)
        .orElse(null);
    try {
        Collection<FieldModel> otherJobModels = new LinkedList<>();
        FieldModel channelFieldModel = getChannelFieldModelAndPopulateOtherJobModels(resource, otherJobModels);
        if (null != channelFieldModel) {
            String channelDescriptorName = channelFieldModel.getDescriptorName();
            Map<String, ConfigurationFieldModel> fields = createFieldsMap(channelFieldModel, otherJobModels);
            // The custom message fields are not written to the database, nor defined as fields in the database, so they must be added manually.
            // TODO Create a mechanism to build the field accessor from a combination of fields that are in the database and fields that are not.
            Optional<ConfigurationFieldModel> topicField = convertFieldToConfigurationField(channelFieldModel, FieldModelTestAction.KEY_CUSTOM_TOPIC);
            Optional<ConfigurationFieldModel> messageField = convertFieldToConfigurationField(channelFieldModel, FieldModelTestAction.KEY_CUSTOM_MESSAGE);
            topicField.ifPresent(model -> fields.put(FieldModelTestAction.KEY_CUSTOM_TOPIC, model));
            messageField.ifPresent(model -> fields.put(FieldModelTestAction.KEY_CUSTOM_MESSAGE, model));
            MessageResult providerTestResult = testProviderConfig(new FieldUtility(fields), jobIdString, channelFieldModel);
            if (providerTestResult.hasErrors()) {
                responseModel = ValidationResponseModel.fromStatusCollection(providerTestResult.getStatusMessage(), providerTestResult.getFieldStatuses());
                return new ValidationActionResponse(HttpStatus.OK, responseModel);
            }
            List<BlackDuckProjectDetailsModel> projectFilterDetails = Optional.ofNullable(resource.getConfiguredProviderProjects())
                .orElse(List.of())
                .stream()
                .map(jobProject -> new BlackDuckProjectDetailsModel(jobProject.getName(), jobProject.getHref()))
                .collect(Collectors.toList());
            DistributionJobModel testJobModel = distributionJobModelExtractor.convertToJobModel(jobId, fields, DateUtils.createCurrentDateTimestamp(), null, projectFilterDetails);
            DistributionChannelTestAction distributionChannelTestAction = channelTestActionMap.findRequiredAction(channelDescriptorName);
            MessageResult testActionResult = distributionChannelTestAction.testConfig(
                testJobModel,
                testJobModel.getName(),
                topicField.flatMap(ConfigurationFieldModel::getFieldValue).orElse(null),
                messageField.flatMap(ConfigurationFieldModel::getFieldValue).orElse(null));
            List<AlertFieldStatus> resultFieldStatuses = testActionResult.getFieldStatuses();
            List<AlertFieldStatus> allStatuses = Stream.concat(resultFieldStatuses.stream(), providerTestResult.fieldWarnings().stream()).collect(Collectors.toList());
            responseModel = ValidationResponseModel.fromStatusCollection(testActionResult.getStatusMessage(), allStatuses);
            return new ValidationActionResponse(HttpStatus.OK, responseModel);
        }
        responseModel = ValidationResponseModel.generalError("No field model of type channel was sent to test.");
        return new ValidationActionResponse(HttpStatus.BAD_REQUEST, responseModel);
    } catch (IntegrationRestException e) {
        logger.error(e.getMessage(), e);
        return ValidationActionResponse.createResponseFromIntegrationRestException(e);
    } catch (AlertFieldException e) {
        logger.error("Test error with field errors", e);
        responseModel = ValidationResponseModel.fromStatusCollection(e.getMessage(), e.getFieldErrors());
        return new ValidationActionResponse(HttpStatus.OK, responseModel);
    } catch (IntegrationException e) {
        // TODO This is not necessarily a PKIX error.
        responseModel = pkixErrorResponseFactory.createSSLExceptionResponse(e).orElse(ValidationResponseModel.generalError(e.getMessage()));
        return new ValidationActionResponse(HttpStatus.OK, responseModel);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        responseModel = pkixErrorResponseFactory.createSSLExceptionResponse(e).orElse(ValidationResponseModel.generalError(e.getMessage()));
        return new ValidationActionResponse(HttpStatus.OK, responseModel);
    }
}
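The StringUtils usage here is a null- and blank-safe UUID parse: filter(StringUtils::isNotBlank) short-circuits the chain so UUID.fromString is only reached with a real value, and a missing job ID never throws. A self-contained sketch of the idiom (the method name is illustrative; a malformed non-blank string would still throw IllegalArgumentException, exactly as in the original):

import java.util.Optional;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;

public class BlankSafeUuidParse {
    // Returns null for null, "", or whitespace-only input; parses everything else.
    static UUID parseJobId(String jobIdString) {
        return Optional.ofNullable(jobIdString)
            .filter(StringUtils::isNotBlank)   // guard: skip map() on blank input
            .map(UUID::fromString)             // only reached with a non-blank string
            .orElse(null);
    }

    public static void main(String[] args) {
        System.out.println(parseJobId("  "));                                   // null
        System.out.println(parseJobId("123e4567-e89b-12d3-a456-426614174000")); // parsed UUID
    }
}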
Use of org.apache.commons.lang3.StringUtils in project kylo by Teradata.
The class AbstractMergeTable, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String blockingValue = context.getProperty(BLOCKING_KEY).evaluateAttributeExpressions(flowFile).getValue();
    String flowFileId = flowFile.getAttribute(CoreAttributes.UUID.key());
    boolean block = false;
    if (blocking && blockingCache.putIfAbsent(blockingValue, flowFileId) != null) {
        if (StringUtils.isBlank(flowFile.getAttribute(BLOCKED_START_TIME))) {
            flowFile = session.putAttribute(flowFile, BLOCKED_START_TIME, String.valueOf(System.currentTimeMillis()));
            getLogger().info("Transferring flow file {} to blocked relationship", new Object[] { flowFile });
        }
        // Penalize the flow file and transfer it to BLOCKED.
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_BLOCKED);
        return;
    }
    // Add the blocking time to the flow file if it was previously blocked.
    if (blocking && StringUtils.isNotBlank(flowFile.getAttribute(BLOCKED_START_TIME))) {
        String blockedStartTime = flowFile.getAttribute(BLOCKED_START_TIME);
        try {
            Long l = Long.parseLong(blockedStartTime);
            Long blockTime = System.currentTimeMillis() - l;
            getLogger().info("Processing blocked flow file {}. This was blocked for {} ms", new Object[] { flowFile, blockTime });
            flowFile = session.putAttribute(flowFile, BLOCKED_TIME, String.valueOf(blockTime) + " ms");
        } catch (NumberFormatException e) {
            // Ignore an unparseable start time; the flow file proceeds without a blocked-time attribute.
        }
    }
    String PROVENANCE_EXECUTION_STATUS_KEY = context.getName() + " Execution Status";
    String partitionSpecString = context.getProperty(PARTITION_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue();
    String sourceSchema = context.getProperty(SOURCE_SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
    String sourceTable = context.getProperty(SOURCE_TABLE).evaluateAttributeExpressions(flowFile).getValue();
    String targetSchema = context.getProperty(TARGET_SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
    String targetTable = context.getProperty(TARGET_TABLE).evaluateAttributeExpressions(flowFile).getValue();
    String feedPartitionValue = context.getProperty(FEED_PARTITION).evaluateAttributeExpressions(flowFile).getValue();
    String mergeStrategyValue = context.getProperty(MERGE_STRATEGY).evaluateAttributeExpressions(flowFile).getValue();
    String hiveConfigurations = context.getProperty(HIVE_CONFIGURATIONS).evaluateAttributeExpressions(flowFile).getValue();
    boolean resetHive = context.getProperty(RESET_HIVE).asBoolean();
    final ColumnSpec[] columnSpecs = Optional.ofNullable(context.getProperty(FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    if (STRATEGY_PK_MERGE.equals(mergeStrategyValue) && (columnSpecs == null || columnSpecs.length == 0)) {
        getLog().error("Missing required field specification for PK merge feature");
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Failed: Missing required field specification for PK merge feature");
        release(blockingValue);
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    // Maintain the default strategy for backward compatibility.
    if (StringUtils.isEmpty(mergeStrategyValue)) {
        mergeStrategyValue = STRATEGY_DEDUPE_MERGE;
    }
    logger.info("Merge strategy: " + mergeStrategyValue + " Using Source: " + sourceTable + " Target: " + targetTable + " feed partition: " + feedPartitionValue + " partSpec: " + partitionSpecString);
    final StopWatch stopWatch = new StopWatch(true);
    try (final Connection conn = getConnection(context)) {
        TableMergeSyncSupport mergeSupport = new TableMergeSyncSupport(conn);
        if (resetHive) {
            mergeSupport.resetHiveConf();
        }
        mergeSupport.enableDynamicPartitions();
        if (StringUtils.isNotEmpty(hiveConfigurations)) {
            mergeSupport.setHiveConf(hiveConfigurations.split("\\|"));
        }
        PartitionSpec partitionSpec = new PartitionSpec(partitionSpecString);
        if (STRATEGY_DEDUPE_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, true);
        } else if (STRATEGY_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, false);
        } else if (STRATEGY_SYNC.equals(mergeStrategyValue)) {
            mergeSupport.doSync(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue);
        } else if (STRATEGY_ROLLING_SYNC.equals(mergeStrategyValue)) {
            mergeSupport.doRollingSync(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue);
        } else if (STRATEGY_PK_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, columnSpecs);
        } else {
            throw new UnsupportedOperationException("Failed to resolve the merge strategy");
        }
        session.getProvenanceReporter().modifyContent(flowFile, "Execution completed", stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Successful");
        release(blockingValue);
        logger.info("Execution completed: " + stopWatch.getElapsed(TimeUnit.MILLISECONDS) + " Merge strategy: " + mergeStrategyValue + " Using Source: " + sourceTable + " Target: " + targetTable + " feed partition: " + feedPartitionValue + " partSpec: " + partitionSpecString);
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final Exception e) {
        logger.error("Unable to execute merge doMerge for {} due to {}; routing to failure", new Object[] { flowFile, e }, e);
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Failed: " + e.getMessage());
        release(blockingValue);
        session.transfer(flowFile, REL_FAILURE);
    }
}
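Two StringUtils guards carry the blocking logic above: isBlank decides whether to stamp BLOCKED_START_TIME the first time a flow file is blocked, and isNotBlank decides whether to compute the elapsed blocked time once it flows through. A standalone sketch of that stamp-then-measure round trip, with a plain attribute map standing in for the NiFi flow file (the attribute key and class name are illustrative):

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;

public class BlockedTimeStamping {
    static final String BLOCKED_START_TIME = "blocked.start.time";

    public static void main(String[] args) throws InterruptedException {
        Map<String, String> attributes = new HashMap<>();
        // First pass: no start time yet, so stamp one (mirrors the isBlank branch).
        if (StringUtils.isBlank(attributes.get(BLOCKED_START_TIME))) {
            attributes.put(BLOCKED_START_TIME, String.valueOf(System.currentTimeMillis()));
        }
        Thread.sleep(50);
        // Later pass: a start time exists, so report the elapsed blocked time.
        if (StringUtils.isNotBlank(attributes.get(BLOCKED_START_TIME))) {
            long blockTime = System.currentTimeMillis() - Long.parseLong(attributes.get(BLOCKED_START_TIME));
            System.out.println("blocked for " + blockTime + " ms");
        }
    }
}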
Use of org.apache.commons.lang3.StringUtils in project kylo by Teradata.
The class CreateElasticsearchBackedHiveTable, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    String jarUrl = context.getProperty(JAR_URL).evaluateAttributeExpressions(flowFile).getValue();
    String useWan = context.getProperty(USE_WAN).getValue();
    String autoIndex = context.getProperty(AUTO_CREATE_INDEX).getValue();
    String idField = context.getProperty(ID_FIELD).evaluateAttributeExpressions(flowFile).getValue();
    final ColumnSpec[] columnSpecs = Optional.ofNullable(context.getProperty(FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    validateArrayProperty(FIELD_SPECIFICATION.getDisplayName(), columnSpecs, session, flowFile);
    final String feedName = context.getProperty(IngestProperties.FEED_NAME).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FEED_NAME.getDisplayName(), feedName, session, flowFile);
    final String categoryName = context.getProperty(IngestProperties.FEED_CATEGORY).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FEED_CATEGORY.getDisplayName(), categoryName, session, flowFile);
    final String nodes = context.getProperty(NODES).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(NODES.getDisplayName(), nodes, session, flowFile);
    final String indexString = context.getProperty(FIELD_INDEX_STRING).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FIELD_INDEX_STRING.getDisplayName(), indexString, session, flowFile);
    final String feedRoot = context.getProperty(FEED_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    validateStringProperty(FEED_ROOT.getDisplayName(), feedRoot, session, flowFile);
    List<String> hiveStatements = getHQLStatements(columnSpecs, nodes, feedRoot, feedName, categoryName, useWan, autoIndex, idField, jarUrl, indexString);
    final ThriftService thriftService = context.getProperty(THRIFT_SERVICE).asControllerService(ThriftService.class);
    executeStatements(context, session, flowFile, hiveStatements.toArray(new String[hiveStatements.size()]), thriftService);
}
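The recurring idiom across these kylo processors is parsing an optional property into an array while treating the empty string as absent: filter(StringUtils::isNotEmpty) short-circuits the map, so the parser never sees "". A self-contained sketch with a trivial comma-split parser standing in for ColumnSpec.createFromString (the parser and class name are hypothetical):

import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

public class EmptySafeParse {
    // Stand-in for ColumnSpec.createFromString: split a spec string into parts.
    static String[] parseSpecs(String raw) {
        return Optional.ofNullable(raw)
            .filter(StringUtils::isNotEmpty)  // "" and null both yield the default
            .map(s -> s.split(","))
            .orElse(new String[0]);
    }

    public static void main(String[] args) {
        System.out.println(parseSpecs(null).length);      // 0
        System.out.println(parseSpecs("").length);        // 0
        System.out.println(parseSpecs("id,name").length); // 2
    }
}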
Use of org.apache.commons.lang3.StringUtils in project kylo by Teradata.
The class RegisterFeedTables, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // Verify the flow file exists.
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    // Verify properties and attributes.
    final String feedFormatOptions = Optional.ofNullable(context.getProperty(FEED_FORMAT_SPECS).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .orElse(DEFAULT_FEED_FORMAT_OPTIONS);
    final String targetFormatOptions = Optional.ofNullable(context.getProperty(TARGET_FORMAT_SPECS).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .orElse(DEFAULT_STORAGE_FORMAT);
    final String feedTableProperties = context.getProperty(FEED_TBLPROPERTIES).evaluateAttributeExpressions(flowFile).getValue();
    final String targetTableProperties = context.getProperty(TARGET_TBLPROPERTIES).evaluateAttributeExpressions(flowFile).getValue();
    final ColumnSpec[] partitions = Optional.ofNullable(context.getProperty(PARTITION_SPECS).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    final String tableType = context.getProperty(TABLE_TYPE).getValue();
    final String feedTableOverride = context.getProperty(FEED_TABLE_OVERRIDE).evaluateAttributeExpressions(flowFile).getValue();
    final ColumnSpec[] columnSpecs = Optional.ofNullable(context.getProperty(FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    if (StringUtils.isEmpty(feedTableOverride) && (columnSpecs == null || columnSpecs.length == 0)) {
        getLog().error("Missing field specification");
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    ColumnSpec[] feedColumnSpecs = Optional.ofNullable(context.getProperty(FEED_FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue())
        .filter(StringUtils::isNotEmpty)
        .map(ColumnSpec::createFromString)
        .orElse(new ColumnSpec[0]);
    if (feedColumnSpecs == null || feedColumnSpecs.length == 0) {
        // For backwards compatibility with older templates, set the source and target specifications to the same value.
        feedColumnSpecs = columnSpecs;
    }
    final String entity = context.getProperty(IngestProperties.FEED_NAME).evaluateAttributeExpressions(flowFile).getValue();
    if (entity == null || entity.isEmpty()) {
        getLog().error("Missing feed name");
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    final String source = context.getProperty(IngestProperties.FEED_CATEGORY).evaluateAttributeExpressions(flowFile).getValue();
    if (source == null || source.isEmpty()) {
        getLog().error("Missing category name");
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    final String feedRoot = context.getProperty(FEED_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    final String profileRoot = context.getProperty(PROFILE_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    final String masterRoot = context.getProperty(MASTER_ROOT).evaluateAttributeExpressions(flowFile).getValue();
    final TableRegisterConfiguration config = new TableRegisterConfiguration(feedRoot, profileRoot, masterRoot);
    // Register the tables.
    final ThriftService thriftService = context.getProperty(THRIFT_SERVICE).asControllerService(ThriftService.class);
    try (final Connection conn = thriftService.getConnection()) {
        final TableRegisterSupport register = new TableRegisterSupport(conn, config);
        final boolean result;
        if (ALL_TABLES.equals(tableType)) {
            result = register.registerStandardTables(source, entity, feedColumnSpecs, feedFormatOptions, targetFormatOptions, partitions, columnSpecs, feedTableProperties, targetTableProperties, feedTableOverride);
        } else {
            result = register.registerTable(source, entity, feedColumnSpecs, feedFormatOptions, targetFormatOptions, partitions, columnSpecs, feedTableProperties, targetTableProperties, TableType.valueOf(tableType), true, feedTableOverride);
        }
        final Relationship relnResult = result ? REL_SUCCESS : REL_FAILURE;
        session.transfer(flowFile, relnResult);
    } catch (final ProcessException | SQLException e) {
        getLog().error("Unable to obtain connection for {} due to {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
    }
}
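This processor uses the same filter in a third role: supplying a default when a property is null or empty, via Optional.ofNullable(...).filter(StringUtils::isNotEmpty).orElse(default). A minimal sketch of that defaulting pattern (the method name and the default value are illustrative, not the project's actual constants):

import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

public class EmptySafeDefault {
    // Illustrative default; the real DEFAULT_STORAGE_FORMAT lives in the processor.
    static final String DEFAULT_STORAGE_FORMAT = "STORED AS ORC";

    static String formatOptionsOrDefault(String configured) {
        return Optional.ofNullable(configured)
            .filter(StringUtils::isNotEmpty)  // treat "" like a missing value
            .orElse(DEFAULT_STORAGE_FORMAT);
    }

    public static void main(String[] args) {
        System.out.println(formatOptionsOrDefault(null));                 // STORED AS ORC
        System.out.println(formatOptionsOrDefault(""));                   // STORED AS ORC
        System.out.println(formatOptionsOrDefault("STORED AS PARQUET"));  // STORED AS PARQUET
    }
}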