Use of org.apache.nifi.components.ValidationResult in project nifi-minifi by apache.
Class StatusConfigReporterTest, method addValidationErrors:
private void addValidationErrors(ConfiguredComponent connectable) {
    ValidationResult validationResult = new ValidationResult.Builder().input("input").subject("subject").explanation("is not valid").build();
    ValidationResult validationResult2 = new ValidationResult.Builder().input("input2").subject("subject2").explanation("is not valid too").build();
    List<ValidationResult> validationResultList = new ArrayList<>();
    validationResultList.add(validationResult);
    validationResultList.add(validationResult2);
    when(connectable.getValidationErrors()).thenReturn(validationResultList);
}
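Note that ValidationResult.Builder defaults its valid flag to false, so the two results above read as errors even without an explicit valid(false) call. A minimal sketch of consuming this helper (hypothetical usage, assuming the Mockito and JUnit static imports used throughout this test class, and that ProcessorNode implements ConfiguredComponent as in this NiFi version):

    // Stub a mocked component, then confirm both errors are reported.
    ProcessorNode processorNode = mock(ProcessorNode.class);
    addValidationErrors(processorNode);
    assertEquals(2, processorNode.getValidationErrors().size());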
Use of org.apache.nifi.components.ValidationResult in project nifi-minifi by apache.
Class StatusConfigReporterTest, method populateReportingTask:
private void populateReportingTask(boolean addBulletins, boolean validationErrors) {
    if (addBulletins) {
        addBulletins("Bulletin message", "ReportProvenance");
    }

    // Register a single mocked reporting task with the mocked flow controller.
    ReportingTaskNode reportingTaskNode = mock(ReportingTaskNode.class);
    addReportingTaskNodeVariables(reportingTaskNode);
    HashSet<ReportingTaskNode> reportingTaskNodes = new HashSet<>();
    reportingTaskNodes.add(reportingTaskNode);
    when(mockFlowController.getAllReportingTasks()).thenReturn(reportingTaskNodes);

    if (validationErrors) {
        ValidationResult validationResult = new ValidationResult.Builder().input("input").subject("subject").explanation("is not valid").build();
        ValidationResult validationResult2 = new ValidationResult.Builder().input("input2").subject("subject2").explanation("is not valid too").build();
        List<ValidationResult> validationResultList = new ArrayList<>();
        validationResultList.add(validationResult);
        validationResultList.add(validationResult2);
        when(reportingTaskNode.getValidationErrors()).thenReturn(validationResultList);
    } else {
        // Collections.emptyList() is the type-safe replacement for the raw EMPTY_LIST constant.
        when(reportingTaskNode.getValidationErrors()).thenReturn(Collections.emptyList());
    }
}
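The two branches differ only in the stubbed list, so the pattern generalizes. A hypothetical helper (a sketch, not part of the test class) that collapses both branches:

    // Hypothetical helper: stub a node's validation errors, defaulting to an empty list.
    private void stubValidationErrors(ReportingTaskNode node, List<ValidationResult> errors) {
        List<ValidationResult> list = (errors == null) ? Collections.<ValidationResult>emptyList() : errors;
        when(node.getValidationErrors()).thenReturn(list);
    }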
Use of org.apache.nifi.components.ValidationResult in project kylo by Teradata.
Class ExportSqoop, method customValidate:
/**
 * Called by the framework, this method performs additional validation on properties.
 *
 * @param validationContext used to retrieve the properties to check
 * @return a collection of {@link ValidationResult} to be checked by the framework
 */
@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
    final List<ValidationResult> results = new ArrayList<>();
    final ExportNullInterpretationStrategy sourceNullInterpretationStrategy = ExportNullInterpretationStrategy.valueOf(validationContext.getProperty(SOURCE_NULL_INTERPRETATION_STRATEGY).getValue());
    final String sourceNullCustomStringIdentifier = validationContext.getProperty(SOURCE_NULL_CUSTOM_STRING_IDENTIFIER).evaluateAttributeExpressions().getValue();
    final String sourceNullCustomNonStringIdentifier = validationContext.getProperty(SOURCE_NULL_CUSTOM_NON_STRING_IDENTIFIER).evaluateAttributeExpressions().getValue();

    if (sourceNullInterpretationStrategy == ExportNullInterpretationStrategy.CUSTOM_VALUES) {
        if ((sourceNullCustomStringIdentifier == null) || (sourceNullCustomNonStringIdentifier == null)) {
            results.add(new ValidationResult.Builder().subject(this.getClass().getSimpleName()).valid(false)
                .explanation("For Custom Source Null Interpret Strategy, custom strings for identifying null strings and null non-strings in HDFS data must be provided.").build());
        }
    }
    return results;
}
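One edge case the method leaves to the framework: Enum.valueOf throws IllegalArgumentException for an unrecognized property value, which surfaces as an exception rather than a validation message. A defensive variant of the method's first lines (a sketch, not the project's code) keeps that failure inside the report:

    final String rawStrategy = validationContext.getProperty(SOURCE_NULL_INTERPRETATION_STRATEGY).getValue();
    final ExportNullInterpretationStrategy strategy;
    try {
        strategy = ExportNullInterpretationStrategy.valueOf(rawStrategy);
    } catch (final IllegalArgumentException e) {
        // Report the bad value as an ordinary validation error instead of letting it propagate.
        results.add(new ValidationResult.Builder().subject(this.getClass().getSimpleName()).valid(false)
            .explanation("Unrecognized source null interpretation strategy: " + rawStrategy).build());
        return results;
    }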
Use of org.apache.nifi.components.ValidationResult in project kylo by Teradata.
Class ExecutePySpark, method customValidate:
@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
    final List<ValidationResult> results = new ArrayList<>();
    final String sparkMaster = validationContext.getProperty(SPARK_MASTER).evaluateAttributeExpressions().getValue().trim().toLowerCase();
    final String sparkYarnDeployMode = validationContext.getProperty(SPARK_YARN_DEPLOY_MODE).evaluateAttributeExpressions().getValue();
    final String pySparkAppArgs = validationContext.getProperty(PYSPARK_APP_ARGS).evaluateAttributeExpressions().getValue();
    final String additionalSparkConfigOptions = validationContext.getProperty(ADDITIONAL_SPARK_CONFIG_OPTIONS).evaluateAttributeExpressions().getValue();
    PySparkUtils pySparkUtils = new PySparkUtils();

    if ((!sparkMaster.contains("local")) && (!sparkMaster.equals("yarn")) && (!sparkMaster.contains("mesos")) && (!sparkMaster.contains("spark"))) {
        results.add(new ValidationResult.Builder().subject(this.getClass().getSimpleName()).valid(false)
            .explanation("invalid spark master provided. Valid values will have local, local[n], local[*], yarn, mesos, spark").build());
    }
    if (sparkMaster.equals("yarn")) {
        if (!(sparkYarnDeployMode.equals("client") || sparkYarnDeployMode.equals("cluster"))) {
            results.add(new ValidationResult.Builder().subject(this.getClass().getSimpleName()).valid(false)
                .explanation("yarn master requires a deploy mode to be specified as either 'client' or 'cluster'").build());
        }
    }
    if (!StringUtils.isEmpty(pySparkAppArgs)) {
        if (!pySparkUtils.validateCsvArgs(pySparkAppArgs)) {
            results.add(new ValidationResult.Builder().subject(this.getClass().getSimpleName()).valid(false)
                .explanation("app args in invalid format. They should be provided as arg1,arg2,arg3 and so on.").build());
        }
    }
    if (!StringUtils.isEmpty(additionalSparkConfigOptions)) {
        if (!pySparkUtils.validateKeyValueArgs(additionalSparkConfigOptions)) {
            results.add(new ValidationResult.Builder().subject(this.getClass().getSimpleName()).valid(false)
                .explanation("additional spark config options in invalid format. They should be provided as config1=value1,config2=value2 and so on.").build());
        }
    }
    return results;
}
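Validation like this can be exercised through the standard nifi-mock TestRunner, much as the TDCH test below does. A short sketch (assuming the property descriptors are exposed as public constants, the usual NiFi convention):

    TestRunner runner = TestRunners.newTestRunner(ExecutePySpark.class);
    runner.setProperty(ExecutePySpark.SPARK_MASTER, "yarn");
    // "standalone" is neither "client" nor "cluster", so customValidate should flag it.
    runner.setProperty(ExecutePySpark.SPARK_YARN_DEPLOY_MODE, "standalone");
    runner.assertNotValid();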
Use of org.apache.nifi.components.ValidationResult in project kylo by Teradata.
Class TdchExportHiveToTeradataTest, method testTeradataForceStage:
@Test
public void testTeradataForceStage() throws InitializationException {
    final TestRunner runner = TestRunners.newTestRunner(TdchExportHiveToTeradata.class);

    // Register and enable the TDCH connection service, then set the minimum valid configuration.
    TdchConnectionService tdchConnectionService = new DummyTdchConnectionService();
    runner.addControllerService(CONNECTION_SERVICE_ID, tdchConnectionService);
    runner.assertValid(tdchConnectionService);
    runner.enableControllerService(tdchConnectionService);
    runner.setProperty(TdchExportHiveToTeradata.TDCH_CONNECTION_SERVICE, CONNECTION_SERVICE_ID);
    runner.setProperty(TdchExportHiveToTeradata.HIVE_DATABASE, "hive_db");
    runner.setProperty(TdchExportHiveToTeradata.HIVE_TABLE, "hive_table");
    runner.setProperty(TdchExportHiveToTeradata.TERADATA_DATABASE_TABLE, "teradata_db.teradata_table");
    runner.assertValid();

    // The force-stage property defaults to "false", is optional, and supports expression language.
    Assert.assertEquals("false", runner.getProcessor().getPropertyDescriptor(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE_NAME).getDefaultValue());
    Assert.assertFalse(runner.getProcessor().getPropertyDescriptor(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE_NAME).isRequired());
    Assert.assertTrue(runner.getProcessor().getPropertyDescriptor(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE_NAME).isExpressionLanguageSupported());

    // A boolean string is accepted; an empty or non-boolean value is rejected.
    ValidationResult result = runner.setProperty(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE, "true");
    Assert.assertTrue(result.isValid());
    result = runner.setProperty(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE, "");
    Assert.assertFalse(result.isValid());
    result = runner.setProperty(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE, "not-boolean-value");
    Assert.assertFalse(result.isValid());

    // Removing the optional property leaves the processor valid.
    runner.removeProperty(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE);
    runner.assertValid();
}
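TestRunner.setProperty(...) returns the ValidationResult produced by the descriptor's validator, so a test can also assert on the failure message rather than just the flag. A short sketch extending the test above:

    ValidationResult invalid = runner.setProperty(TdchExportHiveToTeradata.TERADATA_FORCE_STAGE, "not-boolean-value");
    Assert.assertFalse(invalid.isValid());
    // getExplanation() carries the validator's message describing why the value was rejected.
    Assert.assertNotNull(invalid.getExplanation());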