Search in sources :

Example 16 with FieldPolicy

Use of com.thinkbiganalytics.policy.FieldPolicy in the Kylo project by Teradata.

The call method of the CleanseAndValidateRow class.

@Override
public CleansedRowResult call(@Nonnull final Row row) throws Exception {
    /*
    Cache for performance. Validators accept different parameters (numeric,string, etc) so we need to resolve the type using reflection
     */
    Map<Class, Class> validatorParamType = new HashMap<>();
    int nulls = hasProcessingDttm ? 1 : 0;
    // Create placeholder for the new values plus one columns for reject_reason
    Object[] newValues = new Object[dataTypes.length + 1];
    boolean rowValid = true;
    String sbRejectReason;
    List<ValidationResult> results = null;
    boolean[] columnsValid = new boolean[dataTypes.length];
    Map<Integer, Object> originalValues = new HashMap<>();
    // Iterate through columns to cleanse and validate
    for (int idx = 0; idx < dataTypes.length; idx++) {
        ValidationResult result;
        FieldPolicy fieldPolicy = policies[idx];
        HCatDataType dataType = dataTypes[idx];
        boolean columnValid = true;
        boolean isBinaryType = dataType.getConvertibleType().equals(byte[].class);
        // Extract the value (allowing for null or missing field for odd-ball data)
        Object val = (idx == row.length() || row.isNullAt(idx) ? null : row.get(idx));
        if (dataType.isUnchecked()) {
            if (val == null) {
                nulls++;
            }
            newValues[idx] = val;
            originalValues.put(idx, val);
        } else {
            Object fieldValue = (val);
            boolean isEmpty;
            if (fieldValue == null) {
                nulls++;
            }
            originalValues.put(idx, fieldValue);
            StandardizationAndValidationResult standardizationAndValidationResult = standardizeAndValidateField(fieldPolicy, fieldValue, dataType, validatorParamType);
            result = standardizationAndValidationResult.getFinalValidationResult();
            // only apply the standardized result value if the routine is valid
            fieldValue = result.isValid() ? standardizationAndValidationResult.getFieldValue() : fieldValue;
            // reevaluate the isEmpty flag
            isEmpty = ((fieldValue == null) || (StringUtils.isEmpty(fieldValue.toString())));
            // hive will auto convert byte[] or String fields to a target binary type.
            if (result.isValid() && isBinaryType && !(fieldValue instanceof byte[]) && !(fieldValue instanceof String)) {
                // set it to null
                fieldValue = null;
            } else if ((dataType.isNumeric() || isBinaryType) && isEmpty) {
                // if its a numeric column and the field is empty then set it to null as well
                fieldValue = null;
            }
            newValues[idx] = fieldValue;
            if (!result.isValid()) {
                rowValid = false;
                results = (results == null ? new Vector<ValidationResult>() : results);
                results.addAll(standardizationAndValidationResult.getValidationResults());
                columnValid = false;
            }
        }
        // Record fact that we there was an invalid column
        columnsValid[idx] = columnValid;
    }
    // Return success unless all values were null.  That would indicate a blank line in the file.
    if (nulls >= dataTypes.length) {
        rowValid = false;
        results = (results == null ? new Vector<ValidationResult>() : results);
        results.add(ValidationResult.failRow("empty", "Row is empty"));
    }
    if (!rowValid) {
        for (int idx = 0; idx < dataTypes.length; idx++) {
            // the _invalid table dataTypes matches the source, not the destination
            if (newValues[idx] == null || originalValues.get(idx) == null || newValues[idx].getClass() != originalValues.get(idx).getClass()) {
                newValues[idx] = originalValues.get(idx);
            }
        // otherwise the data has changed, but its still the same data type so we can keep the newly changed value
        }
    }
    // Convert to reject reasons to JSON
    sbRejectReason = toJSONArray(results);
    // Record the results in the appended columns, move processing partition value last
    if (hasProcessingDttm) {
        // PROCESSING_DTTM_COL
        newValues[dataTypes.length] = newValues[dataTypes.length - 1];
        // REJECT_REASON_COL
        newValues[dataTypes.length - 1] = sbRejectReason;
    } else {
        newValues[dataTypes.length] = sbRejectReason;
    }
    return new CleansedRowResult(RowFactory.create(newValues), columnsValid, rowValid);
}
Also used : FieldPolicy(com.thinkbiganalytics.policy.FieldPolicy) BaseFieldPolicy(com.thinkbiganalytics.policy.BaseFieldPolicy) HashMap(java.util.HashMap) CleansedRowResult(com.thinkbiganalytics.spark.datavalidator.CleansedRowResult) StandardizationAndValidationResult(com.thinkbiganalytics.spark.datavalidator.StandardizationAndValidationResult) ValidationResult(com.thinkbiganalytics.policy.validation.ValidationResult) StandardizationAndValidationResult(com.thinkbiganalytics.spark.datavalidator.StandardizationAndValidationResult) HCatDataType(com.thinkbiganalytics.spark.validation.HCatDataType)

Aggregations

FieldPolicy (com.thinkbiganalytics.policy.FieldPolicy)16 ArrayList (java.util.ArrayList)12 BaseFieldPolicy (com.thinkbiganalytics.policy.BaseFieldPolicy)10 StandardizationAndValidationResult (com.thinkbiganalytics.spark.datavalidator.StandardizationAndValidationResult)10 HCatDataType (com.thinkbiganalytics.spark.validation.HCatDataType)9 Test (org.junit.Test)7 SimpleRegexReplacer (com.thinkbiganalytics.policy.standardization.SimpleRegexReplacer)4 LookupValidator (com.thinkbiganalytics.policy.validation.LookupValidator)3 HashMap (java.util.HashMap)3 StructField (org.apache.spark.sql.types.StructField)3 Nonnull (javax.annotation.Nonnull)2 FieldPoliciesJsonTransformer (com.thinkbiganalytics.policy.FieldPoliciesJsonTransformer)1 StandardizationPolicy (com.thinkbiganalytics.policy.standardization.StandardizationPolicy)1 CharacterValidator (com.thinkbiganalytics.policy.validation.CharacterValidator)1 NotNullValidator (com.thinkbiganalytics.policy.validation.NotNullValidator)1 RangeValidator (com.thinkbiganalytics.policy.validation.RangeValidator)1 ValidationResult (com.thinkbiganalytics.policy.validation.ValidationResult)1 DataSet (com.thinkbiganalytics.spark.DataSet)1 CleansedRowResult (com.thinkbiganalytics.spark.datavalidator.CleansedRowResult)1 FieldPolicyLoader (com.thinkbiganalytics.spark.policy.FieldPolicyLoader)1