Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class PrintTableSinkFactory, method createDynamicTableSink.
@Override
public DynamicTableSink createDynamicTableSink(Context context) {
    FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
    helper.validate();
    ReadableConfig options = helper.getOptions();
    return new PrintSink(
            context.getCatalogTable().getResolvedSchema().toPhysicalRowDataType(),
            context.getCatalogTable().getPartitionKeys(),
            options.get(PRINT_IDENTIFIER),
            options.get(STANDARD_ERROR),
            options.getOptional(FactoryUtil.SINK_PARALLELISM).orElse(null));
}
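PRINT_IDENTIFIER and STANDARD_ERROR are ConfigOption constants that the ReadableConfig returned by helper.getOptions() resolves against the table's WITH options. A minimal, self-contained sketch of that read pattern follows; the option keys, defaults, and class name are illustrative assumptions, not the factory's actual definitions.

// Sketch only: illustrative option definitions, not the ones used by PrintTableSinkFactory.
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;

public class ReadableConfigSketch {

    static final ConfigOption<String> PRINT_IDENTIFIER =
            ConfigOptions.key("print-identifier").stringType().noDefaultValue();

    static final ConfigOption<Boolean> STANDARD_ERROR =
            ConfigOptions.key("standard-error").booleanType().defaultValue(false);

    static final ConfigOption<Integer> SINK_PARALLELISM =
            ConfigOptions.key("sink.parallelism").intType().noDefaultValue();

    public static void main(String[] args) {
        // Configuration implements ReadableConfig, so it can stand in for helper.getOptions().
        Configuration conf = new Configuration();
        conf.set(PRINT_IDENTIFIER, "my-print-sink");

        ReadableConfig options = conf;
        String identifier = options.get(PRINT_IDENTIFIER);                        // "my-print-sink"
        boolean useStdErr = options.get(STANDARD_ERROR);                          // false (default)
        Integer parallelism = options.getOptional(SINK_PARALLELISM).orElse(null); // null when unset
        System.out.println(identifier + " " + useStdErr + " " + parallelism);
    }
}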
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class CommonExecSink, method applyConstraintValidations.
/**
 * Apply an operator that filters out rows or raises an error when NOT NULL fields contain null
 * values, and that enforces CHAR/VARCHAR and BINARY/VARBINARY length constraints.
 */
private Transformation<RowData> applyConstraintValidations(
        Transformation<RowData> inputTransform, ReadableConfig config, RowType physicalRowType) {
    final ConstraintEnforcer.Builder validatorBuilder = ConstraintEnforcer.newBuilder();
    final String[] fieldNames = physicalRowType.getFieldNames().toArray(new String[0]);

    // Build NOT NULL enforcer
    final int[] notNullFieldIndices = getNotNullFieldIndices(physicalRowType);
    if (notNullFieldIndices.length > 0) {
        final ExecutionConfigOptions.NotNullEnforcer notNullEnforcer =
                config.get(ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER);
        final List<String> notNullFieldNames =
                Arrays.stream(notNullFieldIndices)
                        .mapToObj(idx -> fieldNames[idx])
                        .collect(Collectors.toList());
        validatorBuilder.addNotNullConstraint(
                notNullEnforcer, notNullFieldIndices, notNullFieldNames, fieldNames);
    }

    final ExecutionConfigOptions.TypeLengthEnforcer typeLengthEnforcer =
            config.get(ExecutionConfigOptions.TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER);

    // Build CHAR/VARCHAR length enforcer
    final List<ConstraintEnforcer.FieldInfo> charFieldInfo =
            getFieldInfoForLengthEnforcer(physicalRowType, LengthEnforcerType.CHAR);
    if (!charFieldInfo.isEmpty()) {
        final List<String> charFieldNames =
                charFieldInfo.stream()
                        .map(cfi -> fieldNames[cfi.fieldIdx()])
                        .collect(Collectors.toList());
        validatorBuilder.addCharLengthConstraint(
                typeLengthEnforcer, charFieldInfo, charFieldNames, fieldNames);
    }

    // Build BINARY/VARBINARY length enforcer
    final List<ConstraintEnforcer.FieldInfo> binaryFieldInfo =
            getFieldInfoForLengthEnforcer(physicalRowType, LengthEnforcerType.BINARY);
    if (!binaryFieldInfo.isEmpty()) {
        final List<String> binaryFieldNames =
                binaryFieldInfo.stream()
                        .map(cfi -> fieldNames[cfi.fieldIdx()])
                        .collect(Collectors.toList());
        validatorBuilder.addBinaryLengthConstraint(
                typeLengthEnforcer, binaryFieldInfo, binaryFieldNames, fieldNames);
    }

    ConstraintEnforcer constraintEnforcer = validatorBuilder.build();
    if (constraintEnforcer != null) {
        return ExecNodeUtil.createOneInputTransformation(
                inputTransform,
                createTransformationMeta(
                        CONSTRAINT_VALIDATOR_TRANSFORMATION,
                        constraintEnforcer.getOperatorName(),
                        "ConstraintEnforcer",
                        config),
                constraintEnforcer,
                getInputTypeInfo(),
                inputTransform.getParallelism());
    } else {
        // No constraints to enforce, so skip adding the enforcer operator.
        return inputTransform;
    }
}
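The behavior picked up from the ReadableConfig here is driven by two table options, TABLE_EXEC_SINK_NOT_NULL_ENFORCER and TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER. A minimal sketch of setting them from user code, assuming the Flink 1.15+ TableConfig.set(ConfigOption, value) API; on older versions the same values can be set through tEnv.getConfig().getConfiguration().

// Sketch only: shows how a user could influence the enforcer options read above.
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.config.ExecutionConfigOptions;

public class SinkEnforcerConfigSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // Drop rows with nulls in NOT NULL columns instead of failing the job.
        tEnv.getConfig()
                .set(
                        ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER,
                        ExecutionConfigOptions.NotNullEnforcer.DROP);

        // Trim/pad CHAR/VARCHAR and BINARY/VARBINARY values to the declared length.
        tEnv.getConfig()
                .set(
                        ExecutionConfigOptions.TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER,
                        ExecutionConfigOptions.TypeLengthEnforcer.TRIM_PAD);
    }
}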
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class RexNodeJsonSerializer, method serialize.
@Override
public void serialize(
        RexNode rexNode, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
    final ReadableConfig config = SerdeContext.get(serializerProvider).getConfiguration();
    final CatalogPlanCompilation compilationStrategy =
            config.get(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS);
    switch (rexNode.getKind()) {
        case INPUT_REF:
        case TABLE_INPUT_REF:
            serializeInputRef((RexInputRef) rexNode, jsonGenerator, serializerProvider);
            break;
        case LITERAL:
            serializeLiteral((RexLiteral) rexNode, jsonGenerator, serializerProvider);
            break;
        case FIELD_ACCESS:
            serializeFieldAccess((RexFieldAccess) rexNode, jsonGenerator, serializerProvider);
            break;
        case CORREL_VARIABLE:
            serializeCorrelVariable((RexCorrelVariable) rexNode, jsonGenerator, serializerProvider);
            break;
        case PATTERN_INPUT_REF:
            serializePatternFieldRef((RexPatternFieldRef) rexNode, jsonGenerator, serializerProvider);
            break;
        default:
            if (rexNode instanceof RexCall) {
                serializeCall((RexCall) rexNode, jsonGenerator, serializerProvider, compilationStrategy);
            } else {
                throw new TableException("Unknown RexNode: " + rexNode);
            }
    }
}
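Both this serializer and the ones below branch on TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS, which controls how much catalog metadata is written into a compiled plan. A short sketch of configuring that option from user code, assuming the Flink 1.15+ compiled-plan feature and the TableConfig.set API.

// Sketch only: configuring the catalog-object compilation strategy read via SerdeContext above.
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.config.TableConfigOptions;

public class PlanCompilationConfigSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // ALL serializes full catalog objects into the compiled plan, SCHEMA keeps only
        // their schemas, and IDENTIFIER stores just the object identifier.
        tEnv.getConfig()
                .set(
                        TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS,
                        TableConfigOptions.CatalogPlanCompilation.ALL);

        // The serializers above read this value through SerdeContext when a plan is
        // compiled, e.g. via tEnv.compilePlanSql("INSERT INTO ...").
    }
}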
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class LogicalTypeJsonSerializer, method serialize.
@Override
public void serialize(
        LogicalType logicalType, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
    final ReadableConfig config = SerdeContext.get(serializerProvider).getConfiguration();
    final boolean serializeCatalogObjects =
            !config.get(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS)
                    .equals(CatalogPlanCompilation.IDENTIFIER);
    serializeInternal(logicalType, jsonGenerator, serializerProvider, serializeCatalogObjects);
}
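The one-line boolean derivation above is a common ReadableConfig idiom: read an enum-typed option and collapse it into a flag. A generic, self-contained sketch follows, with a hypothetical enum and option key standing in for CatalogPlanCompilation and PLAN_COMPILE_CATALOG_OBJECTS.

// Sketch only: hypothetical enum and option, mirroring the flag derivation above.
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;

public class EnumOptionFlagSketch {

    enum Strategy { ALL, SCHEMA, IDENTIFIER }

    static final ConfigOption<Strategy> STRATEGY =
            ConfigOptions.key("example.compile.strategy")
                    .enumType(Strategy.class)
                    .defaultValue(Strategy.ALL);

    public static void main(String[] args) {
        ReadableConfig config = new Configuration();
        // Anything other than IDENTIFIER means catalog objects are written out in some form.
        boolean serializeCatalogObjects = !config.get(STRATEGY).equals(Strategy.IDENTIFIER);
        System.out.println(serializeCatalogObjects); // true with the ALL default
    }
}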
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class AggregateCallJsonSerializer, method serialize.
@Override
public void serialize(
        AggregateCall aggCall, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
    final ReadableConfig config = SerdeContext.get(serializerProvider).getConfiguration();
    final CatalogPlanCompilation compilationStrategy =
            config.get(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS);
    jsonGenerator.writeStartObject();
    jsonGenerator.writeStringField(FIELD_NAME_NAME, aggCall.getName());
    RexNodeJsonSerializer.serializeSqlOperator(
            aggCall.getAggregation(),
            jsonGenerator,
            serializerProvider,
            compilationStrategy == CatalogPlanCompilation.ALL);
    jsonGenerator.writeFieldName(FIELD_NAME_ARG_LIST);
    jsonGenerator.writeStartArray();
    for (int arg : aggCall.getArgList()) {
        jsonGenerator.writeNumber(arg);
    }
    jsonGenerator.writeEndArray();
    jsonGenerator.writeNumberField(FIELD_NAME_FILTER_ARG, aggCall.filterArg);
    jsonGenerator.writeBooleanField(FIELD_NAME_DISTINCT, aggCall.isDistinct());
    jsonGenerator.writeBooleanField(FIELD_NAME_APPROXIMATE, aggCall.isApproximate());
    jsonGenerator.writeBooleanField(FIELD_NAME_IGNORE_NULLS, aggCall.ignoreNulls());
    serializerProvider.defaultSerializeField(FIELD_NAME_TYPE, aggCall.getType(), jsonGenerator);
    jsonGenerator.writeEndObject();
}
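Beyond the ReadableConfig lookup, the method is a plain Jackson streaming write. Below is a self-contained sketch of the same JsonGenerator pattern, using unshaded Jackson and hypothetical field names; Flink's planner uses a shaded Jackson copy, and the FIELD_NAME_* constants above are not reproduced here.

// Sketch only: a generic StdSerializer using the same JsonGenerator write pattern.
import java.io.IOException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;

public class CallSketch {
    public String name = "SUM";
    public int[] args = {0, 2};
    public boolean distinct = false;
}

class CallSketchSerializer extends StdSerializer<CallSketch> {

    CallSketchSerializer() {
        super(CallSketch.class);
    }

    @Override
    public void serialize(CallSketch call, JsonGenerator gen, SerializerProvider provider)
            throws IOException {
        // Explicit start/end markers plus typed field writers, as in AggregateCallJsonSerializer.
        gen.writeStartObject();
        gen.writeStringField("name", call.name);
        gen.writeFieldName("argList");
        gen.writeStartArray();
        for (int arg : call.args) {
            gen.writeNumber(arg);
        }
        gen.writeEndArray();
        gen.writeBooleanField("distinct", call.distinct);
        gen.writeEndObject();
    }
}

class CallSketchDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();
        module.addSerializer(CallSketch.class, new CallSketchSerializer());
        mapper.registerModule(module);
        System.out.println(mapper.writeValueAsString(new CallSketch()));
        // {"name":"SUM","argList":[0,2],"distinct":false}
    }
}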