Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
From the class HBaseDynamicTableFactoryTest, method testUnknownOption.
@Test
public void testUnknownOption() {
    Map<String, String> options = getAllOptions();
    options.put("sink.unknown.key", "unknown-value");
    ResolvedSchema schema = ResolvedSchema.of(
            Column.physical(ROWKEY, STRING()),
            Column.physical(FAMILY1, ROW(FIELD(COL1, DOUBLE()), FIELD(COL2, INT()))));
    try {
        createTableSource(schema, options);
        fail("Should fail");
    } catch (Exception e) {
        assertTrue(ExceptionUtils.findThrowableWithMessage(e, "Unsupported options:\n\nsink.unknown.key")
                .isPresent());
    }
    try {
        createTableSink(schema, options);
        fail("Should fail");
    } catch (Exception e) {
        assertTrue(ExceptionUtils.findThrowableWithMessage(e, "Unsupported options:\n\nsink.unknown.key")
                .isPresent());
    }
}
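The ResolvedSchema.of(...) call above relies on static imports from org.apache.flink.table.api.DataTypes and on string constants (ROWKEY, FAMILY1, COL1, COL2) defined in the test class. A minimal standalone sketch of the same construction, with hypothetical literal column names standing in for those constants:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;

public class SchemaSketch {
    public static void main(String[] args) {
        // "rowkey", "family1", "col1", "col2" are placeholders for the test's constants.
        ResolvedSchema schema = ResolvedSchema.of(
                Column.physical("rowkey", DataTypes.STRING()),
                Column.physical("family1", DataTypes.ROW(
                        DataTypes.FIELD("col1", DataTypes.DOUBLE()),
                        DataTypes.FIELD("col2", DataTypes.INT()))));
        System.out.println(schema);
    }
}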
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
From the class HBaseDynamicTableFactoryTest, method testTypeWithUnsupportedPrecision.
@Test
public void testTypeWithUnsupportedPrecision() {
    Map<String, String> options = getAllOptions();
    // test unsupported timestamp precision
    ResolvedSchema schema = ResolvedSchema.of(
            Column.physical(ROWKEY, STRING()),
            Column.physical(FAMILY1, ROW(FIELD(COL1, TIMESTAMP(6)), FIELD(COL2, INT()))));
    try {
        createTableSource(schema, options);
        fail("Should fail");
    } catch (Exception e) {
        assertTrue(ExceptionUtils.findThrowableWithMessage(
                e, "The precision 6 of TIMESTAMP type is out of the range [0, 3] supported by HBase connector")
                .isPresent());
    }
    try {
        createTableSink(schema, options);
        fail("Should fail");
    } catch (Exception e) {
        assertTrue(ExceptionUtils.findThrowableWithMessage(
                e, "The precision 6 of TIMESTAMP type is out of the range [0, 3] supported by HBase connector")
                .isPresent());
    }
    // test unsupported time precision
    schema = ResolvedSchema.of(
            Column.physical(ROWKEY, STRING()),
            Column.physical(FAMILY1, ROW(FIELD(COL1, TIME(6)), FIELD(COL2, INT()))));
    try {
        createTableSource(schema, options);
        fail("Should fail");
    } catch (Exception e) {
        assertTrue(ExceptionUtils.findThrowableWithMessage(
                e, "The precision 6 of TIME type is out of the range [0, 3] supported by HBase connector")
                .isPresent());
    }
    try {
        createTableSink(schema, options);
        fail("Should fail");
    } catch (Exception e) {
        assertTrue(ExceptionUtils.findThrowableWithMessage(
                e, "The precision 6 of TIME type is out of the range [0, 3] supported by HBase connector")
                .isPresent());
    }
}
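The [0, 3] range in the asserted messages reflects the HBase connector's limit on TIMESTAMP and TIME precision. A minimal sketch of such a check, with a hypothetical helper name; the connector's actual validation code is not shown here and may differ:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.utils.LogicalTypeChecks;

public class PrecisionCheckSketch {

    // Rejects precisions above 3, mirroring the error text asserted above.
    // Only call this with types that carry a precision, e.g. TIMESTAMP or TIME.
    static void checkPrecision(LogicalType type, String typeName) {
        int precision = LogicalTypeChecks.getPrecision(type);
        if (precision > 3) {
            throw new ValidationException(
                    "The precision " + precision + " of " + typeName
                            + " type is out of the range [0, 3] supported by HBase connector");
        }
    }

    public static void main(String[] args) {
        // TIMESTAMP(6) exceeds the supported range, so this throws.
        checkPrecision(DataTypes.TIMESTAMP(6).getLogicalType(), "TIMESTAMP");
    }
}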
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
From the class CommonExecSink, method createSinkTransformation.
@SuppressWarnings("unchecked")
protected Transformation<Object> createSinkTransformation(
        StreamExecutionEnvironment streamExecEnv,
        ReadableConfig config,
        Transformation<RowData> inputTransform,
        DynamicTableSink tableSink,
        int rowtimeFieldIndex,
        boolean upsertMaterialize) {
    final ResolvedSchema schema = tableSinkSpec.getContextResolvedTable().getResolvedSchema();
    final SinkRuntimeProvider runtimeProvider =
            tableSink.getSinkRuntimeProvider(new SinkRuntimeProviderContext(isBounded));
    final RowType physicalRowType = getPhysicalRowType(schema);
    final int[] primaryKeys = getPrimaryKeyIndices(physicalRowType, schema);
    final int sinkParallelism = deriveSinkParallelism(inputTransform, runtimeProvider);
    final int inputParallelism = inputTransform.getParallelism();
    final boolean inputInsertOnly = inputChangelogMode.containsOnly(RowKind.INSERT);
    final boolean hasPk = primaryKeys.length > 0;
    if (!inputInsertOnly && sinkParallelism != inputParallelism && !hasPk) {
        throw new TableException(String.format(
                "The sink for table '%s' has a configured parallelism of %s, while the input "
                        + "parallelism is %s. Since the configured parallelism is different from "
                        + "the input's parallelism and the changelog mode is not insert-only, a "
                        + "primary key is required but could not be found.",
                tableSinkSpec.getContextResolvedTable().getIdentifier().asSummaryString(),
                sinkParallelism,
                inputParallelism));
    }
    // only add materialization if the input contains changes
    final boolean needMaterialization = !inputInsertOnly && upsertMaterialize;
    Transformation<RowData> sinkTransform =
            applyConstraintValidations(inputTransform, config, physicalRowType);
    if (hasPk) {
        sinkTransform = applyKeyBy(
                config, sinkTransform, primaryKeys, sinkParallelism, inputParallelism,
                inputInsertOnly, needMaterialization);
    }
    if (needMaterialization) {
        sinkTransform = applyUpsertMaterialize(
                sinkTransform, primaryKeys, sinkParallelism, config, physicalRowType);
    }
    return (Transformation<Object>) applySinkProvider(
            sinkTransform, streamExecEnv, runtimeProvider, rowtimeFieldIndex, sinkParallelism, config);
}
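getPhysicalRowType and getPrimaryKeyIndices are helpers of CommonExecSink and are not shown in this snippet. A plausible sketch of deriving primary-key indices from a ResolvedSchema using only its public API; the actual helper resolves indices against the physical row type and may differ in detail:

import java.util.List;
import org.apache.flink.table.catalog.ResolvedSchema;

public class PrimaryKeySketch {

    // Maps each primary-key column name to its position in the schema's
    // column list; returns an empty array when no primary key is defined.
    static int[] primaryKeyIndices(ResolvedSchema schema) {
        List<String> columnNames = schema.getColumnNames();
        return schema.getPrimaryKey()
                .map(pk -> pk.getColumns().stream().mapToInt(columnNames::indexOf).toArray())
                .orElseGet(() -> new int[0]);
    }
}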
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
From the class ResolvedSchemaJsonDeserializer, method deserialize.
@Override
public ResolvedSchema deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    List<Column> columns = ctx.readValue(
            traverse(jsonNode.required(COLUMNS), jsonParser.getCodec()),
            ctx.getTypeFactory().constructCollectionType(List.class, Column.class));
    List<WatermarkSpec> watermarkSpecs = ctx.readValue(
            traverse(jsonNode.required(WATERMARK_SPECS), jsonParser.getCodec()),
            ctx.getTypeFactory().constructCollectionType(List.class, WatermarkSpec.class));
    UniqueConstraint primaryKey =
            deserializeOptionalField(jsonNode, PRIMARY_KEY, UniqueConstraint.class, jsonParser.getCodec(), ctx)
                    .orElse(null);
    return new ResolvedSchema(columns, watermarkSpecs, primaryKey);
}
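The deserializer reassembles a ResolvedSchema from its three components: columns, watermark specs, and an optional primary key. Building one directly takes the same pieces; the names below ("id", "name", "pk") are hypothetical:

import java.util.Collections;
import java.util.List;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;

public class ResolvedSchemaSketch {
    public static void main(String[] args) {
        ResolvedSchema schema = new ResolvedSchema(
                List.of(
                        // Primary-key columns must be non-null.
                        Column.physical("id", DataTypes.BIGINT().notNull()),
                        Column.physical("name", DataTypes.STRING())),
                Collections.emptyList(), // no watermark specs
                UniqueConstraint.primaryKey("pk", List.of("id")));
        System.out.println(schema);
    }
}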
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
From the class ResolvedCatalogTableJsonDeserializer, method deserialize.
@Override
public ResolvedCatalogTable deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    ResolvedSchema resolvedSchema = ctx.readValue(
            traverse(jsonNode.required(RESOLVED_SCHEMA), jsonParser.getCodec()), ResolvedSchema.class);
    List<String> partitionKeys = ctx.readValue(
            traverse(jsonNode.required(PARTITION_KEYS), jsonParser.getCodec()),
            ctx.getTypeFactory().constructCollectionType(List.class, String.class));
    String comment =
            deserializeOptionalField(jsonNode, COMMENT, String.class, jsonParser.getCodec(), ctx).orElse(null);
    @SuppressWarnings("unchecked")
    Map<String, String> options = (Map<String, String>) deserializeOptionalField(
                    jsonNode,
                    OPTIONS,
                    ctx.getTypeFactory().constructMapType(Map.class, String.class, String.class),
                    jsonParser.getCodec(),
                    ctx)
            .orElse(Collections.emptyMap());
    return new ResolvedCatalogTable(
            CatalogTable.of(
                    // Rebuild the unresolved schema from the resolved one, in case one
                    // tries to access the unresolved schema.
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema).build(),
                    comment,
                    partitionKeys,
                    options),
            resolvedSchema);
}