Use of org.apache.flink.table.catalog.ExternalCatalogTable in project flink by apache.
Class ResolvedCatalogTableSerdeTest, method testDontSerializeExternalInlineTable.
@Test
void testDontSerializeExternalInlineTable() {
    SerdeContext serdeCtx = configuredSerdeContext();
    ObjectWriter objectWriter = JsonSerdeUtil.createObjectWriter(serdeCtx);

    assertThatThrownBy(
                    () ->
                            objectWriter.writeValueAsString(
                                    new ResolvedCatalogTable(
                                            new ExternalCatalogTable(
                                                    Schema.newBuilder()
                                                            .fromResolvedSchema(FULL_RESOLVED_SCHEMA)
                                                            .build()),
                                            FULL_RESOLVED_SCHEMA)))
            .satisfies(
                    FlinkAssertions.anyCauseMatches(
                            ValidationException.class,
                            "Cannot serialize the table as it's an external inline table"));
}
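For context, the external inline table rejected here is exactly what StreamTableEnvironment#fromDataStream produces, as the serializer's error message further below spells out. A minimal, hypothetical sketch of user code that would run into this ValidationException when compiling a plan; the sink table "sink_t" and the view name are illustrative assumptions, not taken from the Flink sources:

// Hypothetical reproduction; assumes a sink table "sink_t" has been declared.
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

// The resulting table is backed by an ExternalCatalogTable (an "external inline table").
Table table = tableEnv.fromDataStream(env.fromElements(1, 2, 3));
tableEnv.createTemporaryView("t", table);

// Compiling a plan serializes the table and fails with:
// "Cannot serialize the table as it's an external inline table"
tableEnv.compilePlanSql("INSERT INTO sink_t SELECT * FROM t");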
Use of org.apache.flink.table.catalog.ExternalCatalogTable in project flink by apache.
Class DynamicSinkUtils, method convertCollectToRel.
/**
 * Converts a {@link TableResult#collect()} sink to a {@link RelNode}.
 */
public static RelNode convertCollectToRel(
        FlinkRelBuilder relBuilder,
        RelNode input,
        CollectModifyOperation collectModifyOperation,
        ReadableConfig configuration,
        ClassLoader classLoader) {
    final DataTypeFactory dataTypeFactory =
            unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory();
    final ResolvedSchema childSchema = collectModifyOperation.getChild().getResolvedSchema();
    final ResolvedSchema schema =
            ResolvedSchema.physical(
                    childSchema.getColumnNames(), childSchema.getColumnDataTypes());
    final ResolvedCatalogTable catalogTable =
            new ResolvedCatalogTable(
                    new ExternalCatalogTable(
                            Schema.newBuilder().fromResolvedSchema(schema).build()),
                    schema);
    final ContextResolvedTable contextResolvedTable =
            ContextResolvedTable.anonymous("collect", catalogTable);

    final DataType consumedDataType = fixCollectDataType(dataTypeFactory, schema);

    final String zone = configuration.get(TableConfigOptions.LOCAL_TIME_ZONE);
    final ZoneId zoneId =
            TableConfigOptions.LOCAL_TIME_ZONE.defaultValue().equals(zone)
                    ? ZoneId.systemDefault()
                    : ZoneId.of(zone);

    final CollectDynamicSink tableSink =
            new CollectDynamicSink(
                    contextResolvedTable.getIdentifier(),
                    consumedDataType,
                    configuration.get(CollectSinkOperatorFactory.MAX_BATCH_SIZE),
                    configuration.get(CollectSinkOperatorFactory.SOCKET_TIMEOUT),
                    classLoader,
                    zoneId,
                    configuration
                            .get(ExecutionConfigOptions.TABLE_EXEC_LEGACY_CAST_BEHAVIOUR)
                            .isEnabled());
    collectModifyOperation.setSelectResultProvider(tableSink.getSelectResultProvider());
    collectModifyOperation.setConsumedDataType(consumedDataType);
    return convertSinkToRel(
            relBuilder,
            input,
            Collections.emptyMap(), // dynamicOptions
            contextResolvedTable,
            Collections.emptyMap(), // staticPartitions
            false,
            tableSink);
}
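This conversion is reached through the public API whenever results are fetched with TableResult#collect(). A hedged sketch of the user-facing entry point; the queried table "some_table" is an assumption:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
TableResult result = tableEnv.executeSql("SELECT * FROM some_table"); // assumes some_table exists

// Behind the scenes the planner wraps the query in a CollectModifyOperation;
// convertCollectToRel then plans it into a CollectDynamicSink-backed RelNode.
try (CloseableIterator<Row> it = result.collect()) {
    it.forEachRemaining(System.out::println);
}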
Use of org.apache.flink.table.catalog.ExternalCatalogTable in project flink by apache.
Class ResolvedCatalogTableJsonSerializer, method serialize.
static void serialize(
        ResolvedCatalogTable resolvedCatalogTable,
        boolean serializeOptions,
        JsonGenerator jsonGenerator,
        SerializerProvider serializerProvider)
        throws IOException {
    // This should never happen, but we keep the assertion as a sanity check.
    assert resolvedCatalogTable.getTableKind() == CatalogBaseTable.TableKind.TABLE;
    jsonGenerator.writeStartObject();

    if (resolvedCatalogTable.getOrigin() instanceof ExternalCatalogTable) {
        throw new ValidationException(
                "Cannot serialize the table as it's an external inline table. "
                        + "This might be caused by a usage of "
                        + "StreamTableEnvironment#fromDataStream or TableResult#collect, "
                        + "which are not supported in compiled plans.");
    }

    serializerProvider.defaultSerializeField(
            RESOLVED_SCHEMA, resolvedCatalogTable.getResolvedSchema(), jsonGenerator);
    jsonGenerator.writeObjectField(PARTITION_KEYS, resolvedCatalogTable.getPartitionKeys());

    if (serializeOptions) {
        if (!resolvedCatalogTable.getComment().isEmpty()) {
            jsonGenerator.writeObjectField(COMMENT, resolvedCatalogTable.getComment());
        }
        try {
            jsonGenerator.writeObjectField(OPTIONS, resolvedCatalogTable.getOptions());
        } catch (Exception e) {
            throw new ValidationException(
                    String.format(
                            "The table is not serializable as %s#getOptions() failed. "
                                    + "It seems the table is not intended to be stored in a "
                                    + "persisted plan. Either declare the table as a temporary "
                                    + "table or use '%s' = '%s' / '%s' to only compile an identifier "
                                    + "into the plan.",
                            resolvedCatalogTable.getOrigin().getClass(),
                            TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS.key(),
                            CatalogPlanCompilation.SCHEMA.name(),
                            CatalogPlanCompilation.IDENTIFIER.name()),
                    e);
        }
    }

    jsonGenerator.writeEndObject();
}
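The catch branch points users at the option behind PLAN_COMPILE_CATALOG_OBJECTS. A minimal sketch of following that advice, assuming a tableEnv is already in scope; the string-based setter is used here to stay close to the error message's key/value wording:

import org.apache.flink.table.api.config.TableConfigOptions;

// Compile only the schema (or just the identifier) into the plan, so that
// ResolvedCatalogTable#getOptions() never needs to be serialized.
tableEnv.getConfig().set(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS.key(), "SCHEMA");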
Use of org.apache.flink.table.catalog.ExternalCatalogTable in project flink by apache.
Class AbstractStreamTableEnvironmentImpl, method toStreamInternal.
protected <T> DataStream<T> toStreamInternal(
        Table table,
        SchemaTranslator.ProducingResult schemaTranslationResult,
        @Nullable ChangelogMode changelogMode) {
    final CatalogManager catalogManager = getCatalogManager();
    final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();

    final QueryOperation projectOperation =
            schemaTranslationResult
                    .getProjections()
                    .map(
                            projections ->
                                    operationTreeBuilder.project(
                                            projections.stream()
                                                    .map(ApiExpressionUtils::unresolvedRef)
                                                    .collect(Collectors.toList()),
                                            table.getQueryOperation()))
                    .orElseGet(table::getQueryOperation);

    final ResolvedCatalogTable resolvedCatalogTable =
            catalogManager.resolveCatalogTable(
                    new ExternalCatalogTable(schemaTranslationResult.getSchema()));

    final ExternalModifyOperation modifyOperation =
            new ExternalModifyOperation(
                    ContextResolvedTable.anonymous("datastream_sink", resolvedCatalogTable),
                    projectOperation,
                    changelogMode,
                    schemaTranslationResult
                            .getPhysicalDataType()
                            .orElseGet(
                                    () ->
                                            resolvedCatalogTable
                                                    .getResolvedSchema()
                                                    .toPhysicalRowDataType()));

    return toStreamInternal(table, modifyOperation);
}
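toStreamInternal backs the public StreamTableEnvironment#toDataStream variants; every table leaving the Table API this way is wrapped in an anonymous "datastream_sink" ExternalCatalogTable. A minimal sketch of the user-facing side, with an illustrative query:

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

Table table = tableEnv.sqlQuery("SELECT 1 AS id, 'flink' AS name");
// Internally delegates to toStreamInternal, which builds the
// ExternalModifyOperation shown above.
DataStream<Row> stream = tableEnv.toDataStream(table);
stream.print();
env.execute();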
Use of org.apache.flink.table.catalog.ExternalCatalogTable in project flink by apache.
Class AbstractStreamTableEnvironmentImpl, method fromStreamInternal.
protected <T> Table fromStreamInternal(
        DataStream<T> dataStream,
        @Nullable Schema schema,
        @Nullable String viewPath,
        ChangelogMode changelogMode) {
    Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
    Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");

    if (dataStream.getExecutionEnvironment() != executionEnvironment) {
        throw new ValidationException(
                "The DataStream's StreamExecutionEnvironment must be identical to the one that "
                        + "has been passed to the StreamTableEnvironment during instantiation.");
    }

    final CatalogManager catalogManager = getCatalogManager();
    final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();

    final SchemaTranslator.ConsumingResult schemaTranslationResult =
            SchemaTranslator.createConsumingResult(
                    catalogManager.getDataTypeFactory(), dataStream.getType(), schema);

    final ResolvedCatalogTable resolvedCatalogTable =
            catalogManager.resolveCatalogTable(
                    new ExternalCatalogTable(schemaTranslationResult.getSchema()));

    final ContextResolvedTable contextResolvedTable;
    if (viewPath != null) {
        UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(viewPath);
        final ObjectIdentifier objectIdentifier =
                catalogManager.qualifyIdentifier(unresolvedIdentifier);
        contextResolvedTable =
                ContextResolvedTable.temporary(objectIdentifier, resolvedCatalogTable);
    } else {
        contextResolvedTable =
                ContextResolvedTable.anonymous("datastream_source", resolvedCatalogTable);
    }

    final QueryOperation scanOperation =
            new ExternalQueryOperation<>(
                    contextResolvedTable,
                    dataStream,
                    schemaTranslationResult.getPhysicalDataType(),
                    schemaTranslationResult.isTopLevelRecord(),
                    changelogMode);

    final List<String> projections = schemaTranslationResult.getProjections();
    if (projections == null) {
        return createTable(scanOperation);
    }

    final QueryOperation projectOperation =
            operationTreeBuilder.project(
                    projections.stream()
                            .map(ApiExpressionUtils::unresolvedRef)
                            .collect(Collectors.toList()),
                    scanOperation);
    return createTable(projectOperation);
}
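Both branches of the viewPath check are reachable from the public API: fromDataStream takes the anonymous path, while createTemporaryView supplies a view path. A short sketch, with illustrative stream contents and view name:

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
DataStream<String> names = env.fromElements("alice", "bob");

// viewPath == null: registered as ContextResolvedTable.anonymous("datastream_source", ...).
Table table = tableEnv.fromDataStream(names);

// viewPath != null: the identifier is parsed, qualified, and registered as temporary.
tableEnv.createTemporaryView("people", names);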