Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class ContextResolvedTableSerdeTest, method temporaryTableAndMissingIdentifierInCatalog.
@Test
void temporaryTableAndMissingIdentifierInCatalog() throws Exception {
    final SerdeContext ctx = serdeContext(
            TableConfigOptions.CatalogPlanCompilation.ALL,
            TableConfigOptions.CatalogPlanRestore.ALL_ENFORCED);
    final ObjectIdentifier objectIdentifier =
            ObjectIdentifier.of(DEFAULT_CATALOG, "db2", "some-nonexistent-table");
    final ContextResolvedTable spec = ContextResolvedTable.temporary(
            objectIdentifier,
            new ResolvedCatalogTable(
                    CatalogTable.of(CATALOG_TABLE_SCHEMA, "my amazing table",
                            Collections.emptyList(), PLAN_OPTIONS),
                    CATALOG_TABLE_RESOLVED_SCHEMA));

    // Serialization succeeds, but ALL_ENFORCED restore must fail because the
    // temporary table cannot be looked up in the catalog.
    final byte[] actualSerialized = createObjectWriter(ctx).writeValueAsBytes(spec);
    assertThatThrownBy(() ->
            createObjectReader(ctx).readValue(actualSerialized, ContextResolvedTable.class))
            .satisfies(anyCauseMatches(
                    TableException.class,
                    ContextResolvedTableJsonDeserializer
                            .missingTableFromCatalog(objectIdentifier, false).getMessage()));
}
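For orientation, a minimal sketch of the identifier the test builds above. The catalog name is a made-up stand-in for the DEFAULT_CATALOG test constant, and the rendered strings in the comments show the approximate output of the two helper methods.
    // Illustrative only: the fully qualified identifier used by the test above.
    ObjectIdentifier oid = ObjectIdentifier.of("default_catalog", "db2", "some-nonexistent-table");
    String summary = oid.asSummaryString();       // roughly: default_catalog.db2.some-nonexistent-table
    String escaped = oid.asSerializableString();  // roughly: `default_catalog`.`db2`.`some-nonexistent-table`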
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class DynamicTableSinkSpecSerdeTest, method testDynamicTableSinkSpecSerdeWithEnrichmentOptions.
@Test
void testDynamicTableSinkSpecSerdeWithEnrichmentOptions() throws Exception {
    // Test model
    ObjectIdentifier identifier =
            ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "my_table");
    String formatPrefix = FactoryUtil.getFormatPrefix(FORMAT, TestFormatFactory.IDENTIFIER);

    Map<String, String> planOptions = new HashMap<>();
    planOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    planOptions.put(TARGET.key(), "abc");
    planOptions.put(BUFFER_SIZE.key(), "1000");
    planOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    planOptions.put(formatPrefix + DELIMITER.key(), "|");

    Map<String, String> catalogOptions = new HashMap<>();
    catalogOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    catalogOptions.put(TARGET.key(), "xyz");
    catalogOptions.put(BUFFER_SIZE.key(), "2000");
    catalogOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    catalogOptions.put(formatPrefix + DELIMITER.key(), ",");

    ResolvedCatalogTable planResolvedCatalogTable = tableWithOnlyPhysicalColumns(planOptions);
    ResolvedCatalogTable catalogResolvedCatalogTable = tableWithOnlyPhysicalColumns(catalogOptions);

    // Create planner mocks
    PlannerMocks plannerMocks = PlannerMocks.create(
            new Configuration()
                    .set(PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL)
                    .set(PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.ALL));
    CatalogManager catalogManager = plannerMocks.getCatalogManager();
    catalogManager.createTable(catalogResolvedCatalogTable, identifier, false);

    // Mock the context
    SerdeContext serdeCtx = configuredSerdeContext(catalogManager, plannerMocks.getTableConfig());

    DynamicTableSinkSpec planSpec = new DynamicTableSinkSpec(
            ContextResolvedTable.permanent(
                    identifier,
                    catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get(),
                    planResolvedCatalogTable),
            Collections.emptyList());

    String actualJson = toJson(serdeCtx, planSpec);
    DynamicTableSinkSpec actual = toObject(serdeCtx, actualJson, DynamicTableSinkSpec.class);

    assertThat(actual.getContextResolvedTable()).isEqualTo(planSpec.getContextResolvedTable());
    assertThat(actual.getSinkAbilities()).isNull();

    TestDynamicTableFactory.DynamicTableSinkMock dynamicTableSink =
            (TestDynamicTableFactory.DynamicTableSinkMock)
                    actual.getTableSink(plannerMocks.getPlannerContext().getFlinkContext());
    // Not a forwardable option: the value compiled into the plan wins.
    assertThat(dynamicTableSink.target).isEqualTo("abc");
    // Forwardable options are enriched from the catalog copy of the table.
    assertThat(dynamicTableSink.bufferSize).isEqualTo(2000);
    assertThat(((TestFormatFactory.EncodingFormatMock) dynamicTableSink.valueFormat).delimiter)
            .isEqualTo(",");
}
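The assertions at the end capture the enrichment contract: options that the sink factory declares as forwardable are taken from the catalog version of the table at restore time, while all other options keep the value compiled into the plan. Below is a minimal plain-Java sketch of that merge, with an assumed forwardable key set; it is not the Flink restore code path.
    // Assumed forwardable keys for illustration; the real set comes from the factory's forwardOptions().
    Set<String> forwardable = Set.of("buffer-size", "test-format.delimiter");
    Map<String, String> effective = new HashMap<>(planOptions);
    catalogOptions.forEach((key, value) -> {
        if (forwardable.contains(key)) {
            effective.put(key, value); // the catalog wins only for forwardable options
        }
    });
    // effective now holds target=abc (plan), buffer-size=2000 and delimiter="," (catalog)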
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class TableImpl, method insertInto.
@Override
public TablePipeline insertInto(String tablePath, boolean overwrite) {
    UnresolvedIdentifier unresolvedIdentifier =
            tableEnvironment.getParser().parseIdentifier(tablePath);
    ObjectIdentifier objectIdentifier =
            tableEnvironment.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable contextResolvedTable =
            tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier);
    return insertInto(contextResolvedTable, overwrite);
}
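A hedged usage sketch of the public API that ends up in this method; the table names and the surrounding TableEnvironment are assumed to be already registered.
    // Hypothetical usage: the partial path is parsed, qualified against the current
    // catalog and database, resolved via getTableOrError, and wrapped in a TablePipeline.
    Table result = tableEnv.sqlQuery("SELECT id, name FROM source_table");
    TablePipeline pipeline = result.insertInto("sink_table"); // insertInto(path, true) would overwrite
    pipeline.execute();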
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class RexNodeJsonDeserializer, method deserializeFunctionClass.
private static SqlOperator deserializeFunctionClass(JsonNode jsonNode, SerdeContext serdeContext) {
    final String className = jsonNode.required(FIELD_NAME_CLASS).asText();
    final Class<?> functionClass = loadClass(className, serdeContext, "function");
    final UserDefinedFunction functionInstance =
            UserDefinedFunctionHelper.instantiateFunction(functionClass);

    final ContextResolvedFunction resolvedFunction;
    // This can never be a system function,
    // because we never serialize classes for system functions.
    if (jsonNode.has(FIELD_NAME_CATALOG_NAME)) {
        final ObjectIdentifier objectIdentifier = ObjectIdentifierJsonDeserializer.deserialize(
                jsonNode.required(FIELD_NAME_CATALOG_NAME).asText(), serdeContext);
        resolvedFunction = ContextResolvedFunction.permanent(
                FunctionIdentifier.of(objectIdentifier), functionInstance);
    } else {
        resolvedFunction = ContextResolvedFunction.anonymous(functionInstance);
    }

    switch (functionInstance.getKind()) {
        case SCALAR:
        case TABLE:
            return BridgingSqlFunction.of(
                    serdeContext.getFlinkContext(), serdeContext.getTypeFactory(), resolvedFunction);
        case AGGREGATE:
            return BridgingSqlAggFunction.of(
                    serdeContext.getFlinkContext(), serdeContext.getTypeFactory(), resolvedFunction);
        default:
            throw new TableException(String.format(
                    "Unsupported anonymous function kind '%s' for class '%s'.",
                    functionInstance.getKind(), className));
    }
}
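A small illustration of the two resolution outcomes in the branch above. The identifier and the UDF class are made up; only the factory methods are existing Flink API.
    // Illustrative only: permanent vs. anonymous resolution of a deserialized function.
    ObjectIdentifier oid = ObjectIdentifier.of("my_catalog", "my_db", "my_scalar_func");
    UserDefinedFunction instance =
            UserDefinedFunctionHelper.instantiateFunction(MyScalarFunction.class); // assumed UDF class
    ContextResolvedFunction permanent =
            ContextResolvedFunction.permanent(FunctionIdentifier.of(oid), instance);
    ContextResolvedFunction anonymous = ContextResolvedFunction.anonymous(instance);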
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class AbstractStreamTableEnvironmentImpl, method fromStreamInternal.
protected <T> Table fromStreamInternal(
        DataStream<T> dataStream,
        @Nullable Schema schema,
        @Nullable String viewPath,
        ChangelogMode changelogMode) {
    Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
    Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");
    if (dataStream.getExecutionEnvironment() != executionEnvironment) {
        throw new ValidationException(
                "The DataStream's StreamExecutionEnvironment must be identical to the one that "
                        + "has been passed to the StreamTableEnvironment during instantiation.");
    }

    final CatalogManager catalogManager = getCatalogManager();
    final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();
    final SchemaTranslator.ConsumingResult schemaTranslationResult =
            SchemaTranslator.createConsumingResult(
                    catalogManager.getDataTypeFactory(), dataStream.getType(), schema);
    final ResolvedCatalogTable resolvedCatalogTable =
            catalogManager.resolveCatalogTable(
                    new ExternalCatalogTable(schemaTranslationResult.getSchema()));

    final ContextResolvedTable contextResolvedTable;
    if (viewPath != null) {
        UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(viewPath);
        final ObjectIdentifier objectIdentifier =
                catalogManager.qualifyIdentifier(unresolvedIdentifier);
        contextResolvedTable =
                ContextResolvedTable.temporary(objectIdentifier, resolvedCatalogTable);
    } else {
        contextResolvedTable =
                ContextResolvedTable.anonymous("datastream_source", resolvedCatalogTable);
    }

    final QueryOperation scanOperation = new ExternalQueryOperation<>(
            contextResolvedTable,
            dataStream,
            schemaTranslationResult.getPhysicalDataType(),
            schemaTranslationResult.isTopLevelRecord(),
            changelogMode);

    final List<String> projections = schemaTranslationResult.getProjections();
    if (projections == null) {
        return createTable(scanOperation);
    }
    final QueryOperation projectOperation = operationTreeBuilder.project(
            projections.stream()
                    .map(ApiExpressionUtils::unresolvedRef)
                    .collect(Collectors.toList()),
            scanOperation);
    return createTable(projectOperation);
}
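For context, a hedged caller-side sketch: fromDataStream(...) reaches fromStreamInternal with a null viewPath and yields an anonymous table, while createTemporaryView(...) supplies a path that is qualified into an ObjectIdentifier. The names below are made up.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    DataStream<Long> numbers = env.fromSequence(1, 10);
    Table anonymousTable = tableEnv.fromDataStream(numbers);   // viewPath == null -> anonymous table
    tableEnv.createTemporaryView("numbers_view", numbers);     // viewPath != null -> temporary table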