Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class SqlToOperationConverterTest, method prepareTable.
private void prepareTable(boolean managedTable, boolean hasPartition, boolean hasConstraint)
        throws Exception {
    Catalog catalog = new GenericInMemoryCatalog("default", "default");
    catalogManager.registerCatalog("cat1", catalog);
    catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    Schema.Builder builder =
            Schema.newBuilder()
                    .column("a", DataTypes.STRING().notNull())
                    .column("b", DataTypes.BIGINT().notNull())
                    .column("c", DataTypes.BIGINT());
    Map<String, String> options = new HashMap<>();
    options.put("k", "v");
    if (!managedTable) {
        options.put("connector", "dummy");
    }
    CatalogTable catalogTable =
            CatalogTable.of(
                    hasConstraint
                            ? builder.primaryKeyNamed("ct1", "a", "b").build()
                            : builder.build(),
                    "tb1",
                    hasPartition ? Arrays.asList("b", "c") : Collections.emptyList(),
                    Collections.unmodifiableMap(options));
    catalogManager.setCurrentCatalog("cat1");
    catalogManager.setCurrentDatabase("db1");
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    catalogManager.createTable(catalogTable, tableIdentifier, true);
}
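For reference, a minimal sketch of the ObjectIdentifier accessors relevant to the snippet above (illustration only; the values mirror the test setup):

ObjectIdentifier id = ObjectIdentifier.of("cat1", "db1", "tb1");
id.getCatalogName();   // "cat1"
id.getDatabaseName();  // "db1"
id.getObjectName();    // "tb1"
id.asSummaryString();  // "cat1.db1.tb1"
id.toObjectPath();     // ObjectPath("db1", "tb1"), the form Catalog methods accept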
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class DynamicTableSourceSpecSerdeTest, method testDynamicTableSourceSpecSerdeWithEnrichmentOptions.
@Test
void testDynamicTableSourceSpecSerdeWithEnrichmentOptions() throws Exception {
    // Test model
    ObjectIdentifier identifier =
            ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "my_table");
    String formatPrefix = FactoryUtil.getFormatPrefix(FORMAT, TestFormatFactory.IDENTIFIER);

    Map<String, String> planOptions = new HashMap<>();
    planOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    planOptions.put(TARGET.key(), "abc");
    planOptions.put(PASSWORD.key(), "abc");
    planOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    planOptions.put(formatPrefix + DELIMITER.key(), "|");

    Map<String, String> catalogOptions = new HashMap<>();
    catalogOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    catalogOptions.put(TARGET.key(), "abc");
    catalogOptions.put(PASSWORD.key(), "xyz");
    catalogOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    catalogOptions.put(formatPrefix + DELIMITER.key(), ",");

    ResolvedCatalogTable planResolvedCatalogTable = tableWithOnlyPhysicalColumns(planOptions);
    ResolvedCatalogTable catalogResolvedCatalogTable = tableWithOnlyPhysicalColumns(catalogOptions);

    // Create planner mocks
    PlannerMocks plannerMocks =
            PlannerMocks.create(
                    new Configuration()
                            .set(PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL)
                            .set(PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.ALL));
    CatalogManager catalogManager = plannerMocks.getCatalogManager();
    catalogManager.createTable(catalogResolvedCatalogTable, identifier, false);

    // Mock the context
    SerdeContext serdeCtx = configuredSerdeContext(catalogManager, plannerMocks.getTableConfig());

    DynamicTableSourceSpec planSpec =
            new DynamicTableSourceSpec(
                    ContextResolvedTable.permanent(
                            identifier,
                            catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get(),
                            planResolvedCatalogTable),
                    Collections.emptyList());

    String actualJson = toJson(serdeCtx, planSpec);
    DynamicTableSourceSpec actual = toObject(serdeCtx, actualJson, DynamicTableSourceSpec.class);
    assertThat(actual.getContextResolvedTable()).isEqualTo(planSpec.getContextResolvedTable());
    assertThat(actual.getSourceAbilities()).isNull();

    TestDynamicTableFactory.DynamicTableSourceMock dynamicTableSource =
            (TestDynamicTableFactory.DynamicTableSourceMock)
                    actual.getScanTableSource(plannerMocks.getPlannerContext().getFlinkContext());
    assertThat(dynamicTableSource.password).isEqualTo("xyz");
    assertThat(((TestFormatFactory.DecodingFormatMock) dynamicTableSource.valueFormat).delimiter)
            .isEqualTo(",");
}
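The assertions above capture the enrichment rule this serde exercises. The following merge is an illustration only, not Flink's implementation: at restore time, options the factory declares as forwardable (here PASSWORD and the format DELIMITER) are overridden by the catalog table, while everything else keeps the value compiled into the plan. The sink test further down shows the complementary case, where TARGET is not forwardable and stays at its plan value.

// Illustration of the observed merge, assuming the planOptions and
// catalogOptions maps from the test above are in scope.
Map<String, String> effective = new HashMap<>(planOptions);
// Hypothetical set of forwardable keys, chosen to match the test's assertions.
Set<String> forwardable =
        new HashSet<>(Arrays.asList(PASSWORD.key(), formatPrefix + DELIMITER.key()));
for (String key : forwardable) {
    if (catalogOptions.containsKey(key)) {
        effective.put(key, catalogOptions.get(key));
    }
}
effective.get(PASSWORD.key());                  // "xyz" (enriched from the catalog)
effective.get(formatPrefix + DELIMITER.key());  // ","  (enriched from the catalog)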
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class ContextResolvedTableSerdeTest, method temporaryTableAndMissingIdentifierInCatalog.
@Test
void temporaryTableAndMissingIdentifierInCatalog() throws Exception {
    final SerdeContext ctx =
            serdeContext(
                    TableConfigOptions.CatalogPlanCompilation.ALL,
                    TableConfigOptions.CatalogPlanRestore.ALL_ENFORCED);
    final ObjectIdentifier objectIdentifier =
            ObjectIdentifier.of(DEFAULT_CATALOG, "db2", "some-nonexistent-table");
    final ContextResolvedTable spec =
            ContextResolvedTable.temporary(
                    objectIdentifier,
                    new ResolvedCatalogTable(
                            CatalogTable.of(
                                    CATALOG_TABLE_SCHEMA,
                                    "my amazing table",
                                    Collections.emptyList(),
                                    PLAN_OPTIONS),
                            CATALOG_TABLE_RESOLVED_SCHEMA));

    final byte[] actualSerialized = createObjectWriter(ctx).writeValueAsBytes(spec);

    assertThatThrownBy(
                    () ->
                            createObjectReader(ctx)
                                    .readValue(actualSerialized, ContextResolvedTable.class))
            .satisfies(
                    anyCauseMatches(
                            TableException.class,
                            ContextResolvedTableJsonDeserializer.missingTableFromCatalog(
                                            objectIdentifier, false)
                                    .getMessage()));
}
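A minimal sketch of what would make the restore succeed (assuming access to the test's CatalogManager instance, which the snippet does not show): under CatalogPlanRestore.ALL_ENFORCED, a temporary table can only be resolved if it is registered in the session before deserialization, e.g.:

// Assumption: registering the temporary table under the same identifier
// before readValue(...) lets the deserializer resolve it instead of throwing
// missingTableFromCatalog.
catalogManager.createTemporaryTable(
        CatalogTable.of(
                CATALOG_TABLE_SCHEMA, "my amazing table", Collections.emptyList(), PLAN_OPTIONS),
        objectIdentifier,
        false);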
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class DynamicTableSinkSpecSerdeTest, method testDynamicTableSinkSpecSerdeWithEnrichmentOptions.
@Test
void testDynamicTableSinkSpecSerdeWithEnrichmentOptions() throws Exception {
    // Test model
    ObjectIdentifier identifier =
            ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "my_table");
    String formatPrefix = FactoryUtil.getFormatPrefix(FORMAT, TestFormatFactory.IDENTIFIER);

    Map<String, String> planOptions = new HashMap<>();
    planOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    planOptions.put(TARGET.key(), "abc");
    planOptions.put(BUFFER_SIZE.key(), "1000");
    planOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    planOptions.put(formatPrefix + DELIMITER.key(), "|");

    Map<String, String> catalogOptions = new HashMap<>();
    catalogOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    catalogOptions.put(TARGET.key(), "xyz");
    catalogOptions.put(BUFFER_SIZE.key(), "2000");
    catalogOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    catalogOptions.put(formatPrefix + DELIMITER.key(), ",");

    ResolvedCatalogTable planResolvedCatalogTable = tableWithOnlyPhysicalColumns(planOptions);
    ResolvedCatalogTable catalogResolvedCatalogTable = tableWithOnlyPhysicalColumns(catalogOptions);

    // Create planner mocks
    PlannerMocks plannerMocks =
            PlannerMocks.create(
                    new Configuration()
                            .set(PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL)
                            .set(PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.ALL));
    CatalogManager catalogManager = plannerMocks.getCatalogManager();
    catalogManager.createTable(catalogResolvedCatalogTable, identifier, false);

    // Mock the context
    SerdeContext serdeCtx = configuredSerdeContext(catalogManager, plannerMocks.getTableConfig());

    DynamicTableSinkSpec planSpec =
            new DynamicTableSinkSpec(
                    ContextResolvedTable.permanent(
                            identifier,
                            catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get(),
                            planResolvedCatalogTable),
                    Collections.emptyList());

    String actualJson = toJson(serdeCtx, planSpec);
    DynamicTableSinkSpec actual = toObject(serdeCtx, actualJson, DynamicTableSinkSpec.class);
    assertThat(actual.getContextResolvedTable()).isEqualTo(planSpec.getContextResolvedTable());
    assertThat(actual.getSinkAbilities()).isNull();

    TestDynamicTableFactory.DynamicTableSinkMock dynamicTableSink =
            (TestDynamicTableFactory.DynamicTableSinkMock)
                    actual.getTableSink(plannerMocks.getPlannerContext().getFlinkContext());
    assertThat(dynamicTableSink.target).isEqualTo("abc");
    assertThat(dynamicTableSink.bufferSize).isEqualTo(2000);
    assertThat(((TestFormatFactory.EncodingFormatMock) dynamicTableSink.valueFormat).delimiter)
            .isEqualTo(",");
}
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.
The class TableImpl, method insertInto.
@Override
public TablePipeline insertInto(String tablePath, boolean overwrite) {
    UnresolvedIdentifier unresolvedIdentifier =
            tableEnvironment.getParser().parseIdentifier(tablePath);
    ObjectIdentifier objectIdentifier =
            tableEnvironment.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable contextResolvedTable =
            tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier);
    return insertInto(contextResolvedTable, overwrite);
}
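A short sketch of the qualification step (illustrative values; the result depends on the session's current catalog and database):

// Assuming current catalog "cat1" and current database "db1", as set up in
// the first snippet on this page:
UnresolvedIdentifier partial = UnresolvedIdentifier.of("tb1");
ObjectIdentifier qualified = catalogManager.qualifyIdentifier(partial);
qualified.asSummaryString(); // "cat1.db1.tb1"

// A fully qualified path is kept as-is:
UnresolvedIdentifier full = UnresolvedIdentifier.of("other_cat", "other_db", "t");
catalogManager.qualifyIdentifier(full).asSummaryString(); // "other_cat.other_db.t"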