Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
In the class TableEnvironmentTest, the method innerTestManagedTableFromDescriptor:
private void innerTestManagedTableFromDescriptor(boolean ignoreIfExists, boolean isTemporary) {
    final TableEnvironmentMock tEnv = TableEnvironmentMock.getStreamingInstance();
    final String catalog = tEnv.getCurrentCatalog();
    final String database = tEnv.getCurrentDatabase();
    final Schema schema = Schema.newBuilder().column("f0", DataTypes.INT()).build();
    final String tableName = UUID.randomUUID().toString();
    ObjectIdentifier identifier = ObjectIdentifier.of(catalog, database, tableName);

    // create table
    MANAGED_TABLES.put(identifier, new AtomicReference<>());
    CreateTableOperation createOperation =
            new CreateTableOperation(
                    identifier,
                    TableDescriptor.forManaged()
                            .schema(schema)
                            .option("a", "Test")
                            .build()
                            .toCatalogTable(),
                    ignoreIfExists,
                    isTemporary);
    tEnv.executeInternal(createOperation);
    // test ignore: create again
    if (ignoreIfExists) {
        tEnv.executeInternal(createOperation);
    } else {
        assertThatThrownBy(() -> tEnv.executeInternal(createOperation))
                .hasMessageContaining(
                        isTemporary ? "already exists" : "Could not execute CreateTable");
    }
    // lookup table
    boolean isInCatalog =
            tEnv.getCatalog(catalog)
                    .orElseThrow(AssertionError::new)
                    .tableExists(new ObjectPath(database, tableName));
    if (isTemporary) {
        assertThat(isInCatalog).isFalse();
    } else {
        assertThat(isInCatalog).isTrue();
    }

    final Optional<ContextResolvedTable> lookupResult =
            tEnv.getCatalogManager().getTable(identifier);
    assertThat(lookupResult.isPresent()).isTrue();

    final CatalogBaseTable catalogTable = lookupResult.get().getTable();
    assertThat(catalogTable instanceof CatalogTable).isTrue();
    assertThat(catalogTable.getUnresolvedSchema()).isEqualTo(schema);
    assertThat(catalogTable.getOptions().get("a")).isEqualTo("Test");
    assertThat(catalogTable.getOptions().get(ENRICHED_KEY)).isEqualTo(ENRICHED_VALUE);

    AtomicReference<Map<String, String>> reference = MANAGED_TABLES.get(identifier);
    assertThat(reference.get()).isNotNull();
    assertThat(reference.get().get("a")).isEqualTo("Test");
    assertThat(reference.get().get(ENRICHED_KEY)).isEqualTo(ENRICHED_VALUE);

    DropTableOperation dropOperation =
            new DropTableOperation(identifier, ignoreIfExists, isTemporary);
    tEnv.executeInternal(dropOperation);
    assertThat(MANAGED_TABLES.get(identifier).get()).isNull();
    // test ignore: drop again
    if (ignoreIfExists) {
        tEnv.executeInternal(dropOperation);
    } else {
        assertThatThrownBy(() -> tEnv.executeInternal(dropOperation))
                .hasMessageContaining("does not exist");
    }
    MANAGED_TABLES.remove(identifier);
}
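Because the helper is parameterized over ignoreIfExists and isTemporary, the enclosing test class can drive it once per flag combination. A minimal sketch of such driver tests; the @Test method names and the chosen combinations are hypothetical, not taken from the Flink source:

// Hypothetical driver tests, one per flag combination of interest.
@Test
public void testManagedTableFromDescriptor() {
    innerTestManagedTableFromDescriptor(false, false);
}

@Test
public void testTemporaryManagedTableFromDescriptorIgnoreIfExists() {
    innerTestManagedTableFromDescriptor(true, true);
}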
Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
In the class TableEnvironmentTest, the method testCreateTemporaryTableFromDescriptor:
@Test
public void testCreateTemporaryTableFromDescriptor() {
    final TableEnvironmentMock tEnv = TableEnvironmentMock.getStreamingInstance();
    final String catalog = tEnv.getCurrentCatalog();
    final String database = tEnv.getCurrentDatabase();
    final Schema schema = Schema.newBuilder().column("f0", DataTypes.INT()).build();

    tEnv.createTemporaryTable(
            "T",
            TableDescriptor.forConnector("fake")
                    .schema(schema)
                    .option("a", "Test")
                    .build());

    assertThat(
                    tEnv.getCatalog(catalog)
                            .orElseThrow(AssertionError::new)
                            .tableExists(new ObjectPath(database, "T")))
            .isFalse();

    final Optional<ContextResolvedTable> lookupResult =
            tEnv.getCatalogManager().getTable(ObjectIdentifier.of(catalog, database, "T"));
    assertThat(lookupResult.isPresent()).isTrue();

    final CatalogBaseTable catalogTable = lookupResult.get().getTable();
    assertThat(catalogTable instanceof CatalogTable).isTrue();
    assertThat(catalogTable.getUnresolvedSchema()).isEqualTo(schema);
    assertThat(catalogTable.getOptions().get("connector")).isEqualTo("fake");
    assertThat(catalogTable.getOptions().get("a")).isEqualTo("Test");
}
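Once registered this way, the table resolves by name even though it is never persisted in the catalog, because the CatalogManager consults its temporary-table map before the catalog. A minimal follow-up sketch; the printSchema call is illustrative, and the "fake" connector would only fail at execution time, not at resolution time:

// Resolve the temporary table by name; no catalog lookup is involved.
Table t = tEnv.from("T");
t.printSchema();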
Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
In the class ContextResolvedTableJsonDeserializer, the method deserialize:
@Override
public ContextResolvedTable deserialize(JsonParser jsonParser, DeserializationContext ctx)
        throws IOException {
    final CatalogPlanRestore planRestoreOption =
            SerdeContext.get(ctx).getConfiguration().get(PLAN_RESTORE_CATALOG_OBJECTS);
    final CatalogManager catalogManager =
            SerdeContext.get(ctx).getFlinkContext().getCatalogManager();
    final ObjectNode objectNode = jsonParser.readValueAsTree();

    // Deserialize the two fields, if available
    final ObjectIdentifier identifier =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_IDENTIFIER, ObjectIdentifier.class,
                            jsonParser.getCodec(), ctx)
                    .orElse(null);
    ResolvedCatalogTable resolvedCatalogTable =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_CATALOG_TABLE, ResolvedCatalogTable.class,
                            jsonParser.getCodec(), ctx)
                    .orElse(null);

    if (identifier == null && resolvedCatalogTable == null) {
        throw new ValidationException(
                String.format(
                        "The input JSON is invalid because it doesn't contain '%s', nor the '%s'.",
                        FIELD_NAME_IDENTIFIER, FIELD_NAME_CATALOG_TABLE));
    }

    if (identifier == null) {
        if (isLookupForced(planRestoreOption)) {
            throw missingIdentifier();
        }
        return ContextResolvedTable.anonymous(resolvedCatalogTable);
    }

    Optional<ContextResolvedTable> contextResolvedTableFromCatalog =
            isLookupEnabled(planRestoreOption)
                    ? catalogManager.getTable(identifier)
                    : Optional.empty();

    // If we have a schema from the plan and from the catalog, we need to check they match.
    if (contextResolvedTableFromCatalog.isPresent() && resolvedCatalogTable != null) {
        ResolvedSchema schemaFromPlan = resolvedCatalogTable.getResolvedSchema();
        ResolvedSchema schemaFromCatalog =
                contextResolvedTableFromCatalog.get().getResolvedSchema();
        if (!areResolvedSchemasEqual(schemaFromPlan, schemaFromCatalog)) {
            throw schemaNotMatching(identifier, schemaFromPlan, schemaFromCatalog);
        }
    }

    if (resolvedCatalogTable == null || isLookupForced(planRestoreOption)) {
        if (!isLookupEnabled(planRestoreOption)) {
            throw lookupDisabled(identifier);
        }
        // We use what is stored inside the catalog
        return contextResolvedTableFromCatalog.orElseThrow(
                () -> missingTableFromCatalog(identifier, isLookupForced(planRestoreOption)));
    }
    if (contextResolvedTableFromCatalog.isPresent()) {
        // If the plan contains no options, the table was serialized with
        // SCHEMA, so we just need to return the catalog query result
        if (objectNode.at("/" + FIELD_NAME_CATALOG_TABLE + "/" + OPTIONS).isMissingNode()) {
            return contextResolvedTableFromCatalog.get();
        }

        return contextResolvedTableFromCatalog
                .flatMap(ContextResolvedTable::getCatalog)
                .map(c -> ContextResolvedTable.permanent(identifier, c, resolvedCatalogTable))
                .orElseGet(() -> ContextResolvedTable.temporary(identifier, resolvedCatalogTable));
    }

    return ContextResolvedTable.temporary(identifier, resolvedCatalogTable);
}
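The branching above is governed entirely by the table.plan.restore.catalog-objects option (ALL by default, or ALL_ENFORCED / IDENTIFIER). A sketch of how a user would pick the restore strategy before loading a compiled plan, assuming Flink 1.15+; the plan path is illustrative:

TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());

// IDENTIFIER forces the lookup path above (isLookupForced): only the identifier
// from the plan is used, and the table must still exist in the catalog.
// ALL_ENFORCED disables catalog lookup entirely (isLookupEnabled is false).
tEnv.getConfig()
        .set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS,
                TableConfigOptions.CatalogPlanRestore.IDENTIFIER);

tEnv.loadPlan(PlanReference.fromFile("/path/to/plan.json")).execute(); // illustrative path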
Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
In the class AbstractStreamTableEnvironmentImpl, the method fromStreamInternal:
protected <T> Table fromStreamInternal(
        DataStream<T> dataStream, @Nullable Schema schema,
        @Nullable String viewPath, ChangelogMode changelogMode) {
    Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
    Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");

    if (dataStream.getExecutionEnvironment() != executionEnvironment) {
        throw new ValidationException(
                "The DataStream's StreamExecutionEnvironment must be identical to the one that "
                        + "has been passed to the StreamTableEnvironment during instantiation.");
    }

    final CatalogManager catalogManager = getCatalogManager();
    final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();

    final SchemaTranslator.ConsumingResult schemaTranslationResult =
            SchemaTranslator.createConsumingResult(
                    catalogManager.getDataTypeFactory(), dataStream.getType(), schema);
    final ResolvedCatalogTable resolvedCatalogTable =
            catalogManager.resolveCatalogTable(
                    new ExternalCatalogTable(schemaTranslationResult.getSchema()));

    final ContextResolvedTable contextResolvedTable;
    if (viewPath != null) {
        UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(viewPath);
        final ObjectIdentifier objectIdentifier =
                catalogManager.qualifyIdentifier(unresolvedIdentifier);
        contextResolvedTable =
                ContextResolvedTable.temporary(objectIdentifier, resolvedCatalogTable);
    } else {
        contextResolvedTable =
                ContextResolvedTable.anonymous("datastream_source", resolvedCatalogTable);
    }

    final QueryOperation scanOperation =
            new ExternalQueryOperation<>(
                    contextResolvedTable, dataStream,
                    schemaTranslationResult.getPhysicalDataType(),
                    schemaTranslationResult.isTopLevelRecord(), changelogMode);

    final List<String> projections = schemaTranslationResult.getProjections();
    if (projections == null) {
        return createTable(scanOperation);
    }
    final QueryOperation projectOperation =
            operationTreeBuilder.project(
                    projections.stream().map(ApiExpressionUtils::unresolvedRef)
                            .collect(Collectors.toList()),
                    scanOperation);
    return createTable(projectOperation);
}
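fromStreamInternal backs the public DataStream conversions on StreamTableEnvironment; which branch runs depends on whether a view path is supplied. A minimal usage sketch; the sequence source and view name are illustrative:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
DataStream<Long> stream = env.fromSequence(1, 10);

// No view path: fromStreamInternal takes the anonymous branch ("datastream_source").
Table table = tEnv.fromDataStream(stream);

// With a view path: the identifier is qualified and a temporary table is created.
tEnv.createTemporaryView("InputView", stream);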
Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
In the class TableImpl, the method insertInto:
@Override
public TablePipeline insertInto(String tablePath, boolean overwrite) {
    UnresolvedIdentifier unresolvedIdentifier =
            tableEnvironment.getParser().parseIdentifier(tablePath);
    ObjectIdentifier objectIdentifier =
            tableEnvironment.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable contextResolvedTable =
            tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier);
    return insertInto(contextResolvedTable, overwrite);
}
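A sketch of how this overload is reached from user code; the source and sink table names are illustrative and assume both have been registered beforehand:

Table orders = tEnv.from("Orders");

// insertInto resolves "OrdersSink" through the CatalogManager, as shown above,
// and returns a TablePipeline that can be explained or executed.
TablePipeline pipeline = orders.insertInto("OrdersSink", true); // overwrite = true
pipeline.printExplain();
pipeline.execute();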