
Example 6 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From the class TableEnvironmentTest, method innerTestManagedTableFromDescriptor.

private void innerTestManagedTableFromDescriptor(boolean ignoreIfExists, boolean isTemporary) {
    final TableEnvironmentMock tEnv = TableEnvironmentMock.getStreamingInstance();
    final String catalog = tEnv.getCurrentCatalog();
    final String database = tEnv.getCurrentDatabase();
    final Schema schema = Schema.newBuilder().column("f0", DataTypes.INT()).build();
    final String tableName = UUID.randomUUID().toString();
    ObjectIdentifier identifier = ObjectIdentifier.of(catalog, database, tableName);
    // create table
    MANAGED_TABLES.put(identifier, new AtomicReference<>());
    CreateTableOperation createOperation =
            new CreateTableOperation(
                    identifier,
                    TableDescriptor.forManaged()
                            .schema(schema)
                            .option("a", "Test")
                            .build()
                            .toCatalogTable(),
                    ignoreIfExists,
                    isTemporary);
    tEnv.executeInternal(createOperation);
    // test ignore: create again
    if (ignoreIfExists) {
        tEnv.executeInternal(createOperation);
    } else {
        // the second argument only sets the assertion description; the call still
        // fails if no exception is thrown at all
        assertThatThrownBy(
                () -> tEnv.executeInternal(createOperation),
                isTemporary ? "already exists" : "Could not execute CreateTable");
    }
    // lookup table
    boolean isInCatalog =
            tEnv.getCatalog(catalog)
                    .orElseThrow(AssertionError::new)
                    .tableExists(new ObjectPath(database, tableName));
    if (isTemporary) {
        assertThat(isInCatalog).isFalse();
    } else {
        assertThat(isInCatalog).isTrue();
    }
    final Optional<ContextResolvedTable> lookupResult =
            tEnv.getCatalogManager().getTable(identifier);
    assertThat(lookupResult).isPresent();
    final CatalogBaseTable catalogTable = lookupResult.get().getTable();
    assertThat(catalogTable).isInstanceOf(CatalogTable.class);
    assertThat(catalogTable.getUnresolvedSchema()).isEqualTo(schema);
    assertThat(catalogTable.getOptions().get("a")).isEqualTo("Test");
    assertThat(catalogTable.getOptions().get(ENRICHED_KEY)).isEqualTo(ENRICHED_VALUE);
    AtomicReference<Map<String, String>> reference = MANAGED_TABLES.get(identifier);
    assertThat(reference.get()).isNotNull();
    assertThat(reference.get().get("a")).isEqualTo("Test");
    assertThat(reference.get().get(ENRICHED_KEY)).isEqualTo(ENRICHED_VALUE);
    DropTableOperation dropOperation = new DropTableOperation(identifier, ignoreIfExists, isTemporary);
    tEnv.executeInternal(dropOperation);
    assertThat(MANAGED_TABLES.get(identifier).get()).isNull();
    // test ignore: drop again
    if (ignoreIfExists) {
        tEnv.executeInternal(dropOperation);
    } else {
        assertThatThrownBy(() -> tEnv.executeInternal(dropOperation), "does not exist");
    }
    MANAGED_TABLES.remove(identifier);
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) CatalogTable(org.apache.flink.table.catalog.CatalogTable) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) TableEnvironmentMock(org.apache.flink.table.utils.TableEnvironmentMock) Map(java.util.Map) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
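
The helper above is parameterized over ignoreIfExists and isTemporary, so a handful of driver tests can cover each create/drop path. A minimal sketch of such drivers (the @Test method names here are hypothetical, not necessarily those used in TableEnvironmentTest):

@Test
public void testManagedTableFromDescriptor() {
    // permanent table: the duplicate CREATE must fail
    innerTestManagedTableFromDescriptor(false, false);
}

@Test
public void testManagedTableFromDescriptorIgnoreIfExists() {
    // IF NOT EXISTS semantics: the duplicate CREATE becomes a no-op
    innerTestManagedTableFromDescriptor(true, false);
}

@Test
public void testTemporaryManagedTableFromDescriptor() {
    // temporary table: resolvable via the catalog manager, absent from the catalog
    innerTestManagedTableFromDescriptor(false, true);
}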

Example 7 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From the class TableEnvironmentTest, method testCreateTemporaryTableFromDescriptor.

@Test
public void testCreateTemporaryTableFromDescriptor() {
    final TableEnvironmentMock tEnv = TableEnvironmentMock.getStreamingInstance();
    final String catalog = tEnv.getCurrentCatalog();
    final String database = tEnv.getCurrentDatabase();
    final Schema schema = Schema.newBuilder().column("f0", DataTypes.INT()).build();
    tEnv.createTemporaryTable(
            "T",
            TableDescriptor.forConnector("fake").schema(schema).option("a", "Test").build());
    // temporary tables resolve by name but are not stored in the catalog itself
    assertThat(tEnv.getCatalog(catalog).orElseThrow(AssertionError::new)
                    .tableExists(new ObjectPath(database, "T")))
            .isFalse();
    final Optional<ContextResolvedTable> lookupResult =
            tEnv.getCatalogManager().getTable(ObjectIdentifier.of(catalog, database, "T"));
    assertThat(lookupResult).isPresent();
    final CatalogBaseTable catalogTable = lookupResult.get().getTable();
    assertThat(catalogTable).isInstanceOf(CatalogTable.class);
    assertThat(catalogTable.getUnresolvedSchema()).isEqualTo(schema);
    assertThat(catalogTable.getOptions().get("connector")).isEqualTo("fake");
    assertThat(catalogTable.getOptions().get("a")).isEqualTo("Test");
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) TableEnvironmentMock(org.apache.flink.table.utils.TableEnvironmentMock) Test(org.junit.jupiter.api.Test)
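
As a follow-up to the lookup assertions above, a short sketch (not part of the original test) of using and then removing the temporary table; it assumes the mock environment can resolve "T" through its catalog manager:

// resolve the temporary table by name and inspect its schema
final Table t = tEnv.from("T");
assertThat(t.getResolvedSchema().getColumnNames()).containsExactly("f0");
// dropTemporaryTable returns true only if a temporary table was actually removed
assertThat(tEnv.dropTemporaryTable("T")).isTrue();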

Example 8 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From the class ContextResolvedTableJsonDeserializer, method deserialize.

@Override
public ContextResolvedTable deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    final CatalogPlanRestore planRestoreOption = SerdeContext.get(ctx).getConfiguration().get(PLAN_RESTORE_CATALOG_OBJECTS);
    final CatalogManager catalogManager = SerdeContext.get(ctx).getFlinkContext().getCatalogManager();
    final ObjectNode objectNode = jsonParser.readValueAsTree();
    // Deserialize the two fields, if available
    final ObjectIdentifier identifier =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_IDENTIFIER, ObjectIdentifier.class,
                            jsonParser.getCodec(), ctx)
                    .orElse(null);
    ResolvedCatalogTable resolvedCatalogTable =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_CATALOG_TABLE, ResolvedCatalogTable.class,
                            jsonParser.getCodec(), ctx)
                    .orElse(null);
    if (identifier == null && resolvedCatalogTable == null) {
        throw new ValidationException(String.format("The input JSON is invalid because it doesn't contain '%s', nor the '%s'.", FIELD_NAME_IDENTIFIER, FIELD_NAME_CATALOG_TABLE));
    }
    if (identifier == null) {
        if (isLookupForced(planRestoreOption)) {
            throw missingIdentifier();
        }
        return ContextResolvedTable.anonymous(resolvedCatalogTable);
    }
    Optional<ContextResolvedTable> contextResolvedTableFromCatalog =
            isLookupEnabled(planRestoreOption)
                    ? catalogManager.getTable(identifier)
                    : Optional.empty();
    // If we have a schema from the plan and from the catalog, we need to check they match.
    if (contextResolvedTableFromCatalog.isPresent() && resolvedCatalogTable != null) {
        ResolvedSchema schemaFromPlan = resolvedCatalogTable.getResolvedSchema();
        ResolvedSchema schemaFromCatalog = contextResolvedTableFromCatalog.get().getResolvedSchema();
        if (!areResolvedSchemasEqual(schemaFromPlan, schemaFromCatalog)) {
            throw schemaNotMatching(identifier, schemaFromPlan, schemaFromCatalog);
        }
    }
    if (resolvedCatalogTable == null || isLookupForced(planRestoreOption)) {
        if (!isLookupEnabled(planRestoreOption)) {
            throw lookupDisabled(identifier);
        }
        // We use what is stored inside the catalog
        return contextResolvedTableFromCatalog.orElseThrow(() -> missingTableFromCatalog(identifier, isLookupForced(planRestoreOption)));
    }
    if (contextResolvedTableFromCatalog.isPresent()) {
        // If no options map is present, the ContextResolvedTable was serialized with
        // SCHEMA, so we just need to return the catalog query result
        if (objectNode.at("/" + FIELD_NAME_CATALOG_TABLE + "/" + OPTIONS).isMissingNode()) {
            return contextResolvedTableFromCatalog.get();
        }
        return contextResolvedTableFromCatalog
                .flatMap(ContextResolvedTable::getCatalog)
                .map(c -> ContextResolvedTable.permanent(identifier, c, resolvedCatalogTable))
                .orElseGet(() -> ContextResolvedTable.temporary(identifier, resolvedCatalogTable));
    }
    return ContextResolvedTable.temporary(identifier, resolvedCatalogTable);
}
Also used : CatalogManager(org.apache.flink.table.catalog.CatalogManager) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) FIELD_NAME_IDENTIFIER(org.apache.flink.table.planner.plan.nodes.exec.serde.ContextResolvedTableJsonSerializer.FIELD_NAME_IDENTIFIER) Column(org.apache.flink.table.catalog.Column) ObjectNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode) IDENTIFIER(org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanRestore.IDENTIFIER) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) JsonParser(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonParser) IOException(java.io.IOException) PLAN_COMPILE_CATALOG_OBJECTS(org.apache.flink.table.api.config.TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS) CatalogPlanRestore(org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanRestore) CatalogPlanCompilation(org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanCompilation) Objects(java.util.Objects) OPTIONS(org.apache.flink.table.planner.plan.nodes.exec.serde.ResolvedCatalogTableJsonSerializer.OPTIONS) DeserializationContext(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationContext) List(java.util.List) ValidationException(org.apache.flink.table.api.ValidationException) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) StdDeserializer(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.deser.std.StdDeserializer) PLAN_RESTORE_CATALOG_OBJECTS(org.apache.flink.table.api.config.TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) FIELD_NAME_CATALOG_TABLE(org.apache.flink.table.planner.plan.nodes.exec.serde.ContextResolvedTableJsonSerializer.FIELD_NAME_CATALOG_TABLE) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable)
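
The isLookupForced and isLookupEnabled helpers are not part of this excerpt. Judging from how the deserializer uses them together with the CatalogPlanRestore enum (ALL, ALL_ENFORCED, IDENTIFIER), they plausibly reduce to simple comparisons; a sketch under that assumption:

// Plausible shape of the helpers referenced above; inferred, not copied from the excerpt.
private static boolean isLookupForced(CatalogPlanRestore planRestoreOption) {
    // IDENTIFIER: always resolve the table through the catalog, ignoring any plan copy
    return planRestoreOption == IDENTIFIER;
}

private static boolean isLookupEnabled(CatalogPlanRestore planRestoreOption) {
    // ALL_ENFORCED: restore exclusively from the plan, never consult the catalog
    return planRestoreOption != CatalogPlanRestore.ALL_ENFORCED;
}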

Example 9 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From the class AbstractStreamTableEnvironmentImpl, method fromStreamInternal.

protected <T> Table fromStreamInternal(DataStream<T> dataStream, @Nullable Schema schema, @Nullable String viewPath, ChangelogMode changelogMode) {
    Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
    Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");
    if (dataStream.getExecutionEnvironment() != executionEnvironment) {
        throw new ValidationException("The DataStream's StreamExecutionEnvironment must be identical to the one that " + "has been passed to the StreamTableEnvironment during instantiation.");
    }
    final CatalogManager catalogManager = getCatalogManager();
    final OperationTreeBuilder operationTreeBuilder = getOperationTreeBuilder();
    final SchemaTranslator.ConsumingResult schemaTranslationResult =
            SchemaTranslator.createConsumingResult(
                    catalogManager.getDataTypeFactory(), dataStream.getType(), schema);
    final ResolvedCatalogTable resolvedCatalogTable =
            catalogManager.resolveCatalogTable(
                    new ExternalCatalogTable(schemaTranslationResult.getSchema()));
    final ContextResolvedTable contextResolvedTable;
    if (viewPath != null) {
        UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(viewPath);
        final ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
        contextResolvedTable = ContextResolvedTable.temporary(objectIdentifier, resolvedCatalogTable);
    } else {
        contextResolvedTable = ContextResolvedTable.anonymous("datastream_source", resolvedCatalogTable);
    }
    final QueryOperation scanOperation =
            new ExternalQueryOperation<>(
                    contextResolvedTable,
                    dataStream,
                    schemaTranslationResult.getPhysicalDataType(),
                    schemaTranslationResult.isTopLevelRecord(),
                    changelogMode);
    final List<String> projections = schemaTranslationResult.getProjections();
    if (projections == null) {
        return createTable(scanOperation);
    }
    final QueryOperation projectOperation =
            operationTreeBuilder.project(
                    projections.stream()
                            .map(ApiExpressionUtils::unresolvedRef)
                            .collect(Collectors.toList()),
                    scanOperation);
    return createTable(projectOperation);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) ExternalCatalogTable(org.apache.flink.table.catalog.ExternalCatalogTable) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) ApiExpressionUtils(org.apache.flink.table.expressions.ApiExpressionUtils) CatalogManager(org.apache.flink.table.catalog.CatalogManager) SchemaTranslator(org.apache.flink.table.catalog.SchemaTranslator) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) OperationTreeBuilder(org.apache.flink.table.operations.utils.OperationTreeBuilder) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) ExternalQueryOperation(org.apache.flink.table.operations.ExternalQueryOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) QueryOperation(org.apache.flink.table.operations.QueryOperation) DataStreamQueryOperation(org.apache.flink.table.operations.DataStreamQueryOperation) ExternalQueryOperation(org.apache.flink.table.operations.ExternalQueryOperation)
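
fromStreamInternal is the common back end for the public DataStream conversion methods. A minimal sketch of how it is typically reached (the exact delegation chain is an assumption based on this excerpt):

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
DataStream<Long> stream = env.fromSequence(1L, 10L);
// fromDataStream(...) presumably lands in fromStreamInternal(stream, schema, null, ...),
// yielding an anonymous ContextResolvedTable named like "datastream_source"
Table table = tableEnv.fromDataStream(stream);
// createTemporaryView(...) supplies a view path, so the resulting
// ContextResolvedTable is temporary rather than anonymous
tableEnv.createTemporaryView("events", stream);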

Example 10 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From the class TableImpl, method insertInto.

@Override
public TablePipeline insertInto(String tablePath, boolean overwrite) {
    UnresolvedIdentifier unresolvedIdentifier = tableEnvironment.getParser().parseIdentifier(tablePath);
    ObjectIdentifier objectIdentifier = tableEnvironment.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable contextResolvedTable = tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier);
    return insertInto(contextResolvedTable, overwrite);
}
Also used : UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
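
A minimal usage sketch (table names are hypothetical): the ContextResolvedTable resolved above feeds a TablePipeline that can be executed directly:

Table source = tableEnv.from("source_table");
// "sink_table" is qualified and resolved through the catalog manager, as shown above
TablePipeline pipeline = source.insertInto("sink_table", false);
pipeline.execute();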

Aggregations

ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable) 15
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier) 8
ValidationException (org.apache.flink.table.api.ValidationException) 7
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 6
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable) 6
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier) 5
Map (java.util.Map) 4
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 4
HashMap (java.util.HashMap) 3
List (java.util.List) 3
RelHint (org.apache.calcite.rel.hint.RelHint) 3
ObjectPath (org.apache.flink.table.catalog.ObjectPath) 3
IOException (java.io.IOException) 2
ArrayList (java.util.ArrayList) 2
LinkedHashMap (java.util.LinkedHashMap) 2
Optional (java.util.Optional) 2
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema) 2
SinkModifyOperation (org.apache.flink.table.operations.SinkModifyOperation) 2
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation) 2
AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) 2