
Example 1 with UniqueConstraint

Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.

From the class AbstractJdbcCatalog, method getPrimaryKey.

// ------ retrieve PK constraint ------
protected Optional<UniqueConstraint> getPrimaryKey(DatabaseMetaData metaData, String schema, String table) throws SQLException {
    // According to the Javadoc of java.sql.DatabaseMetaData#getPrimaryKeys,
    // the returned primary key columns are ordered by COLUMN_NAME, not by KEY_SEQ.
    // We need to sort them based on the KEY_SEQ value.
    ResultSet rs = metaData.getPrimaryKeys(null, schema, table);
    Map<Integer, String> keySeqColumnName = new HashMap<>();
    String pkName = null;
    while (rs.next()) {
        String columnName = rs.getString("COLUMN_NAME");
        // all PK_NAME values should be the same
        pkName = rs.getString("PK_NAME");
        int keySeq = rs.getInt("KEY_SEQ");
        // KEY_SEQ is 1-based index
        keySeqColumnName.put(keySeq - 1, columnName);
    }
    // initialize size
    List<String> pkFields = Arrays.asList(new String[keySeqColumnName.size()]);
    keySeqColumnName.forEach(pkFields::set);
    if (!pkFields.isEmpty()) {
        // PK_NAME may be null according to the Javadoc; generate a unique name in that case
        pkName = pkName == null ? "pk_" + String.join("_", pkFields) : pkName;
        return Optional.of(UniqueConstraint.primaryKey(pkName, pkFields));
    }
    return Optional.empty();
}
Also used : HashMap(java.util.HashMap) ResultSet(java.sql.ResultSet) UniqueConstraint(org.apache.flink.table.catalog.UniqueConstraint)
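
For context, here is a minimal standalone sketch of the same KEY_SEQ re-ordering trick. The class, table, and column names are invented for illustration; it only relies on the UniqueConstraint.primaryKey factory and the accessors already used in the examples on this page.

import org.apache.flink.table.catalog.UniqueConstraint;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PrimaryKeyOrderingSketch {

    public static void main(String[] args) {
        // Simulated JDBC metadata: 0-based key position -> COLUMN_NAME,
        // inserted out of order to mimic the COLUMN_NAME ordering of getPrimaryKeys.
        Map<Integer, String> keySeqColumnName = new HashMap<>();
        keySeqColumnName.put(1, "user_id");
        keySeqColumnName.put(0, "tenant_id");

        // Same trick as getPrimaryKey: a fixed-size list filled by index
        // restores KEY_SEQ order regardless of map iteration order.
        List<String> pkFields = Arrays.asList(new String[keySeqColumnName.size()]);
        keySeqColumnName.forEach(pkFields::set);

        UniqueConstraint pk = UniqueConstraint.primaryKey("pk_example", pkFields);
        // Prints: pk_example -> [tenant_id, user_id]
        System.out.println(pk.getName() + " -> " + pk.getColumns());
    }
}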

Example 2 with UniqueConstraint

Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.

From the class AbstractJdbcCatalog, method getTable.

// ------ tables and views ------
@Override
public CatalogBaseTable getTable(ObjectPath tablePath) throws TableNotExistException, CatalogException {
    if (!tableExists(tablePath)) {
        throw new TableNotExistException(getName(), tablePath);
    }
    String dbUrl = baseUrl + tablePath.getDatabaseName();
    try (Connection conn = DriverManager.getConnection(dbUrl, username, pwd)) {
        DatabaseMetaData metaData = conn.getMetaData();
        // Retrieve the primary key constraint, if any, from the JDBC metadata.
        Optional<UniqueConstraint> primaryKey = getPrimaryKey(metaData, getSchemaName(tablePath), getTableName(tablePath));
        PreparedStatement ps = conn.prepareStatement(String.format("SELECT * FROM %s;", getSchemaTableName(tablePath)));
        // Derive column names and Flink data types from the statement's result set metadata.
        ResultSetMetaData resultSetMetaData = ps.getMetaData();
        String[] columnNames = new String[resultSetMetaData.getColumnCount()];
        DataType[] types = new DataType[resultSetMetaData.getColumnCount()];
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            columnNames[i - 1] = resultSetMetaData.getColumnName(i);
            types[i - 1] = fromJDBCType(tablePath, resultSetMetaData, i);
            if (resultSetMetaData.isNullable(i) == ResultSetMetaData.columnNoNulls) {
                types[i - 1] = types[i - 1].notNull();
            }
        }
        // Build the schema and attach the primary key constraint when present.
        Schema.Builder schemaBuilder = Schema.newBuilder().fromFields(columnNames, types);
        primaryKey.ifPresent(pk -> schemaBuilder.primaryKeyNamed(pk.getName(), pk.getColumns()));
        Schema tableSchema = schemaBuilder.build();
        Map<String, String> props = new HashMap<>();
        props.put(CONNECTOR.key(), IDENTIFIER);
        props.put(URL.key(), dbUrl);
        props.put(USERNAME.key(), username);
        props.put(PASSWORD.key(), pwd);
        props.put(TABLE_NAME.key(), getSchemaTableName(tablePath));
        return CatalogTable.of(tableSchema, null, Lists.newArrayList(), props);
    } catch (Exception e) {
        throw new CatalogException(String.format("Failed getting table %s", tablePath.getFullName()), e);
    }
}
Also used : HashMap(java.util.HashMap) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Schema(org.apache.flink.table.api.Schema) Connection(java.sql.Connection) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) UniqueConstraint(org.apache.flink.table.catalog.UniqueConstraint) PreparedStatement(java.sql.PreparedStatement) DatabaseMetaData(java.sql.DatabaseMetaData) FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) PartitionNotExistException(org.apache.flink.table.catalog.exceptions.PartitionNotExistException) PartitionSpecInvalidException(org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException) TablePartitionedException(org.apache.flink.table.catalog.exceptions.TablePartitionedException) FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) DatabaseNotEmptyException(org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) TableNotPartitionedException(org.apache.flink.table.catalog.exceptions.TableNotPartitionedException) ValidationException(org.apache.flink.table.api.ValidationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) PartitionAlreadyExistsException(org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException) SQLException(java.sql.SQLException) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) ResultSetMetaData(java.sql.ResultSetMetaData) DataType(org.apache.flink.table.types.DataType)
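
A compact sketch of how the constraint returned by getPrimaryKey flows into the Schema builder. Column names, types, and the constraint name are invented; Schema.newBuilder, fromFields, and primaryKeyNamed are the same calls getTable uses above.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.UniqueConstraint;
import org.apache.flink.table.types.DataType;

import java.util.Collections;

public class SchemaFromJdbcMetadataSketch {

    public static void main(String[] args) {
        // Column metadata as the JDBC loop would produce it (names and types are made up).
        String[] columnNames = {"id", "name"};
        DataType[] types = {DataTypes.BIGINT().notNull(), DataTypes.STRING()};

        // Primary key as it would come back from getPrimaryKey(...).
        UniqueConstraint pk = UniqueConstraint.primaryKey("pk_id", Collections.singletonList("id"));

        // Attach the constraint to the schema the same way getTable does.
        Schema schema = Schema.newBuilder()
                .fromFields(columnNames, types)
                .primaryKeyNamed(pk.getName(), pk.getColumns())
                .build();

        System.out.println(schema);
    }
}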

Example 3 with UniqueConstraint

Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.

From the class ResolvedSchemaJsonDeserializer, method deserialize.

@Override
public ResolvedSchema deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    List<Column> columns = ctx.readValue(traverse(jsonNode.required(COLUMNS), jsonParser.getCodec()), ctx.getTypeFactory().constructCollectionType(List.class, Column.class));
    List<WatermarkSpec> watermarkSpecs = ctx.readValue(traverse(jsonNode.required(WATERMARK_SPECS), jsonParser.getCodec()), ctx.getTypeFactory().constructCollectionType(List.class, WatermarkSpec.class));
    // PRIMARY_KEY is an optional field; a missing node means the schema has no primary key.
    UniqueConstraint primaryKey = deserializeOptionalField(jsonNode, PRIMARY_KEY, UniqueConstraint.class, jsonParser.getCodec(), ctx).orElse(null);
    return new ResolvedSchema(columns, watermarkSpecs, primaryKey);
}
Also used : ObjectNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode) WatermarkSpec(org.apache.flink.table.catalog.WatermarkSpec) Column(org.apache.flink.table.catalog.Column) UniqueConstraint(org.apache.flink.table.catalog.UniqueConstraint) List(java.util.List) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema)
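
To make the deserialized structure concrete, here is a small sketch (column and constraint names invented) that builds a ResolvedSchema with a primary key through the same constructor used above and reads the constraint back.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;

import java.util.Arrays;
import java.util.Collections;

public class ResolvedSchemaPrimaryKeySketch {

    public static void main(String[] args) {
        ResolvedSchema schema = new ResolvedSchema(
                Arrays.asList(
                        Column.physical("id", DataTypes.BIGINT().notNull()),
                        Column.physical("name", DataTypes.STRING())),
                // no watermark specs
                Collections.emptyList(),
                UniqueConstraint.primaryKey("pk_id", Collections.singletonList("id")));

        // The primary key is exposed as Optional<UniqueConstraint>, which matches the
        // deserializer's orElse(null) handling for schemas without one.
        schema.getPrimaryKey()
                .ifPresent(pk -> System.out.println(pk.getName() + " -> " + pk.getColumns()));
    }
}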

Example 4 with UniqueConstraint

Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.

From the class PushProjectIntoTableSourceScanRule, method getPrimaryKeyProjections.

private List<RexNode> getPrimaryKeyProjections(LogicalTableScan scan) {
    final TableSourceTable source = scan.getTable().unwrap(TableSourceTable.class);
    final ResolvedSchema schema = source.contextResolvedTable().getResolvedSchema();
    if (!schema.getPrimaryKey().isPresent()) {
        return Collections.emptyList();
    }
    final FlinkTypeFactory typeFactory = unwrapTypeFactory(scan);
    final UniqueConstraint primaryKey = schema.getPrimaryKey().get();
    // Map each primary-key column to an input ref on the scan's row type.
    return primaryKey.getColumns().stream().map(columnName -> {
        final int idx = scan.getRowType().getFieldNames().indexOf(columnName);
        final Column column = schema.getColumn(idx).orElseThrow(() -> new TableException(String.format("Column at index %d not found.", idx)));
        return new RexInputRef(idx, typeFactory.createFieldTypeFromLogicalType(column.getDataType().getLogicalType()));
    }).collect(Collectors.toList());
}
Also used : IntStream(java.util.stream.IntStream) NestedProjectionUtil(org.apache.flink.table.planner.plan.utils.NestedProjectionUtil) Arrays(java.util.Arrays) ShortcutUtils.unwrapTypeFactory(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapTypeFactory) SourceAbilityContext(org.apache.flink.table.planner.plan.abilities.source.SourceAbilityContext) Column(org.apache.flink.table.catalog.Column) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) RexNodeExtractor(org.apache.flink.table.planner.plan.utils.RexNodeExtractor) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RowType(org.apache.flink.table.types.logical.RowType) SupportsProjectionPushDown(org.apache.flink.table.connector.source.abilities.SupportsProjectionPushDown) ArrayList(java.util.ArrayList) RexNode(org.apache.calcite.rex.RexNode) NestedSchema(org.apache.flink.table.planner.plan.utils.NestedSchema) Projection(org.apache.flink.table.connector.Projection) ProjectRemoveRule(org.apache.calcite.rel.rules.ProjectRemoveRule) DynamicSourceUtils.createProducedType(org.apache.flink.table.planner.connectors.DynamicSourceUtils.createProducedType) RelDataType(org.apache.calcite.rel.type.RelDataType) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) TableConfig(org.apache.flink.table.api.TableConfig) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) ProjectPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.ProjectPushDownSpec) TableException(org.apache.flink.table.api.TableException) ShortcutUtils.unwrapContext(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapContext) RelRule(org.apache.calcite.plan.RelRule) NestedColumn(org.apache.flink.table.planner.plan.utils.NestedColumn) Collectors(java.util.stream.Collectors) DynamicSourceUtils.createRequiredMetadataKeys(org.apache.flink.table.planner.connectors.DynamicSourceUtils.createRequiredMetadataKeys) SourceAbilitySpec(org.apache.flink.table.planner.plan.abilities.source.SourceAbilitySpec) TableSourceTable(org.apache.flink.table.planner.plan.schema.TableSourceTable) RelOptRuleCall(org.apache.calcite.plan.RelOptRuleCall) RexInputRef(org.apache.calcite.rex.RexInputRef) Objects(java.util.Objects) DynamicSourceUtils(org.apache.flink.table.planner.connectors.DynamicSourceUtils) RelOptRule(org.apache.calcite.plan.RelOptRule) List(java.util.List) Stream(java.util.stream.Stream) UniqueConstraint(org.apache.flink.table.catalog.UniqueConstraint) SupportsReadingMetadata(org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata) ReadingMetadataSpec(org.apache.flink.table.planner.plan.abilities.source.ReadingMetadataSpec) Internal(org.apache.flink.annotation.Internal) Collections(java.util.Collections) LogicalTableScan(org.apache.calcite.rel.logical.LogicalTableScan)
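
Setting the Calcite-specific parts aside, the core of getPrimaryKeyProjections is resolving each primary-key column name to a field index. Below is a minimal sketch of that lookup with an invented schema and no RexInputRef construction; it assumes the field names come back in declared column order.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

public class PrimaryKeyIndexLookupSketch {

    public static void main(String[] args) {
        ResolvedSchema schema = new ResolvedSchema(
                Arrays.asList(
                        Column.physical("payload", DataTypes.STRING()),
                        Column.physical("id", DataTypes.BIGINT().notNull())),
                Collections.emptyList(),
                UniqueConstraint.primaryKey("pk_id", Collections.singletonList("id")));

        List<String> fieldNames = schema.getColumnNames();

        // Resolve each primary-key column to its position in the row type,
        // which is what the rule then turns into RexInputRef projections.
        List<Integer> pkIndices = schema.getPrimaryKey()
                .map(UniqueConstraint::getColumns)
                .orElse(Collections.emptyList())
                .stream()
                .map(fieldNames::indexOf)
                .collect(Collectors.toList());

        // Prints: [1]
        System.out.println(pkIndices);
    }
}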

Aggregations

UniqueConstraint (org.apache.flink.table.catalog.UniqueConstraint) : 4
HashMap (java.util.HashMap) : 2
List (java.util.List) : 2
Column (org.apache.flink.table.catalog.Column) : 2
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema) : 2
Connection (java.sql.Connection) : 1
DatabaseMetaData (java.sql.DatabaseMetaData) : 1
PreparedStatement (java.sql.PreparedStatement) : 1
ResultSet (java.sql.ResultSet) : 1
ResultSetMetaData (java.sql.ResultSetMetaData) : 1
SQLException (java.sql.SQLException) : 1
ArrayList (java.util.ArrayList) : 1
Arrays (java.util.Arrays) : 1
Collections (java.util.Collections) : 1
Objects (java.util.Objects) : 1
Collectors (java.util.stream.Collectors) : 1
IntStream (java.util.stream.IntStream) : 1
Stream (java.util.stream.Stream) : 1
RelOptRule (org.apache.calcite.plan.RelOptRule) : 1
RelOptRuleCall (org.apache.calcite.plan.RelOptRuleCall) : 1