Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.
Class AbstractJdbcCatalog, method getPrimaryKey.
// ------ retrieve PK constraint ------

protected Optional<UniqueConstraint> getPrimaryKey(
        DatabaseMetaData metaData, String schema, String table) throws SQLException {

    // According to the Javadoc of java.sql.DatabaseMetaData#getPrimaryKeys,
    // the returned primary key columns are ordered by COLUMN_NAME, not by KEY_SEQ.
    // We need to sort them based on the KEY_SEQ value.
    ResultSet rs = metaData.getPrimaryKeys(null, schema, table);

    Map<Integer, String> keySeqColumnName = new HashMap<>();
    String pkName = null;
    while (rs.next()) {
        String columnName = rs.getString("COLUMN_NAME");
        // every row of the same primary key reports the same PK_NAME
        pkName = rs.getString("PK_NAME");
        int keySeq = rs.getInt("KEY_SEQ");
        // KEY_SEQ is a 1-based index
        keySeqColumnName.put(keySeq - 1, columnName);
    }

    // pre-size the list so columns can be written directly at their KEY_SEQ position
    List<String> pkFields = Arrays.asList(new String[keySeqColumnName.size()]);
    keySeqColumnName.forEach(pkFields::set);

    if (!pkFields.isEmpty()) {
        // PK_NAME may be null according to the Javadoc; generate a unique name in that case
        pkName = pkName == null ? "pk_" + String.join("_", pkFields) : pkName;
        return Optional.of(UniqueConstraint.primaryKey(pkName, pkFields));
    }
    return Optional.empty();
}
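As a side note, here is a minimal standalone sketch of the last step above, i.e. how UniqueConstraint.primaryKey is assembled when the metadata reports no PK_NAME; the column names and the class name PrimaryKeyNamingSketch are hypothetical:

import org.apache.flink.table.catalog.UniqueConstraint;

import java.util.Arrays;
import java.util.List;

public class PrimaryKeyNamingSketch {
    public static void main(String[] args) {
        // columns already ordered by KEY_SEQ, as getPrimaryKey produces them
        // (hypothetical column names, for illustration only)
        List<String> pkFields = Arrays.asList("order_id", "line_no");

        // PK_NAME was null in the metadata, so a name is derived from the columns
        String pkName = "pk_" + String.join("_", pkFields);

        UniqueConstraint pk = UniqueConstraint.primaryKey(pkName, pkFields);
        System.out.println(pk.getName());    // pk_order_id_line_no
        System.out.println(pk.getColumns()); // [order_id, line_no]
    }
}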
Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.
Class AbstractJdbcCatalog, method getTable.
// ------ tables and views ------

@Override
public CatalogBaseTable getTable(ObjectPath tablePath)
        throws TableNotExistException, CatalogException {
    if (!tableExists(tablePath)) {
        throw new TableNotExistException(getName(), tablePath);
    }

    String dbUrl = baseUrl + tablePath.getDatabaseName();
    try (Connection conn = DriverManager.getConnection(dbUrl, username, pwd)) {
        DatabaseMetaData metaData = conn.getMetaData();
        Optional<UniqueConstraint> primaryKey =
                getPrimaryKey(metaData, getSchemaName(tablePath), getTableName(tablePath));

        PreparedStatement ps =
                conn.prepareStatement(
                        String.format("SELECT * FROM %s;", getSchemaTableName(tablePath)));

        ResultSetMetaData resultSetMetaData = ps.getMetaData();

        String[] columnNames = new String[resultSetMetaData.getColumnCount()];
        DataType[] types = new DataType[resultSetMetaData.getColumnCount()];

        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            columnNames[i - 1] = resultSetMetaData.getColumnName(i);
            types[i - 1] = fromJDBCType(tablePath, resultSetMetaData, i);
            if (resultSetMetaData.isNullable(i) == ResultSetMetaData.columnNoNulls) {
                types[i - 1] = types[i - 1].notNull();
            }
        }

        Schema.Builder schemaBuilder = Schema.newBuilder().fromFields(columnNames, types);
        primaryKey.ifPresent(
                pk -> schemaBuilder.primaryKeyNamed(pk.getName(), pk.getColumns()));
        Schema tableSchema = schemaBuilder.build();

        Map<String, String> props = new HashMap<>();
        props.put(CONNECTOR.key(), IDENTIFIER);
        props.put(URL.key(), dbUrl);
        props.put(USERNAME.key(), username);
        props.put(PASSWORD.key(), pwd);
        props.put(TABLE_NAME.key(), getSchemaTableName(tablePath));

        return CatalogTable.of(tableSchema, null, Lists.newArrayList(), props);
    } catch (Exception e) {
        throw new CatalogException(
                String.format("Failed getting table %s", tablePath.getFullName()), e);
    }
}
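A minimal sketch of how a caller might inspect the primary key that getTable attaches to the returned schema, assuming an already-opened JDBC catalog instance; the database and table names, the method name, and the class name InspectJdbcTableSketch are placeholders:

import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;

public class InspectJdbcTableSketch {
    // 'catalog' is assumed to be an already-opened JDBC catalog;
    // the database and table names below are placeholders
    static void printPrimaryKey(Catalog catalog) throws Exception {
        CatalogBaseTable table = catalog.getTable(new ObjectPath("mydb", "orders"));
        Schema schema = table.getUnresolvedSchema();
        // prints the constraint name and columns derived by getPrimaryKey, if any
        schema.getPrimaryKey()
                .ifPresent(pk ->
                        System.out.println(pk.getConstraintName() + " -> " + pk.getColumnNames()));
    }
}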
Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.
Class ResolvedSchemaJsonDeserializer, method deserialize.
@Override
public ResolvedSchema deserialize(JsonParser jsonParser, DeserializationContext ctx)
        throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();

    List<Column> columns =
            ctx.readValue(
                    traverse(jsonNode.required(COLUMNS), jsonParser.getCodec()),
                    ctx.getTypeFactory().constructCollectionType(List.class, Column.class));
    List<WatermarkSpec> watermarkSpecs =
            ctx.readValue(
                    traverse(jsonNode.required(WATERMARK_SPECS), jsonParser.getCodec()),
                    ctx.getTypeFactory().constructCollectionType(List.class, WatermarkSpec.class));
    UniqueConstraint primaryKey =
            deserializeOptionalField(
                            jsonNode, PRIMARY_KEY, UniqueConstraint.class, jsonParser.getCodec(), ctx)
                    .orElse(null);

    return new ResolvedSchema(columns, watermarkSpecs, primaryKey);
}
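For reference, a small sketch of the kind of ResolvedSchema this deserializer reconstructs, built directly through the public constructor; the column names and types are made up for illustration, as is the class name ResolvedSchemaSketch:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ResolvedSchemaSketch {
    public static void main(String[] args) {
        // physical columns; the primary-key column is declared NOT NULL
        List<Column> columns =
                Arrays.asList(
                        Column.physical("id", DataTypes.BIGINT().notNull()),
                        Column.physical("name", DataTypes.STRING()));

        // no watermark specs, one single-column primary key
        ResolvedSchema schema =
                new ResolvedSchema(
                        columns,
                        Collections.emptyList(),
                        UniqueConstraint.primaryKey("PK_id", Collections.singletonList("id")));

        System.out.println(schema.getPrimaryKey().map(UniqueConstraint::getColumns)); // Optional[[id]]
    }
}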
Use of org.apache.flink.table.catalog.UniqueConstraint in project flink by apache.
Class PushProjectIntoTableSourceScanRule, method getPrimaryKeyProjections.
private List<RexNode> getPrimaryKeyProjections(LogicalTableScan scan) {
    final TableSourceTable source = scan.getTable().unwrap(TableSourceTable.class);
    final ResolvedSchema schema = source.contextResolvedTable().getResolvedSchema();
    if (!schema.getPrimaryKey().isPresent()) {
        return Collections.emptyList();
    }

    final FlinkTypeFactory typeFactory = unwrapTypeFactory(scan);
    final UniqueConstraint primaryKey = schema.getPrimaryKey().get();
    return primaryKey.getColumns().stream()
            .map(
                    columnName -> {
                        final int idx = scan.getRowType().getFieldNames().indexOf(columnName);
                        final Column column =
                                schema.getColumn(idx)
                                        .orElseThrow(
                                                () ->
                                                        new TableException(
                                                                String.format(
                                                                        "Column at index %d not found.", idx)));
                        return new RexInputRef(
                                idx,
                                typeFactory.createFieldTypeFromLogicalType(
                                        column.getDataType().getLogicalType()));
                    })
            .collect(Collectors.toList());
}
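The planner types used above (RexInputRef, FlinkTypeFactory) are internal, but the ResolvedSchema lookups the rule depends on can be exercised in isolation; a minimal sketch with a made-up two-column schema and the hypothetical class name PrimaryKeyLookupSketch:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;

import java.util.Arrays;
import java.util.Collections;

public class PrimaryKeyLookupSketch {
    public static void main(String[] args) {
        // made-up schema with a single-column primary key
        ResolvedSchema schema =
                new ResolvedSchema(
                        Arrays.asList(
                                Column.physical("id", DataTypes.BIGINT().notNull()),
                                Column.physical("name", DataTypes.STRING())),
                        Collections.emptyList(),
                        UniqueConstraint.primaryKey("pk_id", Collections.singletonList("id")));

        // same chain as the rule: PK column name -> field index -> column -> logical type
        UniqueConstraint pk = schema.getPrimaryKey().orElseThrow(IllegalStateException::new);
        for (String name : pk.getColumns()) {
            int idx = schema.getColumnNames().indexOf(name);
            Column column = schema.getColumn(idx).orElseThrow(IllegalStateException::new);
            System.out.println(idx + " -> " + column.getDataType().getLogicalType());
        }
    }
}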