Use of org.apache.flink.table.catalog.ResolvedCatalogTable in project flink by apache.
From the class HiveTableFactoryTest, method testHiveTable:
@Test
public void testHiveTable() throws Exception {
    final ResolvedSchema schema =
            ResolvedSchema.of(
                    Column.physical("name", DataTypes.STRING()),
                    Column.physical("age", DataTypes.INT()));
    catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
    final Map<String, String> options =
            Collections.singletonMap(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    final CatalogTable table =
            new CatalogTableImpl(TableSchema.fromResolvedSchema(schema), options, "hive table");
    catalog.createTable(new ObjectPath("mydb", "mytable"), table, true);
    final DynamicTableSource tableSource =
            FactoryUtil.createDynamicTableSource(
                    (DynamicTableSourceFactory)
                            catalog.getFactory().orElseThrow(IllegalStateException::new),
                    ObjectIdentifier.of("mycatalog", "mydb", "mytable"),
                    new ResolvedCatalogTable(table, schema),
                    new Configuration(),
                    Thread.currentThread().getContextClassLoader(),
                    false);
    assertTrue(tableSource instanceof HiveTableSource);
    final DynamicTableSink tableSink =
            FactoryUtil.createDynamicTableSink(
                    (DynamicTableSinkFactory)
                            catalog.getFactory().orElseThrow(IllegalStateException::new),
                    ObjectIdentifier.of("mycatalog", "mydb", "mytable"),
                    new ResolvedCatalogTable(table, schema),
                    new Configuration(),
                    Thread.currentThread().getContextClassLoader(),
                    false);
    assertTrue(tableSink instanceof HiveTableSink);
}
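The key move in this test is pairing the raw CatalogTable with the ResolvedSchema that schema resolution produced. A minimal, self-contained sketch of that pairing, using the non-deprecated CatalogTable.of builder instead of the legacy CatalogTableImpl; the "print" connector option and class name are illustrative placeholders, not part of the original test:

import java.util.Collections;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;

public class ResolvedCatalogTableSketch {
    public static ResolvedCatalogTable build() {
        // The resolved schema is the validated, type-checked form of the schema.
        ResolvedSchema resolvedSchema =
                ResolvedSchema.of(
                        Column.physical("name", DataTypes.STRING()),
                        Column.physical("age", DataTypes.INT()));
        // The raw CatalogTable carries the declared schema plus connector options.
        CatalogTable rawTable =
                CatalogTable.of(
                        Schema.newBuilder().fromResolvedSchema(resolvedSchema).build(),
                        "example table",
                        Collections.emptyList(),
                        Collections.singletonMap("connector", "print"));
        // ResolvedCatalogTable is the unit that FactoryUtil.createDynamicTableSource
        // and createDynamicTableSink consume in the test above.
        return new ResolvedCatalogTable(rawTable, resolvedSchema);
    }
}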
Use of org.apache.flink.table.catalog.ResolvedCatalogTable in project flink by apache.
From the class KinesisDynamicTableFactory, method createDynamicTableSource:
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
    ReadableConfig tableOptions = helper.getOptions();
    ResolvedCatalogTable catalogTable = context.getCatalogTable();
    DataType physicalDataType = catalogTable.getResolvedSchema().toPhysicalRowDataType();
    KinesisConnectorOptionsUtil optionsUtils =
            new KinesisConnectorOptionsUtil(catalogTable.getOptions(), tableOptions);
    // Initialize the table format early in order to register its consumedOptionKeys
    // in the TableFactoryHelper, as those are needed for correct option validation.
    DecodingFormat<DeserializationSchema<RowData>> decodingFormat =
            helper.discoverDecodingFormat(DeserializationFormatFactory.class, FORMAT);
    // Validate the data types of the table options.
    helper.validateExcept(optionsUtils.getNonValidatedPrefixes().toArray(new String[0]));
    Properties properties = optionsUtils.getValidatedSourceConfigurations();
    return new KinesisDynamicSource(
            physicalDataType, tableOptions.get(STREAM), properties, decodingFormat);
}
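For context, a table definition along these lines would route into this factory via the "kinesis" connector identifier. A hedged sketch; the table name, stream name, and region values are illustrative only:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class KinesisDdlSketch {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // 'stream' maps onto the STREAM option read at the end of
        // createDynamicTableSource; 'format' selects the DecodingFormat that
        // helper.discoverDecodingFormat resolves.
        env.executeSql(
                "CREATE TABLE orders (order_id STRING, amount DOUBLE) WITH ("
                        + " 'connector' = 'kinesis',"
                        + " 'stream' = 'orders-stream',"
                        + " 'aws.region' = 'us-east-1',"
                        + " 'format' = 'json')");
    }
}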
Use of org.apache.flink.table.catalog.ResolvedCatalogTable in project flink by apache.
From the class TestManagedTableFactory, method onCompactTable:
@Override
public Map<String, String> onCompactTable(Context context, CatalogPartitionSpec catalogPartitionSpec) {
    ObjectIdentifier tableIdentifier = context.getObjectIdentifier();
    ResolvedCatalogTable table = context.getCatalogTable();
    Map<String, String> newOptions = new HashMap<>(table.getOptions());
    resolveCompactFileBasePath(tableIdentifier)
            .ifPresent(s -> newOptions.put(COMPACT_FILE_BASE_PATH.key(), s));
    validateAndResolveCompactFileEntries(tableIdentifier, catalogPartitionSpec)
            .ifPresent(s -> newOptions.put(COMPACT_FILE_ENTRIES.key(), s));
    return newOptions;
}
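The method's structure is a copy-then-merge over the table's options: each resolver returns an Optional and contributes its key only when a value was found, leaving the ResolvedCatalogTable's own option map untouched. The same pattern in isolation; the resolver names, option keys, and values below are hypothetical stand-ins:

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class OptionMergeSketch {
    public static Map<String, String> merge(Map<String, String> existing) {
        // Copy first so the original table's options are never mutated.
        Map<String, String> newOptions = new HashMap<>(existing);
        resolveBasePath().ifPresent(p -> newOptions.put("compact.file-base-path", p));
        resolveFileEntries().ifPresent(e -> newOptions.put("compact.file-entries", e));
        return newOptions;
    }

    // Hypothetical stand-ins for resolveCompactFileBasePath and
    // validateAndResolveCompactFileEntries in the factory above.
    private static Optional<String> resolveBasePath() {
        return Optional.of("/tmp/compact");
    }

    private static Optional<String> resolveFileEntries() {
        return Optional.empty(); // nothing resolved -> the key is simply not added
    }
}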
Use of org.apache.flink.table.catalog.ResolvedCatalogTable in project flink by apache.
From the class TemporalTableSourceSpecSerdeTest, method testTemporalTableSourceSpecSerde:
public static Stream<TemporalTableSourceSpec> testTemporalTableSourceSpecSerde() {
    Map<String, String> options1 = new HashMap<>();
    options1.put("connector", "filesystem");
    options1.put("format", "testcsv");
    options1.put("path", "/tmp");
    final ResolvedSchema resolvedSchema1 =
            new ResolvedSchema(
                    Collections.singletonList(Column.physical("a", DataTypes.BIGINT())),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable1 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema1).build(),
                    null,
                    Collections.emptyList(),
                    options1);
    ResolvedCatalogTable resolvedCatalogTable =
            new ResolvedCatalogTable(catalogTable1, resolvedSchema1);
    RelDataType relDataType1 = FACTORY.createSqlType(SqlTypeName.BIGINT);
    LookupTableSource lookupTableSource = new TestValuesTableFactory.MockedLookupTableSource();
    TableSourceTable tableSourceTable1 =
            new TableSourceTable(
                    null,
                    relDataType1,
                    FlinkStatistic.UNKNOWN(),
                    lookupTableSource,
                    true,
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of("default_catalog", "default_db", "MyTable"),
                            resolvedCatalogTable),
                    FLINK_CONTEXT,
                    new SourceAbilitySpec[] {new LimitPushDownSpec(100)});
    TemporalTableSourceSpec temporalTableSourceSpec1 =
            new TemporalTableSourceSpec(tableSourceTable1);
    return Stream.of(temporalTableSourceSpec1);
}
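A Stream-returning provider like this typically feeds a JUnit 5 parameterized test. A hedged sketch of how it would be consumed; the consuming test's name and body are assumptions, not the actual test from the Flink repository:

import java.util.stream.Stream;
import org.apache.flink.table.planner.plan.nodes.exec.spec.TemporalTableSourceSpec;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

class TemporalTableSourceSpecSerdeSketch {
    // Each TemporalTableSourceSpec emitted by the provider becomes one
    // test invocation; @MethodSource resolves the provider by name within
    // the same class.
    @ParameterizedTest
    @MethodSource("testTemporalTableSourceSpecSerde")
    void testSerde(TemporalTableSourceSpec spec) {
        // A serde round-trip would serialize 'spec' to JSON and assert that
        // deserializing it back yields an equivalent spec.
    }
}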
Use of org.apache.flink.table.catalog.ResolvedCatalogTable in project flink by apache.
From the class ResolvedCatalogTableSerdeTest, method testDontSerializeExternalInlineTable:
@Test
void testDontSerializeExternalInlineTable() {
    SerdeContext serdeCtx = configuredSerdeContext();
    ObjectWriter objectWriter = JsonSerdeUtil.createObjectWriter(serdeCtx);
    assertThatThrownBy(
                    () ->
                            objectWriter.writeValueAsString(
                                    new ResolvedCatalogTable(
                                            new ExternalCatalogTable(
                                                    Schema.newBuilder()
                                                            .fromResolvedSchema(FULL_RESOLVED_SCHEMA)
                                                            .build()),
                                            FULL_RESOLVED_SCHEMA)))
            .satisfies(
                    FlinkAssertions.anyCauseMatches(
                            ValidationException.class,
                            "Cannot serialize the table as it's an external inline table"));
}
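For background, an "external inline table" is the anonymous kind of table created by bridging APIs rather than registered in a catalog; to my understanding, StreamTableEnvironment.fromDataStream is one source of such tables, though treat that origin as an assumption rather than something this test states:

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class ExternalInlineTableSketch {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Bridging a DataStream yields an anonymous inline table. Such tables
        // are backed by an ExternalCatalogTable and therefore cannot appear in
        // a serialized plan -- the case the test above asserts is rejected.
        Table inline = tEnv.fromDataStream(env.fromElements("a", "b", "c"));
        inline.printSchema();
    }
}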