Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class HiveLookupJoinITCase, method getLookupFunction.
private FileSystemLookupFunction<HiveTablePartition> getLookupFunction(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier =
            ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable =
            (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    HiveLookupTableSource hiveTableSource =
            (HiveLookupTableSource)
                    FactoryUtil.createDynamicTableSource(
                            (DynamicTableSourceFactory)
                                    hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
                            tableIdentifier,
                            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
                            tableEnv.getConfig().getConfiguration(),
                            Thread.currentThread().getContextClassLoader(),
                            false);
    FileSystemLookupFunction<HiveTablePartition> lookupFunction =
            (FileSystemLookupFunction<HiveTablePartition>)
                    hiveTableSource.getLookupFunction(new int[][] {{0}});
    return lookupFunction;
}
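For reference, the identifier handling this helper relies on is simple: ObjectIdentifier.of(...) builds the fully qualified three-part name, and toObjectPath() drops the catalog part for Catalog API calls such as getTable. A minimal sketch, with placeholder catalog and table names:

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

public class ObjectIdentifierSketch {
    public static void main(String[] args) {
        // Fully qualified three-part name: catalog.database.object
        ObjectIdentifier id = ObjectIdentifier.of("myHiveCatalog", "default", "partition_table");
        // Catalog API methods such as Catalog#getTable take an ObjectPath,
        // i.e. the database/object pair without the catalog name.
        ObjectPath path = id.toObjectPath();
        System.out.println(id.asSummaryString()); // myHiveCatalog.default.partition_table
        System.out.println(path.getFullName());  // default.partition_table
    }
}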
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class HiveParserDMLHelper, method createInsertOperationInfo.
public Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> createInsertOperationInfo(
        RelNode queryRelNode,
        Table destTable,
        Map<String, String> staticPartSpec,
        List<String> destSchema,
        boolean overwrite)
        throws SemanticException {
    // sanity check
    Preconditions.checkArgument(
            queryRelNode instanceof Project
                    || queryRelNode instanceof Sort
                    || queryRelNode instanceof LogicalDistribution,
            "Expect top RelNode to be Project, Sort, or LogicalDistribution, actually got "
                    + queryRelNode);
    if (!(queryRelNode instanceof Project)) {
        RelNode parent = ((SingleRel) queryRelNode).getInput();
        // SEL + SORT or SEL + DIST + LIMIT
        Preconditions.checkArgument(
                parent instanceof Project || parent instanceof LogicalDistribution,
                "Expect input to be a Project or LogicalDistribution, actually got " + parent);
        if (parent instanceof LogicalDistribution) {
            RelNode grandParent = ((LogicalDistribution) parent).getInput();
            Preconditions.checkArgument(
                    grandParent instanceof Project,
                    "Expect input of LogicalDistribution to be a Project, actually got "
                            + grandParent);
        }
    }
    // handle dest schema, e.g. insert into dest(.,.,.) select ...
    queryRelNode =
            handleDestSchema((SingleRel) queryRelNode, destTable, destSchema, staticPartSpec.keySet());
    // track each target col and its expected type
    RelDataTypeFactory typeFactory = plannerContext.getTypeFactory();
    LinkedHashMap<String, RelDataType> targetColToCalcType = new LinkedHashMap<>();
    List<TypeInfo> targetHiveTypes = new ArrayList<>();
    List<FieldSchema> allCols = new ArrayList<>(destTable.getCols());
    allCols.addAll(destTable.getPartCols());
    for (FieldSchema col : allCols) {
        TypeInfo hiveType = TypeInfoUtils.getTypeInfoFromTypeString(col.getType());
        targetHiveTypes.add(hiveType);
        targetColToCalcType.put(col.getName(), HiveParserTypeConverter.convert(hiveType, typeFactory));
    }
    // add static partitions to query source
    if (!staticPartSpec.isEmpty()) {
        if (queryRelNode instanceof Project) {
            queryRelNode =
                    replaceProjectForStaticPart(
                            (Project) queryRelNode, staticPartSpec, destTable, targetColToCalcType);
        } else if (queryRelNode instanceof Sort) {
            Sort sort = (Sort) queryRelNode;
            RelNode oldInput = sort.getInput();
            RelNode newInput;
            if (oldInput instanceof LogicalDistribution) {
                newInput =
                        replaceDistForStaticParts(
                                (LogicalDistribution) oldInput,
                                destTable,
                                staticPartSpec,
                                targetColToCalcType);
            } else {
                newInput =
                        replaceProjectForStaticPart(
                                (Project) oldInput, staticPartSpec, destTable, targetColToCalcType);
                // we may need to shift the field collations
                final int numDynmPart =
                        destTable.getTTable().getPartitionKeys().size() - staticPartSpec.size();
                if (!sort.getCollation().getFieldCollations().isEmpty() && numDynmPart > 0) {
                    sort.replaceInput(0, null);
                    sort =
                            LogicalSort.create(
                                    newInput,
                                    shiftRelCollation(
                                            sort.getCollation(),
                                            (Project) oldInput,
                                            staticPartSpec.size(),
                                            numDynmPart),
                                    sort.offset,
                                    sort.fetch);
                }
            }
            sort.replaceInput(0, newInput);
            queryRelNode = sort;
        } else {
            queryRelNode =
                    replaceDistForStaticParts(
                            (LogicalDistribution) queryRelNode,
                            destTable,
                            staticPartSpec,
                            targetColToCalcType);
        }
    }
    // add type conversions
    queryRelNode =
            addTypeConversions(
                    plannerContext.getCluster().getRexBuilder(),
                    queryRelNode,
                    new ArrayList<>(targetColToCalcType.values()),
                    targetHiveTypes,
                    funcConverter);
    // create identifier
    List<String> targetTablePath = Arrays.asList(destTable.getDbName(), destTable.getTableName());
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(targetTablePath);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return Tuple4.of(identifier, new PlannerQueryOperation(queryRelNode), staticPartSpec, overwrite);
}
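The identifier creation at the end follows the standard resolution flow: the two-part Hive path (database, table) is wrapped in an UnresolvedIdentifier, and CatalogManager#qualifyIdentifier completes it with the session's current catalog to yield a full ObjectIdentifier. A hedged sketch of that step in isolation (the helper method is illustrative, not part of HiveParserDMLHelper):

import java.util.Arrays;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.UnresolvedIdentifier;

final class IdentifierQualification {
    // Illustrative helper: qualify a db.table path against the session's current catalog.
    static ObjectIdentifier qualify(CatalogManager catalogManager, String db, String table) {
        UnresolvedIdentifier unresolved = UnresolvedIdentifier.of(Arrays.asList(db, table));
        // With a two-part path, qualifyIdentifier fills in the current catalog name.
        return catalogManager.qualifyIdentifier(unresolved);
    }
}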
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class DynamicTableSinkSpecSerdeTest, method testDynamicTableSinkSpecSerdeWithEnrichmentOptions.
@Test
void testDynamicTableSinkSpecSerdeWithEnrichmentOptions() throws Exception {
    // Test model
    ObjectIdentifier identifier =
            ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "my_table");
    String formatPrefix = FactoryUtil.getFormatPrefix(FORMAT, TestFormatFactory.IDENTIFIER);

    Map<String, String> planOptions = new HashMap<>();
    planOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    planOptions.put(TARGET.key(), "abc");
    planOptions.put(BUFFER_SIZE.key(), "1000");
    planOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    planOptions.put(formatPrefix + DELIMITER.key(), "|");

    Map<String, String> catalogOptions = new HashMap<>();
    catalogOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    catalogOptions.put(TARGET.key(), "xyz");
    catalogOptions.put(BUFFER_SIZE.key(), "2000");
    catalogOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    catalogOptions.put(formatPrefix + DELIMITER.key(), ",");

    ResolvedCatalogTable planResolvedCatalogTable = tableWithOnlyPhysicalColumns(planOptions);
    ResolvedCatalogTable catalogResolvedCatalogTable = tableWithOnlyPhysicalColumns(catalogOptions);

    // Create planner mocks
    PlannerMocks plannerMocks =
            PlannerMocks.create(
                    new Configuration()
                            .set(PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL)
                            .set(PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.ALL));
    CatalogManager catalogManager = plannerMocks.getCatalogManager();
    catalogManager.createTable(catalogResolvedCatalogTable, identifier, false);

    // Mock the context
    SerdeContext serdeCtx = configuredSerdeContext(catalogManager, plannerMocks.getTableConfig());

    DynamicTableSinkSpec planSpec =
            new DynamicTableSinkSpec(
                    ContextResolvedTable.permanent(
                            identifier,
                            catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get(),
                            planResolvedCatalogTable),
                    Collections.emptyList());

    String actualJson = toJson(serdeCtx, planSpec);
    DynamicTableSinkSpec actual = toObject(serdeCtx, actualJson, DynamicTableSinkSpec.class);
    assertThat(actual.getContextResolvedTable()).isEqualTo(planSpec.getContextResolvedTable());
    assertThat(actual.getSinkAbilities()).isNull();

    TestDynamicTableFactory.DynamicTableSinkMock dynamicTableSink =
            (TestDynamicTableFactory.DynamicTableSinkMock)
                    actual.getTableSink(plannerMocks.getPlannerContext().getFlinkContext());
    assertThat(dynamicTableSink.target).isEqualTo("abc");
    assertThat(dynamicTableSink.bufferSize).isEqualTo(2000);
    assertThat(((TestFormatFactory.EncodingFormatMock) dynamicTableSink.valueFormat).delimiter)
            .isEqualTo(",");
}
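The final assertions encode the enrichment precedence this test exercises: the plan value wins for target ("abc"), while buffer-size and the format delimiter are refreshed from the catalog table ("2000", ","), presumably because only the latter are declared forwardable by the test factory. The identifier itself round-trips through the plan JSON; its two string forms are easy to confuse, so here is a small sketch, assuming the builtin defaults are named default_catalog/default_database as the constants above suggest:

import org.apache.flink.table.catalog.ObjectIdentifier;

final class IdentifierStringForms {
    public static void main(String[] args) {
        ObjectIdentifier id =
                ObjectIdentifier.of("default_catalog", "default_database", "my_table");
        // Escaped form, safe to embed in SQL text and serialized plans:
        System.out.println(id.asSerializableString()); // `default_catalog`.`default_database`.`my_table`
        // Unescaped form for logs and error messages:
        System.out.println(id.asSummaryString()); // default_catalog.default_database.my_table
    }
}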
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class DynamicTableSourceSpecSerdeTest, method testDynamicTableSourceSpecSerdeWithEnrichmentOptions.
@Test
void testDynamicTableSourceSpecSerdeWithEnrichmentOptions() throws Exception {
    // Test model
    ObjectIdentifier identifier =
            ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "my_table");
    String formatPrefix = FactoryUtil.getFormatPrefix(FORMAT, TestFormatFactory.IDENTIFIER);

    Map<String, String> planOptions = new HashMap<>();
    planOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    planOptions.put(TARGET.key(), "abc");
    planOptions.put(PASSWORD.key(), "abc");
    planOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    planOptions.put(formatPrefix + DELIMITER.key(), "|");

    Map<String, String> catalogOptions = new HashMap<>();
    catalogOptions.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    catalogOptions.put(TARGET.key(), "abc");
    catalogOptions.put(PASSWORD.key(), "xyz");
    catalogOptions.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    catalogOptions.put(formatPrefix + DELIMITER.key(), ",");

    ResolvedCatalogTable planResolvedCatalogTable = tableWithOnlyPhysicalColumns(planOptions);
    ResolvedCatalogTable catalogResolvedCatalogTable = tableWithOnlyPhysicalColumns(catalogOptions);

    // Create planner mocks
    PlannerMocks plannerMocks =
            PlannerMocks.create(
                    new Configuration()
                            .set(PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL)
                            .set(PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.ALL));
    CatalogManager catalogManager = plannerMocks.getCatalogManager();
    catalogManager.createTable(catalogResolvedCatalogTable, identifier, false);

    // Mock the context
    SerdeContext serdeCtx = configuredSerdeContext(catalogManager, plannerMocks.getTableConfig());

    DynamicTableSourceSpec planSpec =
            new DynamicTableSourceSpec(
                    ContextResolvedTable.permanent(
                            identifier,
                            catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get(),
                            planResolvedCatalogTable),
                    Collections.emptyList());

    String actualJson = toJson(serdeCtx, planSpec);
    DynamicTableSourceSpec actual = toObject(serdeCtx, actualJson, DynamicTableSourceSpec.class);
    assertThat(actual.getContextResolvedTable()).isEqualTo(planSpec.getContextResolvedTable());
    assertThat(actual.getSourceAbilities()).isNull();

    TestDynamicTableFactory.DynamicTableSourceMock dynamicTableSource =
            (TestDynamicTableFactory.DynamicTableSourceMock)
                    actual.getScanTableSource(plannerMocks.getPlannerContext().getFlinkContext());
    assertThat(dynamicTableSource.password).isEqualTo("xyz");
    assertThat(((TestFormatFactory.DecodingFormatMock) dynamicTableSource.valueFormat).delimiter)
            .isEqualTo(",");
}
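Both serde tests hinge on the assertion that the ContextResolvedTable survives a JSON round trip intact, which in turn relies on ObjectIdentifier being an immutable value object. A minimal sketch of that property (the names are placeholders):

import org.apache.flink.table.catalog.ObjectIdentifier;

final class IdentifierEquality {
    public static void main(String[] args) {
        ObjectIdentifier a = ObjectIdentifier.of("default_catalog", "default_database", "my_table");
        ObjectIdentifier b = ObjectIdentifier.of("default_catalog", "default_database", "my_table");
        // equals/hashCode compare the catalog, database, and object names, so two
        // independently built identifiers for the same table compare equal.
        System.out.println(a.equals(b));                  // true
        System.out.println(a.hashCode() == b.hashCode()); // true
    }
}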
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class TestManagedTableFactory, method onCompactTable.
@Override
public Map<String, String> onCompactTable(
        Context context, CatalogPartitionSpec catalogPartitionSpec) {
    ObjectIdentifier tableIdentifier = context.getObjectIdentifier();
    ResolvedCatalogTable table = context.getCatalogTable();
    Map<String, String> newOptions = new HashMap<>(table.getOptions());
    resolveCompactFileBasePath(tableIdentifier)
            .ifPresent(s -> newOptions.put(COMPACT_FILE_BASE_PATH.key(), s));
    validateAndResolveCompactFileEntries(tableIdentifier, catalogPartitionSpec)
            .ifPresent(s -> newOptions.put(COMPACT_FILE_ENTRIES.key(), s));
    return newOptions;
}
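The identifier returned by Context#getObjectIdentifier is what the resolve helpers above use to look up per-table compaction state, and since identifiers compare by value (see the previous sketch) they work directly as map keys. A hedged sketch of that pattern; the registry and helper below are illustrative, not the factory's actual fields:

import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.flink.table.catalog.ObjectIdentifier;

final class CompactPathRegistry {
    // Illustrative registry: per-table compaction base paths keyed by identifier.
    static final Map<ObjectIdentifier, String> COMPACT_BASE_PATHS = new ConcurrentHashMap<>();

    static Optional<String> resolveCompactFileBasePath(ObjectIdentifier tableIdentifier) {
        // Safe as a key: the identifier's equals/hashCode cover all three name parts.
        return Optional.ofNullable(COMPACT_BASE_PATHS.get(tableIdentifier));
    }
}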