Use of org.apache.flink.table.catalog.CatalogManager in project flink by apache.
The class CatalogITCase, method testGetTablesFromGivenCatalogDatabase.
@Test
public void testGetTablesFromGivenCatalogDatabase() throws Exception {
    final Catalog c1 = new GenericInMemoryCatalog("c1", "default");
    final Catalog c2 = new GenericInMemoryCatalog("c2", "d2");
    final CatalogManager catalogManager =
            CatalogManagerMocks.preparedCatalogManager().defaultCatalog("c2", c2).build();
    catalogManager.registerCatalog("c1", c1);
    final CatalogTable catalogTable =
            CatalogTable.of(Schema.newBuilder().build(), null, new ArrayList<>(), new HashMap<>());
    c1.createDatabase("d1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    c1.createTable(new ObjectPath("d1", "t1"), catalogTable, true);
    c2.createTable(new ObjectPath(catalogManager.getCurrentDatabase(), "t2"), catalogTable, true);
    assertThat(catalogManager.getCurrentCatalog()).isEqualTo("c2");
    assertThat(catalogManager.getCurrentDatabase()).isEqualTo("d2");
    assertThat(catalogManager.listTables()).containsExactlyInAnyOrder("t2");
    assertThat(catalogManager.listTables("c1", "d1")).containsExactlyInAnyOrder("t1");
}
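The same manager can also qualify a partial table path against the current catalog and database. A minimal follow-up sketch, continuing inside the test above; the exact return type of CatalogManager.getTable has varied across Flink releases, so treat the lookup assertion as version-dependent:

    // Fully qualify "t1" through catalog "c1" and database "d1", then look it up.
    ObjectIdentifier id = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of("c1", "d1", "t1"));
    assertThat(catalogManager.getTable(id)).isPresent();

    // A bare table name is qualified against the current catalog/database, here "c2"."d2".
    ObjectIdentifier t2 = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of("t2"));
    assertThat(t2.asSummaryString()).isEqualTo("c2.d2.t2");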
Use of org.apache.flink.table.catalog.CatalogManager in project flink by apache.
The class ContextResolvedTableJsonDeserializer, method deserialize.
@Override
public ContextResolvedTable deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    final CatalogPlanRestore planRestoreOption =
            SerdeContext.get(ctx).getConfiguration().get(PLAN_RESTORE_CATALOG_OBJECTS);
    final CatalogManager catalogManager =
            SerdeContext.get(ctx).getFlinkContext().getCatalogManager();
    final ObjectNode objectNode = jsonParser.readValueAsTree();
    // Deserialize the two fields, if available
    final ObjectIdentifier identifier =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_IDENTIFIER, ObjectIdentifier.class, jsonParser.getCodec(), ctx)
                    .orElse(null);
    ResolvedCatalogTable resolvedCatalogTable =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_CATALOG_TABLE, ResolvedCatalogTable.class, jsonParser.getCodec(), ctx)
                    .orElse(null);
    if (identifier == null && resolvedCatalogTable == null) {
        throw new ValidationException(
                String.format(
                        "The input JSON is invalid because it doesn't contain '%s', nor the '%s'.",
                        FIELD_NAME_IDENTIFIER, FIELD_NAME_CATALOG_TABLE));
    }
    if (identifier == null) {
        if (isLookupForced(planRestoreOption)) {
            throw missingIdentifier();
        }
        return ContextResolvedTable.anonymous(resolvedCatalogTable);
    }
    Optional<ContextResolvedTable> contextResolvedTableFromCatalog =
            isLookupEnabled(planRestoreOption) ? catalogManager.getTable(identifier) : Optional.empty();
    // If we have a schema from both the plan and the catalog, we need to check that they match.
    if (contextResolvedTableFromCatalog.isPresent() && resolvedCatalogTable != null) {
        ResolvedSchema schemaFromPlan = resolvedCatalogTable.getResolvedSchema();
        ResolvedSchema schemaFromCatalog = contextResolvedTableFromCatalog.get().getResolvedSchema();
        if (!areResolvedSchemasEqual(schemaFromPlan, schemaFromCatalog)) {
            throw schemaNotMatching(identifier, schemaFromPlan, schemaFromCatalog);
        }
    }
    if (resolvedCatalogTable == null || isLookupForced(planRestoreOption)) {
        if (!isLookupEnabled(planRestoreOption)) {
            throw lookupDisabled(identifier);
        }
        // We use what is stored inside the catalog
        return contextResolvedTableFromCatalog.orElseThrow(
                () -> missingTableFromCatalog(identifier, isLookupForced(planRestoreOption)));
    }
    if (contextResolvedTableFromCatalog.isPresent()) {
        // If the plan carries no options map, the table was serialized with
        // CatalogPlanCompilation == SCHEMA, so we just need to return the catalog query result
        if (objectNode.at("/" + FIELD_NAME_CATALOG_TABLE + "/" + OPTIONS).isMissingNode()) {
            return contextResolvedTableFromCatalog.get();
        }
        return contextResolvedTableFromCatalog
                .flatMap(ContextResolvedTable::getCatalog)
                .map(c -> ContextResolvedTable.permanent(identifier, c, resolvedCatalogTable))
                .orElseGet(() -> ContextResolvedTable.temporary(identifier, resolvedCatalogTable));
    }
    return ContextResolvedTable.temporary(identifier, resolvedCatalogTable);
}
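All of this branching is driven by one option, table.plan.restore.catalog-objects, whose CatalogPlanRestore values are ALL (plan metadata preferred, catalog lookup as fallback), ALL_ENFORCED (plan only, lookup disabled) and IDENTIFIER (lookup forced, plan table ignored). A minimal sketch of setting it, assuming a Flink 1.15+ TableEnvironment where TableConfig accepts ConfigOption values directly:

    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());

    // With IDENTIFIER, deserialize() above must resolve the identifier in the catalog
    // and throws missingTableFromCatalog(...) when the table is absent.
    tEnv.getConfig().set(
            TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS,
            TableConfigOptions.CatalogPlanRestore.IDENTIFIER);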
Use of org.apache.flink.table.catalog.CatalogManager in project zeppelin by apache.
The class TableEnvFactory, method createJavaFlinkStreamTableEnvironment.
public TableEnvironment createJavaFlinkStreamTableEnvironment(EnvironmentSettings settings, ClassLoader classLoader) {
    try {
        ImmutablePair<Object, Object> pair = flinkShims.createPlannerAndExecutor(
                classLoader, settings, senv.getJavaEnv(),
                oldPlannerBatchTableConfig, functionCatalog, catalogManager);
        Planner planner = (Planner) pair.left;
        Executor executor = (Executor) pair.right;
        Class<?> clazz = Class.forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
        try {
            Constructor<?> constructor = clazz.getConstructor(
                    CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class,
                    org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class,
                    Planner.class, Executor.class, boolean.class);
            return (TableEnvironment) constructor.newInstance(
                    oldPlannerCatalogManager, moduleManager, oldPlannerFunctionCatalog, oldPlannerStreamTableConfig,
                    senv.getJavaEnv(), planner, executor, settings.isStreamingMode());
        } catch (NoSuchMethodException e) {
            // Flink 1.11.1 changed the constructor signature, see FLINK-18419
            Constructor<?> constructor = clazz.getConstructor(
                    CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class,
                    org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class,
                    Planner.class, Executor.class, boolean.class, ClassLoader.class);
            return (TableEnvironment) constructor.newInstance(
                    oldPlannerCatalogManager, moduleManager, oldPlannerFunctionCatalog, oldPlannerStreamTableConfig,
                    senv.getJavaEnv(), planner, executor, settings.isStreamingMode(), classLoader);
        }
    } catch (Exception e) {
        throw new TableException("Fail to createJavaFlinkStreamTableEnvironment", e);
    }
}
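The inner try/catch is a small binary-compatibility idiom: try the pre-1.11.1 constructor first and, on NoSuchMethodException, retry with the post-FLINK-18419 signature that takes an extra ClassLoader. A stripped-down sketch of the same pattern with hypothetical names (com.example.Greeter is illustrative, not a real class):

    import java.lang.reflect.Constructor;

    // Instantiate a class whose constructor gained a ClassLoader parameter in a later
    // release, without compiling against either signature.
    static Object newGreeterCompat(ClassLoader cl) throws Exception {
        Class<?> clazz = Class.forName("com.example.Greeter", true, cl);
        try {
            // Older signature first...
            Constructor<?> ctor = clazz.getConstructor(String.class);
            return ctor.newInstance("hello");
        } catch (NoSuchMethodException e) {
            // ...then the newer one that also takes the ClassLoader.
            Constructor<?> ctor = clazz.getConstructor(String.class, ClassLoader.class);
            return ctor.newInstance("hello", cl);
        }
    }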
Use of org.apache.flink.table.catalog.CatalogManager in project zeppelin by apache.
The class TableEnvFactory, method createScalaBlinkStreamTableEnvironment.
public TableEnvironment createScalaBlinkStreamTableEnvironment(EnvironmentSettings settings, ClassLoader classLoader) {
    try {
        ImmutablePair<Object, Object> pair = flinkShims.createPlannerAndExecutor(
                classLoader, settings, senv.getJavaEnv(),
                streamTableConfig, functionCatalog, catalogManager);
        Planner planner = (Planner) pair.left;
        Executor executor = (Executor) pair.right;
        Class<?> clazz = Class.forName("org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl");
        try {
            Constructor<?> constructor = clazz.getConstructor(
                    CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class,
                    org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                    Planner.class, Executor.class, boolean.class);
            return (TableEnvironment) constructor.newInstance(
                    catalogManager, moduleManager, functionCatalog, streamTableConfig,
                    senv, planner, executor, settings.isStreamingMode());
        } catch (NoSuchMethodException e) {
            // Flink 1.11.1 changed the constructor signature, see FLINK-18419
            Constructor<?> constructor = clazz.getConstructor(
                    CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class,
                    org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                    Planner.class, Executor.class, boolean.class, ClassLoader.class);
            return (TableEnvironment) constructor.newInstance(
                    catalogManager, moduleManager, functionCatalog, streamTableConfig,
                    senv, planner, executor, settings.isStreamingMode(), classLoader);
        }
    } catch (Exception e) {
        throw new TableException("Fail to createScalaBlinkStreamTableEnvironment", e);
    }
}
Use of org.apache.flink.table.catalog.CatalogManager in project zeppelin by apache.
The class TableEnvFactory, method createJavaBlinkBatchTableEnvironment.
public TableEnvironment createJavaBlinkBatchTableEnvironment(EnvironmentSettings settings, ClassLoader classLoader) {
    try {
        ImmutablePair<Object, Object> pair = flinkShims.createPlannerAndExecutor(
                classLoader, settings, senv.getJavaEnv(),
                batchTableConfig, functionCatalog, catalogManager);
        Planner planner = (Planner) pair.left;
        Executor executor = (Executor) pair.right;
        Class<?> clazz = Class.forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
        try {
            Constructor<?> constructor = clazz.getConstructor(
                    CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class,
                    StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class);
            return (TableEnvironment) constructor.newInstance(
                    catalogManager, moduleManager, functionCatalog, batchTableConfig,
                    senv.getJavaEnv(), planner, executor, settings.isStreamingMode());
        } catch (NoSuchMethodException e) {
            // Flink 1.11.1 changed the constructor signature, see FLINK-18419
            Constructor<?> constructor = clazz.getConstructor(
                    CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class,
                    StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class, ClassLoader.class);
            return (TableEnvironment) constructor.newInstance(
                    catalogManager, moduleManager, functionCatalog, batchTableConfig,
                    senv.getJavaEnv(), planner, executor, settings.isStreamingMode(), classLoader);
        }
    } catch (Exception e) {
        LOGGER.info(ExceptionUtils.getStackTrace(e));
        throw new TableException("Fail to createJavaBlinkBatchTableEnvironment", e);
    }
}
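A hypothetical call site for this factory method, assuming an already-constructed TableEnvFactory named factory and the Flink 1.11-era EnvironmentSettings builder (useBlinkPlanner() existed then and was removed once the legacy planner was dropped):

    EnvironmentSettings settings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inBatchMode()
            .build();
    TableEnvironment tenv = factory.createJavaBlinkBatchTableEnvironment(
            settings, Thread.currentThread().getContextClassLoader());
    tenv.executeSql("SHOW DATABASES").print();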