Use of org.apache.flink.table.module.ModuleManager in project flink by apache.
From the class StreamTableEnvironmentImpl, method create.
public static StreamTableEnvironment create(
        StreamExecutionEnvironment executionEnvironment,
        EnvironmentSettings settings,
        TableConfig tableConfig) {
    // temporary solution until FLINK-15635 is fixed
    final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    final ModuleManager moduleManager = new ModuleManager();
    final CatalogManager catalogManager =
            CatalogManager.newBuilder()
                    .classLoader(classLoader)
                    .config(tableConfig.getConfiguration())
                    .defaultCatalog(
                            settings.getBuiltInCatalogName(),
                            new GenericInMemoryCatalog(
                                    settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
                    .executionConfig(executionEnvironment.getConfig())
                    .build();
    final FunctionCatalog functionCatalog =
            new FunctionCatalog(tableConfig, catalogManager, moduleManager);
    final Executor executor = lookupExecutor(classLoader, settings.getExecutor(), executionEnvironment);
    final Planner planner =
            PlannerFactoryUtil.createPlanner(
                    settings.getPlanner(), executor, tableConfig, moduleManager, catalogManager, functionCatalog);
    return new StreamTableEnvironmentImpl(
            catalogManager, moduleManager, functionCatalog, tableConfig, executionEnvironment,
            planner, executor, settings.isStreamingMode(), classLoader);
}
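For orientation, here is a minimal standalone sketch of the ModuleManager that create() instantiates above. It assumes only flink-table-api-java on the classpath; the calls (listModules, getFunctionDefinition) follow the ModuleManager API as used in this snippet, and the printed output is illustrative.

import org.apache.flink.table.module.ModuleManager;

public class ModuleManagerSketch {
    public static void main(String[] args) {
        // The no-arg constructor used in create() registers the built-in core module.
        ModuleManager moduleManager = new ModuleManager();

        // List the names of the currently loaded modules ("core" out of the box).
        System.out.println(moduleManager.listModules());

        // Function resolution consults the loaded modules in order; built-in
        // functions such as CONCAT are served by the core module.
        moduleManager.getFunctionDefinition("CONCAT")
                .ifPresent(def -> System.out.println("Resolved: " + def));
    }
}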
Use of org.apache.flink.table.module.ModuleManager in project flink by apache.
From the class LookupKeySerdeTest, method testLookupKey.
@Test
public void testLookupKey() throws IOException {
    TableConfig tableConfig = TableConfig.getDefault();
    ModuleManager moduleManager = new ModuleManager();
    CatalogManager catalogManager =
            CatalogManager.newBuilder()
                    .classLoader(Thread.currentThread().getContextClassLoader())
                    .config(tableConfig.getConfiguration())
                    .defaultCatalog("default_catalog", new GenericInMemoryCatalog("default_db"))
                    .build();
    FlinkContext flinkContext =
            new FlinkContextImpl(
                    false, tableConfig, moduleManager,
                    new FunctionCatalog(tableConfig, catalogManager, moduleManager),
                    catalogManager, null);
    SerdeContext serdeCtx =
            new SerdeContext(
                    null, flinkContext, Thread.currentThread().getContextClassLoader(),
                    FlinkTypeFactory.INSTANCE(), FlinkSqlOperatorTable.instance());
    ObjectReader objectReader = JsonSerdeUtil.createObjectReader(serdeCtx);
    ObjectWriter objectWriter = JsonSerdeUtil.createObjectWriter(serdeCtx);
    LookupJoinUtil.LookupKey[] lookupKeys =
            new LookupJoinUtil.LookupKey[] {
                new LookupJoinUtil.ConstantLookupKey(
                        new BigIntType(), new RexBuilder(FlinkTypeFactory.INSTANCE()).makeLiteral("a")),
                new LookupJoinUtil.FieldRefLookupKey(3)
            };
    for (LookupJoinUtil.LookupKey lookupKey : lookupKeys) {
        LookupJoinUtil.LookupKey result =
                objectReader.readValue(objectWriter.writeValueAsString(lookupKey), LookupJoinUtil.LookupKey.class);
        assertEquals(lookupKey, result);
    }
}
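The test body above is a write-then-read round-trip. A small generic helper in the same spirit is sketched below; the roundTrip name is ours, not part of Flink, and the ObjectReader/ObjectWriter calls are plain Jackson.

import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

import java.io.IOException;

import static org.junit.Assert.assertEquals;

final class SerdeRoundTrip {
    private SerdeRoundTrip() {}

    /** Serializes the value, reads it back as the given type, and asserts equality. */
    static <T> void roundTrip(ObjectWriter writer, ObjectReader reader, T value, Class<T> type)
            throws IOException {
        String json = writer.writeValueAsString(value);
        T restored = reader.forType(type).readValue(json);
        assertEquals(value, restored);
    }
}

With such a helper, the loop body above reduces to roundTrip(objectWriter, objectReader, lookupKey, LookupJoinUtil.LookupKey.class).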
Use of org.apache.flink.table.module.ModuleManager in project flink by apache.
From the class LogicalWindowSerdeTest, method testLogicalWindowSerde.
@Test
public void testLogicalWindowSerde() throws IOException {
    SerdeContext serdeCtx =
            new SerdeContext(
                    null,
                    new FlinkContextImpl(
                            false, TableConfig.getDefault(), new ModuleManager(),
                            null, CatalogManagerMocks.createEmptyCatalogManager(), null),
                    Thread.currentThread().getContextClassLoader(),
                    FlinkTypeFactory.INSTANCE(), FlinkSqlOperatorTable.instance());
    ObjectReader objectReader = JsonSerdeUtil.createObjectReader(serdeCtx);
    ObjectWriter objectWriter = JsonSerdeUtil.createObjectWriter(serdeCtx);
    assertEquals(objectReader.readValue(objectWriter.writeValueAsString(window), LogicalWindow.class), window);
}
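The window value referenced here is supplied by the test class itself (in Flink's test it arrives as a per-case parameter). Below is a hedged JUnit 4 skeleton of that kind of parameterized wiring, using placeholder string cases and a plain Jackson ObjectMapper rather than Flink's LogicalWindow instances and JsonSerdeUtil.

import static org.junit.Assert.assertEquals;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.Arrays;
import java.util.Collection;

@RunWith(Parameterized.class)
public class WindowSerdeSkeleton {

    @Parameterized.Parameters(name = "{0}")
    public static Collection<String> windows() {
        return Arrays.asList("tumble", "hop", "session"); // placeholder cases
    }

    @Parameterized.Parameter
    public String window;

    @Test
    public void roundTrip() throws Exception {
        // Placeholder serde: in the Flink test the reader/writer come from JsonSerdeUtil.
        ObjectMapper mapper = new ObjectMapper();
        assertEquals(window, mapper.readValue(mapper.writeValueAsString(window), String.class));
    }
}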
Use of org.apache.flink.table.module.ModuleManager in project zeppelin by apache.
From the class TableEnvFactory, method createJavaBlinkBatchTableEnvironment.
public TableEnvironment createJavaBlinkBatchTableEnvironment(
        EnvironmentSettings settings, ClassLoader classLoader) {
    try {
        ImmutablePair<Object, Object> pair =
                flinkShims.createPlannerAndExecutor(
                        classLoader, settings, senv.getJavaEnv(),
                        batchTableConfig, functionCatalog, catalogManager);
        Planner planner = (Planner) pair.left;
        Executor executor = (Executor) pair.right;
        Class clazz =
                Class.forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
        try {
            Constructor constructor =
                    clazz.getConstructor(
                            CatalogManager.class, ModuleManager.class, FunctionCatalog.class,
                            TableConfig.class, StreamExecutionEnvironment.class,
                            Planner.class, Executor.class, boolean.class);
            return (TableEnvironment)
                    constructor.newInstance(
                            catalogManager, moduleManager, functionCatalog, batchTableConfig,
                            senv.getJavaEnv(), planner, executor, settings.isStreamingMode());
        } catch (NoSuchMethodException e) {
            // Flink 1.11.1 changed the constructor signature, see FLINK-18419
            Constructor constructor =
                    clazz.getConstructor(
                            CatalogManager.class, ModuleManager.class, FunctionCatalog.class,
                            TableConfig.class, StreamExecutionEnvironment.class,
                            Planner.class, Executor.class, boolean.class, ClassLoader.class);
            return (TableEnvironment)
                    constructor.newInstance(
                            catalogManager, moduleManager, functionCatalog, batchTableConfig,
                            senv.getJavaEnv(), planner, executor, settings.isStreamingMode(), classLoader);
        }
    } catch (Exception e) {
        LOGGER.info(ExceptionUtils.getStackTrace(e));
        throw new TableException("Fail to createJavaBlinkBatchTableEnvironment", e);
    }
}
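The nested try/catch is a version-compatibility shim: if the pre-1.11.1 constructor cannot be found, the code retries with the post-FLINK-18419 signature that takes an extra ClassLoader argument. The following self-contained sketch shows the same fallback pattern against a toy target class instead of Flink's StreamTableEnvironmentImpl; all names here are illustrative.

import java.lang.reflect.Constructor;

public class ConstructorFallbackSketch {

    /** Toy stand-in for the reflected Flink class; it only offers the newer signature. */
    public static class Target {
        public Target(String name, ClassLoader loader) {
            System.out.println("constructed with ClassLoader");
        }
    }

    public static void main(String[] args) throws Exception {
        Class<?> clazz = Class.forName(ConstructorFallbackSketch.class.getName() + "$Target");
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        Object instance;
        try {
            // Older signature first, mirroring the original code above.
            Constructor<?> constructor = clazz.getConstructor(String.class);
            instance = constructor.newInstance("batch");
        } catch (NoSuchMethodException e) {
            // Newer signature with the extra ClassLoader parameter (cf. FLINK-18419).
            Constructor<?> constructor = clazz.getConstructor(String.class, ClassLoader.class);
            instance = constructor.newInstance("batch", classLoader);
        }
        System.out.println("Created " + instance.getClass().getSimpleName());
    }
}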
Use of org.apache.flink.table.module.ModuleManager in project zeppelin by apache.
From the class TableEnvFactory, method createScalaFlinkStreamTableEnvironment.
public TableEnvironment createScalaFlinkStreamTableEnvironment(
        EnvironmentSettings settings, ClassLoader classLoader) {
    try {
        ImmutablePair<Object, Object> pair =
                flinkShims.createPlannerAndExecutor(
                        classLoader, settings, senv.getJavaEnv(),
                        oldPlannerStreamTableConfig, functionCatalog, catalogManager);
        Planner planner = (Planner) pair.left;
        Executor executor = (Executor) pair.right;
        Class clazz =
                Class.forName("org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl");
        try {
            Constructor constructor =
                    clazz.getConstructor(
                            CatalogManager.class, ModuleManager.class, FunctionCatalog.class,
                            TableConfig.class,
                            org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                            Planner.class, Executor.class, boolean.class);
            return (TableEnvironment)
                    constructor.newInstance(
                            oldPlannerCatalogManager, moduleManager, oldPlannerFunctionCatalog,
                            oldPlannerStreamTableConfig, senv, planner, executor, settings.isStreamingMode());
        } catch (NoSuchMethodException e) {
            // Flink 1.11.1 changed the constructor signature, see FLINK-18419
            Constructor constructor =
                    clazz.getConstructor(
                            CatalogManager.class, ModuleManager.class, FunctionCatalog.class,
                            TableConfig.class,
                            org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                            Planner.class, Executor.class, boolean.class, ClassLoader.class);
            return (TableEnvironment)
                    constructor.newInstance(
                            oldPlannerCatalogManager, moduleManager, oldPlannerFunctionCatalog,
                            oldPlannerStreamTableConfig, senv, planner, executor,
                            settings.isStreamingMode(), classLoader);
        }
    } catch (Exception e) {
        throw new TableException("Fail to createScalaFlinkStreamTableEnvironment", e);
    }
}
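In both factory methods the planner/executor pair comes back from the shim as an untyped ImmutablePair, which is why each branch casts pair.left and pair.right. A tiny sketch of that commons-lang3 type, with string stand-ins for the planner and executor:

import org.apache.commons.lang3.tuple.ImmutablePair;

public class PairCastSketch {
    public static void main(String[] args) {
        // String stand-ins for the Planner and Executor returned by the shim.
        ImmutablePair<Object, Object> pair = ImmutablePair.of("plannerStub", "executorStub");
        String planner = (String) pair.left;   // left/right are public final fields
        String executor = (String) pair.right;
        System.out.println(planner + " / " + executor);
    }
}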