Use of org.apache.flink.table.delegation.Executor in the apache/zeppelin project.
From the class TableEnvFactory, method createJavaBlinkStreamTableEnvironment:
/**
 * Creates a blink {@code StreamTableEnvironmentImpl} for the Java API via reflection.
 *
 * <p>Reflection is required because the constructor signature changed in Flink 1.11.1
 * (FLINK-18419): a trailing {@code ClassLoader} parameter was added. The pre-1.11.1
 * signature is tried first; on {@link NoSuchMethodException} the newer one is used.
 *
 * @param settings environment settings; {@code settings.isStreamingMode()} is forwarded
 *     to the constructor
 * @param classLoader class loader used to create the planner/executor pair and, on
 *     Flink >= 1.11.1, passed to the table environment constructor
 * @return the reflectively constructed {@code TableEnvironment}
 * @throws TableException if class lookup or reflective construction fails
 */
public TableEnvironment createJavaBlinkStreamTableEnvironment(EnvironmentSettings settings, ClassLoader classLoader) {
  try {
    // Planner/executor creation differs across Flink versions, so it is delegated to flinkShims.
    ImmutablePair<Object, Object> pair = flinkShims.createPlannerAndExecutor(classLoader, settings, senv.getJavaEnv(), streamTableConfig, functionCatalog, catalogManager);
    Planner planner = (Planner) pair.left;
    Executor executor = (Executor) pair.right;
    // Use Class<?> rather than the raw Class type.
    Class<?> clazz = Class.forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
    try {
      // Pre-1.11.1 constructor (no ClassLoader parameter).
      Constructor<?> constructor = clazz.getConstructor(CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class);
      return (TableEnvironment) constructor.newInstance(catalogManager, moduleManager, functionCatalog, streamTableConfig, senv.getJavaEnv(), planner, executor, settings.isStreamingMode());
    } catch (NoSuchMethodException e) {
      // Flink 1.11.1 change the constructor signature, FLINK-18419
      Constructor<?> constructor = clazz.getConstructor(CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class, ClassLoader.class);
      return (TableEnvironment) constructor.newInstance(catalogManager, moduleManager, functionCatalog, streamTableConfig, senv.getJavaEnv(), planner, executor, settings.isStreamingMode(), classLoader);
    }
  } catch (Exception e) {
    throw new TableException("Fail to createJavaBlinkStreamTableEnvironment", e);
  }
}
Use of org.apache.flink.table.delegation.Executor in the apache/flink project.
From the class ExecutionContext, method lookupExecutor:
/**
 * Discovers an {@code ExecutorFactory} by identifier and invokes its {@code create}
 * method reflectively to obtain an {@code Executor} bound to the given environment.
 *
 * @param executorIdentifier factory identifier used for discovery
 * @param executionEnvironment stream environment handed to the factory's create method
 * @return the executor produced by the discovered factory
 * @throws TableException if discovery or reflective invocation fails
 */
private Executor lookupExecutor(String executorIdentifier, StreamExecutionEnvironment executionEnvironment) {
  try {
    final ExecutorFactory factory =
        FactoryUtil.discoverFactory(classLoader, ExecutorFactory.class, executorIdentifier);
    // The create(StreamExecutionEnvironment) overload is resolved reflectively on the
    // concrete factory class rather than through the ExecutorFactory interface.
    final Method create =
        factory.getClass().getMethod("create", StreamExecutionEnvironment.class);
    final Object result = create.invoke(factory, executionEnvironment);
    return (Executor) result;
  } catch (Exception e) {
    throw new TableException("Could not instantiate the executor. Make sure a planner module is on the classpath", e);
  }
}
Use of org.apache.flink.table.delegation.Executor in the apache/flink project.
From the class TableEnvironmentImpl, method create:
/**
 * Builds a {@code TableEnvironmentImpl} from the given settings and configuration,
 * wiring together catalog/module/function management, executor discovery, and planner
 * creation.
 *
 * @param settings environment settings (built-in catalog/database names, executor and
 *     planner identifiers, streaming mode flag)
 * @param configuration configuration merged into the table config and used to create
 *     the executor
 * @return a fully wired {@code TableEnvironmentImpl}
 */
private static TableEnvironmentImpl create(EnvironmentSettings settings, Configuration configuration) {
  // temporary solution until FLINK-15635 is fixed
  final ClassLoader userClassLoader = Thread.currentThread().getContextClassLoader();

  // use configuration to init table config
  final TableConfig config = new TableConfig();
  config.addConfiguration(configuration);

  final ModuleManager modules = new ModuleManager();
  final CatalogManager catalogs =
      CatalogManager.newBuilder()
          .classLoader(userClassLoader)
          .config(config.getConfiguration())
          .defaultCatalog(
              settings.getBuiltInCatalogName(),
              new GenericInMemoryCatalog(
                  settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
          .build();
  final FunctionCatalog functions = new FunctionCatalog(config, catalogs, modules);

  // Discover the executor factory by the identifier carried in the settings.
  final ExecutorFactory factory =
      FactoryUtil.discoverFactory(userClassLoader, ExecutorFactory.class, settings.getExecutor());
  final Executor exec = factory.create(configuration);
  final Planner planner =
      PlannerFactoryUtil.createPlanner(
          settings.getPlanner(), exec, config, modules, catalogs, functions);

  return new TableEnvironmentImpl(
      catalogs, modules, config, exec, functions, planner, settings.isStreamingMode(), userClassLoader);
}
Use of org.apache.flink.table.delegation.Executor in the apache/flink project.
From the class StreamTableEnvironmentImpl, method create:
/**
 * Creates a {@code StreamTableEnvironmentImpl} on top of an existing
 * {@code StreamExecutionEnvironment}, sharing its execution config with the catalog
 * manager and looking up the executor through the identifier in the settings.
 *
 * @param executionEnvironment the DataStream execution environment to bridge
 * @param settings environment settings (built-in catalog/database names, executor and
 *     planner identifiers, streaming mode flag)
 * @param tableConfig table configuration used by all created components
 * @return a fully wired {@code StreamTableEnvironmentImpl}
 */
public static StreamTableEnvironment create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
  // temporary solution until FLINK-15635 is fixed
  final ClassLoader userClassLoader = Thread.currentThread().getContextClassLoader();

  final ModuleManager modules = new ModuleManager();
  final CatalogManager catalogs =
      CatalogManager.newBuilder()
          .classLoader(userClassLoader)
          .config(tableConfig.getConfiguration())
          .defaultCatalog(
              settings.getBuiltInCatalogName(),
              new GenericInMemoryCatalog(
                  settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()))
          .executionConfig(executionEnvironment.getConfig())
          .build();
  final FunctionCatalog functions = new FunctionCatalog(tableConfig, catalogs, modules);

  final Executor exec =
      lookupExecutor(userClassLoader, settings.getExecutor(), executionEnvironment);
  final Planner planner =
      PlannerFactoryUtil.createPlanner(
          settings.getPlanner(), exec, tableConfig, modules, catalogs, functions);

  return new StreamTableEnvironmentImpl(
      catalogs,
      modules,
      functions,
      tableConfig,
      executionEnvironment,
      planner,
      exec,
      settings.isStreamingMode(),
      userClassLoader);
}
Use of org.apache.flink.table.delegation.Executor in the apache/flink project.
From the class DefaultExecutorTest, method testJobName:
@Test
public void testJobName() {
  // Verifies job-name precedence when creating a pipeline:
  // Table API configuration wins over the default name, and a name configured on the
  // StreamExecutionEnvironment also overrides the default.
  final StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
  final Executor defaultExecutor = new DefaultExecutor(streamEnv);

  final Configuration namedConfig = new Configuration();
  namedConfig.set(PipelineOptions.NAME, "Custom Name");

  final List<Transformation<?>> dummyTransformations =
      Collections.singletonList(
          streamEnv.fromElements(1, 2, 3).addSink(new DiscardingSink<>()).getTransformation());

  // default
  testJobName(
      defaultExecutor.createPipeline(dummyTransformations, new Configuration(), "Default Name"),
      "Default Name");

  // Table API specific
  testJobName(
      defaultExecutor.createPipeline(dummyTransformations, namedConfig, "Default Name"),
      "Custom Name");

  // DataStream API specific
  streamEnv.configure(namedConfig);
  testJobName(
      defaultExecutor.createPipeline(dummyTransformations, new Configuration(), "Default Name"),
      "Custom Name");
}
Aggregations