Use of org.apache.flink.table.api.TableEnvironment in the Apache Zeppelin project:
class Flink114Shims, method startMultipleInsert.
@Override
public void startMultipleInsert(Object tblEnv, InterpreterContext context) throws Exception {
    // Register a fresh StatementSet for this paragraph; later INSERT statements
    // for the same paragraph are accumulated into it and executed together.
    TableEnvironment tableEnv = (TableEnvironment) tblEnv;
    statementSetMap.put(context.getParagraphId(), tableEnv.createStatementSet());
}
Use of org.apache.flink.table.api.TableEnvironment in the Apache Zeppelin project:
class TableEnvFactory, method createJavaFlinkStreamTableEnvironment.
/**
 * Creates a Java {@code StreamTableEnvironmentImpl} reflectively so the same factory code
 * works across Flink versions whose constructor signatures differ.
 *
 * @param settings    environment settings (streaming vs. batch mode is read from here)
 * @param classLoader class loader used to resolve the planner/executor and, on Flink
 *                    1.11.1+, passed to the environment constructor as well
 * @return the reflectively constructed TableEnvironment
 * @throws TableException if class lookup or construction fails; the underlying
 *                        reflection exception is preserved as the cause
 */
public TableEnvironment createJavaFlinkStreamTableEnvironment(EnvironmentSettings settings, ClassLoader classLoader) {
  try {
    // NOTE(review): createPlannerAndExecutor receives oldPlannerBatchTableConfig while the
    // constructor below receives oldPlannerStreamTableConfig — confirm this mix is intended.
    ImmutablePair<Object, Object> pair = flinkShims.createPlannerAndExecutor(classLoader, settings, senv.getJavaEnv(), oldPlannerBatchTableConfig, functionCatalog, catalogManager);
    Planner planner = (Planner) pair.left;
    Executor executor = (Executor) pair.right;
    // Use Class<?> / Constructor<?> rather than raw types to avoid unchecked warnings.
    Class<?> clazz = Class.forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
    try {
      // Pre-1.11.1 constructor signature (no trailing ClassLoader parameter).
      Constructor<?> constructor = clazz.getConstructor(CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class);
      return (TableEnvironment) constructor.newInstance(oldPlannerCatalogManager, moduleManager, oldPlannerFunctionCatalog, oldPlannerStreamTableConfig, senv.getJavaEnv(), planner, executor, settings.isStreamingMode());
    } catch (NoSuchMethodException e) {
      // Flink 1.11.1 changed the constructor signature to take a ClassLoader, see FLINK-18419.
      Constructor<?> constructor = clazz.getConstructor(CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class, org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class, ClassLoader.class);
      return (TableEnvironment) constructor.newInstance(oldPlannerCatalogManager, moduleManager, oldPlannerFunctionCatalog, oldPlannerStreamTableConfig, senv.getJavaEnv(), planner, executor, settings.isStreamingMode(), classLoader);
    }
  } catch (Exception e) {
    // Wrap with context; the reflective cause is kept for diagnosis.
    throw new TableException("Fail to createJavaFlinkStreamTableEnvironment", e);
  }
}
Use of org.apache.flink.table.api.TableEnvironment in the Apache Zeppelin project:
class TableEnvFactory, method createScalaBlinkStreamTableEnvironment.
/**
 * Creates a Scala {@code StreamTableEnvironmentImpl} (blink planner) reflectively so the
 * same factory code works across Flink versions whose constructor signatures differ.
 *
 * @param settings    environment settings (streaming vs. batch mode is read from here)
 * @param classLoader class loader used to resolve the planner/executor and, on Flink
 *                    1.11.1+, passed to the environment constructor as well
 * @return the reflectively constructed TableEnvironment
 * @throws TableException if class lookup or construction fails; the underlying
 *                        reflection exception is preserved as the cause
 */
public TableEnvironment createScalaBlinkStreamTableEnvironment(EnvironmentSettings settings, ClassLoader classLoader) {
  try {
    ImmutablePair<Object, Object> pair = flinkShims.createPlannerAndExecutor(classLoader, settings, senv.getJavaEnv(), streamTableConfig, functionCatalog, catalogManager);
    Planner planner = (Planner) pair.left;
    Executor executor = (Executor) pair.right;
    // Use Class<?> / Constructor<?> rather than raw types to avoid unchecked warnings.
    Class<?> clazz = Class.forName("org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl");
    try {
      // Pre-1.11.1 constructor signature (no trailing ClassLoader parameter).
      Constructor<?> constructor = clazz.getConstructor(CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class, org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class);
      return (TableEnvironment) constructor.newInstance(catalogManager, moduleManager, functionCatalog, streamTableConfig, senv, planner, executor, settings.isStreamingMode());
    } catch (NoSuchMethodException e) {
      // Flink 1.11.1 changed the constructor signature to take a ClassLoader, see FLINK-18419.
      Constructor<?> constructor = clazz.getConstructor(CatalogManager.class, ModuleManager.class, FunctionCatalog.class, TableConfig.class, org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class, Planner.class, Executor.class, boolean.class, ClassLoader.class);
      return (TableEnvironment) constructor.newInstance(catalogManager, moduleManager, functionCatalog, streamTableConfig, senv, planner, executor, settings.isStreamingMode(), classLoader);
    }
  } catch (Exception e) {
    // Wrap with context; the reflective cause is kept for diagnosis.
    throw new TableException("Fail to createScalaBlinkStreamTableEnvironment", e);
  }
}
Use of org.apache.flink.table.api.TableEnvironment in the Apache Flink project:
class BatchPythonUdfSqlJob, method main.
/**
 * End-to-end smoke test for a batch Python UDF: registers {@code add_one}, applies it to
 * the values 1..3 and fails with an AssertionError unless the result is exactly [2, 3, 4].
 */
public static void main(String[] args) throws Exception {
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    // Single parallel task keeps the collected row order deterministic.
    tEnv.getConfig().getConfiguration().set(CoreOptions.DEFAULT_PARALLELISM, 1);
    tEnv.executeSql("create temporary system function add_one as 'add_one.add_one' language python");
    tEnv.createTemporaryView("source", tEnv.fromValues(1L, 2L, 3L).as("a"));
    List<Long> actual = new ArrayList<>();
    // collect() returns a CloseableIterator backed by a running job; close it so the
    // job's resources are released even if iteration throws (original leaked it).
    try (org.apache.flink.util.CloseableIterator<Row> result =
            tEnv.executeSql("select add_one(a) as a from source").collect()) {
        while (result.hasNext()) {
            actual.add((Long) result.next().getField(0));
        }
    }
    List<Long> expected = Arrays.asList(2L, 3L, 4L);
    if (!actual.equals(expected)) {
        throw new AssertionError(String.format("The output result: %s is not as expected: %s!", actual, expected));
    }
}
Use of org.apache.flink.table.api.TableEnvironment in the Apache Flink project:
class HiveCatalogUdfITCase, method testTimestampUDF.
/**
 * Verifies that a Hive UDF operating on TIMESTAMP columns ({@code UDFYear}) can be
 * registered in a Hive catalog and evaluated through Flink SQL in batch mode.
 */
@Test
public void testTimestampUDF() throws Exception {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    tableEnv.executeSql(String.format("create function myyear as '%s'", UDFYear.class.getName()));
    tableEnv.executeSql("create table src(ts timestamp)");
    try {
        HiveTestUtils.createTextTableInserter(hiveCatalog, "default", "src").addRow(new Object[] { Timestamp.valueOf("2013-07-15 10:00:00") }).addRow(new Object[] { Timestamp.valueOf("2019-05-23 17:32:55") }).commit();
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select myyear(ts) as y from src").execute().collect());
        Assert.assertEquals(2, results.size());
        Assert.assertEquals("[+I[2013], +I[2019]]", results.toString());
    } finally {
        tableEnv.executeSql("drop table src");
        // Also drop the registered function: the original leaked 'myyear' into the
        // shared Hive catalog, which can break other tests running against it.
        tableEnv.executeSql("drop function myyear");
    }
}
Aggregations