Use of org.apache.flink.table.api.EnvironmentSettings in project flink by apache.
Class ExecutionContext, method createTableEnvironment:
// ------------------------------------------------------------------------------------------------------------------
// Helper to create Table Environment
// ------------------------------------------------------------------------------------------------------------------
private StreamTableEnvironment createTableEnvironment() {
    // checks the value of RUNTIME_MODE
    EnvironmentSettings settings = EnvironmentSettings.fromConfiguration(flinkConfig);
    if (!settings.isBlinkPlanner()) {
        throw new TableException(
                "The old planner is not supported anymore. Please update to new default planner.");
    }
    TableConfig tableConfig = new TableConfig();
    tableConfig.addConfiguration(flinkConfig);
    StreamExecutionEnvironment streamExecEnv = createStreamExecutionEnvironment();
    final Executor executor = lookupExecutor(settings.getExecutor(), streamExecEnv);
    return createStreamTableEnvironment(
            streamExecEnv, settings, tableConfig, executor,
            sessionState.catalogManager, sessionState.moduleManager,
            sessionState.functionCatalog, classLoader);
}
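The helper above wires the executor and the session's existing catalog, module, and function managers together by hand, which is what the SQL client's ExecutionContext needs. For ordinary programs, a similar environment can be derived from a Configuration through the public API alone. A minimal sketch, assuming a Flink version that provides EnvironmentSettings.fromConfiguration (the class and method names of the sketch itself are illustrative, not part of the original class):

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public final class TableEnvironmentSketch {

    public static StreamTableEnvironment createFrom(Configuration flinkConfig) {
        // derive execution mode and planner settings from the configuration
        EnvironmentSettings settings = EnvironmentSettings.fromConfiguration(flinkConfig);

        // let Flink discover the executor and planner itself instead of looking them up manually
        StreamExecutionEnvironment streamEnv =
                StreamExecutionEnvironment.getExecutionEnvironment(flinkConfig);
        return StreamTableEnvironment.create(streamEnv, settings);
    }
}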
Use of org.apache.flink.table.api.EnvironmentSettings in project zeppelin by apache.
Class Flink113Shims, method createPlannerAndExecutor:
@Override
public ImmutablePair<Object, Object> createPlannerAndExecutor(
        ClassLoader classLoader, Object environmentSettings, Object sEnv,
        Object tableConfig, Object functionCatalog, Object catalogManager) {
    EnvironmentSettings settings = (EnvironmentSettings) environmentSettings;
    Executor executor = (Executor) lookupExecutor(classLoader, settings, sEnv);
    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, (TableConfig) tableConfig,
                    (FunctionCatalog) functionCatalog, (CatalogManager) catalogManager);
    return ImmutablePair.of(planner, executor);
}
Use of org.apache.flink.table.api.EnvironmentSettings in project zeppelin by apache.
Class Flink112Shims, method createPlannerAndExecutor:
@Override
public ImmutablePair<Object, Object> createPlannerAndExecutor(
        ClassLoader classLoader, Object environmentSettings, Object sEnv,
        Object tableConfig, Object functionCatalog, Object catalogManager) {
    EnvironmentSettings settings = (EnvironmentSettings) environmentSettings;
    Executor executor = (Executor) lookupExecutor(classLoader, settings, sEnv);
    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, (TableConfig) tableConfig,
                    (FunctionCatalog) functionCatalog, (CatalogManager) catalogManager);
    return ImmutablePair.of(planner, executor);
}
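The Flink 1.12 and 1.13 shims share the same pattern: the incoming EnvironmentSettings are turned into planner properties, the planner is discovered via ComponentFactoryService, and the executor comes from lookupExecutor. A minimal sketch of how the settings object handed to such a shim is typically built on those Flink versions (the class and method names of the sketch are illustrative):

import org.apache.flink.table.api.EnvironmentSettings;

public final class ShimSettingsSketch {

    // Builds the settings that are later passed (as Object) into createPlannerAndExecutor().
    public static EnvironmentSettings buildSettings() {
        return EnvironmentSettings.newInstance()
                .useBlinkPlanner()   // explicit planner choice, still present in Flink 1.12/1.13
                .inStreamingMode()
                .build();
    }
}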
Use of org.apache.flink.table.api.EnvironmentSettings in project flink by apache.
Class GettingStartedExample, method main:
public static void main(String[] args) throws Exception {
    // setup the unified API
    // in this case: declare that the table programs should be executed in batch mode
    final EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
    final TableEnvironment env = TableEnvironment.create(settings);

    // create a table with example data without a connector required
    final Table rawCustomers = env.fromValues(
            Row.of("Guillermo Smith", LocalDate.parse("1992-12-12"), "4081 Valley Road", "08540", "New Jersey", "m", true, 0, 78, 3),
            Row.of("Valeria Mendoza", LocalDate.parse("1970-03-28"), "1239 Rainbow Road", "90017", "Los Angeles", "f", true, 9, 39, 0),
            Row.of("Leann Holloway", LocalDate.parse("1989-05-21"), "2359 New Street", "97401", "Eugene", null, true, null, null, null),
            Row.of("Brandy Sanders", LocalDate.parse("1956-05-26"), "4891 Walkers-Ridge-Way", "73119", "Oklahoma City", "m", false, 9, 39, 0),
            Row.of("John Turner", LocalDate.parse("1982-10-02"), "2359 New Street", "60605", "Chicago", "m", true, 12, 39, 0),
            Row.of("Ellen Ortega", LocalDate.parse("1985-06-18"), "2448 Rodney STreet", "85023", "Phoenix", "f", true, 0, 78, 3));

    // handle ranges of columns easily
    final Table truncatedCustomers = rawCustomers.select(withColumns(range(1, 7)));

    // name columns
    final Table namedCustomers = truncatedCustomers.as(
            "name", "date_of_birth", "street", "zip_code", "city", "gender", "has_newsletter");

    // register a view temporarily
    env.createTemporaryView("customers", namedCustomers);

    // use SQL whenever you like
    // call execute() and print() to get insights
    env.sqlQuery(
                "SELECT "
                    + " COUNT(*) AS `number of customers`, "
                    + " AVG(YEAR(date_of_birth)) AS `average birth year` "
                    + "FROM `customers`")
        .execute()
        .print();

    // or further transform the data using the fluent Table API
    // e.g. filter, project fields, or call a user-defined function
    final Table youngCustomers = env.from("customers")
            .filter($("gender").isNotNull())
            .filter($("has_newsletter").isEqual(true))
            .filter($("date_of_birth").isGreaterOrEqual(LocalDate.parse("1980-01-01")))
            .select(
                    $("name").upperCase(),
                    $("date_of_birth"),
                    call(AddressNormalizer.class, $("street"), $("zip_code"), $("city")).as("address"));

    // this can be useful for testing before storing it in an external system
    try (CloseableIterator<Row> iterator = youngCustomers.execute().collect()) {
        final Set<Row> expectedOutput = new HashSet<>();
        expectedOutput.add(Row.of("GUILLERMO SMITH", LocalDate.parse("1992-12-12"), "4081 VALLEY ROAD, 08540, NEW JERSEY"));
        expectedOutput.add(Row.of("JOHN TURNER", LocalDate.parse("1982-10-02"), "2359 NEW STREET, 60605, CHICAGO"));
        expectedOutput.add(Row.of("ELLEN ORTEGA", LocalDate.parse("1985-06-18"), "2448 RODNEY STREET, 85023, PHOENIX"));

        final Set<Row> actualOutput = new HashSet<>();
        iterator.forEachRemaining(actualOutput::add);

        if (actualOutput.equals(expectedOutput)) {
            System.out.println("SUCCESS!");
        } else {
            System.out.println("FAILURE!");
        }
    }
}
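The select(...) call above references an AddressNormalizer scalar function that is defined elsewhere in the example. A minimal sketch of what such a function could look like; the exact formatting is only inferred from the expected output ("STREET, ZIP, CITY" in upper case) and is not necessarily the original implementation:

import org.apache.flink.table.functions.ScalarFunction;

// Hypothetical sketch: merges street, zip code, and city into one upper-cased address line,
// matching the "4081 VALLEY ROAD, 08540, NEW JERSEY" shape seen in the expected output.
// In the example it would live as a static nested class inside the main class.
public static class AddressNormalizer extends ScalarFunction {

    public String eval(String street, String zipCode, String city) {
        return (street + ", " + zipCode + ", " + city).toUpperCase();
    }
}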
Use of org.apache.flink.table.api.EnvironmentSettings in project flink by apache.
Class AdvancedFunctionsExample, method main:
public static void main(String[] args) throws Exception {
    // setup the environment
    final EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
    final TableEnvironment env = TableEnvironment.create(settings);

    // execute different kinds of functions
    executeLastDatedValueFunction(env);
    executeInternalRowMergerFunction(env);
}
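Besides selecting batch or streaming mode, the EnvironmentSettings builder can also set the built-in catalog and database that a new TableEnvironment starts with. A minimal sketch, assuming the builder options available in recent Flink versions; the catalog and database names are placeholders:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public final class SettingsBuilderSketch {

    public static void main(String[] args) {
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()                       // or inBatchMode(), as in the examples above
                .withBuiltInCatalogName("my_catalog")    // placeholder name
                .withBuiltInDatabaseName("my_database")  // placeholder name
                .build();

        TableEnvironment env = TableEnvironment.create(settings);

        // the built-in catalog and database become the initial current catalog and database
        System.out.println(env.getCurrentCatalog() + "." + env.getCurrentDatabase());
    }
}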