Use of org.apache.flink.table.client.gateway.Executor in project flink by apache.
Class LocalExecutorITCase, method testCompleteStatement.
@Test
public void testCompleteStatement() {
    final Executor executor = createLocalExecutor();
    String sessionId = executor.openSession("test-session");
    assertEquals("test-session", sessionId);
    initSession(executor, sessionId, Collections.emptyMap());

    final List<String> expectedTableHints =
            Arrays.asList(
                    "default_catalog.default_database.TableNumber1",
                    "default_catalog.default_database.TableSourceSink");
    assertEquals(
            expectedTableHints,
            executor.completeStatement(sessionId, "SELECT * FROM Ta", 16));

    final List<String> expectedClause = Collections.singletonList("WHERE");
    assertEquals(
            expectedClause,
            executor.completeStatement(sessionId, "SELECT * FROM TableNumber1 WH", 29));

    final List<String> expectedField = Arrays.asList("IntegerField1");
    assertEquals(
            expectedField,
            executor.completeStatement(sessionId, "SELECT * FROM TableNumber1 WHERE Inte", 37));

    executor.closeSession(sessionId);
}
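The cursor positions passed above (16, 29, 37) are simply the lengths of the partial statements, i.e. the cursor sits at the end of what has been typed so far. The helper below is a minimal sketch (not part of the Flink sources) that makes this relationship explicit; the class and method names are illustrative only.

import java.util.List;

import org.apache.flink.table.client.gateway.Executor;

final class CompletionSketch {

    // Hypothetical helper: asks the executor for completion hints with the
    // cursor placed at the end of the partial statement.
    static List<String> hintsAtEnd(Executor executor, String sessionId, String partialStatement) {
        // The third argument is the character offset of the cursor within the
        // statement; passing the string length means "complete at the end".
        return executor.completeStatement(sessionId, partialStatement, partialStatement.length());
    }
}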
Use of org.apache.flink.table.client.gateway.Executor in project flink by apache.
Class LocalExecutorITCase, method testBatchQueryExecution.
@Test(timeout = 90_000L)
public void testBatchQueryExecution() throws Exception {
    final URL url = getClass().getClassLoader().getResource("test-data.csv");
    Objects.requireNonNull(url);
    final Map<String, String> replaceVars = new HashMap<>();
    replaceVars.put("$VAR_SOURCE_PATH1", url.getPath());

    final Map<String, String> configMap = new HashMap<>();
    configMap.put(EXECUTION_RESULT_MODE.key(), ResultMode.TABLE.name());
    configMap.put(RUNTIME_MODE.key(), RuntimeExecutionMode.BATCH.name());

    final Executor executor =
            createLocalExecutor(
                    Collections.singletonList(udfDependency), Configuration.fromMap(configMap));
    String sessionId = executor.openSession("test-session");
    assertEquals("test-session", sessionId);
    initSession(executor, sessionId, replaceVars);

    try {
        final ResultDescriptor desc =
                executeQuery(executor, sessionId, "SELECT *, 'ABC' FROM TestView1");
        assertTrue(desc.isMaterialized());
        final List<String> actualResults =
                retrieveTableResult(
                        executor, sessionId, desc.getResultId(), desc.getRowDataStringConverter());
        final List<String> expectedResults = new ArrayList<>();
        expectedResults.add("[47, ABC]");
        expectedResults.add("[27, ABC]");
        expectedResults.add("[37, ABC]");
        expectedResults.add("[37, ABC]");
        expectedResults.add("[47, ABC]");
        expectedResults.add("[57, ABC]");
        TestBaseUtils.compareResultCollections(
                expectedResults, actualResults, Comparator.naturalOrder());
    } finally {
        executor.closeSession(sessionId);
    }
}
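For reference, a rough equivalent of the configMap built above using raw string keys is sketched below. The literal keys "sql-client.execution.result-mode" and "execution.runtime-mode" are assumptions about what EXECUTION_RESULT_MODE.key() and RUNTIME_MODE.key() resolve to, so verify them against the ConfigOption definitions before relying on them.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.configuration.Configuration;

final class BatchConfigSketch {

    // Builds the same batch/TABLE-mode configuration as the test, but with
    // plain string keys (assumed, not taken from the Flink sources).
    static Configuration batchTableModeConfig() {
        Map<String, String> configMap = new HashMap<>();
        configMap.put("sql-client.execution.result-mode", "TABLE"); // assumed key of EXECUTION_RESULT_MODE
        configMap.put("execution.runtime-mode", "BATCH");           // assumed key of RUNTIME_MODE
        return Configuration.fromMap(configMap);
    }
}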
Use of org.apache.flink.table.client.gateway.Executor in project flink by apache.
Class CliClientITCase, method runSqlStatements.
/**
 * Returns the printed results for each executed SQL statement.
 *
 * @param statements the SQL statements to run
 * @return the printed results on SQL Client
 */
private List<Result> runSqlStatements(List<String> statements) throws IOException {
    final String sqlContent = String.join("", statements);
    DefaultContext defaultContext =
            new DefaultContext(
                    Collections.emptyList(),
                    new Configuration(MINI_CLUSTER_RESOURCE.getClientConfiguration())
                            .set(
                                    ExecutionConfigOptions.TABLE_EXEC_LEGACY_CAST_BEHAVIOUR,
                                    ExecutionConfigOptions.LegacyCastBehaviour.DISABLED),
                    Collections.singletonList(new DefaultCLI()));
    final Executor executor = new LocalExecutor(defaultContext);
    InputStream inputStream = new ByteArrayInputStream(sqlContent.getBytes());
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream(256);
    String sessionId = executor.openSession("test-session");

    try (Terminal terminal = new DumbTerminal(inputStream, outputStream);
            CliClient client =
                    new CliClient(
                            () -> terminal, sessionId, executor, historyPath, HideSqlStatement.INSTANCE)) {
        client.executeInInteractiveMode();
        String output = new String(outputStream.toByteArray());
        return normalizeOutput(output);
    }
}
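Note that the statements are joined with an empty separator, so each entry is expected to carry its own terminating semicolon and newline. A hypothetical caller inside the same test class might look like the fragment below; the CREATE TABLE statement is illustrative only.

// Hypothetical usage of runSqlStatements: each statement string ends with ";\n"
// because String.join("", statements) inserts nothing between entries.
List<String> statements = Arrays.asList(
        "CREATE TABLE T (f INT) WITH ('connector' = 'datagen');\n",
        "SELECT * FROM T;\n");
List<Result> results = runSqlStatements(statements);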
Use of org.apache.flink.table.client.gateway.Executor in project flink by apache.
Class LocalExecutorITCase, method testBatchQueryExecutionMultipleTimes.
@Test(timeout = 90_000L)
public void testBatchQueryExecutionMultipleTimes() throws Exception {
    final URL url = getClass().getClassLoader().getResource("test-data.csv");
    Objects.requireNonNull(url);
    final Map<String, String> replaceVars = new HashMap<>();
    replaceVars.put("$VAR_SOURCE_PATH1", url.getPath());

    final Map<String, String> configMap = new HashMap<>();
    configMap.put(EXECUTION_RESULT_MODE.key(), ResultMode.TABLE.name());
    configMap.put(RUNTIME_MODE.key(), RuntimeExecutionMode.BATCH.name());

    final Executor executor =
            createLocalExecutor(
                    Collections.singletonList(udfDependency), Configuration.fromMap(configMap));
    String sessionId = executor.openSession("test-session");
    assertEquals("test-session", sessionId);
    initSession(executor, sessionId, replaceVars);

    final List<String> expectedResults = new ArrayList<>();
    expectedResults.add("[47]");
    expectedResults.add("[27]");
    expectedResults.add("[37]");
    expectedResults.add("[37]");
    expectedResults.add("[47]");
    expectedResults.add("[57]");

    try {
        for (int i = 0; i < 3; i++) {
            final ResultDescriptor desc =
                    executeQuery(executor, sessionId, "SELECT * FROM TestView1");
            assertTrue(desc.isMaterialized());
            final List<String> actualResults =
                    retrieveTableResult(
                            executor, sessionId, desc.getResultId(), desc.getRowDataStringConverter());
            TestBaseUtils.compareResultCollections(
                    expectedResults, actualResults, Comparator.naturalOrder());
        }
    } finally {
        executor.closeSession(sessionId);
    }
}
Use of org.apache.flink.table.client.gateway.Executor in project flink by apache.
Class SqlClient, method start.
private void start() {
    if (isEmbedded) {
        // create local executor with default environment
        DefaultContext defaultContext = LocalContextUtils.buildDefaultContext(options);
        final Executor executor = new LocalExecutor(defaultContext);
        executor.start();

        // open a new session
        String sessionId = executor.openSession(options.getSessionId());
        try {
            // add shutdown hook
            Runtime.getRuntime().addShutdownHook(new EmbeddedShutdownThread(sessionId, executor));

            // do the actual work
            openCli(sessionId, executor);
        } finally {
            executor.closeSession(sessionId);
        }
    } else {
        throw new SqlClientException("Gateway mode is not supported yet.");
    }
}
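The EmbeddedShutdownThread registered above is not shown here. A minimal sketch of what such a hook could do, assuming its only job is to close the session when the JVM shuts down, follows; the class name is illustrative, not the actual Flink implementation.

import org.apache.flink.table.client.gateway.Executor;

// Sketch of a shutdown hook that closes the SQL Client session; this is an
// assumption about EmbeddedShutdownThread's behavior, not the real class.
final class SessionClosingShutdownHook extends Thread {

    private final String sessionId;
    private final Executor executor;

    SessionClosingShutdownHook(String sessionId, Executor executor) {
        this.sessionId = sessionId;
        this.executor = executor;
    }

    @Override
    public void run() {
        // Release session resources even when the process is terminated by a signal.
        executor.closeSession(sessionId);
    }
}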