Example usage of org.apache.flink.table.client.gateway.ResultDescriptor in the Apache Flink project (apache/flink):
class LocalExecutor, method executeQuery.
/**
 * Executes the given query in the session and registers its {@link DynamicResult} in the
 * result store so the client can retrieve rows later.
 *
 * @param sessionId the session to execute the query in
 * @param query the query operation to submit as a Flink job
 * @return a descriptor referencing the stored result by the JobID of the submitted job
 * @throws SqlExecutionException if the query could not be executed
 */
@Override
public ResultDescriptor executeQuery(String sessionId, QueryOperation query) throws SqlExecutionException {
    final TableResultInternal tableResult = executeOperation(sessionId, query);
    final SessionContext context = getSessionContext(sessionId);
    final ReadableConfig config = context.getReadableConfig();
    final DynamicResult result = resultStore.createResult(config, tableResult);
    // A query result must be backed by a running Flink job; fail fast with context instead of
    // a bare IllegalArgumentException if no job client is attached.
    checkArgument(
            tableResult.getJobClient().isPresent(),
            "Query result is expected to be backed by a Flink job, but no job client is present.");
    final String jobId = tableResult.getJobClient().get().getJobID().toString();
    // store the result under the JobID so it can be retrieved (and cancelled) by that id later
    resultStore.storeResult(jobId, result);
    return new ResultDescriptor(
            jobId,
            tableResult.getResolvedSchema(),
            result.isMaterialized(),
            config,
            tableResult.getRowDataToStringConverter());
}
Example usage of org.apache.flink.table.client.gateway.ResultDescriptor in the Apache Flink project (apache/flink):
class CliResultViewTest, method testResultViewClearResult.
// Runs a result view (table or changelog mode) against a mock executor, interrupts it,
// and verifies that exactly expectedCancellationCount result cancellations are issued.
private void testResultViewClearResult(TypedResult<?> typedResult, boolean isTableMode, int expectedCancellationCount) throws Exception {
// the mock executor counts this latch down once per cancellation it receives
final CountDownLatch cancellationCounterLatch = new CountDownLatch(expectedCancellationCount);
final MockExecutor executor = new MockExecutor(typedResult, cancellationCounterLatch);
final Configuration testConfig = new Configuration();
testConfig.set(EXECUTION_RESULT_MODE, ResultMode.TABLE);
testConfig.set(RUNTIME_MODE, RuntimeExecutionMode.STREAMING);
String sessionId = executor.openSession("test-session");
ResolvedSchema schema = ResolvedSchema.of(Column.physical("Null Field", DataTypes.STRING()));
// "result-id" is a placeholder; the mock executor never resolves it against a real job
final ResultDescriptor descriptor = new ResultDescriptor("result-id", schema, false, testConfig, new RowDataToStringConverterImpl(schema.toPhysicalRowDataType()));
try (CliClient cli = new TestingCliClient(TerminalUtils.createDumbTerminal(), sessionId, executor, File.createTempFile("history", "tmp").toPath(), null)) {
Thread resultViewRunner = new Thread(new TestingCliResultView(cli, descriptor, isTableMode));
resultViewRunner.start();
// interrupt the view thread so it cleans up and cancels its result;
// NOTE(review): right after start() this guard is effectively always true — presumably defensive; confirm
if (!resultViewRunner.isInterrupted()) {
resultViewRunner.interrupt();
}
// poll until the view thread has exited so the client is not closed underneath it
while (resultViewRunner.isAlive()) {
Thread.sleep(100);
}
}
// all expected cancellations must arrive within the timeout, else the view leaked its result
assertTrue("Invalid number of cancellations.", cancellationCounterLatch.await(10, TimeUnit.SECONDS));
}
Example usage of org.apache.flink.table.client.gateway.ResultDescriptor in the Apache Flink project (apache/flink):
class LocalExecutorITCase, method testStreamQueryExecutionChangelog.
@Test(timeout = 90_000L)
public void testStreamQueryExecutionChangelog() throws Exception {
    // Resolve the CSV fixture backing the source table and expose it as a session variable.
    final URL sourceUrl = getClass().getClassLoader().getResource("test-data.csv");
    Objects.requireNonNull(sourceUrl);
    final Map<String, String> sessionVariables = new HashMap<>();
    sessionVariables.put("$VAR_SOURCE_PATH1", sourceUrl.getPath());
    final Configuration sessionConfig = Configuration.fromMap(getDefaultSessionConfigMap());
    final LocalExecutor localExecutor = createLocalExecutor(Collections.singletonList(udfDependency), sessionConfig);
    final String sessionId = localExecutor.openSession("test-session");
    assertEquals("test-session", sessionId);
    initSession(localExecutor, sessionId, sessionVariables);
    try {
        // Submit the streaming query and retrieve its changelog rows.
        final ResultDescriptor resultDescriptor = executeQuery(localExecutor, sessionId, "SELECT scalarUDF(IntegerField1, 5), StringField1, 'ABC' FROM TableNumber1");
        // Changelog results are never materialized.
        assertFalse(resultDescriptor.isMaterialized());
        final List<String> actual = retrieveChangelogResult(localExecutor, sessionId, resultDescriptor.getResultId(), resultDescriptor.getRowDataStringConverter());
        final List<String> expected = new ArrayList<>();
        for (String row : new String[] {
                "[47, Hello World, ABC]",
                "[27, Hello World, ABC]",
                "[37, Hello World, ABC]",
                "[37, Hello World, ABC]",
                "[47, Hello World, ABC]",
                "[57, Hello World!!!!, ABC]"}) {
            expected.add(row);
        }
        // Order-insensitive comparison of the collected changelog.
        TestBaseUtils.compareResultCollections(expected, actual, Comparator.naturalOrder());
    } finally {
        localExecutor.closeSession(sessionId);
    }
}
Example usage of org.apache.flink.table.client.gateway.ResultDescriptor in the Apache Flink project (apache/flink):
class LocalExecutorITCase, method testBatchQueryExecutionMultipleTimes.
@Test(timeout = 90_000L)
public void testBatchQueryExecutionMultipleTimes() throws Exception {
    // Resolve the CSV fixture backing the source table and expose it as a session variable.
    final URL sourceUrl = getClass().getClassLoader().getResource("test-data.csv");
    Objects.requireNonNull(sourceUrl);
    final Map<String, String> sessionVariables = new HashMap<>();
    sessionVariables.put("$VAR_SOURCE_PATH1", sourceUrl.getPath());
    // Force table result mode and batch execution for this session.
    final Map<String, String> sessionConfig = new HashMap<>();
    sessionConfig.put(EXECUTION_RESULT_MODE.key(), ResultMode.TABLE.name());
    sessionConfig.put(RUNTIME_MODE.key(), RuntimeExecutionMode.BATCH.name());
    final Executor localExecutor = createLocalExecutor(Collections.singletonList(udfDependency), Configuration.fromMap(sessionConfig));
    final String sessionId = localExecutor.openSession("test-session");
    assertEquals("test-session", sessionId);
    initSession(localExecutor, sessionId, sessionVariables);
    final List<String> expected = new ArrayList<>();
    for (String row : new String[] {"[47]", "[27]", "[37]", "[37]", "[47]", "[57]"}) {
        expected.add(row);
    }
    try {
        // The same view must be executable repeatedly within a single session.
        for (int attempt = 0; attempt < 3; attempt++) {
            final ResultDescriptor resultDescriptor = executeQuery(localExecutor, sessionId, "SELECT * FROM TestView1");
            // Table mode materializes its results.
            assertTrue(resultDescriptor.isMaterialized());
            final List<String> actual = retrieveTableResult(localExecutor, sessionId, resultDescriptor.getResultId(), resultDescriptor.getRowDataStringConverter());
            TestBaseUtils.compareResultCollections(expected, actual, Comparator.naturalOrder());
        }
    } finally {
        localExecutor.closeSession(sessionId);
    }
}
Example usage of org.apache.flink.table.client.gateway.ResultDescriptor in the Apache Flink project (apache/flink):
class CliTableauResultViewTest, method testFailedStreamingResult.
@Test
public void testFailedStreamingResult() {
    // Streaming tableau mode: the second change batch throws, so the view must surface the
    // failure after printing the first half of the rows, then cancel the result on close.
    final Configuration testConfig = new Configuration();
    testConfig.set(EXECUTION_RESULT_MODE, ResultMode.TABLEAU);
    testConfig.set(RUNTIME_MODE, RuntimeExecutionMode.STREAMING);
    final ResultDescriptor resultDescriptor =
            new ResultDescriptor("", schema, true, testConfig, rowDataToStringConverter);
    final TestingExecutor mockExecutor =
            new TestingExecutorBuilder()
                    .setResultChangesSupplier(
                            // first call: deliver half of the rows
                            () -> TypedResult.payload(streamingData.subList(0, streamingData.size() / 2)),
                            // second call: simulate a query failure
                            () -> {
                                throw new SqlExecutionException("query failed");
                            })
                    .build();
    final CliTableauResultView view =
            new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor);
    try {
        view.displayResults();
        Assert.fail("Shouldn't get here");
    } catch (SqlExecutionException e) {
        // the executor's failure must propagate unchanged
        Assert.assertEquals("query failed", e.getMessage());
    }
    view.close();
    // Only the rows delivered before the failure may have been printed.
    final String nl = System.lineSeparator();
    Assert.assertEquals(
            "+----+---------+-------------+----------------------+--------------------------------+----------------+----------------------------+"
                    + nl
                    + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |"
                    + nl
                    + "+----+---------+-------------+----------------------+--------------------------------+----------------+----------------------------+"
                    + nl
                    + "| +I | <NULL> | 1 | 2 | abc | 1.23000 | 2020-03-01 18:39:14.000000 |"
                    + nl
                    + "| -U | FALSE | <NULL> | 0 | | 1.00000 | 2020-03-01 18:39:14.100000 |"
                    + nl
                    + "| +U | TRUE | 2147483647 | <NULL> | abcdefg | 12345.00000 | 2020-03-01 18:39:14.120000 |"
                    + nl
                    + "| -D | FALSE | -2147483648 | 9223372036854775807 | <NULL> | 12345.06789 | 2020-03-01 18:39:14.123000 |"
                    + nl,
            terminalOutput.toString());
    // Closing the view after a failure must cancel the result exactly once.
    assertThat(mockExecutor.getNumCancelCalls(), is(1));
}
Aggregations