Use of org.apache.flink.table.client.gateway.SqlExecutionException in the apache/flink project.
Example 1: the parse method of the SqlMultiLineParser class.
/**
 * JLine {@code Parser} callback invoked while the user types in the SQL CLI.
 *
 * <p>For any context other than {@code ACCEPT_LINE} (e.g. completion or secondary prompts) the
 * line is handed straight to {@code parseInternal} without validation. When the user actually
 * submits the line ({@code ACCEPT_LINE}), JLine's throw-based protocol is used: throwing
 * {@code EOFError} tells the line reader to keep reading (multi-line input), while throwing
 * {@code SyntaxError} rejects the line.
 *
 * @param line the full buffer content typed so far
 * @param cursor the cursor position within {@code line}
 * @param context the JLine parse context
 * @return the parsed line produced by {@code parseInternal}
 */
@Override
public ParsedLine parse(String line, int cursor, ParseContext context) {
if (context != ParseContext.ACCEPT_LINE) {
return parseInternal(line, cursor, context);
}
// A statement is only considered complete once it ends with the delimiter (';');
// otherwise request a continuation line from JLine via EOFError.
if (!line.trim().endsWith(STATEMENT_DELIMITER)) {
throw new EOFError(-1, -1, "New line without EOF character.", NEW_LINE_PROMPT);
}
try {
// Cache the raw statement and clear any stale parse error BEFORE parsing, so the
// fields are consistent for whoever reads them after the line is accepted.
command = line;
parseException = null;
// try to parse the line read
parsedOperation = parser.parseCommand(line).orElse(null);
} catch (SqlExecutionException e) {
// An EOF cause means the parser judged the statement incomplete even though it ends
// with ';' (e.g. inside a string/comment) — ask for another continuation line.
if (e.getCause() instanceof SqlParserEOFException) {
throw new EOFError(-1, -1, "The statement is incomplete.", NEW_LINE_PROMPT);
}
// cache the exception so that we can print details in the terminal.
parseException = e;
throw new SyntaxError(-1, -1, e.getMessage());
}
return parseInternal(line, cursor, context);
}
Use of org.apache.flink.table.client.gateway.SqlExecutionException in the apache/flink project.
Example 2: the refresh method of the CliChangelogResultView class.
/**
 * Polls the executor for the next batch of changelog records and repaints the view.
 *
 * <p>If retrieval fails, the view is closed with the failure; an empty poll leaves the
 * buffered rows untouched, and an end-of-stream result stops further polling.
 */
@Override
protected void refresh() {
    // fetch the next chunk of changes; a failure here is terminal for this view
    final TypedResult<List<RowData>> fetched;
    try {
        fetched =
                client.getExecutor()
                        .retrieveResultChanges(
                                client.getSessionId(), resultDescriptor.getResultId());
    } catch (SqlExecutionException e) {
        close(e);
        return;
    }

    switch (fetched.getType()) {
        case EMPTY:
            // nothing new arrived since the last poll
            break;
        case EOS:
            // the job has produced all results; stop polling (not cancelled by the user)
            stopRetrieval(false);
            break;
        default:
            // payload: render each record and append it to the bounded row buffer
            for (RowData rowData : fetched.getPayload()) {
                final String[] renderedRow = tableauStyle.rowFieldsToString(rowData);
                // evict the oldest row to keep the CLI responsive
                if (results.size() >= DEFAULT_MAX_ROW_COUNT) {
                    results.remove(0);
                }
                results.add(renderedRow);
                scrolling++;
            }
            break;
    }

    // repaint all parts of the view and remember the time of this retrieval
    resetAllParts();
    lastRetrieval = LocalTime.now();
}
Use of org.apache.flink.table.client.gateway.SqlExecutionException in the apache/flink project.
Example 3: the testRetrieveChanges method of the ChangelogCollectResultTest class.
/**
 * Drains a changelog result whose source holds twice the record buffer size and verifies
 * that exactly all produced rows are retrieved before end-of-stream.
 */
@Test
public void testRetrieveChanges() throws Exception {
    final int expectedTotal = ChangelogCollectResult.CHANGE_RECORD_BUFFER_SIZE * 2;
    final CloseableIterator<Row> source =
            CloseableIterator.adapterForIterator(
                    IntStream.range(0, expectedTotal).mapToObj(Row::of).iterator());
    final ChangelogCollectResult changelogResult =
            new ChangelogCollectResult(
                    new TestTableResult(
                            ResultKind.SUCCESS_WITH_CONTENT,
                            ResolvedSchema.of(Column.physical("id", DataTypes.INT())),
                            source));

    int received = 0;
    boolean finished = false;
    while (!finished) {
        final TypedResult<List<RowData>> result = changelogResult.retrieveChanges();
        // slow the processing down
        Thread.sleep(100);
        switch (result.getType()) {
            case EMPTY:
                // nothing buffered yet; poll again
                break;
            case EOS:
                finished = true;
                break;
            case PAYLOAD:
                received += result.getPayload().size();
                break;
            default:
                throw new SqlExecutionException("Unknown result type: " + result.getType());
        }
    }
    assertEquals(expectedTotal, received);
}
Use of org.apache.flink.table.client.gateway.SqlExecutionException in the apache/flink project.
Example 4: the discoverDependencies method of the LocalContextUtils class.
// --------------------------------------------------------------------------------------------

/**
 * Collects all dependency JARs: the explicitly configured jar URLs, every *.jar file found in
 * the configured library directories, and — if present on the classpath — the flink-python jar.
 *
 * @param jars explicitly configured jar file URLs
 * @param libraries URLs of directories to scan for jar files
 * @return the discovered, validated dependency URLs
 */
private static List<URL> discoverDependencies(List<URL> jars, List<URL> libraries) {
    final List<URL> discovered = new ArrayList<>();
    try {
        // validate and collect the explicitly listed jar files
        for (URL jarUrl : jars) {
            JarUtils.checkJarFile(jarUrl);
            discovered.add(jarUrl);
        }
        // scan each library directory for jar files
        for (URL libraryUrl : libraries) {
            final File directory = new File(libraryUrl.toURI());
            if (!directory.isDirectory()) {
                throw new SqlClientException("Directory expected: " + directory);
            } else if (!directory.canRead()) {
                throw new SqlClientException("Directory cannot be read: " + directory);
            }
            final File[] entries = directory.listFiles();
            // listFiles() returns null on I/O error even for readable directories
            if (entries == null) {
                throw new SqlClientException("Directory cannot be read: " + directory);
            }
            for (File entry : entries) {
                // only consider jars
                if (entry.isFile()
                        && entry.getAbsolutePath().toLowerCase().endsWith(".jar")) {
                    final URL entryUrl = entry.toURI().toURL();
                    JarUtils.checkJarFile(entryUrl);
                    discovered.add(entryUrl);
                }
            }
        }
    } catch (Exception e) {
        throw new SqlClientException("Could not load all required JAR files.", e);
    }

    // add python dependencies by default
    try {
        // locate the jar that contains the Python function runner without initializing it
        final URL pythonJarLocation =
                Class.forName(
                                "org.apache.flink.python.PythonFunctionRunner",
                                false,
                                Thread.currentThread().getContextClassLoader())
                        .getProtectionDomain()
                        .getCodeSource()
                        .getLocation();
        if (Paths.get(pythonJarLocation.toURI()).toFile().isFile()) {
            discovered.add(pythonJarLocation);
        }
    } catch (URISyntaxException | ClassNotFoundException e) {
        throw new SqlExecutionException(
                "Don't find python dependencies. Please add the flink-python jar via `--jar` command option manually.",
                e);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Using the following dependencies: {}", discovered);
    }
    return discovered;
}
Use of org.apache.flink.table.client.gateway.SqlExecutionException in the apache/flink project.
Example 5: the testFailedStreamingResult method of the CliTableauResultViewTest class.
/**
 * Verifies tableau streaming output when the result stream fails midway: the first half of the
 * rows must already be printed, the SqlExecutionException must surface to the caller unchanged,
 * and the query must be cancelled exactly once.
 */
@Test
public void testFailedStreamingResult() {
final Configuration testConfig = new Configuration();
testConfig.set(EXECUTION_RESULT_MODE, ResultMode.TABLEAU);
testConfig.set(RUNTIME_MODE, RuntimeExecutionMode.STREAMING);
ResultDescriptor resultDescriptor = new ResultDescriptor("", schema, true, testConfig, rowDataToStringConverter);
// Executor stub: first poll delivers the first half of the rows, second poll fails.
TestingExecutor mockExecutor = new TestingExecutorBuilder().setResultChangesSupplier(() -> TypedResult.payload(streamingData.subList(0, streamingData.size() / 2)), () -> {
throw new SqlExecutionException("query failed");
}).build();
CliTableauResultView view = new CliTableauResultView(terminal, mockExecutor, "session", resultDescriptor);
try {
view.displayResults();
Assert.fail("Shouldn't get here");
} catch (SqlExecutionException e) {
// the failure from the executor must propagate with its original message
Assert.assertEquals("query failed", e.getMessage());
}
view.close();
// Only the header and the rows delivered before the failure may appear in the terminal.
Assert.assertEquals("+----+---------+-------------+----------------------+--------------------------------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+-------------+----------------------+--------------------------------+----------------+----------------------------+" + System.lineSeparator() + "| +I | <NULL> | 1 | 2 | abc | 1.23000 | 2020-03-01 18:39:14.000000 |" + System.lineSeparator() + "| -U | FALSE | <NULL> | 0 | | 1.00000 | 2020-03-01 18:39:14.100000 |" + System.lineSeparator() + "| +U | TRUE | 2147483647 | <NULL> | abcdefg | 12345.00000 | 2020-03-01 18:39:14.120000 |" + System.lineSeparator() + "| -D | FALSE | -2147483648 | 9223372036854775807 | <NULL> | 12345.06789 | 2020-03-01 18:39:14.123000 |" + System.lineSeparator(), terminalOutput.toString());
// a failed streaming query must be cancelled exactly once
assertThat(mockExecutor.getNumCancelCalls(), is(1));
}
Aggregations