Use of co.cask.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata.
The class ExecuteQueryCommand, method perform.
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  String query = arguments.get(ArgumentName.QUERY.toString());
  long timeOutMins = arguments.getLongOptional(ArgumentName.TIMEOUT.toString(), DEFAULT_TIMEOUT_MIN);
  ListenableFuture<ExploreExecutionResult> future = queryClient.execute(cliConfig.getCurrentNamespace(), query);
  try {
    ExploreExecutionResult executionResult = future.get(timeOutMins, TimeUnit.MINUTES);
    if (!executionResult.canContainResults()) {
      output.println("SQL statement does not output any result.");
      executionResult.close();
      return;
    }
    final List<ColumnDesc> schema = executionResult.getResultSchema();
    String[] header = new String[schema.size()];
    for (int i = 0; i < header.length; i++) {
      ColumnDesc column = schema.get(i);
      // Hive column positions start at 1
      int index = column.getPosition() - 1;
      header[index] = column.getName() + ": " + column.getType();
    }
    // Drain all remaining results before closing the handle
    List<QueryResult> rows = Lists.newArrayList(executionResult);
    executionResult.close();
    QueryStatus.OpStatus opStatus = executionResult.getStatus().getStatus();
    if (opStatus != QueryStatus.OpStatus.FINISHED) {
      throw new SQLException(String.format("Query '%s' execution did not finish successfully. Got final state - %s",
                                           query, opStatus));
    }
    Table table = Table.builder().setHeader(header).setRows(rows, new RowMaker<QueryResult>() {
      @Override
      public List<?> makeRow(QueryResult object) {
        return object.getColumns();
      }
    }).build();
    cliConfig.getTableRenderer().render(cliConfig, output, table);
    output.printf("Fetched %d rows", rows.size()).println();
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
  } catch (ExecutionException e) {
    Throwable t = Throwables.getRootCause(e);
    if (t instanceof HandleNotFoundException) {
      throw Throwables.propagate(t);
    }
    throw new SQLException(Throwables.getRootCause(e));
  } catch (CancellationException e) {
    throw new RuntimeException("Query has been cancelled on the ListenableFuture object.");
  } catch (TimeoutException e) {
    output.println("Couldn't obtain results after " + timeOutMins + " mins.");
  }
}
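The command above blocks on the future with a hard timeout. If blocking the calling thread is undesirable, the same ListenableFuture can be consumed with a callback instead. A minimal sketch, assuming the same queryClient and query as above (older Guava versions provide the two-argument Futures.addCallback used here; newer ones require an explicit executor; the callback bodies are placeholders):

// Non-blocking alternative (sketch). Uses com.google.common.util.concurrent.Futures
// and FutureCallback; the handler bodies are placeholders, not CDAP code.
Futures.addCallback(queryClient.execute(cliConfig.getCurrentNamespace(), query),
    new FutureCallback<ExploreExecutionResult>() {
      @Override
      public void onSuccess(ExploreExecutionResult result) {
        // consume the rows, then close the result to release the query handle
      }

      @Override
      public void onFailure(Throwable t) {
        // log or surface the failure
      }
    });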
Use of co.cask.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata.
The class CLIMainTest, method assertExploreQuerySuccess.
private static void assertExploreQuerySuccess(ListenableFuture<ExploreExecutionResult> dbCreationFuture) throws Exception {
  ExploreExecutionResult exploreExecutionResult = dbCreationFuture.get(10, TimeUnit.SECONDS);
  QueryStatus status = exploreExecutionResult.getStatus();
  Assert.assertEquals(QueryStatus.OpStatus.FINISHED, status.getStatus());
}
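The helper is handed the future returned by the query client directly. A hypothetical call site, for orientation (the namespace and SQL statement are illustrative, not taken from CLIMainTest):

// Hypothetical usage of the helper above; the statement is illustrative.
assertExploreQuerySuccess(queryClient.execute(cliConfig.getCurrentNamespace(), "SHOW TABLES"));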
Use of co.cask.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata.
The class GenerateClientUsageExample, method queryClient.
public void queryClient() throws Exception {
  // Construct the client used to interact with CDAP
  QueryClient queryClient = new QueryClient(clientConfig);
  // Perform an ad-hoc query using the Purchase example
  ListenableFuture<ExploreExecutionResult> resultFuture =
    queryClient.execute(NamespaceId.DEFAULT, "SELECT * FROM dataset_history WHERE customer IN ('Alice','Bob')");
  ExploreExecutionResult results = resultFuture.get();
  // Fetch schema
  List<ColumnDesc> schema = results.getResultSchema();
  String[] header = new String[schema.size()];
  for (int i = 0; i < header.length; i++) {
    ColumnDesc column = schema.get(i);
    // Hive column positions start at 1
    int index = column.getPosition() - 1;
    header[index] = column.getName() + ": " + column.getType();
  }
}
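This usage example stops after reading the schema. A sketch of draining the rows as well, assuming only the iterator behavior already shown elsewhere on this page (ExploreExecutionResult iterates QueryResult objects and should be closed when done):

// Sketch: iterate the remaining rows, then release the query handle.
while (results.hasNext()) {
  QueryResult queryResult = results.next();
  System.out.println(queryResult.getColumns());
}
results.close();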
Use of co.cask.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata.
The class HiveExploreServiceStreamTest, method testAvroFormattedStream.
@Test
public void testAvroFormattedStream() throws Exception {
  StreamId streamId = NAMESPACE_ID.stream("avroStream");
  createStream(streamId);
  try {
    Schema schema = Schema.recordOf("purchase",
                                    Schema.Field.of("user", Schema.of(Schema.Type.STRING)),
                                    Schema.Field.of("num", Schema.of(Schema.Type.INT)),
                                    Schema.Field.of("price", Schema.of(Schema.Type.DOUBLE)));
    FormatSpecification formatSpecification =
      new FormatSpecification(Formats.AVRO, schema, Collections.<String, String>emptyMap());
    StreamProperties properties = new StreamProperties(Long.MAX_VALUE, formatSpecification, 1000);
    setStreamProperties(NAMESPACE_ID.getNamespace(), "avroStream", properties);
    // our schemas are compatible
    org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(schema.toString());
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userX", 5, 3.14));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userX", 10, 2.34));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userY", 1, 1.23));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userZ", 50, 45.67));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userZ", 100, 98.76));
    Double xPrice = 5 * 3.14 + 10 * 2.34;
    Double yPrice = 1.23;
    Double zPrice = 50 * 45.67 + 100 * 98.76;
    ExploreExecutionResult result = exploreClient.submit(
      NAMESPACE_ID,
      "SELECT `user`, sum(num) as total_num, sum(price * num) as total_price " +
        "FROM " + getTableName(streamId) + " GROUP BY `user` ORDER BY total_price DESC").get();
    Assert.assertTrue(result.hasNext());
    Assert.assertEquals(Lists.newArrayList(new ColumnDesc("user", "STRING", 1, null),
                                           new ColumnDesc("total_num", "BIGINT", 2, null),
                                           new ColumnDesc("total_price", "DOUBLE", 3, null)),
                        result.getResultSchema());
    // should get 3 rows
    // first row should be for userZ
    List<Object> rowColumns = result.next().getColumns();
    // toString b/c Avro returns a Utf8 object for strings
    Assert.assertEquals("userZ", rowColumns.get(0).toString());
    Assert.assertEquals(150L, rowColumns.get(1));
    Assert.assertTrue(Math.abs(zPrice - (Double) rowColumns.get(2)) < 0.0000001);
    // second row should be userX
    rowColumns = result.next().getColumns();
    Assert.assertEquals("userX", rowColumns.get(0).toString());
    Assert.assertEquals(15L, rowColumns.get(1));
    Assert.assertTrue(Math.abs(xPrice - (Double) rowColumns.get(2)) < 0.0000001);
    // third row should be userY
    rowColumns = result.next().getColumns();
    Assert.assertEquals("userY", rowColumns.get(0).toString());
    Assert.assertEquals(1L, rowColumns.get(1));
    Assert.assertTrue(Math.abs(yPrice - (Double) rowColumns.get(2)) < 0.0000001);
    // shouldn't be any more results
    Assert.assertFalse(result.hasNext());
  } finally {
    dropStream(streamId);
  }
}
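The helper createAvroEvent is called above but not shown on this page. A minimal sketch of what it might look like, assuming it serializes one GenericRecord per event with Avro's binary encoding; the name and signature match the call sites above, but the body is an assumption, not the actual CDAP test code:

// Sketch (assumption): build a GenericRecord from positional values and
// serialize it with Avro's binary encoder. The real helper in
// HiveExploreServiceStreamTest may differ. Uses org.apache.avro.generic.*
// and org.apache.avro.io.*.
private byte[] createAvroEvent(org.apache.avro.Schema schema, Object... values) throws IOException {
  GenericRecordBuilder builder = new GenericRecordBuilder(schema);
  int i = 0;
  for (org.apache.avro.Schema.Field field : schema.getFields()) {
    builder.set(field, values[i]);
    i++;
  }
  GenericRecord record = builder.build();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
  encoder.flush();
  return out.toByteArray();
}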
Use of co.cask.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata.
The class HiveExploreServiceStreamTest, method testSelectStarOnStream.
private void testSelectStarOnStream() throws Exception {
  ExploreExecutionResult results = exploreClient.submit(NAMESPACE_ID, "select * from " + streamTableName).get();
  // check schema
  List<ColumnDesc> expectedSchema = Lists.newArrayList(
    new ColumnDesc(streamTableName + ".ts", "BIGINT", 1, null),
    new ColumnDesc(streamTableName + ".headers", "map<string,string>", 2, null),
    new ColumnDesc(streamTableName + ".body", "STRING", 3, null));
  Assert.assertEquals(expectedSchema, results.getResultSchema());
  // check each result, without checking the timestamp since that changes for each test
  // first result
  List<Object> columns = results.next().getColumns();
  // maps are returned as JSON objects
  Assert.assertEquals(headers, GSON.fromJson((String) columns.get(1), headerType));
  Assert.assertEquals(body1, columns.get(2));
  // second result
  columns = results.next().getColumns();
  Assert.assertEquals(headers, GSON.fromJson((String) columns.get(1), headerType));
  Assert.assertEquals(body2, columns.get(2));
  // third result
  columns = results.next().getColumns();
  Assert.assertEquals(headers, GSON.fromJson((String) columns.get(1), headerType));
  Assert.assertEquals(body3, columns.get(2));
  // should not be any more results
  Assert.assertFalse(results.hasNext());
}
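The fixtures streamTableName, headers, headerType, GSON, and body1 through body3 are class members not included in this snippet. A hypothetical set of declarations, for orientation only; every value below is illustrative and not taken from the actual test class:

// Hypothetical fixtures assumed by the test above; the real declarations
// live elsewhere in HiveExploreServiceStreamTest.
private static final Gson GSON = new Gson();
private static final Type headerType = new TypeToken<Map<String, String>>() { }.getType();
private static final Map<String, String> headers = ImmutableMap.of("header1", "value1", "header2", "value2");
private static final String body1 = "userX,actionA,item123";
private static final String body2 = "userY,actionB,item456";
private static final String body3 = "userZ,actionC,item789";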