use of io.cdap.cdap.proto.QueryResult in project cdap by cdapio.
the class Hive13ExploreService method doFetchNextResults.
@SuppressWarnings("unchecked")
@Override
protected List<QueryResult> doFetchNextResults(OperationHandle handle, FetchOrientation fetchOrientation,
                                               int size) throws Exception {
  Class cliServiceClass = Class.forName("org.apache.hive.service.cli.CLIService");
  Method fetchResultsMethod = cliServiceClass.getMethod("fetchResults", OperationHandle.class,
                                                        FetchOrientation.class, Long.TYPE);
  RowSet rowSet = (RowSet) fetchResultsMethod.invoke(getCliService(), handle, fetchOrientation, size);
  ImmutableList.Builder<QueryResult> rowsBuilder = ImmutableList.builder();
  for (Object[] row : rowSet) {
    List<Object> cols = Lists.newArrayList(row);
    rowsBuilder.add(new QueryResult(cols));
  }
  return rowsBuilder.build();
}
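For context, a minimal consumption sketch of the list returned above, under the assumption that QueryResult exposes its cell values through getColumns() in column order:

// Sketch only: getColumns() is assumed to return the row's values in column order.
List<QueryResult> rows = doFetchNextResults(handle, FetchOrientation.FETCH_NEXT, 100);
for (QueryResult row : rows) {
  List<Object> cells = row.getColumns();
  System.out.println(cells);  // cells may contain nulls, e.g. for outer-join results
}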
use of io.cdap.cdap.proto.QueryResult in project cdap by cdapio.
the class ExploreResultSetTest method testResultSet.
@Test
public void testResultSet() throws Exception {
  ExploreClient exploreClient = new MockExploreClient(
    ImmutableMap.of("mock_query", (List<ColumnDesc>) Lists.newArrayList(
      new ColumnDesc("column1", "STRING", 1, ""),
      new ColumnDesc("column2", "int", 2, ""),
      new ColumnDesc("column3", "char", 3, ""),
      new ColumnDesc("column4", "float", 4, ""),
      new ColumnDesc("column5", "double", 5, ""),
      new ColumnDesc("column6", "boolean", 6, ""),
      new ColumnDesc("column7", "tinyint", 7, ""),
      new ColumnDesc("column8", "smallint", 8, ""),
      new ColumnDesc("column9", "bigint", 9, ""),
      new ColumnDesc("column10", "date", 10, ""),
      new ColumnDesc("column11", "timestamp", 11, ""),
      new ColumnDesc("column12", "decimal", 12, ""),
      new ColumnDesc("column14", "map<string,string>", 13, ""),
      new ColumnDesc("column15", "array<string>", 14, ""),
      new ColumnDesc("column16", "struct<name:string,attr:string>", 15, ""))),
    ImmutableMap.of("mock_query", (List<QueryResult>) Lists.newArrayList(
      new QueryResult(ImmutableList.<Object>of(
        "value1", 1, "c", 0.1f, 0.2d, true, 0x1, (short) 2, (long) 10,
        "2014-06-20", "2014-06-20 07:37:00", "1000000000",
        "\"{\"key1\":\"value1\"}", "[\"a\",\"b\",\"c\"]",
        "{\"name\":\"first\",\"attr\":\"second\"}")))));
  ResultSet resultSet = new ExploreResultSet(
    exploreClient.submit(new NamespaceId(ns), "mock_query").get(),
    new ExploreStatement(null, exploreClient, ns), 0);
  Assert.assertTrue(resultSet.next());
  Assert.assertEquals(resultSet.getObject(1), resultSet.getObject("column1"));
  Assert.assertEquals("value1", resultSet.getString(1));
  Assert.assertEquals(1, resultSet.getInt(2));
  Assert.assertEquals("c", resultSet.getString(3));
  Assert.assertEquals(0.1f, resultSet.getFloat(4), 0.01);
  Assert.assertEquals(0.2d, resultSet.getDouble(5), 0.01);
  Assert.assertEquals(true, resultSet.getBoolean(6));
  Assert.assertEquals(0x1, resultSet.getByte(7));
  Assert.assertEquals(2, resultSet.getShort(8));
  Assert.assertEquals(10, resultSet.getLong(9));
  Assert.assertEquals(Date.valueOf("2014-06-20"), resultSet.getDate(10));
  Assert.assertEquals(Timestamp.valueOf("2014-06-20 07:37:00"), resultSet.getTimestamp(11));
  Assert.assertEquals(new BigDecimal("1000000000"), resultSet.getBigDecimal(12));
  Assert.assertEquals("\"{\"key1\":\"value1\"}", resultSet.getString(13));
  Assert.assertEquals("[\"a\",\"b\",\"c\"]", resultSet.getString(14));
  Assert.assertEquals("{\"name\":\"first\",\"attr\":\"second\"}", resultSet.getString(15));
  Assert.assertFalse(resultSet.next());
  Assert.assertFalse(resultSet.next());
  // Access past the end of the result set should still fail with SQLException.
  try {
    resultSet.getObject(1);
    Assert.fail("Expected SQLException: no more rows");
  } catch (SQLException e) {
    // Expected: no more rows
  }
}
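The single-expression mock setup above is dense; a pared-down sketch of the same wiring, using a hypothetical two-column query name ("simple_query"), shows the pattern of keying column descriptors and result rows by query string:

// Sketch only: the query name and columns are made up; the constructors mirror the test above.
ExploreClient client = new MockExploreClient(
  ImmutableMap.of("simple_query", (List<ColumnDesc>) Lists.newArrayList(
    new ColumnDesc("key", "STRING", 1, ""),
    new ColumnDesc("count", "int", 2, ""))),
  ImmutableMap.of("simple_query", (List<QueryResult>) Lists.newArrayList(
    new QueryResult(ImmutableList.<Object>of("a", 3)))));
ResultSet rs = new ExploreResultSet(
  client.submit(new NamespaceId(ns), "simple_query").get(),
  new ExploreStatement(null, client, ns), 0);
Assert.assertTrue(rs.next());
Assert.assertEquals("a", rs.getString(1));
Assert.assertEquals(3, rs.getInt(2));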
use of io.cdap.cdap.proto.QueryResult in project cdap by cdapio.
the class HiveExploreServiceTestRun method testJoin.
@Test
public void testJoin() throws Exception {
  DatasetId myTable1 = NAMESPACE_ID.dataset("my_table_1");
  String myTable1Name = getDatasetHiveName(myTable1);
  // Performing admin operations to create dataset instance
  datasetFramework.addInstance("keyStructValueTable", myTable1, DatasetProperties.EMPTY);
  try {
    Transaction tx1 = transactionManager.startShort(100);
    // Accessing dataset instance to perform data operations
    KeyStructValueTableDefinition.KeyStructValueTable table =
      datasetFramework.getDataset(myTable1, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(table);
    table.startTx(tx1);
    KeyValue.Value value1 = new KeyValue.Value("two", Lists.newArrayList(20, 21, 22, 23, 24));
    KeyValue.Value value2 = new KeyValue.Value("third", Lists.newArrayList(30, 31, 32, 33, 34));
    table.put("2", value1);
    table.put("3", value2);
    Assert.assertEquals(value1, table.get("2"));
    Assert.assertTrue(table.commitTx());
    transactionManager.canCommit(tx1.getTransactionId(), table.getTxChanges());
    transactionManager.commit(tx1.getTransactionId(), tx1.getWritePointer());
    table.postTxCommit();

    // Inner join
    String query = String.format("select %s.key, %s.value from %s join %s on (%s.key=%s.key)",
                                 MY_TABLE_NAME, MY_TABLE_NAME, MY_TABLE_NAME, myTable1Name,
                                 MY_TABLE_NAME, myTable1Name);
    runCommand(NAMESPACE_ID, query, true,
               Lists.newArrayList(
                 new ColumnDesc(MY_TABLE_NAME + ".key", "STRING", 1, null),
                 new ColumnDesc(MY_TABLE_NAME + ".value", "struct<name:string,ints:array<int>>", 2, null)),
               Lists.newArrayList(
                 new QueryResult(Lists.<Object>newArrayList("2", "{\"name\":\"two\",\"ints\":[10,11,12,13,14]}"))));

    // Right outer join
    query = String.format("select %s.key, %s.value, %s.key, %s.value "
                            + "from %s right outer join %s on (%s.key=%s.key)",
                          MY_TABLE_NAME, MY_TABLE_NAME, myTable1Name, myTable1Name,
                          MY_TABLE_NAME, myTable1Name, MY_TABLE_NAME, myTable1Name);
    runCommand(NAMESPACE_ID, query, true,
               Lists.newArrayList(
                 new ColumnDesc(MY_TABLE_NAME + ".key", "STRING", 1, null),
                 new ColumnDesc(MY_TABLE_NAME + ".value", "struct<name:string,ints:array<int>>", 2, null),
                 new ColumnDesc(myTable1Name + ".key", "STRING", 3, null),
                 new ColumnDesc(myTable1Name + ".value", "struct<name:string,ints:array<int>>", 4, null)),
               Lists.newArrayList(
                 new QueryResult(Lists.<Object>newArrayList("2", "{\"name\":\"two\",\"ints\":[10,11,12,13,14]}",
                                                            "2", "{\"name\":\"two\",\"ints\":[20,21,22,23,24]}")),
                 new QueryResult(Lists.<Object>newArrayList(null, null,
                                                            "3", "{\"name\":\"third\",\"ints\":[30,31,32,33,34]}"))));

    // Left outer join
    query = String.format("select %s.key, %s.value, %s.key, %s.value from %s "
                            + "left outer join %s on (%s.key=%s.key)",
                          MY_TABLE_NAME, MY_TABLE_NAME, myTable1Name, myTable1Name,
                          MY_TABLE_NAME, myTable1Name, MY_TABLE_NAME, myTable1Name);
    runCommand(NAMESPACE_ID, query, true,
               Lists.newArrayList(
                 new ColumnDesc(MY_TABLE_NAME + ".key", "STRING", 1, null),
                 new ColumnDesc(MY_TABLE_NAME + ".value", "struct<name:string,ints:array<int>>", 2, null),
                 new ColumnDesc(myTable1Name + ".key", "STRING", 3, null),
                 new ColumnDesc(myTable1Name + ".value", "struct<name:string,ints:array<int>>", 4, null)),
               Lists.newArrayList(
                 new QueryResult(Lists.<Object>newArrayList("1", "{\"name\":\"first\",\"ints\":[1,2,3,4,5]}",
                                                            null, null)),
                 new QueryResult(Lists.<Object>newArrayList("2", "{\"name\":\"two\",\"ints\":[10,11,12,13,14]}",
                                                            "2", "{\"name\":\"two\",\"ints\":[20,21,22,23,24]}"))));

    // Full outer join
    query = String.format("select %s.key, %s.value, %s.key, %s.value from %s "
                            + "full outer join %s on (%s.key=%s.key)",
                          MY_TABLE_NAME, MY_TABLE_NAME, myTable1Name, myTable1Name,
                          MY_TABLE_NAME, myTable1Name, MY_TABLE_NAME, myTable1Name);
    runCommand(NAMESPACE_ID, query, true,
               Lists.newArrayList(
                 new ColumnDesc(MY_TABLE_NAME + ".key", "STRING", 1, null),
                 new ColumnDesc(MY_TABLE_NAME + ".value", "struct<name:string,ints:array<int>>", 2, null),
                 new ColumnDesc(myTable1Name + ".key", "STRING", 3, null),
                 new ColumnDesc(myTable1Name + ".value", "struct<name:string,ints:array<int>>", 4, null)),
               Lists.newArrayList(
                 new QueryResult(Lists.<Object>newArrayList("1", "{\"name\":\"first\",\"ints\":[1,2,3,4,5]}",
                                                            null, null)),
                 new QueryResult(Lists.<Object>newArrayList("2", "{\"name\":\"two\",\"ints\":[10,11,12,13,14]}",
                                                            "2", "{\"name\":\"two\",\"ints\":[20,21,22,23,24]}")),
                 new QueryResult(Lists.<Object>newArrayList(null, null,
                                                            "3", "{\"name\":\"third\",\"ints\":[30,31,32,33,34]}"))));
  } finally {
    datasetFramework.deleteInstance(myTable1);
  }
}
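Each expected row above is a QueryResult wrapping a list of cells, with null standing in for the columns an outer join leaves unmatched. A small hypothetical helper (not part of the original test) can make such expectations easier to read:

// Hypothetical helper for building expected rows in tests like testJoin.
private static QueryResult row(Object... cells) {
  return new QueryResult(Lists.newArrayList(cells));
}

// With it, the full outer join expectation would read roughly:
// Lists.newArrayList(
//   row("1", "{\"name\":\"first\",\"ints\":[1,2,3,4,5]}", null, null),
//   row("2", "{\"name\":\"two\",\"ints\":[10,11,12,13,14]}", "2", "{\"name\":\"two\",\"ints\":[20,21,22,23,24]}"),
//   row(null, null, "3", "{\"name\":\"third\",\"ints\":[30,31,32,33,34]}"));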
use of io.cdap.cdap.proto.QueryResult in project cdap by cdapio.
the class HiveExploreServiceTestRun method testNamespaceCreationDeletion.
@Test
public void testNamespaceCreationDeletion() throws Exception {
  NamespaceId testNamespace = new NamespaceId("test");
  ListenableFuture<ExploreExecutionResult> future = exploreClient.schemas(null, null);
  assertStatementResult(future, true,
                        ImmutableList.of(new ColumnDesc("TABLE_SCHEM", "STRING", 1, "Schema name."),
                                         new ColumnDesc("TABLE_CATALOG", "STRING", 2, "Catalog name.")),
                        ImmutableList.of(new QueryResult(Lists.newArrayList(NAMESPACE_DATABASE, "")),
                                         new QueryResult(Lists.newArrayList(OTHER_NAMESPACE_DATABASE, "")),
                                         new QueryResult(Lists.newArrayList(DEFAULT_DATABASE, ""))));
  future = exploreClient.addNamespace(new NamespaceMeta.Builder().setName("test").build());
  future.get();
  createNamespace(testNamespace);
  try {
    future = exploreClient.schemas(null, null);
    assertStatementResult(future, true,
                          ImmutableList.of(new ColumnDesc("TABLE_SCHEM", "STRING", 1, "Schema name."),
                                           new ColumnDesc("TABLE_CATALOG", "STRING", 2, "Catalog name.")),
                          ImmutableList.of(new QueryResult(Lists.newArrayList(NAMESPACE_DATABASE, "")),
                                           new QueryResult(Lists.newArrayList(OTHER_NAMESPACE_DATABASE, "")),
                                           new QueryResult(Lists.newArrayList("cdap_" + testNamespace.getNamespace(), "")),
                                           new QueryResult(Lists.newArrayList(DEFAULT_DATABASE, ""))));
    future = exploreClient.removeNamespace(testNamespace);
    future.get();
    future = exploreClient.schemas(null, null);
    assertStatementResult(future, true,
                          ImmutableList.of(new ColumnDesc("TABLE_SCHEM", "STRING", 1, "Schema name."),
                                           new ColumnDesc("TABLE_CATALOG", "STRING", 2, "Catalog name.")),
                          ImmutableList.of(new QueryResult(Lists.<Object>newArrayList(NAMESPACE_DATABASE, "")),
                                           new QueryResult(Lists.<Object>newArrayList(OTHER_NAMESPACE_DATABASE, "")),
                                           new QueryResult(Lists.<Object>newArrayList(DEFAULT_DATABASE, ""))));
  } finally {
    deleteNamespace(testNamespace);
  }
}
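The assertStatementResult helper hides the row iteration; a minimal sketch of reading the schemas() result directly, assuming ExploreExecutionResult iterates QueryResult rows (as its use with ExploreResultSet elsewhere suggests):

// Sketch only: assumes ExploreExecutionResult behaves as an Iterator<QueryResult>.
ExploreExecutionResult schemas = exploreClient.schemas(null, null).get();
while (schemas.hasNext()) {
  QueryResult row = schemas.next();
  // Each row carries the schema (database) name and an empty catalog string.
  System.out.println(row.getColumns());
}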
use of io.cdap.cdap.proto.QueryResult in project cdap by cdapio.
the class HiveExploreStructuredRecordTestRun method testRecordScannableAndWritableIsOK.
@Test
public void testRecordScannableAndWritableIsOK() throws Exception {
  DatasetId instanceId = NAMESPACE_ID.dataset("tabul");
  datasetFramework.addInstance("TableWrapper", instanceId, DatasetProperties.builder()
    .add(DatasetProperties.SCHEMA,
         Schema.recordOf("intRecord", Schema.Field.of("x", Schema.of(Schema.Type.STRING))).toString())
    .build());
  DatasetSpecification spec = datasetFramework.getDatasetSpec(instanceId);
  try {
    exploreTableManager.enableDataset(instanceId, spec, false);
    runCommand(NAMESPACE_ID, "describe dataset_tabul", true,
               Lists.newArrayList(
                 new ColumnDesc("col_name", "STRING", 1, "from deserializer"),
                 new ColumnDesc("data_type", "STRING", 2, "from deserializer"),
                 new ColumnDesc("comment", "STRING", 3, "from deserializer")),
               Lists.newArrayList(
                 new QueryResult(Lists.<Object>newArrayList("x", "string", "from deserializer"))));
  } finally {
    datasetFramework.deleteInstance(instanceId);
  }
}