Example usage of io.cdap.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata: the writeIntoItselfTest method of the WritableDatasetTestRun class.
@Test
public void writeIntoItselfTest() throws Exception {
  try {
    initKeyValueTable(MY_TABLE, true);
    // Insert the table's contents back into itself via Hive.
    ListenableFuture<ExploreExecutionResult> future =
      exploreClient.submit(NAMESPACE_ID,
                           String.format("insert into table %s select * from %s",
                                         MY_TABLE_NAME, MY_TABLE_NAME));
    // Nothing to inspect on an INSERT result; just wait for completion and release it.
    future.get().close();

    // Assert the values have been inserted into the dataset.
    KeyStructValueTableDefinition.KeyStructValueTable table =
      datasetFramework.getDataset(MY_TABLE, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(table);

    // Read the dataset directly inside a short transaction.
    Transaction tx = transactionManager.startShort(100);
    table.startTx(tx);
    Assert.assertEquals(
      new KeyStructValueTableDefinition.KeyValue.Value("first", Lists.newArrayList(1, 2, 3, 4, 5)),
      table.get("1_2"));
    Assert.assertEquals(
      new KeyStructValueTableDefinition.KeyValue.Value("two", Lists.newArrayList(10, 11, 12, 13, 14)),
      table.get("2_2"));
    Assert.assertTrue(table.commitTx());
    transactionManager.canCommit(tx.getTransactionId(), table.getTxChanges());
    transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
    table.postTxCommit();

    // Make sure Hive also sees those values. try-with-resources guarantees the result is
    // closed even when an assertion fails (the original leaked it on assertion failure).
    try (ExploreExecutionResult result =
           exploreClient.submit(NAMESPACE_ID, "select * from " + MY_TABLE_NAME).get()) {
      Assert.assertEquals("1", result.next().getColumns().get(0).toString());
      Assert.assertEquals("1_2", result.next().getColumns().get(0).toString());
      Assert.assertEquals("2", result.next().getColumns().get(0).toString());
      Assert.assertEquals("2_2", result.next().getColumns().get(0).toString());
      Assert.assertFalse(result.hasNext());
    }
  } finally {
    datasetFramework.deleteInstance(MY_TABLE);
  }
}
Example usage of io.cdap.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata: the multipleInsertsTest method of the WritableDatasetTestRun class.
@Test
public void multipleInsertsTest() throws Exception {
  DatasetId myTable1 = NAMESPACE_ID.dataset("my_table_1");
  DatasetId myTable2 = NAMESPACE_ID.dataset("my_table_2");
  DatasetId myTable3 = NAMESPACE_ID.dataset("my_table_3");
  String myTable1HiveName = getDatasetHiveName(myTable1);
  String myTable2HiveName = getDatasetHiveName(myTable2);
  String myTable3HiveName = getDatasetHiveName(myTable3);
  try {
    initKeyValueTable(MY_TABLE, true);
    initKeyValueTable(myTable1, false);
    initKeyValueTable(myTable2, false);
    initKeyValueTable(myTable3, false);
    // Hive multi-insert: route key '1' to table 1, key '2' to table 2, everything to table 3.
    ListenableFuture<ExploreExecutionResult> future =
      exploreClient.submit(NAMESPACE_ID,
                           String.format("from %s insert into table %s select * where key='1' "
                                           + "insert into table %s select * where key='2' "
                                           + "insert into table %s select *",
                                         MY_TABLE_NAME, myTable1HiveName, myTable2HiveName, myTable3HiveName));
    // Nothing to inspect on an INSERT result; just wait for completion and release it.
    future.get().close();

    // Each target table must contain exactly the routed keys.
    assertKeysVisibleInHive(myTable2HiveName, "2_2");
    assertKeysVisibleInHive(myTable1HiveName, "1_2");
    assertKeysVisibleInHive(myTable3HiveName, "1_2", "2_2");
  } finally {
    // NOTE(review): if one deleteInstance throws, the later ones are skipped — acceptable
    // in a test, but worth confirming instances do not leak across test runs.
    datasetFramework.deleteInstance(MY_TABLE);
    datasetFramework.deleteInstance(myTable1);
    datasetFramework.deleteInstance(myTable2);
    datasetFramework.deleteInstance(myTable3);
  }
}

/**
 * Runs {@code select * from <hiveTableName>} and asserts the first column of each row equals
 * the expected keys, in order, with no extra rows. Closes the result even on assertion failure
 * (the original inline pattern leaked it when an assertion failed).
 */
private void assertKeysVisibleInHive(String hiveTableName, String... expectedKeys) throws Exception {
  try (ExploreExecutionResult result =
         exploreClient.submit(NAMESPACE_ID, "select * from " + hiveTableName).get()) {
    for (String expectedKey : expectedKeys) {
      Assert.assertEquals(expectedKey, result.next().getColumns().get(0).toString());
    }
    Assert.assertFalse(result.hasNext());
  }
}
Example usage of io.cdap.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata: the testTablesWithSpecialChars method of the WritableDatasetTestRun class.
@Test
public void testTablesWithSpecialChars() throws Exception {
  // '.' is replaced with '_' in Hive table names, so create a dataset with '.' in its name.
  DatasetId myTable1 = NAMESPACE_ID.dataset("dot.table");
  // '-' is replaced with '_' in Hive table names, so create a dataset with '-' in its name.
  // (The original comment here was a copy-paste of the one above and described the wrong character.)
  DatasetId myTable2 = NAMESPACE_ID.dataset("hyphen-table");
  try {
    initKeyValueTable(myTable1, true);
    initKeyValueTable(myTable2, true);
    // Both datasets must be queryable under their sanitized Hive names. try-with-resources
    // closes each result even when an assertion fails (the original leaked on failure).
    try (ExploreExecutionResult result =
           exploreClient.submit(NAMESPACE_ID, "select * from dataset_dot_table").get()) {
      Assert.assertEquals("1", result.next().getColumns().get(0).toString());
    }
    try (ExploreExecutionResult result =
           exploreClient.submit(NAMESPACE_ID, "select * from dataset_hyphen_table").get()) {
      Assert.assertEquals("1", result.next().getColumns().get(0).toString());
    }
  } finally {
    datasetFramework.deleteInstance(myTable1);
    datasetFramework.deleteInstance(myTable2);
  }
}
Example usage of io.cdap.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata: the testSelectStar method of the HiveExploreTableTestRun class.
/**
 * Runs {@code select * from tableToQuery} and verifies both the result schema and the first
 * record against either the original SCHEMA or the evolved schema (which renames/retypes
 * fields and adds {@code new_field}).
 *
 * @param tableToQuery Hive table name to query
 * @param tableInSchema table name prefix expected in the returned column descriptors
 * @param schema dataset schema in effect; selects which expected schema/values to assert
 */
private void testSelectStar(String tableToQuery, String tableInSchema, Schema schema) throws Exception {
  // Build the expected Hive schema. The two variants differ in field order/types because the
  // evolved schema drops bool_field, retypes double_field/bytes_field and adds new_field.
  List<ColumnDesc> expectedSchema;
  if (schema.equals(SCHEMA)) {
    expectedSchema = Lists.newArrayList(
      new ColumnDesc(tableInSchema + ".bool_field", "BOOLEAN", 1, null),
      new ColumnDesc(tableInSchema + ".int_field", "INT", 2, null),
      new ColumnDesc(tableInSchema + ".long_field", "BIGINT", 3, null),
      new ColumnDesc(tableInSchema + ".float_field", "FLOAT", 4, null),
      new ColumnDesc(tableInSchema + ".double_field", "DOUBLE", 5, null),
      new ColumnDesc(tableInSchema + ".bytes_field", "BINARY", 6, null),
      new ColumnDesc(tableInSchema + ".string_field", "STRING", 7, null));
  } else {
    expectedSchema = Lists.newArrayList(
      new ColumnDesc(tableInSchema + ".int_field", "INT", 1, null),
      new ColumnDesc(tableInSchema + ".long_field", "BIGINT", 2, null),
      new ColumnDesc(tableInSchema + ".float_field", "FLOAT", 3, null),
      new ColumnDesc(tableInSchema + ".double_field", "BINARY", 4, null),
      new ColumnDesc(tableInSchema + ".bytes_field", "STRING", 5, null),
      new ColumnDesc(tableInSchema + ".new_field", "STRING", 6, null),
      new ColumnDesc(tableInSchema + ".string_field", "STRING", 7, null));
  }
  // try-with-resources: the original never closed this result, leaking it even on success.
  try (ExploreExecutionResult results =
         exploreClient.submit(NAMESPACE_ID, "select * from " + tableToQuery).get()) {
    // check SCHEMA
    Assert.assertEquals(expectedSchema, results.getResultSchema());
    List<Object> columns = results.next().getColumns();
    // check record1, account for the variability between SCHEMA and NEW_SCHEMA
    int index = 0;
    if (schema.equals(SCHEMA)) {
      Assert.assertFalse((Boolean) columns.get(index++));
    }
    Assert.assertEquals(Integer.MAX_VALUE, columns.get(index++));
    Assert.assertEquals(Long.MAX_VALUE, columns.get(index++));
    // Hive hands float columns back as Double, hence the Double cast with a tolerance check.
    Assert.assertTrue(Math.abs(3.14f - (Double) columns.get(index++)) < 0.000001);
    if (schema.equals(SCHEMA)) {
      Assert.assertTrue(Math.abs(3.14 - (Double) columns.get(index++)) < 0.000001);
      Assert.assertArrayEquals(new byte[] { 'A', 'B', 'C' }, (byte[]) columns.get(index++));
    } else {
      Assert.assertArrayEquals(Bytes.toBytes(3.14D), (byte[]) columns.get(index++));
      Assert.assertEquals("ABC", columns.get(index++));
      Assert.assertNull(columns.get(index++));
    }
    Assert.assertEquals("row1", columns.get(index));
    // should not be any more
    Assert.assertFalse(results.hasNext());
  }
}
Example usage of io.cdap.cdap.explore.client.ExploreExecutionResult in project cdap by caskdata: the testNonAsciiStrings method of the HiveExploreTableTestRun class.
@Test
public void testNonAsciiStrings() throws Exception {
  DatasetId ttId = NAMESPACE_ID.dataset("tt");
  datasetFramework.addInstance(
    Table.class.getName(), ttId,
    TableProperties.builder()
      .setSchema(Schema.recordOf("record",
                                 Schema.Field.of("a", Schema.of(Schema.Type.STRING)),
                                 Schema.Field.of("b", Schema.of(Schema.Type.STRING))))
      .setRowFieldName("a")
      .setExploreTableName("tt")
      .build());
  try {
    // Accessing dataset instance to perform data operations
    Table tt = datasetFramework.getDataset(ttId, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(tt);

    // Write one ASCII row and one non-ASCII row inside a short transaction.
    Transaction tx = transactionManager.startShort(100);
    ((TransactionAware) tt).startTx(tx);
    tt.put(new Put("a", "b", "c"));
    // row key and column value are non-ASCII
    tt.put(new Put("ä", "b", "ç"));
    ((TransactionAware) tt).commitTx();
    transactionManager.canCommit(tx.getTransactionId(), ((TransactionAware) tt).getTxChanges());
    transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
    ((TransactionAware) tt).postTxCommit();

    // Hive must round-trip the non-ASCII row key and value intact. try-with-resources:
    // the original never closed this result, leaking it even on success.
    try (ExploreExecutionResult results =
           exploreClient.submit(NAMESPACE_ID, "select * from tt").get()) {
      List<Object> columns = results.next().getColumns();
      Assert.assertEquals(2, columns.size());
      Assert.assertEquals("a", columns.get(0));
      Assert.assertEquals("c", columns.get(1));
      columns = results.next().getColumns();
      Assert.assertEquals(2, columns.size());
      Assert.assertEquals("ä", columns.get(0));
      Assert.assertEquals("ç", columns.get(1));
    }
  } finally {
    datasetFramework.deleteInstance(ttId);
  }
}
Aggregations