Use of io.cdap.cdap.proto.QueryResult in project cdap by caskdata.
The class HiveExploreStructuredRecordTestRun, method testInsert.
@Test
public void testInsert() throws Exception {
  DatasetId copyTable = NAMESPACE_ID.dataset("emailCopy");
  datasetFramework.addInstance(Table.class.getName(), copyTable,
                               TableProperties.builder()
                                 .setSchema(EmailTableDefinition.SCHEMA)
                                 .setRowFieldName("id")
                                 .build());
  try {
    String command = String.format("insert into %s select * from %s",
                                   getDatasetHiveName(copyTable), MY_TABLE_NAME);
    ExploreExecutionResult result = exploreClient.submit(NAMESPACE_ID, command).get();
    Assert.assertEquals(QueryStatus.OpStatus.FINISHED, result.getStatus().getStatus());
    command = String.format("select id, subject, body, sender from %s", getDatasetHiveName(copyTable));
    runCommand(NAMESPACE_ID, command, true,
               Lists.newArrayList(new ColumnDesc("id", "STRING", 1, null),
                                  new ColumnDesc("subject", "STRING", 2, null),
                                  new ColumnDesc("body", "STRING", 3, null),
                                  new ColumnDesc("sender", "STRING", 4, null)),
               Lists.newArrayList(new QueryResult(Lists.<Object>newArrayList(
                 "email1", "this is the subject", "this is the body", "sljackson@boss.com"))));
  } finally {
    datasetFramework.deleteInstance(copyTable);
  }
}
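The runCommand helper comes from the shared test base class and is not shown on this page. As a rough sketch only, inferred from the call sites above and the ExploreClient API visible in these snippets (the body is a hypothetical reconstruction, not the cdap implementation), such a helper might look like:

private static void runCommand(NamespaceId namespace, String command, boolean expectResults,
                               @Nullable List<ColumnDesc> expectedSchema,
                               @Nullable List<QueryResult> expectedResults) throws Exception {
  // submit the query and block until it completes
  ExploreExecutionResult result = exploreClient.submit(namespace, command).get();
  Assert.assertEquals(QueryStatus.OpStatus.FINISHED, result.getStatus().getStatus());
  Assert.assertEquals(expectResults, result.hasNext());
  if (expectedSchema != null) {
    Assert.assertEquals(expectedSchema, result.getResultSchema());
  }
  if (expectedResults != null) {
    // ExploreExecutionResult iterates over QueryResult rows
    Assert.assertEquals(expectedResults, Lists.newArrayList(result));
  }
}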
Use of io.cdap.cdap.proto.QueryResult in project cdap by caskdata.
The class HiveExploreStructuredRecordTestRun, method testCreateDropCustomDBAndTable.
private void testCreateDropCustomDBAndTable(@Nullable String database, @Nullable String tableName) throws Exception {
  String datasetName = "cdccat";
  DatasetId datasetId = NAMESPACE_ID.dataset(datasetName);
  ExploreProperties.Builder props = ExploreProperties.builder();
  if (tableName != null) {
    props.setExploreTableName(tableName);
  } else {
    tableName = getDatasetHiveName(datasetId);
  }
  if (database != null) {
    runCommand(NAMESPACE_ID, "create database if not exists " + database, false, null, null);
    props.setExploreDatabaseName(database);
  }
  try {
    datasetFramework.addInstance("email", datasetId, props.build());
    if (database == null) {
      runCommand(NAMESPACE_ID, "show tables", true, null,
                 Lists.newArrayList(new QueryResult(Lists.<Object>newArrayList(MY_TABLE_NAME)),
                                    new QueryResult(Lists.<Object>newArrayList(tableName))));
    } else {
      runCommand(NAMESPACE_ID, "show tables in " + database, true, null,
                 Lists.newArrayList(new QueryResult(Lists.<Object>newArrayList(tableName))));
    }
    datasetFramework.deleteInstance(datasetId);
    if (database == null) {
      runCommand(NAMESPACE_ID, "show tables", true, null,
                 Lists.newArrayList(new QueryResult(Lists.<Object>newArrayList(MY_TABLE_NAME))));
    } else {
      runCommand(NAMESPACE_ID, "show tables in " + database, false, null, Collections.<QueryResult>emptyList());
    }
  } finally {
    if (database != null) {
      // note the space before "cascade": concatenating database + "cascade" would target
      // a different (nonexistent) database name and leave the real one behind
      runCommand(NAMESPACE_ID, "drop database if exists " + database + " cascade", false, null, null);
    }
  }
}
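The helper takes a nullable custom database and a nullable custom table name, so callers can exercise all four combinations. A hypothetical set of driver tests (the names are illustrative; the actual test methods in cdap may differ):

@Test
public void testCreateDropInDefaultDatabase() throws Exception {
  testCreateDropCustomDBAndTable(null, null);
}

@Test
public void testCreateDropWithCustomTable() throws Exception {
  testCreateDropCustomDBAndTable(null, "customtable");
}

@Test
public void testCreateDropWithCustomDatabase() throws Exception {
  testCreateDropCustomDBAndTable("customdb", null);
}

@Test
public void testCreateDropWithCustomDatabaseAndTable() throws Exception {
  testCreateDropCustomDBAndTable("customdb", "customtable");
}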
Use of io.cdap.cdap.proto.QueryResult in project cdap by caskdata.
The class HiveExploreTableTestRun, method testTableWithDateTimestamp.
@Test
public void testTableWithDateTimestamp() throws Exception {
  TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
  DatasetId dtTsTable = NAMESPACE_ID.dataset("dt_ts_table");
  DatasetId otherDtTsTable = NAMESPACE_ID.dataset("other_dt_ts_table");
  Schema schema = Schema.recordOf(
    "recordWithDateTimestamp",
    Schema.Field.of("int_field", Schema.of(Schema.Type.INT)),
    Schema.Field.of("string_field", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
    Schema.Field.of("date_field", Schema.nullableOf(Schema.of(Schema.LogicalType.DATE))),
    Schema.Field.of("ts_millis_field", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MILLIS))),
    Schema.Field.of("ts_micros_field", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MICROS))));
  datasetFramework.addInstance(Table.class.getName(), dtTsTable,
                               TableProperties.builder()
                                 .setSchema(schema)
                                 .setRowFieldName("int_field")
                                 .setExploreTableName("dt_ts_table")
                                 .build());
  datasetFramework.addInstance(Table.class.getName(), otherDtTsTable,
                               TableProperties.builder()
                                 .setSchema(schema)
                                 .setRowFieldName("int_field")
                                 .setExploreTableName("other_dt_ts_table")
                                 .build());
  try {
    // Accessing dataset instance to perform data operations
    Table table = datasetFramework.getDataset(dtTsTable, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(table);
    Transaction tx = transactionManager.startShort(100);
    ((TransactionAware) table).startTx(tx);
    Put put = new Put(Bytes.toBytes("row1"));
    put.add("int_field", 1);
    put.add("string_field", "alice");
    put.add("date_field", 0);
    put.add("ts_millis_field", 1536336590595L);
    put.add("ts_micros_field", 1536336590595123L);
    table.put(put);
    put = new Put(Bytes.toBytes("row2"));
    put.add("int_field", 2);
    put.add("string_field", "bob");
    table.put(put);
    ((TransactionAware) table).commitTx();
    transactionManager.canCommit(tx.getTransactionId(), ((TransactionAware) table).getTxChanges());
    transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
    ((TransactionAware) table).postTxCommit();
    ExploreExecutionResult results = exploreClient.submit(NAMESPACE_ID, "select * from dt_ts_table").get();
    List<Object> columns = results.next().getColumns();
    Assert.assertEquals(5, columns.size());
    Assert.assertEquals("alice", columns.get(1));
    Assert.assertEquals("1970-01-01", columns.get(2));
    Assert.assertEquals("2018-09-07 16:09:50.595", columns.get(3));
    Assert.assertEquals("2018-09-07 16:09:50.595123", columns.get(4));
    columns = results.next().getColumns();
    Assert.assertEquals(5, columns.size());
    Assert.assertEquals("bob", columns.get(1));
    Assert.assertNull(columns.get(2));
    Assert.assertNull(columns.get(3));
    Assert.assertNull(columns.get(4));
    String command = "insert into other_dt_ts_table select int_field, string_field, date_field, ts_millis_field, "
      + "ts_micros_field from dt_ts_table";
    ExploreExecutionResult result = exploreClient.submit(NAMESPACE_ID, command).get();
    Assert.assertEquals(QueryStatus.OpStatus.FINISHED, result.getStatus().getStatus());
    command = "select string_field, date_field, ts_millis_field, ts_micros_field from other_dt_ts_table";
    runCommand(NAMESPACE_ID, command, true,
               Lists.newArrayList(new ColumnDesc("string_field", "STRING", 1, null),
                                  new ColumnDesc("date_field", "DATE", 2, null),
                                  new ColumnDesc("ts_millis_field", "TIMESTAMP", 3, null),
                                  new ColumnDesc("ts_micros_field", "TIMESTAMP", 4, null)),
               Lists.newArrayList(new QueryResult(Lists.newArrayList(
                                    "alice", "1970-01-01", "2018-09-07 16:09:50.595", "2018-09-07 16:09:50.595123")),
                                  new QueryResult(Lists.newArrayList("bob", null, null, null))));
  } finally {
    datasetFramework.deleteInstance(dtTsTable);
    datasetFramework.deleteInstance(otherDtTsTable);
  }
}
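The expected strings follow directly from the stored epoch values and the UTC default time zone set at the top of the test: a DATE of 0 days is 1970-01-01, and 1536336590595 milliseconds (or 1536336590595123 microseconds) past the epoch is 2018-09-07 16:09:50 UTC plus the sub-second part. A self-contained sketch of that arithmetic with java.time (class name is illustrative):

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class EpochToExploreString {
  public static void main(String[] args) {
    // DATE values are days since the epoch, so day 0 is 1970-01-01
    System.out.println(LocalDate.ofEpochDay(0)); // 1970-01-01

    // TIMESTAMP_MILLIS values are epoch milliseconds
    DateTimeFormatter millisFormat =
      DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS").withZone(ZoneOffset.UTC);
    System.out.println(millisFormat.format(Instant.ofEpochMilli(1536336590595L))); // 2018-09-07 16:09:50.595

    // TIMESTAMP_MICROS values are epoch microseconds: split into seconds plus nanoseconds
    long micros = 1536336590595123L;
    Instant instant = Instant.ofEpochSecond(micros / 1_000_000, (micros % 1_000_000) * 1_000);
    DateTimeFormatter microsFormat =
      DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS").withZone(ZoneOffset.UTC);
    System.out.println(microsFormat.format(instant)); // 2018-09-07 16:09:50.595123
  }
}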
Use of io.cdap.cdap.proto.QueryResult in project cdap by caskdata.
The class InMemoryExploreServiceTest, method trimColumnValues.
private static List<QueryResult> trimColumnValues(List<QueryResult> results) {
  List<QueryResult> newResults = Lists.newArrayList();
  for (QueryResult result : results) {
    List<Object> newCols = Lists.newArrayList();
    for (Object obj : result.getColumns()) {
      if (obj instanceof String) {
        newCols.add(((String) obj).trim());
      } else {
        newCols.add(obj);
      }
    }
    newResults.add(new QueryResult(newCols));
  }
  return newResults;
}
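String cells can come back from Hive with leading or trailing whitespace, so the test normalizes them before comparing against expected rows. A quick illustration of the pass this helper performs (values here are made up for the example):

// Illustrative only: non-string cells pass through untouched
List<QueryResult> raw = Lists.newArrayList(
  new QueryResult(Lists.<Object>newArrayList("padded value   ", 42)));
List<QueryResult> trimmed = trimColumnValues(raw);
// trimmed.get(0).getColumns() is now ["padded value", 42]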
Use of io.cdap.cdap.proto.QueryResult in project cdap by caskdata.
The class QueryResultsBodyProducer, method nextChunk.
@Override
public ByteBuf nextChunk() throws Exception {
  buffer.clear();
  if (results == null) {
    initialize();
  }
  if (results.isEmpty()) {
    // an empty buffer signals the end of the download stream
    return Unpooled.EMPTY_BUFFER;
  }
  // serialize the current batch of query results as CSV rows
  for (QueryResult result : results) {
    appendCSVRow(writer, result);
  }
  writer.flush();
  // fetch the next batch for the following chunk
  results = exploreService.nextResults(handle, AbstractExploreQueryExecutorHttpHandler.DOWNLOAD_FETCH_CHUNK_SIZE);
  return buffer.copy();
}
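nextChunk follows a body-producer contract for streaming downloads: each call serializes one fetched batch of QueryResults to CSV and returns it, and an empty buffer tells the caller the stream is done. A minimal sketch of the loop a consumer would run; the drain helper is purely illustrative, since in cdap the HTTP framework itself drives the producer:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import java.io.OutputStream;

// Hypothetical helper: pull CSV chunks until the producer yields an empty buffer
static void drain(QueryResultsBodyProducer producer, OutputStream out) throws Exception {
  ByteBuf chunk;
  while ((chunk = producer.nextChunk()).isReadable()) {
    out.write(ByteBufUtil.getBytes(chunk)); // copy this CSV chunk downstream
    chunk.release();                        // buffer.copy() hands ownership to the caller
  }
}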