Use of org.apache.hadoop.hive.llap.LlapBaseInputFormat in project hive by apache.
From the class TestJdbcWithMiniLlapVectorArrowBatch, method runQueryUsingLlapArrowBatchReader:
private MultiSet<List<Object>> runQueryUsingLlapArrowBatchReader(String query,
    Map<String, String> extraHiveConfs) throws Exception {
  String url = miniHS2.getJdbcURL();
  if (extraHiveConfs != null) {
    url = url + "?" + extraHiveConfs.entrySet().stream()
        .map(e -> e.getKey() + "=" + e.getValue())
        .collect(Collectors.joining(";"));
  }
  String user = System.getProperty("user.name");
  String pwd = user;
  String handleId = UUID.randomUUID().toString();

  // Configure the LLAP external client and get the splits for the query.
  JobConf job = new JobConf(conf);
  job.set(LlapBaseInputFormat.URL_KEY, url);
  job.set(LlapBaseInputFormat.USER_KEY, user);
  job.set(LlapBaseInputFormat.PWD_KEY, pwd);
  job.set(LlapBaseInputFormat.QUERY_KEY, query);
  job.set(LlapBaseInputFormat.HANDLE_ID, handleId);
  job.set(LlapBaseInputFormat.USE_NEW_SPLIT_FORMAT, "false");

  // Child allocator scoped to this query; the Arrow reader allocates batches from it.
  BufferAllocator allocator = RootAllocatorFactory.INSTANCE
      .getOrCreateRootAllocator(Long.MAX_VALUE)
      .newChildAllocator(UUID.randomUUID().toString(), 0, Long.MAX_VALUE);
  LlapBaseInputFormat llapBaseInputFormat = new LlapBaseInputFormat(true, allocator);
  InputSplit[] splits = llapBaseInputFormat.getSplits(job, 1);
  assertTrue(splits.length > 0);

  // Read every split and collect the rows from each Arrow batch.
  MultiSet<List<Object>> queryResult = new HashMultiSet<>();
  for (InputSplit split : splits) {
    System.out.println("Processing split " + Arrays.toString(split.getLocations()));
    RecordReader<NullWritable, ArrowWrapperWritable> reader =
        llapBaseInputFormat.getRecordReader(split, job, null);
    ArrowWrapperWritable wrapperWritable = new ArrowWrapperWritable();
    while (reader.next(NullWritable.get(), wrapperWritable)) {
      queryResult.addAll(collectResultFromArrowVector(wrapperWritable));
    }
    reader.close();
  }
  // Releases the LLAP resources associated with this query handle.
  LlapBaseInputFormat.close(handleId);
  return queryResult;
}
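The collectResultFromArrowVector helper referenced above is not part of this excerpt. A minimal sketch of what such a collector could look like, assuming the standard Arrow accessors (VectorSchemaRoot.getRowCount(), FieldVector.getObject()) and Hive's ArrowWrapperWritable.getVectorSchemaRoot(); the body here is illustrative, not the test's actual implementation:

private MultiSet<List<Object>> collectResultFromArrowVector(ArrowWrapperWritable wrapper) {
  // Illustrative sketch: each batch arrives as one VectorSchemaRoot,
  // and each FieldVector in it is one column of the result.
  VectorSchemaRoot root = wrapper.getVectorSchemaRoot();
  MultiSet<List<Object>> rows = new HashMultiSet<>();
  for (int row = 0; row < root.getRowCount(); row++) {
    List<Object> rowValues = new ArrayList<>();
    for (FieldVector vector : root.getFieldVectors()) {
      rowValues.add(vector.getObject(row)); // boxed Java value; null for SQL NULL
    }
    rows.add(rowValues);
  }
  return rows;
}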
Use of org.apache.hadoop.hive.llap.LlapBaseInputFormat in project hive by apache.
From the class TestJdbcGenericUDTFGetSplits, method testDecimalPrecisionAndScale:
@Test
public void testDecimalPrecisionAndScale() throws Exception {
  try (Statement stmt = hs2Conn.createStatement()) {
    stmt.execute("CREATE TABLE decimal_test_table(decimal_col DECIMAL(6,2))");
    stmt.execute("INSERT INTO decimal_test_table VALUES(2507.92)");
    ResultSet rs = stmt.executeQuery("SELECT * FROM decimal_test_table");
    assertTrue(rs.next());
    rs.close();

    String url = miniHS2.getJdbcURL();
    String user = System.getProperty("user.name");
    String pwd = user;
    String handleId = UUID.randomUUID().toString();
    String sql = "SELECT avg(decimal_col)/3 FROM decimal_test_table";

    // Make the request through the llap-ext-client.
    JobConf job = new JobConf(conf);
    job.set(LlapBaseInputFormat.URL_KEY, url);
    job.set(LlapBaseInputFormat.USER_KEY, user);
    job.set(LlapBaseInputFormat.PWD_KEY, pwd);
    job.set(LlapBaseInputFormat.QUERY_KEY, sql);
    job.set(LlapBaseInputFormat.HANDLE_ID, handleId);
    LlapBaseInputFormat llapBaseInputFormat = new LlapBaseInputFormat();

    // The first split returned describes the result schema.
    LlapInputSplit schemaSplit = (LlapInputSplit) llapBaseInputFormat.getSplits(job, 0)[0];
    assertNotNull(schemaSplit);
    FieldDesc fieldDesc = schemaSplit.getSchema().getColumns().get(0);
    DecimalTypeInfo type = (DecimalTypeInfo) fieldDesc.getTypeInfo();
    assertEquals(12, type.getPrecision());
    assertEquals(8, type.getScale());
    LlapBaseInputFormat.close(handleId);
  }
}
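The asserted type follows from Hive's decimal arithmetic rules: avg over DECIMAL(6,2) widens precision and scale by 4 to DECIMAL(10,6), and dividing by the literal 3 (typed DECIMAL(1,0)) gives scale max(6, 6 + 1 + 1) = 8 and precision 10 - 6 + 0 + 8 = 12, i.e. DECIMAL(12,8). As an illustrative extension, not part of the original test, the full result schema could be dumped from the same split using the existing FieldDesc and TypeInfo accessors:

// Illustrative only: print every column's name and type from the schema split.
for (FieldDesc column : schemaSplit.getSchema().getColumns()) {
  System.out.println(column.getName() + " : " + column.getTypeInfo().getTypeName());
}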