Use of org.apache.hadoop.hive.llap.FieldDesc in project hive by apache.
From the class GenericUDTFGetSplits, method convertSchema.
private Schema convertSchema(Object obj) throws HiveException {
  org.apache.hadoop.hive.metastore.api.Schema schema = (org.apache.hadoop.hive.metastore.api.Schema) obj;
  List<FieldDesc> colDescs = new ArrayList<FieldDesc>();
  for (FieldSchema fs : schema.getFieldSchemas()) {
    String colName = fs.getName();
    String typeString = fs.getType();
    colDescs.add(new FieldDesc(colName, TypeInfoUtils.getTypeInfoFromTypeString(typeString)));
  }
  return new Schema(colDescs);
}
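As a rough illustration of what convertSchema produces, the sketch below builds an LLAP Schema from FieldDesc entries parsed out of plain type strings and reads the columns back. The class name ConvertSchemaSketch and the column names are made up for the example; this is a minimal standalone sketch, not code from the Hive project.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.llap.FieldDesc;
import org.apache.hadoop.hive.llap.Schema;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ConvertSchemaSketch {
  public static void main(String[] args) {
    List<FieldDesc> colDescs = new ArrayList<FieldDesc>();
    // Each FieldDesc pairs a column name with a TypeInfo parsed from its type string,
    // just as convertSchema does for every FieldSchema in the metastore schema.
    colDescs.add(new FieldDesc("id", TypeInfoUtils.getTypeInfoFromTypeString("int")));
    colDescs.add(new FieldDesc("name", TypeInfoUtils.getTypeInfoFromTypeString("string")));
    Schema schema = new Schema(colDescs);
    for (FieldDesc fd : schema.getColumns()) {
      System.out.println(fd.getName() + " : " + fd.getTypeInfo().getTypeName());
    }
  }
}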
Use of org.apache.hadoop.hive.llap.FieldDesc in project hive by apache.
From the class LlapRowRecordReader, method initSerDe.
protected AbstractSerDe initSerDe(Configuration conf) throws SerDeException {
  Properties props = new Properties();
  StringBuilder columnsBuffer = new StringBuilder();
  StringBuilder typesBuffer = new StringBuilder();
  boolean isFirst = true;
  for (FieldDesc colDesc : schema.getColumns()) {
    if (!isFirst) {
      columnsBuffer.append(',');
      typesBuffer.append(',');
    }
    columnsBuffer.append(colDesc.getName());
    typesBuffer.append(colDesc.getTypeInfo().toString());
    isFirst = false;
  }
  String columns = columnsBuffer.toString();
  String types = typesBuffer.toString();
  props.put(serdeConstants.LIST_COLUMNS, columns);
  props.put(serdeConstants.LIST_COLUMN_TYPES, types);
  props.put(serdeConstants.ESCAPE_CHAR, "\\");
  AbstractSerDe createdSerDe = createSerDe();
  createdSerDe.initialize(conf, props, null);
  return createdSerDe;
}
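The sketch below mirrors the loop in initSerDe: it derives the same comma-separated "columns" / "columns.types" SerDe properties from a list of FieldDesc entries so the resulting Properties can be inspected directly. The class name SerDePropsSketch and the two example columns are assumptions for illustration only.

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hive.llap.FieldDesc;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class SerDePropsSketch {
  public static void main(String[] args) {
    List<FieldDesc> colDescs = Arrays.asList(
        new FieldDesc("col1", TypeInfoFactory.stringTypeInfo),
        new FieldDesc("col2", TypeInfoFactory.intTypeInfo));
    StringBuilder columns = new StringBuilder();
    StringBuilder types = new StringBuilder();
    for (FieldDesc fd : colDescs) {
      if (columns.length() > 0) {
        columns.append(',');
        types.append(',');
      }
      columns.append(fd.getName());
      types.append(fd.getTypeInfo().toString());
    }
    Properties props = new Properties();
    props.put(serdeConstants.LIST_COLUMNS, columns.toString());      // "col1,col2"
    props.put(serdeConstants.LIST_COLUMN_TYPES, types.toString());   // "string,int"
    props.put(serdeConstants.ESCAPE_CHAR, "\\");
    System.out.println(props);
  }
}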
Use of org.apache.hadoop.hive.llap.FieldDesc in project hive by apache.
From the class TestMiniLlapVectorArrowWithLlapIODisabled, method testNullsInStructFields.
@Test
public void testNullsInStructFields() throws Exception {
  createDataTypesTable("datatypes");
  RowCollector2 rowCollector = new RowCollector2();
  // c1 int
  // c8 struct<r:string,s:int,t:double>
  // c15 struct<r:int,s:struct<a:int,b:string>>
  // c16 array<struct<m:map<string,string>,n:int>>
  String query = "select c1, c8, c15, c16 from datatypes";
  int rowCount = processQuery(query, 1, rowCollector);
  assertEquals(4, rowCollector.numColumns);
  assertEquals(3, rowCount);

  FieldDesc fieldDesc = rowCollector.schema.getColumns().get(0);
  assertEquals("c1", fieldDesc.getName());
  assertEquals("int", fieldDesc.getTypeInfo().getTypeName());
  fieldDesc = rowCollector.schema.getColumns().get(1);
  assertEquals("c8", fieldDesc.getName());
  assertEquals("struct<r:string,s:int,t:double>", fieldDesc.getTypeInfo().getTypeName());
  fieldDesc = rowCollector.schema.getColumns().get(2);
  assertEquals("c15", fieldDesc.getName());
  assertEquals("struct<r:int,s:struct<a:int,b:string>>", fieldDesc.getTypeInfo().getTypeName());
  fieldDesc = rowCollector.schema.getColumns().get(3);
  assertEquals("c16", fieldDesc.getName());
  assertEquals("array<struct<m:map<string,string>,n:int>>", fieldDesc.getTypeInfo().getTypeName());

  // First row is all nulls
  Object[] rowValues = rowCollector.rows.get(0);
  for (int idx = 0; idx < rowCollector.numColumns; ++idx) {
    assertNull("idx=" + idx, rowValues[idx]);
  }

  // Second row
  rowValues = rowCollector.rows.get(1);
  assertEquals(-1, rowValues[0]);
  List<?> c8Value = (List<?>) rowValues[1];
  assertNull(c8Value.get(0));
  assertNull(c8Value.get(1));
  assertNull(c8Value.get(2));
  List<?> c15Value = (List<?>) rowValues[2];
  assertNull(c15Value.get(0));
  assertNull(c15Value.get(1));
  List<?> c16Value = (List<?>) rowValues[3];
  assertEquals(0, c16Value.size());

  // Third row
  rowValues = rowCollector.rows.get(2);
  assertEquals(1, rowValues[0]);
  c8Value = (List<?>) rowValues[1];
  assertEquals("a", c8Value.get(0));
  assertEquals(9, c8Value.get(1));
  assertEquals(2.2d, c8Value.get(2));
  c15Value = (List<?>) rowValues[2];
  assertEquals(1, c15Value.get(0));
  List<?> listVal = (List<?>) c15Value.get(1);
  assertEquals(2, listVal.size());
  assertEquals(2, listVal.get(0));
  assertEquals("x", listVal.get(1));
  c16Value = (List<?>) rowValues[3];
  assertEquals(2, c16Value.size());
  listVal = (List<?>) c16Value.get(0);
  assertEquals(2, listVal.size());
  Map<?, ?> mapVal = (Map<?, ?>) listVal.get(0);
  assertEquals(0, mapVal.size());
  assertEquals(1, listVal.get(1));
  listVal = (List<?>) c16Value.get(1);
  mapVal = (Map<?, ?>) listVal.get(0);
  assertEquals(2, mapVal.size());
  assertEquals("b", mapVal.get("a"));
  assertEquals("d", mapVal.get("c"));
  assertEquals(2, listVal.get(1));
}
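The nested type names that the test compares against come straight from TypeInfo parsing: a struct/array/map type string round-trips to the same canonical form exposed through FieldDesc. The sketch below demonstrates this for the c16 column; the class name NestedTypeSketch is an assumption, not Hive code.

import org.apache.hadoop.hive.llap.FieldDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class NestedTypeSketch {
  public static void main(String[] args) {
    FieldDesc c16 = new FieldDesc("c16",
        TypeInfoUtils.getTypeInfoFromTypeString("array<struct<m:map<string,string>,n:int>>"));
    // Prints: c16 array<struct<m:map<string,string>,n:int>>
    System.out.println(c16.getName() + " " + c16.getTypeInfo().getTypeName());
  }
}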
Use of org.apache.hadoop.hive.llap.FieldDesc in project hive by apache.
From the class TestJdbcGenericUDTFGetSplits, method testDecimalPrecisionAndScale.
@Test
public void testDecimalPrecisionAndScale() throws Exception {
  try (Statement stmt = hs2Conn.createStatement()) {
    stmt.execute("CREATE TABLE decimal_test_table(decimal_col DECIMAL(6,2))");
    stmt.execute("INSERT INTO decimal_test_table VALUES(2507.92)");
    ResultSet rs = stmt.executeQuery("SELECT * FROM decimal_test_table");
    assertTrue(rs.next());
    rs.close();

    String url = miniHS2.getJdbcURL();
    String user = System.getProperty("user.name");
    String pwd = user;
    String handleId = UUID.randomUUID().toString();
    String sql = "SELECT avg(decimal_col)/3 FROM decimal_test_table";

    // make request through llap-ext-client
    JobConf job = new JobConf(conf);
    job.set(LlapBaseInputFormat.URL_KEY, url);
    job.set(LlapBaseInputFormat.USER_KEY, user);
    job.set(LlapBaseInputFormat.PWD_KEY, pwd);
    job.set(LlapBaseInputFormat.QUERY_KEY, sql);
    job.set(LlapBaseInputFormat.HANDLE_ID, handleId);

    LlapBaseInputFormat llapBaseInputFormat = new LlapBaseInputFormat();
    // schema split
    LlapInputSplit schemaSplit = (LlapInputSplit) llapBaseInputFormat.getSplits(job, 0)[0];
    assertNotNull(schemaSplit);
    FieldDesc fieldDesc = schemaSplit.getSchema().getColumns().get(0);
    DecimalTypeInfo type = (DecimalTypeInfo) fieldDesc.getTypeInfo();
    assertEquals(12, type.getPrecision());
    assertEquals(8, type.scale());

    LlapBaseInputFormat.close(handleId);
  }
}
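The precision and scale the test reads from the split's FieldDesc are carried by a DecimalTypeInfo. The sketch below shows the same inspection on a decimal(12,8) type string, matching the values asserted above; the class name DecimalTypeSketch and the column name "_c0" are assumptions for illustration.

import org.apache.hadoop.hive.llap.FieldDesc;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class DecimalTypeSketch {
  public static void main(String[] args) {
    FieldDesc fieldDesc = new FieldDesc("_c0",
        TypeInfoUtils.getTypeInfoFromTypeString("decimal(12,8)"));
    DecimalTypeInfo type = (DecimalTypeInfo) fieldDesc.getTypeInfo();
    // Prints: 12,8
    System.out.println(type.getPrecision() + "," + type.getScale());
  }
}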
Use of org.apache.hadoop.hive.llap.FieldDesc in project hive by apache.
From the class TestLlapInputSplit, method testWritable.
@Test
public void testWritable() throws Exception {
  int splitNum = 88;
  byte[] planBytes = "0123456789987654321".getBytes();
  byte[] fragmentBytes = "abcdefghijklmnopqrstuvwxyz".getBytes();
  SplitLocationInfo[] locations = { new SplitLocationInfo("location1", false), new SplitLocationInfo("location2", false) };
  LlapDaemonInfo daemonInfo1 = new LlapDaemonInfo("host1", 30004, 15003);
  LlapDaemonInfo daemonInfo2 = new LlapDaemonInfo("host2", 30004, 15003);
  LlapDaemonInfo[] llapDaemonInfos = { daemonInfo1, daemonInfo2 };
  ArrayList<FieldDesc> colDescs = new ArrayList<FieldDesc>();
  colDescs.add(new FieldDesc("col1", TypeInfoFactory.stringTypeInfo));
  colDescs.add(new FieldDesc("col2", TypeInfoFactory.intTypeInfo));
  Schema schema = new Schema(colDescs);
  byte[] tokenBytes = new byte[] { 1 };
  LlapInputSplit split1 = new LlapInputSplit(splitNum, planBytes, fragmentBytes, null, locations, llapDaemonInfos, schema, "hive", tokenBytes, "some-dummy-jwt");

  ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
  DataOutputStream dataOut = new DataOutputStream(byteOutStream);
  split1.write(dataOut);
  ByteArrayInputStream byteInStream = new ByteArrayInputStream(byteOutStream.toByteArray());
  DataInputStream dataIn = new DataInputStream(byteInStream);
  LlapInputSplit split2 = new LlapInputSplit();
  split2.readFields(dataIn);

  // Did we read all the data?
  assertEquals(0, byteInStream.available());
  checkLlapSplits(split1, split2);
}
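The serialize/deserialize steps in the test follow the standard Hadoop Writable round-trip idiom. The helper below captures that idiom generically; WritableRoundTrip and roundTrip are hypothetical names, not part of the Hive test, and work for any Writable such as LlapInputSplit.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public class WritableRoundTrip {
  // Writes src to a byte buffer, then populates dst by reading the buffer back.
  // Example usage: LlapInputSplit copy = WritableRoundTrip.roundTrip(split1, new LlapInputSplit());
  public static <T extends Writable> T roundTrip(Writable src, T dst) throws IOException {
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    src.write(new DataOutputStream(bytesOut));
    dst.readFields(new DataInputStream(new ByteArrayInputStream(bytesOut.toByteArray())));
    return dst;
  }
}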