Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class TestAccumuloPredicateHandler, method testRowRangeGeneration.
@Test
public void testRowRangeGeneration() throws SerDeException {
  List<String> columnNames = Arrays.asList("key", "column");
  List<TypeInfo> columnTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo,
      TypeInfoFactory.stringTypeInfo);
  conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string");
  String columnMappingStr = ":rowID,cf:f1";
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
  columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(),
      columnNames, columnTypes);
  // 100 < key
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 100);
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(constant);
  children.add(column);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPLessThan(), children);
  assertNotNull(node);
  String filterExpr = SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  // Should make (100, +inf)
  List<Range> ranges = handler.getRanges(conf, columnMapper);
  Assert.assertEquals(1, ranges.size());
  Assert.assertEquals(new Range(new Text("100"), false, null, false), ranges.get(0));
}
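For context, the Accumulo Range constructor used in the assertion takes (startRow, startInclusive, endRow, endInclusive), where a null end row means unbounded. A minimal standalone sketch of the semantics the test relies on (the class name is ours; Key and Range are the real Accumulo types):

import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.hadoop.io.Text;

public class RangeSemanticsSketch {
  public static void main(String[] args) {
    // (100, +inf): start row "100" exclusive, end row unbounded.
    Range r = new Range(new Text("100"), false, null, false);
    System.out.println(r.contains(new Key(new Text("100")))); // false: start is exclusive
    System.out.println(r.contains(new Key(new Text("101")))); // true
    System.out.println(r.contains(new Key(new Text("999")))); // true: no upper bound
  }
}

Note that row keys compare lexicographically, so "100" here is a string boundary, not a numeric one.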
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class TestLazyAccumuloMap, method testMixedSerializationMap.
@Test
public void testMixedSerializationMap() throws SerDeException, IOException {
  AccumuloHiveRow row = new AccumuloHiveRow("row");
  row.add(new Text("cf1"), new Text(toBytes(1)), "2".getBytes());
  row.add(new Text("cf1"), new Text(toBytes(2)), "4".getBytes());
  row.add(new Text("cf1"), new Text(toBytes(3)), "6".getBytes());
  HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null,
      ColumnEncoding.BINARY, ColumnEncoding.STRING, "column",
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo).toString());
  // Map of int to int: binary-encoded keys, string-encoded values
  Text nullSequence = new Text("\\N");
  ObjectInspector oi = LazyFactory.createLazyObjectInspector(
      TypeInfoUtils.getTypeInfosFromTypeString("map<int,int>").get(0),
      new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
  LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
  map.init(row, mapping);
  Assert.assertEquals(3, map.getMapSize());
  Object o = map.getMapValueElement(new IntWritable(1));
  Assert.assertNotNull(o);
  Assert.assertEquals(new IntWritable(2), ((LazyInteger) o).getWritableObject());
  o = map.getMapValueElement(new IntWritable(2));
  Assert.assertNotNull(o);
  Assert.assertEquals(new IntWritable(4), ((LazyInteger) o).getWritableObject());
  o = map.getMapValueElement(new IntWritable(3));
  Assert.assertNotNull(o);
  Assert.assertEquals(new IntWritable(6), ((LazyInteger) o).getWritableObject());
}
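The toBytes helper above belongs to the test class and is not shown on this page. A hedged sketch of what it is assumed to do, given the ColumnEncoding.BINARY key encoding the mapping declares (writing the int in 4-byte big-endian form):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ToBytesSketch {
  // Assumed equivalent of the test's toBytes(int) helper.
  static byte[] toBytes(int i) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    out.writeInt(i); // 4 bytes, big-endian, matching the binary int encoding
    out.flush();
    return baos.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    System.out.println(toBytes(1).length); // 4
  }
}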
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class TestLazyAccumuloMap, method testStringMapWithProjection.
@Test
public void testStringMapWithProjection() throws SerDeException {
  AccumuloHiveRow row = new AccumuloHiveRow("row");
  row.add("cf1", "foo", "bar".getBytes());
  row.add("cf1", "bar", "foo".getBytes());
  row.add("cf2", "foo1", "bar1".getBytes());
  row.add("cf3", "bar1", "foo1".getBytes());
  HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null,
      ColumnEncoding.STRING, ColumnEncoding.STRING, "column",
      TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo).toString());
  // Map of String to String; only entries in column family cf1 are projected in
  Text nullSequence = new Text("\\N");
  ObjectInspector oi = LazyFactory.createLazyObjectInspector(
      TypeInfoUtils.getTypeInfosFromTypeString("map<string,string>").get(0),
      new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
  LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
  map.init(row, mapping);
  Assert.assertEquals(2, map.getMapSize());
  Object o = map.getMapValueElement(new Text("foo"));
  Assert.assertNotNull(o);
  Assert.assertEquals(new Text("bar"), ((LazyString) o).getWritableObject());
  o = map.getMapValueElement(new Text("bar"));
  Assert.assertNotNull(o);
  Assert.assertEquals(new Text("foo"), ((LazyString) o).getWritableObject());
}
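The size-2 assertion is the projection at work: the cf2 and cf3 entries never enter the map. One could make that explicit with an extra check like the following (not part of the original test; it assumes getMapValueElement simply misses for qualifiers outside cf1):

// Hypothetical extra check: cf2's qualifier "foo1" was filtered out by the mapping.
Assert.assertNull(map.getMapValueElement(new Text("foo1")));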
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class TestLazyAccumuloRow, method testNullInit.
@Test
public void testNullInit() throws SerDeException {
  List<String> columns = Arrays.asList("row", "1", "2", "3");
  List<TypeInfo> types = Arrays.<TypeInfo>asList(
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME),
      TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
  LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector)
      LazyFactory.createLazyStructInspector(columns, types,
          LazySerDeParameters.DefaultSeparators, new Text("\\N"), false, false, (byte) '\\');
  DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
  Properties props = new Properties();
  props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:cq1,cf:cq2,cf:cq3");
  props.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
  AccumuloSerDeParameters params = new AccumuloSerDeParameters(new Configuration(), props,
      AccumuloSerDe.class.getName());
  rowIdFactory.init(params, props);
  ColumnMapper columnMapper = params.getColumnMapper();
  LazyAccumuloRow lazyRow = new LazyAccumuloRow(objectInspector);
  AccumuloHiveRow hiveRow = new AccumuloHiveRow("1");
  hiveRow.add("cf", "cq1", "foo".getBytes());
  hiveRow.add("cf", "cq3", "bar".getBytes());
  lazyRow.init(hiveRow, columnMapper.getColumnMappings(), rowIdFactory);
  // As with HIVE-3179, a field should only be treated as initialized when it is non-NULL.
  // Check twice to make sure we get null both times (no stale cached value).
  Assert.assertEquals("{'row':'1','1':'foo','2':null,'3':'bar'}".replace('\'', '"'),
      SerDeUtils.getJSONString(lazyRow, objectInspector));
  Assert.assertEquals("{'row':'1','1':'foo','2':null,'3':'bar'}".replace('\'', '"'),
      SerDeUtils.getJSONString(lazyRow, objectInspector));
}
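Beyond comparing JSON output, the missing field could also be probed directly through the inspector. A hedged follow-up (not in the original test; it assumes getStructFieldData returns null for the absent cf:cq2 column):

// Hypothetical direct probe of field "2", for which hiveRow has no cf:cq2 entry.
StructField f2 = objectInspector.getStructFieldRef("2");
Assert.assertNull(objectInspector.getStructFieldData(lazyRow, f2));
// The second read must also be null, mirroring the repeated JSON checks above.
Assert.assertNull(objectInspector.getStructFieldData(lazyRow, f2));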
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class HCatRecordSerDe, method serializeList.
private static List<?> serializeList(Object f, ListObjectInspector loi) throws SerDeException {
  List l = loi.getList(f);
  if (l == null) {
    return null;
  }
  ObjectInspector eloi = loi.getListElementObjectInspector();
  if (eloi.getCategory() == Category.PRIMITIVE) {
    List<Object> list = new ArrayList<Object>(l.size());
    for (int i = 0; i < l.size(); i++) {
      list.add(((PrimitiveObjectInspector) eloi).getPrimitiveJavaObject(l.get(i)));
    }
    return list;
  } else if (eloi.getCategory() == Category.STRUCT) {
    List<List<?>> list = new ArrayList<List<?>>(l.size());
    for (int i = 0; i < l.size(); i++) {
      list.add(serializeStruct(l.get(i), (StructObjectInspector) eloi));
    }
    return list;
  } else if (eloi.getCategory() == Category.LIST) {
    List<List<?>> list = new ArrayList<List<?>>(l.size());
    for (int i = 0; i < l.size(); i++) {
      list.add(serializeList(l.get(i), (ListObjectInspector) eloi));
    }
    return list;
  } else if (eloi.getCategory() == Category.MAP) {
    List<Map<?, ?>> list = new ArrayList<Map<?, ?>>(l.size());
    for (int i = 0; i < l.size(); i++) {
      list.add(serializeMap(l.get(i), (MapObjectInspector) eloi));
    }
    return list;
  } else {
    throw new SerDeException(HCatRecordSerDe.class.toString()
        + " does not know what to do with fields of unknown category: "
        + eloi.getCategory() + " , type: " + eloi.getTypeName());
  }
}
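serializeList is private, but the ObjectInspector pattern it follows is easy to demonstrate with Hive's standard inspectors. A minimal standalone sketch (the class name is ours) of the PRIMITIVE branch: getList unwraps the container, then each element is copied out through the element inspector:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class SerializeListSketch {
  public static void main(String[] args) {
    // Standard list-of-string inspector; elements are plain Java Strings.
    ListObjectInspector loi = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    List<String> data = Arrays.asList("a", "b");
    // Mirror of the PRIMITIVE branch above: unwrap, then copy element by element.
    List<?> l = loi.getList(data);
    PrimitiveObjectInspector eloi = (PrimitiveObjectInspector) loi.getListElementObjectInspector();
    for (Object e : l) {
      System.out.println(eloi.getPrimitiveJavaObject(e)); // a, then b
    }
  }
}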