use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.
the class GenericUDFStruct method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  int numFields = arguments.length;
  ret = new Object[numFields];
  ArrayList<String> fname = new ArrayList<String>(numFields);
  for (int f = 1; f <= numFields; f++) {
    fname.add("col" + f);
  }
  boolean constantStruct = true;
  for (int i = 0; i < arguments.length; i++) {
    ObjectInspector oi = arguments[i];
    constantStruct &= (oi.getCategory() == Category.PRIMITIVE)
        && (oi instanceof ConstantObjectInspector);
    if (constantStruct) {
      // nested complex types trigger Kryo issue #216 in plan deserialization
      ret[i] = ((ConstantObjectInspector) oi).getWritableConstantValue();
    }
  }
  if (constantStruct) {
    return ObjectInspectorFactory.getStandardConstantStructObjectInspector(
        fname, Arrays.asList(arguments), Arrays.asList(ret));
  } else {
    return ObjectInspectorFactory.getStandardStructObjectInspector(fname, Arrays.asList(arguments));
  }
}
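For context, a minimal sketch of how a caller might exercise this initializer. The factory methods below come from Hive's serde2 objectinspector and typeinfo packages, but the argument values and the call site are made up for illustration; this is not part of the Hive sources.

// Hypothetical caller: two constant string arguments.
ObjectInspector[] args = new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        TypeInfoFactory.stringTypeInfo, new Text("a")),
    PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        TypeInfoFactory.stringTypeInfo, new Text("b")) };
GenericUDFStruct udf = new GenericUDFStruct();
ObjectInspector oi = udf.initialize(args);
// Both inputs are constant primitives, so the whole struct is constant:
// oi is a constant struct inspector with fields col1 and col2.

If any argument were non-constant or non-primitive, the same call would instead return a standard struct inspector and the struct value would be computed per row in evaluate().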
use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.
the class TypedBytesSerDe method serializeField.
private void serializeField(Object o, ObjectInspector oi, Object reuse) throws IOException {
  switch (oi.getCategory()) {
  case PRIMITIVE: {
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
    switch (poi.getPrimitiveCategory()) {
    case VOID: {
      return;
    }
    case BOOLEAN: {
      BooleanObjectInspector boi = (BooleanObjectInspector) poi;
      BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
      r.set(boi.get(o));
      tbOut.write(r);
      return;
    }
    case BYTE: {
      ByteObjectInspector boi = (ByteObjectInspector) poi;
      ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
      r.set(boi.get(o));
      tbOut.write(r);
      return;
    }
    case SHORT: {
      ShortObjectInspector spoi = (ShortObjectInspector) poi;
      ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
      r.set(spoi.get(o));
      tbOut.write(r);
      return;
    }
    case INT: {
      IntObjectInspector ioi = (IntObjectInspector) poi;
      IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
      r.set(ioi.get(o));
      tbOut.write(r);
      return;
    }
    case LONG: {
      LongObjectInspector loi = (LongObjectInspector) poi;
      LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
      r.set(loi.get(o));
      tbOut.write(r);
      return;
    }
    case FLOAT: {
      FloatObjectInspector foi = (FloatObjectInspector) poi;
      FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
      r.set(foi.get(o));
      tbOut.write(r);
      return;
    }
    case DOUBLE: {
      DoubleObjectInspector doi = (DoubleObjectInspector) poi;
      DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
      r.set(doi.get(o));
      tbOut.write(r);
      return;
    }
    case STRING: {
      StringObjectInspector soi = (StringObjectInspector) poi;
      Text t = soi.getPrimitiveWritableObject(o);
      tbOut.write(t);
      return;
    }
    default: {
      throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
    }
    }
  }
  case LIST:
  case MAP:
  case STRUCT: {
    // For complex objects, serialize to JSON format
    String s = SerDeUtils.getJSONString(o, oi);
    Text t = reuse == null ? new Text() : (Text) reuse;
    // convert to Text and write it
    t.set(s);
    tbOut.write(t);
    // return so we do not fall through into the default case below
    return;
  }
  default: {
    throw new RuntimeException("Unrecognized type: " + oi.getCategory());
  }
  }
}
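Every primitive branch above applies the same reuse idiom: the caller can pass back the Writable produced for the previous row, and the field is overwritten in place instead of reallocated. A standalone sketch of that idiom, with illustrative names only:

// Illustrative only: one IntWritable is mutated across all values
// rather than allocating a fresh object per field.
Object reuse = null;
for (int v : new int[] { 1, 2, 3 }) {
  IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
  r.set(v); // overwrite in place
  reuse = r; // later iterations reuse the same instance
  // TypedBytesSerDe would call tbOut.write(r) here
}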
use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.
the class TypedBytesSerDe method deserializeField.
static Object deserializeField(TypedBytesWritableInput in, TypeInfo type, Object reuse) throws IOException {
  // read the type
  Class<? extends Writable> writableType = in.readType();
  if (writableType != null && writableType.isAssignableFrom(NullWritable.class)) {
    // indicates that the recorded value is null
    return null;
  }
  switch (type.getCategory()) {
  case PRIMITIVE: {
    PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
    switch (ptype.getPrimitiveCategory()) {
    case VOID: {
      return null;
    }
    case BOOLEAN: {
      BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
      r = in.readBoolean(r);
      return r;
    }
    case BYTE: {
      ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
      r = in.readByte(r);
      return r;
    }
    case SHORT: {
      ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
      r = in.readShort(r);
      return r;
    }
    case INT: {
      IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
      r = in.readInt(r);
      return r;
    }
    case LONG: {
      LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
      r = in.readLong(r);
      return r;
    }
    case FLOAT: {
      FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
      r = in.readFloat(r);
      return r;
    }
    case DOUBLE: {
      DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
      r = in.readDouble(r);
      return r;
    }
    case STRING: {
      Text r = reuse == null ? new Text() : (Text) reuse;
      r = in.readText(r);
      return r;
    }
    default: {
      throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory());
    }
    }
  }
  // Currently, deserialization of complex types is not supported
  case LIST:
  case MAP:
  case STRUCT:
  default: {
    throw new RuntimeException("Unsupported category: " + type.getCategory());
  }
  }
}
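A hedged sketch of a driver for this per-field reader, assumed to live in the same class so the package-private deserializeField is visible; the two-column schema, the byte-array input, and the TypedBytesWritableInput construction are assumptions for illustration:

// Hypothetical: deserialize one typed-bytes row of (int, string).
static Object[] readRow(byte[] rowBytes) throws IOException {
  TypedBytesWritableInput in = new TypedBytesWritableInput(
      new DataInputStream(new ByteArrayInputStream(rowBytes)));
  List<TypeInfo> columnTypes = Arrays.asList(
      TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
  Object[] row = new Object[columnTypes.size()];
  for (int i = 0; i < columnTypes.size(); i++) {
    // pass the previous Writable back as `reuse` (null on the first call)
    row[i] = deserializeField(in, columnTypes.get(i), row[i]);
  }
  return row;
}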
use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.
the class HBaseStorageHandler method decomposePredicate.
public static DecomposedPredicate decomposePredicate(JobConf jobConf, HBaseSerDe hBaseSerDe, ExprNodeDesc predicate) {
  ColumnMapping keyMapping = hBaseSerDe.getHBaseSerdeParam().getKeyColumnMapping();
  ColumnMapping tsMapping = hBaseSerDe.getHBaseSerdeParam().getTimestampColumnMapping();
  IndexPredicateAnalyzer analyzer = HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(
      keyMapping.columnName, keyMapping.isComparable(), tsMapping == null ? null : tsMapping.columnName);
  List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
  ExprNodeGenericFuncDesc pushedPredicate = null;
  ExprNodeGenericFuncDesc residualPredicate =
      (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, conditions);
  for (List<IndexSearchCondition> searchConditions : HiveHBaseInputFormatUtil.decompose(conditions).values()) {
    int scSize = searchConditions.size();
    if (scSize < 1 || 2 < scSize) {
      // Either there was nothing that could be pushed down (size = 0),
      // or there were complex predicates that we don't support yet.
      // Currently supported predicates take one of these forms:
      // 1. key < 20 (size = 1)
      // 2. key = 20 (size = 1)
      // 3. key < 20 and key > 10 (size = 2)
      // Add to residual
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    if (scSize == 2 && (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())
        || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqual.class.getName()))) {
      // If one of the predicates is =, combining it with any other predicate is illegal.
      // Add to residual
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    boolean sameType = sameTypeIndexSearchConditions(searchConditions);
    if (!sameType) {
      // If the column and the constant have different types, we currently do not support pushing them
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    TypeInfo typeInfo = searchConditions.get(0).getColumnDesc().getTypeInfo();
    if (typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
        ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) == PrimitiveGrouping.NUMERIC_GROUP) {
      // For a numeric column, only a closed range (or equality) can be pushed down:
      // with an open range such as key < 20, negative values are stored
      // lexicographically after positive ones and spurious rows would be returned.
      if (scSize == 2) {
        boolean lowerBound = false;
        boolean upperBound = false;
        if (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqualOrLessThan.class.getName())
            || searchConditions.get(0).getComparisonOp().equals(GenericUDFOPLessThan.class.getName())) {
          lowerBound = true;
        } else {
          upperBound = true;
        }
        if (searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqualOrGreaterThan.class.getName())
            || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPGreaterThan.class.getName())) {
          upperBound = true;
        } else {
          lowerBound = true;
        }
        if (!upperBound || !lowerBound) {
          // Not a valid range, add to residual
          residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
          continue;
        }
      } else {
        // scSize == 1
        if (!searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())) {
          // Not a valid range, add to residual
          residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
          continue;
        }
      }
    }
    // This one can be pushed
    pushedPredicate = extractStorageHandlerCondition(analyzer, searchConditions, pushedPredicate);
  }
  DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
  decomposedPredicate.pushedPredicate = pushedPredicate;
  decomposedPredicate.residualPredicate = residualPredicate;
  return decomposedPredicate;
}
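To make the decomposition concrete, a worked example with a made-up filter: suppose the HBase row key is mapped to a column named key and the query filters with key > 10 AND key <= 20 AND value = 'x'. The analyzer extracts two search conditions on key; they have the same column and constant type, neither is an equality paired with another operator, and together they supply both a lower and an upper bound (scSize == 2). The pair is therefore pushed, so pushedPredicate becomes key > 10 AND key <= 20 while residualPredicate keeps value = 'x' for Hive to re-evaluate after the HBase scan.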
use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.
the class VectorDeserializeRow method initSourceEntry.
/*
 * Initialize one column's source deserialization related arrays.
 */
private void initSourceEntry(int logicalColumnIndex, int projectionColumnNum, TypeInfo sourceTypeInfo) {
  isConvert[logicalColumnIndex] = false;
  projectionColumnNums[logicalColumnIndex] = projectionColumnNum;
  Category sourceCategory = sourceTypeInfo.getCategory();
  sourceCategories[logicalColumnIndex] = sourceCategory;
  if (sourceCategory == Category.PRIMITIVE) {
    PrimitiveTypeInfo sourcePrimitiveTypeInfo = (PrimitiveTypeInfo) sourceTypeInfo;
    PrimitiveCategory sourcePrimitiveCategory = sourcePrimitiveTypeInfo.getPrimitiveCategory();
    sourcePrimitiveCategories[logicalColumnIndex] = sourcePrimitiveCategory;
    switch (sourcePrimitiveCategory) {
    case CHAR:
      maxLengths[logicalColumnIndex] = ((CharTypeInfo) sourcePrimitiveTypeInfo).getLength();
      break;
    case VARCHAR:
      maxLengths[logicalColumnIndex] = ((VarcharTypeInfo) sourcePrimitiveTypeInfo).getLength();
      break;
    default:
      // No additional data type specific setting.
      break;
    }
  } else {
    // We don't currently support complex types.
    Preconditions.checkState(false);
  }
}
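A small sketch of what this records for a char column; the call and the index values are hypothetical, and TypeInfoFactory.getCharTypeInfo comes from Hive's serde2 typeinfo package:

// Hypothetical: logical column 0 reads batch column 3 as char(10).
TypeInfo charType = TypeInfoFactory.getCharTypeInfo(10);
initSourceEntry(0, 3, charType);
// Afterwards:
//   projectionColumnNums[0]      == 3
//   sourceCategories[0]          == Category.PRIMITIVE
//   sourcePrimitiveCategories[0] == PrimitiveCategory.CHAR
//   maxLengths[0]                == 10  (used to enforce the char length)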