Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.
Class BatchToRowReader, method nextBoolean.
/* Routines for stubbing into Writables */
public static BooleanWritable nextBoolean(ColumnVector vector, int row, Object previous) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (vector.noNulls || !vector.isNull[row]) {
    BooleanWritable result;
    if (previous == null || previous.getClass() != BooleanWritable.class) {
      result = new BooleanWritable();
    } else {
      result = (BooleanWritable) previous;
    }
    result.set(((LongColumnVector) vector).vector[row] != 0);
    return result;
  } else {
    return null;
  }
}
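Below is a minimal, hypothetical driver exercising the recycling contract of this routine: non-null rows write into the BooleanWritable handed back in as previous, while null rows yield null. The class name NextBooleanDemo and the main method are invented for illustration; the routine above is inlined verbatim so the sketch compiles on its own with only hive-exec and hadoop-common on the classpath.

import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.io.BooleanWritable;

// Hypothetical demo class, not part of Hive.
public class NextBooleanDemo {

  // Inlined copy of the routine above so the sketch is self-contained.
  static BooleanWritable nextBoolean(ColumnVector vector, int row, Object previous) {
    if (vector.isRepeating) {
      row = 0;
    }
    if (vector.noNulls || !vector.isNull[row]) {
      BooleanWritable result;
      if (previous == null || previous.getClass() != BooleanWritable.class) {
        result = new BooleanWritable();
      } else {
        result = (BooleanWritable) previous;
      }
      result.set(((LongColumnVector) vector).vector[row] != 0);
      return result;
    } else {
      return null;
    }
  }

  public static void main(String[] args) {
    LongColumnVector col = new LongColumnVector(3);
    col.vector[0] = 1L;    // non-zero long reads back as true
    col.vector[1] = 0L;    // zero reads back as false
    col.isNull[2] = true;  // SQL NULL
    col.noNulls = false;

    Object previous = null;
    for (int row = 0; row < 3; row++) {
      BooleanWritable w = nextBoolean(col, row, previous);
      System.out.println("row " + row + " -> " + w);  // true, false, null
      if (w != null) {
        previous = w;      // the same instance is recycled on the next call
      }
    }
  }
}

Passing the previous object back in avoids allocating a fresh Writable per row, which appears to be the purpose of the previous parameter in these batch-to-row conversion routines.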
Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.
Class GenericUDFArrayContains, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  // Check if two arguments were passed
  if (arguments.length != ARG_COUNT) {
    throw new UDFArgumentException(
        "The function " + FUNC_NAME + " accepts " + ARG_COUNT + " arguments.");
  }
  // Check if ARRAY_IDX argument is of category LIST
  if (!arguments[ARRAY_IDX].getCategory().equals(Category.LIST)) {
    throw new UDFArgumentTypeException(ARRAY_IDX,
        "\"" + org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "\" "
        + "expected at function ARRAY_CONTAINS, but "
        + "\"" + arguments[ARRAY_IDX].getTypeName() + "\" " + "is found");
  }
  arrayOI = (ListObjectInspector) arguments[ARRAY_IDX];
  arrayElementOI = arrayOI.getListElementObjectInspector();
  valueOI = arguments[VALUE_IDX];
  // Check if list element and value are of same type
  if (!ObjectInspectorUtils.compareTypes(arrayElementOI, valueOI)) {
    throw new UDFArgumentTypeException(VALUE_IDX,
        "\"" + arrayElementOI.getTypeName() + "\""
        + " expected at function ARRAY_CONTAINS, but "
        + "\"" + valueOI.getTypeName() + "\"" + " is found");
  }
  // Check if the comparison is supported for this type
  if (!ObjectInspectorUtils.compareSupported(valueOI)) {
    throw new UDFArgumentException(
        "The function " + FUNC_NAME + " does not support comparison for "
        + "\"" + valueOI.getTypeName() + "\"" + " types");
  }
  result = new BooleanWritable(false);
  return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
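The result BooleanWritable allocated at the end of initialize() is the single object that evaluate() later fills and returns. As a sketch of how this initializer is typically driven, the following hypothetical snippet (the class ArrayContainsDemo and its main method are invented; the ObjectInspector setup mirrors what a query over array<int> would supply) assumes hive-exec is on the classpath:

import java.util.Arrays;

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayContains;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;

// Hypothetical demo class, not part of Hive.
public class ArrayContainsDemo {
  public static void main(String[] args) throws HiveException {
    GenericUDFArrayContains udf = new GenericUDFArrayContains();

    // Inspectors for an array<int> argument and an int search value.
    ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    ObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;

    // initialize() runs the checks above and returns writableBooleanObjectInspector.
    udf.initialize(new ObjectInspector[] { listOI, valueOI });

    // evaluate() fills and returns the shared result BooleanWritable.
    BooleanWritable found = (BooleanWritable) udf.evaluate(new DeferredObject[] {
        new DeferredJavaObject(Arrays.asList(1, 2, 3)),
        new DeferredJavaObject(2) });
    System.out.println(found.get());  // true
  }
}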
Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.
Class GenericUDFBetween, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  boolean invert = (Boolean) ((PrimitiveObjectInspector) argumentOIs[0])
      .getPrimitiveJavaObject(arguments[0].get());
  BooleanWritable left =
      ((BooleanWritable) egt.evaluate(new DeferredObject[] { arguments[1], arguments[2] }));
  if (left == null) {
    return null;
  }
  if (!invert && !left.get()) {
    result.set(false);
    return result;
  }
  BooleanWritable right =
      ((BooleanWritable) elt.evaluate(new DeferredObject[] { arguments[1], arguments[3] }));
  if (right == null) {
    return null;
  }
  boolean between = left.get() && right.get();
  result.set(invert ? !between : between);
  return result;
}
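The null handling and short-circuiting above can be hard to follow inline. The following standalone sketch (all names invented; plain Boolean values stand in for the egt/elt sub-evaluators, with null meaning SQL NULL) mirrors the same control flow onto a reusable BooleanWritable result:

import org.apache.hadoop.io.BooleanWritable;

// Hypothetical sketch, not part of Hive.
public class BetweenSketch {

  // 'geLow' and 'leHigh' play the roles of the egt/elt comparisons above.
  static BooleanWritable between(boolean invert, Boolean geLow, Boolean leHigh,
      BooleanWritable result) {
    if (geLow == null) {
      return null;
    }
    if (!invert && !geLow) {
      result.set(false);         // short-circuit: value is below the lower bound
      return result;
    }
    if (leHigh == null) {
      return null;
    }
    boolean between = geLow && leHigh;
    result.set(invert ? !between : between);
    return result;
  }

  public static void main(String[] args) {
    BooleanWritable result = new BooleanWritable();
    System.out.println(between(false, true, true, result));   // true  (x BETWEEN lo AND hi)
    System.out.println(between(false, false, true, result));  // false (short-circuit)
    System.out.println(between(true, true, false, result));   // true  (x NOT BETWEEN lo AND hi)
    System.out.println(between(false, true, null, result));   // null  (NULL upper comparison)
  }
}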
Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.
Class GenericUDFEWAHBitmapEmpty, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  assert (arguments.length == 1);
  Object b = arguments[0].get();
  ListObjectInspector lloi = (ListObjectInspector) bitmapOI;
  int length = lloi.getListLength(b);
  ArrayList<LongWritable> bitmapArray = new ArrayList<LongWritable>();
  for (int i = 0; i < length; i++) {
    long l = PrimitiveObjectInspectorUtils.getLong(lloi.getListElement(b, i),
        (PrimitiveObjectInspector) lloi.getListElementObjectInspector());
    bitmapArray.add(new LongWritable(l));
  }
  BitmapObjectInput bitmapObjIn = new BitmapObjectInput(bitmapArray);
  EWAHCompressedBitmap bitmap = new EWAHCompressedBitmap();
  try {
    bitmap.readExternal(bitmapObjIn);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  // Return true only if the bitmap has no set bits.
  return new BooleanWritable(!bitmap.iterator().hasNext());
}
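The emptiness test reduces to !bitmap.iterator().hasNext(). A small, hypothetical illustration of just that expression (the class name BitmapEmptyDemo is invented; it assumes the javaewah library used by Hive is on the classpath):

import com.googlecode.javaewah.EWAHCompressedBitmap;
import org.apache.hadoop.io.BooleanWritable;

// Hypothetical demo class, not part of Hive.
public class BitmapEmptyDemo {
  public static void main(String[] args) {
    EWAHCompressedBitmap empty = new EWAHCompressedBitmap();
    EWAHCompressedBitmap nonEmpty = new EWAHCompressedBitmap();
    nonEmpty.set(42);  // one bit set

    // Same emptiness test as the UDF above: no iterable bits means "empty".
    System.out.println(new BooleanWritable(!empty.iterator().hasNext()));     // true
    System.out.println(new BooleanWritable(!nonEmpty.iterator().hasNext()));  // false
  }
}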
Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.
Class TypedBytesRecordReader, method next.
public int next(Writable data) throws IOException {
  int pos = 0;
  barrStr.reset();
  while (true) {
    Type type = tbIn.readTypeCode();
    // it was an empty stream
    if (type == null) {
      return -1;
    }
    if (type == Type.ENDOFRECORD) {
      tbOut.writeEndOfRecord();
      if (barrStr.getLength() > 0) {
        ((BytesWritable) data).set(barrStr.getData(), 0, barrStr.getLength());
      }
      return barrStr.getLength();
    }
    if (pos >= row.size()) {
      Writable wrt = allocateWritable(type);
      assert pos == row.size();
      assert pos == rowTypeName.size();
      row.add(wrt);
      rowTypeName.add(type.name());
      String typeName = typedBytesToTypeName.get(type);
      PrimitiveTypeInfo srcTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
      srcOIns.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(srcTypeInfo));
      converters.add(ObjectInspectorConverters.getConverter(srcOIns.get(pos), dstOIns.get(pos)));
    } else {
      if (!rowTypeName.get(pos).equals(type.name())) {
        throw new RuntimeException("datatype of row changed from " + rowTypeName.get(pos)
            + " to " + type.name());
      }
    }
    Writable w = row.get(pos);
    switch (type) {
    case BYTE:
      tbIn.readByte((ByteWritable) w);
      break;
    case BOOL:
      tbIn.readBoolean((BooleanWritable) w);
      break;
    case INT:
      tbIn.readInt((IntWritable) w);
      break;
    case SHORT:
      tbIn.readShort((ShortWritable) w);
      break;
    case LONG:
      tbIn.readLong((LongWritable) w);
      break;
    case FLOAT:
      tbIn.readFloat((FloatWritable) w);
      break;
    case DOUBLE:
      tbIn.readDouble((DoubleWritable) w);
      break;
    case STRING:
      tbIn.readText((Text) w);
      break;
    default:
      // should never come here
      assert false;
    }
    write(pos, w);
    pos++;
  }
}
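In the BOOL branch, a reusable BooleanWritable is filled directly from the typed-bytes stream. As a minimal, hypothetical illustration of the underlying Writable contract only (the class name is invented, and it uses plain hadoop-common buffers rather than the typed-bytes reader), here is a round trip of a BooleanWritable into and out of its serialized form:

import java.io.IOException;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

// Hypothetical demo class, not part of Hive.
public class BooleanWritableRoundTrip {
  public static void main(String[] args) throws IOException {
    BooleanWritable written = new BooleanWritable(true);

    // Serialize the boolean.
    DataOutputBuffer out = new DataOutputBuffer();
    written.write(out);

    // Deserialize into a reusable instance, as the record reader does per field.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    BooleanWritable read = new BooleanWritable();
    read.readFields(in);

    System.out.println(read.get());  // true
  }
}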