
Example 41 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class BatchToRowReader, method nextBoolean.

/* Routines for stubbing into Writables */
public static BooleanWritable nextBoolean(ColumnVector vector, int row, Object previous) {
    if (vector.isRepeating) {
        row = 0;
    }
    if (vector.noNulls || !vector.isNull[row]) {
        BooleanWritable result;
        if (previous == null || previous.getClass() != BooleanWritable.class) {
            result = new BooleanWritable();
        } else {
            result = (BooleanWritable) previous;
        }
        result.set(((LongColumnVector) vector).vector[row] != 0);
        return result;
    } else {
        return null;
    }
}
Also used : BooleanWritable(org.apache.hadoop.io.BooleanWritable)
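
The helper above avoids allocating a new BooleanWritable for every row by reusing the object passed in as previous. Below is a minimal, self-contained sketch of the same reuse pattern (the readBoolean helper and driver are hypothetical re-implementations for illustration, assuming hive-exec and hadoop-common are on the classpath).

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.io.BooleanWritable;

public class NextBooleanSketch {
    // Hypothetical copy of the reuse pattern: hand back the previous BooleanWritable
    // when possible, allocate only when the caller has nothing to reuse.
    static BooleanWritable readBoolean(LongColumnVector vector, int row, Object previous) {
        if (vector.isRepeating) {
            row = 0;  // every row of a repeating vector holds the value at index 0
        }
        if (vector.noNulls || !vector.isNull[row]) {
            BooleanWritable result = (previous instanceof BooleanWritable)
                    ? (BooleanWritable) previous : new BooleanWritable();
            result.set(vector.vector[row] != 0);  // booleans are stored as 0/1 longs
            return result;
        }
        return null;  // SQL NULL
    }

    public static void main(String[] args) {
        LongColumnVector col = new LongColumnVector(3);
        col.vector[0] = 1; col.vector[1] = 0; col.vector[2] = 1;
        Object previous = null;
        for (int r = 0; r < 3; r++) {
            previous = readBoolean(col, r, previous);  // same object after the first row
            System.out.println(previous);              // true, false, true
        }
    }
}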

Example 42 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class GenericUDFArrayContains, method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Check if two arguments were passed
    if (arguments.length != ARG_COUNT) {
        throw new UDFArgumentException("The function " + FUNC_NAME + " accepts " + ARG_COUNT + " arguments.");
    }
    // Check if ARRAY_IDX argument is of category LIST
    if (!arguments[ARRAY_IDX].getCategory().equals(Category.LIST)) {
        throw new UDFArgumentTypeException(ARRAY_IDX, "\"" + org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "\" " + "expected at function ARRAY_CONTAINS, but " + "\"" + arguments[ARRAY_IDX].getTypeName() + "\" " + "is found");
    }
    arrayOI = (ListObjectInspector) arguments[ARRAY_IDX];
    arrayElementOI = arrayOI.getListElementObjectInspector();
    valueOI = arguments[VALUE_IDX];
    // Check if list element and value are of same type
    if (!ObjectInspectorUtils.compareTypes(arrayElementOI, valueOI)) {
        throw new UDFArgumentTypeException(VALUE_IDX, "\"" + arrayElementOI.getTypeName() + "\"" + " expected at function ARRAY_CONTAINS, but " + "\"" + valueOI.getTypeName() + "\"" + " is found");
    }
    // Check if the comparison is supported for this type
    if (!ObjectInspectorUtils.compareSupported(valueOI)) {
        throw new UDFArgumentException("The function " + FUNC_NAME + " does not support comparison for " + "\"" + valueOI.getTypeName() + "\"" + " types");
    }
    result = new BooleanWritable(false);
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException), BooleanWritable(org.apache.hadoop.io.BooleanWritable), UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)
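
A minimal sketch of driving this UDF directly (hypothetical test-style code, assuming hive-exec is on the classpath): initialize receives a list-of-int inspector and an int inspector, and evaluate then returns a writable boolean.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayContains;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ArrayContainsSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFArrayContains udf = new GenericUDFArrayContains();
        // array<int> argument and int argument, both backed by plain Java objects
        ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
        udf.initialize(new ObjectInspector[] { listOI, intOI });

        Object result = udf.evaluate(new DeferredObject[] {
                new DeferredJavaObject(Arrays.asList(1, 2, 3)),
                new DeferredJavaObject(2) });
        System.out.println(result);  // true (a BooleanWritable)
    }
}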

Example 43 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class GenericUDFBetween, method evaluate.

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    boolean invert = (Boolean) ((PrimitiveObjectInspector) argumentOIs[0]).getPrimitiveJavaObject(arguments[0].get());
    BooleanWritable left = ((BooleanWritable) egt.evaluate(new DeferredObject[] { arguments[1], arguments[2] }));
    if (left == null) {
        return null;
    }
    if (!invert && !left.get()) {
        result.set(false);
        return result;
    }
    BooleanWritable right = ((BooleanWritable) elt.evaluate(new DeferredObject[] { arguments[1], arguments[3] }));
    if (right == null) {
        return null;
    }
    boolean between = left.get() && right.get();
    result.set(invert ? !between : between);
    return result;
}
Also used : BooleanWritable(org.apache.hadoop.io.BooleanWritable)
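
A hypothetical driver for the same UDF (assuming hive-exec is on the classpath): BETWEEN takes four arguments, the invert flag, the value, and the two bounds, so 5 BETWEEN 1 AND 10 becomes the call below and yields a BooleanWritable holding true.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class BetweenSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFBetween udf = new GenericUDFBetween();
        ObjectInspector boolOI = PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
        ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
        // arguments: invert flag, value, lower bound, upper bound
        udf.initialize(new ObjectInspector[] { boolOI, intOI, intOI, intOI });

        Object result = udf.evaluate(new DeferredObject[] {
                new DeferredJavaObject(false),   // NOT BETWEEN would pass true here
                new DeferredJavaObject(5),
                new DeferredJavaObject(1),
                new DeferredJavaObject(10) });
        System.out.println(result);  // true (5 lies within [1, 10])
    }
}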

Example 44 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class GenericUDFEWAHBitmapEmpty, method evaluate.

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    assert (arguments.length == 1);
    Object b = arguments[0].get();
    ListObjectInspector lloi = (ListObjectInspector) bitmapOI;
    int length = lloi.getListLength(b);
    ArrayList<LongWritable> bitmapArray = new ArrayList<LongWritable>();
    for (int i = 0; i < length; i++) {
        long l = PrimitiveObjectInspectorUtils.getLong(lloi.getListElement(b, i), (PrimitiveObjectInspector) lloi.getListElementObjectInspector());
        bitmapArray.add(new LongWritable(l));
    }
    BitmapObjectInput bitmapObjIn = new BitmapObjectInput(bitmapArray);
    EWAHCompressedBitmap bitmap = new EWAHCompressedBitmap();
    try {
        bitmap.readExternal(bitmapObjIn);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    // Return true only if the bitmap has no set bits.
    return new BooleanWritable(!bitmap.iterator().hasNext());
}
Also used : BitmapObjectInput(org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput), BooleanWritable(org.apache.hadoop.io.BooleanWritable), ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), EWAHCompressedBitmap(javaewah.EWAHCompressedBitmap), ArrayList(java.util.ArrayList), LongWritable(org.apache.hadoop.io.LongWritable), IOException(java.io.IOException)
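
The final emptiness test relies on EWAHCompressedBitmap's iterator having nothing to return when no bit is set. A hypothetical stand-alone sketch of just that check, assuming the javaewah library Hive bundles:

import javaewah.EWAHCompressedBitmap;
import org.apache.hadoop.io.BooleanWritable;

public class BitmapEmptySketch {
    public static void main(String[] args) {
        EWAHCompressedBitmap empty = new EWAHCompressedBitmap();
        EWAHCompressedBitmap nonEmpty = new EWAHCompressedBitmap();
        nonEmpty.set(42);  // mark a single row position

        // Same test the UDF returns: true only when no bit is set.
        System.out.println(new BooleanWritable(!empty.iterator().hasNext()));     // true
        System.out.println(new BooleanWritable(!nonEmpty.iterator().hasNext()));  // false
    }
}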

Example 45 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class TypedBytesRecordReader, method next.

public int next(Writable data) throws IOException {
    int pos = 0;
    barrStr.reset();
    while (true) {
        Type type = tbIn.readTypeCode();
        // it was an empty stream
        if (type == null) {
            return -1;
        }
        if (type == Type.ENDOFRECORD) {
            tbOut.writeEndOfRecord();
            if (barrStr.getLength() > 0) {
                ((BytesWritable) data).set(barrStr.getData(), 0, barrStr.getLength());
            }
            return barrStr.getLength();
        }
        if (pos >= row.size()) {
            Writable wrt = allocateWritable(type);
            assert pos == row.size();
            assert pos == rowTypeName.size();
            row.add(wrt);
            rowTypeName.add(type.name());
            String typeName = typedBytesToTypeName.get(type);
            PrimitiveTypeInfo srcTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
            srcOIns.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(srcTypeInfo));
            converters.add(ObjectInspectorConverters.getConverter(srcOIns.get(pos), dstOIns.get(pos)));
        } else {
            if (!rowTypeName.get(pos).equals(type.name())) {
                throw new RuntimeException("datatype of row changed from " + rowTypeName.get(pos) + " to " + type.name());
            }
        }
        Writable w = row.get(pos);
        switch(type) {
            case BYTE:
                tbIn.readByte((ByteWritable) w);
                break;
            case BOOL:
                tbIn.readBoolean((BooleanWritable) w);
                break;
            case INT:
                tbIn.readInt((IntWritable) w);
                break;
            case SHORT:
                tbIn.readShort((ShortWritable) w);
                break;
            case LONG:
                tbIn.readLong((LongWritable) w);
                break;
            case FLOAT:
                tbIn.readFloat((FloatWritable) w);
                break;
            case DOUBLE:
                tbIn.readDouble((DoubleWritable) w);
                break;
            case STRING:
                tbIn.readText((Text) w);
                break;
            default:
                // should never come here
                assert false;
        }
        write(pos, w);
        pos++;
    }
}
Also used : ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable), Writable(org.apache.hadoop.io.Writable), LongWritable(org.apache.hadoop.io.LongWritable), BytesWritable(org.apache.hadoop.io.BytesWritable), DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable), IntWritable(org.apache.hadoop.io.IntWritable), BooleanWritable(org.apache.hadoop.io.BooleanWritable), FloatWritable(org.apache.hadoop.io.FloatWritable), PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
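
The reader allocates one Writable per column the first time its type code is seen, then reuses that object for every subsequent row. A hypothetical, self-contained sketch of the allocate-once pattern (the ColType enum and allocateWritable helper below are illustrative stand-ins, not the reader's actual typed-bytes Type):

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class AllocateWritableSketch {
    // Local stand-in for the reader's typed-bytes type codes (illustrative only).
    enum ColType { BOOL, INT, LONG, STRING }

    static Writable allocateWritable(ColType type) {
        switch (type) {
            case BOOL:   return new BooleanWritable();
            case INT:    return new IntWritable();
            case LONG:   return new LongWritable();
            case STRING: return new Text();
            default:     throw new IllegalArgumentException("unsupported type " + type);
        }
    }

    public static void main(String[] args) {
        Writable w = allocateWritable(ColType.BOOL);
        ((BooleanWritable) w).set(true);   // row 1
        System.out.println(w);             // true
        ((BooleanWritable) w).set(false);  // row 2 reuses the same object
        System.out.println(w);             // false
    }
}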

Aggregations

BooleanWritable (org.apache.hadoop.io.BooleanWritable) 63
IntWritable (org.apache.hadoop.io.IntWritable) 41
LongWritable (org.apache.hadoop.io.LongWritable) 40
FloatWritable (org.apache.hadoop.io.FloatWritable) 37
Text (org.apache.hadoop.io.Text) 31
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable) 27
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable) 26
BytesWritable (org.apache.hadoop.io.BytesWritable) 26
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable) 25
Writable (org.apache.hadoop.io.Writable) 17
Test (org.junit.Test) 17
ArrayList (java.util.ArrayList) 15
Configuration (org.apache.hadoop.conf.Configuration) 12
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable) 12
Random (java.util.Random) 11
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) 10
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable) 9
KeyValue (org.apache.hadoop.hbase.KeyValue) 7
Result (org.apache.hadoop.hbase.client.Result) 7
HiveChar (org.apache.hadoop.hive.common.type.HiveChar) 7