Search in sources :

Example 96 with DoubleWritable

use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.

The following example is taken from the class TestPTFRowContainer, method runTest.

/**
 * Exercises a {@code PTFRowContainer} populated with {@code sz} rows at the given
 * block size: verifies the forward scan, backward random access via {@code getAt},
 * purely random access, and interleaved {@code getAt}/{@code next} calls.
 *
 * @param sz        number of rows to add to the container
 * @param blockSize block size used when constructing the row container
 * @param value     filler string stored in the last column of every row
 * @throws SerDeException if row serialization fails
 * @throws HiveException  if the container cannot be created or read
 */
private void runTest(int sz, int blockSize, String value) throws SerDeException, HiveException {
    List<Object> row;
    PTFRowContainer<List<Object>> rc = rowContainer(blockSize);
    int i;
    for (i = 0; i < sz; i++) {
        row = new ArrayList<Object>();
        row.add(new IntWritable(i));
        row.add(new Text("abc " + i));
        row.add(new DoubleWritable(i));
        row.add(new IntWritable(i));
        row.add(new Text("def " + i));
        row.add(new Text(value));
        rc.addRow(row);
    }
    // test forward scan
    assertEquals(sz, rc.rowCount());
    i = 0;
    // fix: removed a dead `row = new ArrayList<Object>()` allocation that was
    // immediately overwritten by rc.first()
    row = rc.first();
    while (row != null) {
        assertEquals("abc " + i, row.get(1).toString());
        i++;
        row = rc.next();
    }
    // fix: assert the forward scan actually visited every row
    assertEquals(sz, i);
    // test backward scan
    // NOTE(review): first() is presumably kept here to reset the container's
    // internal iterator state before random access -- confirm before removing
    row = rc.first();
    for (i = sz - 1; i >= 0; i--) {
        row = rc.getAt(i);
        assertEquals("abc " + i, row.get(1).toString());
    }
    Random r = new Random(1000L);
    // test random scan (fixed seed keeps the test reproducible)
    for (i = 0; i < 100; i++) {
        int j = r.nextInt(sz);
        row = rc.getAt(j);
        assertEquals("abc " + j, row.get(1).toString());
    }
    // intersperse getAt and next calls to verify sequential reads still work
    // after a random-access repositioning
    for (i = 0; i < 100; i++) {
        int j = r.nextInt(sz);
        row = rc.getAt(j);
        assertEquals("abc " + j, row.get(1).toString());
        for (int k = j + 1; k < j + (blockSize / 4) && k < sz; k++) {
            row = rc.next();
            assertEquals("def " + k, row.get(4).toString());
        }
    }
}
Also used : Random(java.util.Random) ArrayList(java.util.ArrayList) List(java.util.List) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) IntWritable(org.apache.hadoop.io.IntWritable)

Example 97 with DoubleWritable

use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.

The following example is taken from the class GenericUDFAbs, method evaluate.

/**
 * Returns the absolute value of the single argument, boxed in a reusable
 * writable whose concrete type is selected by {@code inputType}.
 * Returns {@code null} for a null argument, and for a FLOAT/STRING/DOUBLE
 * argument whose conversion to double yields null (presumably a non-numeric
 * string -- converter semantics live outside this method).
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object value = arguments[0].get();
    if (value == null) {
        return null;
    }
    switch (inputType) {
        case SHORT:
        case BYTE:
        case INT: {
            IntWritable asInt = (IntWritable) inputConverter.convert(value);
            resultInt.set(Math.abs(asInt.get()));
            return resultInt;
        }
        case LONG: {
            LongWritable asLong = (LongWritable) inputConverter.convert(value);
            resultLong.set(Math.abs(asLong.get()));
            return resultLong;
        }
        case FLOAT:
        case STRING:
        case DOUBLE: {
            Object converted = inputConverter.convert(value);
            if (converted == null) {
                return null;
            }
            resultDouble.set(Math.abs(((DoubleWritable) converted).get()));
            return resultDouble;
        }
        case DECIMAL: {
            HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) argumentOI;
            HiveDecimalWritable decimal = decimalOI.getPrimitiveWritableObject(value);
            if (decimal == null) {
                return null;
            }
            // reuse the instance-level writable; mutateAbs() negates in place if needed
            resultDecimal.set(decimal);
            resultDecimal.mutateAbs();
            return resultDecimal;
        }
        default:
            throw new UDFArgumentException("ABS only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) IntWritable(org.apache.hadoop.io.IntWritable)

Example 98 with DoubleWritable

use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.

The following example is taken from the class GenericUDFOPNegative, method evaluate.

/**
 * Evaluates unary minus over the single argument, dispatching on the result
 * object inspector's primitive category and writing into the reusable
 * per-type writable field. Returns {@code null} when the argument slot, the
 * raw value, or the converted value is null.
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0] == null) {
        return null;
    }
    Object raw = arguments[0].get();
    if (raw == null) {
        return null;
    }
    Object value = converter.convert(raw);
    if (value == null) {
        return null;
    }
    switch (resultOI.getPrimitiveCategory()) {
        case BYTE:
            // narrowing cast mirrors Java's byte arithmetic semantics
            byteWritable.set((byte) -((ByteWritable) value).get());
            return byteWritable;
        case SHORT:
            shortWritable.set((short) -((ShortWritable) value).get());
            return shortWritable;
        case INT:
            intWritable.set(-((IntWritable) value).get());
            return intWritable;
        case LONG:
            longWritable.set(-((LongWritable) value).get());
            return longWritable;
        case FLOAT:
            floatWritable.set(-((FloatWritable) value).get());
            return floatWritable;
        case DOUBLE:
            doubleWritable.set(-((DoubleWritable) value).get());
            return doubleWritable;
        case DECIMAL:
            // copy then negate in place on the reusable writable
            decimalWritable.set((HiveDecimalWritable) value);
            decimalWritable.mutateNegate();
            return decimalWritable;
        case INTERVAL_YEAR_MONTH: {
            HiveIntervalYearMonth yearMonth = ((HiveIntervalYearMonthWritable) value).getHiveIntervalYearMonth();
            this.intervalYearMonthWritable.set(yearMonth.negate());
            return this.intervalYearMonthWritable;
        }
        case INTERVAL_DAY_TIME: {
            HiveIntervalDayTime dayTime = ((HiveIntervalDayTimeWritable) value).getHiveIntervalDayTime();
            this.intervalDayTimeWritable.set(dayTime.negate());
            return intervalDayTimeWritable;
        }
        default:
            // Should never happen.
            throw new RuntimeException("Unexpected type in evaluating " + opName + ": " + resultOI.getPrimitiveCategory());
    }
}
Also used : HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)

Example 99 with DoubleWritable

use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.

The following example is taken from the class GenericUDFReflect2, method evaluate.

/**
 * Invokes the reflected target method on the primitive Java object extracted
 * from the first argument, then copies the reflective result into the
 * reusable writable {@code returnObj} according to the return category.
 * Returns {@code null} for a null target, a null reflective result, or a
 * VOID return category.
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object target = targetOI.getPrimitiveJavaObject(arguments[0].get());
    if (target == null) {
        return null;
    }
    Object invoked;
    try {
        // remaining UDF arguments (from index 2) become the method parameters
        invoked = method.invoke(target, setupParameters(arguments, 2));
    } catch (InvocationTargetException e) {
        // unwrap the reflection wrapper so callers see the real cause
        throw new HiveException(e.getCause());
    } catch (Exception e) {
        throw new HiveException(e);
    }
    if (invoked == null) {
        return null;
    }
    switch (returnOI.getPrimitiveCategory()) {
        case VOID:
            return null;
        case BOOLEAN:
            ((BooleanWritable) returnObj).set((Boolean) invoked);
            return returnObj;
        case BYTE:
            ((ByteWritable) returnObj).set((Byte) invoked);
            return returnObj;
        case SHORT:
            ((ShortWritable) returnObj).set((Short) invoked);
            return returnObj;
        case INT:
            ((IntWritable) returnObj).set((Integer) invoked);
            return returnObj;
        case LONG:
            ((LongWritable) returnObj).set((Long) invoked);
            return returnObj;
        case FLOAT:
            ((FloatWritable) returnObj).set((Float) invoked);
            return returnObj;
        case DOUBLE:
            ((DoubleWritable) returnObj).set((Double) invoked);
            return returnObj;
        case STRING:
            ((Text) returnObj).set((String) invoked);
            return returnObj;
        case TIMESTAMP:
            ((TimestampWritableV2) returnObj).set((Timestamp) invoked);
            return returnObj;
        case BINARY: {
            byte[] bytes = (byte[]) invoked;
            ((BytesWritable) returnObj).set(bytes, 0, bytes.length);
            return returnObj;
        }
        case DECIMAL:
            ((HiveDecimalWritable) returnObj).set((HiveDecimal) invoked);
            return returnObj;
    }
    throw new HiveException("Invalid type " + returnOI.getPrimitiveCategory());
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) InvocationTargetException(java.lang.reflect.InvocationTargetException) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) InvocationTargetException(java.lang.reflect.InvocationTargetException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)

Example 100 with DoubleWritable

Use of DoubleWritable (in this example, org.apache.hadoop.io.DoubleWritable -- see the import list below) in project hive by apache.

The following example is taken from the class TestETypeConverter, method testGetDoubleConverter.

/**
 * Verifies that {@code ETypeConverter} builds a converter for an optional
 * Parquet DOUBLE column that surfaces an added double value to its parent
 * as a {@code DoubleWritable}.
 */
@Test
public void testGetDoubleConverter() throws Exception {
    MyConverterParent converterParent = new MyConverterParent();
    PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.DOUBLE).named("value");
    PrimitiveConverter converter = ETypeConverter.getNewConverter(primitiveType, 1, converterParent, null);
    // fix: dropped the redundant (PrimitiveConverter) cast -- the local already has that type
    converter.addDouble(3276);
    Writable writable = converterParent.getValue();
    DoubleWritable doubleWritable = (DoubleWritable) writable;
    // delta 0: 3276 is exactly representable as a double, so an exact match is expected
    assertEquals(3276, doubleWritable.get(), 0);
}
Also used : PrimitiveConverter(org.apache.parquet.io.api.PrimitiveConverter) Writable(org.apache.hadoop.io.Writable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) PrimitiveType(org.apache.parquet.schema.PrimitiveType) DoubleWritable(org.apache.hadoop.io.DoubleWritable) Test(org.junit.Test)

Aggregations

DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)132 Test (org.junit.Test)85 IntWritable (org.apache.hadoop.io.IntWritable)71 LongWritable (org.apache.hadoop.io.LongWritable)70 Text (org.apache.hadoop.io.Text)57 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)52 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)51 FloatWritable (org.apache.hadoop.io.FloatWritable)51 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)49 BooleanWritable (org.apache.hadoop.io.BooleanWritable)46 DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject)42 DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject)42 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)42 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)40 BytesWritable (org.apache.hadoop.io.BytesWritable)36 ArrayList (java.util.ArrayList)29 HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable)27 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)21 HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable)21 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)19