Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in the Apache Hive project.
The class TestPTFRowContainer, method runTest.
private void runTest(int sz, int blockSize, String value) throws SerDeException, HiveException {
  List<Object> row;
  PTFRowContainer<List<Object>> rc = rowContainer(blockSize);
  int i;
  for (i = 0; i < sz; i++) {
    row = new ArrayList<Object>();
    row.add(new IntWritable(i));
    row.add(new Text("abc " + i));
    row.add(new DoubleWritable(i));
    row.add(new IntWritable(i));
    row.add(new Text("def " + i));
    row.add(new Text(value));
    rc.addRow(row);
  }
  // test forward scan
  assertEquals(sz, rc.rowCount());
  i = 0;
  row = new ArrayList<Object>();
  row = rc.first();
  while (row != null) {
    assertEquals("abc " + i, row.get(1).toString());
    i++;
    row = rc.next();
  }
  // test backward scan
  row = rc.first();
  for (i = sz - 1; i >= 0; i--) {
    row = rc.getAt(i);
    assertEquals("abc " + i, row.get(1).toString());
  }
  Random r = new Random(1000L);
  // test random scan
  for (i = 0; i < 100; i++) {
    int j = r.nextInt(sz);
    row = rc.getAt(j);
    assertEquals("abc " + j, row.get(1).toString());
  }
  // intersperse getAt and next calls
  for (i = 0; i < 100; i++) {
    int j = r.nextInt(sz);
    row = rc.getAt(j);
    assertEquals("abc " + j, row.get(1).toString());
    for (int k = j + 1; k < j + (blockSize / 4) && k < sz; k++) {
      row = rc.next();
      assertEquals("def " + k, row.get(4).toString());
    }
  }
}
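The assertions above only exercise the Text columns. As a hypothetical follow-up (not part of the original test), the DoubleWritable column at index 2 could be verified as well, assuming the container's SerDe hands rows back with Hive's DoubleWritable for DOUBLE fields and that sz > 7:

  // Hypothetical extra check inside runTest: column 2 was written as
  // new DoubleWritable(i), so reading row 7 back should yield 7.0.
  // A delta of 0.0 is safe because small integers are exact in a double.
  row = rc.getAt(7);
  assertEquals(7.0, ((DoubleWritable) row.get(2)).get(), 0.0);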
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in the Apache Hive project.
The class GenericUDFAbs, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object valObject = arguments[0].get();
  if (valObject == null) {
    return null;
  }
  switch (inputType) {
  case SHORT:
  case BYTE:
  case INT:
    valObject = inputConverter.convert(valObject);
    resultInt.set(Math.abs(((IntWritable) valObject).get()));
    return resultInt;
  case LONG:
    valObject = inputConverter.convert(valObject);
    resultLong.set(Math.abs(((LongWritable) valObject).get()));
    return resultLong;
  case FLOAT:
  case STRING:
  case DOUBLE:
    valObject = inputConverter.convert(valObject);
    if (valObject == null) {
      return null;
    }
    resultDouble.set(Math.abs(((DoubleWritable) valObject).get()));
    return resultDouble;
  case DECIMAL:
    HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) argumentOI;
    HiveDecimalWritable val = decimalOI.getPrimitiveWritableObject(valObject);
    if (val != null) {
      resultDecimal.set(val);
      resultDecimal.mutateAbs();
      val = resultDecimal;
    }
    return val;
  default:
    throw new UDFArgumentException(
        "ABS only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
  }
}
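A minimal sketch of driving this method outside of a query, following the standard GenericUDF lifecycle of initialize-then-evaluate. The class name and input value below are illustrative, not taken from Hive's test suite:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class AbsSketch {
  public static void main(String[] args) throws HiveException {
    GenericUDFAbs udf = new GenericUDFAbs();
    // Declare the single argument as a writable double column.
    udf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector });
    // Wrap the input the way the execution engine would.
    DeferredObject[] input = { new DeferredJavaObject(new DoubleWritable(-3.5)) };
    DoubleWritable result = (DoubleWritable) udf.evaluate(input);
    System.out.println(result.get()); // prints 3.5
  }
}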
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in the Apache Hive project.
The class GenericUDFOPNegative, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null) {
    return null;
  }
  Object input = arguments[0].get();
  if (input == null) {
    return null;
  }
  input = converter.convert(input);
  if (input == null) {
    return null;
  }
  switch (resultOI.getPrimitiveCategory()) {
  case BYTE:
    byteWritable.set((byte) -(((ByteWritable) input).get()));
    return byteWritable;
  case SHORT:
    shortWritable.set((short) -(((ShortWritable) input).get()));
    return shortWritable;
  case INT:
    intWritable.set(-(((IntWritable) input).get()));
    return intWritable;
  case LONG:
    longWritable.set(-(((LongWritable) input).get()));
    return longWritable;
  case FLOAT:
    floatWritable.set(-(((FloatWritable) input).get()));
    return floatWritable;
  case DOUBLE:
    doubleWritable.set(-(((DoubleWritable) input).get()));
    return doubleWritable;
  case DECIMAL:
    decimalWritable.set((HiveDecimalWritable) input);
    decimalWritable.mutateNegate();
    return decimalWritable;
  case INTERVAL_YEAR_MONTH:
    HiveIntervalYearMonth intervalYearMonth =
        ((HiveIntervalYearMonthWritable) input).getHiveIntervalYearMonth();
    this.intervalYearMonthWritable.set(intervalYearMonth.negate());
    return this.intervalYearMonthWritable;
  case INTERVAL_DAY_TIME:
    HiveIntervalDayTime intervalDayTime =
        ((HiveIntervalDayTimeWritable) input).getHiveIntervalDayTime();
    this.intervalDayTimeWritable.set(intervalDayTime.negate());
    return intervalDayTimeWritable;
  default:
    // Should never happen.
    throw new RuntimeException("Unexpected type in evaluating " + opName + ": " +
        resultOI.getPrimitiveCategory());
  }
}
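The same initialize-then-evaluate pattern applies to this operator. A minimal illustrative sketch of the DOUBLE branch (imports as in the previous sketch, plus GenericUDFOPNegative):

// Hedged sketch, not from Hive's tests: negate a writable double.
GenericUDFOPNegative udf = new GenericUDFOPNegative();
udf.initialize(new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.writableDoubleObjectInspector });
DeferredObject[] input = { new DeferredJavaObject(new DoubleWritable(2.5)) };
DoubleWritable out = (DoubleWritable) udf.evaluate(input);
// out.get() == -2.5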
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in the Apache Hive project.
The class GenericUDFReflect2, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object targetObject = targetOI.getPrimitiveJavaObject(arguments[0].get());
  if (targetObject == null) {
    return null;
  }
  Object result = null;
  try {
    result = method.invoke(targetObject, setupParameters(arguments, 2));
  } catch (InvocationTargetException e) {
    throw new HiveException(e.getCause());
  } catch (Exception e) {
    throw new HiveException(e);
  }
  if (result == null) {
    return null;
  }
  switch (returnOI.getPrimitiveCategory()) {
  case VOID:
    return null;
  case BOOLEAN:
    ((BooleanWritable) returnObj).set((Boolean) result);
    return returnObj;
  case BYTE:
    ((ByteWritable) returnObj).set((Byte) result);
    return returnObj;
  case SHORT:
    ((ShortWritable) returnObj).set((Short) result);
    return returnObj;
  case INT:
    ((IntWritable) returnObj).set((Integer) result);
    return returnObj;
  case LONG:
    ((LongWritable) returnObj).set((Long) result);
    return returnObj;
  case FLOAT:
    ((FloatWritable) returnObj).set((Float) result);
    return returnObj;
  case DOUBLE:
    ((DoubleWritable) returnObj).set((Double) result);
    return returnObj;
  case STRING:
    ((Text) returnObj).set((String) result);
    return returnObj;
  case TIMESTAMP:
    ((TimestampWritableV2) returnObj).set((Timestamp) result);
    return returnObj;
  case BINARY:
    ((BytesWritable) returnObj).set((byte[]) result, 0, ((byte[]) result).length);
    return returnObj;
  case DECIMAL:
    ((HiveDecimalWritable) returnObj).set((HiveDecimal) result);
    return returnObj;
  }
  throw new HiveException("Invalid type " + returnOI.getPrimitiveCategory());
}
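For reflect2 the second argument must be a constant string naming the method to invoke reflectively on the first argument. A minimal sketch (illustrative; imports as above, plus Text, IntWritable, GenericUDFReflect2, and TypeInfoFactory) that calls String.length on a string value:

GenericUDFReflect2 udf = new GenericUDFReflect2();
// Argument 0 is the target value; argument 1 is the method name and
// must be a constant known at initialization time.
udf.initialize(new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.writableStringObjectInspector,
    PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        TypeInfoFactory.stringTypeInfo, new Text("length")) });
DeferredObject[] input = {
    new DeferredJavaObject(new Text("hello")),
    new DeferredJavaObject(new Text("length")) };
IntWritable out = (IntWritable) udf.evaluate(input);
// out.get() == 5, i.e. "hello".length()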
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in the Apache Hive project.
The class TestETypeConverter, method testGetDoubleConverter.
@Test
public void testGetDoubleConverter() throws Exception {
  MyConverterParent converterParent = new MyConverterParent();
  PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.DOUBLE).named("value");
  PrimitiveConverter converter = ETypeConverter.getNewConverter(primitiveType, 1, converterParent, null);
  converter.addDouble(3276);
  Writable writable = converterParent.getValue();
  DoubleWritable doubleWritable = (DoubleWritable) writable;
  assertEquals(3276, doubleWritable.get(), 0);
}
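MyConverterParent is a test helper that is not shown on this page. A hypothetical minimal version, assuming the ConverterParent contract only requires capturing emitted Writables and exposing conversion metadata, might look like:

// Hypothetical stand-in for the test's MyConverterParent helper: a
// ConverterParent that records the last Writable set on it.
private static class MyConverterParent implements ConverterParent {
  private Writable value;

  @Override
  public void set(int index, Writable value) {
    this.value = value;
  }

  public Writable getValue() {
    return value;
  }

  @Override
  public Map<String, String> getMetadata() {
    return new HashMap<>(); // no conversion metadata needed for doubles
  }
}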