Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From class TestVectorGroupByOperator, method testKeyTypeAggregate:
private void testKeyTypeAggregate(String aggregateName, FakeVectorRowBatchFromObjectIterables data, Map<Object, Object> expected) throws HiveException {
  List<String> mapColumnNames = new ArrayList<String>();
  mapColumnNames.add("Key");
  mapColumnNames.add("Value");
  VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
  Set<Object> keys = new HashSet<Object>();
  AggregationDesc agg = buildAggregationDesc(ctx, aggregateName, GenericUDAFEvaluator.Mode.PARTIAL1, "Value", TypeInfoFactory.getPrimitiveTypeInfo(data.getTypes()[1]));
  ArrayList<AggregationDesc> aggs = new ArrayList<AggregationDesc>();
  aggs.add(agg);
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  outputColumnNames.add("_col0");
  outputColumnNames.add("_col1");
  GroupByDesc desc = new GroupByDesc();
  VectorGroupByDesc vectorGroupByDesc = new VectorGroupByDesc();
  desc.setOutputColumnNames(outputColumnNames);
  desc.setAggregators(aggs);
  vectorGroupByDesc.setProcessingMode(ProcessingMode.HASH);
  ExprNodeDesc keyExp = buildColumnDesc(ctx, "Key", TypeInfoFactory.getPrimitiveTypeInfo(data.getTypes()[0]));
  ArrayList<ExprNodeDesc> keysDesc = new ArrayList<ExprNodeDesc>();
  keysDesc.add(keyExp);
  desc.setKeys(keysDesc);
  CompilationOpContext cCtx = new CompilationOpContext();
  Operator<? extends OperatorDesc> groupByOp = OperatorFactory.get(cCtx, desc);
  VectorGroupByOperator vgo = (VectorGroupByOperator) Vectorizer.vectorizeGroupByOperator(groupByOp, ctx, vectorGroupByDesc);
  if (vgo == null) {
    assertTrue(false);
  }
  FakeCaptureVectorToRowOutputOperator out = FakeCaptureVectorToRowOutputOperator.addCaptureOutputChild(cCtx, vgo);
  vgo.initialize(hconf, null);
  out.setOutputInspector(new FakeCaptureVectorToRowOutputOperator.OutputInspector() {
    private int rowIndex;
    private String aggregateName;
    private Map<Object, Object> expected;
    private Set<Object> keys;

    @Override
    public void inspectRow(Object row, int tag) throws HiveException {
      assertTrue(row instanceof Object[]);
      Object[] fields = (Object[]) row;
      assertEquals(2, fields.length);
      Object key = fields[0];
      Object keyValue = null;
      if (null == key) {
        keyValue = null;
      } else if (key instanceof ByteWritable) {
        ByteWritable bwKey = (ByteWritable) key;
        keyValue = bwKey.get();
      } else if (key instanceof ShortWritable) {
        ShortWritable swKey = (ShortWritable) key;
        keyValue = swKey.get();
      } else if (key instanceof IntWritable) {
        IntWritable iwKey = (IntWritable) key;
        keyValue = iwKey.get();
      } else if (key instanceof LongWritable) {
        LongWritable lwKey = (LongWritable) key;
        keyValue = lwKey.get();
      } else if (key instanceof TimestampWritableV2) {
        TimestampWritableV2 twKey = (TimestampWritableV2) key;
        keyValue = twKey.getTimestamp().toSqlTimestamp();
      } else if (key instanceof DoubleWritable) {
        DoubleWritable dwKey = (DoubleWritable) key;
        keyValue = dwKey.get();
      } else if (key instanceof FloatWritable) {
        FloatWritable fwKey = (FloatWritable) key;
        keyValue = fwKey.get();
      } else if (key instanceof BooleanWritable) {
        BooleanWritable bwKey = (BooleanWritable) key;
        keyValue = bwKey.get();
      } else if (key instanceof HiveDecimalWritable) {
        HiveDecimalWritable hdwKey = (HiveDecimalWritable) key;
        keyValue = hdwKey.getHiveDecimal();
      } else {
        Assert.fail(String.format("Not implemented key output type %s: %s", key.getClass().getName(), key));
      }
      String keyValueAsString = String.format("%s", keyValue);
      assertTrue(expected.containsKey(keyValue));
      Object expectedValue = expected.get(keyValue);
      Object value = fields[1];
      Validator validator = getValidator(aggregateName);
      validator.validate(keyValueAsString, expectedValue, new Object[] { value });
      keys.add(keyValue);
    }

    private FakeCaptureVectorToRowOutputOperator.OutputInspector init(String aggregateName, Map<Object, Object> expected, Set<Object> keys) {
      this.aggregateName = aggregateName;
      this.expected = expected;
      this.keys = keys;
      return this;
    }
  }.init(aggregateName, expected, keys));
  for (VectorizedRowBatch unit : data) {
    vgo.process(unit, 0);
  }
  vgo.close(false);
  List<Object> outBatchList = out.getCapturedRows();
  assertNotNull(outBatchList);
  assertEquals(expected.size(), outBatchList.size());
  assertEquals(expected.size(), keys.size());
}
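A minimal standalone sketch of the same key-unwrapping idea the OutputInspector above uses: each Writable key type is converted to its plain Java value, and a HiveDecimalWritable key in particular is unwrapped via getHiveDecimal(). This is not part of the Hive test; the class and method names are made up for illustration.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;

public class KeyUnwrapSketch {
  // Unwrap a group-by key Writable into the plain Java value used for map lookups.
  static Object unwrapKey(Writable key) {
    if (key == null) {
      return null;
    } else if (key instanceof IntWritable) {
      return ((IntWritable) key).get();
    } else if (key instanceof LongWritable) {
      return ((LongWritable) key).get();
    } else if (key instanceof HiveDecimalWritable) {
      // HiveDecimalWritable wraps a HiveDecimal; getHiveDecimal() yields the value object.
      return ((HiveDecimalWritable) key).getHiveDecimal();
    }
    throw new IllegalArgumentException("Unhandled key type: " + key.getClass().getName());
  }

  public static void main(String[] args) {
    System.out.println(unwrapKey(new IntWritable(42)));                                   // 42
    System.out.println(unwrapKey(new HiveDecimalWritable(HiveDecimal.create("12.50")))); // 12.5 (trailing zero trimmed)
  }
}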
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From class TestOrcFile, method testHiveDecimalAllNulls:
@Test
public void testHiveDecimalAllNulls() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(DecimalStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
  // this is an invalid decimal value, getting HiveDecimal from it will return null
  writer.addRow(new DecimalStruct(new HiveDecimalWritable("1.463040009E9".getBytes(), 8)));
  writer.addRow(new DecimalStruct(null));
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  StructObjectInspector readerInspector = (StructObjectInspector) reader.getObjectInspector();
  List<? extends StructField> fields = readerInspector.getAllStructFieldRefs();
  HiveDecimalObjectInspector doi = (HiveDecimalObjectInspector) readerInspector.getStructFieldRef("dec").getFieldObjectInspector();
  RecordReader rows = reader.rows();
  while (rows.hasNext()) {
    Object row = rows.next(null);
    assertEquals(null, doi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row, fields.get(0))));
  }
  // check the stats
  ColumnStatistics[] stats = reader.getStatistics();
  assertEquals(2, stats[0].getNumberOfValues());
  assertEquals(0, stats[1].getNumberOfValues());
  assertEquals(true, stats[1].hasNull());
  rows.close();
}
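The inline comment above notes that the writable is built from an invalid byte representation, so getHiveDecimal() returns null. A small hedged sketch, with illustrative values not taken from the test, of the related convention that HiveDecimal reports an unrepresentable value as null rather than throwing:

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class NullDecimalSketch {
  public static void main(String[] args) {
    // 1463040009 has 10 digits; it cannot fit into decimal(5,2), so
    // enforcePrecisionScale reports the failure as null instead of throwing.
    HiveDecimal d = HiveDecimal.create("1463040009");
    HiveDecimal narrowed = HiveDecimal.enforcePrecisionScale(d, 5, 2);
    System.out.println(narrowed); // null
  }
}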
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From class TestGenericUDFOPDivide, method testDouleDivideDecimal:
@Test
public void testDouleDivideDecimal() throws HiveException {
  GenericUDFOPDivide udf = new GenericUDFOPDivide();
  DoubleWritable left = new DoubleWritable(74.52);
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableDoubleObjectInspector, PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2)) };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  Assert.assertEquals(74.52 / 234.97, res.get(), EPSILON);
}
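A minimal sketch, outside the UDF machinery, of the arithmetic this test checks: when one operand is a double, the decimal operand is widened to double and the quotient is an ordinary double.

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DoubleDivideSketch {
  public static void main(String[] args) {
    HiveDecimal right = HiveDecimal.create("234.97");
    // The decimal operand is widened to double; the result stays a plain double.
    double result = 74.52 / right.doubleValue();
    System.out.println(result); // roughly 0.3171
  }
}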
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From class TestGenericUDFOPDivide, method testLongDivideDecimal:
@Test
public void testLongDivideDecimal() throws HiveException {
  GenericUDFOPDivide udf = new GenericUDFOPDivide();
  LongWritable left = new LongWritable(104);
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableLongObjectInspector, PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9, 4)) };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(33, 10), oi.getTypeInfo());
  HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("0.4426096949"), res.getHiveDecimal());
}
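The asserted result type decimal(33,10) can be derived from Hive's decimal-division typing rule, which this test appears to exercise: the bigint operand is treated as decimal(19,0), the result scale is max(6, s1 + p2 + 1), and the result precision is (p1 - s1 + s2) + scale, trimmed afterwards if it exceeded the maximum precision of 38 (not needed here). A small sketch of that arithmetic; the variable names are illustrative only.

public class DivideTypeSketch {
  public static void main(String[] args) {
    int p1 = 19, s1 = 0; // long promoted to decimal(19,0)
    int p2 = 9, s2 = 4;  // the decimal(9,4) divisor
    int scale = Math.max(6, s1 + p2 + 1);   // 10
    int precision = (p1 - s1 + s2) + scale; // 33
    System.out.println("decimal(" + precision + "," + scale + ")"); // decimal(33,10)
  }
}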
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From class TestGenericUDFOPDivide, method testDecimalDivideDecimal2:
@Test
public void testDecimalDivideDecimal2() throws HiveException {
  GenericUDFOPDivide udf = new GenericUDFOPDivide();
  HiveDecimalWritable left = new HiveDecimalWritable(HiveDecimal.create("5"));
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("25"));
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(1, 0)), PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(2, 0)) };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(7, 6), oi.getTypeInfo());
  HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("0.2"), res.getHiveDecimal());
}
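By the same rule as in the previous example, scale = max(6, 0 + 2 + 1) = 6 and precision = (1 - 0 + 0) + 6 = 7, which matches the asserted decimal(7,6). A minimal sketch of the underlying division using plain HiveDecimal arithmetic, outside the UDF; HiveDecimal trims trailing zeros, so 5 / 25 comes back as 0.2.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalDivideSketch {
  public static void main(String[] args) {
    HiveDecimal quotient = HiveDecimal.create("5").divide(HiveDecimal.create("25"));
    // Round-trip through the writable, the same carrier type the UDF result uses.
    HiveDecimalWritable res = new HiveDecimalWritable(quotient);
    System.out.println(res.getHiveDecimal()); // 0.2
  }
}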