Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
In class TestVectorGroupByOperator, method testKeyTypeAggregate:
private void testKeyTypeAggregate(
    String aggregateName,
    FakeVectorRowBatchFromObjectIterables data,
    Map<Object, Object> expected) throws HiveException {
  List<String> mapColumnNames = new ArrayList<String>();
  mapColumnNames.add("Key");
  mapColumnNames.add("Value");
  VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
  Set<Object> keys = new HashSet<Object>();

  // Aggregate the "Value" column with the UDAF under test.
  AggregationDesc agg = buildAggregationDesc(ctx, aggregateName,
      GenericUDAFEvaluator.Mode.PARTIAL1, "Value",
      TypeInfoFactory.getPrimitiveTypeInfo(data.getTypes()[1]));
  ArrayList<AggregationDesc> aggs = new ArrayList<AggregationDesc>();
  aggs.add(agg);

  ArrayList<String> outputColumnNames = new ArrayList<String>();
  outputColumnNames.add("_col0");
  outputColumnNames.add("_col1");

  GroupByDesc desc = new GroupByDesc();
  desc.setVectorDesc(new VectorGroupByDesc());
  desc.setOutputColumnNames(outputColumnNames);
  desc.setAggregators(aggs);
  ((VectorGroupByDesc) desc.getVectorDesc()).setProcessingMode(ProcessingMode.HASH);

  // Group by the "Key" column, whose primitive type varies per test case.
  ExprNodeDesc keyExp = buildColumnDesc(ctx, "Key",
      TypeInfoFactory.getPrimitiveTypeInfo(data.getTypes()[0]));
  ArrayList<ExprNodeDesc> keysDesc = new ArrayList<ExprNodeDesc>();
  keysDesc.add(keyExp);
  desc.setKeys(keysDesc);

  CompilationOpContext cCtx = new CompilationOpContext();
  Operator<? extends OperatorDesc> groupByOp = OperatorFactory.get(cCtx, desc);
  VectorGroupByOperator vgo =
      (VectorGroupByOperator) Vectorizer.vectorizeGroupByOperator(groupByOp, ctx);
  FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
  vgo.initialize(hconf, null);

  out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {

    private int rowIndex;
    private String aggregateName;
    private Map<Object, Object> expected;
    private Set<Object> keys;

    @Override
    public void inspectRow(Object row, int tag) throws HiveException {
      assertTrue(row instanceof Object[]);
      Object[] fields = (Object[]) row;
      assertEquals(2, fields.length);

      // Unwrap the Writable key into a plain Java value so it can be
      // looked up in the expected-results map.
      Object key = fields[0];
      Object keyValue = null;
      if (null == key) {
        keyValue = null;
      } else if (key instanceof ByteWritable) {
        ByteWritable bwKey = (ByteWritable) key;
        keyValue = bwKey.get();
      } else if (key instanceof ShortWritable) {
        ShortWritable swKey = (ShortWritable) key;
        keyValue = swKey.get();
      } else if (key instanceof IntWritable) {
        IntWritable iwKey = (IntWritable) key;
        keyValue = iwKey.get();
      } else if (key instanceof LongWritable) {
        LongWritable lwKey = (LongWritable) key;
        keyValue = lwKey.get();
      } else if (key instanceof TimestampWritable) {
        TimestampWritable twKey = (TimestampWritable) key;
        keyValue = twKey.getTimestamp();
      } else if (key instanceof DoubleWritable) {
        DoubleWritable dwKey = (DoubleWritable) key;
        keyValue = dwKey.get();
      } else if (key instanceof FloatWritable) {
        FloatWritable fwKey = (FloatWritable) key;
        keyValue = fwKey.get();
      } else if (key instanceof BooleanWritable) {
        BooleanWritable bwKey = (BooleanWritable) key;
        keyValue = bwKey.get();
      } else if (key instanceof HiveDecimalWritable) {
        HiveDecimalWritable hdwKey = (HiveDecimalWritable) key;
        keyValue = hdwKey.getHiveDecimal();
      } else {
        Assert.fail(String.format("Not implemented key output type %s: %s",
            key.getClass().getName(), key));
      }

      String keyValueAsString = String.format("%s", keyValue);
      assertTrue(expected.containsKey(keyValue));
      Object expectedValue = expected.get(keyValue);
      Object value = fields[1];
      Validator validator = getValidator(aggregateName);
      validator.validate(keyValueAsString, expectedValue, new Object[] { value });
      keys.add(keyValue);
    }

    private FakeCaptureOutputOperator.OutputInspector init(
        String aggregateName, Map<Object, Object> expected, Set<Object> keys) {
      this.aggregateName = aggregateName;
      this.expected = expected;
      this.keys = keys;
      return this;
    }
  }.init(aggregateName, expected, keys));

  // Feed every batch through the operator, then flush and verify that each
  // expected key was produced exactly once.
  for (VectorizedRowBatch unit : data) {
    vgo.process(unit, 0);
  }
  vgo.close(false);

  List<Object> outBatchList = out.getCapturedRows();
  assertNotNull(outBatchList);
  assertEquals(expected.size(), outBatchList.size());
  assertEquals(expected.size(), keys.size());
}
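The long instanceof chain above is how the inspector unwraps each Writable grouping key back into a plain Java value; TimestampWritable is the one branch that yields an object (a java.sql.Timestamp via getTimestamp()) rather than a primitive. A minimal sketch of the same pattern in isolation, using a hypothetical unwrapKey helper that is not part of the Hive test:

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.LongWritable;

// Hypothetical helper mirroring two branches of the inspector's unwrap logic.
static Object unwrapKey(Object key) {
  if (key == null) {
    return null;                                      // NULL grouping key
  } else if (key instanceof LongWritable) {
    return ((LongWritable) key).get();                // unwraps to a primitive long
  } else if (key instanceof TimestampWritable) {
    return ((TimestampWritable) key).getTimestamp();  // unwraps to java.sql.Timestamp
  }
  throw new IllegalArgumentException("Unhandled key type: " + key.getClass().getName());
}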
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
In class TestVectorDateExpressions, method compareToUDFMonthDate:
private void compareToUDFMonthDate(long t, int y) {
  UDFMonth udf = new UDFMonth();
  TimestampWritable tsw = toTimestampWritable(t);
  IntWritable res = udf.evaluate(tsw);
  Assert.assertEquals(res.get(), y);
}
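toTimestampWritable is a private helper defined elsewhere in TestVectorDateExpressions and not shown here. A self-contained way to exercise the same evaluate(TimestampWritable) overload is to build the writable directly; the epoch value below is borrowed from the granularity test further down, and the UDFMonth import path is an assumption:

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.udf.UDFMonth;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;

// 494243222000L is Friday 30th August 1985 (PST), so the month should be 8.
TimestampWritable tsw = new TimestampWritable(new Timestamp(494243222000L));
IntWritable month = new UDFMonth().evaluate(tsw);
Assert.assertEquals(8, month.get());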
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
In class TestETypeConverter, method testTimestampInt96ConverterChicago:
@Test
public void testTimestampInt96ConverterChicago() {
  PrimitiveConverter converter;
  parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "America/Chicago");
  converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  converter.addBinary(NanoTimeUtils.getNanoTime(ts,
      Calendar.getInstance(TimeZone.getTimeZone("America/Chicago"))).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts));
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
In class TestETypeConverter, method testTimestampInt96ConverterEtc:
@Test
public void testTimestampInt96ConverterEtc() {
  PrimitiveConverter converter;
  parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "Etc/GMT-12");
  converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  converter.addBinary(NanoTimeUtils.getNanoTime(ts,
      Calendar.getInstance(TimeZone.getTimeZone("Etc/GMT-12"))).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts));
}
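The Chicago and Etc/GMT-12 tests differ only in the zone id: a timestamp written as INT96 nanos with a Calendar in zone Z should read back unchanged through a converter whose PARQUET_INT96_WRITE_ZONE_PROPERTY is set to the same Z. A sketch of a parameterized helper factoring out that round trip, assuming the same parent, ts, and getETypeConverter members as the tests above:

// Hypothetical helper; zoneId is e.g. "America/Chicago" or "Etc/GMT-12".
private void assertInt96RoundTrip(String zoneId) {
  parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, zoneId);
  PrimitiveConverter converter =
      getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone(zoneId));
  converter.addBinary(NanoTimeUtils.getNanoTime(ts, calendar).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts)); // same wall-clock timestamp back
}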
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
In class TestUDFDateFormatGranularity, method testTimestampToTimestampWithGranularity:
// Timestamp values are PST (the timezone for tests is set to PST by default).
@Test
public void testTimestampToTimestampWithGranularity() throws Exception {
  // Running example: Friday 30th August 1985 02:47:02 AM
  final TimestampWritable t = new TimestampWritable(new Timestamp(494243222000L));
  UDFDateFloor g;

  // Year granularity: Tuesday 1st January 1985 12:00:00 AM
  g = new UDFDateFloorYear();
  TimestampWritable i1 = g.evaluate(t);
  assertEquals(473414400000L, i1.getTimestamp().getTime());

  // Quarter granularity: Monday 1st July 1985 12:00:00 AM
  g = new UDFDateFloorQuarter();
  TimestampWritable i2 = g.evaluate(t);
  assertEquals(489049200000L, i2.getTimestamp().getTime());

  // Month granularity: Thursday 1st August 1985 12:00:00 AM
  g = new UDFDateFloorMonth();
  TimestampWritable i3 = g.evaluate(t);
  assertEquals(491727600000L, i3.getTimestamp().getTime());

  // Week granularity: Monday 26th August 1985 12:00:00 AM
  g = new UDFDateFloorWeek();
  TimestampWritable i4 = g.evaluate(t);
  assertEquals(493887600000L, i4.getTimestamp().getTime());

  // Day granularity: Friday 30th August 1985 12:00:00 AM
  g = new UDFDateFloorDay();
  TimestampWritable i5 = g.evaluate(t);
  assertEquals(494233200000L, i5.getTimestamp().getTime());

  // Hour granularity: Friday 30th August 1985 02:00:00 AM
  g = new UDFDateFloorHour();
  TimestampWritable i6 = g.evaluate(t);
  assertEquals(494240400000L, i6.getTimestamp().getTime());

  // Minute granularity: Friday 30th August 1985 02:47:00 AM
  g = new UDFDateFloorMinute();
  TimestampWritable i7 = g.evaluate(t);
  assertEquals(494243220000L, i7.getTimestamp().getTime());

  // Second granularity: Friday 30th August 1985 02:47:02 AM
  g = new UDFDateFloorSecond();
  TimestampWritable i8 = g.evaluate(t);
  assertEquals(494243222000L, i8.getTimestamp().getTime());
}
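The expected constants are the running example's 494243222000L truncated at each boundary. Second, minute, and hour floors can be reproduced with plain UTC arithmetic, because whole-hour zone offsets keep those boundaries aligned; day-level and coarser floors shift by the local offset, which is why the test goes through the UDFDateFloor classes. A quick arithmetic sketch, independent of Hive:

long t = 494243222000L;                  // Fri 30 Aug 1985 02:47:02 PST
long second = (t / 1000L) * 1000L;       // 494243222000: already on a second boundary
long minute = (t / 60000L) * 60000L;     // 494243220000: drops the 2 trailing seconds
long hour   = (t / 3600000L) * 3600000L; // 494240400000: matches the UDF result above
assert second == 494243222000L && minute == 494243220000L && hour == 494240400000L;
// The day floor above is 494233200000L = UTC day floor 494208000000L + 7h,
// reflecting the PDT (UTC-7) offset baked into the local-time truncation.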