Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project Hive by Apache.
The class TestETypeConverter, method testGetInt64NanosAdjustedToUTCTimestampConverter:
@Test
public void testGetInt64NanosAdjustedToUTCTimestampConverter() throws Exception {
  ZoneId zone = ZoneId.systemDefault();
  Timestamp timestamp = Timestamp.valueOf("2018-07-15 15:12:20.11223344");
  PrimitiveType primitiveType = createInt64TimestampType(true, TimeUnit.NANOS);
  long time = timestamp.toEpochSecond() * 1000000000 + timestamp.getNanos();
  Writable writable = getWritableFromPrimitiveConverter(null, primitiveType, time);
  TimestampWritableV2 timestampWritable = (TimestampWritableV2) writable;
  timestamp = Timestamp.ofEpochSecond(timestamp.toEpochSecond(), timestamp.getNanos(), zone);
  assertEquals(timestamp.toEpochMilli(), timestampWritable.getTimestamp().toEpochMilli());
  assertEquals(timestamp.getNanos(), timestampWritable.getNanos());
}
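For context, here is a minimal, self-contained sketch (not taken from the Hive sources; the class name NanosDecodeSketch is made up) of the encode/decode round trip the test exercises: the int64 NANOS value packs epoch seconds scaled to nanoseconds plus the sub-second nanos, and the decoded result is wrapped in a TimestampWritableV2.

import java.time.ZoneId;

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

public class NanosDecodeSketch {
  public static void main(String[] args) {
    // Pack epoch seconds scaled to nanoseconds plus the sub-second nanos,
    // mirroring how the test builds the int64 NANOS value.
    Timestamp original = Timestamp.valueOf("2018-07-15 15:12:20.11223344");
    long encoded = original.toEpochSecond() * 1_000_000_000L + original.getNanos();

    // Unpack the int64 back into seconds and nanos and wrap the result.
    long seconds = encoded / 1_000_000_000L;
    int nanos = (int) (encoded % 1_000_000_000L);
    Timestamp decoded = Timestamp.ofEpochSecond(seconds, nanos, ZoneId.systemDefault());
    TimestampWritableV2 writable = new TimestampWritableV2(decoded);

    System.out.println(writable.getTimestamp() + " nanos=" + writable.getNanos());
  }
}

The split above assumes a non-negative epoch value; pre-1970 timestamps would need floor division to recover the seconds and nanos correctly.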
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project Hive by Apache.
The class TestETypeConverter, method testGetInt64MillisTimestampProlepticConverter:
@Test
public void testGetInt64MillisTimestampProlepticConverter() throws Exception {
  Timestamp timestamp = Timestamp.valueOf("1572-07-15 15:12:20.112");
  PrimitiveType primitiveType = createInt64TimestampType(false, TimeUnit.MILLIS);
  Writable writable = getWritableFromPrimitiveConverter(null, primitiveType, timestamp.toEpochMilli());
  TimestampWritableV2 timestampWritable = (TimestampWritableV2) writable;
  assertEquals(timestamp.toEpochMilli(), timestampWritable.getTimestamp().toEpochMilli());
}
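A hedged sketch of the millis round trip this proleptic test relies on (MillisRoundTripSketch is a made-up name, and the use of Timestamp.ofEpochMilli is an assumption): Hive's common.type.Timestamp is backed by java.time, i.e. the proleptic Gregorian calendar, so a pre-cutover value such as 1572-07-15 keeps the same epoch milliseconds after the round trip, which is exactly what the assertion checks.

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

public class MillisRoundTripSketch {
  public static void main(String[] args) {
    // Hive's Timestamp is backed by java.time (proleptic Gregorian calendar),
    // so a 1572 value does not shift across the Julian/Gregorian cutover.
    Timestamp original = Timestamp.valueOf("1572-07-15 15:12:20.112");
    long millis = original.toEpochMilli();

    TimestampWritableV2 writable = new TimestampWritableV2(Timestamp.ofEpochMilli(millis));
    System.out.println(millis == writable.getTimestamp().toEpochMilli()); // prints true
  }
}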
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project Hive by Apache.
The class ColumnStatisticsObjTranslator, method unpackTimestampStats:
private static void unpackTimestampStats(ObjectInspector oi, Object o, ColumnStatsField csf,
    ColumnStatisticsObj statsObj) {
  switch (csf) {
  case COUNT_NULLS:
    long cn = ((LongObjectInspector) oi).get(o);
    statsObj.getStatsData().getTimestampStats().setNumNulls(cn);
    break;
  case MIN:
    TimestampWritableV2 min = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o);
    statsObj.getStatsData().getTimestampStats().setLowValue(new Timestamp(min.getSeconds()));
    break;
  case MAX:
    TimestampWritableV2 max = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o);
    statsObj.getStatsData().getTimestampStats().setHighValue(new Timestamp(max.getSeconds()));
    break;
  case NDV:
    long ndv = ((LongObjectInspector) oi).get(o);
    statsObj.getStatsData().getTimestampStats().setNumDVs(ndv);
    break;
  case BITVECTOR:
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
    byte[] buf = ((BinaryObjectInspector) poi).getPrimitiveJavaObject(o);
    statsObj.getStatsData().getTimestampStats().setBitVectors(buf);
    break;
  default:
    throw new RuntimeException("Unsupported column stat for TIMESTAMP : " + csf);
  }
}
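For illustration, a small sketch of the MIN/MAX conversion above (StatsTimestampSketch is a made-up class, and the getSecondsSinceEpoch() getter is assumed to be the Thrift-generated one; the seconds-based constructor is the one used in the snippet): the metastore stats Timestamp keeps whole seconds only, so the fractional part of the writable is dropped via getSeconds().

import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

public class StatsTimestampSketch {

  // The metastore stats Timestamp holds whole seconds only, so the writable's
  // sub-second part is discarded here.
  static org.apache.hadoop.hive.metastore.api.Timestamp toStatsTimestamp(TimestampWritableV2 w) {
    return new org.apache.hadoop.hive.metastore.api.Timestamp(w.getSeconds());
  }

  public static void main(String[] args) {
    TimestampWritableV2 w = new TimestampWritableV2(
        org.apache.hadoop.hive.common.type.Timestamp.valueOf("2018-07-15 15:12:20.112"));
    // getSecondsSinceEpoch() is assumed to be the Thrift-generated getter.
    System.out.println(toStatsTimestamp(w).getSecondsSinceEpoch());
  }
}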
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project Hive by Apache.
The class GenericUDFTrunc, method evaluateDate:
private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLengthException,
    HiveException, UDFArgumentTypeException, UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length);
  }
  if (arguments[0].get() == null || arguments[1].get() == null) {
    return null;
  }
  if (textConverter2 != null) {
    fmtInput = textConverter2.convert(arguments[1].get()).toString();
  }
  Date date;
  switch (inputType1) {
  case STRING:
    String dateString = textConverter1.convert(arguments[0].get()).toString();
    date = DateParser.parseDate(dateString);
    if (date == null) {
      return null;
    }
    break;
  case TIMESTAMP:
    Timestamp ts = ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())).getTimestamp();
    date = Date.ofEpochMilli(ts.toEpochMilli());
    break;
  case DATE:
    DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
    date = dw.get();
    break;
  default:
    throw new UDFArgumentTypeException(0,
        "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
  }
  if (evalDate(date) == null) {
    return null;
  }
  output.set(date.toString());
  return output;
}
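A minimal sketch of the TIMESTAMP branch in isolation (TruncTimestampSketch is a made-up class name, not part of the UDF): the writable is unwrapped to a Hive Timestamp and narrowed to a Date through epoch milliseconds, and only then does evalDate apply the requested granularity.

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

public class TruncTimestampSketch {
  public static void main(String[] args) {
    // Unwrap the writable to a Hive Timestamp, then narrow it to a Date
    // through epoch milliseconds, as the TIMESTAMP branch above does.
    TimestampWritableV2 writable =
        new TimestampWritableV2(Timestamp.valueOf("2018-07-15 15:12:20.112"));
    Timestamp ts = writable.getTimestamp();
    Date d = Date.ofEpochMilli(ts.toEpochMilli());
    System.out.println(d); // 2018-07-15
  }
}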
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project Hive by Apache.
The class TestVectorGroupByOperator, method testKeyTypeAggregate:
private void testKeyTypeAggregate(String aggregateName, FakeVectorRowBatchFromObjectIterables data,
    Map<Object, Object> expected) throws HiveException {
  List<String> mapColumnNames = new ArrayList<String>();
  mapColumnNames.add("Key");
  mapColumnNames.add("Value");
  VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
  Set<Object> keys = new HashSet<Object>();
  AggregationDesc agg = buildAggregationDesc(ctx, aggregateName, GenericUDAFEvaluator.Mode.PARTIAL1,
      "Value", TypeInfoFactory.getPrimitiveTypeInfo(data.getTypes()[1]));
  ArrayList<AggregationDesc> aggs = new ArrayList<AggregationDesc>();
  aggs.add(agg);
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  outputColumnNames.add("_col0");
  outputColumnNames.add("_col1");
  GroupByDesc desc = new GroupByDesc();
  VectorGroupByDesc vectorGroupByDesc = new VectorGroupByDesc();
  desc.setOutputColumnNames(outputColumnNames);
  desc.setAggregators(aggs);
  vectorGroupByDesc.setProcessingMode(ProcessingMode.HASH);
  ExprNodeDesc keyExp = buildColumnDesc(ctx, "Key", TypeInfoFactory.getPrimitiveTypeInfo(data.getTypes()[0]));
  ArrayList<ExprNodeDesc> keysDesc = new ArrayList<ExprNodeDesc>();
  keysDesc.add(keyExp);
  desc.setKeys(keysDesc);
  CompilationOpContext cCtx = new CompilationOpContext();
  Operator<? extends OperatorDesc> groupByOp = OperatorFactory.get(cCtx, desc);
  VectorGroupByOperator vgo =
      (VectorGroupByOperator) Vectorizer.vectorizeGroupByOperator(groupByOp, ctx, vectorGroupByDesc);
  if (vgo == null) {
    assertTrue(false);
  }
  FakeCaptureVectorToRowOutputOperator out = FakeCaptureVectorToRowOutputOperator.addCaptureOutputChild(cCtx, vgo);
  vgo.initialize(hconf, null);
  out.setOutputInspector(new FakeCaptureVectorToRowOutputOperator.OutputInspector() {

    private int rowIndex;
    private String aggregateName;
    private Map<Object, Object> expected;
    private Set<Object> keys;

    @Override
    public void inspectRow(Object row, int tag) throws HiveException {
      assertTrue(row instanceof Object[]);
      Object[] fields = (Object[]) row;
      assertEquals(2, fields.length);
      Object key = fields[0];
      Object keyValue = null;
      if (null == key) {
        keyValue = null;
      } else if (key instanceof ByteWritable) {
        ByteWritable bwKey = (ByteWritable) key;
        keyValue = bwKey.get();
      } else if (key instanceof ShortWritable) {
        ShortWritable swKey = (ShortWritable) key;
        keyValue = swKey.get();
      } else if (key instanceof IntWritable) {
        IntWritable iwKey = (IntWritable) key;
        keyValue = iwKey.get();
      } else if (key instanceof LongWritable) {
        LongWritable lwKey = (LongWritable) key;
        keyValue = lwKey.get();
      } else if (key instanceof TimestampWritableV2) {
        TimestampWritableV2 twKey = (TimestampWritableV2) key;
        keyValue = twKey.getTimestamp().toSqlTimestamp();
      } else if (key instanceof DoubleWritable) {
        DoubleWritable dwKey = (DoubleWritable) key;
        keyValue = dwKey.get();
      } else if (key instanceof FloatWritable) {
        FloatWritable fwKey = (FloatWritable) key;
        keyValue = fwKey.get();
      } else if (key instanceof BooleanWritable) {
        BooleanWritable bwKey = (BooleanWritable) key;
        keyValue = bwKey.get();
      } else if (key instanceof HiveDecimalWritable) {
        HiveDecimalWritable hdwKey = (HiveDecimalWritable) key;
        keyValue = hdwKey.getHiveDecimal();
      } else {
        Assert.fail(String.format("Not implemented key output type %s: %s", key.getClass().getName(), key));
      }
      String keyValueAsString = String.format("%s", keyValue);
      assertTrue(expected.containsKey(keyValue));
      Object expectedValue = expected.get(keyValue);
      Object value = fields[1];
      Validator validator = getValidator(aggregateName);
      validator.validate(keyValueAsString, expectedValue, new Object[] { value });
      keys.add(keyValue);
    }

    private FakeCaptureVectorToRowOutputOperator.OutputInspector init(String aggregateName,
        Map<Object, Object> expected, Set<Object> keys) {
      this.aggregateName = aggregateName;
      this.expected = expected;
      this.keys = keys;
      return this;
    }
  }.init(aggregateName, expected, keys));
  for (VectorizedRowBatch unit : data) {
    vgo.process(unit, 0);
  }
  vgo.close(false);
  List<Object> outBatchList = out.getCapturedRows();
  assertNotNull(outBatchList);
  assertEquals(expected.size(), outBatchList.size());
  assertEquals(expected.size(), keys.size());
}
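A short sketch of the timestamp key branch of the inspector above (KeyConversionSketch is a made-up name, not part of the test): a TimestampWritableV2 group-by key is converted to java.sql.Timestamp via toSqlTimestamp() so it can be looked up in the expected-results map.

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

public class KeyConversionSketch {
  public static void main(String[] args) {
    // A timestamp group-by key arrives as a TimestampWritableV2; converting it
    // to java.sql.Timestamp lets it be matched against expected values built
    // from plain JDBC types, as the inspector above does.
    TimestampWritableV2 key =
        new TimestampWritableV2(Timestamp.valueOf("2018-07-15 15:12:20.112"));
    java.sql.Timestamp sqlTs = key.getTimestamp().toSqlTimestamp();
    System.out.println(sqlTs);
  }
}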