Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
The class TestVectorUDFDatetimeLegacyHybridCalendar, method compareToUDFDatetimeLegacyHybridCalendar.
private void compareToUDFDatetimeLegacyHybridCalendar(GenericUDF udf, long in, long out) throws HiveException {
  DateWritableV2 dateWInput = new DateWritableV2((int) in);
  DateWritableV2 dateWOutput = (DateWritableV2) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(dateWInput) });
  Assert.assertEquals(dateWOutput.get(), Date.ofEpochDay((int) out));
}
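For context, a minimal standalone sketch (class name and values here are illustrative, not taken from the Hive tests) of the DateWritableV2 round trip the assertion relies on: the int passed to the constructor is a count of days since the Unix epoch, and get() returns the equivalent org.apache.hadoop.hive.common.type.Date.

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateWritableV2RoundTrip {
  public static void main(String[] args) {
    int epochDays = 18262;                                        // 2020-01-01 as days since 1970-01-01
    DateWritableV2 writable = new DateWritableV2(epochDays);
    Date date = writable.get();                                   // same day, as a Hive Date
    System.out.println(date.equals(Date.ofEpochDay(epochDays)));  // true
  }
}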
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
The class VectorHashKeyWrapperGeneral, method stringifyKeys.
/*
 * This method is mainly intended for debug display purposes.
 */
@Override
public String stringifyKeys(VectorColumnSetInfo columnSetInfo) {
  StringBuilder sb = new StringBuilder();
  boolean isFirstKey = true;
  if (longValues.length > 0) {
    isFirstKey = false;
    sb.append("longs ");
    boolean isFirstValue = true;
    for (int i = 0; i < columnSetInfo.longIndices.length; i++) {
      if (isFirstValue) {
        isFirstValue = false;
      } else {
        sb.append(", ");
      }
      int keyIndex = columnSetInfo.longIndices[i];
      if (isNull[keyIndex]) {
        sb.append("null");
      } else {
        sb.append(longValues[i]);
        PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) columnSetInfo.typeInfos[keyIndex];
        // FUTURE: Add INTERVAL_YEAR_MONTH, etc, as desired.
        switch (primitiveTypeInfo.getPrimitiveCategory()) {
        case DATE:
          {
            Date dt = new Date(0);
            dt.setTime(DateWritableV2.daysToMillis((int) longValues[i]));
            sb.append(" date ");
            sb.append(dt.toString());
          }
          break;
        default:
          // Add nothing more.
          break;
        }
      }
    }
  }
  if (doubleValues.length > 0) {
    if (isFirstKey) {
      isFirstKey = false;
    } else {
      sb.append(", ");
    }
    sb.append("doubles ");
    boolean isFirstValue = true;
    for (int i = 0; i < columnSetInfo.doubleIndices.length; i++) {
      if (isFirstValue) {
        isFirstValue = false;
      } else {
        sb.append(", ");
      }
      int keyIndex = columnSetInfo.doubleIndices[i];
      if (isNull[keyIndex]) {
        sb.append("null");
      } else {
        sb.append(doubleValues[i]);
      }
    }
  }
  if (byteValues.length > 0) {
    if (isFirstKey) {
      isFirstKey = false;
    } else {
      sb.append(", ");
    }
    sb.append("byte lengths ");
    boolean isFirstValue = true;
    for (int i = 0; i < columnSetInfo.stringIndices.length; i++) {
      if (isFirstValue) {
        isFirstValue = false;
      } else {
        sb.append(", ");
      }
      int keyIndex = columnSetInfo.stringIndices[i];
      if (isNull[keyIndex]) {
        sb.append("null");
      } else {
        sb.append(byteLengths[i]);
      }
    }
  }
  if (decimalValues.length > 0) {
    if (isFirstKey) {
      isFirstKey = false;
    } else {
      sb.append(", ");
    }
    sb.append("decimals ");
    boolean isFirstValue = true;
    for (int i = 0; i < columnSetInfo.decimalIndices.length; i++) {
      if (isFirstValue) {
        isFirstValue = false;
      } else {
        sb.append(", ");
      }
      int keyIndex = columnSetInfo.decimalIndices[i];
      if (isNull[keyIndex]) {
        sb.append("null");
      } else {
        sb.append(decimalValues[i]);
      }
    }
  }
  if (timestampValues.length > 0) {
    if (isFirstKey) {
      isFirstKey = false;
    } else {
      sb.append(", ");
    }
    sb.append("timestamps ");
    boolean isFirstValue = true;
    for (int i = 0; i < columnSetInfo.timestampIndices.length; i++) {
      if (isFirstValue) {
        isFirstValue = false;
      } else {
        sb.append(", ");
      }
      int keyIndex = columnSetInfo.timestampIndices[i];
      if (isNull[keyIndex]) {
        sb.append("null");
      } else {
        sb.append(timestampValues[i]);
      }
    }
  }
  if (intervalDayTimeValues.length > 0) {
    if (isFirstKey) {
      isFirstKey = false;
    } else {
      sb.append(", ");
    }
    sb.append("interval day times ");
    boolean isFirstValue = true;
    for (int i = 0; i < columnSetInfo.intervalDayTimeIndices.length; i++) {
      if (isFirstValue) {
        isFirstValue = false;
      } else {
        sb.append(", ");
      }
      int keyIndex = columnSetInfo.intervalDayTimeIndices[i];
      if (isNull[keyIndex]) {
        sb.append("null");
      } else {
        sb.append(intervalDayTimeValues[i]);
      }
    }
  }
  return sb.toString();
}
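The DATE branch above renders a long hash key by treating it as an epoch-day count. A standalone sketch of that conversion (names and values are illustrative): DateWritableV2.daysToMillis turns the day count into epoch milliseconds so java.sql.Date can format it.

import java.sql.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DaysToMillisSketch {
  public static void main(String[] args) {
    long longKey = 18262L;                            // a long hash key holding 2020-01-01
    Date dt = new Date(0);
    dt.setTime(DateWritableV2.daysToMillis((int) longKey));
    System.out.println(dt);                           // 2020-01-01
  }
}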
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
The class BatchToRowReader, method nextDate.
public static DateWritableV2 nextDate(ColumnVector vector, int row, Object previous) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (vector.noNulls || !vector.isNull[row]) {
    DateWritableV2 result;
    if (previous == null || previous.getClass() != DateWritableV2.class) {
      result = new DateWritableV2();
    } else {
      result = (DateWritableV2) previous;
    }
    int date = (int) ((LongColumnVector) vector).vector[row];
    result.set(date);
    return result;
  } else {
    return null;
  }
}
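A hypothetical driver for nextDate (the vector contents are made up for illustration), showing how it reads epoch-day longs out of a LongColumnVector and reuses the previous writable when its type matches:

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.io.BatchToRowReader;

public class NextDateDriver {
  public static void main(String[] args) {
    LongColumnVector vector = new LongColumnVector(2);
    vector.vector[0] = 18262;       // row 0: 2020-01-01 stored as epoch days
    vector.isNull[1] = true;        // row 1: null
    vector.noNulls = false;

    Object previous = null;
    for (int row = 0; row < 2; row++) {
      Object current = BatchToRowReader.nextDate(vector, row, previous);
      System.out.println(current);  // DateWritableV2 "2020-01-01", then null
      if (current != null) {
        previous = current;         // reuse the writable for the next row
      }
    }
  }
}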
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
The class GenericUDFInBloomFilter, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  // Return if either of the arguments is null
  if (arguments[0].get() == null || arguments[1].get() == null) {
    return null;
  }
  if (!initializedBloomFilter) {
    // Set up the bloom filter once
    InputStream in = null;
    try {
      BytesWritable bw = (BytesWritable) arguments[1].get();
      byte[] bytes = new byte[bw.getLength()];
      System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength());
      in = new NonSyncByteArrayInputStream(bytes);
      bloomFilter = BloomKFilter.deserialize(in);
    } catch (IOException e) {
      throw new HiveException(e);
    } finally {
      IOUtils.closeStream(in);
    }
    initializedBloomFilter = true;
  }
  // Check if the value is in the bloom filter
  switch (((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory()) {
  case BOOLEAN:
    boolean vBoolean = ((BooleanObjectInspector) valObjectInspector).get(arguments[0].get());
    return bloomFilter.testLong(vBoolean ? 1 : 0);
  case BYTE:
    byte vByte = ((ByteObjectInspector) valObjectInspector).get(arguments[0].get());
    return bloomFilter.testLong(vByte);
  case SHORT:
    short vShort = ((ShortObjectInspector) valObjectInspector).get(arguments[0].get());
    return bloomFilter.testLong(vShort);
  case INT:
    int vInt = ((IntObjectInspector) valObjectInspector).get(arguments[0].get());
    return bloomFilter.testLong(vInt);
  case LONG:
    long vLong = ((LongObjectInspector) valObjectInspector).get(arguments[0].get());
    return bloomFilter.testLong(vLong);
  case FLOAT:
    float vFloat = ((FloatObjectInspector) valObjectInspector).get(arguments[0].get());
    return bloomFilter.testDouble(vFloat);
  case DOUBLE:
    double vDouble = ((DoubleObjectInspector) valObjectInspector).get(arguments[0].get());
    return bloomFilter.testDouble(vDouble);
  case DECIMAL:
    HiveDecimalWritable vDecimal = ((HiveDecimalObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
    int startIdx = vDecimal.toBytes(scratchBuffer);
    return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
  case DATE:
    DateWritableV2 vDate = ((DateObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
    return bloomFilter.testLong(vDate.getDays());
  case TIMESTAMP:
    Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).getPrimitiveJavaObject(arguments[0].get());
    return bloomFilter.testLong(vTimeStamp.toEpochMilli());
  case CHAR:
    Text vChar = ((HiveCharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
    return bloomFilter.testBytes(vChar.getBytes(), 0, vChar.getLength());
  case VARCHAR:
    Text vVarchar = ((HiveVarcharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getTextValue();
    return bloomFilter.testBytes(vVarchar.getBytes(), 0, vVarchar.getLength());
  case STRING:
    Text vString = ((StringObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
    return bloomFilter.testBytes(vString.getBytes(), 0, vString.getLength());
  case BINARY:
    BytesWritable vBytes = ((BinaryObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
    return bloomFilter.testBytes(vBytes.getBytes(), 0, vBytes.getLength());
  default:
    throw new UDFArgumentTypeException(0, "Bad primitive category " + ((PrimitiveTypeInfo) valObjectInspector).getPrimitiveCategory());
  }
}
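For the DATE case specifically, a hedged standalone sketch (filter size and date are arbitrary) of the convention the probe relies on: both sides of the semijoin must hash a date as its epoch-day long, which is what DateWritableV2.getDays() supplies.

import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hive.common.util.BloomKFilter;

public class DateBloomSketch {
  public static void main(String[] args) {
    BloomKFilter bloomFilter = new BloomKFilter(100);   // expected number of entries
    DateWritableV2 vDate = new DateWritableV2(18262);   // 2020-01-01
    bloomFilter.addLong(vDate.getDays());               // build side adds epoch days
    System.out.println(bloomFilter.testLong(vDate.getDays()));      // true
    System.out.println(bloomFilter.testLong(vDate.getDays() + 1));  // false, with high probability
  }
}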
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
The class GenericUDFDateAdd, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0].get() == null) {
    return null;
  }
  Object daysWritableObject = daysConverter.convert(arguments[1].get());
  if (daysWritableObject == null) {
    return null;
  }
  int toBeAdded;
  if (daysWritableObject instanceof ByteWritable) {
    toBeAdded = ((ByteWritable) daysWritableObject).get();
  } else if (daysWritableObject instanceof ShortWritable) {
    toBeAdded = ((ShortWritable) daysWritableObject).get();
  } else if (daysWritableObject instanceof IntWritable) {
    toBeAdded = ((IntWritable) daysWritableObject).get();
  } else {
    return null;
  }
  // Convert the first param into a DateWritableV2 value
  switch (inputType1) {
  case STRING:
    String dateString = dateConverter.convert(arguments[0].get()).toString();
    if (DateParser.parseDate(dateString, dateVal)) {
      output.set(dateVal);
    } else {
      return null;
    }
    break;
  case TIMESTAMP:
    Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get())).getTimestamp();
    output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
    break;
  case DATE:
    DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get());
    output.set(dw.getDays());
    break;
  default:
    throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
  }
  int newDays = output.getDays() + (signModifier * toBeAdded);
  output.set(newDays);
  return output;
}
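A minimal standalone sketch (values assumed; signModifier is the field that distinguishes date_add from date_sub) of the day arithmetic the method ends with: normalize the input to epoch days, add the possibly negated day count, and keep the result in a DateWritableV2.

import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateAddArithmeticSketch {
  public static void main(String[] args) {
    int signModifier = 1;                               // +1 for DATE_ADD, -1 for DATE_SUB
    int toBeAdded = 30;
    DateWritableV2 output = new DateWritableV2(18262);  // 2020-01-01
    output.set(output.getDays() + (signModifier * toBeAdded));
    System.out.println(output);                         // 2020-01-31
  }
}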