Use of org.apache.hadoop.hive.serde2.io.DateWritable in the Apache Hive project.
Example: the writeDatum method of the ReaderWriter class.
/**
 * Serializes a single datum to the stream as a one-byte type tag
 * (obtained from DataType.findType) followed by a type-specific payload.
 * Lists and maps are written recursively (size first, then elements /
 * key-value pairs); NULL writes only the tag.
 *
 * @param out destination stream
 * @param val value to serialize; may be null (written as DataType.NULL)
 * @throws IOException if the write fails or the value's type tag is unrecognized
 */
public static void writeDatum(DataOutput out, Object val) throws IOException {
  // Tag the payload first so the reader can dispatch on it.
  byte tag = DataType.findType(val);
  out.write(tag);
  switch(tag) {
    case DataType.LIST:
      List<?> items = (List<?>) val;
      out.writeInt(items.size());
      for (Object item : items) {
        writeDatum(out, item);
      }
      return;
    case DataType.MAP:
      Map<?, ?> map = (Map<?, ?>) val;
      out.writeInt(map.size());
      for (Entry<?, ?> pair : map.entrySet()) {
        writeDatum(out, pair.getKey());
        writeDatum(out, pair.getValue());
      }
      return;
    case DataType.INTEGER:
      // Variable-length encodings keep small numbers compact.
      new VIntWritable((Integer) val).write(out);
      return;
    case DataType.LONG:
      new VLongWritable((Long) val).write(out);
      return;
    case DataType.FLOAT:
      out.writeFloat((Float) val);
      return;
    case DataType.DOUBLE:
      out.writeDouble((Double) val);
      return;
    case DataType.BOOLEAN:
      out.writeBoolean((Boolean) val);
      return;
    case DataType.BYTE:
      out.writeByte((Byte) val);
      return;
    case DataType.SHORT:
      out.writeShort((Short) val);
      return;
    case DataType.STRING:
      // Length-prefixed UTF-8 bytes rather than writeUTF's 64 KiB-limited form.
      byte[] encoded = ((String) val).getBytes(ReaderWriter.UTF8);
      out.writeInt(encoded.length);
      out.write(encoded);
      return;
    case DataType.BINARY:
      byte[] raw = (byte[]) val;
      out.writeInt(raw.length);
      out.write(raw);
      return;
    case DataType.NULL:
      // For NULL the tag alone carries all the information.
      return;
    case DataType.CHAR:
      new HiveCharWritable((HiveChar) val).write(out);
      return;
    case DataType.VARCHAR:
      new HiveVarcharWritable((HiveVarchar) val).write(out);
      return;
    case DataType.DECIMAL:
      new HiveDecimalWritable((HiveDecimal) val).write(out);
      return;
    case DataType.DATE:
      new DateWritable((Date) val).write(out);
      return;
    case DataType.TIMESTAMP:
      new TimestampWritable((java.sql.Timestamp) val).write(out);
      return;
    default:
      throw new IOException("Unexpected data type " + tag + " found in stream.");
  }
}
Use of org.apache.hadoop.hive.serde2.io.DateWritable in the Apache Hive project.
Example: the initializeDate method of the GenericUDFTrunc class.
/**
 * Initializes the date flavor of trunc(): trunc(date/timestamp/string, fmt).
 * Selects the converter for the first argument based on its primitive
 * category and resolves the format argument either as a compile-time
 * constant (fmtInput) or via a runtime text converter.
 *
 * @param arguments exactly two primitive-category ObjectInspectors
 * @return the output ObjectInspector (writable string)
 * @throws UDFArgumentLengthException if the argument count is not 2
 * @throws UDFArgumentTypeException if either argument has an unsupported type
 */
private ObjectInspector initializeDate(ObjectInspector[] arguments) throws UDFArgumentLengthException, UDFArgumentTypeException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " is passed as first argument");
  }
  if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " is passed as second argument");
  }
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  switch(inputType1) {
    case STRING:
    case VARCHAR:
    case CHAR:
    case VOID:
      // All string-like (and void) inputs are normalized to STRING.
      inputType1 = PrimitiveCategory.STRING;
      textConverter1 = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
      break;
    case TIMESTAMP:
      timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
      break;
    case DATE:
      dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
      break;
    default:
      throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
  }
  inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
  // Compute the grouping once instead of twice in the same condition.
  PrimitiveGrouping grouping2 = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2);
  if (grouping2 != PrimitiveGrouping.STRING_GROUP && grouping2 != PrimitiveGrouping.VOID_GROUP) {
    throw new UDFArgumentTypeException(1, "trunc() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
  }
  inputType2 = PrimitiveCategory.STRING;
  if (arguments[1] instanceof ConstantObjectInspector) {
    // Constant format: resolve it once at initialization time.
    Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
    fmtInput = obj != null ? obj.toString() : null;
  } else {
    // Non-constant format: convert per-row at evaluate time.
    textConverter2 = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  }
  return outputOI;
}
Use of org.apache.hadoop.hive.serde2.io.DateWritable in the Apache Hive project.
Example: the initialize method of the GenericUDFDate class.
/**
 * Validates the single argument of to_date() and installs the converter
 * matching its primitive category. String-like categories collapse to
 * STRING; TIMESTAMPLOCALTZ and DATE share the date-writable converter.
 *
 * @param arguments exactly one primitive-category ObjectInspector
 * @return the output ObjectInspector (writable date)
 * @throws UDFArgumentException if the count or type is unsupported
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("to_date() requires 1 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentException("to_date() only accepts STRING/TIMESTAMP/DATEWRITABLE types, got " + arguments[0].getTypeName());
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  inputType = argumentOI.getPrimitiveCategory();
  ObjectInspector resultOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  if (inputType == PrimitiveCategory.VOID) {
    // No converter needed; evaluate() rejects non-null VOID values.
  } else if (inputType == PrimitiveCategory.CHAR
      || inputType == PrimitiveCategory.VARCHAR
      || inputType == PrimitiveCategory.STRING) {
    // Normalize every string-like category to STRING for evaluate().
    inputType = PrimitiveCategory.STRING;
    textConverter = ObjectInspectorConverters.getConverter(argumentOI, PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  } else if (inputType == PrimitiveCategory.TIMESTAMP) {
    timestampConverter = new TimestampConverter(argumentOI, PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  } else if (inputType == PrimitiveCategory.TIMESTAMPLOCALTZ
      || inputType == PrimitiveCategory.DATE) {
    dateWritableConverter = ObjectInspectorConverters.getConverter(argumentOI, PrimitiveObjectInspectorFactory.writableDateObjectInspector);
  } else {
    throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return resultOI;
}
Use of org.apache.hadoop.hive.serde2.io.DateWritable in the Apache Hive project.
Example: the evaluate method of the GenericUDFDate class.
/**
 * Evaluates to_date() for one row, dispatching on the input category
 * resolved in initialize(). A null input, or a string that fails to
 * parse as a date, yields SQL NULL.
 *
 * @param arguments the single deferred input value
 * @return the reusable DateWritable holding the result, or null
 * @throws HiveException if the input category is VOID or unsupported
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object input = arguments[0].get();
  if (input == null) {
    return null;
  }
  if (inputType == PrimitiveCategory.VOID) {
    throw new UDFArgumentException("TO_DATE() received non-null object of VOID type");
  }
  if (inputType == PrimitiveCategory.STRING) {
    String dateText = textConverter.convert(input).toString();
    if (!dateParser.parseDate(dateText, date)) {
      // Unparseable strings produce SQL NULL rather than an error.
      return null;
    }
    output.set(date);
  } else if (inputType == PrimitiveCategory.TIMESTAMP) {
    Timestamp ts = ((TimestampWritable) timestampConverter.convert(input)).getTimestamp();
    output.set(DateWritable.millisToDays(ts.getTime()));
  } else if (inputType == PrimitiveCategory.TIMESTAMPLOCALTZ
      || inputType == PrimitiveCategory.DATE) {
    output.set((DateWritable) dateWritableConverter.convert(input));
  } else {
    throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return output;
}
Use of org.apache.hadoop.hive.serde2.io.DateWritable in the Apache Hive project.
Example: the evaluate method of the GenericUDFInBloomFilter class.
/**
 * Tests whether the first argument's value is (probably) contained in the
 * bloom filter serialized in the second argument. The filter is
 * deserialized once, on the first non-null invocation, and cached in
 * {@code bloomFilter}. Returns null if either argument is null, otherwise
 * the boolean result of the membership probe.
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
// Return if either of the arguments is null
if (arguments[0].get() == null || arguments[1].get() == null) {
return null;
}
if (!initializedBloomFilter) {
// Setup the bloom filter once
InputStream in = null;
try {
// Copy out the serialized bytes before deserializing.
// NOTE(review): presumably the second argument is constant across rows,
// since the filter is deserialized only once — confirm against callers.
BytesWritable bw = (BytesWritable) arguments[1].get();
byte[] bytes = new byte[bw.getLength()];
System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength());
in = new NonSyncByteArrayInputStream(bytes);
bloomFilter = BloomKFilter.deserialize(in);
} catch (IOException e) {
throw new HiveException(e);
} finally {
IOUtils.closeStream(in);
}
// Flag is only set after successful deserialization, so a failed
// attempt would be retried on the next call.
initializedBloomFilter = true;
}
// Check if the value is in bloom filter
// Integral types probe as longs, floating point as doubles, and
// string-like/binary types as raw bytes.
switch(((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory()) {
case BOOLEAN:
boolean vBoolean = ((BooleanObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vBoolean ? 1 : 0);
case BYTE:
byte vByte = ((ByteObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vByte);
case SHORT:
short vShort = ((ShortObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vShort);
case INT:
int vInt = ((IntObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vInt);
case LONG:
long vLong = ((LongObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vLong);
case FLOAT:
float vFloat = ((FloatObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testDouble(vFloat);
case DOUBLE:
double vDouble = ((DoubleObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testDouble(vDouble);
case DECIMAL:
// toBytes fills scratchBuffer from the end and returns the start index.
HiveDecimalWritable vDecimal = ((HiveDecimalObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
int startIdx = vDecimal.toBytes(scratchBuffer);
return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
case DATE:
// Dates probe by their epoch-day count.
DateWritable vDate = ((DateObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
return bloomFilter.testLong(vDate.getDays());
case TIMESTAMP:
Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).getPrimitiveJavaObject(arguments[0].get());
return bloomFilter.testLong(vTimeStamp.getTime());
case CHAR:
// Strip CHAR padding so the probe matches how values were inserted.
Text vChar = ((HiveCharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
return bloomFilter.testBytes(vChar.getBytes(), 0, vChar.getLength());
case VARCHAR:
Text vVarchar = ((HiveVarcharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getTextValue();
return bloomFilter.testBytes(vVarchar.getBytes(), 0, vVarchar.getLength());
case STRING:
Text vString = ((StringObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
return bloomFilter.testBytes(vString.getBytes(), 0, vString.getLength());
case BINARY:
BytesWritable vBytes = ((BinaryObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
return bloomFilter.testBytes(vBytes.getBytes(), 0, vBytes.getLength());
default:
throw new UDFArgumentTypeException(0, "Bad primitive category " + ((PrimitiveTypeInfo) valObjectInspector).getPrimitiveCategory());
}
}
Aggregations