Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
Class GenericUDFOPNegative, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null) {
    return null;
  }
  Object input = arguments[0].get();
  if (input == null) {
    return null;
  }
  input = converter.convert(input);
  if (input == null) {
    return null;
  }
  switch (resultOI.getPrimitiveCategory()) {
  case BYTE:
    byteWritable.set((byte) -(((ByteWritable) input).get()));
    return byteWritable;
  case SHORT:
    shortWritable.set((short) -(((ShortWritable) input).get()));
    return shortWritable;
  case INT:
    intWritable.set(-(((IntWritable) input).get()));
    return intWritable;
  case LONG:
    longWritable.set(-(((LongWritable) input).get()));
    return longWritable;
  case FLOAT:
    floatWritable.set(-(((FloatWritable) input).get()));
    return floatWritable;
  case DOUBLE:
    doubleWritable.set(-(((DoubleWritable) input).get()));
    return doubleWritable;
  case DECIMAL:
    decimalWritable.set((HiveDecimalWritable) input);
    decimalWritable.mutateNegate();
    return decimalWritable;
  case INTERVAL_YEAR_MONTH:
    HiveIntervalYearMonth intervalYearMonth =
        ((HiveIntervalYearMonthWritable) input).getHiveIntervalYearMonth();
    this.intervalYearMonthWritable.set(intervalYearMonth.negate());
    return this.intervalYearMonthWritable;
  case INTERVAL_DAY_TIME:
    HiveIntervalDayTime intervalDayTime =
        ((HiveIntervalDayTimeWritable) input).getHiveIntervalDayTime();
    this.intervalDayTimeWritable.set(intervalDayTime.negate());
    return intervalDayTimeWritable;
  default:
    // Should never happen.
    throw new RuntimeException("Unexpected type in evaluating " + opName + ": "
        + resultOI.getPrimitiveCategory());
  }
}
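The branches above all follow the same allocation-free pattern: each operator instance keeps one mutable Writable per result type and overwrites it on every row. A minimal standalone sketch of that idiom for the BYTE branch (class and method names here are illustrative, not part of Hive):

import org.apache.hadoop.hive.serde2.io.ByteWritable;

// Illustrative sketch of the per-instance Writable reuse idiom used above.
public class NegateByteSketch {
  // One writable per operator instance, mutated on every call.
  private final ByteWritable byteWritable = new ByteWritable();

  public ByteWritable negate(ByteWritable input) {
    // Unary minus promotes byte to int in Java, so the cast back is required.
    byteWritable.set((byte) -input.get());
    return byteWritable;
  }

  public static void main(String[] args) {
    NegateByteSketch sketch = new NegateByteSketch();
    System.out.println(sketch.negate(new ByteWritable((byte) 5)).get()); // prints -5
  }
}

Because the same object is returned on every call, callers must copy the value out before the next row is evaluated; that is the usual contract for reused Writables in Hive operators.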
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
Class TypedBytesSerDe, method serializeField.
private void serializeField(Object o, ObjectInspector oi, Object reuse) throws IOException {
  switch (oi.getCategory()) {
  case PRIMITIVE: {
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
    switch (poi.getPrimitiveCategory()) {
    case VOID: {
      return;
    }
    case BOOLEAN: {
      BooleanObjectInspector boi = (BooleanObjectInspector) poi;
      BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
      r.set(boi.get(o));
      tbOut.write(r);
      return;
    }
    case BYTE: {
      ByteObjectInspector boi = (ByteObjectInspector) poi;
      ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
      r.set(boi.get(o));
      tbOut.write(r);
      return;
    }
    case SHORT: {
      ShortObjectInspector spoi = (ShortObjectInspector) poi;
      ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
      r.set(spoi.get(o));
      tbOut.write(r);
      return;
    }
    case INT: {
      IntObjectInspector ioi = (IntObjectInspector) poi;
      IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
      r.set(ioi.get(o));
      tbOut.write(r);
      return;
    }
    case LONG: {
      LongObjectInspector loi = (LongObjectInspector) poi;
      LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
      r.set(loi.get(o));
      tbOut.write(r);
      return;
    }
    case FLOAT: {
      FloatObjectInspector foi = (FloatObjectInspector) poi;
      FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
      r.set(foi.get(o));
      tbOut.write(r);
      return;
    }
    case DOUBLE: {
      DoubleObjectInspector doi = (DoubleObjectInspector) poi;
      DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
      r.set(doi.get(o));
      tbOut.write(r);
      return;
    }
    case STRING: {
      StringObjectInspector soi = (StringObjectInspector) poi;
      Text t = soi.getPrimitiveWritableObject(o);
      tbOut.write(t);
      return;
    }
    default: {
      throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
    }
    }
  }
  case LIST:
  case MAP:
  case STRUCT: {
    // For complex objects, serialize to JSON format.
    String s = SerDeUtils.getJSONString(o, oi);
    Text t = reuse == null ? new Text() : (Text) reuse;
    // Convert to Text and write it.
    t.set(s);
    tbOut.write(t);
    // Return here so complex types do not fall through to the default throw.
    return;
  }
  default: {
    throw new RuntimeException("Unrecognized type: " + oi.getCategory());
  }
  }
}
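Every primitive branch repeats the same reuse-or-allocate step before writing to the TypedBytes output. A condensed, self-contained sketch of just that step (the helper name toWritable is made up for illustration):

import org.apache.hadoop.io.IntWritable;

// Illustrative sketch of the reuse-or-allocate idiom from each branch above.
public class ReuseSketch {
  static IntWritable toWritable(int value, Object reuse) {
    // Reuse the caller's writable when one is supplied; otherwise allocate.
    IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
    r.set(value);
    return r;
  }

  public static void main(String[] args) {
    IntWritable first = toWritable(7, null);   // allocates a new writable
    IntWritable second = toWritable(8, first); // mutates `first` in place
    System.out.println(first == second);       // prints true
    System.out.println(first.get());           // prints 8
  }
}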
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
Class TypedBytesSerDe, method deserializeField.
static Object deserializeField(TypedBytesWritableInput in, TypeInfo type, Object reuse)
    throws IOException {
  // Read the recorded type code.
  Class<? extends Writable> writableType = in.readType();
  if (writableType != null && writableType.isAssignableFrom(NullWritable.class)) {
    // Indicates that the recorded value is null.
    return null;
  }
  switch (type.getCategory()) {
  case PRIMITIVE: {
    PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
    switch (ptype.getPrimitiveCategory()) {
    case VOID: {
      return null;
    }
    case BOOLEAN: {
      BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
      r = in.readBoolean(r);
      return r;
    }
    case BYTE: {
      ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
      r = in.readByte(r);
      return r;
    }
    case SHORT: {
      ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
      r = in.readShort(r);
      return r;
    }
    case INT: {
      IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
      r = in.readInt(r);
      return r;
    }
    case LONG: {
      LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
      r = in.readLong(r);
      return r;
    }
    case FLOAT: {
      FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
      r = in.readFloat(r);
      return r;
    }
    case DOUBLE: {
      DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
      r = in.readDouble(r);
      return r;
    }
    case STRING: {
      Text r = reuse == null ? new Text() : (Text) reuse;
      r = in.readText(r);
      return r;
    }
    default: {
      throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory());
    }
    }
  }
  // Currently, deserialization of complex types is not supported.
  case LIST:
  case MAP:
  case STRUCT:
  default: {
    throw new RuntimeException("Unsupported category: " + type.getCategory());
  }
  }
}
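A plausible round trip through the BYTE branch, assuming the Hive contrib TypedBytes stream classes, their static get(...) factories, and package-level access to deserializeField (the stream wiring here is illustrative, not from the Hive source):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableInput;
import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableOutput;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Hedged sketch: write one BYTE value as TypedBytes, then read it back
// through deserializeField with a byte TypeInfo and no reuse object.
public class ByteRoundTripSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    TypedBytesWritableOutput out = TypedBytesWritableOutput.get(new DataOutputStream(buf));
    out.write(new ByteWritable((byte) 42)); // records the type code, then the value

    TypedBytesWritableInput in = TypedBytesWritableInput.get(
        new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
    ByteWritable r = (ByteWritable) TypedBytesSerDe.deserializeField(
        in, TypeInfoFactory.byteTypeInfo, null);
    System.out.println(r.get()); // expected: 42
  }
}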
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
Class GenericUDFBaseNumeric, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null || arguments[1] == null) {
    return null;
  }
  Object left = arguments[0].get();
  Object right = arguments[1].get();
  if (left == null && right == null) {
    return null;
  }
  // Handle decimal separately.
  if (resultOI.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
    HiveDecimal hdLeft = PrimitiveObjectInspectorUtils.getHiveDecimal(left, leftOI);
    HiveDecimal hdRight = PrimitiveObjectInspectorUtils.getHiveDecimal(right, rightOI);
    if (hdLeft == null || hdRight == null) {
      return null;
    }
    HiveDecimalWritable result = evaluate(hdLeft, hdRight);
    return resultOI.getPrimitiveWritableObject(result);
  }
  left = converterLeft.convert(left);
  if (left == null) {
    return null;
  }
  right = converterRight.convert(right);
  if (right == null) {
    return null;
  }
  switch (resultOI.getPrimitiveCategory()) {
  case BYTE:
    return evaluate((ByteWritable) left, (ByteWritable) right);
  case SHORT:
    return evaluate((ShortWritable) left, (ShortWritable) right);
  case INT:
    return evaluate((IntWritable) left, (IntWritable) right);
  case LONG:
    return evaluate((LongWritable) left, (LongWritable) right);
  case FLOAT:
    return evaluate((FloatWritable) left, (FloatWritable) right);
  case DOUBLE:
    return evaluate((DoubleWritable) left, (DoubleWritable) right);
  default:
    // Should never happen.
    throw new RuntimeException("Unexpected type in evaluating " + opName + ": "
        + resultOI.getPrimitiveCategory());
  }
}
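The per-type evaluate overloads that this switch dispatches to are supplied by concrete subclasses such as GenericUDFOPPlus. A hedged sketch of what the INT overload for addition might look like, reusing the writable result field pattern from the base class (method fragment only, for illustration):

// Sketch of a subclass overload (modeled on GenericUDFOPPlus); intWritable
// is assumed to be the reusable result field from GenericUDFBaseNumeric.
@Override
protected IntWritable evaluate(IntWritable left, IntWritable right) {
  // Mutate the shared result writable rather than allocating per row.
  intWritable.set(left.get() + right.get());
  return intWritable;
}

Note that both operands have already been converted to the common result type by converterLeft and converterRight, which is why each overload can assume matching writable types.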
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
Class TypedBytesRecordReader, method next.
public int next(Writable data) throws IOException {
  int pos = 0;
  barrStr.reset();
  while (true) {
    Type type = tbIn.readTypeCode();
    // A null type code means the stream was empty.
    if (type == null) {
      return -1;
    }
    if (type == Type.ENDOFRECORD) {
      tbOut.writeEndOfRecord();
      if (barrStr.getLength() > 0) {
        ((BytesWritable) data).set(barrStr.getData(), 0, barrStr.getLength());
      }
      return barrStr.getLength();
    }
    if (pos >= row.size()) {
      Writable wrt = allocateWritable(type);
      assert pos == row.size();
      assert pos == rowTypeName.size();
      row.add(wrt);
      rowTypeName.add(type.name());
      String typeName = typedBytesToTypeName.get(type);
      PrimitiveTypeInfo srcTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
      srcOIns.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(srcTypeInfo));
      converters.add(ObjectInspectorConverters.getConverter(srcOIns.get(pos), dstOIns.get(pos)));
    } else {
      if (!rowTypeName.get(pos).equals(type.name())) {
        throw new RuntimeException("datatype of row changed from " + rowTypeName.get(pos)
            + " to " + type.name());
      }
    }
    Writable w = row.get(pos);
    switch (type) {
    case BYTE:
      tbIn.readByte((ByteWritable) w);
      break;
    case BOOL:
      tbIn.readBoolean((BooleanWritable) w);
      break;
    case INT:
      tbIn.readInt((IntWritable) w);
      break;
    case SHORT:
      tbIn.readShort((ShortWritable) w);
      break;
    case LONG:
      tbIn.readLong((LongWritable) w);
      break;
    case FLOAT:
      tbIn.readFloat((FloatWritable) w);
      break;
    case DOUBLE:
      tbIn.readDouble((DoubleWritable) w);
      break;
    case STRING:
      tbIn.readText((Text) w);
      break;
    default:
      // Should never come here.
      assert false;
    }
    write(pos, w);
    pos++;
  }
}
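The allocateWritable helper is not shown in this excerpt. A hedged sketch of what it plausibly does, given the type codes handled by the switch in next() (this is an inferred reconstruction, not the verbatim Hive source):

// Inferred sketch: map each TypedBytes type code seen in next() to a fresh
// Writable of the matching class, so row.get(pos) can be cast safely later.
private Writable allocateWritable(Type type) {
  switch (type) {
  case BYTE:
    return new ByteWritable();
  case BOOL:
    return new BooleanWritable();
  case INT:
    return new IntWritable();
  case SHORT:
    return new ShortWritable();
  case LONG:
    return new LongWritable();
  case FLOAT:
    return new FloatWritable();
  case DOUBLE:
    return new DoubleWritable();
  case STRING:
    return new Text();
  default:
    // Mirrors the defensive default in next(): unsupported codes never reach here.
    assert false;
    return null;
  }
}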