Use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.
From the class TestGenericUDFPrintf, method testCharFormat.
@Test
public void testCharFormat() throws HiveException {
  GenericUDFPrintf udf = new GenericUDFPrintf();
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getCharTypeInfo(10)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(7))
  };
  HiveCharWritable formatChar = new HiveCharWritable();
  formatChar.set("arg1=%s");
  HiveVarcharWritable argVarchar = new HiveVarcharWritable();
  argVarchar.set("world");
  DeferredObject[] args = {
      new DeferredJavaObject(formatChar),
      new DeferredJavaObject(argVarchar)
  };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(PrimitiveObjectInspectorFactory.writableStringObjectInspector, oi);
  Text res = (Text) udf.evaluate(args);
  Assert.assertEquals("arg1=world", res.toString());
}
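Worth noting: the format string above is a char(10), which Hive stores padded with trailing spaces to the declared length, yet the printf result contains no padding. A minimal sketch of why, assuming the stock HiveCharWritable accessors (this snippet is not part of the test above):

// Hedged sketch: padded vs. stripped views of a char(10) value.
HiveCharWritable cw = new HiveCharWritable();
cw.set("arg1=%s", 10);
// cw.getPaddedValue().toString()   -> "arg1=%s   " (padded to 10 characters)
// cw.getStrippedValue().toString() -> "arg1=%s"    (trailing spaces removed)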
Use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.
From the class TestGenericUDFSortArrayByField, method testSortPrimitiveTupleTwoFieldOrderDESC.
@Test
public void testSortPrimitiveTupleTwoFieldOrderDESC() throws HiveException {
  List<ObjectInspector> tuple = new ArrayList<ObjectInspector>();
  tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  ObjectInspector[] inputOIs = {
      ObjectInspectorFactory.getStandardListObjectInspector(
          ObjectInspectorFactory.getStandardStructObjectInspector(asList("Company", "Department"), tuple)),
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
      PrimitiveObjectInspectorFactory.writableStringObjectInspector
  };
  udf.initialize(inputOIs);
  Object i1 = asList(new Text("Linkedin"), new Text("HR"));
  Object i2 = asList(new Text("Linkedin"), new Text("IT"));
  Object i3 = asList(new Text("Linkedin"), new Text("Finance"));
  Object i4 = asList(new Text("Facebook"), new Text("IT"));
  Object i5 = asList(new Text("Facebook"), new Text("Finance"));
  Object i6 = asList(new Text("Facebook"), new Text("HR"));
  Object i7 = asList(new Text("Google"), new Text("Logistics"));
  Object i8 = asList(new Text("Google"), new Text("Finance"));
  Object i9 = asList(new Text("Google"), new Text("HR"));
  HiveVarchar vc = new HiveVarchar();
  vc.setValue("Department");
  GenericUDF.DeferredJavaObject[] argas = {
      new GenericUDF.DeferredJavaObject(asList(i1, i2, i3, i4, i5, i6, i7, i8, i9)),
      new GenericUDF.DeferredJavaObject(new Text("Company")),
      new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(vc)),
      new GenericUDF.DeferredJavaObject(new Text("DESC"))
  };
  runAndVerify(argas, asList(i2, i1, i3, i7, i9, i8, i4, i6, i5));
}
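The runAndVerify helper is not shown in this snippet. As a hedged sketch only, it plausibly evaluates the UDF against the deferred arguments and asserts on the resulting order, roughly like the hypothetical version below (the real helper in TestGenericUDFSortArrayByField may differ):

// Hypothetical shape of the runAndVerify helper used above.
private void runAndVerify(GenericUDF.DeferredJavaObject[] args, List<Object> expected) throws HiveException {
  // Evaluate sort_array_by and check the rows come back in the expected order.
  Object result = udf.evaluate(args);
  Assert.assertEquals("sort_array_by result", expected, result);
}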
Use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.
From the class ObjectInspectorUtils, method compare.
/**
 * Compare two objects with their respective ObjectInspectors.
 * If nullValueOpt is MAXVALUE, null is treated as the maximum value;
 * if nullValueOpt is MINVALUE, null is treated as the minimum value.
 */
public static int compare(Object o1, ObjectInspector oi1, Object o2, ObjectInspector oi2,
    MapEqualComparer mapEqualComparer, NullValueOption nullValueOpt) {
  if (oi1.getCategory() != oi2.getCategory()) {
    return oi1.getCategory().compareTo(oi2.getCategory());
  }
  int nullCmpRtn = -1;
  switch (nullValueOpt) {
  case MAXVALUE:
    nullCmpRtn = 1;
    break;
  case MINVALUE:
    nullCmpRtn = -1;
    break;
  }
  if (o1 == null) {
    return o2 == null ? 0 : nullCmpRtn;
  } else if (o2 == null) {
    return -nullCmpRtn;
  }
  switch (oi1.getCategory()) {
  case PRIMITIVE: {
    PrimitiveObjectInspector poi1 = ((PrimitiveObjectInspector) oi1);
    PrimitiveObjectInspector poi2 = ((PrimitiveObjectInspector) oi2);
    if (poi1.getPrimitiveCategory() != poi2.getPrimitiveCategory()) {
      return poi1.getPrimitiveCategory().compareTo(poi2.getPrimitiveCategory());
    }
    switch (poi1.getPrimitiveCategory()) {
    case VOID:
      return 0;
    case BOOLEAN: {
      int v1 = ((BooleanObjectInspector) poi1).get(o1) ? 1 : 0;
      int v2 = ((BooleanObjectInspector) poi2).get(o2) ? 1 : 0;
      return v1 - v2;
    }
    case BYTE: {
      int v1 = ((ByteObjectInspector) poi1).get(o1);
      int v2 = ((ByteObjectInspector) poi2).get(o2);
      return v1 - v2;
    }
    case SHORT: {
      int v1 = ((ShortObjectInspector) poi1).get(o1);
      int v2 = ((ShortObjectInspector) poi2).get(o2);
      return v1 - v2;
    }
    case INT: {
      int v1 = ((IntObjectInspector) poi1).get(o1);
      int v2 = ((IntObjectInspector) poi2).get(o2);
      return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
    }
    case LONG: {
      long v1 = ((LongObjectInspector) poi1).get(o1);
      long v2 = ((LongObjectInspector) poi2).get(o2);
      return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
    }
    case FLOAT: {
      float v1 = ((FloatObjectInspector) poi1).get(o1);
      float v2 = ((FloatObjectInspector) poi2).get(o2);
      // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
      if (v1 == 0.0f && v2 == 0.0f) {
        return 0;
      } else {
        // Float.compare() treats -0.0 and 0.0 as different
        return Float.compare(v1, v2);
      }
    }
    case DOUBLE: {
      double v1 = ((DoubleObjectInspector) poi1).get(o1);
      double v2 = ((DoubleObjectInspector) poi2).get(o2);
      // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
      if (v1 == 0.0d && v2 == 0.0d) {
        return 0;
      } else {
        // Double.compare() treats -0.0 and 0.0 as different
        return Double.compare(v1, v2);
      }
    }
    case STRING: {
      if (poi1.preferWritable() || poi2.preferWritable()) {
        Text t1 = (Text) poi1.getPrimitiveWritableObject(o1);
        Text t2 = (Text) poi2.getPrimitiveWritableObject(o2);
        return t1 == null ? (t2 == null ? 0 : -1) : (t2 == null ? 1 : t1.compareTo(t2));
      } else {
        String s1 = (String) poi1.getPrimitiveJavaObject(o1);
        String s2 = (String) poi2.getPrimitiveJavaObject(o2);
        return s1 == null ? (s2 == null ? 0 : -1) : (s2 == null ? 1 : s1.compareTo(s2));
      }
    }
    case CHAR: {
      HiveCharWritable t1 = ((HiveCharObjectInspector) poi1).getPrimitiveWritableObject(o1);
      HiveCharWritable t2 = ((HiveCharObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return t1.compareTo(t2);
    }
    case VARCHAR: {
      HiveVarcharWritable t1 = ((HiveVarcharObjectInspector) poi1).getPrimitiveWritableObject(o1);
      HiveVarcharWritable t2 = ((HiveVarcharObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return t1.compareTo(t2);
    }
    case BINARY: {
      BytesWritable bw1 = ((BinaryObjectInspector) poi1).getPrimitiveWritableObject(o1);
      BytesWritable bw2 = ((BinaryObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return bw1.compareTo(bw2);
    }
    case DATE: {
      DateWritable d1 = ((DateObjectInspector) poi1).getPrimitiveWritableObject(o1);
      DateWritable d2 = ((DateObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return d1.compareTo(d2);
    }
    case TIMESTAMP: {
      TimestampWritable t1 = ((TimestampObjectInspector) poi1).getPrimitiveWritableObject(o1);
      TimestampWritable t2 = ((TimestampObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return t1.compareTo(t2);
    }
    case INTERVAL_YEAR_MONTH: {
      HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1).getPrimitiveWritableObject(o1);
      HiveIntervalYearMonthWritable i2 = ((HiveIntervalYearMonthObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return i1.compareTo(i2);
    }
    case INTERVAL_DAY_TIME: {
      HiveIntervalDayTimeWritable i1 = ((HiveIntervalDayTimeObjectInspector) poi1).getPrimitiveWritableObject(o1);
      HiveIntervalDayTimeWritable i2 = ((HiveIntervalDayTimeObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return i1.compareTo(i2);
    }
    case DECIMAL: {
      HiveDecimalWritable t1 = ((HiveDecimalObjectInspector) poi1).getPrimitiveWritableObject(o1);
      HiveDecimalWritable t2 = ((HiveDecimalObjectInspector) poi2).getPrimitiveWritableObject(o2);
      return t1.compareTo(t2);
    }
    default: {
      throw new RuntimeException("Unknown type: " + poi1.getPrimitiveCategory());
    }
    }
  }
  case STRUCT: {
    StructObjectInspector soi1 = (StructObjectInspector) oi1;
    StructObjectInspector soi2 = (StructObjectInspector) oi2;
    List<? extends StructField> fields1 = soi1.getAllStructFieldRefs();
    List<? extends StructField> fields2 = soi2.getAllStructFieldRefs();
    int minimum = Math.min(fields1.size(), fields2.size());
    for (int i = 0; i < minimum; i++) {
      int r = compare(soi1.getStructFieldData(o1, fields1.get(i)),
          fields1.get(i).getFieldObjectInspector(),
          soi2.getStructFieldData(o2, fields2.get(i)),
          fields2.get(i).getFieldObjectInspector(),
          mapEqualComparer, nullValueOpt);
      if (r != 0) {
        return r;
      }
    }
    return fields1.size() - fields2.size();
  }
  case LIST: {
    ListObjectInspector loi1 = (ListObjectInspector) oi1;
    ListObjectInspector loi2 = (ListObjectInspector) oi2;
    int minimum = Math.min(loi1.getListLength(o1), loi2.getListLength(o2));
    for (int i = 0; i < minimum; i++) {
      int r = compare(loi1.getListElement(o1, i), loi1.getListElementObjectInspector(),
          loi2.getListElement(o2, i), loi2.getListElementObjectInspector(),
          mapEqualComparer, nullValueOpt);
      if (r != 0) {
        return r;
      }
    }
    return loi1.getListLength(o1) - loi2.getListLength(o2);
  }
  case MAP: {
    if (mapEqualComparer == null) {
      throw new RuntimeException("Compare on map type not supported!");
    } else {
      return mapEqualComparer.compare(o1, (MapObjectInspector) oi1, o2, (MapObjectInspector) oi2);
    }
  }
  case UNION: {
    UnionObjectInspector uoi1 = (UnionObjectInspector) oi1;
    UnionObjectInspector uoi2 = (UnionObjectInspector) oi2;
    byte tag1 = uoi1.getTag(o1);
    byte tag2 = uoi2.getTag(o2);
    if (tag1 != tag2) {
      return tag1 - tag2;
    }
    return compare(uoi1.getField(o1), uoi1.getObjectInspectors().get(tag1),
        uoi2.getField(o2), uoi2.getObjectInspectors().get(tag2),
        mapEqualComparer, nullValueOpt);
  }
  default:
    throw new RuntimeException("Compare on unknown type: " + oi1.getCategory());
  }
}
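A minimal usage sketch of this method with varchar operands. It assumes NullValueOption is the nested enum referenced in the signature above, and passes null for mapEqualComparer since no map types are involved:

// Hedged sketch: under MAXVALUE, null sorts after any non-null value.
ObjectInspector oi = PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector;
HiveVarcharWritable a = new HiveVarcharWritable();
a.set("apple");
int cmp = ObjectInspectorUtils.compare(a, oi, null, oi, null, NullValueOption.MAXVALUE);
// cmp == -1: the non-null value compares as smaller, i.e. null acts as the maximum.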
Use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.
From the class JavaHiveVarcharObjectInspector, method getWritableWithParams.
private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
  HiveVarcharWritable newValue = new HiveVarcharWritable();
  newValue.set(val, getMaxLength());
  return newValue;
}
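The set(val, getMaxLength()) call enforces the inspector's declared varchar length, truncating longer input. A small sketch of that behavior, with a max length of 5 chosen purely for illustration:

// Hedged sketch: set(HiveVarchar, int) truncates to the given max length.
HiveVarcharWritable w = new HiveVarcharWritable();
w.set(new HiveVarchar("truncated", -1), 5);
// w.getHiveVarchar().getValue() -> "trunc" (enforced to the 5-character limit)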
Use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.
From the class WritableHiveVarcharObjectInspector, method set.
@Override
public Object set(Object o, HiveVarchar value) {
  if (value == null) {
    return null;
  }
  HiveVarcharWritable writable = (HiveVarcharWritable) o;
  writable.set(value, getMaxLength());
  return o;
}
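Unlike the Java-object inspector above, this writable inspector mutates the holder passed in as o and returns it rather than allocating a new writable, returning null only for a null value. A hedged usage sketch:

// Hedged sketch: set() updates the existing writable in place.
WritableHiveVarcharObjectInspector oi =
    PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector;
HiveVarcharWritable holder = new HiveVarcharWritable();
oi.set(holder, new HiveVarchar("hello", -1));
// holder now contains "hello", trimmed to the inspector's max length if needed.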