Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestGenericUDFInternalInterval, method testDayInterval.
@Test
public void testDayInterval() throws Exception {
  try (GenericUDFInternalInterval udf = new GenericUDFInternalInterval()) {
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.intTypeInfo, new IntWritable(HiveParser.TOK_INTERVAL_DAY_LITERAL)),
        PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(new ByteWritable((byte) 4)),
        new DeferredJavaObject(new Text("8")) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo());
    HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args);
    Assert.assertEquals(8, res.getHiveIntervalDayTime().getDays());
  }
}
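For comparison, here is a minimal sketch of how the same UDF could be exercised with an hour interval instead of a day interval. It is not copied from the Hive test suite; the HiveParser.TOK_INTERVAL_HOUR_LITERAL constant and the getHours() accessor are assumed by analogy with the day-interval case above.

// Hypothetical hour-interval variant of the test above (a sketch, not the actual Hive test).
try (GenericUDFInternalInterval udf = new GenericUDFInternalInterval()) {
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          TypeInfoFactory.intTypeInfo, new IntWritable(HiveParser.TOK_INTERVAL_HOUR_LITERAL)),
      PrimitiveObjectInspectorFactory.writableStringObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(new ByteWritable((byte) 4)),
      new DeferredJavaObject(new Text("8")) };
  udf.initialize(inputOIs);
  HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args);
  // An hour operand of "8" should populate the hours field of the resulting interval.
  Assert.assertEquals(8, res.getHiveIntervalDayTime().getHours());
}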
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestGenericUDFBRound, method testDecimal.
@Test
public void testDecimal() throws HiveException {
  GenericUDFBRound udf = new GenericUDFBRound();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector;
  IntWritable scale = new IntWritable(0);
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, scale);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runDecimal(2.5, scale, 2.0, udf);
  runDecimal(3.5, scale, 4.0, udf);
  runDecimal(2.49, scale, 2.0, udf);
  runDecimal(3.49, scale, 3.0, udf);
  runDecimal(2.51, scale, 3.0, udf);
  runDecimal(3.51, scale, 4.0, udf);
  runDecimal(2.4, scale, 2.0, udf);
  runDecimal(3.4, scale, 3.0, udf);
  runDecimal(2.6, scale, 3.0, udf);
  runDecimal(3.6, scale, 4.0, udf);
}
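The runDecimal helper is not included in this snippet. A plausible reconstruction (the real helper in TestGenericUDFBRound may differ in detail; HiveDecimal, HiveDecimalWritable and java.math.BigDecimal are the only additions) wraps the input as a writable decimal, evaluates the UDF, and compares the result as a double:

// Sketch of a runDecimal helper; the actual helper in TestGenericUDFBRound may differ.
private void runDecimal(double v, IntWritable scale, double expected, GenericUDF udf) throws HiveException {
  HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create(BigDecimal.valueOf(v)));
  DeferredObject[] args = { new DeferredJavaObject(input), new DeferredJavaObject(scale) };
  // bround() over a decimal input yields a decimal writable
  HiveDecimalWritable out = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(expected, out.getHiveDecimal().doubleValue(), 1e-9);
}

The expected values show bround's HALF_EVEN (banker's) rounding: ties such as 2.5 and 3.5 round to the nearest even integer, 2 and 4.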
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestGenericUDFBRound, method testDoubleScaleMinus1.
@Test
public void testDoubleScaleMinus1() throws HiveException {
  GenericUDFBRound udf = new GenericUDFBRound();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  IntWritable scale = new IntWritable(-1);
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, scale);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runDouble(55.0, scale, 60.0, udf);
  runDouble(45.0, scale, 40.0, udf);
  runDouble(54.9, scale, 50.0, udf);
  runDouble(44.9, scale, 40.0, udf);
  runDouble(55.1, scale, 60.0, udf);
  runDouble(45.1, scale, 50.0, udf);
  runDouble(-55.0, scale, -60.0, udf);
  runDouble(-45.0, scale, -40.0, udf);
  runDouble(-54.9, scale, -50.0, udf);
  runDouble(-44.9, scale, -40.0, udf);
  runDouble(-55.1, scale, -60.0, udf);
  runDouble(-45.1, scale, -50.0, udf);
}
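A matching runDouble helper, again only a sketch, would do the same with Hive's DoubleWritable. With scale -1, rounding happens at the tens digit, so ties such as 55.0 and 45.0 go to the even multiple of ten (60.0 and 40.0 respectively).

// Sketch of a runDouble helper; DoubleWritable here is org.apache.hadoop.hive.serde2.io.DoubleWritable.
private void runDouble(double v, IntWritable scale, double expected, GenericUDF udf) throws HiveException {
  DeferredObject[] args = { new DeferredJavaObject(new DoubleWritable(v)), new DeferredJavaObject(scale) };
  DoubleWritable out = (DoubleWritable) udf.evaluate(args);
  Assert.assertEquals(expected, out.get(), 1e-9);
}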
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestAccumuloRowSerializer, method testBinarySerialization.
@Test
public void testBinarySerialization() throws IOException, SerDeException {
  List<String> columns = Arrays.asList("row", "cq1", "cq2", "cq3");
  List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo,
      TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
  List<String> typeNames = new ArrayList<String>(types.size());
  for (TypeInfo type : types) {
    typeNames.add(type.getTypeName());
  }
  Properties tableProperties = new Properties();
  tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
  tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
  tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
  tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
  AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
      tableProperties, AccumuloSerDe.class.getSimpleName());
  LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
  LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory.createLazyStructInspector(
      columns, types, serDeParams.getSeparators(), serDeParams.getNullSequence(),
      serDeParams.isLastColumnTakesRest(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
  AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
      accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(), accumuloSerDeParams.getRowIdFactory());
  // Create the LazyStruct from the LazyStruct...Inspector
  LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(oi);
  ByteArrayRef byteRef = new ByteArrayRef();
  byteRef.setData(new byte[] { 'r', 'o', 'w', '1', ' ', '1', '0', ' ', '2', '0', ' ', 'v', 'a', 'l', 'u', 'e' });
  obj.init(byteRef, 0, byteRef.getData().length);
  Mutation m = (Mutation) serializer.serialize(obj, oi);
  Assert.assertArrayEquals("row1".getBytes(), m.getRow());
  List<ColumnUpdate> updates = m.getUpdates();
  Assert.assertEquals(3, updates.size());
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(baos);
  ColumnUpdate update = updates.get(0);
  Assert.assertEquals("cf", new String(update.getColumnFamily()));
  Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
  out.writeInt(10);
  Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
  update = updates.get(1);
  Assert.assertEquals("cf", new String(update.getColumnFamily()));
  Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
  baos.reset();
  out.writeInt(20);
  Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
  update = updates.get(2);
  Assert.assertEquals("cf", new String(update.getColumnFamily()));
  Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
  Assert.assertEquals("value", new String(update.getValue()));
}
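The #b suffix in the column mapping is what makes cq1 and cq2 binary-encoded: their stored values are the raw 4-byte big-endian output of DataOutputStream.writeInt, while cq3 (no suffix) stays a UTF-8 string. A small illustrative check, using only standard Java, decodes the cq1 value back:

// Illustrative only: decode the binary int stored for cf:cq1#b.
byte[] raw = updates.get(0).getValue();
int decoded = java.nio.ByteBuffer.wrap(raw).getInt(); // ByteBuffer defaults to big-endian, like writeInt
Assert.assertEquals(10, decoded);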
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class ComparisonOpMethodResolver, method getEvalMethod.
/*
 * (non-Javadoc)
 *
 * @see org.apache.hadoop.hive.ql.exec.UDFMethodResolver#getEvalMethod(java.util.List)
 */
@Override
public Method getEvalMethod(List<TypeInfo> argTypeInfos) throws UDFArgumentException {
  assert (argTypeInfos.size() == 2);
  List<TypeInfo> pTypeInfos = null;
  if (argTypeInfos.get(0).equals(TypeInfoFactory.voidTypeInfo)
      || argTypeInfos.get(1).equals(TypeInfoFactory.voidTypeInfo)) {
    pTypeInfos = new ArrayList<TypeInfo>();
    pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
    pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
  } else if (argTypeInfos.get(0).equals(TypeInfoFactory.booleanTypeInfo)
      && argTypeInfos.get(1).equals(TypeInfoFactory.booleanTypeInfo)) {
    pTypeInfos = new ArrayList<TypeInfo>();
    pTypeInfos.add(TypeInfoFactory.intTypeInfo);
    pTypeInfos.add(TypeInfoFactory.intTypeInfo);
  } else if (argTypeInfos.get(0) == argTypeInfos.get(1)) {
    pTypeInfos = argTypeInfos;
  } else {
    pTypeInfos = new ArrayList<TypeInfo>();
    pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
    pTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
  }
  Method udfMethod = null;
  List<Method> evaluateMethods = new ArrayList<Method>();
  for (Method m : Arrays.asList(udfClass.getMethods())) {
    if (m.getName().equals("evaluate")) {
      evaluateMethods.add(m);
      List<TypeInfo> acceptedTypeInfos = TypeInfoUtils.getParameterTypeInfos(m, pTypeInfos.size());
      if (acceptedTypeInfos == null) {
        // null means the method does not accept number of arguments passed.
        continue;
      }
      boolean match = (acceptedTypeInfos.size() == pTypeInfos.size());
      for (int i = 0; i < pTypeInfos.size() && match; i++) {
        TypeInfo accepted = acceptedTypeInfos.get(i);
        if (accepted != pTypeInfos.get(i)) {
          match = false;
        }
      }
      if (match) {
        if (udfMethod != null) {
          throw new AmbiguousMethodException(udfClass, argTypeInfos,
              Arrays.asList(new Method[] { udfMethod, m }));
        } else {
          udfMethod = m;
        }
      }
    }
  }
  if (udfMethod == null) {
    throw new NoMatchingMethodException(udfClass, argTypeInfos, evaluateMethods);
  }
  return udfMethod;
}
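To see the boolean-to-int mapping above in action, here is a minimal sketch. The UDF class and test method are hypothetical, and the resolver constructor taking the UDF class is assumed from the udfClass field referenced in getEvalMethod: with two boolean arguments the resolver selects an evaluate overload taking two ints rather than two booleans.

// Hypothetical UDF used only to illustrate the resolution rules; not part of the Hive source.
// DoubleWritable here is org.apache.hadoop.hive.serde2.io.DoubleWritable.
public class UDFOPExample extends UDF {
  public BooleanWritable evaluate(IntWritable a, IntWritable b) { return null; }
  public BooleanWritable evaluate(DoubleWritable a, DoubleWritable b) { return null; }
}

@Test
public void testBooleanArgsResolveToIntOverload() throws UDFArgumentException {
  ComparisonOpMethodResolver resolver = new ComparisonOpMethodResolver(UDFOPExample.class);
  List<TypeInfo> args = Arrays.<TypeInfo>asList(TypeInfoFactory.booleanTypeInfo, TypeInfoFactory.booleanTypeInfo);
  // booleanTypeInfo is rewritten to intTypeInfo, so the (IntWritable, IntWritable) overload matches.
  Method chosen = resolver.getEvalMethod(args);
  Assert.assertEquals(IntWritable.class, chosen.getParameterTypes()[0]);
}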