Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.dateTypeInfo in project hive by Apache.
From the class TestGenericUDFOPPlus, method testDatePlusIntervalYearMonth:
@Test
public void testDatePlusIntervalYearMonth() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();

  // 2001-06-15 plus the year-month interval "2-8" (2 years, 8 months).
  DateWritable left = new DateWritable(Date.valueOf("2001-06-15"));
  HiveIntervalYearMonthWritable right =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  DeferredObject[] args = {
      new DeferredJavaObject(left),
      new DeferredJavaObject(right)
  };

  // date + interval year-month resolves to date.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());

  DateWritable res = (DateWritable) udf.evaluate(args);
  Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}
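The expected value is easy to check by hand: the interval string "2-8" means 2 years and 8 months, i.e. 32 months, and 2001-06-15 plus 32 months is 2004-02-15. A minimal standalone check of that arithmetic using java.time (illustrative only, not part of the Hive test):

import java.time.LocalDate;

public class DatePlusIntervalYearMonthCheck {
  public static void main(String[] args) {
    int totalMonths = 2 * 12 + 8; // the "2-8" interval as a month count
    LocalDate result = LocalDate.of(2001, 6, 15).plusMonths(totalMonths);
    System.out.println(result); // 2004-02-15, matching the test's assertion
  }
}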
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.dateTypeInfo in project hive by Apache.
From the class GenericUDFOPDTIMinus, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentException(opName + " requires two arguments.");
  }
  PrimitiveObjectInspector resultOI = null;
  for (int i = 0; i < 2; i++) {
    Category category = arguments[i].getCategory();
    if (category != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1)
          + " argument of " + opName + " is expected to be a "
          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
          + category.toString().toLowerCase() + " was found");
    }
  }
  inputOIs = new PrimitiveObjectInspector[] {
      (PrimitiveObjectInspector) arguments[0],
      (PrimitiveObjectInspector) arguments[1]
  };
  PrimitiveObjectInspector leftOI = inputOIs[0];
  PrimitiveObjectInspector rightOI = inputOIs[1];
  // Each branch below resolves one supported operand combination
  // to its operation type and result ObjectInspector.
  if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    // IntervalYearMonth - IntervalYearMonth = IntervalYearMonth
    minusOpType = OperationType.INTERVALYM_MINUS_INTERVALYM;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalYearMonthTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    // Date - IntervalYearMonth = Date
    minusOpType = OperationType.DATE_MINUS_INTERVALYM;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.dateTypeInfo);
  } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    // Timestamp - IntervalYearMonth = Timestamp
    minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALYM;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    // IntervalDayTime - IntervalDayTime = IntervalDayTime
    minusOpType = OperationType.INTERVALDT_MINUS_INTERVALDT;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    // Date/Timestamp - IntervalDayTime = Timestamp; the left operand is
    // converted to timestamp (the result type) before subtracting.
    minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALDT;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
    dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.DATE)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.TIMESTAMP)
      || checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.DATE)) {
    // Date/Timestamp - Date/Timestamp = IntervalDayTime; both operands are
    // converted to timestamp (not to the interval result type) before subtracting.
    minusOpType = OperationType.TIMESTAMP_MINUS_TIMESTAMP;
    dtArg1Idx = 0;
    dtArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
    dt1Converter = ObjectInspectorConverters.getConverter(leftOI,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    dt2Converter = ObjectInspectorConverters.getConverter(rightOI,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  } else {
    // Unsupported argument types
    List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
    argTypeInfos.add(leftOI.getTypeInfo());
    argTypeInfos.add(rightOI.getTypeInfo());
    throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
  }
  return resultOI;
}
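The checkArgs helper used above is not part of this excerpt; it is defined in the UDF's base class. A plausible sketch of it, assuming it simply compares the primitive categories of the two inputs captured in inputOIs:

// Sketch only: assumes inputOIs was populated earlier in initialize().
protected boolean checkArgs(PrimitiveCategory leftArg, PrimitiveCategory rightArg) {
  return inputOIs[0].getPrimitiveCategory() == leftArg
      && inputOIs[1].getPrimitiveCategory() == rightArg;
}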
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.dateTypeInfo in project hive by Apache.
From the class TestFunctionRegistry, method testImplicitConversion:
public void testImplicitConversion() {
  implicit(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
  implicit(TypeInfoFactory.longTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
  implicit(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
  implicit(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
  implicit(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
  implicit(TypeInfoFactory.dateTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
  implicit(TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.decimalTypeInfo, false);

  // varchar5, varchar10, char5 and char10 are TypeInfo fields of the test
  // class (set up outside this excerpt); the calls below use those fields.
  implicit(varchar10, TypeInfoFactory.stringTypeInfo, true);
  implicit(TypeInfoFactory.stringTypeInfo, varchar10, true);

  // Try with parameterized varchar types; these locals shadow the fields
  // from this point on.
  TypeInfo varchar10 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(10)");
  TypeInfo varchar20 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(20)");
  implicit(varchar10, TypeInfoFactory.stringTypeInfo, true);
  implicit(varchar20, TypeInfoFactory.stringTypeInfo, true);
  implicit(TypeInfoFactory.stringTypeInfo, varchar10, true);
  implicit(TypeInfoFactory.stringTypeInfo, varchar20, true);
  implicit(varchar20, varchar10, true);

  implicit(char10, TypeInfoFactory.stringTypeInfo, true);
  implicit(TypeInfoFactory.stringTypeInfo, char10, true);
  implicit(char5, char10, true);
  implicit(char5, varchar10, true);
  implicit(varchar5, char10, true);

  implicit(TypeInfoFactory.intTypeInfo, char10, true);
  implicit(TypeInfoFactory.intTypeInfo, varchar10, true);
  implicit(TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo, true);
}
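The implicit helper asserts whether an implicit conversion between two types is allowed. It is defined elsewhere in TestFunctionRegistry; a minimal sketch of the idea, assuming it delegates to Hive's implicit-conversion check (the exact delegate method has varied across Hive versions):

// Sketch only: the delegate may live in FunctionRegistry or TypeInfoUtils
// depending on the Hive version.
private void implicit(TypeInfo a, TypeInfo b, boolean convertible) {
  assertEquals(convertible, FunctionRegistry.implicitConvertible(a, b));
}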
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.dateTypeInfo in project hive by Apache.
From the class TestGenericUDFOPMinus, method testDateMinusIntervalYearMonth:
@Test
public void testDateMinusIntervalYearMonth() throws Exception {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();

  // 2004-02-15 minus the year-month interval "2-8" (2 years, 8 months);
  // the inverse of the plus test above.
  DateWritable left = new DateWritable(Date.valueOf("2004-02-15"));
  HiveIntervalYearMonthWritable right =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  DeferredObject[] args = {
      new DeferredJavaObject(left),
      new DeferredJavaObject(right)
  };

  // date - interval year-month resolves to date.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());

  DateWritable res = (DateWritable) udf.evaluate(args);
  Assert.assertEquals(Date.valueOf("2001-06-15"), res.get());
}
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.dateTypeInfo in project hive by Apache.
From the class MapJoinOneLongKeyBenchBase, method doSetup:
public void doSetup(VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinTestImplementation mapJoinImplementation) throws Exception {
  HiveConf hiveConf = new HiveConf();

  long seed = 2543;
  int rowCount = 10_000_000;

  // Big table: a single long key column.
  String[] bigTableColumnNames = new String[] { "number1" };
  TypeInfo[] bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.longTypeInfo };
  int[] bigTableKeyColumnNums = new int[] { 0 };

  // Small table: a date value column and a string value column.
  String[] smallTableValueColumnNames = new String[] { "sv1", "sv2" };
  TypeInfo[] smallTableValueTypeInfos =
      new TypeInfo[] { TypeInfoFactory.dateTypeInfo, TypeInfoFactory.stringTypeInfo };

  int[] bigTableRetainColumnNums = new int[] { 0 };
  int[] smallTableRetainKeyColumnNums = new int[] {};
  int[] smallTableRetainValueColumnNums = new int[] { 0, 1 };

  SmallTableGenerationParameters smallTableGenerationParameters =
      new SmallTableGenerationParameters();
  smallTableGenerationParameters.setValueOption(ValueOption.ONLY_ONE);

  setupMapJoin(hiveConf, seed, rowCount,
      vectorMapJoinVariation, mapJoinImplementation,
      bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueColumnNames, smallTableValueTypeInfos,
      bigTableRetainColumnNums, smallTableRetainKeyColumnNums,
      smallTableRetainValueColumnNums, smallTableGenerationParameters);
}
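A detail worth noting across all of these snippets: TypeInfoFactory exposes the primitive TypeInfos such as dateTypeInfo as cached constants, and a TypeInfo looked up by its type name compares equal to the constant, which is why the tests above can assertEquals against it directly. An illustrative check:

// Illustrative only.
TypeInfo byName = TypeInfoFactory.getPrimitiveTypeInfo("date");
System.out.println(byName.equals(TypeInfoFactory.dateTypeInfo)); // true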