Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.
The class GenericUDFDateDiff, method checkArguments.
private Converter checkArguments(ObjectInspector[] arguments, int i) throws UDFArgumentException {
  if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(i,
        "Only primitive type arguments are accepted but " + arguments[i].getTypeName()
            + " is passed as the " + (i + 1) + "-th argument");
  }
  PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
  Converter converter;
  switch (inputType) {
  case STRING:
  case VARCHAR:
  case CHAR:
    // Normalize all string-family arguments to a writable string before parsing.
    converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    break;
  case TIMESTAMP:
    converter = new TimestampConverter((PrimitiveObjectInspector) arguments[i],
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    break;
  case DATE:
    converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
        PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    break;
  default:
    throw new UDFArgumentException("DATEDIFF() only takes STRING/VARCHAR/CHAR, TIMESTAMP, or DATE types as the "
        + (i + 1) + "-th argument, got " + inputType);
  }
  return converter;
}
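The Converter returned here is typically stored during initialize() and applied at evaluate() time, so the UDF body only ever sees one normalized writable type. A minimal sketch of that pattern, assuming an inputConverter field populated from checkArguments() (the field name is illustrative, not from the Hive source):

// Minimal sketch: apply the converter built in checkArguments() at evaluate() time.
// `inputConverter` is an illustrative field, assumed to be set in initialize().
private transient Converter inputConverter;

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object raw = arguments[0].get();
  if (raw == null) {
    return null; // SQL semantics: NULL in, NULL out
  }
  // For STRING/VARCHAR/CHAR inputs this yields a writable Text; for DATE a
  // DateWritable; the TimestampConverter yields a TimestampWritable.
  return inputConverter.convert(raw);
}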
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.
The class TestETypeConverter, method testTimestampInt96ConverterGMT.
@Test
public void testTimestampInt96ConverterGMT() {
  PrimitiveConverter converter;
  // Declare that the INT96 values were written with a GMT calendar.
  parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "GMT");
  converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  converter.addBinary(NanoTimeUtils.getNanoTime(ts, Calendar.getInstance(TimeZone.getTimeZone("GMT"))).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts));
}
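The test drives the conversion through NanoTimeUtils in both directions. A minimal round-trip sketch of that conversion, assuming NanoTimeUtils and NanoTime from org.apache.hadoop.hive.ql.io.parquet.timestamp:

// Hedged sketch: the Timestamp <-> INT96 round trip the test relies on.
Calendar gmt = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
NanoTime nano = NanoTimeUtils.getNanoTime(ts, gmt);     // Timestamp -> INT96 nanotime
Timestamp back = NanoTimeUtils.getTimestamp(nano, gmt); // INT96 nanotime -> Timestamp
// With the same calendar on both sides the round trip is lossless: back.equals(ts).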
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.
The class TestETypeConverter, method testTimestampInt96ConverterLocal.
@Test
public void testTimestampInt96ConverterLocal() {
  PrimitiveConverter converter;
  // No write-zone property is set, so the converter defaults to the local time zone.
  converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  converter.addBinary(NanoTimeUtils.getNanoTime(ts, Calendar.getInstance()).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts));
}
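The only difference from the GMT test is the missing write-zone property, so the conversion calendar falls back to the JVM default. Together the two tests pin down that the table property, not the reader's environment alone, decides how INT96 values are interpreted. A small illustration of the calendar difference:

// The calendars the two tests end up using; they agree only when the JVM
// default time zone happens to be GMT.
Calendar localCal = Calendar.getInstance();                            // this test
Calendar gmtCal   = Calendar.getInstance(TimeZone.getTimeZone("GMT")); // GMT test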
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.
The class DynamicPartitionPruner, method prunePartitionSingleSource.
@VisibleForTesting
protected void prunePartitionSingleSource(String source, SourceInfo si) throws HiveException {
  if (si.skipPruning.get()) {
    // in this case we've determined that there's too much data
    // to prune dynamically.
    LOG.info("Skip pruning on " + source + ", column " + si.columnName);
    return;
  }
  Set<Object> values = si.values;
  String columnName = si.columnName;
  if (LOG.isDebugEnabled()) {
    StringBuilder sb = new StringBuilder("Pruning ");
    sb.append(columnName);
    sb.append(" with ");
    for (Object value : values) {
      sb.append(value == null ? null : value.toString());
      sb.append(", ");
    }
    LOG.debug(sb.toString());
  }
  ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
      TypeInfoFactory.getPrimitiveTypeInfo(si.columnType));
  Converter converter = ObjectInspectorConverters.getConverter(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
  StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
      Collections.singletonList(columnName), Collections.singletonList(oi));
  @SuppressWarnings("rawtypes")
  ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(si.partKey);
  eval.initialize(soi);
  applyFilterToPartitions(converter, eval, columnName, values);
}
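applyFilterToPartitions() is not shown here. A hedged sketch of what such a step has to do with the converter and evaluator built above; the method name and partition-spec access are illustrative, not the actual Tez pruner code:

// Illustrative sketch only: drop partitions whose converted key value, after
// running through the partition-key expression, is not in the dynamic value set.
private void applyFilterSketch(Converter converter, ExprNodeEvaluator eval,
    String columnName, Set<Object> values, List<Map<String, String>> partSpecs)
    throws HiveException {
  Iterator<Map<String, String>> it = partSpecs.iterator();
  while (it.hasNext()) {
    String partValueString = it.next().get(columnName);      // partition keys are stored as strings
    Object partValue = converter.convert(partValueString);   // String -> typed writable
    Object result = eval.evaluate(new Object[] { partValue }); // evaluate over a one-column "row"
    if (!values.contains(result)) {
      it.remove(); // this partition can never match the dynamic values
    }
  }
}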
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project SQLWindowing by hbutani.
The class Executor, method executeSelectList.
/**
 * For each row in the partition:
 * 1. evaluate the where condition if applicable.
 * 2. evaluate the value for each column retrieved from the select list.
 * 3. forward the writable value or object based on the implementation of the ForwardSink.
 * @param qDef
 * @param oPart
 * @param rS
 * @throws WindowingException
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void executeSelectList(QueryDef qDef, Partition oPart, ForwardSink rS) throws WindowingException {
  ArrayList<ColumnDef> cols = qDef.getSelectList().getColumns();
  ObjectInspector selectOI = qDef.getSelectList().getOI();
  SerDe oSerDe = qDef.getOutput().getSerDe();
  Object[] output = new Object[cols.size()];
  WhereDef whDef = qDef.getWhere();
  boolean applyWhere = whDef != null;
  // Normalize the where-clause result to a Java Boolean, whatever type the expression yields.
  Converter whConverter = !applyWhere ? null
      : ObjectInspectorConverters.getConverter(whDef.getOI(),
          PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
  ExprNodeEvaluator whCondEval = !applyWhere ? null : whDef.getExprEvaluator();
  Writable value = null;
  PartitionIterator<Object> pItr = oPart.iterator();
  RuntimeUtils.connectLeadLagFunctionsToPartition(qDef, pItr);
  while (pItr.hasNext()) {
    int colCnt = 0;
    ArrayList selectList = new ArrayList();
    Object oRow = pItr.next();
    if (applyWhere) {
      Object whCond = null;
      try {
        whCond = whCondEval.evaluate(oRow);
        whCond = whConverter.convert(whCond);
      } catch (HiveException he) {
        throw new WindowingException(he);
      }
      if (whCond == null || !((Boolean) whCond).booleanValue()) {
        continue;
      }
    }
    for (ColumnDef cDef : cols) {
      try {
        Object newCol = cDef.getExprEvaluator().evaluate(oRow);
        output[colCnt++] = newCol;
        selectList.add(newCol);
      } catch (HiveException he) {
        throw new WindowingException(he);
      }
    }
    if (rS.acceptObject()) {
      // Forward the row as a plain object array.
      rS.collectOutput(output);
    } else {
      // Otherwise serialize the row and forward it as a writable key-value pair.
      try {
        value = oSerDe.serialize(selectList, selectOI);
      } catch (SerDeException se) {
        throw new WindowingException(se);
      }
      rS.collectOutput(NullWritable.get(), value);
    }
  }
}
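The shape of the ForwardSink contract can be inferred from the two branches above: either the sink accepts raw object rows, or it takes serialized key-value writables for an output stream. A hedged sketch of that contract, reconstructed from usage rather than copied from the SQLWindowing source:

// Inferred from the calls in executeSelectList; not the actual SQLWindowing interface.
public interface ForwardSink {
  boolean acceptObject();                           // true: sink consumes raw Object[] rows
  void collectOutput(Object[] row);                 // object path, no serialization
  void collectOutput(Writable key, Writable value); // serialized key/value path
}

Splitting the two paths lets in-process consumers skip SerDe costs, while map-reduce output streams still receive writables.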