Use of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce in project hive by apache.
From the class VectorizationContext, method getCoalesceExpression:
private VectorExpression getCoalesceExpression(List<ExprNodeDesc> childExpr, VectorExpressionDescriptor.Mode mode,
    TypeInfo returnType) throws HiveException {
  int[] inputColumns = new int[childExpr.size()];
  VectorExpression[] vectorChildren = getVectorExpressions(childExpr, VectorExpressionDescriptor.Mode.PROJECTION);

  final int size = vectorChildren.length;
  TypeInfo[] inputTypeInfos = new TypeInfo[size];
  DataTypePhysicalVariation[] inputDataTypePhysicalVariations = new DataTypePhysicalVariation[size];
  DataTypePhysicalVariation outputDataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64;
  boolean fixConstants = false;
  for (int i = 0; i < vectorChildren.length; ++i) {
    VectorExpression ve = vectorChildren[i];
    inputColumns[i] = ve.getOutputColumnNum();
    inputTypeInfos[i] = ve.getOutputTypeInfo();
    inputDataTypePhysicalVariations[i] = ve.getOutputDataTypePhysicalVariation();
    if (inputDataTypePhysicalVariations[i] == DataTypePhysicalVariation.NONE ||
        inputDataTypePhysicalVariations[i] == null) {
      if (childExpr.get(i) instanceof ExprNodeConstantDesc &&
          inputTypeInfos[i] instanceof DecimalTypeInfo &&
          ((DecimalTypeInfo) inputTypeInfos[i]).precision() <= 18) {
        // A small decimal constant can be rewritten to DECIMAL_64 form below.
        fixConstants = true;
      } else {
        outputDataTypePhysicalVariation = DataTypePhysicalVariation.NONE;
      }
    }
  }

  if (outputDataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64 && fixConstants) {
    for (int i = 0; i < vectorChildren.length; ++i) {
      if ((inputDataTypePhysicalVariations[i] == DataTypePhysicalVariation.NONE ||
          inputDataTypePhysicalVariations[i] == null) &&
          vectorChildren[i] instanceof ConstantVectorExpression) {
        ConstantVectorExpression cve = (ConstantVectorExpression) vectorChildren[i];
        HiveDecimal hd = cve.getDecimalValue();
        long longValue = new HiveDecimalWritable(hd).serialize64(((DecimalTypeInfo) cve.getOutputTypeInfo()).getScale());
        cve.setLongValue(longValue);
        vectorChildren[i].setOutputDataTypePhysicalVariation(DataTypePhysicalVariation.DECIMAL_64);
        int scratchColIndex = vectorChildren[i].getOutputColumnNum() - ocm.initialOutputCol;
        ocm.scratchDataTypePhysicalVariations[scratchColIndex] = DataTypePhysicalVariation.DECIMAL_64;
      }
    }
  }

  final int outputColumnNum = ocm.allocateOutputColumn(returnType, outputDataTypePhysicalVariation);
  VectorCoalesce vectorCoalesce = new VectorCoalesce(inputColumns, outputColumnNum);

  vectorCoalesce.setChildExpressions(vectorChildren);
  vectorCoalesce.setInputTypeInfos(inputTypeInfos);
  vectorCoalesce.setInputDataTypePhysicalVariations(inputDataTypePhysicalVariations);
  vectorCoalesce.setOutputTypeInfo(returnType);
  vectorCoalesce.setOutputDataTypePhysicalVariation(outputDataTypePhysicalVariation);

  freeNonColumns(vectorChildren);

  // Assume a plain projection unless FILTER mode requires a filter wrapper.
  boolean isFilter = false;
  if (mode == VectorExpressionDescriptor.Mode.FILTER) {
    // Is the output type a BOOLEAN?
    if (returnType.getCategory() == Category.PRIMITIVE &&
        ((PrimitiveTypeInfo) returnType).getPrimitiveCategory() == PrimitiveCategory.BOOLEAN) {
      isFilter = true;
    } else {
      return null;
    }
  }

  if (isFilter) {
    // Wrap the PROJECTION COALESCE expression output with a filter.
    SelectColumnIsTrue filterVectorExpr = new SelectColumnIsTrue(vectorCoalesce.getOutputColumnNum());

    filterVectorExpr.setChildExpressions(new VectorExpression[] { vectorCoalesce });

    filterVectorExpr.setInputTypeInfos(vectorCoalesce.getOutputTypeInfo());
    filterVectorExpr.setInputDataTypePhysicalVariations(vectorCoalesce.getOutputDataTypePhysicalVariation());

    return filterVectorExpr;
  } else {
    return vectorCoalesce;
  }
}
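For context, the constant fix-up in the middle of this method rewrites small decimal constants (precision at most 18) into the scaled 64-bit long form that DECIMAL_64 columns use. Below is a minimal sketch of just that conversion, reusing the same HiveDecimal and HiveDecimalWritable classes that appear above; the class name and the sample value are invented for illustration, and the printed result is the expected scaled value rather than a verified output.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class Decimal64ConstantSketch {
  public static void main(String[] args) {
    // A decimal constant whose precision is at most 18 fits into a signed 64-bit long
    // once scaled, which is what DataTypePhysicalVariation.DECIMAL_64 relies on.
    HiveDecimal hd = HiveDecimal.create("123.45");
    int scale = 2; // in the method above this comes from the constant's DecimalTypeInfo
    long longValue = new HiveDecimalWritable(hd).serialize64(scale);
    System.out.println(longValue); // expected: 12345, i.e. 123.45 scaled by 10^scale
  }
}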
Use of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce in project hive by apache.
From the class Vectorizer, method fixDecimalDataTypePhysicalVariations:
private static VectorExpression fixDecimalDataTypePhysicalVariations(final VectorExpression parent,
    final VectorExpression[] children, final VectorizationContext vContext) throws HiveException {
  if (children == null || children.length == 0) {
    return parent;
  }

  for (int i = 0; i < children.length; i++) {
    VectorExpression child = children[i];
    VectorExpression newChild = fixDecimalDataTypePhysicalVariations(child, child.getChildExpressions(), vContext);
    if (child.getClass() == newChild.getClass() && child != newChild) {
      children[i] = newChild;
    }
  }

  if (parent.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.NONE &&
      !(parent instanceof ConvertDecimal64ToDecimal)) {
    boolean inputArgsChanged = false;
    DataTypePhysicalVariation[] dataTypePhysicalVariations = parent.getInputDataTypePhysicalVariations();
    for (int i = 0; i < children.length; i++) {
      if (children[i].getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.DECIMAL_64) {
        // We found at least one child with a mismatch: wrap it with a conversion.
        children[i] = vContext.wrapWithDecimal64ToDecimalConversion(children[i]);
        inputArgsChanged = true;
        dataTypePhysicalVariations[i] = DataTypePhysicalVariation.NONE;
      }
    }
    // Fix up the input column numbers and output column numbers.
    if (inputArgsChanged) {
      if (parent instanceof VectorUDFAdaptor) {
        VectorUDFAdaptor parentAdaptor = (VectorUDFAdaptor) parent;
        VectorUDFArgDesc[] argDescs = parentAdaptor.getArgDescs();
        for (int i = 0; i < argDescs.length; ++i) {
          if (argDescs[i].getColumnNum() != children[i].getOutputColumnNum()) {
            argDescs[i].setColumnNum(children[i].getOutputColumnNum());
            break;
          }
        }
      } else {
        Object[] arguments;
        int argumentCount = children.length + (parent.getOutputColumnNum() == -1 ? 0 : 1);
        // VectorCoalesce takes its input columns as a single int[], so it needs to be
        // handled as a special case to avoid instantiation failure.
        if (parent instanceof VectorCoalesce) {
          arguments = new Object[2];
          arguments[0] = new int[children.length];
          for (int i = 0; i < children.length; i++) {
            VectorExpression vce = children[i];
            ((int[]) arguments[0])[i] = vce.getOutputColumnNum();
          }
          arguments[1] = parent.getOutputColumnNum();
        } else {
          if (parent instanceof DecimalColDivideDecimalScalar) {
            arguments = new Object[argumentCount + 1];
            arguments[children.length] = ((DecimalColDivideDecimalScalar) parent).getValue();
          } else {
            arguments = new Object[argumentCount];
          }
          for (int i = 0; i < children.length; i++) {
            VectorExpression vce = children[i];
            arguments[i] = vce.getOutputColumnNum();
          }
        }
        // Retain the output column number from the parent.
        if (parent.getOutputColumnNum() != -1) {
          arguments[arguments.length - 1] = parent.getOutputColumnNum();
        }
        // Re-instantiate the parent expression with the new arguments.
        VectorExpression newParent = vContext.instantiateExpression(parent.getClass(), parent.getOutputTypeInfo(),
            parent.getOutputDataTypePhysicalVariation(), arguments);
        newParent.setOutputTypeInfo(parent.getOutputTypeInfo());
        newParent.setOutputDataTypePhysicalVariation(parent.getOutputDataTypePhysicalVariation());
        newParent.setInputTypeInfos(parent.getInputTypeInfos());
        newParent.setInputDataTypePhysicalVariations(dataTypePhysicalVariations);
        newParent.setChildExpressions(parent.getChildExpressions());
        return newParent;
      }
    }
  }
  return parent;
}
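The method is a bottom-up rewrite: repair the children first, wrap any DECIMAL_64 child whose parent expects the regular decimal representation, then re-instantiate the parent so its column arguments point at the wrapped children. Below is a minimal, self-contained sketch of that pattern over a hypothetical Node type; the Node class and its fields are invented for illustration and are not Hive API.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-ins for VectorExpression and ConvertDecimal64ToDecimal,
// used only to illustrate the bottom-up rewrite pattern; not part of Hive.
class Node {
  final String name;
  final boolean producesDecimal64;
  final List<Node> children;

  Node(String name, boolean producesDecimal64, List<Node> children) {
    this.name = name;
    this.producesDecimal64 = producesDecimal64;
    this.children = children;
  }

  static Node rewrite(Node node) {
    List<Node> fixed = new ArrayList<>();
    boolean changed = false;
    for (Node child : node.children) {
      Node newChild = rewrite(child); // repair the subtree first
      if (!node.producesDecimal64 && newChild.producesDecimal64) {
        // The parent expects the regular decimal representation, so wrap the child,
        // analogous to wrapWithDecimal64ToDecimalConversion above.
        newChild = new Node("convert(" + newChild.name + ")", false, List.of(newChild));
      }
      changed |= newChild != child;
      fixed.add(newChild);
    }
    // Analogous to re-instantiating the parent with the updated argument columns.
    return changed ? new Node(node.name, node.producesDecimal64, fixed) : node;
  }
}

In the real method the re-instantiation goes through VectorizationContext.instantiateExpression, with VectorCoalesce special-cased because its constructor takes the input columns as a single int[] rather than as individual arguments.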