Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.booleanTypeInfo in project hive by apache.
The class TestColumnPrunerProcCtx, method setup.
@BeforeClass
public static void setup() {
  List<String> ns = new ArrayList<>();
  ns.add("a");
  ns.add("b");
  List<TypeInfo> tis = new ArrayList<>();
  TypeInfo aType = TypeInfoFactory.booleanTypeInfo;
  TypeInfo bType = TypeInfoFactory.doubleTypeInfo;
  tis.add(aType);
  tis.add(bType);
  col1Type = TypeInfoFactory.getStructTypeInfo(ns, tis);
  col2Type = TypeInfoFactory.doubleTypeInfo;
  List<String> names = new ArrayList<>();
  names.add("col1");
  names.add("col2");
  List<TypeInfo> typeInfos = new ArrayList<>();
  typeInfos.add(col1Type);
  typeInfos.add(col2Type);
  col3Type = TypeInfoFactory.getStructTypeInfo(names, typeInfos);
}
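For context, getStructTypeInfo builds a struct type from parallel name and type lists, and the second call above nests the first struct inside another. A minimal standalone sketch of that composition (the class name and println check are illustrative, not part of the Hive test):

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class StructTypeInfoSketch {
  public static void main(String[] args) {
    // Inner struct<a:boolean,b:double>, as built in setup() above.
    TypeInfo inner = TypeInfoFactory.getStructTypeInfo(
        Arrays.asList("a", "b"),
        Arrays.<TypeInfo>asList(TypeInfoFactory.booleanTypeInfo, TypeInfoFactory.doubleTypeInfo));
    // Outer struct nests the inner struct as col1.
    TypeInfo outer = TypeInfoFactory.getStructTypeInfo(
        Arrays.asList("col1", "col2"),
        Arrays.<TypeInfo>asList(inner, TypeInfoFactory.doubleTypeInfo));
    // Expected output: struct<col1:struct<a:boolean,b:double>,col2:double>
    System.out.println(outer.getTypeName());
  }
}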
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.booleanTypeInfo in project hive by apache.
The class TestVectorNull, method doIsNullOnRandomDataType.
private boolean doIsNullOnRandomDataType(Random random, String functionName, boolean isFilter)
    throws Exception {
  String typeName;
  if (functionName.equals("not")) {
    typeName = "boolean";
  } else {
    typeName = VectorRandomRowSource.getRandomTypeName(
        random, SupportedTypes.ALL, /* allowedTypeNameSet */ null);
    typeName = VectorRandomRowSource.getDecoratedTypeName(
        random, typeName, SupportedTypes.ALL, /* allowedTypeNameSet */ null,
        /* depth */ 0, /* maxDepth */ 2);
  }
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
  GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  generationSpecList.add(generationSpec);
  explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(
      random, generationSpecList, /* maxComplexDepth */ 0,
      /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  List<String> columns = new ArrayList<String>();
  columns.add("col1");
  ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  String[] columnNames = columns.toArray(new String[0]);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  final GenericUDF udf;
  final ObjectInspector outputObjectInspector;
  switch (functionName) {
    case "isnull":
      udf = new GenericUDFOPNull();
      break;
    case "isnotnull":
      udf = new GenericUDFOPNotNull();
      break;
    case "not":
      udf = new GenericUDFOPNot();
      break;
    default:
      throw new RuntimeException("Unexpected function name " + functionName);
  }
  ObjectInspector[] argumentOIs = new ObjectInspector[] { objectInspector };
  outputObjectInspector = udf.initialize(argumentOIs);
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[NullTestMode.count][];
  for (int i = 0; i < NullTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    NullTestMode nullTestMode = NullTestMode.values()[i];
    switch (nullTestMode) {
      case ROW_MODE:
        if (!doRowCastTest(typeInfo, isFilter, columns, children, udf, exprDesc,
            randomRows, rowSource.rowStructObjectInspector(), resultObjects)) {
          return false;
        }
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        if (!doVectorCastTest(typeInfo, isFilter, columns, columnNames,
            rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
            children, udf, exprDesc, nullTestMode, batchSource,
            exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
          return false;
        }
        break;
      default:
        throw new RuntimeException("Unexpected Null test mode " + nullTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < NullTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      NullTestMode nullTestMode = NullTestMode.values()[v];
      if (isFilter && expectedResult == null && vectorResult != null) {
        // This is OK for filters: a NULL expected result behaves as false, so a
        // non-null vector result is only a mismatch if it is true.
        boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
        if (vectorBoolean) {
          Assert.fail("Row " + i + " typeName " + typeName
              + " outputTypeName " + outputTypeInfo.getTypeName()
              + " isFilter " + isFilter + " " + nullTestMode
              + " result is NOT NULL and true"
              + " does not match row-mode expected result is NULL which means false here"
              + " row values " + Arrays.toString(randomRows[i])
              + " exprDesc " + exprDesc.toString());
        }
      } else if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName
              + " isFilter " + isFilter + " " + nullTestMode
              + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString())
              + " does not match row-mode expected result is NULL "
              + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString())
              + " row values " + Arrays.toString(randomRows[i])
              + " exprDesc " + exprDesc.toString());
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName
              + " isFilter " + isFilter + " " + nullTestMode
              + " result " + vectorResult.toString()
              + " (" + vectorResult.getClass().getSimpleName() + ")"
              + " does not match row-mode expected result " + expectedResult.toString()
              + " (" + expectedResult.getClass().getSimpleName() + ")"
              + " row values " + Arrays.toString(randomRows[i])
              + " exprDesc " + exprDesc.toString());
        }
      }
    }
  }
  return true;
}
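A plausible way this helper is driven, modeled on the pattern used by similar TestVector* classes (the seed, retry count, test method name, and the retry-on-false convention are assumptions, not the actual Hive test code):

@Test
public void testIsNull() throws Exception {
  Random random = new Random(12345);  // illustrative seed
  for (String functionName : new String[] { "isnull", "isnotnull", "not" }) {
    for (boolean isFilter : new boolean[] { false, true }) {
      // Assumed convention: the helper returns false when it happened to
      // generate an unusable random type combination, so retry a few times.
      for (int tries = 0; tries < 5; tries++) {
        if (doIsNullOnRandomDataType(random, functionName, isFilter)) {
          break;
        }
      }
    }
  }
}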
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.booleanTypeInfo in project hive by apache.
The class ExprNodeDescExprFactory, method createSubqueryExpr.
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeDesc createSubqueryExpr(TypeCheckCtx ctx, ASTNode expr,
    SubqueryType subqueryType, Object[] inputs) throws CalciteSubquerySemanticException {
  // subqueryToRelNode might be null if the subquery expression appears anywhere other
  // than where expected, i.e. in a filter (WHERE/HAVING); throw an appropriate error
  // message in that case.
  Map<ASTNode, QBSubQueryParseInfo> subqueryToRelNode = ctx.getSubqueryToRelNode();
  if (subqueryToRelNode == null) {
    throw new CalciteSubquerySemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
        " Currently SubQuery expressions are only allowed as Where and Having Clause predicates"));
  }
  ASTNode subqueryOp = (ASTNode) expr.getChild(0);
  RelNode subqueryRel = subqueryToRelNode.get(expr).getSubQueryRelNode();
  // We will create a subquery expression of boolean type.
  switch (subqueryType) {
    case EXISTS: {
      if (subqueryToRelNode.get(expr).hasFullAggregate()) {
        return createConstantExpr(TypeInfoFactory.booleanTypeInfo, true);
      }
      return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
          SubqueryType.EXISTS);
    }
    case IN: {
      assert (inputs[2] != null);
      ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
      return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
          SubqueryType.IN, lhs);
    }
    case SCALAR: {
      // Only a single subquery expression column is supported.
      if (subqueryRel.getRowType().getFieldCount() != 1) {
        throw new CalciteSubquerySemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(
            "More than one column expression in subquery"));
      }
      // Figure out the subquery expression column's type.
      TypeInfo subExprType =
          TypeConverter.convert(subqueryRel.getRowType().getFieldList().get(0).getType());
      return new ExprNodeSubQueryDesc(subExprType, subqueryRel, SubqueryType.SCALAR);
    }
    case SOME: {
      assert (inputs[2] != null);
      ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
      return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
          SubqueryType.SOME, lhs, (ASTNode) subqueryOp.getChild(1));
    }
    case ALL: {
      assert (inputs[2] != null);
      ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
      return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
          SubqueryType.ALL, lhs, (ASTNode) subqueryOp.getChild(1));
    }
    default:
      return null;
  }
}
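All subquery kinds except SCALAR produce a boolean-typed node, and the EXISTS-with-full-aggregate case folds to a constant outright, since a full aggregate always returns exactly one row. A sketch of that constant (assuming createConstantExpr wraps ExprNodeConstantDesc; the mapping comments summarize the switch above):

// Illustrative SQL shapes and the SubqueryType each maps to in the code above:
//   EXISTS (SELECT ...)          -> SubqueryType.EXISTS, boolean type
//   x IN (SELECT ...)            -> SubqueryType.IN,     boolean type
//   (SELECT max(y) FROM ...)     -> SubqueryType.SCALAR, the column's own type
//   x > SOME (SELECT ...)        -> SubqueryType.SOME,   boolean type
//   x > ALL (SELECT ...)         -> SubqueryType.ALL,    boolean type

// Hypothetical equivalent of createConstantExpr(TypeInfoFactory.booleanTypeInfo, true):
// e.g. EXISTS (SELECT count(*) FROM t) always sees exactly one row, so the
// predicate folds to the boolean constant true.
ExprNodeConstantDesc alwaysTrue =
    new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);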
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.booleanTypeInfo in project hive by apache.
The class TestVectorizationContext, method testBetweenFilters.
@Test
public void testBetweenFilters() throws HiveException {
  // string tests
  ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(String.class, "col1", "table", false);
  ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc("Alpha");
  ExprNodeConstantDesc constDesc2 = new ExprNodeConstantDesc("Bravo");
  // string BETWEEN
  GenericUDFBetween udf = new GenericUDFBetween();
  List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
  // no NOT keyword
  children1.add(new ExprNodeConstantDesc(Boolean.FALSE));
  children1.add(col1Expr);
  children1.add(constDesc);
  children1.add(constDesc2);
  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children1);
  List<String> columns = new ArrayList<String>();
  columns.add("col0");
  columns.add("col1");
  columns.add("col2");
  VectorizationContext vc = new VectorizationContext("name", columns);
  VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterStringColumnBetween);
  // string NOT BETWEEN
  // has NOT keyword
  children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterStringColumnNotBetween);
  // CHAR tests
  CharTypeInfo charTypeInfo = new CharTypeInfo(10);
  col1Expr = new ExprNodeColumnDesc(charTypeInfo, "col1", "table", false);
  constDesc = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Alpha", 10));
  constDesc2 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10));
  // CHAR BETWEEN
  udf = new GenericUDFBetween();
  children1 = new ArrayList<ExprNodeDesc>();
  // no NOT keyword
  children1.add(new ExprNodeConstantDesc(Boolean.FALSE));
  children1.add(col1Expr);
  children1.add(constDesc);
  children1.add(constDesc2);
  exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children1);
  vc = new VectorizationContext("name", columns);
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterCharColumnBetween);
  // CHAR NOT BETWEEN
  // has NOT keyword
  children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterCharColumnNotBetween);
  // VARCHAR tests
  VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
  col1Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
  constDesc = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Alpha", 10));
  constDesc2 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Bravo", 10));
  // VARCHAR BETWEEN
  udf = new GenericUDFBetween();
  children1 = new ArrayList<ExprNodeDesc>();
  // no NOT keyword
  children1.add(new ExprNodeConstantDesc(Boolean.FALSE));
  children1.add(col1Expr);
  children1.add(constDesc);
  children1.add(constDesc2);
  exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children1);
  vc = new VectorizationContext("name", columns);
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterVarCharColumnBetween);
  // VARCHAR NOT BETWEEN
  // has NOT keyword
  children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterVarCharColumnNotBetween);
  // long BETWEEN
  children1.set(0, new ExprNodeConstantDesc(Boolean.FALSE));
  children1.set(1, new ExprNodeColumnDesc(Long.class, "col1", "table", false));
  children1.set(2, new ExprNodeConstantDesc(10));
  children1.set(3, new ExprNodeConstantDesc(20));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterLongColumnBetween);
  // long NOT BETWEEN
  children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterLongColumnNotBetween);
  // double BETWEEN
  children1.set(0, new ExprNodeConstantDesc(Boolean.FALSE));
  children1.set(1, new ExprNodeColumnDesc(Double.class, "col1", "table", false));
  children1.set(2, new ExprNodeConstantDesc(10.0d));
  children1.set(3, new ExprNodeConstantDesc(20.0d));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterDoubleColumnBetween);
  // double NOT BETWEEN
  children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertTrue(ve instanceof FilterDoubleColumnNotBetween);
  // timestamp BETWEEN
  children1.set(0, new ExprNodeConstantDesc(Boolean.FALSE));
  children1.set(1, new ExprNodeColumnDesc(Timestamp.class, "col1", "table", false));
  children1.set(2, new ExprNodeConstantDesc("2013-11-05 00:00:00.000"));
  children1.set(3, new ExprNodeConstantDesc("2013-11-06 00:00:00.000"));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertEquals(FilterTimestampColumnBetween.class, ve.getClass());
  // timestamp NOT BETWEEN
  children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE));
  ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
  assertEquals(FilterTimestampColumnNotBetween.class, ve.getClass());
}
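The same 4-child layout (NOT flag, column, lower bound, upper bound) recurs for every type above, so it could be captured in a small helper; a sketch, where buildBetween is a hypothetical name, not part of the Hive test:

// Hypothetical helper: builds the 4-child BETWEEN expression used throughout
// the test. Child 0 is the NOT flag, child 1 the column, children 2-3 the bounds.
private static ExprNodeGenericFuncDesc buildBetween(
    boolean isNot, ExprNodeDesc column, ExprNodeDesc low, ExprNodeDesc high) {
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(new ExprNodeConstantDesc(Boolean.valueOf(isNot)));  // NOT keyword present?
  children.add(column);
  children.add(low);
  children.add(high);
  return new ExprNodeGenericFuncDesc(
      TypeInfoFactory.booleanTypeInfo, new GenericUDFBetween(), children);
}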
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.booleanTypeInfo in project hive by apache.
The class TestGenericUDFMonthsBetween, method testMonthsBetweenForString.
@Test
public void testMonthsBetweenForString() throws HiveException {
  // Default run
  GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2 };
  udf.initialize(arguments);
  testMonthsBetweenForString(udf);
  // Run without round-off: a constant boolean third argument of false disables
  // the default rounding of the result.
  GenericUDFMonthsBetween udfWithoutRoundOff = new GenericUDFMonthsBetween();
  ObjectInspector vOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector vOI2 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector vOI3 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.booleanTypeInfo, new BooleanWritable(false));
  ObjectInspector[] args = { vOI1, vOI2, vOI3 };
  udfWithoutRoundOff.initialize(args);
  // Exercise the non-rounding UDF instance.
  testMonthsBetweenForString(udfWithoutRoundOff);
}
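The overloaded testMonthsBetweenForString(GenericUDFMonthsBetween) helper is not shown in this extract; a minimal sketch of the evaluation pattern it presumably uses (the method name, date values, and expected result are illustrative, assuming months_between's documented 31-day-month fraction and default 8-digit rounding):

// Hypothetical sketch: wrap the two date strings as deferred objects,
// evaluate the initialized UDF, and check the DoubleWritable result.
private void evaluateOnce(GenericUDFMonthsBetween udf) throws HiveException {
  GenericUDF.DeferredObject d1 = new GenericUDF.DeferredJavaObject(new Text("1995-02-02"));
  GenericUDF.DeferredObject d2 = new GenericUDF.DeferredJavaObject(new Text("1995-01-01"));
  DoubleWritable result =
      (DoubleWritable) udf.evaluate(new GenericUDF.DeferredObject[] { d1, d2 });
  // 1 whole month plus (2 - 1) / 31 of a month = 1.03225806 after rounding.
  Assert.assertEquals(1.03225806, result.get(), 1e-8);
}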