use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
the class TestUtilities method testSerializeTimestamp.
@Test
public void testSerializeTimestamp() {
  Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
  ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
  children.add(constant);
  ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc(TypeInfoFactory.timestampTypeInfo,
      new GenericUDFFromUtcTimestamp(), children);
  assertEquals(desc.getExprString(),
      SerializationUtilities.deserializeExpression(
          SerializationUtilities.serializeExpression(desc)).getExprString());
}
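The same round trip can be written without supplying the output TypeInfo by hand: ExprNodeGenericFuncDesc.newInstance initializes the UDF against its children and derives the return type itself. Below is a minimal standalone sketch of that variant; the class name ExprRoundTripSketch and the choice of GenericUDFConcat are illustrative, not taken from the test above.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;

public class ExprRoundTripSketch {
  public static void main(String[] args) throws Exception {
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeConstantDesc("hello "));
    children.add(new ExprNodeConstantDesc("world"));
    // newInstance initializes the UDF against the children to infer the return type,
    // so no TypeInfo has to be supplied explicitly.
    ExprNodeGenericFuncDesc desc =
        ExprNodeGenericFuncDesc.newInstance(new GenericUDFConcat(), children);
    String serialized = SerializationUtilities.serializeExpression(desc);
    ExprNodeGenericFuncDesc restored = SerializationUtilities.deserializeExpression(serialized);
    // The round trip should preserve the expression string.
    System.out.println(desc.getExprString().equals(restored.getExprString()));
  }
}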
use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
the class TestVectorDateDiff method doDateDiffTestsWithDiffColumnScalar.
private void doDateDiffTestsWithDiffColumnScalar(Random random, String dateTimeStringTypeName1,
    String dateTimeStringTypeName2, ColumnScalarMode columnScalarMode) throws Exception {
  TypeInfo dateTimeStringTypeInfo1 = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName1);
  PrimitiveCategory dateTimeStringPrimitiveCategory1 =
      ((PrimitiveTypeInfo) dateTimeStringTypeInfo1).getPrimitiveCategory();
  boolean isStringFamily1 =
      (dateTimeStringPrimitiveCategory1 == PrimitiveCategory.STRING ||
          dateTimeStringPrimitiveCategory1 == PrimitiveCategory.CHAR ||
          dateTimeStringPrimitiveCategory1 == PrimitiveCategory.VARCHAR);
  TypeInfo dateTimeStringTypeInfo2 = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName2);
  PrimitiveCategory dateTimeStringPrimitiveCategory2 =
      ((PrimitiveTypeInfo) dateTimeStringTypeInfo2).getPrimitiveCategory();
  boolean isStringFamily2 =
      (dateTimeStringPrimitiveCategory2 == PrimitiveCategory.STRING ||
          dateTimeStringPrimitiveCategory2 == PrimitiveCategory.CHAR ||
          dateTimeStringPrimitiveCategory2 == PrimitiveCategory.VARCHAR);
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  List<String> columns = new ArrayList<String>();
  int columnNum = 1;
  ExprNodeDesc col1Expr;
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
      columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
    if (!isStringFamily1) {
      generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo1));
    } else {
      generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(
          dateTimeStringTypeInfo1, TypeInfoFactory.dateTypeInfo));
    }
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo1, columnName, "table", false);
    columns.add(columnName);
  } else {
    Object scalar1Object;
    if (!isStringFamily1) {
      scalar1Object = VectorRandomRowSource.randomPrimitiveObject(
          random, (PrimitiveTypeInfo) dateTimeStringTypeInfo1);
    } else {
      scalar1Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(
          random, dateTimeStringTypeInfo1, TypeInfoFactory.dateTypeInfo, false);
    }
    col1Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo1, scalar1Object);
  }
  ExprNodeDesc col2Expr;
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
      columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
    if (!isStringFamily2) {
      generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo2));
    } else {
      generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(
          dateTimeStringTypeInfo2, TypeInfoFactory.dateTypeInfo));
    }
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col2Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo2, columnName, "table", false);
    columns.add(columnName);
  } else {
    Object scalar2Object;
    if (!isStringFamily2) {
      scalar2Object = VectorRandomRowSource.randomPrimitiveObject(
          random, (PrimitiveTypeInfo) dateTimeStringTypeInfo2);
    } else {
      scalar2Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(
          random, dateTimeStringTypeInfo2, TypeInfoFactory.dateTypeInfo, false);
    }
    col2Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo2, scalar2Object);
  }
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  children.add(col2Expr);
  // ----------------------------------------------------------------------------------------------
  String[] columnNames = columns.toArray(new String[0]);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  String[] outputScratchTypeNames = new String[] { "date" };
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
      columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null, outputScratchTypeNames, null);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[DateDiffTestMode.count][];
  for (int i = 0; i < DateDiffTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    GenericUDF udf = new GenericUDFDateDiff();
    ExprNodeGenericFuncDesc exprDesc =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children);
    DateDiffTestMode dateDiffTestMode = DateDiffTestMode.values()[i];
    switch (dateDiffTestMode) {
      case ROW_MODE:
        doRowDateAddSubTest(dateTimeStringTypeInfo1, dateTimeStringTypeInfo2, columns, children,
            exprDesc, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(),
            resultObjects);
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        doVectorDateAddSubTest(dateTimeStringTypeInfo1, dateTimeStringTypeInfo2, columns,
            rowSource.typeInfos(), children, exprDesc, dateDiffTestMode, columnScalarMode,
            batchSource, batchContext, resultObjects);
        break;
      default:
        throw new RuntimeException("Unexpected date diff test mode " + dateDiffTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < DateDiffTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " " + DateDiffTestMode.values()[v] + " " + columnScalarMode +
              " result is NULL " + (vectorResult == null) +
              " does not match row-mode expected result is NULL " + (expectedResult == null) +
              " row values " + Arrays.toString(randomRows[i]));
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " " + DateDiffTestMode.values()[v] + " " + columnScalarMode +
              " result " + vectorResult.toString() +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result " + expectedResult.toString() +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " row values " + Arrays.toString(randomRows[i]));
        }
      }
    }
  }
}
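The ROW_MODE branch above delegates to a private helper, but the underlying pattern is just the standard ExprNodeEvaluator flow. Here is a minimal standalone sketch of evaluating the same datediff expression over one hand-built row; the class name DateDiffRowModeSketch and the literal dates are illustrative, not from the test.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DateDiffRowModeSketch {
  public static void main(String[] args) throws Exception {
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "table", false));
    children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col2", "table", false));
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.intTypeInfo, new GenericUDFDateDiff(), children);
    // Describe a row of two string columns so the column descs can be resolved.
    StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("col1", "col2"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc);
    evaluator.initialize(rowOI);
    Object row = Arrays.asList("2013-07-23", "2013-07-20");
    System.out.println(evaluator.evaluate(row)); // datediff of the two dates: 3
  }
}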
use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
the class TestVectorNull method doIsNullOnRandomDataType.
private boolean doIsNullOnRandomDataType(Random random, String functionName, boolean isFilter)
    throws Exception {
  String typeName;
  if (functionName.equals("not")) {
    typeName = "boolean";
  } else {
    typeName = VectorRandomRowSource.getRandomTypeName(
        random, SupportedTypes.ALL, /* allowedTypeNameSet */ null);
    typeName = VectorRandomRowSource.getDecoratedTypeName(
        random, typeName, SupportedTypes.ALL, /* allowedTypeNameSet */ null,
        /* depth */ 0, /* maxDepth */ 2);
  }
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
  // ----------------------------------------------------------------------------------------------
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
  // ----------------------------------------------------------------------------------------------
  GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  generationSpecList.add(generationSpec);
  explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  List<String> columns = new ArrayList<String>();
  columns.add("col1");
  ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  String[] columnNames = columns.toArray(new String[0]);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  final GenericUDF udf;
  final ObjectInspector outputObjectInspector;
  switch (functionName) {
    case "isnull":
      udf = new GenericUDFOPNull();
      break;
    case "isnotnull":
      udf = new GenericUDFOPNotNull();
      break;
    case "not":
      udf = new GenericUDFOPNot();
      break;
    default:
      throw new RuntimeException("Unexpected function name " + functionName);
  }
  ObjectInspector[] argumentOIs = new ObjectInspector[] { objectInspector };
  outputObjectInspector = udf.initialize(argumentOIs);
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[NullTestMode.count][];
  for (int i = 0; i < NullTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    NullTestMode nullTestMode = NullTestMode.values()[i];
    switch (nullTestMode) {
      case ROW_MODE:
        if (!doRowCastTest(typeInfo, isFilter, columns, children, udf, exprDesc, randomRows,
            rowSource.rowStructObjectInspector(), resultObjects)) {
          return false;
        }
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        if (!doVectorCastTest(typeInfo, isFilter, columns, columnNames, rowSource.typeInfos(),
            rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, nullTestMode,
            batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
          return false;
        }
        break;
      default:
        throw new RuntimeException("Unexpected null test mode " + nullTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < NullTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      NullTestMode nullTestMode = NullTestMode.values()[v];
      if (isFilter && expectedResult == null && vectorResult != null) {
        // This is OK: a filter may return non-NULL false where row-mode returned NULL,
        // since NULL means the row is filtered out; only a true result is a mismatch.
        boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
        if (vectorBoolean) {
          Assert.fail("Row " + i + " typeName " + typeName +
              " outputTypeName " + outputTypeInfo.getTypeName() +
              " isFilter " + isFilter + " " + nullTestMode +
              " result is NOT NULL and true" +
              " does not match row-mode expected result is NULL which means false here" +
              " row values " + Arrays.toString(randomRows[i]) +
              " exprDesc " + exprDesc.toString());
        }
      } else if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter +
              " " + nullTestMode +
              " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) +
              " does not match row-mode expected result is NULL " +
              (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) +
              " row values " + Arrays.toString(randomRows[i]) +
              " exprDesc " + exprDesc.toString());
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter +
              " " + nullTestMode +
              " result " + vectorResult.toString() +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result " + expectedResult.toString() +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " row values " + Arrays.toString(randomRows[i]) +
              " exprDesc " + exprDesc.toString());
        }
      }
    }
  }
  return true;
}
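The initialize-then-wrap step above (call udf.initialize to learn the output inspector, then build the ExprNodeGenericFuncDesc) is the core pattern. A minimal standalone sketch of just that step follows; the class name IsNullDescSketch is hypothetical, and the string column type is an arbitrary choice.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class IsNullDescSketch {
  public static void main(String[] args) throws Exception {
    GenericUDF udf = new GenericUDFOPNull();
    ObjectInspector argOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
        TypeInfoFactory.stringTypeInfo);
    // initialize() validates the argument types and reports the output inspector.
    ObjectInspector outputOI = udf.initialize(new ObjectInspector[] { argOI });
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputOI);
    System.out.println(outputTypeInfo.getTypeName()); // boolean
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "table", false));
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, udf, children);
    System.out.println(exprDesc.getExprString());
  }
}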
use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
the class TestVectorStringConcat method doVectorStringConcatTest.
private void doVectorStringConcatTest(TypeInfo stringTypeInfo1, TypeInfo stringTypeInfo2,
    List<String> columns, TypeInfo[] typeInfos, List<ExprNodeDesc> children,
    StringConcatTestMode stringConcatTestMode, ColumnScalarMode columnScalarMode,
    VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext,
    ObjectInspector rowInspector, GenericUDF genericUdf, Object[] resultObjects) throws Exception {
  HiveConf hiveConf = new HiveConf();
  if (stringConcatTestMode == StringConcatTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  DataTypePhysicalVariation[] dataTypePhysicalVariations = new DataTypePhysicalVariation[2];
  Arrays.fill(dataTypePhysicalVariations, DataTypePhysicalVariation.NONE);
  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, genericUdf, children);
  // ---------------------------------------
  // Just so we can get the output type...
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
  evaluator.initialize(rowInspector);
  ObjectInspector objectInspector = evaluator.getOutputOI();
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(objectInspector);
  /*
   * Again with the correct output type...
   */
  exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
  // ---------------------------------------
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);
  if (stringConcatTestMode == StringConcatTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* stringTypeInfo1 " + stringTypeInfo1.toString() +
        " stringTypeInfo2 " + stringTypeInfo2.toString() +
        " stringConcatTestMode " + stringConcatTestMode +
        " columnScalarMode " + columnScalarMode +
        " vectorExpression " + vectorExpression.toString());
  }
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo }, new int[] { columns.size() });
  Object[] scratchRow = new Object[1];
  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* stringTypeInfo1 " + stringTypeInfo1.toString() +
      " stringTypeInfo2 " + stringTypeInfo2.toString() +
      " stringConcatTestMode " + stringConcatTestMode +
      " columnScalarMode " + columnScalarMode +
      " vectorExpression " + vectorExpression.toString());
  */
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow,
        objectInspector, resultObjects);
    rowIndex += batch.size;
  }
}
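Stripped of the test scaffolding, the vectorized path above boils down to two steps: compile the ExprNodeGenericFuncDesc into a VectorExpression via a VectorizationContext, then evaluate it batch by batch. A condensed sketch of just those steps follows; the class name VectorEvalSketch and the method split are illustrative, and the batch is assumed to be populated elsewhere (e.g., by scaffolding like VectorRandomBatchSource above).

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

public class VectorEvalSketch {
  static VectorExpression compile(ExprNodeGenericFuncDesc exprDesc, List<String> columns,
      TypeInfo[] typeInfos, DataTypePhysicalVariation[] variations, HiveConf hiveConf)
      throws Exception {
    // Maps column names/types to vector column indexes, then picks a native
    // vector kernel or wraps the UDF in an adaptor.
    VectorizationContext vContext = new VectorizationContext("name", columns,
        Arrays.asList(typeInfos), Arrays.asList(variations), hiveConf);
    VectorExpression vectorExpression = vContext.getVectorExpression(exprDesc);
    vectorExpression.transientInit(hiveConf);
    return vectorExpression;
  }

  static void evaluate(VectorExpression vectorExpression, VectorizedRowBatch batch) {
    // Evaluates in place: results land in the expression's output column of the batch.
    vectorExpression.evaluate(batch);
  }
}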
use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
the class TestVectorSubStr method doVectorIfTest.
private void doVectorIfTest(TypeInfo typeInfo, TypeInfo targetTypeInfo, List<String> columns,
    TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
    List<ExprNodeDesc> children, SubStrTestMode subStrTestMode,
    VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext,
    GenericUDF genericUdf, Object[] resultObjects) throws Exception {
  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(targetTypeInfo, genericUdf, children);
  HiveConf hiveConf = new HiveConf();
  if (subStrTestMode == SubStrTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  if (subStrTestMode == SubStrTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
        " subStrTestMode " + subStrTestMode +
        " vectorExpression " + vectorExpression.toString());
  }
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(new TypeInfo[] { targetTypeInfo }, new int[] { columns.size() });
  Object[] scratchRow = new Object[1];
  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " targetTypeInfo " + targetTypeInfo.toString() +
      " subStrTestMode " + subStrTestMode +
      " vectorExpression " + vectorExpression.getClass().getSimpleName());
  */
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow,
        targetTypeInfo, resultObjects);
    rowIndex += batch.size;
  }
}
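Both of the last two tests probe the same failure mode: if the vectorizer finds no native kernel for the expression, it falls back to wrapping the row-mode UDF. A sketch of the check, as the tests above use it; the comment wording is mine, not from the source.

// Detecting fallback: when no native vector kernel matches, getVectorExpression
// returns a VectorUDFAdaptor that invokes the row-mode GenericUDF per row.
if (vectorExpression instanceof VectorUDFAdaptor) {
  // Not a native vector expression; expect row-at-a-time performance.
}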