Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class TestVectorDateAddSub, method doVectorDateAddSubTest:
private void doVectorDateAddSubTest(TypeInfo dateTimeStringTypeInfo, TypeInfo integerTypeInfo,
    List<String> columns, TypeInfo[] typeInfos, List<ExprNodeDesc> children, boolean isAdd,
    ExprNodeGenericFuncDesc exprDesc, DateAddSubTestMode dateAddSubTestMode,
    ColumnScalarMode columnScalarMode, VectorRandomBatchSource batchSource,
    VectorizedRowBatchCtx batchContext, Object[] resultObjects) throws Exception {
  HiveConf hiveConf = new HiveConf();
  if (dateAddSubTestMode == DateAddSubTestMode.ADAPTOR) {
    // Force the VectorUDFAdaptor (row-mode fallback) path when testing ADAPTOR mode.
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  DataTypePhysicalVariation[] dataTypePhysicalVariations = new DataTypePhysicalVariation[2];
  Arrays.fill(dataTypePhysicalVariations, DataTypePhysicalVariation.NONE);
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);
  if (dateAddSubTestMode == DateAddSubTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* dateTimeStringTypeInfo " +
        dateTimeStringTypeInfo.toString() +
        " dateAddSubTestMode " + dateAddSubTestMode +
        " columnScalarMode " + columnScalarMode +
        " vectorExpression " + vectorExpression.toString());
  }
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  // Read the DATE result back from the scratch column appended after the input columns.
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { TypeInfoFactory.dateTypeInfo }, new int[] { columns.size() });
  Object[] scrqtchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow,
        TypeInfoFactory.dateTypeInfo, resultObjects);
    rowIndex += batch.size;
  }
}
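All five excerpts in this listing hand the per-row readback to an extractResultObjects helper that is not part of the excerpt. Below is a minimal sketch of what the TypeInfo-flavored overload called above might look like; it assumes VectorExtractRow.extractRow(batch, batchIndex, objects) copies one value per initialized column and uses the standard ObjectInspector copy utilities. This is an illustration of the call site's contract, not the project's actual helper.

// Illustrative sketch only; the real helper lives in the Hive test classes.
private void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
    VectorExtractRow resultVectorExtractRow, Object[] scrqtchRow,
    TypeInfo targetTypeInfo, Object[] resultObjects) {
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(targetTypeInfo);
  boolean selectedInUse = batch.selectedInUse;
  int[] selected = batch.selected;
  for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
    // Honor the selection vector when the batch has been filtered.
    int batchIndex = (selectedInUse ? selected[logicalIndex] : logicalIndex);
    // Copies the single projected column of this row into scrqtchRow[0].
    resultVectorExtractRow.extractRow(batch, batchIndex, scrqtchRow);
    // Deep-copy the value: VectorExtractRow may reuse writables across rows.
    resultObjects[rowIndex++] = ObjectInspectorUtils.copyToStandardObject(
        scrqtchRow[0], objectInspector,
        ObjectInspectorUtils.ObjectInspectorCopyOption.WRITABLE);
  }
}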
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class TestVectorIndex, method doVectorCastTest:
private boolean doVectorCastTest(TypeInfo typeInfo, List<String> columns, String[] columnNames,
    TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
    List<ExprNodeDesc> children, GenericUDF udf, ExprNodeGenericFuncDesc exprDesc,
    IndexTestMode indexTestMode, VectorRandomBatchSource batchSource,
    ObjectInspector objectInspector, TypeInfo outputTypeInfo, Object[] resultObjects)
    throws Exception {
  HiveConf hiveConf = new HiveConf();
  if (indexTestMode == IndexTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  // Unlike the other excerpts, this one requests the expression explicitly in projection mode.
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(
      exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
  vectorExpression.transientInit(hiveConf);
  if (indexTestMode == IndexTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
        " indexTestMode " + indexTestMode +
        " vectorExpression " + vectorExpression.toString());
  }
  System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  VectorRandomRowSource rowSource = batchSource.getRowSource();
  // Include the vectorization context's scratch columns so the created batch
  // has a vector allocated for the expression's output.
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
      columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null,
      /* partitionColumnCount */ 0,
      /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null,
      vectorizationContext.getScratchColumnTypeNames(),
      vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { outputTypeInfo }, new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scrqtchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow,
        objectInspector, resultObjects);
    rowIndex += batch.size;
  }
  return true;
}
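Stripped of the test scaffolding, the VectorExtractRow pattern shared by these excerpts is small: init points it at one or more projected columns, and extractRow pulls a single row out of a batch. Here is a self-contained sketch with made-up values; it assumes the standard Hive LongColumnVector class and that a bigint column extracts as a writable object.

// Build a one-column batch by hand instead of going through a batch context.
VectorizedRowBatch demoBatch = new VectorizedRowBatch(1);
LongColumnVector longCol = new LongColumnVector();
longCol.vector[0] = 42L;
demoBatch.cols[0] = longCol;
demoBatch.size = 1;
// Project column 0 as bigint and extract row 0.
VectorExtractRow extractRow = new VectorExtractRow();
extractRow.init(new TypeInfo[] { TypeInfoFactory.longTypeInfo }, new int[] { 0 });
Object[] row = new Object[1];
extractRow.extractRow(demoBatch, 0, row);
// row[0] now holds the value from row 0 (a LongWritable containing 42).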
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class TestVectorCastStatement, method doVectorCastTest:
private boolean doVectorCastTest(TypeInfo typeInfo, TypeInfo targetTypeInfo,
    List<String> columns, String[] columnNames, TypeInfo[] typeInfos,
    DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children,
    CastStmtTestMode castStmtTestMode, VectorRandomBatchSource batchSource,
    Object[] resultObjects) throws Exception {
  GenericUDF udf;
  try {
    udf = VectorizationContext.getGenericUDFForCast(targetTypeInfo);
  } catch (HiveException e) {
    // No CAST to this target type exists; signal the caller to skip.
    return false;
  }
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(targetTypeInfo, udf, children);
  HiveConf hiveConf = new HiveConf();
  if (castStmtTestMode == CastStmtTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);
  if (castStmtTestMode == CastStmtTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
        " castStmtTestMode " + castStmtTestMode +
        " vectorExpression " + vectorExpression.toString());
  }
  VectorRandomRowSource rowSource = batchSource.getRowSource();
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
      columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null,
      /* partitionColumnCount */ 0,
      /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null,
      vectorizationContext.getScratchColumnTypeNames(),
      vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { targetTypeInfo }, new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scrqtchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow, resultObjects);
    rowIndex += batch.size;
  }
  return true;
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class TestVectorStringUnary, method doVectorIfTest:
private void doVectorIfTest(TypeInfo typeInfo, TypeInfo targetTypeInfo, List<String> columns,
    TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
    List<ExprNodeDesc> children, StringUnaryTestMode stringUnaryTestMode,
    VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext,
    GenericUDF genericUdf, Object[] resultObjects) throws Exception {
  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(targetTypeInfo, genericUdf, children);
  HiveConf hiveConf = new HiveConf();
  if (stringUnaryTestMode == StringUnaryTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  if (stringUnaryTestMode == StringUnaryTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
        " stringUnaryTestMode " + stringUnaryTestMode +
        " vectorExpression " + vectorExpression.toString());
  }
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  // The result lands in the first scratch column, directly after the input columns.
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(new TypeInfo[] { targetTypeInfo }, new int[] { columns.size() });
  Object[] scrqtchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow,
        targetTypeInfo, resultObjects);
    rowIndex += batch.size;
  }
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class TestVectorTimestampExtract, method doVectorCastTest:
private boolean doVectorCastTest(TypeInfo dateTimeStringTypeInfo, List<String> columns,
    String[] columnNames, TypeInfo[] typeInfos,
    DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children,
    ExprNodeGenericFuncDesc exprDesc, TimestampExtractTestMode timestampExtractTestMode,
    VectorRandomBatchSource batchSource, Object[] resultObjects) throws Exception {
  HiveConf hiveConf = new HiveConf();
  if (timestampExtractTestMode == TimestampExtractTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);
  if (timestampExtractTestMode == TimestampExtractTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* dateTimeStringTypeInfo " +
        dateTimeStringTypeInfo.toString() +
        " timestampExtractTestMode " + timestampExtractTestMode +
        " vectorExpression " + vectorExpression.toString());
  }
  VectorRandomRowSource rowSource = batchSource.getRowSource();
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
      columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null,
      /* partitionColumnCount */ 0,
      /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null,
      vectorizationContext.getScratchColumnTypeNames(),
      vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  // Extracting a timestamp field always yields an int.
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { TypeInfoFactory.intTypeInfo },
      new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scrqtchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow,
        TypeInfoFactory.intTypeInfo, resultObjects);
    rowIndex += batch.size;
  }
  return true;
}
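What the callers do with resultObjects falls outside this listing; in tests of this shape the vector-mode results are typically compared entry-by-entry against results computed by ordinary row-mode evaluation. A hedged sketch of such a check, where rowModeResults and rowCount are hypothetical names rather than identifiers from the snippets:

for (int i = 0; i < rowCount; i++) {
  Object expected = rowModeResults[i]; // hypothetical: produced by non-vectorized evaluation
  Object actual = resultObjects[i];
  if (expected == null ? actual != null : !expected.equals(actual)) {
    Assert.fail("Row " + i + ": expected " + expected + " but got " + actual);
  }
}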