Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorTimestampExtract, method doVectorCastTest.
private boolean doVectorCastTest(TypeInfo dateTimeStringTypeInfo, List<String> columns,
    String[] columnNames, TypeInfo[] typeInfos,
    DataTypePhysicalVariation[] dataTypePhysicalVariations,
    List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc,
    TimestampExtractTestMode timestampExtractTestMode, VectorRandomBatchSource batchSource,
    Object[] resultObjects) throws Exception {

  HiveConf hiveConf = new HiveConf();
  if (timestampExtractTestMode == TimestampExtractTestMode.ADAPTOR) {
    // Force the adaptor path instead of a native vector expression.
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }

  VectorizationContext vectorizationContext =
      new VectorizationContext("name", columns, Arrays.asList(typeInfos),
          Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);

  if (timestampExtractTestMode == TimestampExtractTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println(
        "*NO NATIVE VECTOR EXPRESSION* dateTimeStringTypeInfo " + dateTimeStringTypeInfo.toString() +
        " timestampExtractTestMode " + timestampExtractTestMode +
        " vectorExpression " + vectorExpression.toString());
  }

  VectorRandomRowSource rowSource = batchSource.getRowSource();
  VectorizedRowBatchCtx batchContext =
      new VectorizedRowBatchCtx(
          columnNames,
          rowSource.typeInfos(),
          rowSource.dataTypePhysicalVariations(),
          /* dataColumnNums */ null,
          /* partitionColumnCount */ 0,
          /* virtualColumnCount */ 0,
          /* neededVirtualColumns */ null,
          vectorizationContext.getScratchColumnTypeNames(),
          vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();

  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { TypeInfoFactory.intTypeInfo },
      new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scratchRow = new Object[1];

  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow,
        TypeInfoFactory.intTypeInfo, resultObjects);
    rowIndex += batch.size;
  }
  return true;
}
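The extractResultObjects helper invoked in the loop above is not part of this snippet. A minimal sketch of what it plausibly does, assuming VectorExtractRow.extractRow and the standard ObjectInspector copy utilities; the signature is inferred from the call site, not shown in this listing:

private void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
    VectorExtractRow resultVectorExtractRow, Object[] scratchRow,
    TypeInfo targetTypeInfo, Object[] resultObjects) {
  // Inspector used to deep-copy values out of the batch's writables.
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(targetTypeInfo);
  boolean selectedInUse = batch.selectedInUse;
  int[] selected = batch.selected;
  for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
    // Honor the selected-rows vector when the batch has been filtered.
    final int batchIndex = (selectedInUse ? selected[logicalIndex] : logicalIndex);
    resultVectorExtractRow.extractRow(batch, batchIndex, scratchRow);
    // Deep-copy so the next batch cannot overwrite earlier results.
    resultObjects[rowIndex++] = ObjectInspectorUtils.copyToStandardObject(
        scratchRow[0], objectInspector, ObjectInspectorCopyOption.WRITABLE);
  }
}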
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorTimestampExtract, method doIfTestOneTimestampExtract.
private void doIfTestOneTimestampExtract(Random random, String dateTimeStringTypeName,
    String extractFunctionName) throws Exception {

  TypeInfo dateTimeStringTypeInfo =
      TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName);
  PrimitiveCategory dateTimeStringPrimitiveCategory =
      ((PrimitiveTypeInfo) dateTimeStringTypeInfo).getPrimitiveCategory();
  boolean isStringFamily =
      (dateTimeStringPrimitiveCategory == PrimitiveCategory.STRING ||
       dateTimeStringPrimitiveCategory == PrimitiveCategory.CHAR ||
       dateTimeStringPrimitiveCategory == PrimitiveCategory.VARCHAR);

  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();

  List<String> columns = new ArrayList<String>();
  int columnNum = 1;
  ExprNodeDesc col1Expr;
  if (!isStringFamily) {
    generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo));
  } else {
    // Generate string-family values that hold timestamps.
    generationSpecList.add(
        GenerationSpec.createStringFamilyOtherTypeValue(
            dateTimeStringTypeInfo, TypeInfoFactory.timestampTypeInfo));
  }
  explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);

  String columnName = "col" + (columnNum++);
  col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo, columnName, "table", false);
  columns.add(columnName);

  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);

  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);

  // ----------------------------------------------------------------------------------------------

  String[] columnNames = columns.toArray(new String[0]);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);

  // hour/minute/second are not meaningful for a DATE input; skip those combinations.
  if (dateTimeStringPrimitiveCategory == PrimitiveCategory.DATE &&
      (extractFunctionName.equals("hour") ||
       extractFunctionName.equals("minute") ||
       extractFunctionName.equals("second"))) {
    return;
  }

  final GenericUDF udf;
  switch (extractFunctionName) {
  case "day":
    udf = new UDFDayOfMonth();
    break;
  case "dayofweek":
    GenericUDFBridge dayOfWeekUDFBridge = new GenericUDFBridge();
    dayOfWeekUDFBridge.setUdfClassName(UDFDayOfWeek.class.getName());
    udf = dayOfWeekUDFBridge;
    break;
  case "hour":
    udf = new UDFHour();
    break;
  case "minute":
    udf = new UDFMinute();
    break;
  case "month":
    udf = new UDFMonth();
    break;
  case "second":
    udf = new UDFSecond();
    break;
  case "yearweek":
    // "yearweek" is evaluated through the weekofyear UDF via a bridge.
    GenericUDFBridge weekOfYearUDFBridge = new GenericUDFBridge();
    weekOfYearUDFBridge.setUdfClassName(UDFWeekOfYear.class.getName());
    udf = weekOfYearUDFBridge;
    break;
  case "year":
    udf = new UDFYear();
    break;
  default:
    throw new RuntimeException("Unexpected extract function name " + extractFunctionName);
  }

  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children);

  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[TimestampExtractTestMode.count][];
  for (int i = 0; i < TimestampExtractTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    TimestampExtractTestMode timestampExtractTestMode = TimestampExtractTestMode.values()[i];
    switch (timestampExtractTestMode) {
    case ROW_MODE:
      if (!doRowCastTest(dateTimeStringTypeInfo, columns, children, exprDesc, randomRows,
          rowSource.rowStructObjectInspector(), resultObjects)) {
        return;
      }
      break;
    case ADAPTOR:
    case VECTOR_EXPRESSION:
      if (!doVectorCastTest(dateTimeStringTypeInfo, columns, columnNames,
          rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprDesc,
          timestampExtractTestMode, batchSource, resultObjects)) {
        return;
      }
      break;
    default:
      throw new RuntimeException(
          "Unexpected timestamp extract test mode " + timestampExtractTestMode);
    }
  }

  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < TimestampExtractTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail(
              "Row " + i +
              " dateTimeStringTypeName " + dateTimeStringTypeName +
              " extractFunctionName " + extractFunctionName +
              " " + TimestampExtractTestMode.values()[v] +
              " result is NULL " +
              (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) +
              " does not match row-mode expected result is NULL " +
              (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) +
              " row values " + Arrays.toString(randomRows[i]));
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail(
              "Row " + i +
              " dateTimeStringTypeName " + dateTimeStringTypeName +
              " extractFunctionName " + extractFunctionName +
              " " + TimestampExtractTestMode.values()[v] +
              " result " + vectorResult.toString() +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result " + expectedResult.toString() +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " row values " + Arrays.toString(randomRows[i]));
        }
      }
    }
  }
}
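The listing does not show how this method is driven. A hypothetical driver, mirroring the type names and extract-function names the method itself handles (the seed and the exact lists are illustrative, not from this listing):

@Test
public void testTimestampExtract() throws Exception {
  Random random = new Random(7743);
  for (String typeName : new String[] { "timestamp", "date", "string" }) {
    for (String func : new String[] { "year", "month", "day", "dayofweek",
        "hour", "minute", "second", "yearweek" }) {
      doIfTestOneTimestampExtract(random, typeName, func);
    }
  }
}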
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorMapJoinFastRowHashMap, method testMultiKeyRowsClipped1.
@Test
public void testMultiKeyRowsClipped1() throws Exception {
  random = new Random(2331);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastMultiKeyHashMapContainer map =
      new VectorMapJoinFastMultiKeyHashMapContainer(
          false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();

  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);

  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.MULTI_KEY, verifyTable,
      new String[] { "varchar(20)", "date", "interval_day_time" },
      /* doClipping */ true, /* useExactBytes */ false);
}
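LARGE_CAPACITY, LOAD_FACTOR, and LARGE_WB_SIZE are fixture constants defined elsewhere in the test class. Plausible definitions, with illustrative values chosen so that 1000 entries stay well below the resize threshold (resizing triggers once size exceeds capacity * loadFactor):

// Illustrative fixture constants; the real values live elsewhere in the test class.
private static final int LARGE_CAPACITY = 8388608;         // 2^23 slots, no rehash at 1000 rows
private static final float LOAD_FACTOR = 0.75f;            // resize once size > capacity * loadFactor
private static final int LARGE_WB_SIZE = 10 * 1024 * 1024; // write-buffer segment size, in bytes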
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorMapJoinFastRowHashMap, method testIntRows.
@Test
public void testIntRows() throws Exception {
  random = new Random(927337);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastLongHashMapContainer map =
      new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.INT,
          LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();

  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);

  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.INT, verifyTable,
      new String[] { "int" }, /* doClipping */ false, /* useExactBytes */ false);
}
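The tableDesc passed to the long-keyed container is also fixture state not shown here. A minimal sketch of a plausible setup, assuming org.apache.hadoop.hive.ql.plan.TableDesc and the serdeConstants property keys; the column name "key" is hypothetical:

// Hypothetical fixture setup; the real test defines tableDesc elsewhere.
private static TableDesc createTableDesc() {
  Properties props = new Properties();
  props.setProperty(serdeConstants.LIST_COLUMNS, "key");
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int");
  TableDesc tableDesc = new TableDesc();
  tableDesc.setProperties(props);
  return tableDesc;
}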
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorMapJoinFastRowHashMap, method testStringRowsExact.
@Test
public void testStringRowsExact() throws Exception {
  random = new Random(8235);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastStringHashMapContainer map =
      new VectorMapJoinFastStringHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
          LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();

  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);

  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.STRING, verifyTable,
      new String[] { "string" }, /* doClipping */ false, /* useExactBytes */ true);
}
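Taken together, the snippets show the common VectorRandomRowSource recipe: construct, init with a type universe, then pull random rows and the schema the generator chose. A minimal standalone sketch using only the calls seen above (the seed and counts are arbitrary; the third init argument is presumably a max complex-type depth, inferred from the call sites rather than confirmed by this listing):

Random random = new Random(1234);
VectorRandomRowSource source = new VectorRandomRowSource();
source.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
    /* allowNulls */ false, /* isUnicodeOk */ false);
Object[][] rows = source.randomRows(1000);
TypeInfo[] typeInfos = source.typeInfos();                 // schema the generator chose
DataTypePhysicalVariation[] variations = source.dataTypePhysicalVariations();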