
Example 46 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From class TestVectorMapJoinFastRowHashMap, method testBigIntRowsExact.

@Test
public void testBigIntRowsExact() throws Exception {
    random = new Random(27722);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastLongHashMapContainer map =
        new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG,
            LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.LONG, verifyTable,
        new String[] { "bigint" }, /* doClipping */ false, /* useExactBytes */ true);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 47 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From class TestVectorMapJoinFastRowHashMap, method testBigIntRowsClipped.

@Test
public void testBigIntRowsClipped() throws Exception {
    random = new Random(326232);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastLongHashMapContainer map =
        new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG,
            LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.LONG, verifyTable,
        new String[] { "bigint" }, /* doClipping */ true, /* useExactBytes */ false);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)
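
The two bigint tests above differ only in the random seed and the doClipping/useExactBytes flags. As a hedged sketch, the shared setup could be factored into a helper inside the same test class; the helper name runBigIntRowsCase is hypothetical, while the fixture fields (random, tableDesc, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE) and the addAndVerifyRows method are assumed to be the ones already defined in TestVectorMapJoinFastRowHashMap.

// Hypothetical helper factoring the shared setup of the two bigint tests above.
// Assumes the existing fixture fields (random, tableDesc, LARGE_CAPACITY,
// LOAD_FACTOR, LARGE_WB_SIZE) and the addAndVerifyRows helper of
// TestVectorMapJoinFastRowHashMap.
private void runBigIntRowsCase(long seed, boolean doClipping, boolean useExactBytes)
        throws Exception {
    random = new Random(seed);
    VectorMapJoinFastLongHashMapContainer map =
        new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG,
            LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    Object[][] rows = valueSource.randomRows(1000);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.LONG, verifyTable,
        new String[] { "bigint" }, doClipping, useExactBytes);
}

// Usage equivalent to testBigIntRowsExact and testBigIntRowsClipped:
//   runBigIntRowsCase(27722, /* doClipping */ false, /* useExactBytes */ true);
//   runBigIntRowsCase(326232, /* doClipping */ true, /* useExactBytes */ false);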

Example 48 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From class TestVectorMapJoinFastRowHashMap, method testMultiKeyRows2.

@Test
public void testMultiKeyRows2() throws Exception {
    random = new Random(833099);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastMultiKeyHashMapContainer map =
        new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
            LARGE_WB_SIZE, -1, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.MULTI_KEY, verifyTable,
        new String[] { "string", "string" }, /* doClipping */ false, /* useExactBytes */ false);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 49 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From class TestVectorMapJoinFastRowHashMap, method testStringRowsClipped.

@Test
public void testStringRowsClipped() throws Exception {
    random = new Random(326232);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastStringHashMapContainer map =
        new VectorMapJoinFastStringHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
            LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.STRING, verifyTable,
        new String[] { "string" }, /* doClipping */ true, /* useExactBytes */ false);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 50 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From class TestVectorMapJoinFastRowHashMap, method testMultiKeyRowsExact2.

@Test
public void testMultiKeyRowsExact2() throws Exception {
    random = new Random(8235);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastMultiKeyHashMapContainer map =
        new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
            LARGE_WB_SIZE, -1, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.MULTI_KEY, verifyTable,
        new String[] { "smallint" }, /* doClipping */ false, /* useExactBytes */ true);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Aggregations

Classes used across these examples, with usage counts:

VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 50
Random (java.util.Random): 24
VerifyFastRowHashMap (org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap): 24
Test (org.junit.Test): 24
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 20
ArrayList (java.util.ArrayList): 19
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 19
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 19
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 19
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 19
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec): 18
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 17
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 15
VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx): 12
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 12
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 11
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 11
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 8
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 7
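
For context, a minimal sketch of using VectorRandomRowSource on its own, based only on the calls that appear in the examples above (init and randomRows) and the imports they list (java.util.Random and org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource). The seed and row count are arbitrary, and the class also supports other initialization paths (e.g. the GenerationSpec-based setup suggested by the aggregation list) that are not shown here.

// Minimal sketch, assuming only the init/randomRows calls shown above.
Random random = new Random(12345);                 // arbitrary seed
VectorRandomRowSource rowSource = new VectorRandomRowSource();
rowSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
    /* allowNulls */ true, /* isUnicodeOk */ true);
Object[][] rows = rowSource.randomRows(100);       // 100 rows of random column values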