Search in sources:

Example 41 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in the Apache Hive project.

From the class TestVectorMapJoinFastRowHashMap, method testMultiKeyRowsExact1.

@Test
public void testMultiKeyRowsExact1() throws Exception {
    random = new Random(8235);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastMultiKeyHashMapContainer map = new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4, /* allowNulls */
    false, /* isUnicodeOk */
    false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.MULTI_KEY, verifyTable, new String[] { "string", "string", "string", "string" }, /* doClipping */
    false, /* useExactBytes */
    true);
}
Also used: Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 42 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in the Apache Hive project.

From the class TestVectorMapJoinFastRowHashMap, method testBigIntRows.

@Test
public void testBigIntRows() throws Exception {
    random = new Random(927337);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastLongHashMapContainer map = new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4, /* allowNulls */
    false, /* isUnicodeOk */
    false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.LONG, verifyTable, new String[] { "bigint" }, /* doClipping */
    false, /* useExactBytes */
    false);
}
Also used: Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 43 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in the Apache Hive project.

From the class TestVectorMapJoinFastRowHashMap, method testMultiKeyRows1.

@Test
public void testMultiKeyRows1() throws Exception {
    random = new Random(833);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastMultiKeyHashMapContainer map = new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4, /* allowNulls */
    false, /* isUnicodeOk */
    false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.MULTI_KEY, verifyTable, new String[] { "int", "int" }, /* doClipping */
    false, /* useExactBytes */
    false);
}
Also used: Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 44 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in the Apache Hive project.

From the class TestVectorMapJoinFastRowHashMap, method testStringRows.

@Test
public void testStringRows() throws Exception {
    random = new Random(927337);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastStringHashMapContainer map = new VectorMapJoinFastStringHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4, /* allowNulls */
    false, /* isUnicodeOk */
    false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.STRING, verifyTable, new String[] { "string" }, /* doClipping */
    false, /* useExactBytes */
    false);
}
Also used: Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 45 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in the Apache Hive project.

From the class TestVectorMapJoinFastRowHashMap, method testIntRowsClippedExact.

@Test
public void testIntRowsClippedExact() throws Exception {
    random = new Random(7520);
    // Use a large capacity that doesn't require expansion, yet.
    VectorMapJoinFastLongHashMapContainer map = new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.INT, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
    VectorRandomRowSource valueSource = new VectorRandomRowSource();
    valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4, /* allowNulls */
    false, /* isUnicodeOk */
    false);
    int rowCount = 1000;
    Object[][] rows = valueSource.randomRows(rowCount);
    addAndVerifyRows(valueSource, rows, map, HashTableKeyType.INT, verifyTable, new String[] { "int" }, /* doClipping */
    true, /* useExactBytes */
    true);
}
Also used: Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Aggregations

VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)50 Random (java.util.Random)24 VerifyFastRowHashMap (org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap)24 Test (org.junit.Test)24 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)20 ArrayList (java.util.ArrayList)19 DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation)19 VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource)19 ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc)19 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)19 GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec)18 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)17 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)16 GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF)15 VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx)12 ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)12 ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc)11 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)11 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)8 HiveConf (org.apache.hadoop.hive.conf.HiveConf)7