Use of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in project hive by apache.
From class TestVectorMapJoinFastRowHashMap, method testBigIntRowsClipped.
@Test
public void testBigIntRowsClipped() throws Exception {
  random = new Random(326232);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastLongHashMapContainer map =
      new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG,
          LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.LONG, verifyTable,
      new String[] { "bigint" }, /* doClipping */ true, /* useExactBytes */ false);
}
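The three remaining examples repeat this exact shape, varying only the container type, the key type, the key type names, and the doClipping/useExactBytes flags. As a reading aid, here is a minimal sketch of how the shared body could be factored into one helper. The name runRowHashMapTest is hypothetical; the sketch assumes the test class's random field, its addAndVerifyRows helper, and VectorMapJoinFastHashTableContainerBase as the containers' common supertype (an assumption, not confirmed by this excerpt).

// Hypothetical helper (not in the Hive source): factors out the body shared
// by the four tests on this page. Assumes the enclosing test class declares
// the 'random' field and the addAndVerifyRows(...) method used above, and
// that the container classes share VectorMapJoinFastHashTableContainerBase
// as a supertype (assumed).
private void runRowHashMapTest(VectorMapJoinFastHashTableContainerBase map,
    HashTableKeyType keyType, String[] keyTypeNames,
    boolean doClipping, boolean useExactBytes) throws Exception {
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
  // Random rows over all supported types: 4 columns, no nulls, ASCII-only strings.
  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  Object[][] rows = valueSource.randomRows(1000);
  // Insert the rows into the map, then verify reads under the requested
  // clipping / exact-bytes settings.
  addAndVerifyRows(valueSource, rows, map, keyType, verifyTable, keyTypeNames,
      doClipping, useExactBytes);
}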
Use of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in project hive by apache.
From class TestVectorMapJoinFastRowHashMap, method testMultiKeyRows2.
@Test
public void testMultiKeyRows2() throws Exception {
  random = new Random(833099);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastMultiKeyHashMapContainer map =
      new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
          LARGE_WB_SIZE, -1, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.MULTI_KEY, verifyTable,
      new String[] { "string", "string" }, /* doClipping */ false, /* useExactBytes */ false);
}
Use of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in project hive by apache.
From class TestVectorMapJoinFastRowHashMap, method testStringRowsClipped.
@Test
public void testStringRowsClipped() throws Exception {
  random = new Random(326232);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastStringHashMapContainer map =
      new VectorMapJoinFastStringHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
          LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.STRING, verifyTable,
      new String[] { "string" }, /* doClipping */ true, /* useExactBytes */ false);
}
Use of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in project hive by apache.
From class TestVectorMapJoinFastRowHashMap, method testMultiKeyRowsExact2.
@Test
public void testMultiKeyRowsExact2() throws Exception {
  random = new Random(8235);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastMultiKeyHashMapContainer map =
      new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
          LARGE_WB_SIZE, -1, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.MULTI_KEY, verifyTable,
      new String[] { "smallint" }, /* doClipping */ false, /* useExactBytes */ true);
}
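With that hypothetical helper in place, each test above reduces to seeding the generator, constructing its container, and delegating. Only parameters already present in the original tests are used; for example, testStringRowsClipped and testMultiKeyRowsExact2 would become:

// testStringRowsClipped, rewritten against the sketch above.
random = new Random(326232);
runRowHashMapTest(
    new VectorMapJoinFastStringHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
        LARGE_WB_SIZE, -1, tableDesc, 4),
    HashTableKeyType.STRING, new String[] { "string" },
    /* doClipping */ true, /* useExactBytes */ false);

// testMultiKeyRowsExact2, likewise.
random = new Random(8235);
runRowHashMapTest(
    new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
        LARGE_WB_SIZE, -1, 4),
    HashTableKeyType.MULTI_KEY, new String[] { "smallint" },
    /* doClipping */ false, /* useExactBytes */ true);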