Usage of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in the Apache Hive project:
class TestVectorMapJoinFastRowHashMap, method testBigIntRows.
@Test
public void testBigIntRows() throws Exception {
  random = new Random(927337);
  // Start with a capacity large enough that the table never needs to expand during this test.
  VectorMapJoinFastLongHashMapContainer hashMapContainer =
      new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG,
          LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
  // Random value rows: 4 columns over all supported types, no nulls, ASCII only.
  VectorRandomRowSource source = new VectorRandomRowSource();
  source.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  Object[][] testRows = source.randomRows(1000);
  // Insert the rows keyed on a single bigint column and verify round-trip retrieval.
  addAndVerifyRows(source, testRows, hashMapContainer, HashTableKeyType.LONG, verify,
      new String[] { "bigint" }, /* doClipping */ false, /* useExactBytes */ false);
}
Usage of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in the Apache Hive project:
class TestVectorMapJoinFastRowHashMap, method testMultiKeyRows1.
@Test
public void testMultiKeyRows1() throws Exception {
  random = new Random(833);
  // Start with a capacity large enough that the table never needs to expand during this test.
  VectorMapJoinFastMultiKeyHashMapContainer hashMapContainer =
      new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
          LARGE_WB_SIZE, -1, 4);
  VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
  // Random value rows: 4 columns over all supported types, no nulls, ASCII only.
  VectorRandomRowSource source = new VectorRandomRowSource();
  source.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  Object[][] testRows = source.randomRows(1000);
  // Insert the rows keyed on a composite (int, int) key and verify round-trip retrieval.
  addAndVerifyRows(source, testRows, hashMapContainer, HashTableKeyType.MULTI_KEY, verify,
      new String[] { "int", "int" }, /* doClipping */ false, /* useExactBytes */ false);
}
Usage of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in the Apache Hive project:
class TestVectorMapJoinFastRowHashMap, method testStringRows.
@Test
public void testStringRows() throws Exception {
  random = new Random(927337);
  // Start with a capacity large enough that the table never needs to expand during this test.
  VectorMapJoinFastStringHashMapContainer hashMapContainer =
      new VectorMapJoinFastStringHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR,
          LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
  // Random value rows: 4 columns over all supported types, no nulls, ASCII only.
  VectorRandomRowSource source = new VectorRandomRowSource();
  source.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  Object[][] testRows = source.randomRows(1000);
  // Insert the rows keyed on a single string column and verify round-trip retrieval.
  addAndVerifyRows(source, testRows, hashMapContainer, HashTableKeyType.STRING, verify,
      new String[] { "string" }, /* doClipping */ false, /* useExactBytes */ false);
}
Usage of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in the Apache Hive project:
class TestVectorMapJoinFastRowHashMap, method testIntRowsClippedExact.
@Test
public void testIntRowsClippedExact() throws Exception {
  random = new Random(7520);
  // Start with a capacity large enough that the table never needs to expand during this test.
  VectorMapJoinFastLongHashMapContainer hashMapContainer =
      new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.INT,
          LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
  // Random value rows: 4 columns over all supported types, no nulls, ASCII only.
  VectorRandomRowSource source = new VectorRandomRowSource();
  source.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  Object[][] testRows = source.randomRows(1000);
  // Insert the rows keyed on a single int column; exercise both the clipping and
  // exact-bytes verification paths.
  addAndVerifyRows(source, testRows, hashMapContainer, HashTableKeyType.INT, verify,
      new String[] { "int" }, /* doClipping */ true, /* useExactBytes */ true);
}
Usage of org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap in the Apache Hive project:
class TestVectorMapJoinFastRowHashMap, method testBigIntRowsExact.
@Test
public void testBigIntRowsExact() throws Exception {
  random = new Random(27722);
  // Start with a capacity large enough that the table never needs to expand during this test.
  VectorMapJoinFastLongHashMapContainer hashMapContainer =
      new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG,
          LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
  // Random value rows: 4 columns over all supported types, no nulls, ASCII only.
  VectorRandomRowSource source = new VectorRandomRowSource();
  source.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  Object[][] testRows = source.randomRows(1000);
  // Insert the rows keyed on a single bigint column; exercise the exact-bytes
  // verification path (no clipping).
  addAndVerifyRows(source, testRows, hashMapContainer, HashTableKeyType.LONG, verify,
      new String[] { "bigint" }, /* doClipping */ false, /* useExactBytes */ true);
}
Aggregations