Use of org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc in project hive by apache.
From the class TestHBaseAggregateStatsExtrapolation, the method MiddleOfPartitionsHaveBitVectorStatus:
@Test
public void MiddleOfPartitionsHaveBitVectorStatus() throws Exception {
  String dbName = "default";
  String tableName = "snp";
  long now = System.currentTimeMillis();
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col4", "long", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("ds", "string", ""));
  Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
  store.createTable(table);
  List<List<String>> partVals = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    List<String> partVal = Arrays.asList("" + i);
    partVals.add(partVal);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/default/hit/ds=" + partVal);
    Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
    store.addPartition(part);
    // Only the middle partitions (i = 3..6, 4 of 10) get column statistics; the rest are left without stats.
    if (i > 2 && i < 7) {
      ColumnStatistics cs = new ColumnStatistics();
      ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
      desc.setLastAnalyzed(now);
      desc.setPartName("ds=" + partVal);
      cs.setStatsDesc(desc);
      ColumnStatisticsObj obj = new ColumnStatisticsObj();
      obj.setColName("col4");
      obj.setColType("long");
      ColumnStatisticsData data = new ColumnStatisticsData();
      LongColumnStatsData dcsd = new LongColumnStatsData();
      dcsd.setHighValue(1000 + i);
      dcsd.setLowValue(-1000 - i);
      dcsd.setNumNulls(i);
      dcsd.setNumDVs(10 * i);
      dcsd.setBitVectors(bitVectors);
      data.setLongStats(dcsd);
      obj.setStatsData(data);
      cs.addToStatsObj(obj);
      store.updatePartitionColumnStatistics(cs, partVal);
    }
  }
  // Aggregate stats are requested for all 10 partitions; only 4 have stats, so the totals are extrapolated.
  Checker statChecker = new Checker() {
    @Override
    public void checkStats(AggrStats aggrStats) throws Exception {
      Assert.assertEquals(4, aggrStats.getPartsFound());
      Assert.assertEquals(1, aggrStats.getColStatsSize());
      ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
      Assert.assertEquals("col4", cso.getColName());
      Assert.assertEquals("long", cso.getColType());
      LongColumnStatsData lcsd = cso.getStatsData().getLongStats();
      Assert.assertEquals(1006, lcsd.getHighValue(), 0.01);
      Assert.assertEquals(-1006, lcsd.getLowValue(), 0.01);
      Assert.assertEquals(45, lcsd.getNumNulls());
      Assert.assertEquals(3, lcsd.getNumDVs());
    }
  };
  List<String> partNames = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    partNames.add("ds=" + i);
  }
  AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames, Arrays.asList("col4"));
  statChecker.checkStats(aggrStats);
}
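The asserted aggregates follow from the per-partition values above. Below is a minimal standalone sketch of the arithmetic, assuming the aggregator sums numNulls over the four analyzed partitions and scales by totalParts/partsFound, while taking high/low from the partitions it found; the numDVs of 3 is assumed to come from merging the shared bitVectors NDV sketch rather than from the stored numDVs values.

// Standalone sketch, not part of the test class.
public class MiddleExtrapolationSketch {
  public static void main(String[] args) {
    int totalParts = 10;
    int[] analyzed = {3, 4, 5, 6};            // partitions that received stats
    long numNulls = 0;
    long high = Long.MIN_VALUE, low = Long.MAX_VALUE;
    for (int i : analyzed) {
      numNulls += i;                          // per-partition numNulls was set to i
      high = Math.max(high, 1000 + i);
      low = Math.min(low, -1000 - i);
    }
    long extrapolatedNulls = numNulls * totalParts / analyzed.length;
    System.out.println(extrapolatedNulls);    // 18 * 10 / 4 = 45
    System.out.println(high + " " + low);     // 1006 -1006
  }
}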
Use of org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc in project hive by apache.
From the class TestHBaseAggregateStatsExtrapolation, the method allPartitionsHaveBitVectorStatusDouble:
@Test
public void allPartitionsHaveBitVectorStatusDouble() throws Exception {
  String dbName = "default";
  String tableName = "snp";
  long now = System.currentTimeMillis();
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1_double", "double", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("ds", "string", ""));
  Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
  store.createTable(table);
  List<List<String>> partVals = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    List<String> partVal = Arrays.asList("" + i);
    partVals.add(partVal);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/default/hit/ds=" + partVal);
    Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
    store.addPartition(part);
    // Every partition gets column statistics for col1_double.
    ColumnStatistics cs = new ColumnStatistics();
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
    desc.setLastAnalyzed(now);
    desc.setPartName("ds=" + partVal);
    cs.setStatsDesc(desc);
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName("col1_double");
    obj.setColType("double");
    ColumnStatisticsData data = new ColumnStatisticsData();
    DoubleColumnStatsData dcsd = new DoubleColumnStatsData();
    dcsd.setHighValue(1000 + i);
    dcsd.setLowValue(-1000 - i);
    dcsd.setNumNulls(i);
    dcsd.setNumDVs(10 * i + 1);
    dcsd.setBitVectors(bitVectors);
    data.setDoubleStats(dcsd);
    obj.setStatsData(data);
    cs.addToStatsObj(obj);
    store.updatePartitionColumnStatistics(cs, partVal);
  }
  Checker statChecker = new Checker() {
    @Override
    public void checkStats(AggrStats aggrStats) throws Exception {
      Assert.assertEquals(10, aggrStats.getPartsFound());
      Assert.assertEquals(1, aggrStats.getColStatsSize());
      ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
      Assert.assertEquals("col1_double", cso.getColName());
      Assert.assertEquals("double", cso.getColType());
      DoubleColumnStatsData lcsd = cso.getStatsData().getDoubleStats();
      Assert.assertEquals(1009, lcsd.getHighValue(), 0.01);
      Assert.assertEquals(-1009, lcsd.getLowValue(), 0.01);
      Assert.assertEquals(45, lcsd.getNumNulls());
      Assert.assertEquals(3, lcsd.getNumDVs());
    }
  };
  List<String> partNames = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    partNames.add("ds=" + i);
  }
  AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames, Arrays.asList("col1_double"));
  statChecker.checkStats(aggrStats);
}
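Here all ten partitions carry statistics, so under the same assumptions as above no extrapolation is needed and the aggregate is a plain fold over i = 0..9. The sketch below reproduces the asserted numbers, again treating numDVs = 3 as coming from the shared bitVectors sketch rather than the stored 10 * i + 1 values.

// Standalone sketch, not part of the test class.
public class AllPartitionsDoubleSketch {
  public static void main(String[] args) {
    long numNulls = 0;
    double high = Double.NEGATIVE_INFINITY, low = Double.POSITIVE_INFINITY;
    for (int i = 0; i < 10; i++) {
      numNulls += i;                        // 0 + 1 + ... + 9 = 45
      high = Math.max(high, 1000 + i);      // 1009
      low = Math.min(low, -1000 - i);       // -1009
    }
    System.out.println(numNulls + " " + high + " " + low);
  }
}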
Use of org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc in project hive by apache.
From the class TestHBaseAggregateStatsExtrapolation, the method allPartitionsHaveBitVectorStatusString:
@Test
public void allPartitionsHaveBitVectorStatusString() throws Exception {
  String dbName = "default";
  String tableName = "snp";
  long now = System.currentTimeMillis();
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1_string", "string", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("ds", "string", ""));
  Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
  store.createTable(table);
  List<List<String>> partVals = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    List<String> partVal = Arrays.asList("" + i);
    partVals.add(partVal);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/default/hit/ds=" + partVal);
    Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
    store.addPartition(part);
    // Every partition gets string column statistics for col1_string.
    ColumnStatistics cs = new ColumnStatistics();
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
    desc.setLastAnalyzed(now);
    desc.setPartName("ds=" + partVal);
    cs.setStatsDesc(desc);
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName("col1_string");
    obj.setColType("string");
    ColumnStatisticsData data = new ColumnStatisticsData();
    StringColumnStatsData dcsd = new StringColumnStatsData();
    dcsd.setAvgColLen(i + 1);
    dcsd.setMaxColLen(i + 10);
    dcsd.setNumNulls(i);
    dcsd.setNumDVs(10 * i + 1);
    dcsd.setBitVectors(bitVectors);
    data.setStringStats(dcsd);
    obj.setStatsData(data);
    cs.addToStatsObj(obj);
    store.updatePartitionColumnStatistics(cs, partVal);
  }
  Checker statChecker = new Checker() {
    @Override
    public void checkStats(AggrStats aggrStats) throws Exception {
      Assert.assertEquals(10, aggrStats.getPartsFound());
      Assert.assertEquals(1, aggrStats.getColStatsSize());
      ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
      Assert.assertEquals("col1_string", cso.getColName());
      Assert.assertEquals("string", cso.getColType());
      StringColumnStatsData lcsd = cso.getStatsData().getStringStats();
      Assert.assertEquals(10, lcsd.getAvgColLen(), 0.01);
      Assert.assertEquals(19, lcsd.getMaxColLen(), 0.01);
      Assert.assertEquals(45, lcsd.getNumNulls());
      Assert.assertEquals(3, lcsd.getNumDVs());
    }
  };
  List<String> partNames = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    partNames.add("ds=" + i);
  }
  AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames, Arrays.asList("col1_string"));
  statChecker.checkStats(aggrStats);
}
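For the string column the assertions suggest the aggregator keeps the largest avgColLen and maxColLen it sees and sums numNulls across partitions; that reading is an inference from the asserted values, not something confirmed here. A small standalone sketch of that arithmetic:

// Standalone sketch, not part of the test class.
public class AllPartitionsStringSketch {
  public static void main(String[] args) {
    double avgColLen = 0;
    long maxColLen = 0, numNulls = 0;
    for (int i = 0; i < 10; i++) {
      avgColLen = Math.max(avgColLen, i + 1);   // 10
      maxColLen = Math.max(maxColLen, i + 10);  // 19
      numNulls += i;                            // 45
    }
    System.out.println(avgColLen + " " + maxColLen + " " + numNulls);
  }
}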
Use of org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc in project hive by apache.
From the class TestHBaseAggregateStatsExtrapolation, the method TwoEndsAndMiddleOfPartitionsHaveBitVectorStatusLong:
@Test
public void TwoEndsAndMiddleOfPartitionsHaveBitVectorStatusLong() throws Exception {
  String dbName = "default";
  String tableName = "snp";
  long now = System.currentTimeMillis();
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col5", "long", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("ds", "string", ""));
  Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
  store.createTable(table);
  List<List<String>> partVals = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    List<String> partVal = Arrays.asList("" + i);
    partVals.add(partVal);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/default/hit/ds=" + partVal);
    Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
    store.addPartition(part);
    // Partitions 0, 2, 3, 5, 6 and 8 (both ends and the middle, 6 of 10) get column statistics.
    if (i == 0 || i == 2 || i == 3 || i == 5 || i == 6 || i == 8) {
      ColumnStatistics cs = new ColumnStatistics();
      ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
      desc.setLastAnalyzed(now);
      desc.setPartName("ds=" + partVal);
      cs.setStatsDesc(desc);
      ColumnStatisticsObj obj = new ColumnStatisticsObj();
      obj.setColName("col5");
      obj.setColType("long");
      ColumnStatisticsData data = new ColumnStatisticsData();
      LongColumnStatsData dcsd = new LongColumnStatsData();
      dcsd.setHighValue(1000 + i);
      dcsd.setLowValue(-1000 - i);
      dcsd.setNumNulls(i);
      dcsd.setNumDVs(10 * i);
      dcsd.setBitVectors(bitVectors);
      data.setLongStats(dcsd);
      obj.setStatsData(data);
      cs.addToStatsObj(obj);
      store.updatePartitionColumnStatistics(cs, partVal);
    }
  }
  Checker statChecker = new Checker() {
    @Override
    public void checkStats(AggrStats aggrStats) throws Exception {
      Assert.assertEquals(6, aggrStats.getPartsFound());
      Assert.assertEquals(1, aggrStats.getColStatsSize());
      ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
      Assert.assertEquals("col5", cso.getColName());
      Assert.assertEquals("long", cso.getColType());
      LongColumnStatsData lcsd = cso.getStatsData().getLongStats();
      Assert.assertEquals(1010, lcsd.getHighValue(), 0.01);
      Assert.assertEquals(-1010, lcsd.getLowValue(), 0.01);
      Assert.assertEquals(40, lcsd.getNumNulls());
      Assert.assertEquals(3, lcsd.getNumDVs());
    }
  };
  List<String> partNames = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    partNames.add("ds=" + i);
  }
  AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames, Arrays.asList("col5"));
  statChecker.checkStats(aggrStats);
}
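Six of the ten partitions carry stats here, so partsFound is 6 and, under the same scaling assumption as before, numNulls extrapolates from 24 to 40. The asserted high/low of 1010/-1010 sit slightly outside the observed 1008/-1008, which suggests the value range is also extrapolated when some partitions lack stats; the sketch below only reproduces the numNulls arithmetic.

// Standalone sketch, not part of the test class.
public class TwoEndsAndMiddleSketch {
  public static void main(String[] args) {
    int totalParts = 10;
    int[] analyzed = {0, 2, 3, 5, 6, 8};      // partitions that received stats
    long numNulls = 0;
    for (int i : analyzed) {
      numNulls += i;                          // 0 + 2 + 3 + 5 + 6 + 8 = 24
    }
    System.out.println(numNulls * totalParts / analyzed.length);  // 24 * 10 / 6 = 40
  }
}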
Use of org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc in project hive by apache.
From the class TestHBaseStore, the method longPartitionStatistics:
@Test
public void longPartitionStatistics() throws Exception {
  createMockTableAndPartition(INT_TYPE, INT_VAL);
  // Add partition stats for: LONG_COL and partition: {PART_KEY, INT_VAL} to DB
  // Because of the way our mock implementation works we actually need to not create the table
  // before we set statistics on it.
  ColumnStatistics stats = new ColumnStatistics();
  // Get a default ColumnStatisticsDesc for partition level stats
  ColumnStatisticsDesc desc = getMockPartColStatsDesc(PART_KEY, INT_VAL);
  stats.setStatsDesc(desc);
  // Get one of the pre-created ColumnStatisticsObj
  ColumnStatisticsObj obj = longColStatsObjs.get(0);
  LongColumnStatsData longData = obj.getStatsData().getLongStats();
  // Add to DB
  stats.addToStatsObj(obj);
  List<String> parVals = new ArrayList<String>();
  parVals.add(INT_VAL);
  store.updatePartitionColumnStatistics(stats, parVals);
  // Get from DB
  List<String> partNames = new ArrayList<String>();
  partNames.add(desc.getPartName());
  List<String> colNames = new ArrayList<String>();
  colNames.add(obj.getColName());
  List<ColumnStatistics> statsFromDB = store.getPartitionColumnStatistics(DB, TBL, partNames, colNames);
  // Compare ColumnStatisticsDesc
  Assert.assertEquals(1, statsFromDB.size());
  Assert.assertEquals(desc.getLastAnalyzed(), statsFromDB.get(0).getStatsDesc().getLastAnalyzed());
  Assert.assertEquals(DB, statsFromDB.get(0).getStatsDesc().getDbName());
  Assert.assertEquals(TBL, statsFromDB.get(0).getStatsDesc().getTableName());
  Assert.assertFalse(statsFromDB.get(0).getStatsDesc().isIsTblLevel());
  // Compare ColumnStatisticsObj
  Assert.assertEquals(1, statsFromDB.get(0).getStatsObjSize());
  ColumnStatisticsObj objFromDB = statsFromDB.get(0).getStatsObj().get(0);
  ColumnStatisticsData dataFromDB = objFromDB.getStatsData();
  // Compare ColumnStatisticsData
  Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS, dataFromDB.getSetField());
  // Compare LongColumnStatsData
  LongColumnStatsData longDataFromDB = dataFromDB.getLongStats();
  Assert.assertEquals(longData.getHighValue(), longDataFromDB.getHighValue());
  Assert.assertEquals(longData.getLowValue(), longDataFromDB.getLowValue());
  Assert.assertEquals(longData.getNumNulls(), longDataFromDB.getNumNulls());
  Assert.assertEquals(longData.getNumDVs(), longDataFromDB.getNumDVs());
}
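getMockPartColStatsDesc is a helper of the test class and is not shown in this excerpt. A hypothetical equivalent, built only from the ColumnStatisticsDesc calls already used in the examples above (the method name and parameters are illustrative, not the test's actual code):

// Hypothetical helper, not the test's actual getMockPartColStatsDesc.
private static ColumnStatisticsDesc buildPartColStatsDesc(String dbName, String tableName,
    String partKey, String partVal) {
  ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);  // false = partition-level stats
  desc.setPartName(partKey + "=" + partVal);
  desc.setLastAnalyzed(System.currentTimeMillis());
  return desc;
}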