Use of org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData in project hive by apache:
class TestHBaseAggrStatsCacheIntegration, method invalidation().
// Integration test: the aggregate-stats cache must drop a cached aggregate when
// statistics for one of its partitions are updated or when a partition is dropped,
// while leaving aggregates over unaffected partition sets cached.  The
// hbaseHits / totalGets / misses counters (exposed via store.backdoor()) reveal
// whether each get_aggr_stats_for call was served from memory, HBase, or rebuilt.
@Test
public void invalidation() throws Exception {
try {
String dbName = "default";
String tableName = "invalidation";
List<String> partVals1 = Arrays.asList("today");
List<String> partVals2 = Arrays.asList("yesterday");
List<String> partVals3 = Arrays.asList("tomorrow");
long now = System.currentTimeMillis();
// Table with a single boolean column, partitioned by ds.
List<FieldSchema> cols = new ArrayList<>();
cols.add(new FieldSchema("col1", "boolean", "nocomment"));
SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
List<FieldSchema> partCols = new ArrayList<>();
partCols.add(new FieldSchema("ds", "string", ""));
// NOTE(review): (int) now truncates the epoch-millis value; presumably only a
// placeholder create/access time for the test — nothing below asserts on it.
Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
store.createTable(table);
// Create three partitions, each with identical column stats: 10/20/30.
for (List<String> partVals : Arrays.asList(partVals1, partVals2, partVals3)) {
StorageDescriptor psd = new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/invalidation/ds=" + partVals.get(0));
Partition part = new Partition(partVals, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
store.addPartition(part);
ColumnStatistics cs = new ColumnStatistics();
ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj = new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data = new ColumnStatisticsData();
BooleanColumnStatsData bcsd = new BooleanColumnStatsData();
bcsd.setNumFalses(10);
bcsd.setNumTrues(20);
bcsd.setNumNulls(30);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs, partVals);
}
// Expected aggregate over any two of the original partitions:
// sums of the per-partition values (2 x 10/20/30).
Checker statChecker = new Checker() {
@Override
public void checkStats(AggrStats aggrStats) throws Exception {
Assert.assertEquals(2, aggrStats.getPartsFound());
Assert.assertEquals(1, aggrStats.getColStatsSize());
ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
Assert.assertEquals("col1", cso.getColName());
Assert.assertEquals("boolean", cso.getColType());
BooleanColumnStatsData bcsd = cso.getStatsData().getBooleanStats();
Assert.assertEquals(20, bcsd.getNumFalses());
Assert.assertEquals(40, bcsd.getNumTrues());
Assert.assertEquals(60, bcsd.getNumNulls());
}
};
AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
// Check that we had to build it from the stats
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(1, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
// Call again, this time it should come from memory. Also, reverse the name order this time
// to assure that we still hit.
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
// Now call a different combination to get it in memory too
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=tomorrow", "ds=today"), Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(3, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
// Repeat: the second aggregate should now also be served from memory (no new miss).
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=tomorrow", "ds=today"), Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(4, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
// wake the invalidator and check again to make sure it isn't too aggressive about
// removing our stuff.
store.backdoor().getStatsCache().wakeInvalidator();
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=tomorrow", "ds=today"), Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(5, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
// Update statistics for 'tomorrow'
ColumnStatistics cs = new ColumnStatistics();
ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals3.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj = new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data = new ColumnStatisticsData();
BooleanColumnStatsData bcsd = new BooleanColumnStatsData();
bcsd.setNumFalses(100);
bcsd.setNumTrues(200);
bcsd.setNumNulls(300);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
// Expected aggregate once 'tomorrow' carries 100/200/300, combined with
// 'today''s original 10/20/30.
Checker afterUpdate = new Checker() {
@Override
public void checkStats(AggrStats aggrStats) throws Exception {
Assert.assertEquals(2, aggrStats.getPartsFound());
Assert.assertEquals(1, aggrStats.getColStatsSize());
ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
Assert.assertEquals("col1", cso.getColName());
Assert.assertEquals("boolean", cso.getColType());
BooleanColumnStatsData bcsd = cso.getStatsData().getBooleanStats();
Assert.assertEquals(110, bcsd.getNumFalses());
Assert.assertEquals(220, bcsd.getNumTrues());
Assert.assertEquals(330, bcsd.getNumNulls());
}
};
store.updatePartitionColumnStatistics(cs, partVals3);
// Shorten the invalidator interval so the update is noticed promptly.
store.backdoor().getStatsCache().setRunInvalidatorEvery(100);
store.backdoor().getStatsCache().wakeInvalidator();
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=tomorrow", "ds=today"), Arrays.asList("col1"));
afterUpdate.checkStats(aggrStats);
// Check that we missed, which means this aggregate was dropped from the cache.
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(6, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3, store.backdoor().getStatsCache().misses.getCnt());
// Check that our other aggregate is still in the cache.
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1"));
statChecker.checkStats(aggrStats);
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(7, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3, store.backdoor().getStatsCache().misses.getCnt());
// Drop 'yesterday', so our first aggregate should be dumped from memory and hbase
store.dropPartition(dbName, tableName, partVals2);
store.backdoor().getStatsCache().wakeInvalidator();
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1"));
// Only 'today' remains of the requested pair, so the rebuilt aggregate covers
// a single partition with the original 10/20/30 values.
new Checker() {
@Override
public void checkStats(AggrStats aggrStats) throws Exception {
Assert.assertEquals(1, aggrStats.getPartsFound());
Assert.assertEquals(1, aggrStats.getColStatsSize());
ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
Assert.assertEquals("col1", cso.getColName());
Assert.assertEquals("boolean", cso.getColType());
BooleanColumnStatsData bcsd = cso.getStatsData().getBooleanStats();
Assert.assertEquals(10, bcsd.getNumFalses());
Assert.assertEquals(20, bcsd.getNumTrues());
Assert.assertEquals(30, bcsd.getNumNulls());
}
}.checkStats(aggrStats);
// Check that we missed, which means this aggregate was dropped from the cache.
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(8, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(4, store.backdoor().getStatsCache().misses.getCnt());
// Check that our other aggregate is still in the cache.
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=tomorrow", "ds=today"), Arrays.asList("col1"));
afterUpdate.checkStats(aggrStats);
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(9, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(4, store.backdoor().getStatsCache().misses.getCnt());
} finally {
// Restore the cache's default invalidator settings so later tests are unaffected.
store.backdoor().getStatsCache().setRunInvalidatorEvery(5000);
store.backdoor().getStatsCache().setMaxTimeInCache(500000);
store.backdoor().getStatsCache().wakeInvalidator();
}
}
Use of org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData in project hive by apache:
class TestHBaseAggrStatsCacheIntegration, method altersInvalidation().
// Integration test: altering partitions (store.alterPartitions) must invalidate
// every cached aggregate that includes an altered partition.  Counter semantics
// match invalidation(): each miss means an aggregate had to be rebuilt from the
// per-partition statistics rather than served from cache.
@Test
public void altersInvalidation() throws Exception {
try {
String dbName = "default";
String tableName = "asi";
List<String> partVals1 = Arrays.asList("today");
List<String> partVals2 = Arrays.asList("yesterday");
List<String> partVals3 = Arrays.asList("tomorrow");
long now = System.currentTimeMillis();
// Table with a single boolean column, partitioned by ds.
List<FieldSchema> cols = new ArrayList<>();
cols.add(new FieldSchema("col1", "boolean", "nocomment"));
SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
List<FieldSchema> partCols = new ArrayList<>();
partCols.add(new FieldSchema("ds", "string", ""));
Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
store.createTable(table);
// Keep references to the created partitions so two of them can be altered below.
Partition[] partitions = new Partition[3];
int partnum = 0;
for (List<String> partVals : Arrays.asList(partVals1, partVals2, partVals3)) {
StorageDescriptor psd = new StorageDescriptor(sd);
psd.setLocation("file:/tmp/default/invalidation/ds=" + partVals.get(0));
Partition part = new Partition(partVals, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
partitions[partnum++] = part;
store.addPartition(part);
ColumnStatistics cs = new ColumnStatistics();
ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
desc.setLastAnalyzed(now);
desc.setPartName("ds=" + partVals.get(0));
cs.setStatsDesc(desc);
ColumnStatisticsObj obj = new ColumnStatisticsObj();
obj.setColName("col1");
obj.setColType("boolean");
ColumnStatisticsData data = new ColumnStatisticsData();
BooleanColumnStatsData bcsd = new BooleanColumnStatsData();
bcsd.setNumFalses(10);
bcsd.setNumTrues(20);
bcsd.setNumNulls(30);
data.setBooleanStats(bcsd);
obj.setStatsData(data);
cs.addToStatsObj(obj);
store.updatePartitionColumnStatistics(cs, partVals);
}
// Prime the cache with two different aggregates.
AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=tomorrow"), Arrays.asList("col1"));
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1"));
// Check that we had to build it from the stats
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(2, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
// wake the invalidator and check again to make sure it isn't too aggressive about
// removing our stuff.
store.backdoor().getStatsCache().wakeInvalidator();
// Alter 'today' and 'tomorrow'; both cached aggregates include at least one of
// these partitions, so both should be invalidated.
Partition[] newParts = new Partition[2];
newParts[0] = new Partition(partitions[0]);
// NOTE(review): (int) System.currentTimeMillis() truncates the epoch-millis
// value; presumably acceptable here since only the alteration itself matters.
newParts[0].setLastAccessTime((int) System.currentTimeMillis());
newParts[1] = new Partition(partitions[2]);
newParts[1].setLastAccessTime((int) System.currentTimeMillis());
store.alterPartitions(dbName, tableName, Arrays.asList(partVals1, partVals3), Arrays.asList(newParts));
store.backdoor().getStatsCache().setRunInvalidatorEvery(100);
store.backdoor().getStatsCache().wakeInvalidator();
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=tomorrow", "ds=today"), Arrays.asList("col1"));
// Check that we missed, which means this aggregate was dropped from the cache.
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(3, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(3, store.backdoor().getStatsCache().misses.getCnt());
// Check that our other aggregate got dropped too
aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1"));
Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
Assert.assertEquals(4, store.backdoor().getStatsCache().totalGets.getCnt());
Assert.assertEquals(4, store.backdoor().getStatsCache().misses.getCnt());
} finally {
// Restore the cache's default invalidator settings so later tests are unaffected.
store.backdoor().getStatsCache().setRunInvalidatorEvery(5000);
store.backdoor().getStatsCache().setMaxTimeInCache(500000);
store.backdoor().getStatsCache().wakeInvalidator();
}
}
Use of org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData in project hive by apache:
class TestHBaseStoreIntegration, method tableStatistics().
// Integration test: round-trips table-level column statistics for every stats
// type (boolean, long, double, string, binary, decimal) through the store and
// verifies each field survives unchanged.  Stats are written in two batches to
// also check that a second update merges with, rather than replaces, the first.
@Test
public void tableStatistics() throws Exception {
long now = System.currentTimeMillis();
String dbname = "default";
String tableName = "statstable";
String boolcol = "boolcol";
String longcol = "longcol";
String doublecol = "doublecol";
String stringcol = "stringcol";
String binarycol = "bincol";
String decimalcol = "deccol";
// Arbitrary per-type expected values, asserted against after the round trip.
long trues = 37;
long falses = 12;
long booleanNulls = 2;
long longHigh = 120938479124L;
long longLow = -12341243213412124L;
long longNulls = 23;
long longDVs = 213L;
double doubleHigh = 123423.23423;
double doubleLow = 0.00001234233;
long doubleNulls = 92;
long doubleDVs = 1234123421L;
long strMaxLen = 1234;
double strAvgLen = 32.3;
long strNulls = 987;
long strDVs = 906;
long binMaxLen = 123412987L;
double binAvgLen = 76.98;
long binNulls = 976998797L;
Decimal decHigh = new Decimal();
decHigh.setScale((short) 3);
// I have no clue how this is translated, but it
decHigh.setUnscaled("3876".getBytes());
// doesn't matter
Decimal decLow = new Decimal();
decLow.setScale((short) 3);
decLow.setUnscaled("38".getBytes());
long decNulls = 13;
long decDVs = 923947293L;
// Unpartitioned table with one column of each stats-bearing type.
List<FieldSchema> cols = new ArrayList<FieldSchema>();
cols.add(new FieldSchema(boolcol, "boolean", "nocomment"));
cols.add(new FieldSchema(longcol, "long", "nocomment"));
cols.add(new FieldSchema(doublecol, "double", "nocomment"));
cols.add(new FieldSchema(stringcol, "varchar(32)", "nocomment"));
cols.add(new FieldSchema(binarycol, "binary", "nocomment"));
cols.add(new FieldSchema(decimalcol, "decimal(5, 3)", "nocomment"));
SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
// NOTE(review): (int) now / 1000 casts before dividing, so the truncated int is
// divided — likely intended (int) (now / 1000); harmless here as create/access
// times are never asserted on. Confirm before reusing this pattern.
Table table = new Table(tableName, dbname, "me", (int) now / 1000, (int) now / 1000, 0, sd, null, emptyParameters, null, null, null);
store.createTable(table);
ColumnStatistics stats = new ColumnStatistics();
ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
desc.setLastAnalyzed(now);
desc.setDbName(dbname);
desc.setTableName(tableName);
desc.setIsTblLevel(true);
stats.setStatsDesc(desc);
// Do one column of each type
ColumnStatisticsObj obj = new ColumnStatisticsObj();
obj.setColName(boolcol);
obj.setColType("boolean");
ColumnStatisticsData data = new ColumnStatisticsData();
BooleanColumnStatsData boolData = new BooleanColumnStatsData();
boolData.setNumTrues(trues);
boolData.setNumFalses(falses);
boolData.setNumNulls(booleanNulls);
data.setBooleanStats(boolData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
obj = new ColumnStatisticsObj();
obj.setColName(longcol);
obj.setColType("long");
data = new ColumnStatisticsData();
LongColumnStatsData longData = new LongColumnStatsData();
longData.setHighValue(longHigh);
longData.setLowValue(longLow);
longData.setNumNulls(longNulls);
longData.setNumDVs(longDVs);
data.setLongStats(longData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
obj = new ColumnStatisticsObj();
obj.setColName(doublecol);
obj.setColType("double");
data = new ColumnStatisticsData();
DoubleColumnStatsData doubleData = new DoubleColumnStatsData();
doubleData.setHighValue(doubleHigh);
doubleData.setLowValue(doubleLow);
doubleData.setNumNulls(doubleNulls);
doubleData.setNumDVs(doubleDVs);
data.setDoubleStats(doubleData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
// First batch: boolean, long, and double columns.
store.updateTableColumnStatistics(stats);
stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol, longcol, doublecol));
// We'll check all of the individual values later.
Assert.assertEquals(3, stats.getStatsObjSize());
// check that we can fetch just some of the columns
stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol));
Assert.assertEquals(1, stats.getStatsObjSize());
// Second batch: string, binary, and decimal columns (reuses the same desc).
stats = new ColumnStatistics();
stats.setStatsDesc(desc);
obj = new ColumnStatisticsObj();
obj.setColName(stringcol);
obj.setColType("string");
data = new ColumnStatisticsData();
StringColumnStatsData strData = new StringColumnStatsData();
strData.setMaxColLen(strMaxLen);
strData.setAvgColLen(strAvgLen);
strData.setNumNulls(strNulls);
strData.setNumDVs(strDVs);
data.setStringStats(strData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
obj = new ColumnStatisticsObj();
obj.setColName(binarycol);
obj.setColType("binary");
data = new ColumnStatisticsData();
BinaryColumnStatsData binData = new BinaryColumnStatsData();
binData.setMaxColLen(binMaxLen);
binData.setAvgColLen(binAvgLen);
binData.setNumNulls(binNulls);
data.setBinaryStats(binData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
obj = new ColumnStatisticsObj();
obj.setColName(decimalcol);
obj.setColType("decimal(5,3)");
data = new ColumnStatisticsData();
DecimalColumnStatsData decData = new DecimalColumnStatsData();
LOG.debug("Setting decimal high value to " + decHigh.getScale() + " <" + new String(decHigh.getUnscaled()) + ">");
decData.setHighValue(decHigh);
decData.setLowValue(decLow);
decData.setNumNulls(decNulls);
decData.setNumDVs(decDVs);
data.setDecimalStats(decData);
obj.setStatsData(data);
stats.addToStatsObj(obj);
store.updateTableColumnStatistics(stats);
// Fetch all six columns back; both batches must be present.
stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol, longcol, doublecol, stringcol, binarycol, decimalcol));
Assert.assertEquals(now, stats.getStatsDesc().getLastAnalyzed());
Assert.assertEquals(dbname, stats.getStatsDesc().getDbName());
Assert.assertEquals(tableName, stats.getStatsDesc().getTableName());
Assert.assertTrue(stats.getStatsDesc().isIsTblLevel());
Assert.assertEquals(6, stats.getStatsObjSize());
// Field-by-field verification, in the same order the columns were requested.
ColumnStatisticsData colData = stats.getStatsObj().get(0).getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.BOOLEAN_STATS, colData.getSetField());
boolData = colData.getBooleanStats();
Assert.assertEquals(trues, boolData.getNumTrues());
Assert.assertEquals(falses, boolData.getNumFalses());
Assert.assertEquals(booleanNulls, boolData.getNumNulls());
colData = stats.getStatsObj().get(1).getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS, colData.getSetField());
longData = colData.getLongStats();
Assert.assertEquals(longHigh, longData.getHighValue());
Assert.assertEquals(longLow, longData.getLowValue());
Assert.assertEquals(longNulls, longData.getNumNulls());
Assert.assertEquals(longDVs, longData.getNumDVs());
colData = stats.getStatsObj().get(2).getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS, colData.getSetField());
doubleData = colData.getDoubleStats();
Assert.assertEquals(doubleHigh, doubleData.getHighValue(), 0.01);
Assert.assertEquals(doubleLow, doubleData.getLowValue(), 0.01);
Assert.assertEquals(doubleNulls, doubleData.getNumNulls());
Assert.assertEquals(doubleDVs, doubleData.getNumDVs());
colData = stats.getStatsObj().get(3).getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS, colData.getSetField());
strData = colData.getStringStats();
Assert.assertEquals(strMaxLen, strData.getMaxColLen());
Assert.assertEquals(strAvgLen, strData.getAvgColLen(), 0.01);
Assert.assertEquals(strNulls, strData.getNumNulls());
Assert.assertEquals(strDVs, strData.getNumDVs());
colData = stats.getStatsObj().get(4).getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.BINARY_STATS, colData.getSetField());
binData = colData.getBinaryStats();
Assert.assertEquals(binMaxLen, binData.getMaxColLen());
Assert.assertEquals(binAvgLen, binData.getAvgColLen(), 0.01);
Assert.assertEquals(binNulls, binData.getNumNulls());
colData = stats.getStatsObj().get(5).getStatsData();
Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS, colData.getSetField());
decData = colData.getDecimalStats();
Assert.assertEquals(decHigh, decData.getHighValue());
Assert.assertEquals(decLow, decData.getLowValue());
Assert.assertEquals(decNulls, decData.getNumNulls());
Assert.assertEquals(decDVs, decData.getNumDVs());
}
Use of org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData in project hive by apache:
class HBaseUtils, method statsForOneColumnFromProtoBuf().
/**
 * Deserializes one column's statistics from the protobuf form stored in HBase
 * into a Thrift {@code ColumnStatisticsObj}.  Exactly one stats oneof must be
 * set on {@code proto}; otherwise a RuntimeException is thrown.
 *
 * @param partitionColumnStats enclosing stats holder whose descriptor's
 *        lastAnalyzed is advanced to the newest value seen; may be null
 * @param proto the serialized column statistics
 * @return the reconstructed Thrift statistics object
 * @throws IOException declared for callers; not thrown directly in this body
 */
private static ColumnStatisticsObj statsForOneColumnFromProtoBuf(ColumnStatistics partitionColumnStats, HbaseMetastoreProto.ColumnStats proto) throws IOException {
  ColumnStatisticsObj result = new ColumnStatisticsObj();
  long analyzedAt = proto.getLastAnalyzed();
  if (partitionColumnStats != null) {
    // Keep the most recent analysis time across all columns of this partition.
    partitionColumnStats.getStatsDesc().setLastAnalyzed(Math.max(analyzedAt, partitionColumnStats.getStatsDesc().getLastAnalyzed()));
  }
  result.setColName(proto.getColumnName());
  result.setColType(proto.getColumnType());
  ColumnStatisticsData data = new ColumnStatisticsData();
  if (proto.hasBoolStats()) {
    BooleanColumnStatsData bools = new BooleanColumnStatsData();
    bools.setNumTrues(proto.getBoolStats().getNumTrues());
    bools.setNumFalses(proto.getBoolStats().getNumFalses());
    bools.setNumNulls(proto.getNumNulls());
    data.setBooleanStats(bools);
  } else if (proto.hasLongStats()) {
    LongColumnStatsData longs = new LongColumnStatsData();
    // Low/high are optional in the proto; only copy them when present.
    if (proto.getLongStats().hasLowValue()) {
      longs.setLowValue(proto.getLongStats().getLowValue());
    }
    if (proto.getLongStats().hasHighValue()) {
      longs.setHighValue(proto.getLongStats().getHighValue());
    }
    longs.setNumNulls(proto.getNumNulls());
    longs.setNumDVs(proto.getNumDistinctValues());
    longs.setBitVectors(proto.getBitVectors());
    data.setLongStats(longs);
  } else if (proto.hasDoubleStats()) {
    DoubleColumnStatsData doubles = new DoubleColumnStatsData();
    if (proto.getDoubleStats().hasLowValue()) {
      doubles.setLowValue(proto.getDoubleStats().getLowValue());
    }
    if (proto.getDoubleStats().hasHighValue()) {
      doubles.setHighValue(proto.getDoubleStats().getHighValue());
    }
    doubles.setNumNulls(proto.getNumNulls());
    doubles.setNumDVs(proto.getNumDistinctValues());
    doubles.setBitVectors(proto.getBitVectors());
    data.setDoubleStats(doubles);
  } else if (proto.hasStringStats()) {
    StringColumnStatsData strings = new StringColumnStatsData();
    strings.setMaxColLen(proto.getStringStats().getMaxColLength());
    strings.setAvgColLen(proto.getStringStats().getAvgColLength());
    strings.setNumNulls(proto.getNumNulls());
    strings.setNumDVs(proto.getNumDistinctValues());
    strings.setBitVectors(proto.getBitVectors());
    data.setStringStats(strings);
  } else if (proto.hasBinaryStats()) {
    BinaryColumnStatsData binaries = new BinaryColumnStatsData();
    binaries.setMaxColLen(proto.getBinaryStats().getMaxColLength());
    binaries.setAvgColLen(proto.getBinaryStats().getAvgColLength());
    binaries.setNumNulls(proto.getNumNulls());
    data.setBinaryStats(binaries);
  } else if (proto.hasDecimalStats()) {
    DecimalColumnStatsData decimals = new DecimalColumnStatsData();
    // Rebuild the Thrift Decimal (unscaled bytes + scale) for each bound present.
    if (proto.getDecimalStats().hasHighValue()) {
      Decimal hi = new Decimal();
      hi.setUnscaled(proto.getDecimalStats().getHighValue().getUnscaled().toByteArray());
      hi.setScale((short) proto.getDecimalStats().getHighValue().getScale());
      decimals.setHighValue(hi);
    }
    if (proto.getDecimalStats().hasLowValue()) {
      Decimal lo = new Decimal();
      lo.setUnscaled(proto.getDecimalStats().getLowValue().getUnscaled().toByteArray());
      lo.setScale((short) proto.getDecimalStats().getLowValue().getScale());
      decimals.setLowValue(lo);
    }
    decimals.setNumNulls(proto.getNumNulls());
    decimals.setNumDVs(proto.getNumDistinctValues());
    decimals.setBitVectors(proto.getBitVectors());
    data.setDecimalStats(decimals);
  } else {
    throw new RuntimeException("Woh, bad. Unknown stats type!");
  }
  result.setStatsData(data);
  return result;
}
Use of org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData in project hive by apache:
class HBaseUtils, method protoBufStatsForOneColumn().
/**
 * Serializes one column's Thrift statistics into the protobuf form stored in
 * HBase.  The column name/type are required (asserted); the per-type stats
 * struct is chosen by the union's set field.
 *
 * Fix: the DECIMAL_STATS branch previously chained a trailing {@code .build()}
 * onto the outer ColumnStats builder and discarded the resulting message —
 * wasted work that also read as if the statement's result mattered.  The branch
 * now builds only the inner DecimalStats message, matching the other branches.
 *
 * @param partitionColumnStats enclosing stats holder supplying lastAnalyzed; may be null
 * @param colStats the Thrift statistics for a single column
 * @return the protobuf message ready to be written to HBase
 * @throws IOException declared for callers; not thrown directly in this body
 */
private static HbaseMetastoreProto.ColumnStats protoBufStatsForOneColumn(ColumnStatistics partitionColumnStats, ColumnStatisticsObj colStats) throws IOException {
  HbaseMetastoreProto.ColumnStats.Builder builder = HbaseMetastoreProto.ColumnStats.newBuilder();
  if (partitionColumnStats != null) {
    builder.setLastAnalyzed(partitionColumnStats.getStatsDesc().getLastAnalyzed());
  }
  assert colStats.getColType() != null;
  builder.setColumnType(colStats.getColType());
  assert colStats.getColName() != null;
  builder.setColumnName(colStats.getColName());
  ColumnStatisticsData colData = colStats.getStatsData();
  switch (colData.getSetField()) {
    case BOOLEAN_STATS:
      BooleanColumnStatsData boolData = colData.getBooleanStats();
      builder.setNumNulls(boolData.getNumNulls());
      builder.setBoolStats(HbaseMetastoreProto.ColumnStats.BooleanStats.newBuilder()
          .setNumTrues(boolData.getNumTrues())
          .setNumFalses(boolData.getNumFalses())
          .build());
      break;
    case LONG_STATS:
      LongColumnStatsData longData = colData.getLongStats();
      builder.setNumNulls(longData.getNumNulls());
      builder.setNumDistinctValues(longData.getNumDVs());
      if (longData.isSetBitVectors()) {
        builder.setBitVectors(longData.getBitVectors());
      }
      builder.setLongStats(HbaseMetastoreProto.ColumnStats.LongStats.newBuilder()
          .setLowValue(longData.getLowValue())
          .setHighValue(longData.getHighValue())
          .build());
      break;
    case DOUBLE_STATS:
      DoubleColumnStatsData doubleData = colData.getDoubleStats();
      builder.setNumNulls(doubleData.getNumNulls());
      builder.setNumDistinctValues(doubleData.getNumDVs());
      if (doubleData.isSetBitVectors()) {
        builder.setBitVectors(doubleData.getBitVectors());
      }
      builder.setDoubleStats(HbaseMetastoreProto.ColumnStats.DoubleStats.newBuilder()
          .setLowValue(doubleData.getLowValue())
          .setHighValue(doubleData.getHighValue())
          .build());
      break;
    case STRING_STATS:
      StringColumnStatsData stringData = colData.getStringStats();
      builder.setNumNulls(stringData.getNumNulls());
      builder.setNumDistinctValues(stringData.getNumDVs());
      if (stringData.isSetBitVectors()) {
        builder.setBitVectors(stringData.getBitVectors());
      }
      builder.setStringStats(HbaseMetastoreProto.ColumnStats.StringStats.newBuilder()
          .setMaxColLength(stringData.getMaxColLen())
          .setAvgColLength(stringData.getAvgColLen())
          .build());
      break;
    case BINARY_STATS:
      BinaryColumnStatsData binaryData = colData.getBinaryStats();
      builder.setNumNulls(binaryData.getNumNulls());
      // Binary stats reuse the StringStats message shape (max/avg length only).
      builder.setBinaryStats(HbaseMetastoreProto.ColumnStats.StringStats.newBuilder()
          .setMaxColLength(binaryData.getMaxColLen())
          .setAvgColLength(binaryData.getAvgColLen())
          .build());
      break;
    case DECIMAL_STATS:
      DecimalColumnStatsData decimalData = colData.getDecimalStats();
      builder.setNumNulls(decimalData.getNumNulls());
      builder.setNumDistinctValues(decimalData.getNumDVs());
      if (decimalData.isSetBitVectors()) {
        builder.setBitVectors(decimalData.getBitVectors());
      }
      if (decimalData.getLowValue() != null && decimalData.getHighValue() != null) {
        builder.setDecimalStats(HbaseMetastoreProto.ColumnStats.DecimalStats.newBuilder()
            .setLowValue(HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.newBuilder()
                .setUnscaled(ByteString.copyFrom(decimalData.getLowValue().getUnscaled()))
                .setScale(decimalData.getLowValue().getScale())
                .build())
            .setHighValue(HbaseMetastoreProto.ColumnStats.DecimalStats.Decimal.newBuilder()
                .setUnscaled(ByteString.copyFrom(decimalData.getHighValue().getUnscaled()))
                .setScale(decimalData.getHighValue().getScale())
                .build())
            .build());
      } else {
        // No bounds available: store an empty DecimalStats message.
        builder.setDecimalStats(HbaseMetastoreProto.ColumnStats.DecimalStats.newBuilder().clear().build());
      }
      break;
    default:
      throw new RuntimeException("Woh, bad. Unknown stats type!");
  }
  return builder.build();
}
End of aggregated usage examples for BooleanColumnStatsData.