Usage example of org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData in project hive by apache: class TestHBaseStoreIntegration, method tableStatistics.
/**
 * End-to-end check of table-level column statistics: creates a table with one
 * column of each supported type, writes stats in two batches (boolean/long/double,
 * then string/binary/decimal), and verifies the descriptor and every statistic
 * survive the round trip through the store.
 */
@Test
public void tableStatistics() throws Exception {
    long now = System.currentTimeMillis();
    String dbname = "default";
    String tableName = "statstable";
    String boolcol = "boolcol";
    String longcol = "longcol";
    String doublecol = "doublecol";
    String stringcol = "stringcol";
    String binarycol = "bincol";
    String decimalcol = "deccol";
    // Expected per-column statistic values, written below and asserted at the end.
    long trues = 37;
    long falses = 12;
    long booleanNulls = 2;
    long longHigh = 120938479124L;
    long longLow = -12341243213412124L;
    long longNulls = 23;
    long longDVs = 213L;
    double doubleHigh = 123423.23423;
    double doubleLow = 0.00001234233;
    long doubleNulls = 92;
    long doubleDVs = 1234123421L;
    long strMaxLen = 1234;
    double strAvgLen = 32.3;
    long strNulls = 987;
    long strDVs = 906;
    long binMaxLen = 123412987L;
    double binAvgLen = 76.98;
    long binNulls = 976998797L;
    Decimal decHigh = new Decimal();
    decHigh.setScale((short) 3);
    // The exact unscaled-bytes encoding doesn't matter for this test; we only
    // round-trip the Decimal and compare it for equality.
    decHigh.setUnscaled("3876".getBytes());
    Decimal decLow = new Decimal();
    decLow.setScale((short) 3);
    decLow.setUnscaled("38".getBytes());
    long decNulls = 13;
    long decDVs = 923947293L;
    // Create a table with one column of each type we want statistics for.
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema(boolcol, "boolean", "nocomment"));
    cols.add(new FieldSchema(longcol, "long", "nocomment"));
    cols.add(new FieldSchema(doublecol, "double", "nocomment"));
    cols.add(new FieldSchema(stringcol, "varchar(32)", "nocomment"));
    cols.add(new FieldSchema(binarycol, "binary", "nocomment"));
    cols.add(new FieldSchema(decimalcol, "decimal(5, 3)", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
    // FIX: the original "(int) now / 1000" narrowed the long to int *before*
    // dividing (the cast binds tighter than '/'), overflowing for any modern
    // epoch-millis value. Divide first, then narrow.
    Table table = new Table(tableName, dbname, "me", (int) (now / 1000), (int) (now / 1000), 0, sd, null, emptyParameters, null, null, null);
    store.createTable(table);
    // Table-level statistics descriptor, reused by both update batches.
    ColumnStatistics stats = new ColumnStatistics();
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
    desc.setLastAnalyzed(now);
    desc.setDbName(dbname);
    desc.setTableName(tableName);
    desc.setIsTblLevel(true);
    stats.setStatsDesc(desc);
    // First batch: boolean, long, and double columns.
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName(boolcol);
    obj.setColType("boolean");
    ColumnStatisticsData data = new ColumnStatisticsData();
    BooleanColumnStatsData boolData = new BooleanColumnStatsData();
    boolData.setNumTrues(trues);
    boolData.setNumFalses(falses);
    boolData.setNumNulls(booleanNulls);
    data.setBooleanStats(boolData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(longcol);
    obj.setColType("long");
    data = new ColumnStatisticsData();
    LongColumnStatsData longData = new LongColumnStatsData();
    longData.setHighValue(longHigh);
    longData.setLowValue(longLow);
    longData.setNumNulls(longNulls);
    longData.setNumDVs(longDVs);
    data.setLongStats(longData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(doublecol);
    obj.setColType("double");
    data = new ColumnStatisticsData();
    DoubleColumnStatsData doubleData = new DoubleColumnStatsData();
    doubleData.setHighValue(doubleHigh);
    doubleData.setLowValue(doubleLow);
    doubleData.setNumNulls(doubleNulls);
    doubleData.setNumDVs(doubleDVs);
    data.setDoubleStats(doubleData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    store.updateTableColumnStatistics(stats);
    stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol, longcol, doublecol));
    // Individual values are verified after the second batch; here just the count.
    Assert.assertEquals(3, stats.getStatsObjSize());
    // Check that we can fetch just some of the columns.
    stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol));
    Assert.assertEquals(1, stats.getStatsObjSize());
    // Second batch: string, binary, and decimal columns, reusing the same desc.
    stats = new ColumnStatistics();
    stats.setStatsDesc(desc);
    obj = new ColumnStatisticsObj();
    obj.setColName(stringcol);
    obj.setColType("string");
    data = new ColumnStatisticsData();
    StringColumnStatsData strData = new StringColumnStatsData();
    strData.setMaxColLen(strMaxLen);
    strData.setAvgColLen(strAvgLen);
    strData.setNumNulls(strNulls);
    strData.setNumDVs(strDVs);
    data.setStringStats(strData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(binarycol);
    obj.setColType("binary");
    data = new ColumnStatisticsData();
    BinaryColumnStatsData binData = new BinaryColumnStatsData();
    binData.setMaxColLen(binMaxLen);
    binData.setAvgColLen(binAvgLen);
    binData.setNumNulls(binNulls);
    data.setBinaryStats(binData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    obj = new ColumnStatisticsObj();
    obj.setColName(decimalcol);
    obj.setColType("decimal(5,3)");
    data = new ColumnStatisticsData();
    DecimalColumnStatsData decData = new DecimalColumnStatsData();
    LOG.debug("Setting decimal high value to " + decHigh.getScale() + " <" + new String(decHigh.getUnscaled()) + ">");
    decData.setHighValue(decHigh);
    decData.setLowValue(decLow);
    decData.setNumNulls(decNulls);
    decData.setNumDVs(decDVs);
    data.setDecimalStats(decData);
    obj.setStatsData(data);
    stats.addToStatsObj(obj);
    store.updateTableColumnStatistics(stats);
    // Read everything back and verify the descriptor and all six columns, in the
    // order they were written.
    stats = store.getTableColumnStatistics(dbname, tableName, Arrays.asList(boolcol, longcol, doublecol, stringcol, binarycol, decimalcol));
    Assert.assertEquals(now, stats.getStatsDesc().getLastAnalyzed());
    Assert.assertEquals(dbname, stats.getStatsDesc().getDbName());
    Assert.assertEquals(tableName, stats.getStatsDesc().getTableName());
    Assert.assertTrue(stats.getStatsDesc().isIsTblLevel());
    Assert.assertEquals(6, stats.getStatsObjSize());
    ColumnStatisticsData colData = stats.getStatsObj().get(0).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.BOOLEAN_STATS, colData.getSetField());
    boolData = colData.getBooleanStats();
    Assert.assertEquals(trues, boolData.getNumTrues());
    Assert.assertEquals(falses, boolData.getNumFalses());
    Assert.assertEquals(booleanNulls, boolData.getNumNulls());
    colData = stats.getStatsObj().get(1).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.LONG_STATS, colData.getSetField());
    longData = colData.getLongStats();
    Assert.assertEquals(longHigh, longData.getHighValue());
    Assert.assertEquals(longLow, longData.getLowValue());
    Assert.assertEquals(longNulls, longData.getNumNulls());
    Assert.assertEquals(longDVs, longData.getNumDVs());
    colData = stats.getStatsObj().get(2).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.DOUBLE_STATS, colData.getSetField());
    doubleData = colData.getDoubleStats();
    Assert.assertEquals(doubleHigh, doubleData.getHighValue(), 0.01);
    Assert.assertEquals(doubleLow, doubleData.getLowValue(), 0.01);
    Assert.assertEquals(doubleNulls, doubleData.getNumNulls());
    Assert.assertEquals(doubleDVs, doubleData.getNumDVs());
    colData = stats.getStatsObj().get(3).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.STRING_STATS, colData.getSetField());
    strData = colData.getStringStats();
    Assert.assertEquals(strMaxLen, strData.getMaxColLen());
    Assert.assertEquals(strAvgLen, strData.getAvgColLen(), 0.01);
    Assert.assertEquals(strNulls, strData.getNumNulls());
    Assert.assertEquals(strDVs, strData.getNumDVs());
    colData = stats.getStatsObj().get(4).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.BINARY_STATS, colData.getSetField());
    binData = colData.getBinaryStats();
    Assert.assertEquals(binMaxLen, binData.getMaxColLen());
    Assert.assertEquals(binAvgLen, binData.getAvgColLen(), 0.01);
    Assert.assertEquals(binNulls, binData.getNumNulls());
    colData = stats.getStatsObj().get(5).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.DECIMAL_STATS, colData.getSetField());
    decData = colData.getDecimalStats();
    Assert.assertEquals(decHigh, decData.getHighValue());
    Assert.assertEquals(decLow, decData.getLowValue());
    Assert.assertEquals(decNulls, decData.getNumNulls());
    Assert.assertEquals(decDVs, decData.getNumDVs());
}
Usage example of org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData in project hive by apache: class TestHBaseStore, method binaryTableStatistics.
/**
 * Verifies that table-level binary column statistics survive a round trip
 * through the store: write one pre-built BinaryColumnStatsData, read it back,
 * and compare the descriptor and every statistic field.
 */
@Test
public void binaryTableStatistics() throws Exception {
    createMockTable(BINARY_TYPE);
    // Because of the way our mock implementation works we actually need to not
    // create the table before we set statistics on it.
    // Assemble what we are going to write: a default table-level descriptor
    // plus one of the pre-created binary ColumnStatisticsObj instances.
    ColumnStatisticsDesc tblLevelDesc = getMockTblColStatsDesc();
    ColumnStatisticsObj written = binaryColStatsObjs.get(0);
    BinaryColumnStatsData expected = written.getStatsData().getBinaryStats();
    ColumnStatistics toStore = new ColumnStatistics();
    toStore.setStatsDesc(tblLevelDesc);
    toStore.addToStatsObj(written);
    store.updateTableColumnStatistics(toStore);
    // Fetch the statistics back for the binary column.
    ColumnStatistics fetched = store.getTableColumnStatistics(DB, TBL, Arrays.asList(BINARY_COL));
    // The descriptor round-trips intact.
    Assert.assertEquals(tblLevelDesc.getLastAnalyzed(), fetched.getStatsDesc().getLastAnalyzed());
    Assert.assertEquals(DB, fetched.getStatsDesc().getDbName());
    Assert.assertEquals(TBL, fetched.getStatsDesc().getTableName());
    Assert.assertTrue(fetched.getStatsDesc().isIsTblLevel());
    // Exactly one stats object came back, and it is the binary flavor.
    Assert.assertEquals(1, fetched.getStatsObjSize());
    ColumnStatisticsData readData = fetched.getStatsObj().get(0).getStatsData();
    Assert.assertEquals(ColumnStatisticsData._Fields.BINARY_STATS, readData.getSetField());
    // Every binary statistic matches what was written.
    BinaryColumnStatsData actual = readData.getBinaryStats();
    Assert.assertEquals(expected.getMaxColLen(), actual.getMaxColLen());
    Assert.assertEquals(expected.getAvgColLen(), actual.getAvgColLen(), 0.01);
    Assert.assertEquals(expected.getNumNulls(), actual.getNumNulls());
}
Usage example of org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData in project hive by apache: class TestHBaseStore, method mockBinaryStats.
/**
 * Builds a mock ColumnStatisticsObj for the binary test column, with each
 * statistic offset by {@code i} so successive calls produce distinct,
 * predictable values.
 *
 * @param i offset used to vary the generated max length, average length and
 *          null count between invocations
 * @return a ColumnStatisticsObj for BINARY_COL carrying binary stats
 */
private static ColumnStatisticsObj mockBinaryStats(int i) {
    // NOTE: the stray empty statement (";") that used to sit here was dead
    // code and has been removed.
    long maxLen = 123412987L + 10 * i;
    double avgLen = 76.98 + i;
    long nulls = 976998797L + 10 * i;
    ColumnStatisticsObj colStatsObj = new ColumnStatisticsObj();
    colStatsObj.setColName(BINARY_COL);
    colStatsObj.setColType(BINARY_TYPE);
    ColumnStatisticsData data = new ColumnStatisticsData();
    BinaryColumnStatsData binaryData = new BinaryColumnStatsData();
    binaryData.setMaxColLen(maxLen);
    binaryData.setAvgColLen(avgLen);
    binaryData.setNumNulls(nulls);
    data.setBinaryStats(binaryData);
    colStatsObj.setStatsData(data);
    return colStatsObj;
}
Usage example of org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData in project hive by apache: class MetaDataFormatUtils, method extractColumnValues.
/**
 * Flattens a column's schema and (optionally) its statistics into a
 * fixed-width row of strings.
 *
 * The emitted cells are: name, type, then — only when
 * {@code isColStatsAvailable} — nine statistic cells in the positional order
 * [min, max, numNulls, distinctCount, avgColLen, maxColLen, numTrues,
 * numFalses, bitVector], with cells that do not apply to the column's type
 * left as empty strings, and finally the column comment.
 * NOTE(review): presumably this row feeds DESCRIBE-style formatted output —
 * confirm against the callers of MetaDataFormatUtils.
 *
 * @param col the column's schema (name, type, comment)
 * @param isColStatsAvailable whether the nine statistic cells are emitted at all
 * @param columnStatisticsObj the column's stats; null yields nine empty cells
 * @return the assembled row as a String array
 */
public static String[] extractColumnValues(FieldSchema col, boolean isColStatsAvailable, ColumnStatisticsObj columnStatisticsObj) {
List<String> ret = new ArrayList<>();
ret.add(col.getName());
ret.add(col.getType());
if (isColStatsAvailable) {
if (columnStatisticsObj != null) {
ColumnStatisticsData csd = columnStatisticsObj.getStatsData();
// At most one isSetXxxStats() branch fires per row — the stats data holds a
// single flavor (union-style; getSetField() elsewhere suggests a thrift
// union, so the branch order below should not matter — confirm).
// @formatter:off
if (csd.isSetBinaryStats()) {
// binary: nulls, lengths, bit vector; no min/max/DVs/true-false counts.
BinaryColumnStatsData bcsd = csd.getBinaryStats();
ret.addAll(Lists.newArrayList("", "", "" + bcsd.getNumNulls(), "", "" + bcsd.getAvgColLen(), "" + bcsd.getMaxColLen(), "", "", convertToString(bcsd.getBitVectors())));
} else if (csd.isSetStringStats()) {
// string: like binary but also has a distinct-value count.
StringColumnStatsData scsd = csd.getStringStats();
ret.addAll(Lists.newArrayList("", "", "" + scsd.getNumNulls(), "" + scsd.getNumDVs(), "" + scsd.getAvgColLen(), "" + scsd.getMaxColLen(), "", "", convertToString(scsd.getBitVectors())));
} else if (csd.isSetBooleanStats()) {
// boolean: nulls plus true/false counts; no min/max/lengths/DVs.
BooleanColumnStatsData bcsd = csd.getBooleanStats();
ret.addAll(Lists.newArrayList("", "", "" + bcsd.getNumNulls(), "", "", "", "" + bcsd.getNumTrues(), "" + bcsd.getNumFalses(), convertToString(bcsd.getBitVectors())));
} else if (csd.isSetDecimalStats()) {
// decimal: low/high need conversion from the Decimal struct.
DecimalColumnStatsData dcsd = csd.getDecimalStats();
ret.addAll(Lists.newArrayList(convertToString(dcsd.getLowValue()), convertToString(dcsd.getHighValue()), "" + dcsd.getNumNulls(), "" + dcsd.getNumDVs(), "", "", "", "", convertToString(dcsd.getBitVectors())));
} else if (csd.isSetDoubleStats()) {
// double: low/high are primitives, rendered via string concatenation.
DoubleColumnStatsData dcsd = csd.getDoubleStats();
ret.addAll(Lists.newArrayList("" + dcsd.getLowValue(), "" + dcsd.getHighValue(), "" + dcsd.getNumNulls(), "" + dcsd.getNumDVs(), "", "", "", "", convertToString(dcsd.getBitVectors())));
} else if (csd.isSetLongStats()) {
LongColumnStatsData lcsd = csd.getLongStats();
ret.addAll(Lists.newArrayList("" + lcsd.getLowValue(), "" + lcsd.getHighValue(), "" + lcsd.getNumNulls(), "" + lcsd.getNumDVs(), "", "", "", "", convertToString(lcsd.getBitVectors())));
} else if (csd.isSetDateStats()) {
// date: low/high need conversion from the Date struct.
DateColumnStatsData dcsd = csd.getDateStats();
ret.addAll(Lists.newArrayList(convertToString(dcsd.getLowValue()), convertToString(dcsd.getHighValue()), "" + dcsd.getNumNulls(), "" + dcsd.getNumDVs(), "", "", "", "", convertToString(dcsd.getBitVectors())));
}
// @formatter:on
} else {
// Stats requested but not present for this column: pad with empty cells so
// the row stays the same width.
ret.addAll(Lists.newArrayList("", "", "", "", "", "", "", "", ""));
}
}
ret.add(getComment(col));
return ret.toArray(new String[] {});
}
Usage example of org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData in project hive by apache: class ColumnStatisticsObjTranslator, method unpackPrimitiveObject.
/**
 * Fills in {@code statsObj} from one primitive field of a statistics row.
 *
 * Operates in two phases selected by {@code fieldName}:
 * 1. "columntype": reads the column-type string from {@code o} and installs an
 *    empty stats container of the matching flavor (long/double/string/boolean/
 *    binary/decimal/date) on {@code statsObj};
 * 2. any other field: dispatches to the unpackXxxStats helper that matches the
 *    container installed in phase 1 — so "columntype" must arrive first.
 *
 * A null {@code o} is ignored. A type string matching none of the known names
 * silently leaves {@code statsObj} without stats data — NOTE(review): confirm
 * whether that case is possible upstream or deserves an error.
 *
 * @param oi object inspector for {@code o}
 * @param o the field's value; nulls are skipped
 * @param fieldName name of the struct field being unpacked
 * @param statsObj the statistics object being populated
 * @throws UnsupportedDoubleException propagated from the unpack helpers
 *         (presumably on doubles they cannot represent — confirm)
 */
private static void unpackPrimitiveObject(ObjectInspector oi, Object o, String fieldName, ColumnStatisticsObj statsObj) throws UnsupportedDoubleException {
if (o == null) {
return;
}
// First infer the type of object
if (fieldName.equals("columntype")) {
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
String s = ((StringObjectInspector) poi).getPrimitiveJavaObject(o);
// Install an empty stats container whose flavor matches the type string;
// comparisons are case-insensitive.
ColumnStatisticsData statsData = new ColumnStatisticsData();
if (s.equalsIgnoreCase("long")) {
LongColumnStatsDataInspector longStats = new LongColumnStatsDataInspector();
statsData.setLongStats(longStats);
statsObj.setStatsData(statsData);
} else if (s.equalsIgnoreCase("double")) {
DoubleColumnStatsDataInspector doubleStats = new DoubleColumnStatsDataInspector();
statsData.setDoubleStats(doubleStats);
statsObj.setStatsData(statsData);
} else if (s.equalsIgnoreCase("string")) {
StringColumnStatsDataInspector stringStats = new StringColumnStatsDataInspector();
statsData.setStringStats(stringStats);
statsObj.setStatsData(statsData);
} else if (s.equalsIgnoreCase("boolean")) {
// Boolean stats use the plain thrift type; no inspector variant is used here.
BooleanColumnStatsData booleanStats = new BooleanColumnStatsData();
statsData.setBooleanStats(booleanStats);
statsObj.setStatsData(statsData);
} else if (s.equalsIgnoreCase("binary")) {
// Binary stats likewise use the plain thrift type.
BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
statsData.setBinaryStats(binaryStats);
statsObj.setStatsData(statsData);
} else if (s.equalsIgnoreCase("decimal")) {
DecimalColumnStatsDataInspector decimalStats = new DecimalColumnStatsDataInspector();
statsData.setDecimalStats(decimalStats);
statsObj.setStatsData(statsData);
} else if (s.equalsIgnoreCase("date")) {
DateColumnStatsDataInspector dateStats = new DateColumnStatsDataInspector();
statsData.setDateStats(dateStats);
statsObj.setStatsData(statsData);
}
} else {
// invoke the right unpack method depending on data type of the column
if (statsObj.getStatsData().isSetBooleanStats()) {
unpackBooleanStats(oi, o, fieldName, statsObj);
} else if (statsObj.getStatsData().isSetLongStats()) {
unpackLongStats(oi, o, fieldName, statsObj);
} else if (statsObj.getStatsData().isSetDoubleStats()) {
unpackDoubleStats(oi, o, fieldName, statsObj);
} else if (statsObj.getStatsData().isSetStringStats()) {
unpackStringStats(oi, o, fieldName, statsObj);
} else if (statsObj.getStatsData().isSetBinaryStats()) {
unpackBinaryStats(oi, o, fieldName, statsObj);
} else if (statsObj.getStatsData().isSetDecimalStats()) {
unpackDecimalStats(oi, o, fieldName, statsObj);
} else if (statsObj.getStatsData().isSetDateStats()) {
unpackDateStats(oi, o, fieldName, statsObj);
}
}
}
End of aggregated BinaryColumnStatsData usage examples.