Use of org.apache.hadoop.hive.metastore.api.LongColumnStatsData in project hive by apache.
The class StatObjectConverter, method getTableColumnStatisticsObj.
public static ColumnStatisticsObj getTableColumnStatisticsObj(MTableColumnStatistics mStatsObj) {
  ColumnStatisticsObj statsObj = new ColumnStatisticsObj();
  statsObj.setColType(mStatsObj.getColType());
  statsObj.setColName(mStatsObj.getColName());
  String colType = mStatsObj.getColType().toLowerCase();
  ColumnStatisticsData colStatsData = new ColumnStatisticsData();
  if (colType.equals("boolean")) {
    BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
    boolStats.setNumFalses(mStatsObj.getNumFalses());
    boolStats.setNumTrues(mStatsObj.getNumTrues());
    boolStats.setNumNulls(mStatsObj.getNumNulls());
    colStatsData.setBooleanStats(boolStats);
  } else if (colType.equals("string") || colType.startsWith("varchar") || colType.startsWith("char")) {
    StringColumnStatsData stringStats = new StringColumnStatsData();
    stringStats.setNumNulls(mStatsObj.getNumNulls());
    stringStats.setAvgColLen(mStatsObj.getAvgColLen());
    stringStats.setMaxColLen(mStatsObj.getMaxColLen());
    stringStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setStringStats(stringStats);
  } else if (colType.equals("binary")) {
    BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
    binaryStats.setNumNulls(mStatsObj.getNumNulls());
    binaryStats.setAvgColLen(mStatsObj.getAvgColLen());
    binaryStats.setMaxColLen(mStatsObj.getMaxColLen());
    colStatsData.setBinaryStats(binaryStats);
  } else if (colType.equals("bigint") || colType.equals("int") || colType.equals("smallint")
      || colType.equals("tinyint") || colType.equals("timestamp")) {
    LongColumnStatsData longStats = new LongColumnStatsData();
    longStats.setNumNulls(mStatsObj.getNumNulls());
    Long longHighValue = mStatsObj.getLongHighValue();
    if (longHighValue != null) {
      longStats.setHighValue(longHighValue);
    }
    Long longLowValue = mStatsObj.getLongLowValue();
    if (longLowValue != null) {
      longStats.setLowValue(longLowValue);
    }
    longStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setLongStats(longStats);
  } else if (colType.equals("double") || colType.equals("float")) {
    DoubleColumnStatsData doubleStats = new DoubleColumnStatsData();
    doubleStats.setNumNulls(mStatsObj.getNumNulls());
    Double doubleHighValue = mStatsObj.getDoubleHighValue();
    if (doubleHighValue != null) {
      doubleStats.setHighValue(doubleHighValue);
    }
    Double doubleLowValue = mStatsObj.getDoubleLowValue();
    if (doubleLowValue != null) {
      doubleStats.setLowValue(doubleLowValue);
    }
    doubleStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setDoubleStats(doubleStats);
  } else if (colType.startsWith("decimal")) {
    DecimalColumnStatsData decimalStats = new DecimalColumnStatsData();
    decimalStats.setNumNulls(mStatsObj.getNumNulls());
    String decimalHighValue = mStatsObj.getDecimalHighValue();
    if (decimalHighValue != null) {
      decimalStats.setHighValue(createThriftDecimal(decimalHighValue));
    }
    String decimalLowValue = mStatsObj.getDecimalLowValue();
    if (decimalLowValue != null) {
      decimalStats.setLowValue(createThriftDecimal(decimalLowValue));
    }
    decimalStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setDecimalStats(decimalStats);
  } else if (colType.equals("date")) {
    DateColumnStatsData dateStats = new DateColumnStatsData();
    dateStats.setNumNulls(mStatsObj.getNumNulls());
    Long highValue = mStatsObj.getLongHighValue();
    if (highValue != null) {
      dateStats.setHighValue(new Date(highValue));
    }
    Long lowValue = mStatsObj.getLongLowValue();
    if (lowValue != null) {
      dateStats.setLowValue(new Date(lowValue));
    }
    dateStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setDateStats(dateStats);
  }
  statsObj.setStatsData(colStatsData);
  return statsObj;
}
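A minimal calling sketch, assuming the JDO model object has already been loaded from the metastore backing store; the fetchModelStats helper and the table/column names below are hypothetical, not Hive APIs:

// Hypothetical: mStats would normally come from a JDO query inside
// ObjectStore; fetchModelStats is a placeholder for that lookup.
MTableColumnStatistics mStats = fetchModelStats("default", "web_logs", "user_id");
ColumnStatisticsObj thriftObj = StatObjectConverter.getTableColumnStatisticsObj(mStats);
// For a bigint column, the long-stats branch above is taken:
LongColumnStatsData longStats = thriftObj.getStatsData().getLongStats();
System.out.println("NDV estimate: " + longStats.getNumDVs());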
Use of org.apache.hadoop.hive.metastore.api.LongColumnStatsData in project hive by apache.
The class StatObjectConverter, method getPartitionColumnStatisticsObj.
public static ColumnStatisticsObj getPartitionColumnStatisticsObj(MPartitionColumnStatistics mStatsObj) {
  ColumnStatisticsObj statsObj = new ColumnStatisticsObj();
  statsObj.setColType(mStatsObj.getColType());
  statsObj.setColName(mStatsObj.getColName());
  String colType = mStatsObj.getColType().toLowerCase();
  ColumnStatisticsData colStatsData = new ColumnStatisticsData();
  if (colType.equals("boolean")) {
    BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
    boolStats.setNumFalses(mStatsObj.getNumFalses());
    boolStats.setNumTrues(mStatsObj.getNumTrues());
    boolStats.setNumNulls(mStatsObj.getNumNulls());
    colStatsData.setBooleanStats(boolStats);
  } else if (colType.equals("string") || colType.startsWith("varchar") || colType.startsWith("char")) {
    StringColumnStatsData stringStats = new StringColumnStatsData();
    stringStats.setNumNulls(mStatsObj.getNumNulls());
    stringStats.setAvgColLen(mStatsObj.getAvgColLen());
    stringStats.setMaxColLen(mStatsObj.getMaxColLen());
    stringStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setStringStats(stringStats);
  } else if (colType.equals("binary")) {
    BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
    binaryStats.setNumNulls(mStatsObj.getNumNulls());
    binaryStats.setAvgColLen(mStatsObj.getAvgColLen());
    binaryStats.setMaxColLen(mStatsObj.getMaxColLen());
    colStatsData.setBinaryStats(binaryStats);
  } else if (colType.equals("tinyint") || colType.equals("smallint") || colType.equals("int")
      || colType.equals("bigint") || colType.equals("timestamp")) {
    LongColumnStatsData longStats = new LongColumnStatsData();
    longStats.setNumNulls(mStatsObj.getNumNulls());
    if (mStatsObj.getLongHighValue() != null) {
      longStats.setHighValue(mStatsObj.getLongHighValue());
    }
    if (mStatsObj.getLongLowValue() != null) {
      longStats.setLowValue(mStatsObj.getLongLowValue());
    }
    longStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setLongStats(longStats);
  } else if (colType.equals("double") || colType.equals("float")) {
    DoubleColumnStatsData doubleStats = new DoubleColumnStatsData();
    doubleStats.setNumNulls(mStatsObj.getNumNulls());
    if (mStatsObj.getDoubleHighValue() != null) {
      doubleStats.setHighValue(mStatsObj.getDoubleHighValue());
    }
    if (mStatsObj.getDoubleLowValue() != null) {
      doubleStats.setLowValue(mStatsObj.getDoubleLowValue());
    }
    doubleStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setDoubleStats(doubleStats);
  } else if (colType.startsWith("decimal")) {
    DecimalColumnStatsData decimalStats = new DecimalColumnStatsData();
    decimalStats.setNumNulls(mStatsObj.getNumNulls());
    if (mStatsObj.getDecimalHighValue() != null) {
      decimalStats.setHighValue(createThriftDecimal(mStatsObj.getDecimalHighValue()));
    }
    if (mStatsObj.getDecimalLowValue() != null) {
      decimalStats.setLowValue(createThriftDecimal(mStatsObj.getDecimalLowValue()));
    }
    decimalStats.setNumDVs(mStatsObj.getNumDVs());
    colStatsData.setDecimalStats(decimalStats);
} else if (colType.equals("date")) {
DateColumnStatsData dateStats = new DateColumnStatsData();
dateStats.setNumNulls(mStatsObj.getNumNulls());
dateStats.setHighValue(new Date(mStatsObj.getLongHighValue()));
dateStats.setLowValue(new Date(mStatsObj.getLongLowValue()));
dateStats.setNumDVs(mStatsObj.getNumDVs());
colStatsData.setDateStats(dateStats);
}
  statsObj.setStatsData(colStatsData);
  return statsObj;
}
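Note that the date branch wraps the stored long in the thrift Date struct, whose single field is the number of days since the Unix epoch. A small sketch of that wrapping, with an invented value:

// The model layer stores dates as days since 1970-01-01; the thrift
// org.apache.hadoop.hive.metastore.api.Date carries that same long.
long daysSinceEpoch = 19000L; // 2022-01-08
Date thriftDate = new Date(daysSinceEpoch);
assert thriftDate.getDaysSinceEpoch() == 19000L;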
Use of org.apache.hadoop.hive.metastore.api.LongColumnStatsData in project hive by apache.
The class ColumnStatsAggregatorFactory, method newColumnStaticsObj.
public static ColumnStatisticsObj newColumnStaticsObj(String colName, String colType, _Fields type) {
  ColumnStatisticsObj cso = new ColumnStatisticsObj();
  ColumnStatisticsData csd = new ColumnStatisticsData();
  cso.setColName(colName);
  cso.setColType(colType);
  switch (type) {
    case BOOLEAN_STATS:
      csd.setBooleanStats(new BooleanColumnStatsData());
      break;
    case LONG_STATS:
      csd.setLongStats(new LongColumnStatsData());
      break;
    case DOUBLE_STATS:
      csd.setDoubleStats(new DoubleColumnStatsData());
      break;
    case STRING_STATS:
      csd.setStringStats(new StringColumnStatsData());
      break;
    case BINARY_STATS:
      csd.setBinaryStats(new BinaryColumnStatsData());
      break;
    case DECIMAL_STATS:
      csd.setDecimalStats(new DecimalColumnStatsData());
      break;
    default:
      throw new RuntimeException("Woh, bad. Unknown stats type!");
  }
  cso.setStatsData(csd);
  return cso;
}
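A short sketch of driving the factory by hand; in LongColumnStatsAggregator below it is instead called with the set field taken from incoming stats data. _Fields is the field enum of the thrift ColumnStatisticsData union; the column name and values here are illustrative:

// Seed an empty long-stats holder for a bigint column:
ColumnStatisticsObj cso = ColumnStatsAggregatorFactory.newColumnStaticsObj(
    "user_id", "bigint", ColumnStatisticsData._Fields.LONG_STATS);
// The union now carries an empty LongColumnStatsData to be filled in:
LongColumnStatsData stats = cso.getStatsData().getLongStats();
stats.setNumNulls(0);
stats.setNumDVs(1000);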
Use of org.apache.hadoop.hive.metastore.api.LongColumnStatsData in project hive by apache.
The class LongColumnStatsAggregator, method aggregate.
@Override
public ColumnStatisticsObj aggregate(String colName, List<String> partNames, List<ColumnStatistics> css) throws MetaException {
  ColumnStatisticsObj statsObj = null;
  // check if all the ColumnStatisticsObjs contain stats and all the ndv are
  // bitvectors
  boolean doAllPartitionContainStats = partNames.size() == css.size();
  boolean isNDVBitVectorSet = true;
  String colType = null;
  for (ColumnStatistics cs : css) {
    if (cs.getStatsObjSize() != 1) {
      throw new MetaException("The number of columns should be exactly one in aggrStats, but found " + cs.getStatsObjSize());
    }
    ColumnStatisticsObj cso = cs.getStatsObjIterator().next();
    if (statsObj == null) {
      colType = cso.getColType();
      statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType, cso.getStatsData().getSetField());
    }
    if (numBitVectors <= 0 || !cso.getStatsData().getLongStats().isSetBitVectors()
        || cso.getStatsData().getLongStats().getBitVectors().length() == 0) {
      isNDVBitVectorSet = false;
      break;
    }
  }
  ColumnStatisticsData columnStatisticsData = new ColumnStatisticsData();
  if (doAllPartitionContainStats || css.size() < 2) {
    LongColumnStatsData aggregateData = null;
    long lowerBound = 0;
    long higherBound = 0;
    double densityAvgSum = 0.0;
    NumDistinctValueEstimator ndvEstimator = null;
    if (isNDVBitVectorSet) {
      ndvEstimator = new NumDistinctValueEstimator(numBitVectors);
    }
    for (ColumnStatistics cs : css) {
      ColumnStatisticsObj cso = cs.getStatsObjIterator().next();
      LongColumnStatsData newData = cso.getStatsData().getLongStats();
      if (useDensityFunctionForNDVEstimation) {
        lowerBound = Math.max(lowerBound, newData.getNumDVs());
        higherBound += newData.getNumDVs();
        densityAvgSum += (newData.getHighValue() - newData.getLowValue()) / newData.getNumDVs();
      }
      if (isNDVBitVectorSet) {
        ndvEstimator.mergeEstimators(new NumDistinctValueEstimator(newData.getBitVectors(), ndvEstimator.getnumBitVectors()));
      }
      if (aggregateData == null) {
        aggregateData = newData.deepCopy();
      } else {
        aggregateData.setLowValue(Math.min(aggregateData.getLowValue(), newData.getLowValue()));
        aggregateData.setHighValue(Math.max(aggregateData.getHighValue(), newData.getHighValue()));
        aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        aggregateData.setNumDVs(Math.max(aggregateData.getNumDVs(), newData.getNumDVs()));
      }
    }
    if (isNDVBitVectorSet) {
      // if all the ColumnStatisticsObjs contain bitvectors, we do not need to
      // use uniform distribution assumption because we can merge bitvectors
      // to get a good estimation.
      aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
    } else {
      if (useDensityFunctionForNDVEstimation) {
        // We have estimation, lowerbound and higherbound. We use estimation
        // if it is between lowerbound and higherbound.
        double densityAvg = densityAvgSum / partNames.size();
        long estimation = (long) ((aggregateData.getHighValue() - aggregateData.getLowValue()) / densityAvg);
        if (estimation < lowerBound) {
          aggregateData.setNumDVs(lowerBound);
        } else if (estimation > higherBound) {
          aggregateData.setNumDVs(higherBound);
        } else {
          aggregateData.setNumDVs(estimation);
        }
      } else {
        // Without useDensityFunctionForNDVEstimation, we just use the
        // default one, which is the max of all the partitions and it is
        // already done.
      }
    }
    columnStatisticsData.setLongStats(aggregateData);
  } else {
    // we need extrapolation
    Map<String, Integer> indexMap = new HashMap<String, Integer>();
    for (int index = 0; index < partNames.size(); index++) {
      indexMap.put(partNames.get(index), index);
    }
    Map<String, Double> adjustedIndexMap = new HashMap<String, Double>();
    Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<String, ColumnStatisticsData>();
    // while we scan the css, we also get the densityAvg, lower bound and
    // higher bound when useDensityFunctionForNDVEstimation is true.
    double densityAvgSum = 0.0;
    if (!isNDVBitVectorSet) {
      // the traditional extrapolation methods.
      for (ColumnStatistics cs : css) {
        String partName = cs.getStatsDesc().getPartName();
        ColumnStatisticsObj cso = cs.getStatsObjIterator().next();
        LongColumnStatsData newData = cso.getStatsData().getLongStats();
        if (useDensityFunctionForNDVEstimation) {
          densityAvgSum += (newData.getHighValue() - newData.getLowValue()) / newData.getNumDVs();
        }
        adjustedIndexMap.put(partName, (double) indexMap.get(partName));
        adjustedStatsMap.put(partName, cso.getStatsData());
      }
    } else {
      // we first merge all the adjacent bitvectors that we could merge and
      // derive new partition names and index.
      NumDistinctValueEstimator ndvEstimator = new NumDistinctValueEstimator(numBitVectors);
      StringBuilder pseudoPartName = new StringBuilder();
      double pseudoIndexSum = 0;
      int length = 0;
      int curIndex = -1;
      LongColumnStatsData aggregateData = null;
      for (ColumnStatistics cs : css) {
        String partName = cs.getStatsDesc().getPartName();
        ColumnStatisticsObj cso = cs.getStatsObjIterator().next();
        LongColumnStatsData newData = cso.getStatsData().getLongStats();
        // already checked it before.
        if (indexMap.get(partName) != curIndex) {
          // There is bitvector, but it is not adjacent to the previous ones.
          if (length > 0) {
            // we have to set ndv
            adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
            aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
            ColumnStatisticsData csd = new ColumnStatisticsData();
            csd.setLongStats(aggregateData);
            adjustedStatsMap.put(pseudoPartName.toString(), csd);
            if (useDensityFunctionForNDVEstimation) {
              densityAvgSum += (aggregateData.getHighValue() - aggregateData.getLowValue()) / aggregateData.getNumDVs();
            }
            // reset everything
            pseudoPartName = new StringBuilder();
            pseudoIndexSum = 0;
            length = 0;
          }
          aggregateData = null;
        }
        curIndex = indexMap.get(partName);
        pseudoPartName.append(partName);
        pseudoIndexSum += curIndex;
        length++;
        curIndex++;
        if (aggregateData == null) {
          aggregateData = newData.deepCopy();
        } else {
          aggregateData.setLowValue(Math.min(aggregateData.getLowValue(), newData.getLowValue()));
          aggregateData.setHighValue(Math.max(aggregateData.getHighValue(), newData.getHighValue()));
          aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        }
        ndvEstimator.mergeEstimators(new NumDistinctValueEstimator(newData.getBitVectors(), ndvEstimator.getnumBitVectors()));
      }
      if (length > 0) {
        // we have to set ndv
        adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
        aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
        ColumnStatisticsData csd = new ColumnStatisticsData();
        csd.setLongStats(aggregateData);
        adjustedStatsMap.put(pseudoPartName.toString(), csd);
        if (useDensityFunctionForNDVEstimation) {
          densityAvgSum += (aggregateData.getHighValue() - aggregateData.getLowValue()) / aggregateData.getNumDVs();
        }
      }
    }
    extrapolate(columnStatisticsData, partNames.size(), css.size(), adjustedIndexMap, adjustedStatsMap, densityAvgSum / adjustedStatsMap.size());
  }
  statsObj.setStatsData(columnStatisticsData);
  return statsObj;
}
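The density heuristic in the first branch can be checked with a small worked example (all numbers invented): two partitions covering the value ranges [0, 100] and [100, 200], each reporting 50 distinct values.

// densityAvgSum accumulates (high - low) / NDV per partition:
double densityAvgSum = (100.0 - 0.0) / 50 + (200.0 - 100.0) / 50; // 2.0 + 2.0 = 4.0
double densityAvg = densityAvgSum / 2;                            // 2.0
long estimation = (long) ((200 - 0) / densityAvg);                // 100
// lowerBound = max(50, 50) = 50; higherBound = 50 + 50 = 100.
// The estimate 100 lies within [50, 100], so it becomes the aggregate NDV.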
Use of org.apache.hadoop.hive.metastore.api.LongColumnStatsData in project hive by apache.
The class ColumnStatsMergerFactory, method newColumnStaticsObj.
public static ColumnStatisticsObj newColumnStaticsObj(String colName, String colType, _Fields type) {
  ColumnStatisticsObj cso = new ColumnStatisticsObj();
  ColumnStatisticsData csd = new ColumnStatisticsData();
  cso.setColName(colName);
  cso.setColType(colType);
  switch (type) {
    case BOOLEAN_STATS:
      csd.setBooleanStats(new BooleanColumnStatsData());
      break;
    case LONG_STATS:
      csd.setLongStats(new LongColumnStatsData());
      break;
    case DOUBLE_STATS:
      csd.setDoubleStats(new DoubleColumnStatsData());
      break;
    case STRING_STATS:
      csd.setStringStats(new StringColumnStatsData());
      break;
    case BINARY_STATS:
      csd.setBinaryStats(new BinaryColumnStatsData());
      break;
    case DECIMAL_STATS:
      csd.setDecimalStats(new DecimalColumnStatsData());
      break;
    default:
      throw new RuntimeException("Woh, bad. Unknown stats type!");
  }
  cso.setStatsData(csd);
  return cso;
}
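Usage mirrors the aggregator factory above; a minimal sketch (the column name, type, and values are illustrative):

// Create an empty DOUBLE_STATS union member to accumulate merged stats into:
ColumnStatisticsObj merged = ColumnStatsMergerFactory.newColumnStaticsObj(
    "price", "double", ColumnStatisticsData._Fields.DOUBLE_STATS);
DoubleColumnStatsData d = merged.getStatsData().getDoubleStats();
d.setLowValue(0.99);
d.setHighValue(199.99);
d.setNumNulls(0);
d.setNumDVs(240);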