Usage example of org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector in the Apache Hive project: class ColumnStatsMergerFactory, method newColumnStaticsObj.
/**
 * Creates an empty {@link ColumnStatisticsObj} whose {@link ColumnStatisticsData}
 * union is pre-initialized with the statistics variant matching the given field type.
 *
 * @param colName the column name; must not be null
 * @param colType the column type name; must not be null
 * @param type the stats-data union field selecting which variant to allocate; must not be null
 * @return a new {@link ColumnStatisticsObj} carrying an empty stats-data object of the requested kind
 * @throws NullPointerException if any argument is null
 * @throws IllegalArgumentException if {@code type} is not a recognized stats variant
 */
public static ColumnStatisticsObj newColumnStaticsObj(final String colName, final String colType, final _Fields type) {
  // Fail fast on bad arguments before allocating anything.
  Objects.requireNonNull(colName, "Column name cannot be null");
  Objects.requireNonNull(colType, "Column type cannot be null");
  Objects.requireNonNull(type, "Field type cannot be null");
  final ColumnStatisticsData csd = new ColumnStatisticsData();
  switch (type) {
  case BOOLEAN_STATS:
    csd.setBooleanStats(new BooleanColumnStatsData());
    break;
  case LONG_STATS:
    csd.setLongStats(new LongColumnStatsDataInspector());
    break;
  case DOUBLE_STATS:
    csd.setDoubleStats(new DoubleColumnStatsDataInspector());
    break;
  case STRING_STATS:
    csd.setStringStats(new StringColumnStatsDataInspector());
    break;
  case BINARY_STATS:
    csd.setBinaryStats(new BinaryColumnStatsData());
    break;
  case DECIMAL_STATS:
    csd.setDecimalStats(new DecimalColumnStatsDataInspector());
    break;
  case DATE_STATS:
    csd.setDateStats(new DateColumnStatsDataInspector());
    break;
  case TIMESTAMP_STATS:
    csd.setTimestampStats(new TimestampColumnStatsDataInspector());
    break;
  default:
    throw new IllegalArgumentException("Unknown stats type: " + type);
  }
  final ColumnStatisticsObj cso = new ColumnStatisticsObj();
  cso.setColName(colName);
  cso.setColType(colType);
  cso.setStatsData(csd);
  return cso;
}
Usage example of org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector in the Apache Hive project: class StringColumnStatsAggregator, method extrapolate.
@Override
public void extrapolate(ColumnStatisticsData extrapolateData, int numParts, int numPartsWithStats,
    Map<String, Double> adjustedIndexMap, Map<String, ColumnStatisticsData> adjustedStatsMap, double densityAvg) {
  // Extrapolates per-partition string statistics (avgColLen, maxColLen, numNulls, NDV)
  // from the partitions that have stats up to the full partition count.
  int rightBorderInd = numParts;
  StringColumnStatsDataInspector extrapolateStringData = new StringColumnStatsDataInspector();
  Map<String, StringColumnStatsData> extractedAdjustedStatsMap = new HashMap<>();
  for (Map.Entry<String, ColumnStatisticsData> entry : adjustedStatsMap.entrySet()) {
    extractedAdjustedStatsMap.put(entry.getKey(), entry.getValue().getStringStats());
  }
  List<Map.Entry<String, StringColumnStatsData>> list = new LinkedList<>(extractedAdjustedStatsMap.entrySet());

  // avgColLen: sort by avgColLen and extrapolate between the partitions holding min and max.
  Collections.sort(list, new Comparator<Map.Entry<String, StringColumnStatsData>>() {
    @Override
    public int compare(Map.Entry<String, StringColumnStatsData> o1, Map.Entry<String, StringColumnStatsData> o2) {
      return Double.compare(o1.getValue().getAvgColLen(), o2.getValue().getAvgColLen());
    }
  });
  double minInd = adjustedIndexMap.get(list.get(0).getKey());
  double maxInd = adjustedIndexMap.get(list.get(list.size() - 1).getKey());
  double avgColLen = linearExtrapolate(list.get(0).getValue().getAvgColLen(),
      list.get(list.size() - 1).getValue().getAvgColLen(), minInd, maxInd, rightBorderInd);

  // maxColLen: sort by maxColLen.
  // BUG FIX: the min/max endpoints were previously read with getAvgColLen() even though the
  // list was sorted by getMaxColLen(), so the extrapolated maxColLen used the wrong statistic.
  Collections.sort(list, new Comparator<Map.Entry<String, StringColumnStatsData>>() {
    @Override
    public int compare(Map.Entry<String, StringColumnStatsData> o1, Map.Entry<String, StringColumnStatsData> o2) {
      return Long.compare(o1.getValue().getMaxColLen(), o2.getValue().getMaxColLen());
    }
  });
  minInd = adjustedIndexMap.get(list.get(0).getKey());
  maxInd = adjustedIndexMap.get(list.get(list.size() - 1).getKey());
  double maxColLen = linearExtrapolate(list.get(0).getValue().getMaxColLen(),
      list.get(list.size() - 1).getValue().getMaxColLen(), minInd, maxInd, rightBorderInd);

  // numNulls: sum over the partitions with stats, then scale up to all partitions.
  long numNulls = 0;
  for (Map.Entry<String, StringColumnStatsData> entry : extractedAdjustedStatsMap.entrySet()) {
    numNulls += entry.getValue().getNumNulls();
  }
  numNulls = numNulls * numParts / numPartsWithStats;

  // NDV: sort by numDVs and extrapolate the same way.
  Collections.sort(list, new Comparator<Map.Entry<String, StringColumnStatsData>>() {
    @Override
    public int compare(Map.Entry<String, StringColumnStatsData> o1, Map.Entry<String, StringColumnStatsData> o2) {
      return Long.compare(o1.getValue().getNumDVs(), o2.getValue().getNumDVs());
    }
  });
  minInd = adjustedIndexMap.get(list.get(0).getKey());
  maxInd = adjustedIndexMap.get(list.get(list.size() - 1).getKey());
  long ndv = (long) linearExtrapolate(list.get(0).getValue().getNumDVs(),
      list.get(list.size() - 1).getValue().getNumDVs(), minInd, maxInd, rightBorderInd);

  extrapolateStringData.setAvgColLen(avgColLen);
  extrapolateStringData.setMaxColLen((long) maxColLen);
  extrapolateStringData.setNumNulls(numNulls);
  extrapolateStringData.setNumDVs(ndv);
  extrapolateData.setStringStats(extrapolateStringData);
}

/**
 * Linearly extrapolates a statistic from the partitions holding its min and max values
 * out to the border of the full partition range.
 *
 * @param min statistic value at the partition with adjusted index {@code minInd}
 * @param max statistic value at the partition with adjusted index {@code maxInd}
 * @param minInd adjusted index of the partition holding the minimum
 * @param maxInd adjusted index of the partition holding the maximum
 * @param rightBorderInd index of the right border (total number of partitions)
 * @return the extrapolated statistic value
 */
private static double linearExtrapolate(double min, double max, double minInd, double maxInd, int rightBorderInd) {
  if (minInd == maxInd) {
    return min;
  } else if (minInd < maxInd) {
    // right border is the max
    return min + (max - min) * (rightBorderInd - minInd) / (maxInd - minInd);
  } else {
    // left border is the max
    return min + (max - min) * minInd / (minInd - maxInd);
  }
}
Usage example of org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector in the Apache Hive project: class ColumnStatisticsObjTranslator, method unpackPrimitiveObject (field-name variant).
/**
 * Unpacks one primitive value into the given stats object. The special field name
 * "columntype" selects and allocates the stats-data variant; every other field name
 * is routed to the unpack method matching the variant already set on {@code statsObj}.
 */
private static void unpackPrimitiveObject(ObjectInspector oi, Object o, String fieldName, ColumnStatisticsObj statsObj) throws UnsupportedDoubleException {
  if (o == null) {
    return;
  }
  if (!fieldName.equals("columntype")) {
    // Dispatch to the unpack method matching the stats variant chosen earlier.
    if (statsObj.getStatsData().isSetBooleanStats()) {
      unpackBooleanStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetLongStats()) {
      unpackLongStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetDoubleStats()) {
      unpackDoubleStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetStringStats()) {
      unpackStringStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetBinaryStats()) {
      unpackBinaryStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetDecimalStats()) {
      unpackDecimalStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetDateStats()) {
      unpackDateStats(oi, o, fieldName, statsObj);
    }
    return;
  }
  // "columntype" carries the column's type name; allocate the matching stats variant.
  PrimitiveObjectInspector primitiveOI = (PrimitiveObjectInspector) oi;
  String typeName = ((StringObjectInspector) primitiveOI).getPrimitiveJavaObject(o);
  ColumnStatisticsData data = new ColumnStatisticsData();
  if (typeName.equalsIgnoreCase("long")) {
    data.setLongStats(new LongColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase("double")) {
    data.setDoubleStats(new DoubleColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase("string")) {
    data.setStringStats(new StringColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase("boolean")) {
    data.setBooleanStats(new BooleanColumnStatsData());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase("binary")) {
    data.setBinaryStats(new BinaryColumnStatsData());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase("decimal")) {
    data.setDecimalStats(new DecimalColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase("date")) {
    data.setDateStats(new DateColumnStatsDataInspector());
    statsObj.setStatsData(data);
  }
  // NOTE(review): an unrecognized type name leaves statsObj's stats data unset — confirm intentional.
}
Usage example of org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector in the Apache Hive project: class TestCachedStore, method testTableColStatsOps.
// NOTE(review): @Test is commented out, so this test is currently disabled — confirm whether
// that is intentional before relying on its coverage.
// @Test
public void testTableColStatsOps() throws Exception {
// Verifies that table-level column statistics written through ObjectStore are
// visible, unchanged, through CachedStore after a prewarm.
// Add a db via ObjectStore
String dbName = "testTableColStatsOps";
String dbOwner = "user1";
Database db = createTestDb(dbName, dbOwner);
objectStore.createDatabase(db);
db = objectStore.getDatabase(dbName);
// Add a table via ObjectStore
final String tblName = "tbl";
final String tblOwner = "user1";
final FieldSchema col1 = new FieldSchema("col1", "int", "integer column");
// Stats values for col1
long col1LowVal = 5;
long col1HighVal = 500;
long col1Nulls = 10;
long col1DV = 20;
final FieldSchema col2 = new FieldSchema("col2", "string", "string column");
// Stats values for col2
long col2MaxColLen = 100;
double col2AvgColLen = 45.5;
long col2Nulls = 5;
long col2DV = 40;
final FieldSchema col3 = new FieldSchema("col3", "boolean", "boolean column");
// Stats values for col3
long col3NumTrues = 100;
long col3NumFalses = 30;
long col3Nulls = 10;
final List<FieldSchema> cols = new ArrayList<>();
cols.add(col1);
cols.add(col2);
cols.add(col3);
FieldSchema ptnCol1 = new FieldSchema("part1", "string", "string partition column");
List<FieldSchema> ptnCols = new ArrayList<FieldSchema>();
ptnCols.add(ptnCol1);
Table tbl = createTestTbl(dbName, tblName, tblOwner, cols, ptnCols);
objectStore.createTable(tbl);
tbl = objectStore.getTable(dbName, tblName);
// Add ColumnStatistics for tbl to metastore DB via ObjectStore
ColumnStatistics stats = new ColumnStatistics();
// 'true' marks these as table-level (not partition-level) statistics.
ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, dbName, tblName);
List<ColumnStatisticsObj> colStatObjs = new ArrayList<>();
// Col1: long stats (low/high value, nulls, distinct values)
ColumnStatisticsData data1 = new ColumnStatisticsData();
ColumnStatisticsObj col1Stats = new ColumnStatisticsObj(col1.getName(), col1.getType(), data1);
LongColumnStatsDataInspector longStats = new LongColumnStatsDataInspector();
longStats.setLowValue(col1LowVal);
longStats.setHighValue(col1HighVal);
longStats.setNumNulls(col1Nulls);
longStats.setNumDVs(col1DV);
data1.setLongStats(longStats);
colStatObjs.add(col1Stats);
// Col2: string stats (max/avg column length, nulls, distinct values)
ColumnStatisticsData data2 = new ColumnStatisticsData();
ColumnStatisticsObj col2Stats = new ColumnStatisticsObj(col2.getName(), col2.getType(), data2);
StringColumnStatsDataInspector stringStats = new StringColumnStatsDataInspector();
stringStats.setMaxColLen(col2MaxColLen);
stringStats.setAvgColLen(col2AvgColLen);
stringStats.setNumNulls(col2Nulls);
stringStats.setNumDVs(col2DV);
data2.setStringStats(stringStats);
colStatObjs.add(col2Stats);
// Col3: boolean stats (true/false counts, nulls)
ColumnStatisticsData data3 = new ColumnStatisticsData();
ColumnStatisticsObj col3Stats = new ColumnStatisticsObj(col3.getName(), col3.getType(), data3);
BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
boolStats.setNumTrues(col3NumTrues);
boolStats.setNumFalses(col3NumFalses);
boolStats.setNumNulls(col3Nulls);
data3.setBooleanStats(boolStats);
colStatObjs.add(col3Stats);
stats.setStatsDesc(statsDesc);
stats.setStatsObj(colStatObjs);
// Save to DB
objectStore.updateTableColumnStatistics(stats);
// Prewarm CachedStore so it loads the stats just written through ObjectStore.
CachedStore.setCachePrewarmedState(false);
CachedStore.prewarm(objectStore);
// Read table stats via CachedStore
ColumnStatistics newStats = cachedStore.getTableColumnStatistics(dbName, tblName, Arrays.asList(col1.getName(), col2.getName(), col3.getName()));
// The cached copy must equal the stats originally written.
Assert.assertEquals(stats, newStats);
// Clean up
objectStore.dropTable(dbName, tblName);
objectStore.dropDatabase(dbName);
// Clear shared-cache state so later tests start from a clean cache.
sharedCache.getDatabaseCache().clear();
sharedCache.getTableCache().clear();
sharedCache.getSdCache().clear();
}
Usage example of org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector in the Apache Hive project: class ColumnStatisticsObjTranslator, method unpackPrimitiveObject (ColumnStatsField variant).
/**
 * Unpacks one primitive value into the given stats object. The COLUMN_STATS_TYPE
 * field selects and allocates the stats-data variant; every other field is routed
 * to the unpack method matching the variant already set on {@code statsObj}.
 */
private static void unpackPrimitiveObject(ObjectInspector oi, Object o, ColumnStatsField csf, ColumnStatisticsObj statsObj) throws UnsupportedDoubleException {
  if (o == null) {
    return;
  }
  if (csf != ColumnStatsField.COLUMN_STATS_TYPE) {
    // Dispatch to the unpack method matching the stats variant chosen earlier.
    if (statsObj.getStatsData().isSetBooleanStats()) {
      unpackBooleanStats(oi, o, csf, statsObj);
    } else if (statsObj.getStatsData().isSetLongStats()) {
      unpackLongStats(oi, o, csf, statsObj);
    } else if (statsObj.getStatsData().isSetDoubleStats()) {
      unpackDoubleStats(oi, o, csf, statsObj);
    } else if (statsObj.getStatsData().isSetStringStats()) {
      unpackStringStats(oi, o, csf, statsObj);
    } else if (statsObj.getStatsData().isSetBinaryStats()) {
      unpackBinaryStats(oi, o, csf, statsObj);
    } else if (statsObj.getStatsData().isSetDecimalStats()) {
      unpackDecimalStats(oi, o, csf, statsObj);
    } else if (statsObj.getStatsData().isSetDateStats()) {
      unpackDateStats(oi, o, csf, statsObj);
    } else if (statsObj.getStatsData().isSetTimestampStats()) {
      unpackTimestampStats(oi, o, csf, statsObj);
    }
    return;
  }
  // The stats-type field carries the column's type name; allocate the matching variant.
  PrimitiveObjectInspector primitiveOI = (PrimitiveObjectInspector) oi;
  String typeName = ((StringObjectInspector) primitiveOI).getPrimitiveJavaObject(o);
  ColumnStatisticsData data = new ColumnStatisticsData();
  if (typeName.equalsIgnoreCase(ColumnStatsType.LONG.toString())) {
    data.setLongStats(new LongColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase(ColumnStatsType.DOUBLE.toString())) {
    data.setDoubleStats(new DoubleColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase(ColumnStatsType.STRING.toString())) {
    data.setStringStats(new StringColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase(ColumnStatsType.BOOLEAN.toString())) {
    data.setBooleanStats(new BooleanColumnStatsData());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase(ColumnStatsType.BINARY.toString())) {
    data.setBinaryStats(new BinaryColumnStatsData());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase(ColumnStatsType.DECIMAL.toString())) {
    data.setDecimalStats(new DecimalColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase(ColumnStatsType.DATE.toString())) {
    data.setDateStats(new DateColumnStatsDataInspector());
    statsObj.setStatsData(data);
  } else if (typeName.equalsIgnoreCase(ColumnStatsType.TIMESTAMP.toString())) {
    data.setTimestampStats(new TimestampColumnStatsDataInspector());
    statsObj.setStatsData(data);
  }
  // NOTE(review): an unrecognized type name leaves statsObj's stats data unset — confirm intentional.
}
Aggregations