Example 56 with ColumnStatistics

Use of org.apache.hadoop.hive.metastore.api.ColumnStatistics in project hive by apache.

From the class HBaseReadWrite, method getPartitionStatistics.

/**
   * Get statistics for a set of partitions.
   *
   * @param dbName name of the database the table is in
   * @param tblName name of the table the partitions belong to
   * @param partNames names of the partitions, used only to set values inside the returned stats
   *          objects
   * @param partVals partition values for each partition, needed because this class doesn't know
   *          how to translate from partName to partVals
   * @param colNames names of the columns to fetch stats for; these columns are fetched for all
   *          requested partitions
   * @return list of ColumnStatistics, one for each partition for which at least one column's
   *          stats were found
   * @throws IOException if the HBase read fails
   */
List<ColumnStatistics> getPartitionStatistics(String dbName, String tblName, List<String> partNames, List<List<String>> partVals, List<String> colNames) throws IOException {
    List<ColumnStatistics> statsList = new ArrayList<>(partNames.size());
    Map<List<String>, String> valToPartMap = new HashMap<>(partNames.size());
    List<Get> gets = new ArrayList<>(partNames.size() * colNames.size());
    assert partNames.size() == partVals.size();
    byte[][] colNameBytes = new byte[colNames.size()][];
    for (int i = 0; i < colNames.size(); i++) {
        colNameBytes[i] = HBaseUtils.buildKey(colNames.get(i));
    }
    for (int i = 0; i < partNames.size(); i++) {
        valToPartMap.put(partVals.get(i), partNames.get(i));
        byte[] partKey = HBaseUtils.buildPartitionKey(dbName, tblName, HBaseUtils.getPartitionKeyTypes(getTable(dbName, tblName).getPartitionKeys()), partVals.get(i));
        Get get = new Get(partKey);
        for (byte[] colName : colNameBytes) {
            get.addColumn(STATS_CF, colName);
        }
        gets.add(get);
    }
    HTableInterface htab = conn.getHBaseTable(PART_TABLE);
    Result[] results = htab.get(gets);
    for (int i = 0; i < results.length; i++) {
        ColumnStatistics colStats = null;
        for (int j = 0; j < colNameBytes.length; j++) {
            byte[] serializedColStats = results[i].getValue(STATS_CF, colNameBytes[j]);
            if (serializedColStats != null) {
                if (colStats == null) {
                    // We initialize this late so that we don't create extras in the case of
                    // partitions with no stats
                    colStats = buildColStats(results[i].getRow(), false);
                    statsList.add(colStats);
                }
                ColumnStatisticsObj cso = HBaseUtils.deserializeStatsForOneColumn(colStats, serializedColStats);
                cso.setColName(colNames.get(j));
                colStats.addToStatsObj(cso);
            }
        }
    }
    return statsList;
}
Also used: java.util.ArrayList, java.util.HashMap, java.util.List, org.apache.hadoop.hbase.client.Get, org.apache.hadoop.hbase.client.HTableInterface, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hive.metastore.api.ColumnStatistics, org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj
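
A hedged usage sketch follows. Here hrw stands for an already-constructed HBaseReadWrite instance, and the database, table, and column names are hypothetical; the one hard requirement, per the assert in the method, is that partNames and partVals line up index for index.

List<String> partNames = Arrays.asList("ds=2017-01-01", "ds=2017-01-02");
List<List<String>> partVals = Arrays.asList(
        Arrays.asList("2017-01-01"),
        Arrays.asList("2017-01-02"));
List<ColumnStatistics> stats = hrw.getPartitionStatistics(
        "default", "web_logs", partNames, partVals, Arrays.asList("col1", "col2"));
// Partitions with no stored stats are simply absent from the returned list.
for (ColumnStatistics cs : stats) {
    System.out.println(cs.getStatsDesc().getPartName() + ": "
            + cs.getStatsObjSize() + " column stat objects");
}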

Example 57 with ColumnStatistics

Use of org.apache.hadoop.hive.metastore.api.ColumnStatistics in project hive by apache.

From the class HBaseReadWrite, method getTableStatistics.

/**
   * Get statistics for a table.
   *
   * @param dbName name of the database the table is in
   * @param tblName name of the table
   * @param colNames list of column names to get statistics for
   * @return column statistics for the indicated table
   * @throws IOException if the HBase read fails
   */
ColumnStatistics getTableStatistics(String dbName, String tblName, List<String> colNames) throws IOException {
    byte[] tabKey = HBaseUtils.buildKey(dbName, tblName);
    ColumnStatistics tableStats = new ColumnStatistics();
    ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc();
    statsDesc.setIsTblLevel(true);
    statsDesc.setDbName(dbName);
    statsDesc.setTableName(tblName);
    tableStats.setStatsDesc(statsDesc);
    byte[][] colKeys = new byte[colNames.size()][];
    for (int i = 0; i < colKeys.length; i++) {
        colKeys[i] = HBaseUtils.buildKey(colNames.get(i));
    }
    Result result = read(TABLE_TABLE, tabKey, STATS_CF, colKeys);
    for (int i = 0; i < colKeys.length; i++) {
        byte[] serializedColStats = result.getValue(STATS_CF, colKeys[i]);
        if (serializedColStats == null) {
            // There were no stats for this column, so skip it
            continue;
        }
        ColumnStatisticsObj obj = HBaseUtils.deserializeStatsForOneColumn(tableStats, serializedColStats);
        obj.setColName(colNames.get(i));
        tableStats.addToStatsObj(obj);
    }
    return tableStats;
}
Also used: org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hive.metastore.api.ColumnStatistics, org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc, org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj
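
A matching sketch for the table-level call, with the same hypothetical hrw and names. Note that in the Thrift-generated class, getStatsObj() can come back null when no column had stored stats, hence the guard.

ColumnStatistics tableStats = hrw.getTableStatistics(
        "default", "web_logs", Arrays.asList("col1", "col2"));
List<ColumnStatisticsObj> objs = tableStats.getStatsObj();
if (objs != null) {
    for (ColumnStatisticsObj obj : objs) {
        System.out.println(obj.getColName() + " (" + obj.getColType() + ")");
    }
}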

Example 58 with ColumnStatistics

Use of org.apache.hadoop.hive.metastore.api.ColumnStatistics in project hive by apache.

From the class HBaseReadWrite, method printOnePartition.

private String printOnePartition(Result result) throws IOException, TException {
    byte[] key = result.getRow();
    HBaseUtils.StorageDescriptorParts sdParts = HBaseUtils.deserializePartition(key, result.getValue(CATALOG_CF, CATALOG_COL), this);
    StringBuilder builder = new StringBuilder();
    builder.append(dumpThriftObject(sdParts.containingPartition)).append(" sdHash: ").append(Base64.encodeBase64URLSafeString(sdParts.sdHash)).append(" stats:");
    NavigableMap<byte[], byte[]> statsCols = result.getFamilyMap(STATS_CF);
    for (Map.Entry<byte[], byte[]> statsCol : statsCols.entrySet()) {
        builder.append(" column ").append(new String(statsCol.getKey(), HBaseUtils.ENCODING)).append(": ");
        ColumnStatistics pcs = buildColStats(key, false);
        ColumnStatisticsObj cso = HBaseUtils.deserializeStatsForOneColumn(pcs, statsCol.getValue());
        builder.append(dumpThriftObject(cso));
    }
    return builder.toString();
}
Also used: java.util.HashMap, java.util.Map, java.util.NavigableMap, org.apache.hadoop.hive.metastore.api.ColumnStatistics, org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj
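
The getFamilyMap call above is the standard HBase idiom for walking every qualifier under one column family of a Result. A minimal sketch, assuming a family literally named "stats" as a stand-in for STATS_CF (Bytes is org.apache.hadoop.hbase.util.Bytes, StandardCharsets is java.nio.charset.StandardCharsets):

NavigableMap<byte[], byte[]> statsCols = result.getFamilyMap(Bytes.toBytes("stats"));
for (Map.Entry<byte[], byte[]> e : statsCols.entrySet()) {
    // The qualifier is the column name; the cell value is the serialized stats blob.
    String colName = new String(e.getKey(), StandardCharsets.UTF_8);
    byte[] serialized = e.getValue();
    // ... deserialize 'serialized' with whatever encoding the writer used ...
}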

Example 59 with ColumnStatistics

Use of org.apache.hadoop.hive.metastore.api.ColumnStatistics in project hive by apache.

From the class TestHBaseAggrStatsCacheIntegration, method hit.

@Test
public void hit() throws Exception {
    String dbName = "default";
    String tableName = "hit";
    List<String> partVals1 = Arrays.asList("today");
    List<String> partVals2 = Arrays.asList("yesterday");
    long now = System.currentTimeMillis();
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "boolean", "nocomment"));
    cols.add(new FieldSchema("col2", "varchar", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("ds", "string", ""));
    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
    store.createTable(table);
    for (List<String> partVals : Arrays.asList(partVals1, partVals2)) {
        StorageDescriptor psd = new StorageDescriptor(sd);
        psd.setLocation("file:/tmp/default/hit/ds=" + partVals.get(0));
        Partition part = new Partition(partVals, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
        store.addPartition(part);
        ColumnStatistics cs = new ColumnStatistics();
        ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
        desc.setLastAnalyzed(now);
        desc.setPartName("ds=" + partVals.get(0));
        cs.setStatsDesc(desc);
        ColumnStatisticsObj obj = new ColumnStatisticsObj();
        obj.setColName("col1");
        obj.setColType("boolean");
        ColumnStatisticsData data = new ColumnStatisticsData();
        BooleanColumnStatsData bcsd = new BooleanColumnStatsData();
        bcsd.setNumFalses(10);
        bcsd.setNumTrues(20);
        bcsd.setNumNulls(30);
        data.setBooleanStats(bcsd);
        obj.setStatsData(data);
        cs.addToStatsObj(obj);
        obj = new ColumnStatisticsObj();
        obj.setColName("col2");
        obj.setColType("varchar");
        data = new ColumnStatisticsData();
        StringColumnStatsData scsd = new StringColumnStatsData();
        scsd.setAvgColLen(10.3);
        scsd.setMaxColLen(2000);
        scsd.setNumNulls(3);
        scsd.setNumDVs(12342);
        data.setStringStats(scsd);
        obj.setStatsData(data);
        cs.addToStatsObj(obj);
        store.updatePartitionColumnStatistics(cs, partVals);
    }
    Checker statChecker = new Checker() {

        @Override
        public void checkStats(AggrStats aggrStats) throws Exception {
            Assert.assertEquals(2, aggrStats.getPartsFound());
            Assert.assertEquals(2, aggrStats.getColStatsSize());
            ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
            Assert.assertEquals("col1", cso.getColName());
            Assert.assertEquals("boolean", cso.getColType());
            BooleanColumnStatsData bcsd = cso.getStatsData().getBooleanStats();
            Assert.assertEquals(20, bcsd.getNumFalses());
            Assert.assertEquals(40, bcsd.getNumTrues());
            Assert.assertEquals(60, bcsd.getNumNulls());
            cso = aggrStats.getColStats().get(1);
            Assert.assertEquals("col2", cso.getColName());
            Assert.assertEquals("varchar", cso.getColType());
            StringColumnStatsData scsd = cso.getStatsData().getStringStats();
            Assert.assertEquals(10.3, scsd.getAvgColLen(), 0.1);
            Assert.assertEquals(2000, scsd.getMaxColLen());
            Assert.assertEquals(6, scsd.getNumNulls());
            Assert.assertEquals(12342, scsd.getNumDVs());
        }
    };
    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1", "col2"));
    statChecker.checkStats(aggrStats);
    // Check that we had to build it from the stats
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(2, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
    // Call again; this time it should come from memory.  Also, reverse the name order this time
    // to ensure that we still hit.
    aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1", "col2"));
    statChecker.checkStats(aggrStats);
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(4, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
    store.backdoor().getStatsCache().flushMemory();
    // Call again; this time it should come from HBase
    aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1", "col2"));
    statChecker.checkStats(aggrStats);
    Assert.assertEquals(2, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(6, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
}
Also used: java.util.ArrayList, org.apache.hadoop.hive.metastore.api.AggrStats, org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData, org.apache.hadoop.hive.metastore.api.ColumnStatistics, org.apache.hadoop.hive.metastore.api.ColumnStatisticsData, org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc, org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj, org.apache.hadoop.hive.metastore.api.FieldSchema, org.apache.hadoop.hive.metastore.api.Partition, org.apache.hadoop.hive.metastore.api.SerDeInfo, org.apache.hadoop.hive.metastore.api.StorageDescriptor, org.apache.hadoop.hive.metastore.api.StringColumnStatsData, org.apache.hadoop.hive.metastore.api.Table, org.junit.Test
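
The Checker type these tests instantiate is not part of the excerpt; presumably it is a one-method callback defined inside the test class, along these lines (an assumption, not the verified source):

private interface Checker {
    // Called with the aggregate stats each variant of the query fetched.
    void checkStats(AggrStats aggrStats) throws Exception;
}

That also explains the expected numbers: both partitions were seeded with identical stats, so the aggregation sums the counts (numFalses 10+10=20, numTrues 20+20=40, boolean numNulls 30+30=60, string numNulls 3+3=6), while avgColLen, maxColLen, and the NDV estimate come through unchanged.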

Example 60 with ColumnStatistics

Use of org.apache.hadoop.hive.metastore.api.ColumnStatistics in project hive by apache.

From the class TestHBaseAggrStatsCacheIntegration, method someWithStats.

@Test
public void someWithStats() throws Exception {
    String dbName = "default";
    String tableName = "psws";
    List<String> partVals1 = Arrays.asList("today");
    List<String> partVals2 = Arrays.asList("yesterday");
    long now = System.currentTimeMillis();
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "long", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("ds", "string", ""));
    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
    store.createTable(table);
    boolean first = true;
    for (List<String> partVals : Arrays.asList(partVals1, partVals2)) {
        StorageDescriptor psd = new StorageDescriptor(sd);
        psd.setLocation("file:/tmp/default/psws/ds=" + partVals.get(0));
        Partition part = new Partition(partVals, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
        store.addPartition(part);
        if (first) {
            ColumnStatistics cs = new ColumnStatistics();
            ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
            desc.setLastAnalyzed(now);
            desc.setPartName("ds=" + partVals.get(0));
            cs.setStatsDesc(desc);
            ColumnStatisticsObj obj = new ColumnStatisticsObj();
            obj.setColName("col1");
            obj.setColType("long");
            ColumnStatisticsData data = new ColumnStatisticsData();
            LongColumnStatsData lcsd = new LongColumnStatsData();
            lcsd.setHighValue(192L);
            lcsd.setLowValue(-20L);
            lcsd.setNumNulls(30);
            lcsd.setNumDVs(32);
            data.setLongStats(lcsd);
            obj.setStatsData(data);
            cs.addToStatsObj(obj);
            store.updatePartitionColumnStatistics(cs, partVals);
            first = false;
        }
    }
    Checker statChecker = new Checker() {

        @Override
        public void checkStats(AggrStats aggrStats) throws Exception {
            Assert.assertEquals(1, aggrStats.getPartsFound());
            Assert.assertEquals(1, aggrStats.getColStatsSize());
            ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
            Assert.assertEquals("col1", cso.getColName());
            Assert.assertEquals("long", cso.getColType());
            LongColumnStatsData lcsd = cso.getStatsData().getLongStats();
            Assert.assertEquals(192L, lcsd.getHighValue());
            Assert.assertEquals(-20L, lcsd.getLowValue());
            Assert.assertEquals(30, lcsd.getNumNulls());
            Assert.assertEquals(32, lcsd.getNumDVs());
        }
    };
    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1"));
    statChecker.checkStats(aggrStats);
    // Check that we had to build it from the stats
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
    // Call again; this time it should come from memory.  Also, reverse the name order this time
    // to ensure that we still hit.
    aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1"));
    statChecker.checkStats(aggrStats);
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(2, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
    store.backdoor().getStatsCache().flushMemory();
    // Call again; this time it should come from HBase
    aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1"));
    statChecker.checkStats(aggrStats);
    Assert.assertEquals(1, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(3, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
}
Also used: java.util.ArrayList, org.apache.hadoop.hive.metastore.api.AggrStats, org.apache.hadoop.hive.metastore.api.ColumnStatistics, org.apache.hadoop.hive.metastore.api.ColumnStatisticsData, org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc, org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj, org.apache.hadoop.hive.metastore.api.FieldSchema, org.apache.hadoop.hive.metastore.api.LongColumnStatsData, org.apache.hadoop.hive.metastore.api.Partition, org.apache.hadoop.hive.metastore.api.SerDeInfo, org.apache.hadoop.hive.metastore.api.StorageDescriptor, org.apache.hadoop.hive.metastore.api.Table, org.junit.Test
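
Here, by contrast, only the first partition is seeded with stats, so partsFound is 1 and that partition's numbers arrive unmerged. When several partitions do carry stats, the long-column merge the cache performs looks, in outline, like the sketch below. This is a hedged reconstruction, not Hive's exact code; in particular Hive's NDV merging uses an estimator, so the max taken here is only a crude placeholder.

static LongColumnStatsData mergeLongStats(LongColumnStatsData a, LongColumnStatsData b) {
    LongColumnStatsData merged = new LongColumnStatsData();
    merged.setHighValue(Math.max(a.getHighValue(), b.getHighValue()));  // range widens
    merged.setLowValue(Math.min(a.getLowValue(), b.getLowValue()));
    merged.setNumNulls(a.getNumNulls() + b.getNumNulls());              // null counts add
    merged.setNumDVs(Math.max(a.getNumDVs(), b.getNumDVs()));           // placeholder NDV merge
    return merged;
}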
