
Example 36 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

From the class TestHBaseStore, method createPartition.

@Test
public void createPartition() throws Exception {
    String tableName = "myparttable";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
    partCols.add(new FieldSchema("pc", "string", ""));
    Table table = new Table(tableName, DB, "me", startTime, startTime, 0, sd, partCols, emptyParameters, null, null, null);
    store.createTable(table);
    List<String> vals = Arrays.asList("fred");
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=fred");
    Partition part = new Partition(vals, DB, tableName, startTime, startTime, psd, emptyParameters);
    store.addPartition(part);
    Partition p = store.getPartition(DB, tableName, vals);
    Assert.assertEquals(1, p.getSd().getColsSize());
    Assert.assertEquals("col1", p.getSd().getCols().get(0).getName());
    Assert.assertEquals("int", p.getSd().getCols().get(0).getType());
    Assert.assertEquals("nocomment", p.getSd().getCols().get(0).getComment());
    Assert.assertEquals("serde", p.getSd().getSerdeInfo().getName());
    Assert.assertEquals("seriallib", p.getSd().getSerdeInfo().getSerializationLib());
    Assert.assertEquals("file:/tmp/pc=fred", p.getSd().getLocation());
    Assert.assertEquals("input", p.getSd().getInputFormat());
    Assert.assertEquals("output", p.getSd().getOutputFormat());
    Assert.assertEquals(DB, p.getDbName());
    Assert.assertEquals(tableName, p.getTableName());
    Assert.assertEquals(1, p.getValuesSize());
    Assert.assertEquals("fred", p.getValues().get(0));
    Assert.assertTrue(store.doesPartitionExist(DB, tableName, vals));
    Assert.assertFalse(store.doesPartitionExist(DB, tableName, Arrays.asList("bob")));
}
Also used: Partition (org.apache.hadoop.hive.metastore.api.Partition), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), ArrayList (java.util.ArrayList), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), Test (org.junit.Test)
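
The StorageDescriptor constructor takes ten positional arguments, which is easy to misread in the call above. Below is a minimal sketch, assuming the usual Thrift-generated constructor order, that restates the same construction with each argument labeled by field name; it is illustrative and not part of the test.

// Sketch only: the same StorageDescriptor construction with each positional
// argument labeled (field names assumed from the Thrift-generated API).
List<FieldSchema> cols = Arrays.asList(new FieldSchema("col1", "int", "nocomment"));
SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
StorageDescriptor sd = new StorageDescriptor(
    cols,                                     // cols: column schema
    "file:/tmp",                              // location
    "input",                                  // inputFormat
    "output",                                 // outputFormat
    false,                                    // compressed
    0,                                        // numBuckets
    serde,                                    // serdeInfo
    null,                                     // bucketCols
    null,                                     // sortCols
    Collections.<String, String>emptyMap());  // parameters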

Example 37 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

From the class TestHBaseStore, method dropIndex.

@Test
public void dropIndex() throws Exception {
    String tableName = "mytable";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", ""));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    Map<String, String> params = new HashMap<String, String>();
    params.put("key", "value");
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 17, serde, Arrays.asList("bucketcol"), Arrays.asList(new Order("sortcol", 1)), params);
    Table table = new Table(tableName, "default", "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    store.createTable(table);
    String indexName = "myindex";
    Index index = new Index(indexName, null, "default", tableName, startTime, startTime, tableName + "__" + indexName + "__", sd, emptyParameters, false);
    store.addIndex(index);
    store.dropIndex("default", tableName, indexName);
}
Also used: Order (org.apache.hadoop.hive.metastore.api.Order), Table (org.apache.hadoop.hive.metastore.api.Table), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), ArrayList (java.util.ArrayList), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), Index (org.apache.hadoop.hive.metastore.api.Index), Test (org.junit.Test)
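
The Index constructor is similarly positional. A minimal sketch, assuming the Thrift-generated argument order, labels the values passed in dropIndex above; it simply restates what the test builds and is not part of the original source.

// Sketch only: the Index construction from dropIndex with each positional
// argument labeled (field names assumed from the Thrift-generated API).
Index index = new Index(
    "myindex",               // indexName
    null,                    // indexHandlerClass
    "default",               // dbName
    "mytable",               // origTableName
    startTime,               // createTime
    startTime,               // lastAccessTime
    "mytable__myindex__",    // indexTableName
    sd,                      // sd: storage descriptor for the index table
    emptyParameters,         // parameters
    false);                  // deferredRebuild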

Example 38 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

From the class TestHBaseStoreBitVector, method createMockTableAndPartition.

private Table createMockTableAndPartition(String partType, String partVal) throws Exception {
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", partType, ""));
    List<String> vals = new ArrayList<String>();
    vals.add(partVal);
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    Map<String, String> params = new HashMap<String, String>();
    params.put("key", "value");
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 17, serde, Arrays.asList("bucketcol"), Arrays.asList(new Order("sortcol", 1)), params);
    int currentTime = (int) (System.currentTimeMillis() / 1000);
    Table table = new Table(TBL, DB, "me", currentTime, currentTime, 0, sd, cols, emptyParameters, null, null, null);
    store.createTable(table);
    Partition part = new Partition(vals, DB, TBL, currentTime, currentTime, sd, emptyParameters);
    store.addPartition(part);
    return table;
}
Also used: Order (org.apache.hadoop.hive.metastore.api.Order), Partition (org.apache.hadoop.hive.metastore.api.Partition), Table (org.apache.hadoop.hive.metastore.api.Table), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), ArrayList (java.util.ArrayList), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)
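
A hypothetical caller of this private helper (not part of the test class) might look like the following, reusing the store, DB, and TBL fields the helper itself relies on. Since the partition is created with the table's own StorageDescriptor, its location is the table's location.

// Illustrative usage of the helper above; assertions restate what the helper set up.
Table t = createMockTableAndPartition("long", "1234");
Partition p = store.getPartition(DB, TBL, Arrays.asList("1234"));
Assert.assertEquals(t.getTableName(), p.getTableName());
Assert.assertEquals("file:/tmp", p.getSd().getLocation());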

Example 39 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

From the class TestHBaseAggregateStatsCache, method allWithStats.

// Due to limitations in the Mock infrastructure we use for HBase testing we can only test
// this for a single column table and we can't really test hits in HBase, only in memory or
// built from scratch.  But it's still useful to cover many bugs.  More in-depth testing with
// multiple columns and with HBase hits is done in TestHBaseAggrStatsCacheIntegration.
@Test
public void allWithStats() throws Exception {
    String dbName = "default";
    String tableName = "hit";
    List<String> partVals1 = Arrays.asList("today");
    List<String> partVals2 = Arrays.asList("yesterday");
    long now = System.currentTimeMillis();
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "boolean", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("ds", "string", ""));
    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
    store.createTable(table);
    for (List<String> partVals : Arrays.asList(partVals1, partVals2)) {
        StorageDescriptor psd = new StorageDescriptor(sd);
        psd.setLocation("file:/tmp/default/hit/ds=" + partVals.get(0));
        Partition part = new Partition(partVals, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
        store.addPartition(part);
        ColumnStatistics cs = new ColumnStatistics();
        ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
        desc.setLastAnalyzed(now);
        desc.setPartName("ds=" + partVals.get(0));
        cs.setStatsDesc(desc);
        ColumnStatisticsObj obj = new ColumnStatisticsObj();
        obj.setColName("col1");
        obj.setColType("boolean");
        ColumnStatisticsData data = new ColumnStatisticsData();
        BooleanColumnStatsData bcsd = new BooleanColumnStatsData();
        bcsd.setNumFalses(10);
        bcsd.setNumTrues(20);
        bcsd.setNumNulls(30);
        data.setBooleanStats(bcsd);
        obj.setStatsData(data);
        cs.addToStatsObj(obj);
        store.updatePartitionColumnStatistics(cs, partVals);
    }
    Checker statChecker = new Checker() {

        @Override
        public void checkStats(AggrStats aggrStats) throws Exception {
            Assert.assertEquals(2, aggrStats.getPartsFound());
            Assert.assertEquals(1, aggrStats.getColStatsSize());
            ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
            Assert.assertEquals("col1", cso.getColName());
            Assert.assertEquals("boolean", cso.getColType());
            BooleanColumnStatsData bcsd = cso.getStatsData().getBooleanStats();
            Assert.assertEquals(20, bcsd.getNumFalses());
            Assert.assertEquals(40, bcsd.getNumTrues());
            Assert.assertEquals(60, bcsd.getNumNulls());
        }
    };
    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1"));
    statChecker.checkStats(aggrStats);
    // Check that we had to build it from the stats
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
    // Call again, this time it should come from memory.  Also, reverse the name order this time
    // to assure that we still hit.
    aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1"));
    statChecker.checkStats(aggrStats);
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(2, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
}
Also used: ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics), BooleanColumnStatsData (org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData), Partition (org.apache.hadoop.hive.metastore.api.Partition), Table (org.apache.hadoop.hive.metastore.api.Table), AggrStats (org.apache.hadoop.hive.metastore.api.AggrStats), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), ArrayList (java.util.ArrayList), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj), ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc), ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData), Test (org.junit.Test)
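
Each pass through the loop above stores identical boolean statistics (10 falses, 20 trues, 30 nulls), so the aggregate the Checker verifies is just those values summed over the two partitions: 20, 40, and 60. A small helper sketch, not in the original test, that builds one such per-partition ColumnStatistics:

// Sketch only: builds the per-partition boolean column statistics that the
// loop in allWithStats writes for each partition.
private ColumnStatistics booleanStatsFor(String dbName, String tableName,
                                         String partName, long analyzedAt) {
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
    desc.setLastAnalyzed(analyzedAt);
    desc.setPartName(partName);
    BooleanColumnStatsData bcsd = new BooleanColumnStatsData();
    bcsd.setNumFalses(10);
    bcsd.setNumTrues(20);
    bcsd.setNumNulls(30);
    ColumnStatisticsData data = new ColumnStatisticsData();
    data.setBooleanStats(bcsd);
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName("col1");
    obj.setColType("boolean");
    obj.setStatsData(data);
    ColumnStatistics cs = new ColumnStatistics();
    cs.setStatsDesc(desc);
    cs.addToStatsObj(obj);
    return cs;
}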

Example 40 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

From the class TestHBaseAggregateStatsCache, method someNonexistentPartitions.

@Test
public void someNonexistentPartitions() throws Exception {
    String dbName = "default";
    String tableName = "snp";
    List<String> partVals1 = Arrays.asList("today");
    List<String> partVals2 = Arrays.asList("yesterday");
    long now = System.currentTimeMillis();
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "boolean", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("ds", "string", ""));
    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
    store.createTable(table);
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/default/hit/ds=" + partVals1.get(0));
    Partition part = new Partition(partVals1, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
    store.addPartition(part);
    ColumnStatistics cs = new ColumnStatistics();
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
    desc.setLastAnalyzed(now);
    desc.setPartName("ds=" + partVals1.get(0));
    cs.setStatsDesc(desc);
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName("col1");
    obj.setColType("double");
    ColumnStatisticsData data = new ColumnStatisticsData();
    DoubleColumnStatsData dcsd = new DoubleColumnStatsData();
    dcsd.setHighValue(1000.2342343);
    dcsd.setLowValue(-20.1234213423);
    dcsd.setNumNulls(30);
    dcsd.setNumDVs(12342);
    data.setDoubleStats(dcsd);
    obj.setStatsData(data);
    cs.addToStatsObj(obj);
    store.updatePartitionColumnStatistics(cs, partVals1);
    Checker statChecker = new Checker() {

        @Override
        public void checkStats(AggrStats aggrStats) throws Exception {
            Assert.assertEquals(1, aggrStats.getPartsFound());
            Assert.assertEquals(1, aggrStats.getColStatsSize());
            ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
            Assert.assertEquals("col1", cso.getColName());
            Assert.assertEquals("double", cso.getColType());
            DoubleColumnStatsData dcsd = cso.getStatsData().getDoubleStats();
            Assert.assertEquals(1000.23, dcsd.getHighValue(), 0.01);
            Assert.assertEquals(-20.12, dcsd.getLowValue(), 0.01);
            Assert.assertEquals(30, dcsd.getNumNulls());
            Assert.assertEquals(12342, dcsd.getNumDVs());
        }
    };
    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1"));
    statChecker.checkStats(aggrStats);
    // Check that we had to build it from the stats
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
    // Call again, this time it should come from memory.  Also, reverse the name order this time
    // to assure that we still hit.
    aggrStats = store.get_aggr_stats_for(dbName, tableName, Arrays.asList("ds=yesterday", "ds=today"), Arrays.asList("col1"));
    statChecker.checkStats(aggrStats);
    Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
    Assert.assertEquals(2, store.backdoor().getStatsCache().totalGets.getCnt());
    Assert.assertEquals(1, store.backdoor().getStatsCache().misses.getCnt());
}
Also used: ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics), Partition (org.apache.hadoop.hive.metastore.api.Partition), Table (org.apache.hadoop.hive.metastore.api.Table), AggrStats (org.apache.hadoop.hive.metastore.api.AggrStats), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), ArrayList (java.util.ArrayList), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj), DoubleColumnStatsData (org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData), ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc), ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData), Test (org.junit.Test)
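
Only partVals1 ("today") is actually added as a partition, which is why the Checker expects partsFound to be 1 even though two partition names are requested. An illustrative follow-up, not in the original test, could assert that directly:

// Sketch only: confirm that the second requested partition was never created.
Assert.assertTrue(store.doesPartitionExist(dbName, tableName, partVals1));
Assert.assertFalse(store.doesPartitionExist(dbName, tableName, partVals2));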

Aggregations

StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 200
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 139
Table (org.apache.hadoop.hive.metastore.api.Table): 138
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 135
ArrayList (java.util.ArrayList): 121
Test (org.junit.Test): 115
Partition (org.apache.hadoop.hive.metastore.api.Partition): 84
HashMap (java.util.HashMap): 49
ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics): 32
ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData): 32
ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc): 32
ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj): 32
AggrStats (org.apache.hadoop.hive.metastore.api.AggrStats): 30
List (java.util.List): 28
Database (org.apache.hadoop.hive.metastore.api.Database): 26
Order (org.apache.hadoop.hive.metastore.api.Order): 25
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 21
Path (org.apache.hadoop.fs.Path): 19
SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo): 17
LongColumnStatsData (org.apache.hadoop.hive.metastore.api.LongColumnStatsData): 14