
Example 46 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

In class TestHBaseStore, method dropPartition.

@Test
public void dropPartition() throws Exception {
    String tableName = "myparttable2";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
    partCols.add(new FieldSchema("pc", "string", ""));
    Table table = new Table(tableName, DB, "me", startTime, startTime, 0, sd, partCols, emptyParameters, null, null, null);
    store.createTable(table);
    List<String> vals = Arrays.asList("fred");
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=fred");
    Partition part = new Partition(vals, DB, tableName, startTime, startTime, psd, emptyParameters);
    store.addPartition(part);
    Assert.assertNotNull(store.getPartition(DB, tableName, vals));
    store.dropPartition(DB, tableName, vals);
    thrown.expect(NoSuchObjectException.class);
    store.getPartition(DB, tableName, vals);
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Test(org.junit.Test)
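
The test methods in these TestHBaseStore examples rely on fixtures declared elsewhere in the class (store, DB, emptyParameters, and the JUnit thrown rule), none of which appear in this listing. A minimal sketch, assuming JUnit 4, of what those declarations might look like; the actual values and setup in the real test class may differ:

import java.util.Collections;
import java.util.Map;
import org.apache.hadoop.hive.metastore.hbase.HBaseStore;
import org.junit.Rule;
import org.junit.rules.ExpectedException;

public class TestHBaseStoreFixtureSketch {
    // Assumed declarations mirroring what dropPartition() references; the
    // names match the test code above, the values are illustrative.
    static final String DB = "default";                          // database the table and partitions live in
    static final Map<String, String> emptyParameters = Collections.emptyMap();
    HBaseStore store;                                             // RawStore under test, wired up in a @Before method (not shown)

    @Rule
    public ExpectedException thrown = ExpectedException.none();   // enables thrown.expect(NoSuchObjectException.class)
}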

Example 47 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

In class TestHBaseStore, method alterIndex.

@Test
public void alterIndex() throws Exception {
    String tableName = "mytable";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", ""));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    Map<String, String> params = new HashMap<String, String>();
    params.put("key", "value");
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 17, serde, Arrays.asList("bucketcol"), Arrays.asList(new Order("sortcol", 1)), params);
    Table table = new Table(tableName, "default", "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    store.createTable(table);
    String indexName = "myindex";
    Index index = new Index(indexName, null, "default", tableName, startTime, startTime, tableName + "__" + indexName + "__", sd, emptyParameters, false);
    store.addIndex(index);
    startTime += 10;
    index.setLastAccessTime(startTime);
    store.alterIndex("default", tableName, indexName, index);
    Index ind = store.getIndex("default", tableName, indexName);
    Assert.assertEquals(1, ind.getSd().getColsSize());
    Assert.assertEquals("col1", ind.getSd().getCols().get(0).getName());
    Assert.assertEquals("int", ind.getSd().getCols().get(0).getType());
    Assert.assertEquals("", ind.getSd().getCols().get(0).getComment());
    Assert.assertEquals("serde", ind.getSd().getSerdeInfo().getName());
    Assert.assertEquals("seriallib", ind.getSd().getSerdeInfo().getSerializationLib());
    Assert.assertEquals("file:/tmp", ind.getSd().getLocation());
    Assert.assertEquals("input", ind.getSd().getInputFormat());
    Assert.assertEquals("output", ind.getSd().getOutputFormat());
    Assert.assertFalse(ind.getSd().isCompressed());
    Assert.assertEquals(17, ind.getSd().getNumBuckets());
    Assert.assertEquals(1, ind.getSd().getBucketColsSize());
    Assert.assertEquals("bucketcol", ind.getSd().getBucketCols().get(0));
    Assert.assertEquals(1, ind.getSd().getSortColsSize());
    Assert.assertEquals("sortcol", ind.getSd().getSortCols().get(0).getCol());
    Assert.assertEquals(1, ind.getSd().getSortCols().get(0).getOrder());
    Assert.assertEquals(1, ind.getSd().getParametersSize());
    Assert.assertEquals("value", ind.getSd().getParameters().get("key"));
    Assert.assertEquals(indexName, ind.getIndexName());
    Assert.assertNull(ind.getIndexHandlerClass());
    Assert.assertEquals("default", ind.getDbName());
    Assert.assertEquals(tableName, ind.getOrigTableName());
    Assert.assertEquals(0, ind.getParametersSize());
    Assert.assertEquals(startTime, ind.getLastAccessTime());
    Assert.assertEquals(false, ind.isDeferredRebuild());
}
Also used : Order(org.apache.hadoop.hive.metastore.api.Order) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Index(org.apache.hadoop.hive.metastore.api.Index) Test(org.junit.Test)
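
The ten-argument StorageDescriptor constructor used above can be hard to read. As a readability aid, a sketch of the equivalent object built with the Thrift-generated setters, which names the positional literals (17 buckets, one bucket column, one ascending sort column). Illustrative only; the test itself uses the constructor form:

List<FieldSchema> cols = Arrays.asList(new FieldSchema("col1", "int", ""));
SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
Map<String, String> params = new HashMap<>();
params.put("key", "value");

StorageDescriptor sd = new StorageDescriptor();
sd.setCols(cols);
sd.setLocation("file:/tmp");
sd.setInputFormat("input");                                  // input format class name
sd.setOutputFormat("output");                                // output format class name
sd.setCompressed(false);                                     // 5th positional argument
sd.setNumBuckets(17);                                        // 6th positional argument
sd.setSerdeInfo(serde);
sd.setBucketCols(Arrays.asList("bucketcol"));
sd.setSortCols(Arrays.asList(new Order("sortcol", 1)));      // 1 = ascending
sd.setParameters(params);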

Example 48 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

In class TestHBaseAggregateStatsNDVUniformDist, method TwoEndsAndMiddleOfPartitionsHaveBitVectorStatusLong.

@Test
public void TwoEndsAndMiddleOfPartitionsHaveBitVectorStatusLong() throws Exception {
    String dbName = "default";
    String tableName = "snp";
    long now = System.currentTimeMillis();
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col5_long", "long", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("ds", "string", ""));
    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
    store.createTable(table);
    List<List<String>> partVals = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        List<String> partVal = Arrays.asList("" + i);
        partVals.add(partVal);
        StorageDescriptor psd = new StorageDescriptor(sd);
        psd.setLocation("file:/tmp/default/hit/ds=" + partVal);
        Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
        store.addPartition(part);
        if (i == 0 || i == 2 || i == 3 || i == 5 || i == 6 || i == 8) {
            ColumnStatistics cs = new ColumnStatistics();
            ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
            desc.setLastAnalyzed(now);
            desc.setPartName("ds=" + partVal);
            cs.setStatsDesc(desc);
            ColumnStatisticsObj obj = new ColumnStatisticsObj();
            obj.setColName("col5_long");
            obj.setColType("long");
            ColumnStatisticsData data = new ColumnStatisticsData();
            LongColumnStatsData dcsd = new LongColumnStatsData();
            dcsd.setHighValue(1000 + i);
            dcsd.setLowValue(-1000 - i);
            dcsd.setNumNulls(i);
            dcsd.setNumDVs(10 * i + 1);
            dcsd.setBitVectors(bitVectors[i / 5]);
            data.setLongStats(dcsd);
            obj.setStatsData(data);
            cs.addToStatsObj(obj);
            store.updatePartitionColumnStatistics(cs, partVal);
        }
    }
    Checker statChecker = new Checker() {

        @Override
        public void checkStats(AggrStats aggrStats) throws Exception {
            Assert.assertEquals(6, aggrStats.getPartsFound());
            Assert.assertEquals(1, aggrStats.getColStatsSize());
            ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
            Assert.assertEquals("col5_long", cso.getColName());
            Assert.assertEquals("long", cso.getColType());
            LongColumnStatsData lcsd = cso.getStatsData().getLongStats();
            Assert.assertEquals(1010, lcsd.getHighValue(), 0.01);
            Assert.assertEquals(-1010, lcsd.getLowValue(), 0.01);
            Assert.assertEquals(40, lcsd.getNumNulls());
            Assert.assertEquals(12, lcsd.getNumDVs());
        }
    };
    List<String> partNames = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        partNames.add("ds=" + i);
    }
    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames, Arrays.asList("col5_long"));
    statChecker.checkStats(aggrStats);
}
Also used : ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) AggrStats(org.apache.hadoop.hive.metastore.api.AggrStats) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) ArrayList(java.util.ArrayList) List(java.util.List) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) Test(org.junit.Test)
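
This method depends on a Checker callback type and a bitVectors array that are declared at class level in TestHBaseAggregateStatsNDVUniformDist and do not appear in this listing. A rough sketch of those declarations; the bit-vector strings below are placeholders, not the values used by the real test:

// Assumed shape of the class-level helpers referenced above.
interface Checker {
    void checkStats(AggrStats aggrStats) throws Exception;
}

// Pre-encoded distinct-value sketches: partitions 0-4 take bitVectors[0],
// partitions 5-9 take bitVectors[1] (hence the i / 5 index above).
// Placeholder values only; the real test uses longer encoded strings.
static final String[] bitVectors = {
    "{0, 1}{0, 2}{1, 3}",
    "{0, 3}{1, 2}{2, 4}"
};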

Example 49 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

In class TestHBaseStore, method hashSd.

@Test
public void hashSd() throws Exception {
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", ""));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", true, 0, serde, null, null, emptyParameters);
    Map<List<String>, String> map = new HashMap<List<String>, String>();
    map.put(Arrays.asList("col3"), "col4");
    SkewedInfo skew = new SkewedInfo(Arrays.asList("col1"), Arrays.asList(Arrays.asList("col2")), map);
    sd.setSkewedInfo(skew);
    MessageDigest md = MessageDigest.getInstance("MD5");
    byte[] baseHash = HBaseUtils.hashStorageDescriptor(sd, md);
    StorageDescriptor changeSchema = new StorageDescriptor(sd);
    changeSchema.getCols().add(new FieldSchema("col2", "varchar(32)", "a comment"));
    byte[] schemaHash = HBaseUtils.hashStorageDescriptor(changeSchema, md);
    Assert.assertFalse(Arrays.equals(baseHash, schemaHash));
    StorageDescriptor changeLocation = new StorageDescriptor(sd);
    changeLocation.setLocation("file:/somewhere/else");
    byte[] locationHash = HBaseUtils.hashStorageDescriptor(changeLocation, md);
    Assert.assertArrayEquals(baseHash, locationHash);
}
Also used : SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) List(java.util.List) ArrayList(java.util.ArrayList) MessageDigest(java.security.MessageDigest) Test(org.junit.Test)
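
The assertions above show that the location does not participate in the hash, so two descriptors that differ only in location hash to the same key, while a schema change produces a different one. A small illustrative helper for rendering that key as a hex string, for example for logging or using it as a lookup key; this is not part of HBaseUtils:

// Illustrative helper, not part of the metastore code: hex-encode an SD hash.
static String toHex(byte[] hash) {
    StringBuilder sb = new StringBuilder(hash.length * 2);
    for (byte b : hash) {
        sb.append(String.format("%02x", b));
    }
    return sb.toString();
}

// With the hashes from the test above, toHex(baseHash) equals toHex(locationHash)
// but differs from toHex(schemaHash).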

Example 50 with StorageDescriptor

Use of org.apache.hadoop.hive.metastore.api.StorageDescriptor in project hive by apache.

In class TestHBaseStore, method skewInfo.

@Test
public void skewInfo() throws Exception {
    String tableName = "mytable";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", ""));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", true, 0, serde, null, null, emptyParameters);
    Map<List<String>, String> map = new HashMap<List<String>, String>();
    map.put(Arrays.asList("col3"), "col4");
    SkewedInfo skew = new SkewedInfo(Arrays.asList("col1"), Arrays.asList(Arrays.asList("col2")), map);
    sd.setSkewedInfo(skew);
    Table table = new Table(tableName, "default", "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    store.createTable(table);
    Table t = store.getTable("default", tableName);
    Assert.assertEquals(1, t.getSd().getColsSize());
    Assert.assertEquals("col1", t.getSd().getCols().get(0).getName());
    Assert.assertEquals("int", t.getSd().getCols().get(0).getType());
    Assert.assertEquals("", t.getSd().getCols().get(0).getComment());
    Assert.assertEquals("serde", t.getSd().getSerdeInfo().getName());
    Assert.assertEquals("seriallib", t.getSd().getSerdeInfo().getSerializationLib());
    Assert.assertEquals("file:/tmp", t.getSd().getLocation());
    Assert.assertEquals("input", t.getSd().getInputFormat());
    Assert.assertEquals("output", t.getSd().getOutputFormat());
    Assert.assertTrue(t.getSd().isCompressed());
    Assert.assertEquals(0, t.getSd().getNumBuckets());
    Assert.assertEquals(0, t.getSd().getSortColsSize());
    Assert.assertEquals("me", t.getOwner());
    Assert.assertEquals("default", t.getDbName());
    Assert.assertEquals(tableName, t.getTableName());
    Assert.assertEquals(0, t.getParametersSize());
    skew = t.getSd().getSkewedInfo();
    Assert.assertNotNull(skew);
    Assert.assertEquals(1, skew.getSkewedColNamesSize());
    Assert.assertEquals("col1", skew.getSkewedColNames().get(0));
    Assert.assertEquals(1, skew.getSkewedColValuesSize());
    Assert.assertEquals("col2", skew.getSkewedColValues().get(0).get(0));
    Assert.assertEquals(1, skew.getSkewedColValueLocationMapsSize());
    Assert.assertEquals("col4", skew.getSkewedColValueLocationMaps().get(Arrays.asList("col3")));
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) List(java.util.List) ArrayList(java.util.ArrayList) Test(org.junit.Test)
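
SkewedInfo is populated here through its three-argument constructor: skewed column names, skewed column value tuples, and a map from value tuple to location. For clarity, a sketch of the same object built incrementally with the Thrift-generated addTo/putTo helpers; illustrative only, the test uses the constructor form:

// Illustrative: the same SkewedInfo built step by step.
SkewedInfo skew = new SkewedInfo();
skew.addToSkewedColNames("col1");                                     // column that is skewed
skew.addToSkewedColValues(Arrays.asList("col2"));                     // one skewed value tuple
skew.putToSkewedColValueLocationMaps(Arrays.asList("col3"), "col4");  // value tuple -> location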

Aggregations

StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 284
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 163
Table (org.apache.hadoop.hive.metastore.api.Table): 159
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 155
ArrayList (java.util.ArrayList): 134
Test (org.junit.Test): 131
Partition (org.apache.hadoop.hive.metastore.api.Partition): 97
HashMap (java.util.HashMap): 61
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 38
List (java.util.List): 35
Order (org.apache.hadoop.hive.metastore.api.Order): 33
Path (org.apache.hadoop.fs.Path): 30
ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics): 30
ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc): 30
ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData): 29
ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj): 29
AggrStats (org.apache.hadoop.hive.metastore.api.AggrStats): 27
Database (org.apache.hadoop.hive.metastore.api.Database): 25
SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo): 23
IOException (java.io.IOException): 15