Example 71 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project hive by apache.

From the class TestHBaseAggregateStatsExtrapolation, method allPartitionsHaveBitVectorStatusString:

@Test
public void allPartitionsHaveBitVectorStatusString() throws Exception {
    String dbName = "default";
    String tableName = "snp";
    long now = System.currentTimeMillis();
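    // Casting millis to int truncates the timestamp; none of the assertions below read these times.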
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1_string", "string", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("ds", "string", ""));
    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
    store.createTable(table);
    List<List<String>> partVals = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        List<String> partVal = Arrays.asList("" + i);
        partVals.add(partVal);
        StorageDescriptor psd = new StorageDescriptor(sd);
        // Use the partition value itself, not List.toString() (which would yield "ds=[0]").
        psd.setLocation("file:/tmp/default/hit/ds=" + partVal.get(0));
        Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
        store.addPartition(part);
        ColumnStatistics cs = new ColumnStatistics();
        ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
        desc.setLastAnalyzed(now);
        desc.setPartName("ds=" + partVal.get(0));
        cs.setStatsDesc(desc);
        ColumnStatisticsObj obj = new ColumnStatisticsObj();
        obj.setColName("col1_string");
        obj.setColType("string");
        ColumnStatisticsData data = new ColumnStatisticsData();
        StringColumnStatsData dcsd = new StringColumnStatsData();
        dcsd.setAvgColLen(i + 1);
        dcsd.setMaxColLen(i + 10);
        dcsd.setNumNulls(i);
        dcsd.setNumDVs(10 * i + 1);
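        // bitVectors is a fixture field on the test class: a string of
        // pre-built distinct-value bit vectors shared by these tests.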
        dcsd.setBitVectors(bitVectors);
        data.setStringStats(dcsd);
        obj.setStatsData(data);
        cs.addToStatsObj(obj);
        store.updatePartitionColumnStatistics(cs, partVal);
    }
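    // Expected aggregates over all 10 partitions:
    //   avgColLen -> 10, the max of the per-partition values (i + 1)
    //   maxColLen -> 19, the max of the per-partition values (i + 10)
    //   numNulls  -> 45, the sum 0 + 1 + ... + 9
    //   numDVs    -> 3, estimated from the merged bit vectors rather than the setNumDVs values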
    Checker statChecker = new Checker() {

        @Override
        public void checkStats(AggrStats aggrStats) throws Exception {
            Assert.assertEquals(10, aggrStats.getPartsFound());
            Assert.assertEquals(1, aggrStats.getColStatsSize());
            ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
            Assert.assertEquals("col1_string", cso.getColName());
            Assert.assertEquals("string", cso.getColType());
            StringColumnStatsData lcsd = cso.getStatsData().getStringStats();
            Assert.assertEquals(10, lcsd.getAvgColLen(), 0.01);
            Assert.assertEquals(19, lcsd.getMaxColLen(), 0.01);
            Assert.assertEquals(45, lcsd.getNumNulls());
            Assert.assertEquals(3, lcsd.getNumDVs());
        }
    };
    List<String> partNames = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        partNames.add("ds=" + i);
    }
    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames, Arrays.asList("col1_string"));
    statChecker.checkStats(aggrStats);
}
Also used : ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) AggrStats(org.apache.hadoop.hive.metastore.api.AggrStats) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) StringColumnStatsData(org.apache.hadoop.hive.metastore.api.StringColumnStatsData) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) List(java.util.List) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) Test(org.junit.Test)

Example 72 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project hive by apache.

From the class TestHBaseAggregateStatsExtrapolation, method TwoEndsAndMiddleOfPartitionsHaveBitVectorStatusLong:

@Test
public void TwoEndsAndMiddleOfPartitionsHaveBitVectorStatusLong() throws Exception {
    String dbName = "default";
    String tableName = "snp";
    long now = System.currentTimeMillis();
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col5", "long", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, Collections.<String, String>emptyMap());
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("ds", "string", ""));
    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
    store.createTable(table);
    List<List<String>> partVals = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        List<String> partVal = Arrays.asList("" + i);
        partVals.add(partVal);
        StorageDescriptor psd = new StorageDescriptor(sd);
        psd.setLocation("file:/tmp/default/hit/ds=" + partVal.get(0));
        Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd, Collections.<String, String>emptyMap());
        store.addPartition(part);
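        // Only partitions 0, 2, 3, 5, 6, and 8 get column stats; the four
        // unanalyzed partitions are what the aggregator must extrapolate over.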
        if (i == 0 || i == 2 || i == 3 || i == 5 || i == 6 || i == 8) {
            ColumnStatistics cs = new ColumnStatistics();
            ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
            desc.setLastAnalyzed(now);
            desc.setPartName("ds=" + partVal.get(0));
            cs.setStatsDesc(desc);
            ColumnStatisticsObj obj = new ColumnStatisticsObj();
            obj.setColName("col5");
            obj.setColType("long");
            ColumnStatisticsData data = new ColumnStatisticsData();
            LongColumnStatsData dcsd = new LongColumnStatsData();
            dcsd.setHighValue(1000 + i);
            dcsd.setLowValue(-1000 - i);
            dcsd.setNumNulls(i);
            dcsd.setNumDVs(10 * i);
            dcsd.setBitVectors(bitVectors);
            data.setLongStats(dcsd);
            obj.setStatsData(data);
            cs.addToStatsObj(obj);
            store.updatePartitionColumnStatistics(cs, partVal);
        }
    }
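    // Expected aggregates, extrapolated from the 6 analyzed partitions to all 10:
    //   numNulls  -> sum(0, 2, 3, 5, 6, 8) = 24, scaled by 10/6 to 40
    //   highValue -> extrapolated past the observed max of 1008 to 1010
    //   lowValue  -> extrapolated past the observed min of -1008 to -1010
    //   numDVs    -> 3, estimated from the merged bit vectors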
    Checker statChecker = new Checker() {

        @Override
        public void checkStats(AggrStats aggrStats) throws Exception {
            Assert.assertEquals(6, aggrStats.getPartsFound());
            Assert.assertEquals(1, aggrStats.getColStatsSize());
            ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
            Assert.assertEquals("col5", cso.getColName());
            Assert.assertEquals("long", cso.getColType());
            LongColumnStatsData lcsd = cso.getStatsData().getLongStats();
            Assert.assertEquals(1010, lcsd.getHighValue(), 0.01);
            Assert.assertEquals(-1010, lcsd.getLowValue(), 0.01);
            Assert.assertEquals(40, lcsd.getNumNulls());
            Assert.assertEquals(3, lcsd.getNumDVs());
        }
    };
    List<String> partNames = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        partNames.add("ds=" + i);
    }
    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames, Arrays.asList("col5"));
    statChecker.checkStats(aggrStats);
}
Also used : ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) AggrStats(org.apache.hadoop.hive.metastore.api.AggrStats) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) List(java.util.List) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData) Test(org.junit.Test)

Example 73 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project hive by apache.

From the class TestHBaseStore, method alterPartition:

@Test
public void alterPartition() throws Exception {
    String tableName = "alterparttable";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
    partCols.add(new FieldSchema("pc", "string", ""));
    Table table = new Table(tableName, DB, "me", startTime, startTime, 0, sd, partCols, emptyParameters, null, null, null);
    store.createTable(table);
    List<String> vals = Arrays.asList("fred");
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=fred");
    Partition part = new Partition(vals, DB, tableName, startTime, startTime, psd, emptyParameters);
    store.addPartition(part);
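    // Mutate the partition in memory, push the change through alterPartition, then read it back.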
    part.setLastAccessTime(startTime + 10);
    store.alterPartition(DB, tableName, vals, part);
    Partition p = store.getPartition(DB, tableName, vals);
    Assert.assertEquals(1, p.getSd().getColsSize());
    Assert.assertEquals("col1", p.getSd().getCols().get(0).getName());
    Assert.assertEquals("int", p.getSd().getCols().get(0).getType());
    Assert.assertEquals("nocomment", p.getSd().getCols().get(0).getComment());
    Assert.assertEquals("serde", p.getSd().getSerdeInfo().getName());
    Assert.assertEquals("seriallib", p.getSd().getSerdeInfo().getSerializationLib());
    Assert.assertEquals("file:/tmp/pc=fred", p.getSd().getLocation());
    Assert.assertEquals("input", p.getSd().getInputFormat());
    Assert.assertEquals("output", p.getSd().getOutputFormat());
    Assert.assertEquals(DB, p.getDbName());
    Assert.assertEquals(tableName, p.getTableName());
    Assert.assertEquals(1, p.getValuesSize());
    Assert.assertEquals("fred", p.getValues().get(0));
    Assert.assertEquals(startTime + 10, p.getLastAccessTime());
    Assert.assertTrue(store.doesPartitionExist(DB, tableName, vals));
    Assert.assertFalse(store.doesPartitionExist(DB, tableName, Arrays.asList("bob")));
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Test(org.junit.Test)

Example 74 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project hive by apache.

From the class TestHBaseStore, method createTable:

@Test
public void createTable() throws Exception {
    String tableName = "mytable";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", ""));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    Map<String, String> params = new HashMap<String, String>();
    params.put("key", "value");
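    // Storage descriptor with compression off, 17 buckets, one bucket column,
    // and "sortcol" sorted ascending (order 1).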
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 17, serde, Arrays.asList("bucketcol"), Arrays.asList(new Order("sortcol", 1)), params);
    Table table = new Table(tableName, "default", "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    store.createTable(table);
    Table t = store.getTable("default", tableName);
    Assert.assertEquals(1, t.getSd().getColsSize());
    Assert.assertEquals("col1", t.getSd().getCols().get(0).getName());
    Assert.assertEquals("int", t.getSd().getCols().get(0).getType());
    Assert.assertEquals("", t.getSd().getCols().get(0).getComment());
    Assert.assertEquals("serde", t.getSd().getSerdeInfo().getName());
    Assert.assertEquals("seriallib", t.getSd().getSerdeInfo().getSerializationLib());
    Assert.assertEquals("file:/tmp", t.getSd().getLocation());
    Assert.assertEquals("input", t.getSd().getInputFormat());
    Assert.assertEquals("output", t.getSd().getOutputFormat());
    Assert.assertFalse(t.getSd().isCompressed());
    Assert.assertEquals(17, t.getSd().getNumBuckets());
    Assert.assertEquals(1, t.getSd().getBucketColsSize());
    Assert.assertEquals("bucketcol", t.getSd().getBucketCols().get(0));
    Assert.assertEquals(1, t.getSd().getSortColsSize());
    Assert.assertEquals("sortcol", t.getSd().getSortCols().get(0).getCol());
    Assert.assertEquals(1, t.getSd().getSortCols().get(0).getOrder());
    Assert.assertEquals(1, t.getSd().getParametersSize());
    Assert.assertEquals("value", t.getSd().getParameters().get("key"));
    Assert.assertEquals("me", t.getOwner());
    Assert.assertEquals("default", t.getDbName());
    Assert.assertEquals(tableName, t.getTableName());
    Assert.assertEquals(0, t.getParametersSize());
}
Also used : Order(org.apache.hadoop.hive.metastore.api.Order) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Test(org.junit.Test)

Example 75 with FieldSchema

Use of org.apache.hadoop.hive.metastore.api.FieldSchema in project hive by apache.

From the class TestHBaseStore, method dropPartition:

@Test
public void dropPartition() throws Exception {
    String tableName = "myparttable2";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
    partCols.add(new FieldSchema("pc", "string", ""));
    Table table = new Table(tableName, DB, "me", startTime, startTime, 0, sd, partCols, emptyParameters, null, null, null);
    store.createTable(table);
    List<String> vals = Arrays.asList("fred");
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/pc=fred");
    Partition part = new Partition(vals, DB, tableName, startTime, startTime, psd, emptyParameters);
    store.addPartition(part);
    Assert.assertNotNull(store.getPartition(DB, tableName, vals));
    store.dropPartition(DB, tableName, vals);
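    // "thrown" is the test class's JUnit ExpectedException rule; the final
    // getPartition call must now fail for a partition that no longer exists.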
    thrown.expect(NoSuchObjectException.class);
    store.getPartition(DB, tableName, vals);
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Test(org.junit.Test)

Aggregations

FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 407
ArrayList (java.util.ArrayList): 254
Table (org.apache.hadoop.hive.metastore.api.Table): 163
Test (org.junit.Test): 160
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 136
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 118
Partition (org.apache.hadoop.hive.metastore.api.Partition): 93
HashMap (java.util.HashMap): 69
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 44
List (java.util.List): 42
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 42
ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics): 40
Database (org.apache.hadoop.hive.metastore.api.Database): 40
ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj): 39
IOException (java.io.IOException): 36
ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc): 36
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 35
ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData): 34
Path (org.apache.hadoop.fs.Path): 32
AggrStats (org.apache.hadoop.hive.metastore.api.AggrStats): 32
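The counts above trace a single recurring pattern: build a List of FieldSchema columns, wrap it in a StorageDescriptor together with a SerDeInfo, and hand the result to a Table. A minimal sketch of that pattern, outside any test harness, follows; the table, column, and location names are placeholders rather than values taken from the examples above.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class FieldSchemaSketch {
    public static void main(String[] args) {
        int now = (int) (System.currentTimeMillis() / 1000);
        // Column definitions: name, type, comment. Names here are placeholders.
        List<FieldSchema> cols = new ArrayList<>();
        cols.add(new FieldSchema("id", "int", "row id"));
        cols.add(new FieldSchema("payload", "string", ""));
        // Serde: name, serialization library, parameters.
        SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
        // Storage descriptor: columns, location, input/output formats,
        // compressed, numBuckets, serde, bucket cols, sort cols, parameters.
        StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp/sketch", "input", "output",
            false, 0, serde, null, null, Collections.<String, String>emptyMap());
        // Partition columns are plain FieldSchemas too.
        List<FieldSchema> partCols = new ArrayList<>();
        partCols.add(new FieldSchema("ds", "string", ""));
        // Table: name, db, owner, createTime, lastAccessTime, retention,
        // sd, partition cols, parameters, view texts, table type.
        Table table = new Table("sketch_table", "default", "me", now, now, 0,
            sd, partCols, Collections.<String, String>emptyMap(), null, null, null);
        System.out.println(table);
    }
}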