Search in sources:

Example 1 with CreateTableEvent

Use of org.apache.hadoop.hive.metastore.events.CreateTableEvent in project hive by apache.
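All three examples read events back out of DummyListener.notifyList, a static list that a test listener fills as the metastore fires events. As background, a minimal capturing listener might look like the sketch below; the class name CapturingListener and the exact set of overridden callbacks are illustrative assumptions, not the actual DummyListener from the Hive test sources.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
import org.apache.hadoop.hive.metastore.events.ListenerEvent;

public class CapturingListener extends MetaStoreEventListener {

    // Events are collected into a static list so a test can assert on them later.
    public static final List<ListenerEvent> notifyList = new ArrayList<>();

    public CapturingListener(Configuration config) {
        super(config);
    }

    @Override
    public void onCreateDatabase(CreateDatabaseEvent dbEvent) throws MetaException {
        notifyList.add(dbEvent);
    }

    @Override
    public void onCreateTable(CreateTableEvent tableEvent) throws MetaException {
        notifyList.add(tableEvent);
    }
}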

From the class TestHiveMetaStoreStatsMerge, method testStatsMerge:

public void testStatsMerge() throws Exception {
    int listSize = 0;
    List<ListenerEvent> notifyList = DummyListener.notifyList;
    assertEquals(notifyList.size(), listSize);
    msc.createDatabase(db);
    listSize++;
    assertEquals(listSize, notifyList.size());
    CreateDatabaseEvent dbEvent = (CreateDatabaseEvent) (notifyList.get(listSize - 1));
    assert dbEvent.getStatus();
    msc.createTable(table);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    CreateTableEvent tblEvent = (CreateTableEvent) (notifyList.get(listSize - 1));
    assert tblEvent.getStatus();
    table = msc.getTable(dbName, tblName);
    ColumnStatistics cs = new ColumnStatistics();
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc(true, dbName, tblName);
    cs.setStatsDesc(desc);
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName("a");
    obj.setColType("string");
    ColumnStatisticsData data = new ColumnStatisticsData();
    StringColumnStatsData scsd = new StringColumnStatsData();
    scsd.setAvgColLen(10);
    scsd.setMaxColLen(20);
    scsd.setNumNulls(30);
    scsd.setNumDVs(123);
    scsd.setBitVectors("{0, 4, 5, 7}{0, 1}{0, 1, 2}{0, 1, 4}{0}{0, 2}{0, 3}{0, 2, 3, 4}{0, 1, 4}{0, 1}{0}{0, 1, 3, 8}{0, 2}{0, 2}{0, 9}{0, 1, 4}");
    data.setStringStats(scsd);
    obj.setStatsData(data);
    cs.addToStatsObj(obj);
    List<ColumnStatistics> colStats = new ArrayList<>();
    colStats.add(cs);
    SetPartitionsStatsRequest request = new SetPartitionsStatsRequest(colStats);
    msc.setPartitionColumnStatistics(request);
    List<String> colNames = new ArrayList<>();
    colNames.add("a");
    StringColumnStatsData getScsd = msc.getTableColumnStatistics(dbName, tblName, colNames).get(0).getStatsData().getStringStats();
    assertEquals(getScsd.getNumDVs(), 123);
    cs = new ColumnStatistics();
    scsd = new StringColumnStatsData();
    scsd.setAvgColLen(20);
    scsd.setMaxColLen(5);
    scsd.setNumNulls(70);
    scsd.setNumDVs(456);
    scsd.setBitVectors("{0, 1}{0, 1}{1, 2, 4}{0, 1, 2}{0, 1, 2}{0, 2}{0, 1, 3, 4}{0, 1}{0, 1}{3, 4, 6}{2}{0, 1}{0, 3}{0}{0, 1}{0, 1, 4}");
    data.setStringStats(scsd);
    obj.setStatsData(data);
    cs.addToStatsObj(obj);
    request = new SetPartitionsStatsRequest(colStats);
    request.setNeedMerge(true);
    msc.setPartitionColumnStatistics(request);
    getScsd = msc.getTableColumnStatistics(dbName, tblName, colNames).get(0).getStatsData().getStringStats();
    assertEquals(getScsd.getAvgColLen(), 20.0);
    assertEquals(getScsd.getMaxColLen(), 20);
    assertEquals(getScsd.getNumNulls(), 100);
    // since metastore is ObjectStore, we use the max function to merge.
    assertEquals(getScsd.getNumDVs(), 456);
}
Also used : ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) CreateDatabaseEvent(org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent) ArrayList(java.util.ArrayList) StringColumnStatsData(org.apache.hadoop.hive.metastore.api.StringColumnStatsData) SetPartitionsStatsRequest(org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest) ListenerEvent(org.apache.hadoop.hive.metastore.events.ListenerEvent) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) CreateTableEvent(org.apache.hadoop.hive.metastore.events.CreateTableEvent) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) ColumnStatisticsData(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData)
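The final four assertions follow from how the ObjectStore merges the two sets of string statistics: avgColLen and maxColLen keep the larger of the old and new values (max(10, 20) = 20.0 and max(20, 5) = 20), numNulls is the sum of the two (30 + 70 = 100), and, as the inline comment notes, numDVs is merged with the max function (max(123, 456) = 456).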

Example 2 with CreateTableEvent

Use of org.apache.hadoop.hive.metastore.events.CreateTableEvent in project hive by apache.

From the class TestHiveMetaStoreWithEnvironmentContext, method testEnvironmentContext:

public void testEnvironmentContext() throws Exception {
    int listSize = 0;
    List<ListenerEvent> notifyList = DummyListener.notifyList;
    assertEquals(notifyList.size(), listSize);
    msc.createDatabase(db);
    listSize++;
    assertEquals(listSize, notifyList.size());
    CreateDatabaseEvent dbEvent = (CreateDatabaseEvent) (notifyList.get(listSize - 1));
    assert dbEvent.getStatus();
    msc.createTable(table, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    CreateTableEvent tblEvent = (CreateTableEvent) (notifyList.get(listSize - 1));
    assert tblEvent.getStatus();
    assertEquals(envContext, tblEvent.getEnvironmentContext());
    table = msc.getTable(dbName, tblName);
    partition.getSd().setLocation(table.getSd().getLocation() + "/part1");
    msc.add_partition(partition, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    AddPartitionEvent partEvent = (AddPartitionEvent) (notifyList.get(listSize - 1));
    assert partEvent.getStatus();
    assertEquals(envContext, partEvent.getEnvironmentContext());
    List<String> partVals = new ArrayList<String>();
    partVals.add("2012");
    msc.appendPartition(dbName, tblName, partVals, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    AddPartitionEvent appendPartEvent = (AddPartitionEvent) (notifyList.get(listSize - 1));
    assert appendPartEvent.getStatus();
    assertEquals(envContext, appendPartEvent.getEnvironmentContext());
    table.setTableName(renamed);
    msc.alter_table_with_environmentContext(dbName, tblName, table, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    AlterTableEvent alterTableEvent = (AlterTableEvent) notifyList.get(listSize - 1);
    assert alterTableEvent.getStatus();
    assertEquals(envContext, alterTableEvent.getEnvironmentContext());
    table.setTableName(tblName);
    msc.alter_table_with_environmentContext(dbName, renamed, table, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    List<String> dropPartVals = new ArrayList<String>();
    dropPartVals.add("2011");
    msc.dropPartition(dbName, tblName, dropPartVals, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    DropPartitionEvent dropPartEvent = (DropPartitionEvent) notifyList.get(listSize - 1);
    assert dropPartEvent.getStatus();
    assertEquals(envContext, dropPartEvent.getEnvironmentContext());
    msc.dropPartition(dbName, tblName, "b=2012", true, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    DropPartitionEvent dropPartByNameEvent = (DropPartitionEvent) notifyList.get(listSize - 1);
    assert dropPartByNameEvent.getStatus();
    assertEquals(envContext, dropPartByNameEvent.getEnvironmentContext());
    msc.dropTable(dbName, tblName, true, false, envContext);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    DropTableEvent dropTblEvent = (DropTableEvent) notifyList.get(listSize - 1);
    assert dropTblEvent.getStatus();
    assertEquals(envContext, dropTblEvent.getEnvironmentContext());
    msc.dropDatabase(dbName);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    DropDatabaseEvent dropDB = (DropDatabaseEvent) notifyList.get(listSize - 1);
    assert dropDB.getStatus();
}
Also used : DropPartitionEvent(org.apache.hadoop.hive.metastore.events.DropPartitionEvent) CreateDatabaseEvent(org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent) ArrayList(java.util.ArrayList) ListenerEvent(org.apache.hadoop.hive.metastore.events.ListenerEvent) AlterTableEvent(org.apache.hadoop.hive.metastore.events.AlterTableEvent) DropDatabaseEvent(org.apache.hadoop.hive.metastore.events.DropDatabaseEvent) CreateTableEvent(org.apache.hadoop.hive.metastore.events.CreateTableEvent) DropTableEvent(org.apache.hadoop.hive.metastore.events.DropTableEvent) AddPartitionEvent(org.apache.hadoop.hive.metastore.events.AddPartitionEvent)
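The envContext object that the test threads through every metastore call above is a Thrift-generated EnvironmentContext carrying a property map, which the listener can read back from each event. A minimal sketch of building one is shown below; the property key and value are illustrative assumptions, not the ones used in the actual test setup.

import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

EnvironmentContext envContext = new EnvironmentContext();
// putToProperties is the Thrift-generated helper for the properties map field.
envContext.putToProperties("TEST_KEY", "TEST_VALUE");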

Example 3 with CreateTableEvent

Use of org.apache.hadoop.hive.metastore.events.CreateTableEvent in project hive by apache.

From the class TestMetaStoreEventListener, method testListener:

public void testListener() throws Exception {
    int listSize = 0;
    List<ListenerEvent> notifyList = DummyListener.notifyList;
    List<PreEventContext> preNotifyList = DummyPreListener.notifyList;
    assertEquals(notifyList.size(), listSize);
    assertEquals(preNotifyList.size(), listSize);
    driver.run("create database " + dbName);
    listSize++;
    PreCreateDatabaseEvent preDbEvent = (PreCreateDatabaseEvent) (preNotifyList.get(preNotifyList.size() - 1));
    Database db = msc.getDatabase(dbName);
    assertEquals(listSize, notifyList.size());
    assertEquals(listSize + 1, preNotifyList.size());
    validateCreateDb(db, preDbEvent.getDatabase());
    CreateDatabaseEvent dbEvent = (CreateDatabaseEvent) (notifyList.get(listSize - 1));
    assert dbEvent.getStatus();
    validateCreateDb(db, dbEvent.getDatabase());
    driver.run("use " + dbName);
    driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
    PreCreateTableEvent preTblEvent = (PreCreateTableEvent) (preNotifyList.get(preNotifyList.size() - 1));
    listSize++;
    Table tbl = msc.getTable(dbName, tblName);
    validateCreateTable(tbl, preTblEvent.getTable());
    assertEquals(notifyList.size(), listSize);
    CreateTableEvent tblEvent = (CreateTableEvent) (notifyList.get(listSize - 1));
    assert tblEvent.getStatus();
    validateCreateTable(tbl, tblEvent.getTable());
    driver.run("create index tmptbl_i on table tmptbl(a) as 'compact' " + "WITH DEFERRED REBUILD IDXPROPERTIES ('prop1'='val1', 'prop2'='val2')");
    // creating the index also creates an index table internally, so two events are fired
    listSize += 2;
    assertEquals(notifyList.size(), listSize);
    AddIndexEvent addIndexEvent = (AddIndexEvent) notifyList.get(listSize - 1);
    assert addIndexEvent.getStatus();
    PreAddIndexEvent preAddIndexEvent = (PreAddIndexEvent) (preNotifyList.get(preNotifyList.size() - 3));
    Index oldIndex = msc.getIndex(dbName, "tmptbl", "tmptbl_i");
    validateAddIndex(oldIndex, addIndexEvent.getIndex());
    validateAddIndex(oldIndex, preAddIndexEvent.getIndex());
    driver.run("alter index tmptbl_i on tmptbl set IDXPROPERTIES " + "('prop1'='val1_new', 'prop3'='val3')");
    listSize++;
    assertEquals(notifyList.size(), listSize);
    Index newIndex = msc.getIndex(dbName, "tmptbl", "tmptbl_i");
    AlterIndexEvent alterIndexEvent = (AlterIndexEvent) notifyList.get(listSize - 1);
    assert alterIndexEvent.getStatus();
    validateAlterIndex(oldIndex, alterIndexEvent.getOldIndex(), newIndex, alterIndexEvent.getNewIndex());
    PreAlterIndexEvent preAlterIndexEvent = (PreAlterIndexEvent) (preNotifyList.get(preNotifyList.size() - 1));
    validateAlterIndex(oldIndex, preAlterIndexEvent.getOldIndex(), newIndex, preAlterIndexEvent.getNewIndex());
    driver.run("drop index tmptbl_i on tmptbl");
    listSize++;
    assertEquals(notifyList.size(), listSize);
    DropIndexEvent dropIndexEvent = (DropIndexEvent) notifyList.get(listSize - 1);
    assert dropIndexEvent.getStatus();
    validateDropIndex(newIndex, dropIndexEvent.getIndex());
    PreDropIndexEvent preDropIndexEvent = (PreDropIndexEvent) (preNotifyList.get(preNotifyList.size() - 1));
    validateDropIndex(newIndex, preDropIndexEvent.getIndex());
    driver.run("alter table tmptbl add partition (b='2011')");
    listSize++;
    assertEquals(notifyList.size(), listSize);
    PreAddPartitionEvent prePartEvent = (PreAddPartitionEvent) (preNotifyList.get(preNotifyList.size() - 1));
    AddPartitionEvent partEvent = (AddPartitionEvent) (notifyList.get(listSize - 1));
    assert partEvent.getStatus();
    Partition part = msc.getPartition("hive2038", "tmptbl", "b=2011");
    Partition partAdded = partEvent.getPartitionIterator().next();
    validateAddPartition(part, partAdded);
    validateTableInAddPartition(tbl, partEvent.getTable());
    validateAddPartition(part, prePartEvent.getPartitions().get(0));
    // Test adding multiple partitions in a single partition-set, atomically.
    int currentTime = (int) System.currentTimeMillis();
    HiveMetaStoreClient hmsClient = new HiveMetaStoreClient(hiveConf);
    Table table = hmsClient.getTable(dbName, "tmptbl");
    Partition partition1 = new Partition(Arrays.asList("20110101"), dbName, "tmptbl", currentTime, currentTime, table.getSd(), table.getParameters());
    Partition partition2 = new Partition(Arrays.asList("20110102"), dbName, "tmptbl", currentTime, currentTime, table.getSd(), table.getParameters());
    Partition partition3 = new Partition(Arrays.asList("20110103"), dbName, "tmptbl", currentTime, currentTime, table.getSd(), table.getParameters());
    hmsClient.add_partitions(Arrays.asList(partition1, partition2, partition3));
    ++listSize;
    AddPartitionEvent multiplePartitionEvent = (AddPartitionEvent) (notifyList.get(listSize - 1));
    assertEquals("Unexpected table value.", table, multiplePartitionEvent.getTable());
    List<Partition> multiParts = Lists.newArrayList(multiplePartitionEvent.getPartitionIterator());
    assertEquals("Unexpected number of partitions in event!", 3, multiParts.size());
    assertEquals("Unexpected partition value.", partition1.getValues(), multiParts.get(0).getValues());
    assertEquals("Unexpected partition value.", partition2.getValues(), multiParts.get(1).getValues());
    assertEquals("Unexpected partition value.", partition3.getValues(), multiParts.get(2).getValues());
    driver.run(String.format("alter table %s touch partition (%s)", tblName, "b='2011'"));
    listSize++;
    assertEquals(notifyList.size(), listSize);
    PreAlterPartitionEvent preAlterPartEvent = (PreAlterPartitionEvent) preNotifyList.get(preNotifyList.size() - 1);
    // the partition did not change, so the new partition should be similar to the original partition
    Partition origP = msc.getPartition(dbName, tblName, "b=2011");
    AlterPartitionEvent alterPartEvent = (AlterPartitionEvent) notifyList.get(listSize - 1);
    assert alterPartEvent.getStatus();
    validateAlterPartition(origP, origP, alterPartEvent.getOldPartition().getDbName(), alterPartEvent.getOldPartition().getTableName(), alterPartEvent.getOldPartition().getValues(), alterPartEvent.getNewPartition());
    validateAlterPartition(origP, origP, preAlterPartEvent.getDbName(), preAlterPartEvent.getTableName(), preAlterPartEvent.getNewPartition().getValues(), preAlterPartEvent.getNewPartition());
    List<String> part_vals = new ArrayList<String>();
    part_vals.add("c=2012");
    int preEventListSize;
    preEventListSize = preNotifyList.size() + 1;
    Partition newPart = msc.appendPartition(dbName, tblName, part_vals);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    assertEquals(preNotifyList.size(), preEventListSize);
    AddPartitionEvent appendPartEvent = (AddPartitionEvent) (notifyList.get(listSize - 1));
    Partition partAppended = appendPartEvent.getPartitionIterator().next();
    validateAddPartition(newPart, partAppended);
    PreAddPartitionEvent preAppendPartEvent = (PreAddPartitionEvent) (preNotifyList.get(preNotifyList.size() - 1));
    validateAddPartition(newPart, preAppendPartEvent.getPartitions().get(0));
    driver.run(String.format("alter table %s rename to %s", tblName, renamed));
    listSize++;
    assertEquals(notifyList.size(), listSize);
    PreAlterTableEvent preAlterTableE = (PreAlterTableEvent) preNotifyList.get(preNotifyList.size() - 1);
    Table renamedTable = msc.getTable(dbName, renamed);
    AlterTableEvent alterTableE = (AlterTableEvent) notifyList.get(listSize - 1);
    assert alterTableE.getStatus();
    validateAlterTable(tbl, renamedTable, alterTableE.getOldTable(), alterTableE.getNewTable());
    validateAlterTable(tbl, renamedTable, preAlterTableE.getOldTable(), preAlterTableE.getNewTable());
    // change the table name back
    driver.run(String.format("alter table %s rename to %s", renamed, tblName));
    listSize++;
    assertEquals(notifyList.size(), listSize);
    driver.run(String.format("alter table %s ADD COLUMNS (c int)", tblName));
    listSize++;
    assertEquals(notifyList.size(), listSize);
    preAlterTableE = (PreAlterTableEvent) preNotifyList.get(preNotifyList.size() - 1);
    Table altTable = msc.getTable(dbName, tblName);
    alterTableE = (AlterTableEvent) notifyList.get(listSize - 1);
    assert alterTableE.getStatus();
    validateAlterTableColumns(tbl, altTable, alterTableE.getOldTable(), alterTableE.getNewTable());
    validateAlterTableColumns(tbl, altTable, preAlterTableE.getOldTable(), preAlterTableE.getNewTable());
    Map<String, String> kvs = new HashMap<String, String>(1);
    kvs.put("b", "2011");
    msc.markPartitionForEvent("hive2038", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    LoadPartitionDoneEvent partMarkEvent = (LoadPartitionDoneEvent) notifyList.get(listSize - 1);
    assert partMarkEvent.getStatus();
    validateLoadPartitionDone("tmptbl", kvs, partMarkEvent.getTable().getTableName(), partMarkEvent.getPartitionName());
    PreLoadPartitionDoneEvent prePartMarkEvent = (PreLoadPartitionDoneEvent) preNotifyList.get(preNotifyList.size() - 1);
    validateLoadPartitionDone("tmptbl", kvs, prePartMarkEvent.getTableName(), prePartMarkEvent.getPartitionName());
    driver.run(String.format("alter table %s drop partition (b='2011')", tblName));
    listSize++;
    assertEquals(notifyList.size(), listSize);
    PreDropPartitionEvent preDropPart = (PreDropPartitionEvent) preNotifyList.get(preNotifyList.size() - 1);
    DropPartitionEvent dropPart = (DropPartitionEvent) notifyList.get(listSize - 1);
    assert dropPart.getStatus();
    validateDropPartition(Collections.singletonList(part).iterator(), dropPart.getPartitionIterator());
    validateTableInDropPartition(tbl, dropPart.getTable());
    validateDropPartition(Collections.singletonList(part).iterator(), preDropPart.getPartitionIterator());
    validateTableInDropPartition(tbl, preDropPart.getTable());
    driver.run("drop table " + tblName);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    PreDropTableEvent preDropTbl = (PreDropTableEvent) preNotifyList.get(preNotifyList.size() - 1);
    DropTableEvent dropTbl = (DropTableEvent) notifyList.get(listSize - 1);
    assert dropTbl.getStatus();
    validateDropTable(tbl, dropTbl.getTable());
    validateDropTable(tbl, preDropTbl.getTable());
    driver.run("drop database " + dbName);
    listSize++;
    assertEquals(notifyList.size(), listSize);
    PreDropDatabaseEvent preDropDB = (PreDropDatabaseEvent) preNotifyList.get(preNotifyList.size() - 1);
    DropDatabaseEvent dropDB = (DropDatabaseEvent) notifyList.get(listSize - 1);
    assert dropDB.getStatus();
    validateDropDb(db, dropDB.getDatabase());
    validateDropDb(db, preDropDB.getDatabase());
    SetProcessor.setVariable("metaconf:hive.metastore.try.direct.sql", "false");
    ConfigChangeEvent event = (ConfigChangeEvent) notifyList.get(notifyList.size() - 1);
    assertEquals("hive.metastore.try.direct.sql", event.getKey());
    assertEquals("true", event.getOldValue());
    assertEquals("false", event.getNewValue());
}
Also used : PreAddPartitionEvent(org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent) PreDropIndexEvent(org.apache.hadoop.hive.metastore.events.PreDropIndexEvent) HashMap(java.util.HashMap) CreateDatabaseEvent(org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent) PreCreateDatabaseEvent(org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent) PreAlterPartitionEvent(org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent) ArrayList(java.util.ArrayList) Index(org.apache.hadoop.hive.metastore.api.Index) PreAlterTableEvent(org.apache.hadoop.hive.metastore.events.PreAlterTableEvent) PreLoadPartitionDoneEvent(org.apache.hadoop.hive.metastore.events.PreLoadPartitionDoneEvent) PreDropDatabaseEvent(org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent) ListenerEvent(org.apache.hadoop.hive.metastore.events.ListenerEvent) AlterTableEvent(org.apache.hadoop.hive.metastore.events.AlterTableEvent) PreAlterTableEvent(org.apache.hadoop.hive.metastore.events.PreAlterTableEvent) DropDatabaseEvent(org.apache.hadoop.hive.metastore.events.DropDatabaseEvent) PreDropDatabaseEvent(org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent) PreAddIndexEvent(org.apache.hadoop.hive.metastore.events.PreAddIndexEvent) PreCreateTableEvent(org.apache.hadoop.hive.metastore.events.PreCreateTableEvent) CreateTableEvent(org.apache.hadoop.hive.metastore.events.CreateTableEvent) PreAlterIndexEvent(org.apache.hadoop.hive.metastore.events.PreAlterIndexEvent) DropIndexEvent(org.apache.hadoop.hive.metastore.events.DropIndexEvent) PreDropIndexEvent(org.apache.hadoop.hive.metastore.events.PreDropIndexEvent) PreLoadPartitionDoneEvent(org.apache.hadoop.hive.metastore.events.PreLoadPartitionDoneEvent) LoadPartitionDoneEvent(org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent) PreDropTableEvent(org.apache.hadoop.hive.metastore.events.PreDropTableEvent) DropTableEvent(org.apache.hadoop.hive.metastore.events.DropTableEvent) Database(org.apache.hadoop.hive.metastore.api.Database) PreCreateTableEvent(org.apache.hadoop.hive.metastore.events.PreCreateTableEvent) Partition(org.apache.hadoop.hive.metastore.api.Partition) PreAddIndexEvent(org.apache.hadoop.hive.metastore.events.PreAddIndexEvent) AddIndexEvent(org.apache.hadoop.hive.metastore.events.AddIndexEvent) PreDropPartitionEvent(org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent) DropPartitionEvent(org.apache.hadoop.hive.metastore.events.DropPartitionEvent) Table(org.apache.hadoop.hive.metastore.api.Table) AlterPartitionEvent(org.apache.hadoop.hive.metastore.events.AlterPartitionEvent) PreAlterPartitionEvent(org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent) PreCreateDatabaseEvent(org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent) PreDropPartitionEvent(org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent) PreEventContext(org.apache.hadoop.hive.metastore.events.PreEventContext) PreDropTableEvent(org.apache.hadoop.hive.metastore.events.PreDropTableEvent) ConfigChangeEvent(org.apache.hadoop.hive.metastore.events.ConfigChangeEvent) PreAddPartitionEvent(org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent) AddPartitionEvent(org.apache.hadoop.hive.metastore.events.AddPartitionEvent) PreAlterIndexEvent(org.apache.hadoop.hive.metastore.events.PreAlterIndexEvent) AlterIndexEvent(org.apache.hadoop.hive.metastore.events.AlterIndexEvent)
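This test observes both post-events (DummyListener.notifyList) and pre-events (DummyPreListener.notifyList). A minimal sketch of how such listeners are typically wired into the metastore configuration is shown below, assuming the standard HiveConf keys hive.metastore.event.listeners and hive.metastore.pre.event.listeners; the actual setup in the Hive test harness may differ.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

HiveConf conf = new HiveConf();
// Post-event listeners receive CreateTableEvent, AddPartitionEvent, etc. after the operation completes.
conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, DummyListener.class.getName());
// Pre-event listeners receive PreCreateTableEvent, PreAddPartitionEvent, etc. before the operation runs.
conf.setVar(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS, DummyPreListener.class.getName());
HiveMetaStoreClient msc = new HiveMetaStoreClient(conf);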

Aggregations

ArrayList (java.util.ArrayList): 3
CreateDatabaseEvent (org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent): 3
CreateTableEvent (org.apache.hadoop.hive.metastore.events.CreateTableEvent): 3
ListenerEvent (org.apache.hadoop.hive.metastore.events.ListenerEvent): 3
AddPartitionEvent (org.apache.hadoop.hive.metastore.events.AddPartitionEvent): 2
AlterTableEvent (org.apache.hadoop.hive.metastore.events.AlterTableEvent): 2
DropDatabaseEvent (org.apache.hadoop.hive.metastore.events.DropDatabaseEvent): 2
DropPartitionEvent (org.apache.hadoop.hive.metastore.events.DropPartitionEvent): 2
DropTableEvent (org.apache.hadoop.hive.metastore.events.DropTableEvent): 2
HashMap (java.util.HashMap): 1
ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics): 1
ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData): 1
ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc): 1
ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj): 1
Database (org.apache.hadoop.hive.metastore.api.Database): 1
Index (org.apache.hadoop.hive.metastore.api.Index): 1
Partition (org.apache.hadoop.hive.metastore.api.Partition): 1
SetPartitionsStatsRequest (org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest): 1
StringColumnStatsData (org.apache.hadoop.hive.metastore.api.StringColumnStatsData): 1
Table (org.apache.hadoop.hive.metastore.api.Table): 1