Search in sources :

Example 1 with HMSHandler

use of org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler in project hive by apache.

From the class MetaDataExportListener, method export_meta_data.

/** Export the metadata to a given path, and then move it to the user's trash */
/**
 * Exports the metadata of the table being dropped to a timestamped directory
 * under the configured export location (falling back to the filesystem home
 * directory when no location is configured), optionally deleting the export
 * directory to the trash afterwards.
 *
 * @param tableEvent pre-drop event carrying the table and the HMS handler
 * @throws MetaException if the export directory cannot be created or the
 *         metadata dump fails; the underlying exception is kept as the cause
 */
private void export_meta_data(PreDropTableEvent tableEvent) throws MetaException {
    FileSystem fs = null;
    Table tbl = tableEvent.getTable();
    String name = tbl.getTableName();
    org.apache.hadoop.hive.ql.metadata.Table mTbl = new org.apache.hadoop.hive.ql.metadata.Table(tbl);
    HMSHandler handler = tableEvent.getHandler();
    HiveConf hiveconf = handler.getHiveConf();
    Warehouse wh = new Warehouse(hiveconf);
    Path tblPath = new Path(tbl.getSd().getLocation());
    fs = wh.getFs(tblPath);
    Date now = new Date();
    // SimpleDateFormat is not thread-safe; kept as a local so each call has its own.
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
    String dateString = sdf.format(now);
    String exportPathString = hiveconf.getVar(HiveConf.ConfVars.METADATA_EXPORT_LOCATION);
    boolean moveMetadataToTrash = hiveconf.getBoolVar(HiveConf.ConfVars.MOVE_EXPORTED_METADATA_TO_TRASH);
    Path exportPath;
    // Fall back to the home directory when the configured location is unset OR
    // empty. The original checked only (non-null && empty), so a null config
    // value fell through to new Path(null) and threw an NPE.
    if (exportPathString == null || exportPathString.isEmpty()) {
        exportPath = fs.getHomeDirectory();
    } else {
        exportPath = new Path(exportPathString);
    }
    Path metaPath = new Path(exportPath, name + "." + dateString);
    LOG.info("Exporting the metadata of table " + tbl.toString() + " to path " + metaPath.toString());
    try {
        fs.mkdirs(metaPath);
    } catch (IOException e) {
        // Preserve the original exception as the cause instead of keeping
        // only its message (matches the cause-preserving style used by
        // NotificationListener elsewhere in this project).
        MetaException me = new MetaException(e.getMessage());
        me.initCause(e);
        throw me;
    }
    Path outFile = new Path(metaPath, name + EximUtil.METADATA_NAME);
    try {
        SessionState.getConsole().printInfo("Beginning metadata export");
        EximUtil.createExportDump(fs, outFile, mTbl, null, null);
        if (moveMetadataToTrash) {
            wh.deleteDir(metaPath, true);
        }
    } catch (IOException | SemanticException e) {
        MetaException me = new MetaException(e.getMessage());
        me.initCause(e);
        throw me;
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Warehouse(org.apache.hadoop.hive.metastore.Warehouse) Table(org.apache.hadoop.hive.metastore.api.Table) IOException(java.io.IOException) Date(java.util.Date) FileSystem(org.apache.hadoop.fs.FileSystem) HiveConf(org.apache.hadoop.hive.conf.HiveConf) SimpleDateFormat(java.text.SimpleDateFormat) HMSHandler(org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Example 2 with HMSHandler

use of org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler in project hive by apache.

From the class NotificationListener, method onCreateTable.

@Override
public void onCreateTable(CreateTableEvent tableEvent) throws MetaException {
    // as "HCAT_EVENT = HCAT_ADD_TABLE"
    if (tableEvent.getStatus()) {
        Table tbl = tableEvent.getTable();
        HMSHandler handler = tableEvent.getHandler();
        HiveConf conf = handler.getHiveConf();
        Table newTbl;
        try {
            newTbl = handler.get_table_core(tbl.getDbName(), tbl.getTableName()).deepCopy();
            newTbl.getParameters().put(HCatConstants.HCAT_MSGBUS_TOPIC_NAME, getTopicPrefix(conf) + "." + newTbl.getDbName().toLowerCase() + "." + newTbl.getTableName().toLowerCase());
            handler.alter_table(newTbl.getDbName(), newTbl.getTableName(), newTbl);
        } catch (InvalidOperationException e) {
            MetaException me = new MetaException(e.toString());
            me.initCause(e);
            throw me;
        } catch (NoSuchObjectException e) {
            MetaException me = new MetaException(e.toString());
            me.initCause(e);
            throw me;
        }
        String topicName = getTopicPrefix(conf) + "." + newTbl.getDbName().toLowerCase();
        send(messageFactory.buildCreateTableMessage(newTbl), topicName);
    }
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) HMSHandler(org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Example 3 with HMSHandler

use of org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler in project hive by apache.

From the class TestAdminUser, method testCreateAdminNAddUser.

/**
 * Verifies that starting an HMSHandler with USERS_IN_ADMIN_ROLE set creates
 * the built-in admin role owned by the metastore, grants it the "All"
 * privilege globally, and enrolls the configured user in that role.
 */
public void testCreateAdminNAddUser() throws IOException, Throwable {
    HiveConf conf = new HiveConf();
    conf.setVar(ConfVars.USERS_IN_ADMIN_ROLE, "adminuser");
    conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName());
    // Constructing the handler bootstraps the metastore, creating the admin role.
    RawStore rawStore = new HMSHandler("testcreateroot", conf).getMS();
    Role adminRole = rawStore.getRole(HiveMetaStore.ADMIN);
    assertTrue(adminRole.getOwnerName().equals(HiveMetaStore.ADMIN));
    // assertEquals takes (expected, actual); the original had the arguments
    // reversed, which produces misleading failure messages.
    assertEquals("All", rawStore.listPrincipalGlobalGrants(HiveMetaStore.ADMIN, PrincipalType.ROLE).get(0).getGrantInfo().getPrivilege());
    assertEquals(HiveMetaStore.ADMIN, rawStore.listRoles("adminuser", PrincipalType.USER).get(0).getRoleName());
}
Also used : Role(org.apache.hadoop.hive.metastore.api.Role) SQLStdHiveAuthorizerFactory(org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory) HiveConf(org.apache.hadoop.hive.conf.HiveConf) HMSHandler(org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler)

Example 4 with HMSHandler

use of org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler in project hive by apache.

From the class TestDBTokenStore, method testDBTokenStore.

/**
 * Exercises the metastore-backed DBTokenStore end to end: master-key
 * add/update/remove semantics (including sequential key IDs and duplicate
 * key data), then delegation-token add/get/remove round-trips.
 *
 * The assertions are strictly order-dependent — each step relies on the
 * store state left by the previous one.
 */
public void testDBTokenStore() throws TokenStoreException, MetaException, IOException {
    DelegationTokenStore ts = new DBTokenStore();
    ts.init(new HMSHandler("Test handler"), ServerMode.METASTORE);
    // Fresh store: no keys, and removing a bogus key ID is a no-op.
    assertEquals(0, ts.getMasterKeys().length);
    assertEquals(false, ts.removeMasterKey(-1));
    try {
        ts.updateMasterKey(-1, "non-existent-key");
        fail("Updated non-existent key.");
    } catch (TokenStoreException e) {
        // Updating a missing key surfaces the metastore's NoSuchObjectException as the cause.
        assertTrue(e.getCause() instanceof NoSuchObjectException);
    }
    int keySeq = ts.addMasterKey("key1Data");
    int keySeq2 = ts.addMasterKey("key2Data");
    // Identical key data still gets a distinct sequence number.
    int keySeq2same = ts.addMasterKey("key2Data");
    assertEquals("keys sequential", keySeq + 1, keySeq2);
    assertEquals("keys sequential", keySeq + 2, keySeq2same);
    assertEquals("expected number of keys", 3, ts.getMasterKeys().length);
    assertTrue(ts.removeMasterKey(keySeq));
    assertTrue(ts.removeMasterKey(keySeq2same));
    // Only keySeq2 ("key2Data") should remain.
    assertEquals("expected number of keys", 1, ts.getMasterKeys().length);
    assertEquals("key2Data", ts.getMasterKeys()[0]);
    ts.updateMasterKey(keySeq2, "updatedData");
    assertEquals("updatedData", ts.getMasterKeys()[0]);
    assertTrue(ts.removeMasterKey(keySeq2));
    // tokens
    // Empty store: lookups return null and removals report false.
    assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
    DelegationTokenIdentifier tokenId = new DelegationTokenIdentifier(new Text("owner"), new Text("renewer"), new Text("realUser"));
    assertNull(ts.getToken(tokenId));
    assertFalse(ts.removeToken(tokenId));
    DelegationTokenInformation tokenInfo = new DelegationTokenInformation(99, "password".getBytes());
    // First add succeeds; adding the same identifier again is rejected.
    assertTrue(ts.addToken(tokenId, tokenInfo));
    assertFalse(ts.addToken(tokenId, tokenInfo));
    DelegationTokenInformation tokenInfoRead = ts.getToken(tokenId);
    assertEquals(tokenInfo.getRenewDate(), tokenInfoRead.getRenewDate());
    // The store returns a distinct instance (not the one stored) with equal encoded contents.
    assertNotSame(tokenInfo, tokenInfoRead);
    Assert.assertArrayEquals(HiveDelegationTokenSupport.encodeDelegationTokenInformation(tokenInfo), HiveDelegationTokenSupport.encodeDelegationTokenInformation(tokenInfoRead));
    List<DelegationTokenIdentifier> allIds = ts.getAllDelegationTokenIdentifiers();
    assertEquals(1, allIds.size());
    Assert.assertEquals(TokenStoreDelegationTokenSecretManager.encodeWritable(tokenId), TokenStoreDelegationTokenSecretManager.encodeWritable(allIds.get(0)));
    // Removing the token returns the store to its empty state.
    assertTrue(ts.removeToken(tokenId));
    assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
    assertNull(ts.getToken(tokenId));
    ts.close();
}
Also used : TokenStoreException(org.apache.hadoop.hive.thrift.DelegationTokenStore.TokenStoreException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Text(org.apache.hadoop.io.Text) HMSHandler(org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler) DelegationTokenInformation(org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation)

Aggregations

HMSHandler (org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler)4 HiveConf (org.apache.hadoop.hive.conf.HiveConf)3 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)2 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)2 Table (org.apache.hadoop.hive.metastore.api.Table)2 IOException (java.io.IOException)1 SimpleDateFormat (java.text.SimpleDateFormat)1 Date (java.util.Date)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 Path (org.apache.hadoop.fs.Path)1 Warehouse (org.apache.hadoop.hive.metastore.Warehouse)1 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)1 Role (org.apache.hadoop.hive.metastore.api.Role)1 SQLStdHiveAuthorizerFactory (org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory)1 TokenStoreException (org.apache.hadoop.hive.thrift.DelegationTokenStore.TokenStoreException)1 Text (org.apache.hadoop.io.Text)1 DelegationTokenInformation (org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation)1