Example 71 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in the Apache Hive project.

The class HBaseMetaHook, method preCreateTable.

@Override
public void preCreateTable(Table tbl) throws MetaException {
    boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
    // HBase manages the storage for its tables itself, so a LOCATION clause must not be specified
    if (tbl.getSd().getLocation() != null) {
        throw new MetaException("LOCATION may not be specified for HBase.");
    }
    org.apache.hadoop.hbase.client.Table htable = null;
    try {
        String tableName = getHBaseTableName(tbl);
        Map<String, String> serdeParam = tbl.getSd().getSerdeInfo().getParameters();
        String hbaseColumnsMapping = serdeParam.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
        ColumnMappings columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
        HTableDescriptor tableDesc;
        if (!getHBaseAdmin().tableExists(TableName.valueOf(tableName))) {
            // if it is not an external table then create one
            if (!isExternal) {
                // Create the column descriptors
                tableDesc = new HTableDescriptor(TableName.valueOf(tableName));
                Set<String> uniqueColumnFamilies = new HashSet<String>();
                for (ColumnMappings.ColumnMapping colMap : columnMappings) {
                    if (!colMap.hbaseRowKey && !colMap.hbaseTimestamp) {
                        uniqueColumnFamilies.add(colMap.familyName);
                    }
                }
                for (String columnFamily : uniqueColumnFamilies) {
                    tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(columnFamily)));
                }
                getHBaseAdmin().createTable(tableDesc);
            } else {
                // an external table
                throw new MetaException("HBase table " + tableName + " doesn't exist while the table is declared as an external table.");
            }
        } else {
            if (!isExternal) {
                throw new MetaException("Table " + tableName + " already exists within HBase; " + "use CREATE EXTERNAL TABLE instead to register it in Hive.");
            }
            // make sure the schema mapping is right
            tableDesc = getHBaseAdmin().getTableDescriptor(TableName.valueOf(tableName));
            for (ColumnMappings.ColumnMapping colMap : columnMappings) {
                if (colMap.hbaseRowKey || colMap.hbaseTimestamp) {
                    continue;
                }
                if (!tableDesc.hasFamily(colMap.familyNameBytes)) {
                    throw new MetaException("Column Family " + colMap.familyName + " is not defined in hbase table " + tableName);
                }
            }
        }
        // ensure the table is online
        htable = getHBaseAdmin().getConnection().getTable(tableDesc.getTableName());
    } catch (Exception se) {
        throw new MetaException(StringUtils.stringifyException(se));
    } finally {
        if (htable != null) {
            IOUtils.closeQuietly(htable);
        }
    }
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) HashSet(java.util.HashSet)
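
The family checks above are driven by the hbase.columns.mapping SerDe parameter, where ":key" marks the row key and ":timestamp" the cell timestamp. Below is a minimal sketch of how the unique column families fall out of such a mapping string; it is a simplified stand-in for HBaseSerDe.parseColumnsMapping, and the class and method names are hypothetical.

import java.util.LinkedHashSet;
import java.util.Set;

public class ColumnFamilySketch {

    // Collect the distinct column families named in a mapping string such as
    // ":key,cf1:col1,cf1:col2,cf2:col3". Virtual columns (":key", ":timestamp")
    // carry no family, mirroring the hbaseRowKey/hbaseTimestamp skip above.
    public static Set<String> families(String columnsMapping) {
        Set<String> families = new LinkedHashSet<String>();
        for (String entry : columnsMapping.split(",")) {
            String trimmed = entry.trim();
            if (trimmed.equals(":key") || trimmed.equals(":timestamp")) {
                continue;
            }
            int colon = trimmed.indexOf(':');
            if (colon > 0) {
                families.add(trimmed.substring(0, colon));
            }
        }
        return families;
    }

    public static void main(String[] args) {
        // prints [cf1, cf2]
        System.out.println(families(":key,cf1:col1,cf1:col2,cf2:col3"));
    }
}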

Example 72 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in the Apache Hive project.

The class HBaseMetaHook, method commitDropTable.

@Override
public void commitDropTable(Table tbl, boolean deleteData) throws MetaException {
    try {
        String tableName = getHBaseTableName(tbl);
        boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
        if (deleteData && !isExternal) {
            if (getHBaseAdmin().isTableEnabled(TableName.valueOf(tableName))) {
                getHBaseAdmin().disableTable(TableName.valueOf(tableName));
            }
            getHBaseAdmin().deleteTable(TableName.valueOf(tableName));
        }
    } catch (IOException ie) {
        throw new MetaException(StringUtils.stringifyException(ie));
    }
}
Also used : IOException(java.io.IOException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
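
The ordering inside the guard matters: HBase will not delete a table that is still enabled, hence the isTableEnabled check before deleteTable. The same disable-then-delete idiom as a standalone helper, sketched with hypothetical names and an assumed open Connection:

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;

public final class DropTableSketch {

    // Disable the table if needed, then delete it; deleting an enabled table
    // would fail with a TableNotDisabledException.
    static void dropIfExists(Connection conn, String table) throws IOException {
        TableName name = TableName.valueOf(table);
        try (Admin admin = conn.getAdmin()) {
            if (!admin.tableExists(name)) {
                return;
            }
            if (admin.isTableEnabled(name)) {
                admin.disableTable(name);
            }
            admin.deleteTable(name);
        }
    }
}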

Example 73 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in the Apache Hive project.

The class HBaseMetaHook, method getHBaseAdmin.

private Admin getHBaseAdmin() throws MetaException {
    try {
        if (admin == null) {
            Connection conn = ConnectionFactory.createConnection(hbaseConf);
            admin = conn.getAdmin();
        }
        return admin;
    } catch (IOException ioe) {
        throw new MetaException(StringUtils.stringifyException(ioe));
    }
}
Also used : Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
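
Two design points are worth noting here: the Admin is created lazily and cached in a field, but the null check is not synchronized and the Connection that produced the Admin is never retained for closing. A thread-safe variant is sketched below; it is an illustration, not the Hive implementation.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.util.StringUtils;

class LazyAdminSketch {

    private final Configuration hbaseConf;
    private Connection conn;
    private Admin admin;

    LazyAdminSketch(Configuration hbaseConf) {
        this.hbaseConf = hbaseConf;
    }

    // Synchronized so concurrent callers cannot race past the null check and
    // open two Connections; the Connection is kept so close() can release it.
    synchronized Admin getHBaseAdmin() throws MetaException {
        try {
            if (admin == null) {
                conn = ConnectionFactory.createConnection(hbaseConf);
                admin = conn.getAdmin();
            }
            return admin;
        } catch (IOException ioe) {
            throw new MetaException(StringUtils.stringifyException(ioe));
        }
    }

    synchronized void close() throws IOException {
        if (admin != null) {
            admin.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}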

Example 74 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in the Apache Hive project.

The class HBaseStorageHandler, method configureTableJobProperties.

@Override
public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    Properties tableProperties = tableDesc.getProperties();
    jobProperties.put(HBaseSerDe.HBASE_COLUMNS_MAPPING, tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING));
    jobProperties.put(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, "true"));
    jobProperties.put(HBaseSerDe.HBASE_COLUMNS_PREFIX_HIDE, tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_PREFIX_HIDE, "false"));
    jobProperties.put(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE, tableProperties.getProperty(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE, "string"));
    String scanCache = tableProperties.getProperty(HBaseSerDe.HBASE_SCAN_CACHE);
    if (scanCache != null) {
        jobProperties.put(HBaseSerDe.HBASE_SCAN_CACHE, scanCache);
    }
    String scanCacheBlocks = tableProperties.getProperty(HBaseSerDe.HBASE_SCAN_CACHEBLOCKS);
    if (scanCacheBlocks != null) {
        jobProperties.put(HBaseSerDe.HBASE_SCAN_CACHEBLOCKS, scanCacheBlocks);
    }
    String scanBatch = tableProperties.getProperty(HBaseSerDe.HBASE_SCAN_BATCH);
    if (scanBatch != null) {
        jobProperties.put(HBaseSerDe.HBASE_SCAN_BATCH, scanBatch);
    }
    String tableName = tableProperties.getProperty(HBaseSerDe.HBASE_TABLE_NAME);
    if (tableName == null) {
        tableName = tableProperties.getProperty(hive_metastoreConstants.META_TABLE_NAME);
        tableName = tableName.toLowerCase();
        if (tableName.startsWith(DEFAULT_PREFIX)) {
            tableName = tableName.substring(DEFAULT_PREFIX.length());
        }
    }
    jobProperties.put(HBaseSerDe.HBASE_TABLE_NAME, tableName);
    Configuration jobConf = getJobConf();
    addHBaseResources(jobConf, jobProperties);
    // check to see if this is an input job or an output job
    if (this.configureInputJobProps) {
        LOG.info("Configuring input job properties");
        String snapshotName = HiveConf.getVar(jobConf, HiveConf.ConfVars.HIVE_HBASE_SNAPSHOT_NAME);
        if (snapshotName != null) {
            HBaseTableSnapshotInputFormatUtil.assertSupportsTableSnapshots();
            try {
                String restoreDir = HiveConf.getVar(jobConf, HiveConf.ConfVars.HIVE_HBASE_SNAPSHOT_RESTORE_DIR);
                if (restoreDir == null) {
                    throw new IllegalArgumentException("Cannot process HBase snapshot without specifying " + HiveConf.ConfVars.HIVE_HBASE_SNAPSHOT_RESTORE_DIR);
                }
                HBaseTableSnapshotInputFormatUtil.configureJob(hbaseConf, snapshotName, new Path(restoreDir));
                // copy over configs touched by above method
                jobProperties.put(HBASE_SNAPSHOT_NAME_KEY, hbaseConf.get(HBASE_SNAPSHOT_NAME_KEY));
                if (hbaseConf.get(HBASE_SNAPSHOT_TABLE_DIR_KEY, null) != null) {
                    jobProperties.put(HBASE_SNAPSHOT_TABLE_DIR_KEY, hbaseConf.get(HBASE_SNAPSHOT_TABLE_DIR_KEY));
                } else {
                    jobProperties.put(HBASE_SNAPSHOT_RESTORE_DIR_KEY, hbaseConf.get(HBASE_SNAPSHOT_RESTORE_DIR_KEY));
                }
                TableMapReduceUtil.resetCacheConfig(hbaseConf);
                // copy over configs touched by above method
                for (String cacheKey : HBASE_CACHE_KEYS) {
                    final String value = hbaseConf.get(cacheKey);
                    if (value != null) {
                        jobProperties.put(cacheKey, value);
                    } else {
                        jobProperties.remove(cacheKey);
                    }
                }
            } catch (IOException e) {
                throw new IllegalArgumentException(e);
            }
        }
        for (String k : jobProperties.keySet()) {
            jobConf.set(k, jobProperties.get(k));
        }
        try {
            addHBaseDelegationToken(jobConf);
        } catch (IOException | MetaException e) {
            throw new IllegalStateException("Error while configuring input job properties", e);
        }
    // input job properties
    } else {
        LOG.info("Configuring output job properties");
        if (isHBaseGenerateHFiles(jobConf)) {
            // only support bulk load when an hfile.family.path has been specified.
            // TODO: support detecting cf's from column mapping
            // TODO: support loading into multiple CF's at a time
            String path = HiveHFileOutputFormat.getFamilyPath(jobConf, tableProperties);
            if (path == null || path.isEmpty()) {
                throw new RuntimeException("Please set " + HiveHFileOutputFormat.HFILE_FAMILY_PATH + " to target location for HFiles");
            }
            // TODO: should call HiveHFileOutputFormat#setOutputPath
            jobProperties.put("mapred.output.dir", path);
        } else {
            jobProperties.put(TableOutputFormat.OUTPUT_TABLE, tableName);
        }
    }
// output job properties
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) IOException(java.io.IOException) Properties(java.util.Properties) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
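
The table-name fallback near the top of the method is easy to miss: when hbase.table.name is absent, the Hive metastore name is lowercased and a leading "default." prefix is trimmed. A small sketch of just that fallback, with hypothetical names and DEFAULT_PREFIX assumed to equal "default." as in HBaseStorageHandler:

public class TableNameSketch {

    // Assumed to match HBaseStorageHandler.DEFAULT_PREFIX.
    private static final String DEFAULT_PREFIX = "default.";

    // Mirror of the fallback above: prefer the explicit hbase.table.name,
    // otherwise derive the HBase name from the metastore table name.
    static String hbaseTableName(String explicitName, String metaTableName) {
        if (explicitName != null) {
            return explicitName;
        }
        String name = metaTableName.toLowerCase();
        if (name.startsWith(DEFAULT_PREFIX)) {
            name = name.substring(DEFAULT_PREFIX.length());
        }
        return name;
    }

    public static void main(String[] args) {
        // prints clicks
        System.out.println(hbaseTableName(null, "default.Clicks"));
        // prints ns:clicks
        System.out.println(hbaseTableName("ns:clicks", "default.Clicks"));
    }
}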

Example 75 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in the Apache Hive project.

The class TestMetaStoreAuthorization, method testMetaStoreAuthorization.

public void testMetaStoreAuthorization() throws Exception {
    setup();
    MetaStoreTestUtils.startMetaStoreWithRetry(conf);
    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
    FileSystem fs = null;
    String dbName = "simpdb";
    Database db1 = null;
    Path p = null;
    try {
        try {
            db1 = client.getDatabase(dbName);
            client.dropDatabase(dbName);
        } catch (NoSuchObjectException noe) {
            // expected when the database does not exist yet; nothing to clean up
        }
        if (db1 != null) {
            p = new Path(db1.getLocationUri());
            fs = p.getFileSystem(conf);
            fs.delete(p, true);
        }
        db1 = new Database();
        db1.setName(dbName);
        client.createDatabase(db1);
        Database db = client.getDatabase(dbName);
        assertTrue("Databases do not match", db1.getName().equals(db.getName()));
        p = new Path(db.getLocationUri());
        if (fs == null) {
            fs = p.getFileSystem(conf);
        }
        fs.setPermission(p.getParent(), FsPermission.createImmutable((short) 0555));
        try {
            client.dropDatabase(dbName);
            throw new Exception("Expected dropDatabase call to fail");
        } catch (MetaException me) {
            // expected: the parent directory was made read-only above
        }
        fs.setPermission(p.getParent(), FsPermission.createImmutable((short) 0755));
        client.dropDatabase(dbName);
    } finally {
        if (p != null) {
            fs.delete(p, true);
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
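
The empty catch plus the preceding throw is the classic pre-assertThrows way to assert an expected failure. A hedged equivalent using JUnit 4.13+, assuming the same client, fs, p, and dbName fixtures as the test above (the class and method names are hypothetical):

import static org.junit.Assert.assertThrows;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class DropDatabaseAssertSketch {

    // Make the parent directory read-only, then assert that dropDatabase
    // fails with a MetaException instead of hand-rolling a try/catch.
    static void assertDropFails(HiveMetaStoreClient client, FileSystem fs, Path p, String dbName) throws Exception {
        fs.setPermission(p.getParent(), FsPermission.createImmutable((short) 0555));
        assertThrows(MetaException.class, () -> client.dropDatabase(dbName));
    }
}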

Aggregations

MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 318
IOException (java.io.IOException) 123
ArrayList (java.util.ArrayList) 95
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 74
TException (org.apache.thrift.TException) 67
Table (org.apache.hadoop.hive.metastore.api.Table) 59
Partition (org.apache.hadoop.hive.metastore.api.Partition) 57
SQLException (java.sql.SQLException) 55
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException) 53
Path (org.apache.hadoop.fs.Path) 45
Connection (java.sql.Connection) 36
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException) 34
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException) 32
Statement (java.sql.Statement) 31
Test (org.junit.Test) 30
List (java.util.List) 25
Database (org.apache.hadoop.hive.metastore.api.Database) 25
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 25
ResultSet (java.sql.ResultSet) 22
UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException) 22