Example 1 with IMetaStoreClient

Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

From class GetColumnsOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        String tablePattern = convertIdentifierPattern(tableName, true);
        Pattern columnPattern = null;
        if (columnName != null) {
            columnPattern = Pattern.compile(convertIdentifierPattern(columnName, false));
        }
        List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
        Collections.sort(dbNames);
        Map<String, List<String>> db2Tabs = new HashMap<>();
        for (String dbName : dbNames) {
            List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
            Collections.sort(tableNames);
            db2Tabs.put(dbName, tableNames);
        }
        if (isAuthV2Enabled()) {
            List<HivePrivilegeObject> privObjs = getPrivObjs(db2Tabs);
            String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName + ", tablePattern : " + tableName;
            authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, cmdStr);
        }
        int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
        for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
            String dbName = dbTabs.getKey();
            List<String> tableNames = dbTabs.getValue();
            for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
                TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
                List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
                Set<String> pkColNames = new HashSet<>();
                for (SQLPrimaryKey key : primaryKeys) {
                    pkColNames.add(key.getColumn_name().toLowerCase());
                }
                for (ColumnDescriptor column : schema.getColumnDescriptors()) {
                    if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
                        continue;
                    }
                    Object[] rowData = new Object[] {
                        null,                                          // TABLE_CAT
                        table.getDbName(),                             // TABLE_SCHEM
                        table.getTableName(),                          // TABLE_NAME
                        column.getName(),                              // COLUMN_NAME
                        column.getType().toJavaSQLType(),              // DATA_TYPE
                        column.getTypeName(),                          // TYPE_NAME
                        column.getTypeDescriptor().getColumnSize(),    // COLUMN_SIZE
                        null,                                          // BUFFER_LENGTH, unused
                        column.getTypeDescriptor().getDecimalDigits(), // DECIMAL_DIGITS
                        column.getType().getNumPrecRadix(),            // NUM_PREC_RADIX
                        pkColNames.contains(column.getName().toLowerCase())
                            ? DatabaseMetaData.columnNoNulls
                            : DatabaseMetaData.columnNullable,         // NULLABLE
                        column.getComment(),                           // REMARKS
                        null,                                          // COLUMN_DEF
                        null,                                          // SQL_DATA_TYPE
                        null,                                          // SQL_DATETIME_SUB
                        null,                                          // CHAR_OCTET_LENGTH
                        column.getOrdinalPosition(),                   // ORDINAL_POSITION
                        pkColNames.contains(column.getName().toLowerCase()) ? "NO" : "YES", // IS_NULLABLE
                        null,                                          // SCOPE_CATALOG
                        null,                                          // SCOPE_SCHEMA
                        null,                                          // SCOPE_TABLE
                        null,                                          // SOURCE_DATA_TYPE
                        "NO"                                           // IS_AUTO_INCREMENT
                    };
                    rowSet.addRow(rowData);
                }
            }
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used : Pattern(java.util.regex.Pattern) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) TableSchema(org.apache.hive.service.cli.TableSchema) HashMap(java.util.HashMap) ColumnDescriptor(org.apache.hive.service.cli.ColumnDescriptor) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) TableIterable(org.apache.hadoop.hive.ql.metadata.TableIterable) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) ArrayList(java.util.ArrayList) List(java.util.List) HashSet(java.util.HashSet)
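
For orientation, a minimal standalone sketch of the same metastore calls (getDatabases, getTables, getSchema) outside of HiveServer2. It assumes a hive-site.xml on the classpath that points at a reachable metastore; the literal "*" patterns stand in for the converted JDBC patterns used above.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class ListColumnsSketch {
    public static void main(String[] args) throws Exception {
        // HiveConf picks up hive-site.xml from the classpath.
        IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            // "*" matches every database; GetColumnsOperation derives its
            // pattern from the JDBC schema pattern instead.
            for (String dbName : client.getDatabases("*")) {
                for (String tableName : client.getTables(dbName, "*")) {
                    // getSchema returns one FieldSchema per column, partition keys included.
                    List<FieldSchema> cols = client.getSchema(dbName, tableName);
                    for (FieldSchema col : cols) {
                        System.out.printf("%s.%s.%s : %s%n",
                            dbName, tableName, col.getName(), col.getType());
                    }
                }
            }
        } finally {
            client.close();
        }
    }
}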

Example 2 with IMetaStoreClient

Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

From class GetCrossReferenceOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        ForeignKeysRequest fkReq = new ForeignKeysRequest(parentSchemaName, parentTableName, foreignSchemaName, foreignTableName);
        List<SQLForeignKey> fks = metastoreClient.getForeignKeys(fkReq);
        if (fks == null) {
            // the metastore returned no foreign key metadata; nothing to emit
            return;
        }
        for (SQLForeignKey fk : fks) {
            rowSet.addRow(new Object[] {
                parentCatalogName, fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(),
                foreignCatalogName, fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(),
                fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(),
                fk.getFk_name(), fk.getPk_name(), 0 });
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used : ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient)
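
The same getForeignKeys call can be exercised directly. A sketch under assumed names: the "sales" database and "orders" table are placeholders, not from the original; passing null for the parent side asks for every key referencing into the given foreign table.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

public class ForeignKeysSketch {
    public static void main(String[] args) throws Exception {
        IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            // null parent db/table: match any parent referenced by sales.orders.
            ForeignKeysRequest req = new ForeignKeysRequest(null, null, "sales", "orders");
            List<SQLForeignKey> fks = client.getForeignKeys(req);
            if (fks != null) {
                for (SQLForeignKey fk : fks) {
                    System.out.printf("%s.%s.%s -> %s.%s.%s (seq %d)%n",
                        fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(),
                        fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(),
                        fk.getKey_seq());
                }
            }
        } finally {
            client.close();
        }
    }
}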

Example 3 with IMetaStoreClient

Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

From class GetFunctionsOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    if (isAuthV2Enabled()) {
        // get databases for schema pattern
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        List<String> matchingDbs;
        try {
            matchingDbs = metastoreClient.getDatabases(schemaPattern);
        } catch (TException e) {
            setState(OperationState.ERROR);
            throw new HiveSQLException(e);
        }
        // authorize this call on the schema objects
        List<HivePrivilegeObject> privObjs = HivePrivilegeObjectUtils.getHivePrivDbObjects(matchingDbs);
        String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
        authorizeMetaGets(HiveOperationType.GET_FUNCTIONS, privObjs, cmdStr);
    }
    try {
        if ((null == catalogName || "".equals(catalogName)) && (null == schemaName || "".equals(schemaName))) {
            Set<String> functionNames = FunctionRegistry.getFunctionNames(CLIServiceUtils.patternToRegex(functionName));
            for (String functionName : functionNames) {
                FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(functionName);
                Object[] rowData = new Object[] {
                    null,                                      // FUNCTION_CAT
                    null,                                      // FUNCTION_SCHEM
                    functionInfo.getDisplayName(),             // FUNCTION_NAME
                    "",                                        // REMARKS
                    functionInfo.isGenericUDTF()
                        ? DatabaseMetaData.functionReturnsTable
                        : DatabaseMetaData.functionNoTable,    // FUNCTION_TYPE
                    functionInfo.getClass().getCanonicalName() // SPECIFIC_NAME
                };
                rowSet.addRow(rowData);
            }
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used : TException(org.apache.thrift.TException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) FunctionInfo(org.apache.hadoop.hive.ql.exec.FunctionInfo) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient)
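
FunctionRegistry can also be queried directly from any process with hive-exec on the classpath. A minimal sketch; the ".*" regex is an assumption standing in for what CLIServiceUtils.patternToRegex would build from the JDBC function-name pattern.

import java.util.Set;

import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

public class ListFunctionsSketch {
    public static void main(String[] args) throws Exception {
        // ".*" matches every registered function name.
        Set<String> names = FunctionRegistry.getFunctionNames(".*");
        for (String name : names) {
            FunctionInfo info = FunctionRegistry.getFunctionInfo(name);
            System.out.printf("%s (table function: %b)%n",
                info.getDisplayName(), info.isGenericUDTF());
        }
    }
}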

Example 4 with IMetaStoreClient

Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

From class GetTablesOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        List<String> matchingDbs = metastoreClient.getDatabases(schemaPattern);
        if (isAuthV2Enabled()) {
            List<HivePrivilegeObject> privObjs = HivePrivilegeObjectUtils.getHivePrivDbObjects(matchingDbs);
            String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
            authorizeMetaGets(HiveOperationType.GET_TABLES, privObjs, cmdStr);
        }
        String tablePattern = convertIdentifierPattern(tableName, true);
        for (TableMeta tableMeta : metastoreClient.getTableMeta(schemaPattern, tablePattern, tableTypeList)) {
            rowSet.addRow(new Object[] {
                DEFAULT_HIVE_CATALOG, tableMeta.getDbName(), tableMeta.getTableName(),
                tableTypeMapping.mapToClientType(tableMeta.getTableType()), tableMeta.getComments(),
                null, null, null, null, null });
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used : HiveSQLException(org.apache.hive.service.cli.HiveSQLException) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) TableMeta(org.apache.hadoop.hive.metastore.api.TableMeta) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient)
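
getTableMeta fetches database name, table name, type, and comment for every match in one round trip, instead of looping over getTables/getTable. A sketch; the "*" patterns and the two TableType names are illustrative, whereas the operation above passes its own schemaPattern, tablePattern, and tableTypeList.

import java.util.Arrays;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.TableMeta;

public class ListTablesSketch {
    public static void main(String[] args) throws Exception {
        IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            // One call returns the metadata for every table whose db, name,
            // and type match; the type names mirror Hive's TableType enum.
            for (TableMeta meta : client.getTableMeta("*", "*",
                    Arrays.asList("MANAGED_TABLE", "EXTERNAL_TABLE"))) {
                System.out.printf("%s.%s [%s] %s%n",
                    meta.getDbName(), meta.getTableName(),
                    meta.getTableType(), meta.getComments());
            }
        } finally {
            client.close();
        }
    }
}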

Example 5 with IMetaStoreClient

Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project storm by apache.

From class HiveSetupUtil, method createDbAndTable:

public static void createDbAndTable(HiveConf conf, String databaseName, String tableName, List<String> partVals, String[] colNames, String[] colTypes, String[] partNames, String dbLocation) throws Exception {
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
        Database db = new Database();
        db.setName(databaseName);
        db.setLocationUri(dbLocation);
        client.createDatabase(db);
        Table tbl = new Table();
        tbl.setDbName(databaseName);
        tbl.setTableName(tableName);
        tbl.setTableType(TableType.MANAGED_TABLE.toString());
        StorageDescriptor sd = new StorageDescriptor();
        sd.setCols(getTableColumns(colNames, colTypes));
        sd.setNumBuckets(1);
        sd.setLocation(dbLocation + Path.SEPARATOR + tableName);
        if (partNames != null && partNames.length != 0) {
            tbl.setPartitionKeys(getPartitionKeys(partNames));
        }
        tbl.setSd(sd);
        sd.setBucketCols(new ArrayList<String>(2));
        sd.setSerdeInfo(new SerDeInfo());
        sd.getSerdeInfo().setName(tbl.getTableName());
        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
        sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
        sd.getSerdeInfo().setSerializationLib(OrcSerde.class.getName());
        sd.setInputFormat(OrcInputFormat.class.getName());
        sd.setOutputFormat(OrcOutputFormat.class.getName());
        Map<String, String> tableParams = new HashMap<String, String>();
        tbl.setParameters(tableParams);
        client.createTable(tbl);
        try {
            if (partVals != null && partVals.size() > 0) {
                addPartition(client, tbl, partVals);
            }
        } catch (AlreadyExistsException e) {
            // partition already exists; safe to ignore
        }
    } finally {
        client.close();
    }
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) Table(org.apache.hadoop.hive.metastore.api.Table) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) HashMap(java.util.HashMap) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) OrcOutputFormat(org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat) OrcSerde(org.apache.hadoop.hive.ql.io.orc.OrcSerde) OrcInputFormat(org.apache.hadoop.hive.ql.io.orc.OrcInputFormat) Database(org.apache.hadoop.hive.metastore.api.Database)
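
A possible call site for this helper, with illustrative values only: the database, table, column, and partition names and the file:// location are assumptions, not from the original, and HiveSetupUtil is assumed to be importable from Storm's hive test utilities.

import java.util.Arrays;

import org.apache.hadoop.hive.conf.HiveConf;

public class SetupSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // All literals below are placeholders; dbLocation must be a URI the
        // metastore process can write to.
        HiveSetupUtil.createDbAndTable(conf,
            "testdb",                         // databaseName
            "user_events",                    // tableName
            Arrays.asList("2024-01-01"),      // partVals: one value per partition key
            new String[] { "id", "name" },    // colNames
            new String[] { "int", "string" }, // colTypes
            new String[] { "dt" },            // partNames
            "file:///tmp/hive-test/testdb");  // dbLocation
    }
}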

Aggregations

IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 41
TException (org.apache.thrift.TException): 12
IOException (java.io.IOException): 11
Path (org.apache.hadoop.fs.Path): 11
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 11
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 10
HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 10
Table (org.apache.hadoop.hive.metastore.api.Table): 10
Test (org.junit.Test): 10
FileStatus (org.apache.hadoop.fs.FileStatus): 9
FileSystem (org.apache.hadoop.fs.FileSystem): 9
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 8
CompactionRequest (org.apache.hadoop.hive.metastore.api.CompactionRequest): 8
TxnStore (org.apache.hadoop.hive.metastore.txn.TxnStore): 8
HiveEndPoint (org.apache.hive.hcatalog.streaming.HiveEndPoint): 8
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 7
ArrayList (java.util.ArrayList): 6
DelimitedInputWriter (org.apache.hive.hcatalog.streaming.DelimitedInputWriter): 6
StreamingConnection (org.apache.hive.hcatalog.streaming.StreamingConnection): 6
Table (org.apache.hadoop.hive.ql.metadata.Table): 5