Example 1 with HiveSQLException

Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.

From the class GetColumnsOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        String tablePattern = convertIdentifierPattern(tableName, true);
        Pattern columnPattern = null;
        if (columnName != null) {
            columnPattern = Pattern.compile(convertIdentifierPattern(columnName, false));
        }
        List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
        Collections.sort(dbNames);
        Map<String, List<String>> db2Tabs = new HashMap<>();
        for (String dbName : dbNames) {
            List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
            Collections.sort(tableNames);
            db2Tabs.put(dbName, tableNames);
        }
        if (isAuthV2Enabled()) {
            List<HivePrivilegeObject> privObjs = getPrivObjs(db2Tabs);
            String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName + ", tablePattern : " + tableName;
            authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, cmdStr);
        }
        int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
        for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
            String dbName = dbTabs.getKey();
            List<String> tableNames = dbTabs.getValue();
            for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
                TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
                List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
                Set<String> pkColNames = new HashSet<>();
                for (SQLPrimaryKey key : primaryKeys) {
                    pkColNames.add(key.getColumn_name().toLowerCase());
                }
                for (ColumnDescriptor column : schema.getColumnDescriptors()) {
                    if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
                        continue;
                    }
                    Object[] rowData = new Object[] {
                        null,                                          // TABLE_CAT
                        table.getDbName(),                             // TABLE_SCHEM
                        table.getTableName(),                          // TABLE_NAME
                        column.getName(),                              // COLUMN_NAME
                        column.getType().toJavaSQLType(),              // DATA_TYPE
                        column.getTypeName(),                          // TYPE_NAME
                        column.getTypeDescriptor().getColumnSize(),    // COLUMN_SIZE
                        null,                                          // BUFFER_LENGTH, unused
                        column.getTypeDescriptor().getDecimalDigits(), // DECIMAL_DIGITS
                        column.getType().getNumPrecRadix(),            // NUM_PREC_RADIX
                        pkColNames.contains(column.getName().toLowerCase())
                            ? DatabaseMetaData.columnNoNulls
                            : DatabaseMetaData.columnNullable,         // NULLABLE
                        column.getComment(),                           // REMARKS
                        null,                                          // COLUMN_DEF
                        null,                                          // SQL_DATA_TYPE
                        null,                                          // SQL_DATETIME_SUB
                        null,                                          // CHAR_OCTET_LENGTH
                        column.getOrdinalPosition(),                   // ORDINAL_POSITION
                        pkColNames.contains(column.getName().toLowerCase()) ? "NO" : "YES", // IS_NULLABLE
                        null,                                          // SCOPE_CATALOG
                        null,                                          // SCOPE_SCHEMA
                        null,                                          // SCOPE_TABLE
                        null,                                          // SOURCE_DATA_TYPE
                        "NO"                                           // IS_AUTO_INCREMENT
                    };
                    rowSet.addRow(rowData);
                }
            }
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used: Pattern (java.util.regex.Pattern), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), Table (org.apache.hadoop.hive.metastore.api.Table), TableSchema (org.apache.hive.service.cli.TableSchema), HashMap (java.util.HashMap), ColumnDescriptor (org.apache.hive.service.cli.ColumnDescriptor), HiveSQLException (org.apache.hive.service.cli.HiveSQLException), HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject), TableIterable (org.apache.hadoop.hive.ql.metadata.TableIterable), IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient), PrimaryKeysRequest (org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest), ArrayList (java.util.ArrayList), List (java.util.List), HashSet (java.util.HashSet)
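
GetColumnsOperation is the server-side implementation behind the JDBC DatabaseMetaData.getColumns call in HiveServer2. The following client-side sketch shows how the rows built above would typically be consumed; the connection URL, user, and schema name are assumptions for illustration, and the Hive JDBC driver is assumed to be on the classpath. Since HiveSQLException extends java.sql.SQLException, a plain catch of SQLException on the client also covers failures raised by this operation.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class GetColumnsClient {
    public static void main(String[] args) {
        // Assumed: a HiveServer2 instance listening on localhost:10000.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default", "hive", "")) {
            DatabaseMetaData md = conn.getMetaData();
            // catalog = null, schema pattern = "default", table/column patterns = "%"
            try (ResultSet rs = md.getColumns(null, "default", "%", "%")) {
                while (rs.next()) {
                    System.out.printf("%s.%s.%s : %s%n",
                        rs.getString("TABLE_SCHEM"),
                        rs.getString("TABLE_NAME"),
                        rs.getString("COLUMN_NAME"),
                        rs.getString("TYPE_NAME"));
                }
            }
        } catch (SQLException e) {
            // A HiveSQLException thrown by runInternal surfaces here as a SQLException.
            e.printStackTrace();
        }
    }
}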

Example 2 with HiveSQLException

Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.

From the class GetCrossReferenceOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        ForeignKeysRequest fkReq = new ForeignKeysRequest(parentSchemaName, parentTableName, foreignSchemaName, foreignTableName);
        List<SQLForeignKey> fks = metastoreClient.getForeignKeys(fkReq);
        if (fks == null) {
            return;
        }
        for (SQLForeignKey fk : fks) {
            rowSet.addRow(new Object[] { parentCatalogName, fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(), foreignCatalogName, fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(), fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(), fk.getFk_name(), fk.getPk_name(), 0 });
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used: ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), HiveSQLException (org.apache.hive.service.cli.HiveSQLException), IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)
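
GetCrossReferenceOperation backs DatabaseMetaData.getCrossReference, which reports how the foreign-key columns of one table reference the primary key of another. A minimal sketch, reusing a Connection obtained as in the previous example; the "customers" and "orders" tables are hypothetical names used only for illustration:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

class CrossReferenceClient {
    // Prints FK -> PK column pairs between two (hypothetical) tables.
    static void printCrossReference(Connection conn) throws SQLException {
        DatabaseMetaData md = conn.getMetaData();
        try (ResultSet rs = md.getCrossReference(
                null, "default", "customers",  // parent (primary-key) side
                null, "default", "orders")) {  // foreign (foreign-key) side
            while (rs.next()) {
                System.out.printf("%s.%s -> %s.%s (KEY_SEQ %d)%n",
                    rs.getString("FKTABLE_NAME"), rs.getString("FKCOLUMN_NAME"),
                    rs.getString("PKTABLE_NAME"), rs.getString("PKCOLUMN_NAME"),
                    rs.getShort("KEY_SEQ"));
            }
        }
    }
}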

Example 3 with HiveSQLException

Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.

From the class GetFunctionsOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    if (isAuthV2Enabled()) {
        // get databases for schema pattern
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        List<String> matchingDbs;
        try {
            matchingDbs = metastoreClient.getDatabases(schemaPattern);
        } catch (TException e) {
            setState(OperationState.ERROR);
            throw new HiveSQLException(e);
        }
        // authorize this call on the schema objects
        List<HivePrivilegeObject> privObjs = HivePrivilegeObjectUtils.getHivePrivDbObjects(matchingDbs);
        String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
        authorizeMetaGets(HiveOperationType.GET_FUNCTIONS, privObjs, cmdStr);
    }
    try {
        if ((null == catalogName || "".equals(catalogName)) && (null == schemaName || "".equals(schemaName))) {
            Set<String> functionNames = FunctionRegistry.getFunctionNames(CLIServiceUtils.patternToRegex(functionName));
            for (String functionName : functionNames) {
                FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(functionName);
                Object[] rowData = new Object[] {
                    null,                                       // FUNCTION_CAT
                    null,                                       // FUNCTION_SCHEM
                    functionInfo.getDisplayName(),              // FUNCTION_NAME
                    "",                                         // REMARKS
                    functionInfo.isGenericUDTF()
                        ? DatabaseMetaData.functionReturnsTable
                        : DatabaseMetaData.functionNoTable,     // FUNCTION_TYPE
                    functionInfo.getClass().getCanonicalName()  // SPECIFIC_NAME
                };
                rowSet.addRow(rowData);
            }
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used: TException (org.apache.thrift.TException), HiveSQLException (org.apache.hive.service.cli.HiveSQLException), FunctionInfo (org.apache.hadoop.hive.ql.exec.FunctionInfo), HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject), IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)
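
GetFunctionsOperation backs DatabaseMetaData.getFunctions. Note that the server code above only emits rows when both catalog and schema are null or empty, so a client that wants results should pass null for both. A minimal sketch under the same connection assumptions as the first example:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

class FunctionListClient {
    // Lists registered functions matching a pattern ("%" matches everything).
    static void listFunctions(Connection conn) throws SQLException {
        DatabaseMetaData md = conn.getMetaData();
        try (ResultSet rs = md.getFunctions(null, null, "%")) {
            while (rs.next()) {
                System.out.printf("%s (implemented by %s)%n",
                    rs.getString("FUNCTION_NAME"),
                    rs.getString("SPECIFIC_NAME"));
            }
        }
    }
}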

Example 4 with HiveSQLException

Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.

From the class GetTablesOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        List<String> matchingDbs = metastoreClient.getDatabases(schemaPattern);
        if (isAuthV2Enabled()) {
            List<HivePrivilegeObject> privObjs = HivePrivilegeObjectUtils.getHivePrivDbObjects(matchingDbs);
            String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
            authorizeMetaGets(HiveOperationType.GET_TABLES, privObjs, cmdStr);
        }
        String tablePattern = convertIdentifierPattern(tableName, true);
        for (TableMeta tableMeta : metastoreClient.getTableMeta(schemaPattern, tablePattern, tableTypeList)) {
            rowSet.addRow(new Object[] { DEFAULT_HIVE_CATALOG, tableMeta.getDbName(), tableMeta.getTableName(), tableTypeMapping.mapToClientType(tableMeta.getTableType()), tableMeta.getComments(), null, null, null, null, null });
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used: HiveSQLException (org.apache.hive.service.cli.HiveSQLException), HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject), TableMeta (org.apache.hadoop.hive.metastore.api.TableMeta), IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)
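
GetTablesOperation backs DatabaseMetaData.getTables. A minimal sketch that lists tables and views in one schema; the schema name and type filter are illustrative choices, not part of the server code above:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

class TableListClient {
    // Lists tables and views in the "default" schema (an assumed schema name).
    static void listTables(Connection conn) throws SQLException {
        DatabaseMetaData md = conn.getMetaData();
        try (ResultSet rs = md.getTables(null, "default", "%",
                new String[] { "TABLE", "VIEW" })) {
            while (rs.next()) {
                System.out.printf("%s.%s [%s] %s%n",
                    rs.getString("TABLE_SCHEM"),
                    rs.getString("TABLE_NAME"),
                    rs.getString("TABLE_TYPE"),
                    rs.getString("REMARKS"));
            }
        }
    }
}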

Example 5 with HiveSQLException

Use of org.apache.hive.service.cli.HiveSQLException in project hive by apache.

From the class GetTypeInfoOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    if (isAuthV2Enabled()) {
        authorizeMetaGets(HiveOperationType.GET_TYPEINFO, null);
    }
    try {
        for (Type type : Type.values()) {
            Object[] rowData = new Object[] {
                type.getName(),             // TYPE_NAME
                type.toJavaSQLType(),       // DATA_TYPE
                type.getMaxPrecision(),     // PRECISION
                type.getLiteralPrefix(),    // LITERAL_PREFIX
                type.getLiteralSuffix(),    // LITERAL_SUFFIX
                type.getCreateParams(),     // CREATE_PARAMS
                type.getNullable(),         // NULLABLE
                type.isCaseSensitive(),     // CASE_SENSITIVE
                type.getSearchable(),       // SEARCHABLE
                type.isUnsignedAttribute(), // UNSIGNED_ATTRIBUTE
                type.isFixedPrecScale(),    // FIXED_PREC_SCALE
                type.isAutoIncrement(),     // AUTO_INCREMENT
                type.getLocalizedName(),    // LOCAL_TYPE_NAME
                type.getMinimumScale(),     // MINIMUM_SCALE
                type.getMaximumScale(),     // MAXIMUM_SCALE
                null,                       // SQL_DATA_TYPE, unused
                null,                       // SQL_DATETIME_SUB, unused
                type.getNumPrecRadix()      // NUM_PREC_RADIX
            };
            rowSet.addRow(rowData);
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used: Type (org.apache.hadoop.hive.serde2.thrift.Type), OperationType (org.apache.hive.service.cli.OperationType), HiveOperationType (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType), HiveSQLException (org.apache.hive.service.cli.HiveSQLException)
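
GetTypeInfoOperation backs DatabaseMetaData.getTypeInfo, which takes no filter arguments and, per the loop above, returns one row per supported Type value. A minimal client-side sketch under the same connection assumptions as the first example:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

class TypeInfoClient {
    // Dumps the type catalog exposed by the server.
    static void listTypes(Connection conn) throws SQLException {
        DatabaseMetaData md = conn.getMetaData();
        try (ResultSet rs = md.getTypeInfo()) {
            while (rs.next()) {
                System.out.printf("%s (java.sql.Types %d, precision %d)%n",
                    rs.getString("TYPE_NAME"),
                    rs.getInt("DATA_TYPE"),
                    rs.getInt("PRECISION"));
            }
        }
    }
}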

Aggregations

HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 83 usages
OperationHandle (org.apache.hive.service.cli.OperationHandle): 38 usages
TException (org.apache.thrift.TException): 25 usages
SessionHandle (org.apache.hive.service.cli.SessionHandle): 20 usages
ExploreException (co.cask.cdap.explore.service.ExploreException): 14 usages
IOException (java.io.IOException): 12 usages
QueryHandle (co.cask.cdap.proto.QueryHandle): 11 usages
TProtocolVersion (org.apache.hive.service.rpc.thrift.TProtocolVersion): 11 usages
OperationManager (org.apache.hive.service.cli.operation.OperationManager): 10 usages
QueryStatus (co.cask.cdap.proto.QueryStatus): 7 usages
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 7 usages
SQLException (java.sql.SQLException): 6 usages
OperationStatus (org.apache.hive.service.cli.OperationStatus): 5 usages
TableSchema (org.apache.hive.service.cli.TableSchema): 5 usages
FileNotFoundException (java.io.FileNotFoundException): 4 usages
ArrayList (java.util.ArrayList): 4 usages
Metrics (org.apache.hadoop.hive.common.metrics.common.Metrics): 3 usages
HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject): 3 usages
NamespaceNotFoundException (co.cask.cdap.common.NamespaceNotFoundException): 2 usages
HandleNotFoundException (co.cask.cdap.explore.service.HandleNotFoundException): 2 usages