
Example 1 with TableSchema

Use of org.apache.hive.service.cli.TableSchema in project hive by apache.

Class OperationManager, method getOperationLogRowSet:

public RowSet getOperationLogRowSet(OperationHandle opHandle, FetchOrientation orientation, long maxRows, HiveConf hConf) throws HiveSQLException {
    TableSchema tableSchema = new TableSchema(getLogSchema());
    RowSet rowSet = RowSetFactory.create(tableSchema, getOperation(opHandle).getProtocolVersion(), false);
    if (!hConf.getBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
        LOG.warn("Operation log requested but hive.server2.logging.operation.enabled is false; no log will be returned.");
        return rowSet;
    }
    // get the OperationLog object from the operation
    OperationLog operationLog = getOperation(opHandle).getOperationLog();
    if (operationLog == null) {
        throw new HiveSQLException("Couldn't find log associated with operation handle: " + opHandle);
    }
    // read logs
    List<String> logs;
    try {
        logs = operationLog.readOperationLog(isFetchFirst(orientation), maxRows);
    } catch (SQLException e) {
        throw new HiveSQLException(e.getMessage(), e.getCause());
    }
    // convert logs to RowSet
    for (String log : logs) {
        rowSet.addRow(new String[] { log });
    }
    return rowSet;
}
Also used: TableSchema (org.apache.hive.service.cli.TableSchema) SQLException (java.sql.SQLException) HiveSQLException (org.apache.hive.service.cli.HiveSQLException) RowSet (org.apache.hive.service.cli.RowSet) OperationLog (org.apache.hadoop.hive.ql.session.OperationLog)
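
For context, getLogSchema() (not shown in this snippet) supplies the single-column schema that each log line is written into. Below is a minimal sketch of what building such a schema could look like, assuming the metastore Schema/FieldSchema API; the column name and comment are illustrative, not the actual Hive helper:

import java.util.Collections;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hive.service.cli.TableSchema;

public class LogSchemaSketch {
    // Hypothetical stand-in for getLogSchema(): a one-column string schema,
    // wrapped in a TableSchema exactly as getOperationLogRowSet does above.
    static TableSchema buildLogTableSchema() {
        Schema logSchema = new Schema();
        logSchema.setFieldSchemas(Collections.singletonList(
                new FieldSchema("operation_log", "string", "one log line per row")));
        return new TableSchema(logSchema);
    }
}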

Example 2 with TableSchema

Use of org.apache.hive.service.cli.TableSchema in project hive by apache.

Class GetColumnsOperation, method runInternal:

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        String tablePattern = convertIdentifierPattern(tableName, true);
        Pattern columnPattern = null;
        if (columnName != null) {
            columnPattern = Pattern.compile(convertIdentifierPattern(columnName, false));
        }
        List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
        Collections.sort(dbNames);
        Map<String, List<String>> db2Tabs = new HashMap<>();
        for (String dbName : dbNames) {
            List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
            Collections.sort(tableNames);
            db2Tabs.put(dbName, tableNames);
        }
        if (isAuthV2Enabled()) {
            List<HivePrivilegeObject> privObjs = getPrivObjs(db2Tabs);
            String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName + ", tablePattern : " + tableName;
            authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, cmdStr);
        }
        int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
        for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
            String dbName = dbTabs.getKey();
            List<String> tableNames = dbTabs.getValue();
            for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
                TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
                List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
                Set<String> pkColNames = new HashSet<>();
                for (SQLPrimaryKey key : primaryKeys) {
                    pkColNames.add(key.getColumn_name().toLowerCase());
                }
                for (ColumnDescriptor column : schema.getColumnDescriptors()) {
                    if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
                        continue;
                    }
                    Object[] rowData = new Object[] {
                        null,                                          // TABLE_CAT
                        table.getDbName(),                             // TABLE_SCHEM
                        table.getTableName(),                          // TABLE_NAME
                        column.getName(),                              // COLUMN_NAME
                        column.getType().toJavaSQLType(),              // DATA_TYPE
                        column.getTypeName(),                          // TYPE_NAME
                        column.getTypeDescriptor().getColumnSize(),    // COLUMN_SIZE
                        null,                                          // BUFFER_LENGTH, unused
                        column.getTypeDescriptor().getDecimalDigits(), // DECIMAL_DIGITS
                        column.getType().getNumPrecRadix(),            // NUM_PREC_RADIX
                        pkColNames.contains(column.getName().toLowerCase())
                            ? DatabaseMetaData.columnNoNulls
                            : DatabaseMetaData.columnNullable,         // NULLABLE
                        column.getComment(),                           // REMARKS
                        null,                                          // COLUMN_DEF
                        null,                                          // SQL_DATA_TYPE
                        null,                                          // SQL_DATETIME_SUB
                        null,                                          // CHAR_OCTET_LENGTH
                        column.getOrdinalPosition(),                   // ORDINAL_POSITION
                        pkColNames.contains(column.getName().toLowerCase()) ? "NO" : "YES", // IS_NULLABLE
                        null,                                          // SCOPE_CATALOG
                        null,                                          // SCOPE_SCHEMA
                        null,                                          // SCOPE_TABLE
                        null,                                          // SOURCE_DATA_TYPE
                        "NO"                                           // IS_AUTO_INCREMENT
                    };
                    rowSet.addRow(rowData);
                }
            }
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used: Pattern (java.util.regex.Pattern) SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table (org.apache.hadoop.hive.metastore.api.Table) TableSchema (org.apache.hive.service.cli.TableSchema) HashMap (java.util.HashMap) ColumnDescriptor (org.apache.hive.service.cli.ColumnDescriptor) HiveSQLException (org.apache.hive.service.cli.HiveSQLException) HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) TableIterable (org.apache.hadoop.hive.ql.metadata.TableIterable) IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient) PrimaryKeysRequest (org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) ArrayList (java.util.ArrayList) List (java.util.List) HashSet (java.util.HashSet)
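
On the client side, the rows assembled above surface through the standard JDBC DatabaseMetaData.getColumns() call. A short usage sketch follows; the connection URL, credentials, and table name are placeholders:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class GetColumnsClientSketch {
    public static void main(String[] args) throws SQLException {
        // Placeholder endpoint and credentials for a HiveServer2 instance.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default", "user", "");
             ResultSet rs = conn.getMetaData()
                     .getColumns(null, "default", "my_table", "%")) {
            while (rs.next()) {
                // Labels follow the JDBC getColumns contract, matching the
                // rowData layout built server-side in runInternal().
                System.out.printf("%s.%s %s (JDBC type %d), nullable=%s%n",
                        rs.getString("TABLE_SCHEM"),
                        rs.getString("COLUMN_NAME"),
                        rs.getString("TYPE_NAME"),
                        rs.getInt("DATA_TYPE"),
                        rs.getString("IS_NULLABLE"));
            }
        }
    }
}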

Example 3 with TableSchema

Use of org.apache.hive.service.cli.TableSchema in project hive by apache.

Class HiveQueryResultSet, method retrieveSchema:

/**
 * Retrieve schema from the server
 */
private void retrieveSchema() throws SQLException {
    try {
        TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(stmtHandle);
        // TODO need session handle
        TGetResultSetMetadataResp metadataResp;
        metadataResp = client.GetResultSetMetadata(metadataReq);
        Utils.verifySuccess(metadataResp.getStatus());
        StringBuilder namesSb = new StringBuilder();
        StringBuilder typesSb = new StringBuilder();
        TTableSchema schema = metadataResp.getSchema();
        if (schema == null || !schema.isSetColumns()) {
            // TODO: should probably throw an exception here.
            return;
        }
        setSchema(new TableSchema(schema));
        List<TColumnDesc> columns = schema.getColumns();
        for (int pos = 0; pos < schema.getColumnsSize(); pos++) {
            if (pos != 0) {
                namesSb.append(",");
                typesSb.append(",");
            }
            String columnName = columns.get(pos).getColumnName();
            columnNames.add(columnName);
            normalizedColumnNames.add(columnName.toLowerCase());
            TPrimitiveTypeEntry primitiveTypeEntry = columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry();
            String columnTypeName = TYPE_NAMES.get(primitiveTypeEntry.getType());
            columnTypes.add(columnTypeName);
            columnAttributes.add(getColumnAttributes(primitiveTypeEntry));
        }
    } catch (SQLException eS) {
        // rethrow the SQLException as is
        throw eS;
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
    }
}
Also used: TGetResultSetMetadataReq (org.apache.hive.service.rpc.thrift.TGetResultSetMetadataReq) TableSchema (org.apache.hive.service.cli.TableSchema) TTableSchema (org.apache.hive.service.rpc.thrift.TTableSchema) TPrimitiveTypeEntry (org.apache.hive.service.rpc.thrift.TPrimitiveTypeEntry) SQLException (java.sql.SQLException) TGetResultSetMetadataResp (org.apache.hive.service.rpc.thrift.TGetResultSetMetadataResp) TColumnDesc (org.apache.hive.service.rpc.thrift.TColumnDesc) SQLFeatureNotSupportedException (java.sql.SQLFeatureNotSupportedException)
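
Once retrieveSchema() has filled in the column name and type lists, they are exposed through the usual java.sql.ResultSetMetaData interface. A brief usage sketch; the Statement and query text are placeholders:

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ResultSetMetadataSketch {
    static void printColumns(Statement stmt) throws SQLException {
        // Placeholder query; the Hive JDBC driver fetches the schema when
        // the result set is created, via retrieveSchema() above.
        try (ResultSet rs = stmt.executeQuery("SELECT * FROM my_table")) {
            ResultSetMetaData md = rs.getMetaData();
            for (int i = 1; i <= md.getColumnCount(); i++) {
                System.out.println(md.getColumnName(i) + " : " + md.getColumnTypeName(i));
            }
        }
    }
}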

Example 4 with TableSchema

Use of org.apache.hive.service.cli.TableSchema in project hive by apache.

Class SQLOperation, method prepare:

/**
 * Compile the query and extract metadata
 *
 * @throws HiveSQLException
 */
public void prepare(QueryState queryState) throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        driver = DriverFactory.newDriver(queryState, getParentSession().getUserName(), queryInfo);
        // queryTimeout == 0 means no timeout
        if (queryTimeout > 0) {
            timeoutExecutor = new ScheduledThreadPoolExecutor(1);
            Runnable timeoutTask = new Runnable() {

                @Override
                public void run() {
                    try {
                        String queryId = queryState.getQueryId();
                        LOG.info("Query timed out after: " + queryTimeout + " seconds. Cancelling the execution now: " + queryId);
                        SQLOperation.this.cancel(OperationState.TIMEDOUT);
                    } catch (HiveSQLException e) {
                        LOG.error("Error cancelling the query after timeout: " + queryTimeout + " seconds", e);
                    } finally {
                        // Stop the timeout executor; the task runs at most once.
                        timeoutExecutor.shutdown();
                    }
                }
            };
            timeoutExecutor.schedule(timeoutTask, queryTimeout, TimeUnit.SECONDS);
        }
        queryInfo.setQueryDisplay(driver.getQueryDisplay());
        // set the operation handle information in Driver, so that thrift API users
        // can use the operation handle they receive, to lookup query information in
        // Yarn ATS
        String guid64 = Base64.encodeBase64URLSafeString(getHandle().getHandleIdentifier().toTHandleIdentifier().getGuid()).trim();
        driver.setOperationId(guid64);
        // In Hive server mode, we are not able to retry in the FetchTask
        // case, when calling fetch queries since execute() has returned.
        // For now, we disable the test attempts.
        response = driver.compileAndRespond(statement);
        if (0 != response.getResponseCode()) {
            throw toSQLException("Error while compiling statement", response);
        }
        mResultSchema = driver.getSchema();
        // "explain" is an exception for now
        if (driver.getPlan().getFetchTask() != null) {
            // Schema has to be set
            if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) {
                throw new HiveSQLException("Error compiling query: Schema and FieldSchema " + "should be set when query plan has a FetchTask");
            }
            resultSchema = new TableSchema(mResultSchema);
            setHasResultSet(true);
        } else {
            setHasResultSet(false);
        }
        // TODO explain should use a FetchTask for reading
        for (Task<? extends Serializable> task : driver.getPlan().getRootTasks()) {
            if (task.getClass() == ExplainTask.class) {
                resultSchema = new TableSchema(mResultSchema);
                setHasResultSet(true);
                break;
            }
        }
    } catch (HiveSQLException e) {
        setState(OperationState.ERROR);
        throw e;
    } catch (Throwable e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException("Error running query: " + e.toString(), e);
    }
}
Also used: TableSchema (org.apache.hive.service.cli.TableSchema) ScheduledThreadPoolExecutor (java.util.concurrent.ScheduledThreadPoolExecutor) HiveSQLException (org.apache.hive.service.cli.HiveSQLException)
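
The TableSchema built in prepare() is what later reaches clients as Thrift metadata (see Example 5). Below is a minimal sketch of that conversion, with made-up field definitions standing in for driver.getSchema():

import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.rpc.thrift.TTableSchema;

public class SchemaConversionSketch {
    public static void main(String[] args) {
        // Illustrative stand-in for the compiled query's result schema.
        Schema mResultSchema = new Schema();
        mResultSchema.setFieldSchemas(Arrays.asList(
                new FieldSchema("id", "int", null),
                new FieldSchema("name", "string", null)));
        TableSchema resultSchema = new TableSchema(mResultSchema);
        // The same serialization step ThriftCLIService performs in Example 5.
        TTableSchema wireSchema = resultSchema.toTTableSchema();
        System.out.println(wireSchema.getColumnsSize() + " columns on the wire");
    }
}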

Example 5 with TableSchema

Use of org.apache.hive.service.cli.TableSchema in project hive by apache.

Class ThriftCLIService, method GetResultSetMetadata:

@Override
public TGetResultSetMetadataResp GetResultSetMetadata(TGetResultSetMetadataReq req) throws TException {
    TGetResultSetMetadataResp resp = new TGetResultSetMetadataResp();
    try {
        TableSchema schema = cliService.getResultSetMetadata(new OperationHandle(req.getOperationHandle()));
        resp.setSchema(schema.toTTableSchema());
        resp.setStatus(OK_STATUS);
    } catch (Exception e) {
        LOG.warn("Error getting result set metadata: ", e);
        resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
}
Also used: TableSchema (org.apache.hive.service.cli.TableSchema) TGetResultSetMetadataResp (org.apache.hive.service.rpc.thrift.TGetResultSetMetadataResp) OperationHandle (org.apache.hive.service.cli.OperationHandle) LoginException (javax.security.auth.login.LoginException) ServiceException (org.apache.hive.service.ServiceException) HiveSQLException (org.apache.hive.service.cli.HiveSQLException) TException (org.apache.thrift.TException) IOException (java.io.IOException) UnknownHostException (java.net.UnknownHostException)
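
To close the loop: the TTableSchema this handler serializes is what Example 3 deserializes back on the client. A two-line sketch of the round trip:

// Server side (this method): TableSchema -> Thrift wire format.
TTableSchema wire = schema.toTTableSchema();
// Client side (HiveQueryResultSet.retrieveSchema): Thrift -> TableSchema.
TableSchema clientCopy = new TableSchema(wire);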

Aggregations

TableSchema (org.apache.hive.service.cli.TableSchema): 7 uses
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 6 uses
TGetResultSetMetadataResp (org.apache.hive.service.rpc.thrift.TGetResultSetMetadataResp): 3 uses
IOException (java.io.IOException): 2 uses
SQLException (java.sql.SQLException): 2 uses
TGetResultSetMetadataReq (org.apache.hive.service.rpc.thrift.TGetResultSetMetadataReq): 2 uses
TException (org.apache.thrift.TException): 2 uses
FileNotFoundException (java.io.FileNotFoundException): 1 use
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 1 use
UnknownHostException (java.net.UnknownHostException): 1 use
SQLFeatureNotSupportedException (java.sql.SQLFeatureNotSupportedException): 1 use
ArrayList (java.util.ArrayList): 1 use
HashMap (java.util.HashMap): 1 use
HashSet (java.util.HashSet): 1 use
List (java.util.List): 1 use
ScheduledThreadPoolExecutor (java.util.concurrent.ScheduledThreadPoolExecutor): 1 use
Pattern (java.util.regex.Pattern): 1 use
LoginException (javax.security.auth.login.LoginException): 1 use
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 1 use
PrimaryKeysRequest (org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest): 1 use