Example 51 with SQLException

use of java.sql.SQLException in project hive by apache.

the class HiveConnection method setAutoCommit.

/*
   * (non-Javadoc)
   *
   * @see java.sql.Connection#setAutoCommit(boolean)
   */
@Override
public void setAutoCommit(boolean autoCommit) throws SQLException {
    // Per JDBC spec, if the connection is closed a SQLException should be thrown.
    if (isClosed) {
        throw new SQLException("Connection is closed");
    }
    // Hive's auto-commit mode is always enabled. Per the JDBC spec, calling
    // setAutoCommit without changing the mode is a no-op, so only a request
    // to disable auto-commit needs handling: warn and chain a SQLWarning.
    if (!autoCommit) {
        LOG.warn("Request to set autoCommit to false; Hive does not support autoCommit=false.");
        SQLWarning warning = new SQLWarning("Hive does not support autoCommit=false");
        if (warningChain == null) {
            warningChain = warning;
        } else {
            warningChain.setNextWarning(warning);
        }
    }
}
Also used: SQLWarning (java.sql.SQLWarning), SQLException (java.sql.SQLException)
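
A minimal client-side sketch of how this behavior surfaces through the standard JDBC API. The connection URL is a placeholder, assuming a reachable HiveServer2 instance:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLWarning;

public class AutoCommitDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder URL; adjust host, port, and database for a real deployment.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default")) {
            // Hive ignores the disable request and records a warning instead of failing.
            conn.setAutoCommit(false);
            for (SQLWarning w = conn.getWarnings(); w != null; w = w.getNextWarning()) {
                System.out.println("Warning: " + w.getMessage());
            }
            // No-op per the JDBC spec: auto-commit is already enabled.
            conn.setAutoCommit(true);
        }
    }
}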

Example 52 with SQLException

use of java.sql.SQLException in project hive by apache.

the class HiveConnection method createBinaryTransport.

/**
   * Create transport per the connection options
   * Supported transport options are:
   *   - SASL based transports over
   *      + Kerberos
   *      + Delegation token
   *      + SSL
   *      + non-SSL
   *   - Raw (non-SASL) socket
   *
   *   Kerberos and Delegation token support SASL QOP configurations
   * @throws SQLException
   * @throws TTransportException
   */
private TTransport createBinaryTransport() throws SQLException, TTransportException {
    try {
        TTransport socketTransport = createUnderlyingTransport();
        // handle secure connection if specified
        if (!JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
            // SASL-based transport: Kerberos, delegation token, or PLAIN user/password
            Map<String, String> saslProps = new HashMap<String, String>();
            SaslQOP saslQOP = SaslQOP.AUTH;
            if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_QOP)) {
                try {
                    saslQOP = SaslQOP.fromString(sessConfMap.get(JdbcConnectionParams.AUTH_QOP));
                } catch (IllegalArgumentException e) {
                    throw new SQLException("Invalid " + JdbcConnectionParams.AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
                }
                saslProps.put(Sasl.QOP, saslQOP.toString());
            } else {
                // If the client did not specify a QOP, offer all of them and negotiate the one supported by the server
                saslProps.put(Sasl.QOP, "auth-conf,auth-int,auth");
            }
            saslProps.put(Sasl.SERVER_AUTH, "true");
            if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL)) {
                transport = KerberosSaslHelper.getKerberosTransport(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host, socketTransport, saslProps, assumeSubject);
            } else {
                // If there's a delegation token available then use a token-based connection
                String tokenStr = getClientDelegationToken(sessConfMap);
                if (tokenStr != null) {
                    transport = KerberosSaslHelper.getTokenTransport(tokenStr, host, socketTransport, saslProps);
                } else {
                    // Otherwise use a PLAIN SASL connection with user/password
                    String userName = getUserName();
                    String passwd = getPassword();
                    // Overlay the SASL transport on top of the base socket transport (SSL or non-SSL)
                    transport = PlainSaslHelper.getPlainTransport(userName, passwd, socketTransport);
                }
            }
        } else {
            // Raw socket connection (non-sasl)
            transport = socketTransport;
        }
    } catch (SaslException e) {
        throw new SQLException("Could not create secure connection to " + jdbcUriString + ": " + e.getMessage(), " 08S01", e);
    }
    return transport;
}
Also used: HashMap (java.util.HashMap), SQLException (java.sql.SQLException), SaslQOP (org.apache.hive.service.auth.SaslQOP), TTransport (org.apache.thrift.transport.TTransport), SaslException (javax.security.sasl.SaslException)
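
Which branch runs above is decided entirely by the session parameters in the JDBC URL; auth, principal, and sasl.qop are the URL keys behind JdbcConnectionParams.AUTH_TYPE, AUTH_PRINCIPAL, and AUTH_QOP. A sketch with placeholder hosts, principals, and credentials:

import java.sql.Connection;
import java.sql.DriverManager;

public class TransportSelectionDemo {
    public static void main(String[] args) throws Exception {
        // Kerberos with an explicit SASL QOP: takes the AUTH_PRINCIPAL branch
        // and the AUTH_QOP parsing above. Host and principal are placeholders.
        String kerberosUrl = "jdbc:hive2://gateway.example.com:10000/default"
            + ";principal=hive/_HOST@EXAMPLE.COM;sasl.qop=auth-conf";

        // No principal and no delegation token: falls through to PLAIN SASL.
        String plainUrl = "jdbc:hive2://gateway.example.com:10000/default";

        // auth=noSasl matches AUTH_SIMPLE, selecting the raw (non-SASL) socket.
        String rawUrl = "jdbc:hive2://gateway.example.com:10000/default;auth=noSasl";

        // Placeholder credentials for the PLAIN SASL branch.
        try (Connection conn = DriverManager.getConnection(plainUrl, "hive", "hive")) {
            System.out.println("Connected via PLAIN SASL");
        }
    }
}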

Example 53 with SQLException

use of java.sql.SQLException in project hive by apache.

the class HiveBaseResultSet method getBinaryStream.

public InputStream getBinaryStream(int columnIndex) throws SQLException {
    Object obj = getObject(columnIndex);
    if (obj == null) {
        return null;
    } else if (obj instanceof InputStream) {
        return (InputStream) obj;
    } else if (obj instanceof byte[]) {
        byte[] byteArray = (byte[]) obj;
        InputStream is = new ByteArrayInputStream(byteArray);
        return is;
    } else if (obj instanceof String) {
        String str = (String) obj;
        InputStream is = null;
        try {
            is = new ByteArrayInputStream(str.getBytes("UTF-8"));
        } catch (UnsupportedEncodingException e) {
            throw new SQLException("Illegal conversion to binary stream from column " + columnIndex + " - Unsupported encoding exception");
        }
        return is;
    }
    throw new SQLException("Illegal conversion to binary stream from column " + columnIndex);
}
Also used: ByteArrayInputStream (java.io.ByteArrayInputStream), SQLException (java.sql.SQLException), InputStream (java.io.InputStream), UnsupportedEncodingException (java.io.UnsupportedEncodingException)
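
A consumer-side sketch of the same method; the connection URL is a placeholder, and the blobs table and payload column are invented for illustration:

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class BinaryStreamDemo {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:hive2://localhost:10000/default"; // placeholder
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT payload FROM blobs LIMIT 1")) {
            if (rs.next()) {
                InputStream in = rs.getBinaryStream(1);
                // A SQL NULL comes back as a null stream, per the method above.
                if (in != null) {
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    byte[] buf = new byte[4096];
                    for (int n; (n = in.read(buf)) != -1; ) {
                        out.write(buf, 0, n);
                    }
                    System.out.println("Read " + out.size() + " bytes");
                }
            }
        }
    }
}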

Example 54 with SQLException

use of java.sql.SQLException in project hive by apache.

the class CommandProcessorFactory method getForHiveCommandInternal.

public static CommandProcessor getForHiveCommandInternal(String[] cmd, HiveConf conf, boolean testOnly) throws SQLException {
    HiveCommand hiveCommand = HiveCommand.find(cmd, testOnly);
    if (hiveCommand == null || isBlank(cmd[0])) {
        return null;
    }
    if (conf == null) {
        conf = new HiveConf();
    }
    Set<String> availableCommands = new HashSet<String>();
    for (String availableCommand : conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST).split(",")) {
        availableCommands.add(availableCommand.toLowerCase().trim());
    }
    if (!availableCommands.contains(cmd[0].trim().toLowerCase())) {
        throw new SQLException("Insufficient privileges to execute " + cmd[0], "42000");
    }
    if (cmd.length > 1 && "reload".equalsIgnoreCase(cmd[0]) && "function".equalsIgnoreCase(cmd[1])) {
        // special handling for SQL "reload function"
        return null;
    }
    switch(hiveCommand) {
        case SET:
            return new SetProcessor();
        case RESET:
            return new ResetProcessor();
        case DFS:
            SessionState ss = SessionState.get();
            return new DfsProcessor(ss.getConf());
        case ADD:
            return new AddResourceProcessor();
        case LIST:
            return new ListResourceProcessor();
        case DELETE:
            return new DeleteResourceProcessor();
        case COMPILE:
            return new CompileProcessor();
        case RELOAD:
            return new ReloadProcessor();
        case CRYPTO:
            try {
                return new CryptoProcessor(SessionState.get().getHdfsEncryptionShim(), conf);
            } catch (HiveException e) {
                throw new SQLException("Fail to start the command processor due to the exception: ", e);
            }
        default:
            throw new AssertionError("Unknown HiveCommand " + hiveCommand);
    }
}
Also used: SessionState (org.apache.hadoop.hive.ql.session.SessionState), SQLException (java.sql.SQLException), HiveConf (org.apache.hadoop.hive.conf.HiveConf), HashSet (java.util.HashSet)
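
A sketch of exercising this through the public entry point, CommandProcessorFactory.getForHiveCommand, which delegates here with testOnly=false. The whitelist is narrowed so the second lookup trips the privilege check; this assumes the Hive ql classes and a valid Hadoop configuration on the classpath:

import java.sql.SQLException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;

public class CommandProcessorDemo {
    public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Only "set" and "reset" are whitelisted, so "dfs" will be rejected.
        conf.setVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, "set,reset");
        try {
            CommandProcessor set =
                CommandProcessorFactory.getForHiveCommand(new String[] { "set", "x=1" }, conf);
            System.out.println("set -> " + set.getClass().getSimpleName()); // SetProcessor
            CommandProcessorFactory.getForHiveCommand(new String[] { "dfs", "-ls", "/" }, conf);
        } catch (SQLException e) {
            // SQLState 42000, matching the throw in the method above.
            System.out.println(e.getSQLState() + ": " + e.getMessage());
        }
    }
}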

Example 55 with SQLException

use of java.sql.SQLException in project hive by apache.

the class CompactionTxnHandler method purgeCompactionHistory.

/**
   * For any given compactable entity (partition; table if not partitioned) the history of compactions
   * may look like "sssfffaaasffss", for example.  The idea is to retain the tail (most recent) of the
   * history such that a configurable number of each type of state is present.  Any other entries
   * can be purged.  This scheme has the advantage of always retaining the last failure/success even if
   * it's not recent.
   * @throws MetaException
   */
@Override
@RetrySemantics.SafeToRetry
public void purgeCompactionHistory() throws MetaException {
    Connection dbConn = null;
    Statement stmt = null;
    ResultSet rs = null;
    List<Long> deleteSet = new ArrayList<>();
    RetentionCounters rc = null;
    try {
        try {
            dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
            stmt = dbConn.createStatement();
            /* cc_id is monotonically increasing, so for any entity the rows sort in order of
         compaction history; thus this query groups by entity and, within each group, sorts
         most recent first. */
            rs = stmt.executeQuery("select cc_id, cc_database, cc_table, cc_partition, cc_state from " + "COMPLETED_COMPACTIONS order by cc_database, cc_table, cc_partition, cc_id desc");
            String lastCompactedEntity = null;
            /* In each group, walk from the most recent entry and count occurrences of each state
         * type.  Once enough of each state have been counted to satisfy the retention policy,
         * delete all other instances of that state. */
            while (rs.next()) {
                CompactionInfo ci = new CompactionInfo(rs.getLong(1), rs.getString(2), rs.getString(3), rs.getString(4), rs.getString(5).charAt(0));
                if (!ci.getFullPartitionName().equals(lastCompactedEntity)) {
                    lastCompactedEntity = ci.getFullPartitionName();
                    rc = new RetentionCounters(conf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_ATTEMPTED), getFailedCompactionRetention(), conf.getIntVar(HiveConf.ConfVars.COMPACTOR_HISTORY_RETENTION_SUCCEEDED));
                }
                checkForDeletion(deleteSet, ci, rc);
            }
            close(rs);
            if (deleteSet.isEmpty()) {
                return;
            }
            List<String> queries = new ArrayList<String>();
            StringBuilder prefix = new StringBuilder();
            StringBuilder suffix = new StringBuilder();
            prefix.append("delete from COMPLETED_COMPACTIONS where ");
            suffix.append("");
            TxnUtils.buildQueryWithINClause(conf, queries, prefix, suffix, deleteSet, "cc_id", false, false);
            for (String query : queries) {
                LOG.debug("Going to execute update <" + query + ">");
                int count = stmt.executeUpdate(query);
                LOG.debug("Removed " + count + " records from COMPLETED_COMPACTIONS");
            }
            dbConn.commit();
        } catch (SQLException e) {
            rollbackDBConn(dbConn);
            checkRetryable(dbConn, e, "purgeCompactionHistory()");
            throw new MetaException("Unable to connect to transaction database " + StringUtils.stringifyException(e));
        } finally {
            close(rs, stmt, dbConn);
        }
    } catch (RetryException ex) {
        purgeCompactionHistory();
    }
}
Also used: SQLException (java.sql.SQLException), PreparedStatement (java.sql.PreparedStatement), Statement (java.sql.Statement), Connection (java.sql.Connection), ArrayList (java.util.ArrayList), ResultSet (java.sql.ResultSet), MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
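
The load-bearing call above is TxnUtils.buildQueryWithINClause, which splits a potentially huge ID list across several DELETE statements so no single IN clause grows without bound. A self-contained sketch of the same batching idea, not the TxnUtils implementation; the batch size is illustrative:

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

public class InClauseBatchDemo {
    // Illustrative cap; real code derives limits from database-specific settings.
    private static final int MAX_IDS_PER_STATEMENT = 1000;

    static void deleteInBatches(Connection conn, List<Long> ids) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            for (int from = 0; from < ids.size(); from += MAX_IDS_PER_STATEMENT) {
                int to = Math.min(from + MAX_IDS_PER_STATEMENT, ids.size());
                StringBuilder sql =
                    new StringBuilder("delete from COMPLETED_COMPACTIONS where cc_id in (");
                for (int i = from; i < to; i++) {
                    if (i > from) {
                        sql.append(',');
                    }
                    sql.append(ids.get(i));
                }
                sql.append(')');
                int count = stmt.executeUpdate(sql.toString());
                System.out.println("Removed " + count + " records");
            }
        }
    }
}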

Aggregations

SQLException (java.sql.SQLException) 6792
PreparedStatement (java.sql.PreparedStatement) 3048
ResultSet (java.sql.ResultSet) 2426
Connection (java.sql.Connection) 1871
ArrayList (java.util.ArrayList) 972
Test (org.junit.Test) 873
Statement (java.sql.Statement) 779
IOException (java.io.IOException) 341
List (java.util.List) 335
CloudRuntimeException (com.cloud.utils.exception.CloudRuntimeException) 298
Properties (java.util.Properties) 255
DatabaseException (net.jforum.exceptions.DatabaseException) 249
HashMap (java.util.HashMap) 232
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection) 184
Timestamp (java.sql.Timestamp) 171
CallableStatement (java.sql.CallableStatement) 165
DbConnection (com.zimbra.cs.db.DbPool.DbConnection) 160
DalHints (com.ctrip.platform.dal.dao.DalHints) 159
Map (java.util.Map) 125
Date (java.util.Date) 123