Search in sources :

Example 21 with RowSet

Use of org.apache.hive.service.cli.RowSet in the Apache Hive project.

The class TestOperationLoggingLayout, method testHushableRandomAccessFileAppender.

/**
 * Test to make sure that appending log event to HushableRandomAccessFileAppender even after
 * closing the corresponding operation would not throw an exception.
 */
@Test
public void testHushableRandomAccessFileAppender() throws Exception {
    // Run a statement so the per-query routing appenders get created, then verify
    // the operation log can be fetched.
    OperationHandle operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null);
    RowSet rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 1000, FetchType.LOG);
    Appender queryAppender;
    Appender testQueryAppender;
    // The query id is parsed out of the fetched log lines; if logging format changes
    // this lookup breaks, hence the explicit assertion message.
    String queryId = getQueryId(rowSetLog);
    Assert.assertNotNull("Could not find query id, perhaps a logging message changed", queryId);
    // Grab both routing appenders while the operation is still open (expected state: not stopped).
    checkAppenderState("before operation close ", LogDivertAppender.QUERY_ROUTING_APPENDER, queryId, false);
    queryAppender = getAppender(LogDivertAppender.QUERY_ROUTING_APPENDER, queryId);
    checkAppenderState("before operation close ", LogDivertAppenderForTest.TEST_QUERY_ROUTING_APPENDER, queryId, false);
    testQueryAppender = getAppender(LogDivertAppenderForTest.TEST_QUERY_ROUTING_APPENDER, queryId);
    client.closeOperation(operationHandle);
    // Appending after close must not throw — this is the behavior under test.
    appendHushableRandomAccessFileAppender(queryAppender);
    appendHushableRandomAccessFileAppender(testQueryAppender);
}
Also used : Appender(org.apache.logging.log4j.core.Appender) LogDivertAppender(org.apache.hadoop.hive.ql.log.LogDivertAppender) RoutingAppender(org.apache.logging.log4j.core.appender.routing.RoutingAppender) HushableRandomAccessFileAppender(org.apache.hadoop.hive.ql.log.HushableRandomAccessFileAppender) RowSet(org.apache.hive.service.cli.RowSet) OperationHandle(org.apache.hive.service.cli.OperationHandle) LogDivertAppenderForTest(org.apache.hadoop.hive.ql.log.LogDivertAppenderForTest) Test(org.junit.Test)

Example 22 with RowSet

Use of org.apache.hive.service.cli.RowSet in the Apache Hive project.

The class TestOperationLoggingAPIWithMr, method testFetchResultsOfLogAsync.

@Test
public void testFetchResultsOfLogAsync() throws Exception {
    // Verify that the sql operation log is generated and can be fetched correctly in async mode.
    OperationHandle operationHandle = client.executeStatementAsync(sessionHandle, sql, null);
    // Poll the operation status until the query finishes, accumulating log output as we go.
    long deadline = System.currentTimeMillis() + 100000;
    boolean stillRunning = true;
    OperationStatus opStatus;
    OperationState state = null;
    RowSet rowSetAccumulated = null;
    StringBuilder logs = new StringBuilder();
    // Equivalent to checking for timeout at the top of each iteration and breaking.
    while (stillRunning && System.currentTimeMillis() <= deadline) {
        opStatus = client.getOperationStatus(operationHandle, false);
        Assert.assertNotNull(opStatus);
        state = opStatus.getState();
        // Incrementally drain the operation log produced so far.
        rowSetAccumulated = client.fetchResults(operationHandle, FetchOrientation.FETCH_NEXT, 2000, FetchType.LOG);
        for (Object[] row : rowSetAccumulated) {
            logs.append(row[0]);
        }
        boolean terminal = state == OperationState.CANCELED || state == OperationState.CLOSED || state == OperationState.FINISHED || state == OperationState.ERROR;
        if (terminal) {
            stillRunning = false;
        }
        Thread.sleep(10);
    }
    // The sql should be completed now.
    Assert.assertEquals("Query should be finished", OperationState.FINISHED, state);
    // Verify the logs gathered incrementally during polling.
    verifyFetchedLogPost(logs.toString(), expectedLogsVerbose, true);
    // Verify a full fetch from the beginning of the log file as well.
    RowSet rowSet = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 2000, FetchType.LOG);
    verifyFetchedLog(rowSet, expectedLogsVerbose);
}
Also used : OperationStatus(org.apache.hive.service.cli.OperationStatus) RowSet(org.apache.hive.service.cli.RowSet) OperationHandle(org.apache.hive.service.cli.OperationHandle) OperationState(org.apache.hive.service.cli.OperationState) Test(org.junit.Test)

Example 23 with RowSet

Use of org.apache.hive.service.cli.RowSet in the Apache Hive project.

The class TestOperationLoggingAPIWithMr, method testFetchResultsOfLog.

@Test
public void testFetchResultsOfLog() throws Exception {
    // Execute the statement synchronously, then confirm its operation log can be
    // fetched from the beginning and matches the expected verbose output.
    OperationHandle handle = client.executeStatement(sessionHandle, sql, null);
    RowSet fetchedLog = client.fetchResults(handle, FetchOrientation.FETCH_FIRST, 1000, FetchType.LOG);
    verifyFetchedLog(fetchedLog, expectedLogsVerbose);
}
Also used : RowSet(org.apache.hive.service.cli.RowSet) OperationHandle(org.apache.hive.service.cli.OperationHandle) Test(org.junit.Test)

Example 24 with RowSet

Use of org.apache.hive.service.cli.RowSet in the Apache Hive project.

The class HiveCommandOperation, method getNextRowSet.

/* (non-Javadoc)
   * @see org.apache.hive.service.cli.operation.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
   */
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
    validateDefaultFetchOrientation(orientation);
    // FETCH_FIRST rewinds to the beginning of the buffered command output.
    if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
        resetResultReader();
    }
    List<String> rows = readResults((int) maxRows);
    RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion(), false);
    // Command output lines are tab-delimited; split into columns only when the
    // resultSchema has more than 1 column, otherwise treat each line as one value.
    if (resultSchema != null && resultSchema.getSize() > 1) {
        for (String row : rows) {
            // Use limit -1 so trailing empty columns are preserved; the default
            // split() discards trailing empty strings, yielding rows with fewer
            // fields than the schema declares.
            rowSet.addRow(row.split("\\t", -1));
        }
    } else {
        for (String row : rows) {
            rowSet.addRow(new String[] { row });
        }
    }
    return rowSet;
}
Also used : RowSet(org.apache.hive.service.cli.RowSet)

Example 25 with RowSet

Use of org.apache.hive.service.cli.RowSet in the Apache Hive project.

The class SQLOperation, method getNextRowSet.

@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
    // Fetching is only legal once the query has finished executing.
    validateDefaultFetchOrientation(orientation);
    assertState(Collections.singleton(OperationState.FINISHED));
    FetchTask fetchTask = driver.getFetchTask();
    boolean isBlobBased = false;
    if (fetchTask != null && fetchTask.getWork().isUsingThriftJDBCBinarySerDe()) {
        // Just fetch one blob if we've serialized thrift objects in final tasks
        maxRows = 1;
        isBlobBased = true;
    }
    RowSet rowSet = RowSetFactory.create(getResultSetSchema(), getProtocolVersion(), isBlobBased);
    try {
        /* if client is requesting fetch-from-start and its not the first time reading from this operation
       * then reset the fetch position to beginning
       */
        if (orientation.equals(FetchOrientation.FETCH_FIRST) && fetchStarted) {
            driver.resetFetch();
        }
        fetchStarted = true;
        // Math.toIntExact throws ArithmeticException if maxRows overflows int,
        // which is caught below and wrapped in HiveSQLException.
        final int capacity = Math.toIntExact(maxRows);
        // NOTE(review): convey appears to be a reusable row buffer field; it is
        // cleared in the finally block so it is empty for the next fetch — confirm.
        convey.ensureCapacity(capacity);
        driver.setMaxRows(capacity);
        if (driver.getResults(convey)) {
            if (convey.size() == capacity) {
                log.info("Result set buffer filled to capacity [{}]", capacity);
            }
            // decode() converts the raw driver rows in convey into the RowSet.
            return decode(convey, rowSet);
        }
        // No more results: return the (empty) RowSet.
        return rowSet;
    } catch (Exception e) {
        throw new HiveSQLException("Unable to get the next row set with exception: " + e.getMessage(), e);
    } finally {
        convey.clear();
    }
}
Also used : HiveSQLException(org.apache.hive.service.cli.HiveSQLException) RowSet(org.apache.hive.service.cli.RowSet) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) SQLException(java.sql.SQLException) RejectedExecutionException(java.util.concurrent.RejectedExecutionException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) FetchTask(org.apache.hadoop.hive.ql.exec.FetchTask)

Aggregations

RowSet (org.apache.hive.service.cli.RowSet)27 OperationHandle (org.apache.hive.service.cli.OperationHandle)21 Test (org.junit.Test)14 SessionHandle (org.apache.hive.service.cli.SessionHandle)8 CLIServiceClient (org.apache.hive.service.cli.CLIServiceClient)5 HiveSQLException (org.apache.hive.service.cli.HiveSQLException)5 SQLException (java.sql.SQLException)3 ArrayList (java.util.ArrayList)3 QueryResult (co.cask.cdap.proto.QueryResult)2 ImmutableList (com.google.common.collect.ImmutableList)2 LogDivertAppenderForTest (org.apache.hadoop.hive.ql.log.LogDivertAppenderForTest)2 TFetchResultsResp (org.apache.hive.service.rpc.thrift.TFetchResultsResp)2 TException (org.apache.thrift.TException)2 File (java.io.File)1 IOException (java.io.IOException)1 UnsupportedEncodingException (java.io.UnsupportedEncodingException)1 Method (java.lang.reflect.Method)1 UnknownHostException (java.net.UnknownHostException)1 SQLFeatureNotSupportedException (java.sql.SQLFeatureNotSupportedException)1 SQLTimeoutException (java.sql.SQLTimeoutException)1