Use of co.cask.cdap.proto.QueryHandle in project cdap by caskdata.
In class BaseHiveExploreService, method getTables:
@Override
public QueryHandle getTables(String catalog, String schemaPattern, String tableNamePattern,
                             List<String> tableTypes) throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    Map<String, String> sessionConf = startSession();
    String database = getHiveDatabase(schemaPattern);
    try {
      sessionHandle = openHiveSession(sessionConf);
      operationHandle = cliService.getTables(sessionHandle, catalog, database, tableNamePattern, tableTypes);
      QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, "", database);
      LOG.trace("Retrieving tables: catalog {}, schemaNamePattern {}, tableNamePattern {}, tableTypes {}",
                catalog, database, tableNamePattern, tableTypes);
      return handle;
    } catch (Throwable e) {
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
      throw e;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable e) {
    throw new ExploreException(e);
  }
}
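A minimal caller-side sketch of how the returned QueryHandle might be consumed. The service reference, the database and table patterns, and the polling calls (getStatus, nextResults, close) are assumptions for illustration; they are not shown in the snippet above.

import co.cask.cdap.proto.QueryHandle;
import co.cask.cdap.proto.QueryResult;
import co.cask.cdap.proto.QueryStatus;
import java.util.concurrent.TimeUnit;

public final class ListTablesExample {

  // Lists Hive tables matching "%" in the "default" database and prints each row.
  // getStatus, nextResults and close are assumed to exist on the same service.
  public static void listTables(BaseHiveExploreService explore) throws Exception {
    QueryHandle handle = explore.getTables(null, "default", "%", null);
    QueryStatus status = explore.getStatus(handle);
    // Poll until the metadata operation leaves its non-terminal states.
    while (status.getStatus() == QueryStatus.OpStatus.INITIALIZED
        || status.getStatus() == QueryStatus.OpStatus.RUNNING) {
      TimeUnit.MILLISECONDS.sleep(200);
      status = explore.getStatus(handle);
    }
    if (status.getStatus() == QueryStatus.OpStatus.FINISHED) {
      for (QueryResult row : explore.nextResults(handle, 100)) {
        System.out.println(row.getColumns());
      }
    }
    explore.close(handle);
  }
}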
Use of co.cask.cdap.proto.QueryHandle in project cdap by caskdata.
In class BaseHiveExploreService, method getTableTypes:
@Override
public QueryHandle getTableTypes() throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    Map<String, String> sessionConf = startSession();
    try {
      sessionHandle = openHiveSession(sessionConf);
      operationHandle = cliService.getTableTypes(sessionHandle);
      QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, "", "");
      LOG.trace("Retrieving table types");
      return handle;
    } catch (Throwable e) {
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", ""));
      throw e;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable e) {
    throw new ExploreException(e);
  }
}
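For the table-type listing, a caller often only needs the result schema. A fragment-level sketch, assuming the same service also exposes getResultSchema and close for a handle:

import co.cask.cdap.proto.ColumnDesc;
import co.cask.cdap.proto.QueryHandle;

// Hypothetical fragment: prints the column layout of the table-type listing.
static void printTableTypeSchema(BaseHiveExploreService explore) throws Exception {
  QueryHandle handle = explore.getTableTypes();
  for (ColumnDesc column : explore.getResultSchema(handle)) {
    System.out.println(column.getName() + " : " + column.getType());
  }
  explore.close(handle);
}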
Use of co.cask.cdap.proto.QueryHandle in project cdap by caskdata.
In class BaseHiveExploreService, method execute:
@Override
public QueryHandle execute(NamespaceId namespace, String statement,
                           @Nullable Map<String, String> additionalSessionConf)
  throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    LOG.trace("Got statement '{}' with additional session configuration {}", statement, additionalSessionConf);
    Map<String, String> sessionConf = startSession(namespace, additionalSessionConf);
    String database = getHiveDatabase(namespace.getNamespace());
    try {
      sessionHandle = openHiveSession(sessionConf);
      // Switch database to the one being passed in.
      setCurrentDatabase(database);
      operationHandle = executeAsync(sessionHandle, statement);
      QueryHandle handle = saveReadWriteOperation(operationHandle, sessionHandle, sessionConf, statement, database);
      LOG.trace("Executing statement: {} with handle {}", statement, handle);
      return handle;
    } catch (Throwable e) {
      closeInternal(getQueryHandle(sessionConf),
                    new ReadWriteOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
      throw e;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable e) {
    throw new ExploreException(e);
  }
}
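A sketch of submitting a statement through this method. The service reference, the namespace, the table name and the extra Hive session property are illustrative only:

import co.cask.cdap.proto.QueryHandle;
import co.cask.cdap.proto.id.NamespaceId;
import com.google.common.collect.ImmutableMap;
import java.util.Map;

// Hypothetical fragment: runs an ad-hoc query in the "default" namespace with
// one additional Hive session setting. The returned handle is polled for
// status and results as sketched for getTables above.
static QueryHandle runQuery(BaseHiveExploreService explore) throws Exception {
  NamespaceId namespace = new NamespaceId("default");
  Map<String, String> extraConf = ImmutableMap.of("hive.execution.engine", "mr");
  return explore.execute(namespace, "SELECT * FROM stream_events LIMIT 10", extraConf);
}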
Use of co.cask.cdap.proto.QueryHandle in project cdap by caskdata.
In class BaseHiveExploreService, method getColumns:
@Override
public QueryHandle getColumns(String catalog, String schemaPattern, String tableNamePattern,
                              String columnNamePattern) throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    Map<String, String> sessionConf = startSession();
    String database = getHiveDatabase(schemaPattern);
    try {
      sessionHandle = openHiveSession(sessionConf);
      operationHandle = cliService.getColumns(sessionHandle, catalog, database, tableNamePattern, columnNamePattern);
      QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, "", database);
      LOG.trace("Retrieving columns: catalog {}, schemaPattern {}, tableNamePattern {}, columnNamePattern {}",
                catalog, database, tableNamePattern, columnNamePattern);
      return handle;
    } catch (Throwable e) {
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
      throw e;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable e) {
    throw new ExploreException(e);
  }
}
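A sketch of a column lookup against this method. The patterns follow JDBC DatabaseMetaData wildcard syntax ("%" matches any sequence, "_" a single character); the service reference, schema and table prefix are illustrative:

import co.cask.cdap.proto.QueryHandle;

// Hypothetical fragment: lists every column of tables whose names start with
// "dataset_" in the "default" schema; rows are then fetched through the handle.
static QueryHandle listDatasetColumns(BaseHiveExploreService explore) throws Exception {
  return explore.getColumns(null, "default", "dataset_%", "%");
}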
Use of co.cask.cdap.proto.QueryHandle in project cdap by caskdata.
In class ExploreExecutorHttpHandler, method updateDataset:
/**
 * Update ad-hoc exploration of a dataset instance after its specification has changed.
 */
@POST
@Path("datasets/{dataset}/update")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void updateDataset(HttpRequest request, HttpResponder responder,
                          @PathParam("namespace-id") String namespace,
                          @PathParam("dataset") String datasetName) throws BadRequestException {
  final DatasetId datasetId = new DatasetId(namespace, datasetName);
  try {
    UpdateExploreParameters params = readUpdateParameters(request);
    final DatasetSpecification oldSpec = params.getOldSpec();
    final DatasetSpecification datasetSpec = params.getNewSpec();
    QueryHandle handle;
    if (oldSpec.equals(datasetSpec)) {
      handle = QueryHandle.NO_OP;
    } else {
      handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
        @Override
        public QueryHandle call() throws Exception {
          return exploreTableManager.updateDataset(datasetId, datasetSpec, oldSpec);
        }
      });
    }
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json);
  } catch (IllegalArgumentException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (ExploreException e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR,
                         "Error updating explore on dataset " + datasetId);
  } catch (SQLException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "SQL exception while trying to update explore on dataset " + datasetId);
  } catch (UnsupportedTypeException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
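A hedged client-side sketch of invoking this endpoint over HTTP. Only the relative path datasets/{dataset}/update is visible above; the host, port, API prefix, dataset name, and the exact JSON shape of UpdateExploreParameters (an oldSpec/newSpec pair) are assumptions:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public final class UpdateExploreCall {
  public static void main(String[] args) throws Exception {
    // Assumed full route; only "datasets/{dataset}/update" is shown in the handler.
    URL url = new URL("http://localhost:11015/v3/namespaces/default/data/explore/datasets/purchases/update");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    // Dataset specifications elided; the body mirrors UpdateExploreParameters.
    String body = "{\"oldSpec\": {}, \"newSpec\": {}}";
    try (OutputStream out = conn.getOutputStream()) {
      out.write(body.getBytes(StandardCharsets.UTF_8));
    }
    // A 200 response carries {"handle": "..."}; QueryHandle.NO_OP comes back
    // when the old and new specifications are equal.
    System.out.println(conn.getResponseCode());
  }
}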