Use of io.cdap.cdap.proto.QueryHandle in project cdap by cdapio.
The class BaseHiveExploreService, method execute.
@Override
public QueryHandle execute(NamespaceId namespace, String statement,
                           @Nullable Map<String, String> additionalSessionConf)
  throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    LOG.trace("Got statement '{}' with additional session configuration {}", statement, additionalSessionConf);
    Map<String, String> sessionConf = startSession(namespace, additionalSessionConf);
    String database = getHiveDatabase(namespace.getNamespace());
    try {
      sessionHandle = openHiveSession(sessionConf);
      // Switch database to the one being passed in.
      setCurrentDatabase(database);
      operationHandle = executeAsync(sessionHandle, statement);
      QueryHandle handle = saveReadWriteOperation(operationHandle, sessionHandle, sessionConf, statement, database);
      LOG.trace("Executing statement: {} with handle {}", statement, handle);
      return handle;
    } catch (Throwable e) {
      closeInternal(getQueryHandle(sessionConf),
                    new ReadWriteOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
      throw e;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable e) {
    throw new ExploreException(e);
  }
}
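A minimal caller-side sketch of how the returned QueryHandle is typically used. The wiring is hypothetical: exploreService stands in for a started BaseHiveExploreService, and getStatus/isDone are assumed from the ExploreService and QueryStatus contracts.

// Hypothetical caller: submit a statement, then poll the async operation's status.
QueryHandle handle = exploreService.execute(new NamespaceId("default"), "SHOW TABLES", null);
QueryStatus status = exploreService.getStatus(handle);   // assumed ExploreService method
while (!status.getStatus().isDone()) {                    // isDone() assumed on QueryStatus.OpStatus
  TimeUnit.MILLISECONDS.sleep(200);                       // simple polling; a real client would back off
  status = exploreService.getStatus(handle);
}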
Use of io.cdap.cdap.proto.QueryHandle in project cdap by cdapio.
The class BaseHiveExploreService, method getTableTypes.
@Override
public QueryHandle getTableTypes() throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    Map<String, String> sessionConf = startSession();
    try {
      sessionHandle = openHiveSession(sessionConf);
      operationHandle = cliService.getTableTypes(sessionHandle);
      QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, "", "");
      LOG.trace("Retrieving table types");
      return handle;
    } catch (Throwable e) {
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", ""));
      throw e;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable e) {
    throw new ExploreException(e);
  }
}
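A hypothetical usage sketch for this method; nextResults and close are assumed from the ExploreService interface, and the printed values are illustrative.

// List Hive table types and fetch the results in one page.
QueryHandle handle = exploreService.getTableTypes();
for (QueryResult row : exploreService.nextResults(handle, 50)) {   // assumed ExploreService method
  System.out.println(row.getColumns());                            // e.g. [MANAGED_TABLE], [EXTERNAL_TABLE]
}
exploreService.close(handle);   // releases the cached session and operation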
Use of io.cdap.cdap.proto.QueryHandle in project cdap by cdapio.
The class BaseHiveExploreService, method saveReadOnlyOperation.
/**
 * Saves information associated with a Hive operation that is read-only on Datasets.
 *
 * @param operationHandle {@link OperationHandle} of the Hive operation running.
 * @param sessionHandle {@link SessionHandle} for the Hive operation running.
 * @param sessionConf configuration for the session running the Hive operation.
 * @param statement SQL statement executed with the call.
 * @param hiveDatabase name of the Hive database against which the operation runs.
 * @return {@link QueryHandle} that represents the Hive operation being run.
 */
private QueryHandle saveReadOnlyOperation(OperationHandle operationHandle, SessionHandle sessionHandle,
                                          Map<String, String> sessionConf, String statement, String hiveDatabase) {
  QueryHandle handle = QueryHandle.fromId(sessionConf.get(Constants.Explore.QUERY_ID));
  activeHandleCache.put(handle, new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, statement, hiveDatabase));
  return handle;
}
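The query id is seeded into the session configuration before the Hive session is opened, which is why fromId can recover it here. An illustrative sketch of that round trip; QueryHandle.generate() is assumed from the proto class, and the surrounding session setup is elided.

// The id is placed into the session conf up front ...
Map<String, String> sessionConf = new HashMap<>();
sessionConf.put(Constants.Explore.QUERY_ID, QueryHandle.generate().getHandle());   // generate() assumed
// ... and later round-trips back into a QueryHandle that keys activeHandleCache:
QueryHandle handle = QueryHandle.fromId(sessionConf.get(Constants.Explore.QUERY_ID));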
Use of io.cdap.cdap.proto.QueryHandle in project cdap by cdapio.
The class ExploreExecutorHttpHandler, method doPartitionOperation.
private void doPartitionOperation(FullHttpRequest request, HttpResponder responder, DatasetId datasetId,
                                  PartitionOperation partitionOperation) {
  try (SystemDatasetInstantiator datasetInstantiator = datasetInstantiatorFactory.create()) {
    Dataset dataset;
    try {
      dataset = datasetInstantiator.getDataset(datasetId);
    } catch (Exception e) {
      LOG.error("Exception instantiating dataset {}.", datasetId, e);
      responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception instantiating dataset " + datasetId);
      return;
    }
    try {
      if (!(dataset instanceof PartitionedFileSet)) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "not a partitioned dataset.");
        return;
      }
      Partitioning partitioning = ((PartitionedFileSet) dataset).getPartitioning();
      Reader reader = new InputStreamReader(new ByteBufInputStream(request.content()));
      Map<String, String> properties = GSON.fromJson(reader, new TypeToken<Map<String, String>>() { }.getType());
      PartitionKey partitionKey;
      try {
        partitionKey = PartitionedFileSetArguments.getOutputPartitionKey(properties, partitioning);
      } catch (Exception e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "invalid partition key: " + e.getMessage());
        return;
      }
      if (partitionKey == null) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "no partition key was given.");
        return;
      }
      QueryHandle handle = partitionOperation.submitOperation(partitionKey, properties);
      if (handle == null) {
        return;
      }
      JsonObject json = new JsonObject();
      json.addProperty("handle", handle.getHandle());
      responder.sendJson(HttpResponseStatus.OK, json.toString());
    } finally {
      Closeables.closeQuietly(dataset);
    }
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
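The request body is a flat JSON map of output-partition properties. A sketch of how a client might build that map, assuming a partitioning with int fields "year" and "month"; the field names and values are illustrative.

// Hypothetical client-side construction of the properties map this handler parses.
Map<String, String> properties = new HashMap<>();
PartitionKey key = PartitionKey.builder()
  .addIntField("year", 2024)
  .addIntField("month", 7)
  .build();
PartitionedFileSetArguments.setOutputPartitionKey(properties, key);
String body = GSON.toJson(properties);   // POSTed as the request content;
// the handler recovers the key with getOutputPartitionKey(properties, partitioning).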
Use of io.cdap.cdap.proto.QueryHandle in project cdap by cdapio.
The class ExploreExecutorHttpHandler, method updateDataset.
/**
 * Update the ad-hoc exploration parameters of a dataset instance.
 */
@POST
@Path("datasets/{dataset}/update")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void updateDataset(FullHttpRequest request, HttpResponder responder,
                          @PathParam("namespace-id") String namespace,
                          @PathParam("dataset") String datasetName) throws BadRequestException {
  final DatasetId datasetId = new DatasetId(namespace, datasetName);
  try {
    UpdateExploreParameters params = readUpdateParameters(request);
    final DatasetSpecification oldSpec = params.getOldSpec();
    final DatasetSpecification datasetSpec = params.getNewSpec();
    QueryHandle handle;
    if (oldSpec.equals(datasetSpec)) {
      handle = QueryHandle.NO_OP;
    } else {
      handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
        @Override
        public QueryHandle call() throws Exception {
          return exploreTableManager.updateDataset(datasetId, datasetSpec, oldSpec);
        }
      });
    }
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json.toString());
  } catch (IllegalArgumentException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (ExploreException e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error updating explore on dataset " + datasetId);
  } catch (SQLException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, "SQL exception while trying to update explore on dataset " + datasetId);
  } catch (UnsupportedTypeException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
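When the old and new specifications match, the handler returns QueryHandle.NO_OP, so callers can skip polling entirely. An illustrative sketch of client-side response handling; the "handle" field name comes from the handler above, while responseBody and the equals check on NO_OP are assumptions.

// Parse the "handle" field from the JSON response and short-circuit on no-ops.
JsonObject response = GSON.fromJson(responseBody, JsonObject.class);
QueryHandle handle = QueryHandle.fromId(response.get("handle").getAsString());
if (QueryHandle.NO_OP.equals(handle)) {
  return;   // old and new dataset specs matched; no Hive operation was started
}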