Usage of io.cdap.cdap.proto.QueryHandle in project cdap by caskdata: class BaseHiveExploreServiceTest, method createNamespace.
/**
 * Creates a namespace directly, because app fabric is not started in explore tests.
 */
protected static void createNamespace(NamespaceId namespaceId) throws Exception {
  namespacePathLocator.get(namespaceId).mkdirs();
  NamespaceMeta meta = new NamespaceMeta.Builder().setName(namespaceId).build();
  namespaceAdmin.create(meta);
  // The default namespace already exists in the explore service; only create others explicitly.
  if (!NamespaceId.DEFAULT.equals(namespaceId)) {
    QueryHandle queryHandle = exploreService.createNamespace(meta);
    waitForCompletionStatus(queryHandle, 50, TimeUnit.MILLISECONDS, 40);
  }
}
Usage of io.cdap.cdap.proto.QueryHandle in project cdap by caskdata: class BaseHiveExploreService, method getCatalogs.
/**
 * Starts a read-only Hive operation that lists the available catalogs.
 *
 * @return a {@link QueryHandle} that can be used to poll for the operation's results
 * @throws SQLException if Hive reports a SQL-level error
 * @throws ExploreException for any other failure while talking to Hive
 */
@Override
public QueryHandle getCatalogs() throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle session = null;
    OperationHandle operation = null;
    Map<String, String> sessionConf = startSession();
    try {
      session = openHiveSession(sessionConf);
      operation = cliService.getCatalogs(session);
      QueryHandle queryHandle = saveReadOnlyOperation(operation, session, sessionConf, "", "");
      LOG.trace("Retrieving catalogs");
      return queryHandle;
    } catch (Throwable t) {
      // Release whatever was opened so far before propagating the failure.
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(session, operation, sessionConf, "", ""));
      throw t;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable t) {
    throw new ExploreException(t);
  }
}
Usage of io.cdap.cdap.proto.QueryHandle in project cdap by caskdata: class BaseHiveExploreService, method doStartSession.
/**
 * Builds the session configuration map used to start a Hive session for a query.
 *
 * <p>The returned map carries a freshly generated query id, the scheduler queue for the
 * namespace, a newly started transaction, and serialized copies of the CDAP and Hadoop
 * configurations. When security is enabled, it also points jobs at a credentials file
 * written for this query. Note that this method has side effects beyond building the map:
 * it starts a transaction and, under security, writes a credentials file to disk.
 *
 * @param namespace namespace the query runs in, used to resolve the scheduler queue;
 *                  may be null, in which case the default queue is used
 * @param additionalSessionConf extra settings merged in last, so they override any of the
 *                              defaults set here; may be null
 * @return the session configuration to pass to Hive
 */
private Map<String, String> doStartSession(@Nullable NamespaceId namespace, @Nullable Map<String, String> additionalSessionConf) throws IOException, ExploreException, NamespaceNotFoundException {
Map<String, String> sessionConf = new HashMap<>();
// Tag the session with a new query id so the query can be tracked/cleaned up later.
QueryHandle queryHandle = QueryHandle.generate();
sessionConf.put(Constants.Explore.QUERY_ID, queryHandle.getHandle());
String schedulerQueue = namespace != null ? schedulerQueueResolver.getQueue(namespace) : schedulerQueueResolver.getDefaultQueue();
if (schedulerQueue != null && !schedulerQueue.isEmpty()) {
sessionConf.put(JobContext.QUEUE_NAME, schedulerQueue);
}
// Start a transaction for the query and embed it (plus CDAP/Hadoop configs) in the
// session conf so downstream components can deserialize them.
Transaction tx = startTransaction();
ConfigurationUtil.set(sessionConf, Constants.Explore.TX_QUERY_KEY, TxnCodec.INSTANCE, tx);
ConfigurationUtil.set(sessionConf, Constants.Explore.CCONF_KEY, CConfCodec.INSTANCE, cConf);
ConfigurationUtil.set(sessionConf, Constants.Explore.HCONF_KEY, HConfCodec.INSTANCE, hConf);
HiveConf hiveConf = createHiveConf();
// Spark-on-Hive needs the extra Spark settings in the session conf.
if (ExploreServiceUtils.isSparkEngine(hiveConf, additionalSessionConf)) {
sessionConf.putAll(sparkConf);
}
if (UserGroupInformation.isSecurityEnabled()) {
// make sure RM does not cancel delegation tokens after the query is run
sessionConf.put("mapreduce.job.complete.cancel.delegation.tokens", "false");
sessionConf.put("spark.hadoop.mapreduce.job.complete.cancel.delegation.tokens", "false");
// write the user's credentials to a file, to be used for the query
File credentialsFile = writeCredentialsFile(queryHandle);
String credentialsFilePath = credentialsFile.getAbsolutePath();
// mapreduce.job.credentials.binary is added by Hive only if Kerberos credentials are present and impersonation
// is enabled. However, in our case we don't have Kerberos credentials for Explore service.
// Hence it will not be automatically added by Hive, instead we have to add it ourselves.
sessionConf.put(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY, credentialsFilePath);
// CDAP-8367 We need to set this back to Kerberos if security is enabled. We override it in HiveConf.
sessionConf.put(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, UserGroupInformation.AuthenticationMethod.KERBEROS.name());
sessionConf.put(Constants.Explore.SUBMITLOCALTASKVIACHILD, Boolean.FALSE.toString());
sessionConf.put(Constants.Explore.SUBMITVIACHILD, Boolean.FALSE.toString());
if (ExploreServiceUtils.isTezEngine(hiveConf, additionalSessionConf)) {
// Add token file location property for tez if engine is tez
sessionConf.put("tez.credentials.path", credentialsFilePath);
}
}
// Caller-provided settings go in last so they win over everything set above.
if (additionalSessionConf != null) {
sessionConf.putAll(additionalSessionConf);
}
return sessionConf;
}
Usage of io.cdap.cdap.proto.QueryHandle in project cdap by caskdata: class BaseHiveExploreService, method getTables.
/**
 * Starts a read-only Hive operation that lists tables matching the given filters.
 *
 * @param catalog catalog name to search in
 * @param schemaPattern schema name pattern, mapped to a Hive database via getHiveDatabase
 * @param tableNamePattern table name pattern to match
 * @param tableTypes table types (e.g. TABLE, VIEW) to include
 * @return a {@link QueryHandle} that can be used to poll for the operation's results
 * @throws SQLException if Hive reports a SQL-level error
 * @throws ExploreException for any other failure while talking to Hive
 */
@Override
public QueryHandle getTables(String catalog, String schemaPattern, String tableNamePattern, List<String> tableTypes) throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle session = null;
    OperationHandle operation = null;
    Map<String, String> sessionConf = startSession();
    String database = getHiveDatabase(schemaPattern);
    try {
      session = openHiveSession(sessionConf);
      operation = cliService.getTables(session, catalog, database, tableNamePattern, tableTypes);
      QueryHandle queryHandle = saveReadOnlyOperation(operation, session, sessionConf, "", database);
      LOG.trace("Retrieving tables: catalog {}, schemaNamePattern {}, tableNamePattern {}, tableTypes {}", catalog, database, tableNamePattern, tableTypes);
      return queryHandle;
    } catch (Throwable t) {
      // Release whatever was opened so far before propagating the failure.
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(session, operation, sessionConf, "", database));
      throw t;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable t) {
    throw new ExploreException(t);
  }
}
Usage of io.cdap.cdap.proto.QueryHandle in project cdap by caskdata: class BaseHiveExploreService, method getTableTypes.
/**
 * Starts a read-only Hive operation that lists the supported table types.
 *
 * @return a {@link QueryHandle} that can be used to poll for the operation's results
 * @throws SQLException if Hive reports a SQL-level error
 * @throws ExploreException for any other failure while talking to Hive
 */
@Override
public QueryHandle getTableTypes() throws ExploreException, SQLException {
  startAndWait();
  try {
    SessionHandle session = null;
    OperationHandle operation = null;
    Map<String, String> sessionConf = startSession();
    try {
      session = openHiveSession(sessionConf);
      operation = cliService.getTableTypes(session);
      QueryHandle queryHandle = saveReadOnlyOperation(operation, session, sessionConf, "", "");
      LOG.trace("Retrieving table types");
      return queryHandle;
    } catch (Throwable t) {
      // Release whatever was opened so far before propagating the failure.
      closeInternal(getQueryHandle(sessionConf),
                    new ReadOnlyOperationInfo(session, operation, sessionConf, "", ""));
      throw t;
    }
  } catch (HiveSQLException e) {
    throw getSqlException(e);
  } catch (Throwable t) {
    throw new ExploreException(t);
  }
}
Aggregations