Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class SetProcessor, method run:
@Override
public CommandProcessorResponse run(String command) {
  SessionState ss = SessionState.get();
  String nwcmd = command.trim();
  // A bare "set" lists only the properties that differ from the defaults.
  if (nwcmd.equals("")) {
    dumpOptions(ss.getConf().getChangedProperties());
    return createProcessorSuccessResponse();
  }
  // "set -v" dumps everything, pulling in the Tez configuration when Tez is the execution engine.
  if (nwcmd.equals("-v")) {
    Properties properties = null;
    if (ss.getConf().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
      Class<?> clazz;
      try {
        clazz = Class.forName("org.apache.tez.dag.api.TezConfiguration");
        Configuration tezConf = (Configuration) clazz.getConstructor(Configuration.class).newInstance(ss.getConf());
        properties = HiveConf.getProperties(tezConf);
      } catch (Exception e) {
        return new CommandProcessorResponse(1, e.getMessage(), "42000", e);
      }
    } else {
      properties = ss.getConf().getAllProperties();
    }
    dumpOptions(properties);
    return createProcessorSuccessResponse();
  }
  // Special handling for time-zone
  Matcher matcher = TIME_ZONE_PATTERN.matcher(nwcmd);
  if (matcher.find()) {
    nwcmd = HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE.varname + "=" + nwcmd.substring(matcher.end());
  }
  String[] part = new String[2];
  int eqIndex = nwcmd.indexOf('=');
  if (nwcmd.contains("=")) {
    if (eqIndex == nwcmd.length() - 1) {
      // x=
      part[0] = nwcmd.substring(0, nwcmd.length() - 1);
      part[1] = "";
    } else {
      // x=y
      part[0] = nwcmd.substring(0, eqIndex).trim();
      part[1] = nwcmd.substring(eqIndex + 1).trim();
    }
    if (part[0].equals("silent")) {
      ss.setIsSilent(getBoolean(part[1]));
      return new CommandProcessorResponse(0);
    }
    return executeSetVariable(part[0], part[1]);
  }
  try {
    return getVariable(nwcmd);
  } catch (Exception e) {
    return new CommandProcessorResponse(1, e.getMessage(), "42000", e);
  }
}
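A minimal usage sketch, assuming a SessionState has been started for the current thread and the CommandProcessorResponse accessors (getResponseCode, getErrorMessage) from the same Hive line as the snippet; the property value is illustrative only:

// Sketch: drive SetProcessor directly with the text that follows the SET keyword
// (the CLI/driver strips "set" before calling run). Not production code.
SessionState.start(new HiveConf());
SetProcessor set = new SetProcessor();
CommandProcessorResponse resp = set.run("hive.execution.engine=mr");
if (resp.getResponseCode() != 0) {
  System.err.println("SET failed: " + resp.getErrorMessage());
}
// An empty command dumps only the properties that differ from the defaults.
set.run("");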
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class ExprNodeGenericFuncDesc, method newInstance:
/**
 * Create an ExprNodeGenericFuncDesc based on the given GenericUDF and the
 * children parameters. If the function has an explicit name, the
 * newInstance method should be passed the function name in the funcText
 * argument.
 *
 * @throws UDFArgumentException
 */
public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF, String funcText, List<ExprNodeDesc> children) throws UDFArgumentException {
  ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
  for (int i = 0; i < childrenOIs.length; i++) {
    childrenOIs[i] = children.get(i).getWritableObjectInspector();
  }
  // Perform the check here instead of in GenericUDFBaseCompare to guarantee it is only run once per operator
  if (genericUDF instanceof GenericUDFBaseCompare && children.size() == 2) {
    TypeInfo oiTypeInfo0 = children.get(0).getTypeInfo();
    TypeInfo oiTypeInfo1 = children.get(1).getTypeInfo();
    SessionState ss = SessionState.get();
    Configuration conf = (ss != null) ? ss.getConf() : new Configuration();
    LogHelper console = new LogHelper(LOG);
    // If a bigint is compared to a string or a double, throw an error in strict mode or print a warning.
    if ((oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) || (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.stringTypeInfo))) {
      String error = StrictChecks.checkTypeSafety(conf);
      if (error != null) {
        throw new UDFArgumentException(error);
      }
      console.printError("WARNING: Comparing a bigint and a string may result in a loss of precision.");
    } else if ((oiTypeInfo0.equals(TypeInfoFactory.doubleTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) || (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.doubleTypeInfo))) {
      String error = StrictChecks.checkTypeSafety(conf);
      if (error != null) {
        throw new UDFArgumentException(error);
      }
      console.printError("WARNING: Comparing a bigint and a double may result in a loss of precision.");
    }
  }
  ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);
  // Register any jars and files the UDF declares as session resources.
  String[] requiredJars = genericUDF.getRequiredJars();
  String[] requiredFiles = genericUDF.getRequiredFiles();
  SessionState ss = SessionState.get();
  if (requiredJars != null) {
    SessionState.ResourceType t = SessionState.find_resource_type("JAR");
    try {
      ss.add_resources(t, Arrays.asList(requiredJars));
    } catch (Exception e) {
      throw new UDFArgumentException(e);
    }
  }
  if (requiredFiles != null) {
    SessionState.ResourceType t = SessionState.find_resource_type("FILE");
    try {
      ss.add_resources(t, Arrays.asList(requiredFiles));
    } catch (Exception e) {
      throw new UDFArgumentException(e);
    }
  }
  return new ExprNodeGenericFuncDesc(oi, genericUDF, funcText, children);
}
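As a hedged illustration of the funcText contract described in the javadoc, the sketch below builds an equality comparison over a bigint column and a bigint constant. GenericUDFOPEqual, ExprNodeColumnDesc and ExprNodeConstantDesc are existing Hive plan/UDF classes; the column name and table alias are made up for illustration.

// Sketch: construct "t.id = 1" programmatically. Both sides are bigint,
// so the string/double precision warnings above do not fire.
// newInstance throws UDFArgumentException, propagated to the caller here.
List<ExprNodeDesc> children = new ArrayList<>();
children.add(new ExprNodeColumnDesc(TypeInfoFactory.longTypeInfo, "id", "t", false));
children.add(new ExprNodeConstantDesc(1L));
ExprNodeGenericFuncDesc eq =
    ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPEqual(), "=", children);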
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class DDLTask, method killQuery:
private int killQuery(Hive db, KillQueryDesc desc) throws HiveException {
  SessionState sessionState = SessionState.get();
  for (String queryId : desc.getQueryIds()) {
    sessionState.getKillQuery().killQuery(queryId, "User invoked KILL QUERY");
  }
  LOG.info("kill query called ({})", desc.getQueryIds());
  return 0;
}
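For context, the query ids in KillQueryDesc normally come from a HiveQL KILL QUERY statement. A hypothetical helper that mirrors the two-argument killQuery call above (the query id literal is made up):

// Sketch only: cancel a single query through the session's KillQuery hook.
private void killOne(String queryId) throws HiveException {
  SessionState ss = SessionState.get();
  if (ss != null && ss.getKillQuery() != null) {
    ss.getKillQuery().killQuery(queryId, "User invoked KILL QUERY");
  }
}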
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class TezSessionState, method createTezDir:
/**
 * createTezDir creates a temporary directory in the scratchDir folder to
 * be used with Tez. Assumes scratchDir exists.
 */
private Path createTezDir(String sessionId, String suffix) throws IOException {
  // tez needs its own scratch dir (per session)
  // TODO: De-link from SessionState. A TezSession can be linked to different Hive Sessions via the pool.
  SessionState sessionState = SessionState.get();
  String hdfsScratchDir = sessionState == null ? HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR) : sessionState.getHdfsScratchDirURIString();
  Path tezDir = new Path(hdfsScratchDir, TEZ_DIR);
  tezDir = new Path(tezDir, sessionId + ((suffix == null) ? "" : ("-" + suffix)));
  FileSystem fs = tezDir.getFileSystem(conf);
  FsPermission fsPermission = new FsPermission(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIRPERMISSION));
  fs.mkdirs(tezDir, fsPermission);
  // Make sure the path is normalized (we expect validation to pass since we just created it).
  tezDir = DagUtils.validateTargetDir(tezDir, conf).getPath();
  // Directory removal will be handled by cleanup at the SessionState level.
  return tezDir;
}
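To make the resulting layout concrete, a rough sketch under assumed values: a scratch dir of /tmp/hive, a Tez sub-directory name of _tez_session_dir and a "700" permission string. All three values are illustrative; the real method derives them from HiveConf and SessionState as shown above.

// Sketch: the per-session Tez directory ends up looking like
//   /tmp/hive/_tez_session_dir/<sessionId>-<suffix>
Path tezDir = new Path(new Path("/tmp/hive", "_tez_session_dir"), "session-1234-retry1");
FileSystem fs = tezDir.getFileSystem(new Configuration());   // throws IOException, as in createTezDir
fs.mkdirs(tezDir, new FsPermission("700"));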
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class Utilities, method getSessionSpecifiedClassLoader:
/**
 * Get the session-specified class loader, falling back to the current
 * thread-based class loader if no session (or session configuration) is available.
 *
 * @return the session's class loader, or the thread-based class loader as a fallback
 */
public static ClassLoader getSessionSpecifiedClassLoader() {
  SessionState state = SessionState.get();
  if (state == null || state.getConf() == null) {
    LOG.debug("Hive Conf not found or Session not initiated, use thread based class loader instead");
    return JavaUtils.getClassLoader();
  }
  ClassLoader sessionCL = state.getConf().getClassLoader();
  if (sessionCL != null) {
    // the normal case: the session's configuration carries a class loader
    LOG.trace("Use session specified class loader");
    return sessionCL;
  }
  LOG.debug("Session specified class loader not found, use thread based class loader");
  return JavaUtils.getClassLoader();
}
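A typical caller resolves a user-supplied class (for example a custom UDF or SerDe) through this loader; the class name below is a placeholder:

// Sketch: load a class with the session loader so that jars added via
// "ADD JAR" in the current session are visible.
try {
  Class<?> udfClass = Class.forName("com.example.MyCustomUDF", true, Utilities.getSessionSpecifiedClassLoader());
} catch (ClassNotFoundException e) {
  LOG.error("Could not load class", e);
}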