Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
Class CompileProcessor, method run.
/**
 * User supplies dynamic code in this format:
 * COMPILE ` some code here ` AS groovy NAMED something.groovy;
 * CompileProcessor will compile and package this code into a jar. The jar
 * will be added to the session state via the session state's
 * ADD RESOURCE command.
 * @param command a String to be compiled
 * @return CommandProcessorResponse with 0 for success and 1 for failure
 */
@Override
public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
  SessionState ss = SessionState.get();
  this.command = command;
  CommandProcessorResponse authErrResp =
      CommandUtil.authorizeCommand(ss, HiveOperationType.COMPILE, Arrays.asList(command));
  if (authErrResp != null) {
    // there was an authorization issue
    return authErrResp;
  }
  myId = runCount.getAndIncrement();
  try {
    parse(ss);
  } catch (CompileProcessorException e) {
    return CommandProcessorResponse.create(e);
  }
  CommandProcessorResponse result = null;
  try {
    result = compile(ss);
  } catch (CompileProcessorException e) {
    return CommandProcessorResponse.create(e);
  }
  return result;
}
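As a rough usage sketch (not part of the Hive source), the processor could be driven directly as below. In practice the instance comes from Hive's CommandProcessorFactory, and whether run receives the full statement or only the text after the COMPILE keyword depends on that dispatch, so the command string here is illustrative:

// Illustrative only: a Groovy snippet compiled and packaged per the Javadoc above.
CompileProcessor cp = new CompileProcessor();
String stmt = "` class Hello { def greet(String name) { return 'hello ' + name } } ` "
    + "AS GROOVY NAMED hello.groovy";
CommandProcessorResponse resp = cp.run(stmt);
if (resp.getResponseCode() != 0) {
  // 1 indicates a compile or packaging failure, per the Javadoc
  System.err.println("COMPILE failed: " + resp.getErrorMessage());
}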
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
Class Registry, method getQualifiedFunctionInfoUnderLock.
private FunctionInfo getQualifiedFunctionInfoUnderLock(String qualifiedName) throws SemanticException {
  FunctionInfo info = mFunctions.get(qualifiedName);
  if (info != null && info.isBlockedFunction()) {
    throw new SemanticException("UDF " + qualifiedName + " is not allowed");
  }
  if (!isNative && info != null && info.isDiscarded()) {
    // the persistent function is discarded. try reload
    mFunctions.remove(qualifiedName);
    return null;
  }
  // In a shared JVM, make sure the UDF class is registered to the session
  // class loader, and if necessary load the JARs in this thread.
  if (isNative && info != null && info.isPersistent()) {
    return registerToSessionRegistry(qualifiedName, info);
  }
  if (info != null || !isNative) {
    // We have the UDF, or we are in the session registry (or both).
    return info;
  }
  // If we are in the system registry and this feature is enabled, try to get it from metastore.
  SessionState ss = SessionState.get();
  HiveConf conf = (ss == null) ? null : ss.getConf();
  if (conf == null || !HiveConf.getBoolVar(conf, ConfVars.HIVE_ALLOW_UDF_LOAD_ON_DEMAND)) {
    return null;
  }
  // This is a little bit weird. We'll do the MS call outside of the lock. Our caller calls us
  // under lock, so we'd preserve the lock state for them; their finally block will release the
  // lock correctly. See the comment on the lock field - the locking needs to be reworked.
  lock.unlock();
  try {
    return getFunctionInfoFromMetastoreNoLock(qualifiedName, conf);
  } finally {
    lock.lock();
  }
}
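The notable pattern above is the temporary lock release around the metastore call: the method is entered with the lock held, drops it for the slow round trip, and reacquires it in a finally block so the caller's own finally still unlocks a held lock. A minimal standalone sketch of that hand-off, with illustrative names rather than Hive's:

import java.util.concurrent.locks.ReentrantLock;

class LockHandOff {
  private final ReentrantLock lock = new ReentrantLock();

  // Caller holds 'lock' when invoking this method; we must return with it held.
  Object slowCallOutsideLock() {
    lock.unlock(); // release so the remote call does not block other threads
    try {
      return remoteFetch(); // potentially slow metastore-style round trip
    } finally {
      lock.lock(); // reacquire so the caller's finally { lock.unlock(); } balances
    }
  }

  private Object remoteFetch() { return new Object(); }
}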
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
Class GenericUDFTimestamp, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1) {
    throw new UDFArgumentLengthException(
        "The function TIMESTAMP requires at least one argument, got " + arguments.length);
  }
  SessionState ss = SessionState.get();
  if (ss != null) {
    intToTimestampInSeconds = ss.getConf().getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS);
  }
  try {
    argumentOI = (PrimitiveObjectInspector) arguments[0];
  } catch (ClassCastException e) {
    throw new UDFArgumentException("The function TIMESTAMP takes only primitive types");
  }
  tc = new TimestampConverter(argumentOI, PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  tc.setIntToTimestampInSeconds(intToTimestampInSeconds);
  return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
}
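HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS controls whether an integer cast to TIMESTAMP is read as seconds or as milliseconds since the epoch. A self-contained sketch of the two interpretations using plain java.time (Hive itself routes this through TimestampConverter, not the code below):

import java.time.Instant;

class IntToTimestamp {
  // Mirrors the choice HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS makes:
  // true  -> integers are seconds since the epoch
  // false -> integers are milliseconds since the epoch
  static Instant convert(long value, boolean inSeconds) {
    return inSeconds ? Instant.ofEpochSecond(value) : Instant.ofEpochMilli(value);
  }

  public static void main(String[] args) {
    System.out.println(convert(1_500_000_000L, true));  // 2017-07-14T02:40:00Z
    System.out.println(convert(1_500_000_000L, false)); // 1970-01-18T08:40:00Z
  }
}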
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
Class TestAddResource, method testSanity.
// Check that all the jars are added to the classpath
@Test
public void testSanity() throws URISyntaxException, IOException {
  SessionState ss = Mockito.spy(SessionState.start(conf).get());
  String query = "testQuery";
  // add all the dependencies to a list
  List<URI> list = new LinkedList<URI>();
  List<String> addList = new LinkedList<String>();
  list.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  // return all the dependency URIs when the query is resolved
  Mockito.when(ss.resolveAndDownload(query, false)).thenReturn(list);
  addList.add(query);
  ss.add_resources(t, addList);
  Set<String> dependencies = ss.list_resource(t, null);
  LinkedList<URI> actual = new LinkedList<URI>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  // sort both lists before comparing
  Collections.sort(list);
  Collections.sort(actual);
  assertEquals(list, actual);
  ss.close();
}
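The test relies on fixtures defined elsewhere in TestAddResource (conf, t, TEST_JAR_DIR) and a small createURI helper; a plausible minimal form of those pieces, labeled as assumptions rather than the actual test code:

// Hypothetical fixtures; the real TestAddResource defines its own versions.
private static final String TEST_JAR_DIR = "file:///tmp/jars/"; // illustrative path
private final HiveConf conf = new HiveConf();
private final SessionState.ResourceType t = SessionState.ResourceType.JAR;

// Minimal helper; the real one may normalize platform-specific separators.
private URI createURI(String path) throws URISyntaxException {
  return new URI(path);
}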
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
Class TestAddResource, method testUnion.
// When two jars with shared dependencies are added, the classloader should
// contain the union of the dependencies.
@Test
public void testUnion() throws URISyntaxException, IOException {
  HiveConf conf = new HiveConf();
  SessionState ss = Mockito.spy(SessionState.start(conf).get());
  ResourceType t = ResourceType.JAR;
  String query1 = "testQuery1";
  String query2 = "testQuery2";
  List<String> addList = new LinkedList<String>();
  // add dependencies for the two jars; testjar3 and testjar4 are shared
  List<URI> list1 = new LinkedList<URI>();
  List<URI> list2 = new LinkedList<URI>();
  list1.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list1.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
  list2.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
  Mockito.when(ss.resolveAndDownload(query1, false)).thenReturn(list1);
  Mockito.when(ss.resolveAndDownload(query2, false)).thenReturn(list2);
  addList.add(query1);
  addList.add(query2);
  ss.add_resources(t, addList);
  Set<String> dependencies = ss.list_resource(t, null);
  LinkedList<URI> actual = new LinkedList<URI>();
  for (String dependency : dependencies) {
    actual.add(createURI(dependency));
  }
  List<URI> expected = union(list1, list2);
  Collections.sort(expected);
  Collections.sort(actual);
  assertEquals(expected, actual);
  ss.close();
}
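The union helper called near the end is not shown in the snippet; a straightforward duplicate-dropping version consistent with how the test uses it (an assumption, not the test's actual code):

// Hypothetical union helper: merge both dependency lists, dropping duplicates.
// LinkedHashSet preserves insertion order, though the test sorts before comparing.
private <T> List<T> union(List<T> a, List<T> b) {
  Set<T> merged = new LinkedHashSet<T>(a);
  merged.addAll(b);
  return new LinkedList<T>(merged);
}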