Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class MRExecutor, method executePlan.
/**
 * Set the MapredWork query plan on a MapRedTask and invoke it.
 * MapRedTask (an extension of ExecDriver) is used to execute
 * the query plan on Hadoop.
 * @param mr the MapredWork query plan to execute
 * @param hiveConf the Hive configuration used to initialize the task
 * @throws Exception if the task fails to initialize or execute
 */
private void executePlan(MapredWork mr, HiveConf hiveConf) throws Exception {
    MapRedTask mrtask = new MapRedTask();
    DriverContext dctx = new DriverContext();
    mrtask.setWork(mr);
    mrtask.initialize(hiveConf, null, dctx);
    int exitVal = mrtask.execute(dctx);
    if (exitVal != 0) {
        System.out.println("Test execution failed with exit status: " + exitVal);
        throw new WindowingException("Test execution failed with exit status: " + exitVal);
    } else {
        System.out.println("Test execution completed successfully");
    }
}
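A minimal sketch of how this helper might be driven from a test, assuming a MapredWork plan has already been assembled; buildTestPlan below is a hypothetical helper, not part of the project:

HiveConf hiveConf = new HiveConf();
MapredWork plan = buildTestPlan(); // hypothetical: assembles the test query plan
try {
    executePlan(plan, hiveConf);
} catch (WindowingException we) {
    // a non-zero exit status from the Hadoop task surfaces here
    System.err.println(we.getMessage());
}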
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class PTFOperator, method processOp.
@Override
public void processOp(Object row, int tag) throws HiveException {
    try {
        if (!isMapOperator) {
            /*
             * Check if the current row belongs to the currently accumulated Partition:
             * - if not, process the current Partition and reset the input Partition;
             * - set currentKeys to the new keys if currentKeys is null or has changed.
             */
            newKeys.getNewKey(row, inputPart.getOI());
            boolean keysAreEqual = (currentKeys != null && newKeys != null) ? newKeys.equals(currentKeys) : false;
            if (currentKeys != null && !keysAreEqual) {
                processInputPartition();
                inputPart = RuntimeUtils.createFirstPartitionForChain(qDef, inputObjInspectors[0], hiveConf, isMapOperator);
            }
            if (currentKeys == null || !keysAreEqual) {
                if (currentKeys == null) {
                    currentKeys = newKeys.copyKey();
                } else {
                    currentKeys.copyKey(newKeys);
                }
            }
        }
        // Add the row to the current Partition.
        inputPart.append(row);
    } catch (WindowingException we) {
        throw new HiveException("Cannot process PTFOperator.", we);
    }
}
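Stripped of the Hive object-inspector machinery, the partition-boundary pattern above reduces to the following sketch; Row, rows, extractKeys, processPartition, resetPartition, and append are hypothetical stand-ins for the operator's types and helpers:

List<Object> currentKeys = null;
for (Row row : rows) {                       // Row: hypothetical input record type
    List<Object> newKeys = extractKeys(row); // hypothetical key extractor
    boolean keysAreEqual = newKeys.equals(currentKeys);
    if (currentKeys != null && !keysAreEqual) {
        processPartition();                  // flush the accumulated partition
        resetPartition();                    // start a fresh input partition
    }
    if (currentKeys == null || !keysAreEqual) {
        currentKeys = newKeys;               // remember the boundary keys
    }
    append(row);                             // the row joins the current partition
}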
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class InputTranslation, method translate.
public static void translate(QueryDef qDef) throws WindowingException {
    QuerySpec spec = qDef.getSpec();
    /*
     * Validate that the input chain ends in a Hive Query or Table.
     */
    if (!spec.getInput().sourcedFromHive()) {
        throw new WindowingException("Translation not supported for HdfsLocation based queries");
    }
    EnsureTableFunctionInQuery.execute(qDef);
    SlidePartitionAndOrderSpecs.execute(qDef);
    TranslateInputSpecs.execute(qDef);
}
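Callers are expected to handle the WindowingException raised by the guard above; a hedged sketch of that caller-side handling, where buildQueryDef is a hypothetical factory for the parsed query:

QueryDef qDef = buildQueryDef(queryString); // hypothetical: parses and wraps the query
try {
    InputTranslation.translate(qDef);
} catch (WindowingException we) {
    // raised when the input chain is rooted in an HdfsLocation rather than Hive
    System.err.println("Translation failed: " + we.getMessage());
}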
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class InputTranslation, method translate (table-function overload).
/*
 * <ol>
 * <li> Get the <code>TableFunctionResolver</code> for this Function from the FunctionRegistry.
 * <li> Create the TableFuncDef object.
 * <li> Get the InputInfo for the input to this function.
 * <li> Translate the Arguments to this Function in the context of the InputInfo.
 * <li> Ask the TableFunctionResolver to create a TableFunctionEvaluator based on the Args passed in.
 * <li> Ask the TableFunctionEvaluator to set up the Map-side ObjectInspector. This gives functions that
 * reshape the Input before it is partitioned a chance to define the Shape after the raw data is transformed.
 * <li> Set up the Window Definition for this Function. The Window Definition is resolved w.r.t. the InputDef's
 * Shape, or the MapOI for Functions that reshape the raw input.
 * <li> Ask the TableFunctionEvaluator to set up the Output ObjectInspector for this Function.
 * <li> Set up a SerDe for the Output partition based on the OutputOI.
 * </ol>
 */
private static TableFuncDef translate(QueryDef qDef, TableFuncSpec tSpec, QueryInputDef inputDef) throws WindowingException {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver(tSpec.getName());
    if (tFn == null) {
        throw new WindowingException(sprintf("Unknown Table Function %s", tSpec.getName()));
    }
    TableFuncDef tDef = new TableFuncDef();
    tDef.setSpec(tSpec);
    tDef.setInput(inputDef);
    InputInfo iInfo = tInfo.getInputInfo(inputDef);
    /*
     * Translate the arguments.
     */
    ArrayList<ASTNode> args = tSpec.getArgs();
    if (args != null) {
        for (ASTNode expr : args) {
            ArgDef argDef = translateTableFunctionArg(qDef, tDef, iInfo, expr);
            tDef.addArg(argDef);
        }
    }
    tFn.initialize(qDef, tDef);
    TableFunctionEvaluator tEval = tFn.getEvaluator();
    tDef.setFunction(tEval);
    tFn.setupRawInputOI();
    tDef.setWindow(WindowSpecTranslation.translateWindow(qDef, tDef));
    tFn.setupOutputOI();
    TranslateUtils.setupSerdeAndOI(tDef, inputDef, tInfo, tEval);
    return tDef;
}
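The resolver lookup in step 1 is the only point at which an unknown function name is rejected; in isolation the guard looks like the sketch below, which reuses only calls already present above ("myFunc" is an arbitrary example name, assumed not to be registered):

// Sketch of the step-1 lookup and its error path; "myFunc" is an
// arbitrary example name, assumed not to be registered.
TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver("myFunc");
if (tFn == null) {
    throw new WindowingException(sprintf("Unknown Table Function %s", "myFunc"));
}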
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class WindowingClient, method setupConf.
private void setupConf(HiveConf cfg) throws WindowingException {
    String hiveHome = System.getenv("HIVE_HOME");
    // For testing purposes, fall back to the configuration entry.
    hiveHome = hiveHome == null ? cfg.get("HIVE_HOME") : hiveHome;
    if (hiveHome == null) {
        throw new WindowingException("Environment variable HIVE_HOME must be set.");
    }
    if (!hiveHome.endsWith("/")) {
        hiveHome += "/";
    }
    /*
     * Add jars to the SessionState.
     */
    SessionState ss = SessionState.get();
    for (String j : addedJars) {
        ss.add_resource(ResourceType.JAR, hiveHome + j);
    }
    /*
     * Set "submit via child JVM" to true.
     * Why? Because without this the CLI tries to print status based on the QueryPlan,
     * which is null for us.
     */
    cfg.setBoolean(ConfVars.SUBMITVIACHILD.toString(), true);
    cfg.setBoolean(Constants.WINDOWING_OUTPUT_QUERY_RESULT, true);
}
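Because the method falls back to the configuration when the environment variable is absent, a test can inject HIVE_HOME directly into the HiveConf; a minimal sketch, where the path is an arbitrary example:

HiveConf cfg = new HiveConf();
cfg.set("HIVE_HOME", "/opt/hive"); // test-only fallback read by setupConf
setupConf(cfg);                    // adds the jars and sets SUBMITVIACHILD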