use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
the class WindowingClient method executeHiveQuery.
public void executeHiveQuery(String hQry) throws WindowingException {
    hiveConsole.printInfo(sprintf("Executing Embedded Hive Query:\n %s\n", hQry));
    int rc = 0;
    try {
        rc = hiveDriver.processEmbeddedQuery(hQry);
    } catch (Throwable t) {
        throw new WindowingException(sprintf("Error while executing hive query '%s'", hQry), t);
    } finally {
        hiveConsole.printInfo(sprintf("Finished Executing Embedded Hive Query\n"));
    }
    if (rc != 0) {
        throw new WindowingException(sprintf("Failed to execute query '%s', return = %d", hQry, rc));
    }
}
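A minimal caller sketch for the method above, assuming a WindowingClient instance has already been constructed elsewhere; the query text and the helper method are hypothetical:

// Hedged usage sketch: only executeHiveQuery(String) and WindowingException come
// from the snippet above; the query and the wrapper method are illustrative.
void createDemoTable(WindowingClient client) {
    String hQry = "CREATE TABLE part_demo AS SELECT * FROM part"; // hypothetical query
    try {
        client.executeHiveQuery(hQry);
    } catch (WindowingException we) {
        // Both a Throwable from the driver and a non-zero return code end up here.
        System.err.println("Embedded Hive query failed: " + we.getMessage());
    }
}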
use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
the class WindowingClient method setupConf.
private void setupConf(HiveConf cfg) throws WindowingException {
    String hiveHome = System.getenv("HIVE_HOME");
    // for testing purposes
    hiveHome = hiveHome == null ? cfg.get("HIVE_HOME") : hiveHome;
    if (hiveHome == null) {
        throw new WindowingException("Environment variable HIVE_HOME must be set.");
    }
    if (!hiveHome.endsWith("/")) {
        hiveHome += "/";
    }
    /*
     * add jars to SessionState
     */
    SessionState ss = SessionState.get();
    for (String j : addedJars) {
        ss.add_resource(ResourceType.JAR, hiveHome + j);
    }
    /*
     * Run the query in a child JVM; without this the CLI tries to print
     * status based on the QueryPlan, which is null for us.
     */
    cfg.setBoolean(ConfVars.SUBMITVIACHILD.toString(), true);
    cfg.setBoolean(Constants.WINDOWING_OUTPUT_QUERY_RESULT, true);
}
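Because HIVE_HOME is read from the environment first and from the HiveConf only as a fallback, a test can exercise the fallback path like this (a sketch; the path is hypothetical):

// Hedged sketch: assumes no HIVE_HOME environment variable is set, as in a unit test.
HiveConf cfg = new HiveConf();
cfg.set("HIVE_HOME", "/opt/hive-0.9.0"); // picked up by cfg.get("HIVE_HOME") above
// setupConf(cfg) would then resolve the added jars under "/opt/hive-0.9.0/";
// if neither the environment nor the conf supplies a value, it throws WindowingException.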
use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
the class IOUtils method createFileWindowingInput.
@SuppressWarnings("unchecked")
public static WindowingInput createFileWindowingInput(String path, String inputFormatClassName,
        String serDeClassName, Properties serDeProperties, Configuration conf) throws WindowingException {
    try {
        HiveConf hConf = new HiveConf(conf, IOUtils.class);
        JobConf job = new JobConf(hConf);
        Path p = new Path(path);
        p = makeQualified(p, conf);
        Class<? extends InputFormat<? extends Writable, ? extends Writable>> inputFormatClass =
                (Class<? extends InputFormat<? extends Writable, ? extends Writable>>) Class.forName(inputFormatClassName);
        hConf.setClass("mapred.input.format.class", inputFormatClass, InputFormat.class);
        hConf.set(INPUT_INPUTFORMAT_CLASS, inputFormatClass.getName());
        InputFormat<? extends Writable, ? extends Writable> iFmt = inputFormatClass.newInstance();
        if (iFmt instanceof TextInputFormat) {
            ((TextInputFormat) iFmt).configure(job);
        }
        FileInputFormat.addInputPath(job, p);
        InputSplit[] iSplits = iFmt.getSplits(job, 1);
        org.apache.hadoop.mapred.RecordReader<Writable, Writable> rdr =
                (org.apache.hadoop.mapred.RecordReader<Writable, Writable>) iFmt.getRecordReader(iSplits[0], job, Reporter.NULL);
        hConf.set(INPUT_PATH, path);
        hConf.set(INPUT_KEY_CLASS, rdr.createKey().getClass().getName());
        hConf.set(INPUT_VALUE_CLASS, rdr.createValue().getClass().getName());
        hConf.set(INPUT_SERDE_CLASS, serDeClassName);
        TableWindowingInput tIn = new TableWindowingInput();
        tIn.initialize(null, hConf, serDeProperties);
        return tIn;
    } catch (Exception e) {
        throw new WindowingException(e);
    }
}
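A hedged sketch of calling this factory; the HDFS path, the column layout, and the pairing of TextInputFormat with LazySimpleSerDe are illustrative assumptions, not taken from the project:

// Hedged usage sketch for IOUtils.createFileWindowingInput.
Properties serDeProps = new Properties();
serDeProps.setProperty("columns", "p_partkey,p_name");    // standard Hive SerDe keys
serDeProps.setProperty("columns.types", "int,string");
WindowingInput wIn = IOUtils.createFileWindowingInput(
        "/user/hive/warehouse/part_demo",                  // hypothetical path
        "org.apache.hadoop.mapred.TextInputFormat",
        "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
        serDeProps,
        new Configuration());
// Any reflection, split, or SerDe failure inside the factory surfaces as a single WindowingException.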
use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
the class MRWindowingInput method initialize.
void initialize(Configuration conf, String serDeClassName, Properties tbl) throws WindowingException {
    if (serDeClassName == null) {
        throw new WindowingException("Cannot initialize MRWindowingInput, serDeClass is null");
    }
    try {
        serDe = (SerDe) SerDeUtils.lookupDeserializer(serDeClassName);
        serDe.initialize(conf, tbl);
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        LOG.error("error in initSerDe: " + e.getClass().getName() + " " + e.getMessage());
        throw new WindowingException(e.getClass().getName() + " " + e.getMessage());
    }
}
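A sketch of how a same-package caller might drive this initializer (the method is package-private); the no-arg constructor, the SerDe class name, and the table properties are assumptions:

MRWindowingInput input = new MRWindowingInput();           // assumed no-arg constructor
Properties tbl = new Properties();
tbl.setProperty("columns", "p_partkey,p_name");
tbl.setProperty("columns.types", "int,string");
try {
    input.initialize(new Configuration(),
            "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", tbl);
} catch (WindowingException we) {
    // A null SerDe class name or a failed lookup/initialize lands here;
    // RuntimeExceptions from the SerDe are deliberately rethrown unwrapped.
    System.err.println("SerDe setup failed: " + we.getMessage());
}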
use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
the class MRWindowingInput method initialize.
public void initialize(InputStream in1, Configuration conf, Properties tbl) throws IOException {
    String dbName = null;
    if (conf.get(Constants.WINDOWING_INPUT_DATABASE) != null)
        dbName = conf.get(Constants.WINDOWING_INPUT_DATABASE);
    String tableName = conf.get(Constants.WINDOWING_INPUT_TABLE);
    try {
        serDe = (SerDe) HiveUtils.getDeserializer(dbName, tableName, conf);
    } catch (WindowingException we) {
        throw new IOException(we);
    }
}
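This overload reads the database and table name from the Configuration, so a caller only has to populate those keys first. A hedged sketch follows: the names are hypothetical, the instance (here windowingInput) is assumed to be constructed already, and null is passed for the InputStream as the file-based factory above also does:

Configuration conf = new Configuration();
conf.set(Constants.WINDOWING_INPUT_DATABASE, "default");   // hypothetical database
conf.set(Constants.WINDOWING_INPUT_TABLE, "part_demo");    // hypothetical table
try {
    windowingInput.initialize(null, conf, new Properties());
} catch (IOException ioe) {
    // The signature only declares IOException, so the WindowingException thrown
    // by HiveUtils.getDeserializer arrives wrapped as the cause.
    Throwable cause = ioe.getCause();
}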