Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class ListFactory, method createList.
@SuppressWarnings("unchecked")
public static ByteBasedList createList(String clsName, int capacity) throws WindowingException {
    try {
        // Load the named ByteBasedList subclass and instantiate it via its (int capacity) constructor.
        Class<? extends ByteBasedList> cls = (Class<? extends ByteBasedList>) Class.forName(clsName);
        Constructor<? extends ByteBasedList> cons = cls.getConstructor(Integer.TYPE);
        return cons.newInstance(capacity);
    } catch (Exception e) {
        // Wrap any reflection failure (unknown class, missing constructor, ...) in a WindowingException.
        throw new WindowingException(e);
    }
}
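A minimal call-site sketch, assuming the implementation class name is read from job configuration; the configuration key and fallback below are illustrative, not part of the project:

    // Hypothetical call site: the concrete list class is chosen via configuration.
    String listClsName = conf.get("windowing.partition.list.class",   // illustrative key
            ByteBasedList.class.getName());                           // assumes this class is instantiable
    ByteBasedList list = ListFactory.createList(listClsName, 10000);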
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class HiveUtils, method getRowResolver (ObjectInspector overload).
public static RowResolver getRowResolver(String tabAlias, StructObjectInspector rowObjectInspector) throws WindowingException {
    LOG.info("HiveUtils::getRowResolver invoked on ObjectInspector");
    try {
        RowResolver rwsch = new RowResolver();
        List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
        for (int i = 0; i < fields.size(); i++) {
            // Register each struct field under the given alias, deriving its TypeInfo
            // from the field's ObjectInspector.
            rwsch.put(tabAlias, fields.get(i).getFieldName(),
                    new ColumnInfo(fields.get(i).getFieldName(),
                            TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector()),
                            tabAlias, false));
        }
        return rwsch;
    } catch (Exception me) {
        throw new WindowingException(me);
    }
}
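A sketch of how this overload might be used, assuming a StructObjectInspector obtained from a table's deserializer; the tbl variable is illustrative:

    // Hypothetical usage: build a RowResolver for rows described by an ObjectInspector.
    StructObjectInspector rowOI =
            (StructObjectInspector) tbl.getDeserializer().getObjectInspector(); // throws SerDeException; handle as in the method above
    RowResolver rr = HiveUtils.getRowResolver("t", rowOI);
    // rr now maps ("t", fieldName) -> ColumnInfo for every field in rowOI.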
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class HiveUtils, method getRowResolver (database/table overload).
public static RowResolver getRowResolver(String db, String table, String alias, HiveConf conf) throws WindowingException {
    LOG.info("HiveUtils::getRowResolver invoked on " + table);
    try {
        HiveMetaStoreClient client = getClient(conf);
        db = validateDB(client, db);
        org.apache.hadoop.hive.ql.metadata.Table t = Hive.get(conf).getTable(db, table);
        StructObjectInspector rowObjectInspector = (StructObjectInspector) t.getDeserializer().getObjectInspector();
        // Start from the regular columns, then add partition and virtual columns.
        RowResolver rwsch = getRowResolver(alias, rowObjectInspector);
        for (FieldSchema part_col : t.getPartCols()) {
            LOG.trace("Adding partition col: " + part_col);
            rwsch.put(alias, part_col.getName(),
                    new ColumnInfo(part_col.getName(), TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true));
        }
        Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
        // collect the virtual columns in a list so the set is easy to customize
        List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
        while (vcs.hasNext()) {
            VirtualColumn vc = vcs.next();
            rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(), vc.getTypeInfo(), alias, true, vc.getIsHidden()));
            vcList.add(vc);
        }
        return rwsch;
    } catch (WindowingException w) {
        // Pass our own exceptions through untouched; wrap everything else.
        throw w;
    } catch (Exception me) {
        throw new WindowingException(me);
    }
}
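A hedged usage sketch; the database and table names below are placeholders:

    // Hypothetical driver code: resolve the schema of default.sales under alias "s".
    HiveConf hCfg = new HiveConf();
    RowResolver rr = HiveUtils.getRowResolver("default", "sales", "s", hCfg);
    for (ColumnInfo ci : rr.getColumnInfos()) {
        System.out.println(ci.getInternalName() + " : " + ci.getType()); // includes partition and virtual columns
    }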
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class IOUtils, method createTableWindowingInput.
@SuppressWarnings("unchecked")
public static WindowingInput createTableWindowingInput(String dbName, String tableName, Configuration conf) throws WindowingException {
    try {
        HiveMetaStoreClient client = HiveUtils.getClient(conf);
        String db = HiveUtils.validateDB(client, dbName);
        Table t = HiveUtils.getTable(client, db, tableName);
        StorageDescriptor sd = t.getSd();
        HiveConf hConf = new HiveConf(conf, IOUtils.class);
        JobConf job = new JobConf(hConf);
        // Instantiate the table's InputFormat and record it in the configuration.
        Class<? extends InputFormat<? extends Writable, ? extends Writable>> inputFormatClass =
                (Class<? extends InputFormat<? extends Writable, ? extends Writable>>) Class.forName(sd.getInputFormat());
        hConf.setClass("mapred.input.format.class", inputFormatClass, InputFormat.class);
        hConf.set(INPUT_INPUTFORMAT_CLASS, inputFormatClass.getName());
        InputFormat<? extends Writable, ? extends Writable> iFmt = inputFormatClass.newInstance();
        if (iFmt instanceof TextInputFormat) {
            ((TextInputFormat) iFmt).configure(job);
        }
        Path p = new Path(sd.getLocation());
        /*
         * Convert the Path in the StorageDescriptor into a Path in the current FileSystem.
         * Used in testing: jobs run on a MiniDFSCluster, whereas the Hive metadata refers to a real cluster.
         */
        p = makeQualified(p, conf);
        FileInputFormat.addInputPath(job, p);
        InputSplit[] iSplits = iFmt.getSplits(job, 1);
        // Open a reader on the first split just to discover the key/value classes.
        org.apache.hadoop.mapred.RecordReader<Writable, Writable> rdr =
                (org.apache.hadoop.mapred.RecordReader<Writable, Writable>) iFmt.getRecordReader(iSplits[0], job, Reporter.NULL);
        hConf.set(INPUT_PATH, sd.getLocation());
        hConf.set(INPUT_KEY_CLASS, rdr.createKey().getClass().getName());
        hConf.set(INPUT_VALUE_CLASS, rdr.createValue().getClass().getName());
        hConf.set(INPUT_SERDE_CLASS, sd.getSerdeInfo().getSerializationLib());
        TableWindowingInput tIn = new TableWindowingInput();
        tIn.initialize(null, hConf, MetaStoreUtils.getSchema(t));
        return tIn;
    } catch (WindowingException w) {
        throw w;
    } catch (Exception e) {
        throw new WindowingException(e);
    }
}
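A minimal call-site sketch; the table name is a placeholder, and passing null for the database name is assumed here to let validateDB fall back to the default database:

    // Hypothetical usage: open a windowing input over an existing Hive table.
    Configuration conf = new Configuration();
    WindowingInput wIn = IOUtils.createTableWindowingInput(null, "sales", conf);
    // wIn is initialized with the table's InputFormat, SerDe and schema and is
    // ready for the windowing runtime to iterate.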
Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.
The class TranslateUtils, method setupSerdeAndOI.
/**
 * For NOOP table functions, the serde is the same as that on the input
 * Hive table; for other table functions it is the lazy binary serde.
 * If the query has a map phase, the map OI is set to the OI on the
 * lazy binary serde, unless the table function is a NOOP_MAP_TABLE_FUNCTION
 * (in which case it is set to the OI on the serde of the input Hive
 * table definition).
 * @param tDef
 * @param inputDef
 * @param tInfo
 * @param tEval
 * @throws WindowingException
 */
public static void setupSerdeAndOI(TableFuncDef tDef, QueryInputDef inputDef, QueryTranslationInfo tInfo, TableFunctionEvaluator tEval) throws WindowingException {
    /*
     * Setup the SerDe.
     */
    SerDe serde = null;
    // NOOP functions hand rows through unchanged, so reuse the input serde;
    // any other function serializes its output with the lazy binary serde
    // before handing it to the next function in the chain.
    if (tDef.getName().equals(FunctionRegistry.NOOP_TABLE_FUNCTION) || tDef.getName().equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
        serde = inputDef.getSerde();
    } else {
        serde = TranslateUtils.createLazyBinarySerDe(tInfo.getHiveCfg(), tEval.getOutputOI());
    }
    tDef.setSerde(serde);
    try {
        tDef.setOI((StructObjectInspector) serde.getObjectInspector());
    } catch (SerDeException se) {
        throw new WindowingException(se);
    }
    if (tEval.isTransformsRawInput()) {
        // The function also reshapes its raw (map-side) input; pick the map serde the same way.
        if (tDef.getName().equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
            serde = inputDef.getSerde();
        } else {
            serde = TranslateUtils.createLazyBinarySerDe(tInfo.getHiveCfg(), tEval.getRawInputOI());
        }
        try {
            tDef.setMapOI((StructObjectInspector) serde.getObjectInspector());
        } catch (SerDeException se) {
            throw new WindowingException(se);
        }
    }
}
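A hedged sketch of the call during query translation; the getters used to read the results back are assumptions inferred from the setters above, not confirmed project API:

    // Hypothetical translation step: once the evaluator's output OI is known,
    // wire the serde and ObjectInspectors onto the table-function definition.
    TranslateUtils.setupSerdeAndOI(tDef, inputDef, tInfo, tEval);
    StructObjectInspector outOI = tDef.getOI();    // assumed getter matching setOI above
    StructObjectInspector mapOI = tDef.getMapOI(); // assumed getter; only set when isTransformsRawInput() is true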