Use of org.apache.hadoop.hive.ql.plan.CollectDesc in project hive by apache.
In class TestOperators, method testScriptOperator:
public void testScriptOperator() throws Throwable {
  try {
    System.out.println("Testing Script Operator");
    // col1
    ExprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");
    // col2
    ExprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
    ExprNodeDesc expr2 = new ExprNodeConstantDesc("1");
    ExprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
        expr1, expr2);
    // select operator to project these two columns
    ArrayList<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
    earr.add(exprDesc1);
    earr.add(exprDesc2);
    ArrayList<String> outputCols = new ArrayList<String>();
    for (int i = 0; i < earr.size(); i++) {
      outputCols.add("_col" + i);
    }
    SelectDesc selectCtx = new SelectDesc(earr, outputCols);
    Operator<SelectDesc> op = OperatorFactory.get(new CompilationOpContext(), SelectDesc.class);
    op.setConf(selectCtx);
    // scriptOperator to echo the output of the select
    TableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    TableDesc scriptInput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    ScriptDesc sd = new ScriptDesc("cat", scriptOutput, TextRecordWriter.class, scriptInput,
        TextRecordReader.class, TextRecordReader.class,
        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key"));
    Operator<ScriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
    // Collect operator to observe the output of the script
    CollectDesc cd = new CollectDesc(Integer.valueOf(10));
    CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, sop);
    op.initialize(new JobConf(TestOperators.class), new ObjectInspector[] { r[0].oi });
    // evaluate on row
    for (int i = 0; i < 5; i++) {
      op.process(r[i].o, 0);
    }
    op.close(false);
    InspectableObject io = new InspectableObject();
    for (int i = 0; i < 5; i++) {
      cdop.retrieve(io);
      System.out.println("[" + i + "] io.o=" + io.o);
      System.out.println("[" + i + "] io.oi=" + io.oi);
      StructObjectInspector soi = (StructObjectInspector) io.oi;
      assert (soi != null);
      StructField a = soi.getStructFieldRef("a");
      StructField b = soi.getStructFieldRef("b");
      assertEquals("" + (i + 1), ((PrimitiveObjectInspector) a.getFieldObjectInspector())
          .getPrimitiveJavaObject(soi.getStructFieldData(io.o, a)));
      assertEquals((i) + "1", ((PrimitiveObjectInspector) b.getFieldObjectInspector())
          .getPrimitiveJavaObject(soi.getStructFieldData(io.o, b)));
    }
    System.out.println("Script Operator ok");
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
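The retrieve loop above is the general pattern for reading buffered rows back out of a CollectOperator. A minimal helper along these lines is sketched below; the name drain and its HiveException throws clause are illustrative assumptions, and only the retrieve() call and the public o/oi fields already shown above are relied on.

// Hypothetical test helper: drain 'count' buffered rows from a CollectOperator.
// Each retrieve() call fills the passed InspectableObject with the next row (o)
// and its ObjectInspector (oi), exactly as in the loop above.
static List<InspectableObject> drain(CollectOperator cdop, int count) throws HiveException {
  List<InspectableObject> rows = new ArrayList<InspectableObject>(count);
  for (int i = 0; i < count; i++) {
    InspectableObject io = new InspectableObject();
    cdop.retrieve(io);
    rows.add(io);
  }
  return rows;
}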
Use of org.apache.hadoop.hive.ql.plan.CollectDesc in project hive by apache.
In class TestOperators, method testMapOperator:
public void testMapOperator() throws Throwable {
  try {
    System.out.println("Testing Map Operator");
    // initialize configuration
    JobConf hconf = new JobConf(TestOperators.class);
    hconf.set(MRJobConfig.MAP_INPUT_FILE, "hdfs:///testDir/testFile");
    IOContextMap.get(hconf).setInputPath(new Path("hdfs:///testDir/testFile"));
    // initialize pathToAliases
    ArrayList<String> aliases = new ArrayList<String>();
    aliases.add("a");
    aliases.add("b");
    LinkedHashMap<Path, ArrayList<String>> pathToAliases = new LinkedHashMap<>();
    pathToAliases.put(new Path("hdfs:///testDir"), aliases);
    // initialize pathToTableInfo
    // Default: treat the table as a single column "col"
    TableDesc td = Utilities.defaultTd;
    PartitionDesc pd = new PartitionDesc(td, null);
    LinkedHashMap<Path, org.apache.hadoop.hive.ql.plan.PartitionDesc> pathToPartitionInfo = new LinkedHashMap<>();
    pathToPartitionInfo.put(new Path("hdfs:///testDir"), pd);
    // initialize aliasToWork
    CompilationOpContext ctx = new CompilationOpContext();
    CollectDesc cd = new CollectDesc(Integer.valueOf(1));
    CollectOperator cdop1 = (CollectOperator) OperatorFactory.get(ctx, CollectDesc.class);
    cdop1.setConf(cd);
    CollectOperator cdop2 = (CollectOperator) OperatorFactory.get(ctx, CollectDesc.class);
    cdop2.setConf(cd);
    LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork =
        new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
    aliasToWork.put("a", cdop1);
    aliasToWork.put("b", cdop2);
    // initialize mapredWork
    MapredWork mrwork = new MapredWork();
    mrwork.getMapWork().setPathToAliases(pathToAliases);
    mrwork.getMapWork().setPathToPartitionInfo(pathToPartitionInfo);
    mrwork.getMapWork().setAliasToWork(aliasToWork);
    // get map operator and initialize it
    MapOperator mo = new MapOperator(new CompilationOpContext());
    mo.initializeAsRoot(hconf, mrwork.getMapWork());
    Text tw = new Text();
    InspectableObject io1 = new InspectableObject();
    InspectableObject io2 = new InspectableObject();
    for (int i = 0; i < 5; i++) {
      String answer = "[[" + i + ", " + (i + 1) + ", " + (i + 2) + "]]";
      // fields separated by the default Ctrl-A (\u0001) delimiter expected by Utilities.defaultTd
      tw.set("" + i + "\u0001" + (i + 1) + "\u0001" + (i + 2));
      mo.process(tw);
      cdop1.retrieve(io1);
      cdop2.retrieve(io2);
      System.out.println("io1.o.toString() = " + io1.o.toString());
      System.out.println("io2.o.toString() = " + io2.o.toString());
      System.out.println("answer.toString() = " + answer.toString());
      assertEquals(answer.toString(), io1.o.toString());
      assertEquals(answer.toString(), io2.o.toString());
    }
    System.out.println("Map Operator ok");
  } catch (Throwable e) {
    e.printStackTrace();
    throw (e);
  }
}
Use of org.apache.hadoop.hive.ql.plan.CollectDesc in project hive by apache.
In class BaseScalarUdfTest, method testUdf:
/**
 * This method drives the test. It takes the data from getBaseTable() and
 * feeds it through a SELECT operator followed by a COLLECT operator. Each
 * row produced by the collect operator is compared to getExpectedResult(),
 * and if every row matches the expected result the method completes without
 * an assertion failure.
 * @throws HiveException
 */
public final void testUdf() throws HiveException {
  InspectableObject[] data = getBaseTable();
  List<ExprNodeDesc> expressionList = getExpressionList();
  SelectDesc selectCtx = new SelectDesc(expressionList,
      OperatorTestUtils.createOutputColumnNames(expressionList));
  Operator<SelectDesc> op = OperatorFactory.get(new CompilationOpContext(), SelectDesc.class);
  op.setConf(selectCtx);
  CollectDesc cd = new CollectDesc(Integer.valueOf(10));
  CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, op);
  op.initialize(new JobConf(OperatorTestUtils.class), new ObjectInspector[] { data[0].oi });
  OperatorTestUtils.assertResults(op, cdop, data, getExpectedResult());
}
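The listing above shows only the driver; a concrete test supplies the base table, the expression list, and the expected rows by overriding the three abstract hooks called in testUdf(). A minimal hypothetical subclass is sketched below. The class name, column names, the assumption that getExpectedResult() returns InspectableObject[], the throws clause on getExpressionList(), and the expected-row representation (compared by assertResults via the ObjectInspectors) are illustrative assumptions, not taken from the Hive sources.

// Hypothetical BaseScalarUdfTest subclass: one string column "a" projected
// through concat(a, "1"), mirroring the expression built in testScriptOperator above.
public class TestConcatScalarUdf extends BaseScalarUdfTest {

  // Build a single-field struct row; o/oi are the public fields read by the driver.
  private static InspectableObject row(Object data, ObjectInspector oi) {
    InspectableObject io = new InspectableObject();
    io.o = data;
    io.oi = oi;
    return io;
  }

  // Standard struct inspector with one Java-string field.
  private static StructObjectInspector stringRowOI(String colName) {
    return ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList(colName),
        Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaStringObjectInspector));
  }

  @Override
  public InspectableObject[] getBaseTable() {
    StructObjectInspector oi = stringRowOI("a");
    return new InspectableObject[] {
        row(Arrays.asList("x"), oi),
        row(Arrays.asList("y"), oi) };
  }

  @Override
  public List<ExprNodeDesc> getExpressionList() throws UDFArgumentException {
    // concat(a, "1"), built the same way as in testScriptOperator.
    ExprNodeDesc col = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "a", "", false);
    ExprNodeDesc lit = new ExprNodeConstantDesc("1");
    return Arrays.<ExprNodeDesc>asList(
        TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", col, lit));
  }

  @Override
  public InspectableObject[] getExpectedResult() {
    StructObjectInspector oi = stringRowOI("_col0");
    return new InspectableObject[] {
        row(Arrays.asList("x1"), oi),
        row(Arrays.asList("y1"), oi) };
  }
}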