Usage of org.apache.hadoop.hive.ql.exec.SelectOperator in the Apache Hive project: class TestColumnPrunerProcCtx, method testGetSelectNestedColPathsFromChildren6.
// Test select abs(root.col1.b) from table test(root struct<col1:struct<a:boolean,b:double>,
// col2:double>);
@Test
public void testGetSelectNestedColPathsFromChildren6() {
    ColumnPrunerProcCtx ctx = new ColumnPrunerProcCtx(null);

    // Build the nested field access root.col1.b that feeds the abs() call.
    ExprNodeDesc rootCol = new ExprNodeColumnDesc(col3Type, "root", "test", false);
    ExprNodeDesc col1Field = new ExprNodeFieldDesc(col1Type, rootCol, "col1", false);
    ExprNodeDesc bField =
        new ExprNodeFieldDesc(TypeInfoFactory.doubleTypeInfo, col1Field, "b", false);

    // Wrap the field access in a mocked abs() UDF invocation.
    GenericUDF absUdf = mock(GenericUDFBridge.class);
    List<ExprNodeDesc> children = new ArrayList<>();
    children.add(bField);
    ExprNodeDesc absCall =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.binaryTypeInfo, absUdf, "abs", children);

    final List<FieldNode> paths = Arrays.asList(new FieldNode("_col0"));
    SelectOperator selOp = buildSelectOperator(Arrays.asList(absCall), paths);

    // The pruner must trace the UDF argument down to the nested path root.col1.b.
    List<FieldNode> groups = ctx.getSelectColsFromChildren(selOp, paths);
    compareTestResults(groups, "root.col1.b");
}
Usage of org.apache.hadoop.hive.ql.exec.SelectOperator in the Apache Hive project: class TestColumnPrunerProcCtx, method buildSelectOperator.
/**
 * Builds a mocked {@link SelectOperator} whose conf is a real {@link SelectDesc}
 * constructed from the given column expressions and output field nodes.
 */
private SelectOperator buildSelectOperator(List<ExprNodeDesc> colList, List<FieldNode> outputCols) {
    SelectDesc desc = new SelectDesc(colList, ColumnPrunerProcCtx.toColumnNames(outputCols));
    desc.setSelStarNoCompute(false);
    SelectOperator op = mock(SelectOperator.class);
    when(op.getConf()).thenReturn(desc);
    return op;
}
Usage of org.apache.hadoop.hive.ql.exec.SelectOperator in the Apache Hive project: class TestColumnPrunerProcCtx, method testGetSelectNestedColPathsFromChildren5.
// Test select named_struct from named_struct:struct<a:boolean,b:double>
@Test
public void testGetSelectNestedColPathsFromChildren5() {
    ColumnPrunerProcCtx ctx = new ColumnPrunerProcCtx(null);

    // named_struct('a', ..., 'b', ...) built purely from constant expressions.
    List<ExprNodeDesc> structArgs = new ArrayList<>();
    structArgs.add(new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, "a"));
    structArgs.add(new ExprNodeConstantDesc(TypeInfoFactory.doubleTypeInfo, "b"));
    GenericUDF structUdf = mock(GenericUDF.class);
    ExprNodeDesc structCall =
        new ExprNodeGenericFuncDesc(col1Type, structUdf, "named_struct", structArgs);
    ExprNodeDesc fooField =
        new ExprNodeFieldDesc(TypeInfoFactory.doubleTypeInfo, structCall, "foo", false);

    final List<FieldNode> paths = Arrays.asList(new FieldNode("_col0"));
    SelectOperator selOp = buildSelectOperator(Arrays.asList(fooField), paths);
    List<FieldNode> groups = ctx.getSelectColsFromChildren(selOp, paths);

    // No column references exist beneath the field access (constants only),
    // so the pruner has nothing to report.
    assertEquals(0, groups.size());
}
Usage of org.apache.hadoop.hive.ql.exec.SelectOperator in the Apache Hive project: class TestColumnPrunerProcCtx, method testGetSelectNestedColPathsFromChildren2.
// Test select root.col1 from root:struct<col1:struct<a:boolean,b:double>,col2:double>
@Test
public void testGetSelectNestedColPathsFromChildren2() {
    ColumnPrunerProcCtx ctx = new ColumnPrunerProcCtx(null);

    // Direct single-level field access on a struct column: root.col1.
    ExprNodeDesc rootCol = new ExprNodeColumnDesc(col3Type, "root", "test", false);
    ExprNodeDesc col1Field = new ExprNodeFieldDesc(col1Type, rootCol, "col1", false);

    final List<FieldNode> paths = Arrays.asList(new FieldNode("_col0"));
    SelectOperator selOp = buildSelectOperator(Arrays.asList(col1Field), paths);

    // The pruner should resolve _col0 to the nested path root.col1.
    List<FieldNode> groups = ctx.getSelectColsFromChildren(selOp, paths);
    compareTestResults(groups, "root.col1");
}
Usage of org.apache.hadoop.hive.ql.exec.SelectOperator in the Apache Hive project: class MapJoinProcessor, method genSelectPlan.
/**
 * Splices a pass-through SELECT operator between the given map-join operator
 * and its children. The dummy select forwards every output column of the join
 * unchanged; it exists only so the operator-tree walker can split the map join
 * at this point later on.
 *
 * @param pctx  parse context (not read here; kept for overriding subclasses)
 * @param input the map-join operator under which the select is inserted
 * @throws SemanticException declared for overriding implementations
 */
protected void genSelectPlan(ParseContext pctx, MapJoinOperator input) throws SemanticException {
    // Detach the current children; they will be re-attached below the new select.
    List<Operator<? extends OperatorDesc>> childOps = input.getChildOperators();
    input.setChildOperators(null);

    RowSchema inputRS = input.getSchema();
    ArrayList<ExprNodeDesc> exprs = new ArrayList<>();
    ArrayList<String> outputs = new ArrayList<>();
    ArrayList<ColumnInfo> outputRS = new ArrayList<>();
    Map<String, ExprNodeDesc> colExprMap = new HashMap<>();

    // Mirror each join output column as an identity column expression so the
    // select's schema matches the join's output exactly.
    for (String internalName : input.getConf().getOutputColumnNames()) {
        ColumnInfo valueInfo = inputRS.getColumnInfo(internalName);
        ExprNodeDesc colDesc = new ExprNodeColumnDesc(valueInfo.getType(),
            valueInfo.getInternalName(), valueInfo.getTabAlias(), valueInfo.getIsVirtualCol());
        exprs.add(colDesc);
        outputs.add(internalName);
        ColumnInfo newCol = new ColumnInfo(internalName, valueInfo.getType(),
            valueInfo.getTabAlias(), valueInfo.getIsVirtualCol(), valueInfo.isHiddenVirtualCol());
        newCol.setAlias(valueInfo.getAlias());
        outputRS.add(newCol);
        colExprMap.put(internalName, colDesc);
    }

    SelectDesc select = new SelectDesc(exprs, outputs, false);
    SelectOperator sel =
        (SelectOperator) OperatorFactory.getAndMakeChild(select, new RowSchema(outputRS), input);
    sel.setColumnExprMap(colExprMap);

    // Insert the select operator in between: re-parent each original child
    // from the join to the new select.
    sel.setChildOperators(childOps);
    for (Operator<? extends OperatorDesc> ch : childOps) {
        ch.replaceParent(input, sel);
    }
}
Aggregations