Use of org.apache.calcite.rex.RexBuilder in project hive by apache: the HiveTableScan class, method project.
/**
 * Projects this table scan down to the requested subset of fields.
 *
 * @param fieldsUsed  ordinals of the fields the caller actually needs
 * @param extraFields virtual columns the caller wants added; must be null or
 *                    empty — Hive TS cannot add virtual columns dynamically
 * @param relBuilder  builder used to place the projection on top of the scan
 * @return this scan unchanged when the full schema is used, otherwise a
 *         synthetic {@code HiveProject} over a narrowed copy of the scan
 */
@Override
public RelNode project(ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields, RelBuilder relBuilder) {
  // 1. Reject dynamic addition of virtual cols. This check must run before
  //    extraFields is dereferenced below: the previous ordering called
  //    extraFields.isEmpty() first and would NPE on a null argument.
  if (extraFields != null && !extraFields.isEmpty()) {
    throw new RuntimeException("Hive TS does not support adding virtual columns dynamically");
  }
  // 2. If the schema is the same then bail out (extraFields is null or empty here).
  final int fieldCount = getRowType().getFieldCount();
  if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount))) {
    return this;
  }
  // 3. Create new TS schema that is a subset of original
  final List<RelDataTypeField> fields = getRowType().getFieldList();
  List<RelDataType> fieldTypes = new ArrayList<RelDataType>();
  List<String> fieldNames = new ArrayList<String>();
  List<RexNode> exprList = new ArrayList<RexNode>();
  RexBuilder rexBuilder = getCluster().getRexBuilder();
  for (int i : fieldsUsed) {
    RelDataTypeField field = fields.get(i);
    fieldTypes.add(field.getType());
    fieldNames.add(field.getName());
    exprList.add(rexBuilder.makeInputRef(this, i));
  }
  // 4. Build new TS
  HiveTableScan newHT = copy(getCluster().getTypeFactory().createStructType(fieldTypes, fieldNames));
  // 5. Add Proj on top of TS
  HiveProject hp = (HiveProject) relBuilder.push(newHT).project(exprList, new ArrayList<String>(fieldNames)).build();
  // 6. Set synthetic flag, so that we would push filter below this one
  hp.setSynthetic();
  return hp;
}
Use of org.apache.calcite.rex.RexBuilder in project druid by druid-io: the DruidPlanner class, method planWithBindableConvention.
/**
 * Plans the query under the bindable convention, executing it in-process.
 *
 * @param explain non-null when the query is an EXPLAIN; an explanation result
 *                is returned instead of an executable one
 * @param root    validated relational root produced by the planner
 * @return a {@link PlannerResult} that either explains or lazily executes the plan
 * @throws RelConversionException if the planner cannot transform the tree
 */
private PlannerResult planWithBindableConvention(final SqlExplain explain, final RelRoot root) throws RelConversionException {
  final RelTraitSet bindableTraits = planner.getEmptyTraitSet().replace(BindableConvention.INSTANCE).plus(root.collation);
  BindableRel bindableRel = (BindableRel) planner.transform(Rules.BINDABLE_CONVENTION_RULES, bindableTraits, root.rel);
  if (!root.isRefTrivial()) {
    // Add a projection on top to accommodate root.fields.
    final RexBuilder rexBuilder = bindableRel.getCluster().getRexBuilder();
    final List<RexNode> fieldRefs = new ArrayList<>();
    for (int fieldIndex : Pair.left(root.fields)) {
      fieldRefs.add(rexBuilder.makeInputRef(bindableRel, fieldIndex));
    }
    bindableRel = new Bindables.BindableProject(bindableRel.getCluster(), bindableRel.getTraitSet(), bindableRel, fieldRefs, root.validatedRowType);
  }
  if (explain != null) {
    return planExplanation(bindableRel, explain);
  }
  // Effectively-final copy so the supplier below can capture it.
  final BindableRel theRel = bindableRel;
  final DataContext dataContext = plannerContext.createDataContext((JavaTypeFactory) planner.getTypeFactory());
  // Bind lazily: execution happens only when the sequence is consumed.
  final Supplier<Sequence<Object[]>> resultsSupplier = () -> {
    final Enumerable enumerable = theRel.bind(dataContext);
    return Sequences.simple(enumerable);
  };
  return new PlannerResult(resultsSupplier, root.validatedRowType);
}
Use of org.apache.calcite.rex.RexBuilder in project drill by apache: the ConvertHiveParquetScanToDrillParquetScan class, method createProjectRel.
/**
 * Creates a projection that converts the native scan output to the expected
 * output of the Hive scan.
 *
 * @param hiveScanRel         original Hive scan whose row type defines the target schema
 * @param partitionColMapping map from Hive partition column name to directory column name
 * @param nativeScanRel       native Parquet scan the projection is placed on top of
 * @return a {@code DrillProjectRel} over the native scan with the Hive scan's row type
 */
private DrillProjectRel createProjectRel(final DrillScanRel hiveScanRel, final Map<String, String> partitionColMapping, final DrillScanRel nativeScanRel) {
  final RexBuilder rexBuilder = hiveScanRel.getCluster().getRexBuilder();
  final RelDataType hiveRowType = hiveScanRel.getRowType();
  final List<RexNode> conversionExprs = Lists.newArrayList();
  for (String fieldName : hiveRowType.getFieldNames()) {
    final String dirColName = partitionColMapping.get(fieldName);
    // Partition columns are cast from their directory column; all other
    // columns get a format conversion from the native representation.
    conversionExprs.add(dirColName == null
        ? createColumnFormatConversion(hiveScanRel, nativeScanRel, fieldName, rexBuilder)
        : createPartitionColumnCast(hiveScanRel, nativeScanRel, fieldName, dirColName, rexBuilder));
  }
  return DrillProjectRel.create(hiveScanRel.getCluster(), hiveScanRel.getTraitSet(), nativeScanRel, conversionExprs, hiveRowType);
}
Use of org.apache.calcite.rex.RexBuilder in project drill by apache: the DrillOptiqTest class, method testUnsupportedRexNode.
/* Method checks if we raise the appropriate error while dealing with RexNode that cannot be converted to
* equivalent Drill expressions
*/
/*
 * Checks that DrillOptiq.toDrill raises the appropriate error when given a
 * RexNode (here a RexOver window expression) that cannot be converted to an
 * equivalent Drill expression.
 */
@Test
public void testUnsupportedRexNode() {
  try {
    // Create the data type factory.
    RelDataTypeFactory relFactory = new SqlTypeFactoryImpl(DrillRelDataTypeSystem.DRILL_REL_DATATYPE_SYSTEM);
    // Create the rex builder.
    RexBuilder rex = new RexBuilder(relFactory);
    RelDataType anyType = relFactory.createSqlType(SqlTypeName.ANY);
    List<RexNode> emptyList = new LinkedList<>();
    // Renamed from "e" so it no longer reads like the exception variable in the catch below.
    ImmutableList<RexFieldCollation> orderKeys = ImmutableList.copyOf(new RexFieldCollation[0]);
    // Create a dummy RexOver object; window expressions are unsupported by DrillOptiq.
    RexNode window = rex.makeOver(anyType, SqlStdOperatorTable.AVG, emptyList, emptyList, orderKeys, null, null, true, false, false);
    DrillOptiq.toDrill(null, (RelNode) null, window);
  } catch (UserException e) {
    if (e.getMessage().contains(DrillOptiq.UNSUPPORTED_REX_NODE_ERROR)) {
      // Got the expected error; the test passes.
      return;
    }
    // Include the actual message so an unexpected failure is diagnosable.
    Assert.fail("Hit exception with unexpected error message: " + e.getMessage());
  }
  Assert.fail("Failed to raise the expected exception");
}
Use of org.apache.calcite.rex.RexBuilder in project drill by apache: the SqlConverter class, method toRel.
/**
 * Converts a validated SQL parse tree into a relational expression tree.
 *
 * @param validatedNode the SQL node, already validated
 * @return the converted, type-flattened, and decorrelated relational tree
 */
public RelNode toRel(final SqlNode validatedNode) {
  final RexBuilder rexBuilder = new DrillRexBuilder(typeFactory);
  if (planner == null) {
    // Lazily initialize the Volcano planner with Drill's trait definitions.
    planner = new VolcanoPlanner(costFactory, settings);
    planner.setExecutor(new DrillConstExecutor(functions, util, settings));
    planner.clearRelTraitDefs();
    planner.addRelTraitDef(ConventionTraitDef.INSTANCE);
    planner.addRelTraitDef(DrillDistributionTraitDef.INSTANCE);
    planner.addRelTraitDef(RelCollationTraitDef.INSTANCE);
  }
  final RelOptCluster cluster = RelOptCluster.create(planner, rexBuilder);
  final SqlToRelConverter converter = new SqlToRelConverter(new Expander(), validator, catalog, cluster, DrillConvertletTable.INSTANCE, sqlToRelConverterConfig);
  // Convert, flatten nested record types, then remove correlated subqueries.
  final RelNode converted = converter.convertQuery(validatedNode, false, !isInnerQuery);
  final RelNode flattened = converter.flattenTypes(converted, true);
  return RelDecorrelator.decorrelateQuery(flattened);
}
Aggregations