Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.DataContext in the Apache Storm project:
class QueryPlanner, method compile.
/**
 * Compiles a SQL query into a Trident processor.
 *
 * <p>Plans the query into a Trident relational tree, walks that tree to build
 * the topology, then snapshots the resulting artifacts into an anonymous
 * {@link AbstractTridentProcessor} that simply hands them back.
 *
 * @param sources data sources keyed by table name
 * @param query   the SQL text to plan
 * @return a processor exposing the built topology, output stream, data
 *         context, and compiling class loaders
 * @throws Exception if planning or topology construction fails
 */
public AbstractTridentProcessor compile(Map<String, ISqlTridentDataSource> sources, String query) throws Exception {
    TridentRel relNode = getPlan(query);

    TridentPlanCreator planCreator = new TridentPlanCreator(sources, new RexBuilder(typeFactory));
    relNode.tridentPlan(planCreator);

    // Capture everything the returned processor needs; the locals must be
    // (effectively) final to be referenced from the anonymous class.
    final TridentTopology builtTopology = planCreator.getTopology();
    final IAggregatableStream finalStream = planCreator.pop();
    final DataContext dataContext = planCreator.getDataContext();
    final List<CompilingClassLoader> classLoaders = planCreator.getClassLoaders();

    return new AbstractTridentProcessor() {
        @Override
        public TridentTopology build() {
            return builtTopology;
        }

        @Override
        public Stream outputStream() {
            return finalStream.toStream();
        }

        @Override
        public DataContext getDataContext() {
            return dataContext;
        }

        @Override
        public List<CompilingClassLoader> getClassLoaders() {
            return classLoaders;
        }
    };
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.DataContext in the Apache Storm project:
class EvaluationFilter, method isKeep.
/**
 * Decides whether to keep a tuple by evaluating the compiled filter
 * expression against the tuple's values.
 *
 * <p>The tuple is kept only when the expression yields a non-null
 * {@code true}.
 */
@Override
public boolean isKeep(TridentTuple tuple) {
    Context ctx = new StormContext(dataContext);
    ctx.values = tuple.getValues().toArray();

    // The compiled filter writes its result into outputValues[0].
    filterInstance.execute(ctx, outputValues);

    Object result = outputValues[0];
    return result != null && (boolean) result;
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.DataContext in the Apache Storm project:
class EvaluationFunction, method execute.
/**
 * Applies the compiled projection expression to the input tuple and emits
 * the projected columns as a {@link Values} instance.
 */
@Override
public Values execute(TridentTuple input) {
    Context ctx = new StormContext(dataContext);
    ctx.values = input.getValues().toArray();

    // The compiled projection fills outputValues with one entry per column.
    projectionInstance.execute(ctx, outputValues);

    // Values takes varargs, so the array spreads into the emitted fields.
    return new Values(outputValues);
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.DataContext in the Druid project (druid-io):
class DruidPlanner, method planWithBindableConvention.
/**
 * Plans a query using Calcite's bindable convention (interpreter-style
 * execution rather than generated code).
 *
 * <p>Transforms the relational tree to {@link BindableConvention}, adds a
 * projection on top when the root's field references are non-trivial, and
 * either returns an explanation (for {@code EXPLAIN}) or a lazily-bound
 * result supplier.
 *
 * @param explain non-null when the query is an {@code EXPLAIN}
 * @param root    the validated relational root
 * @return the planner result wrapping either the explanation or the results
 * @throws RelConversionException if the transform to bindable convention fails
 */
private PlannerResult planWithBindableConvention(final SqlExplain explain, final RelRoot root) throws RelConversionException {
    BindableRel bindableRel = (BindableRel) planner.transform(
        Rules.BINDABLE_CONVENTION_RULES,
        planner.getEmptyTraitSet().replace(BindableConvention.INSTANCE).plus(root.collation),
        root.rel);

    if (!root.isRefTrivial()) {
        // Add a projection on top to accommodate root.fields.
        final List<RexNode> projects = new ArrayList<>();
        final RexBuilder rexBuilder = bindableRel.getCluster().getRexBuilder();
        for (int field : Pair.left(root.fields)) {
            projects.add(rexBuilder.makeInputRef(bindableRel, field));
        }
        bindableRel = new Bindables.BindableProject(
            bindableRel.getCluster(), bindableRel.getTraitSet(), bindableRel, projects, root.validatedRowType);
    }

    if (explain != null) {
        return planExplanation(bindableRel, explain);
    }

    final BindableRel theRel = bindableRel;
    final DataContext dataContext = plannerContext.createDataContext((JavaTypeFactory) planner.getTypeFactory());
    // Defer binding until the sequence is actually requested. Fix: BindableRel.bind
    // returns Enumerable<Object[]>, so use the typed form (the original used a raw
    // Enumerable, producing an unchecked call into Sequences.simple), and express
    // the single-method Supplier as a lambda.
    final Supplier<Sequence<Object[]>> resultsSupplier = () -> {
        final Enumerable<Object[]> enumerable = theRel.bind(dataContext);
        return Sequences.simple(enumerable);
    };
    return new PlannerResult(resultsSupplier, root.validatedRowType);
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.DataContext in the Apache Calcite project:
class TableScanNode, method createProjectableFilterable.
/**
 * Creates a {@link TableScanNode} over a {@link ProjectableFilterableTable},
 * pushing the given filters and projection down into the table's scan.
 *
 * <p>Runs a fixed-point loop: if a filter the table rejected references a
 * field that is not in the current projection, the projection is widened to
 * include that field and the scan is retried, so the rejected filter can be
 * applied on top of the scan later.
 *
 * @param compiler interpreter compiler supplying the {@link DataContext}
 * @param rel      the originating table scan
 * @param filters  filters to attempt to push into the table
 * @param projects fields to project, or null for all fields
 * @param pfTable  the table supporting filter/project pushdown
 * @return the scan node wrapping the (possibly re-projected) enumerable
 */
private static TableScanNode createProjectableFilterable(Compiler compiler, TableScan rel, ImmutableList<RexNode> filters, ImmutableIntList projects, ProjectableFilterableTable pfTable) {
  final DataContext root = compiler.getDataContext();
  // Remember the projection the caller asked for; `projects` may be widened
  // below, and we must detect that at the end to re-project the result.
  final ImmutableIntList originalProjects = projects;
  for (; ; ) {
    // scan() is allowed to mutate this list by removing the filters it
    // handles itself; whatever remains must be applied by the caller.
    final List<RexNode> mutableFilters = Lists.newArrayList(filters);
    final int[] projectInts;
    if (projects == null || projects.equals(TableScan.identity(rel.getTable()))) {
      // An identity projection is equivalent to no projection at all.
      projectInts = null;
    } else {
      projectInts = projects.toIntArray();
    }
    final Enumerable<Object[]> enumerable1 = pfTable.scan(root, mutableFilters, projectInts);
    // The table may only leave filters in place; any filter not among the
    // originals was invented by the table, which is a contract violation.
    for (RexNode filter : mutableFilters) {
      if (!filters.contains(filter)) {
        throw RESOURCE.filterableTableInventedFilter(filter.toString()).ex();
      }
    }
    // Fields referenced by the filters the table did not handle.
    final ImmutableBitSet usedFields = RelOptUtil.InputFinder.bits(mutableFilters, null);
    if (projects != null) {
      int changeCount = 0;
      for (int usedField : usedFields) {
        if (!projects.contains(usedField)) {
          // A field that is not projected is used in a filter that the
          // table rejected. We won't be able to apply the filter later.
          // Try again without any projects.
          projects = ImmutableIntList.copyOf(Iterables.concat(projects, ImmutableList.of(usedField)));
          ++changeCount;
        }
      }
      if (changeCount > 0) {
        // Projection grew to cover rejected-filter fields; redo the scan.
        continue;
      }
    }
    final Enumerable<Row> rowEnumerable = Enumerables.toRow(enumerable1);
    final ImmutableIntList rejectedProjects;
    if (Objects.equals(projects, originalProjects)) {
      // Projection unchanged: no extra re-projection is needed on top.
      rejectedProjects = null;
    } else {
      // We projected extra columns because they were needed in filters. Now
      // project the leading columns.
      rejectedProjects = ImmutableIntList.identity(originalProjects.size());
    }
    return createEnumerable(compiler, rel, rowEnumerable, projects, mutableFilters, rejectedProjects);
  }
}
Aggregations