Usage of org.apache.storm.trident.fluent.IAggregatableStream in the Apache Storm project: the compile method of the QueryPlanner class.
/**
 * Compiles the given SQL query into a Trident processor.
 *
 * <p>The query is first planned into a relational tree, which is then walked to
 * build the Trident topology; the planning results are captured and exposed
 * through the returned processor.
 *
 * @param sources mapping from qualified table name to its Trident data source
 * @param query   the SQL statement to compile
 * @return a processor exposing the built topology, its output stream, the data
 *         context, and the class loaders created while compiling expressions
 * @throws Exception if query planning or topology construction fails
 */
public AbstractTridentProcessor compile(Map<String, ISqlTridentDataSource> sources, String query) throws Exception {
    TridentRel relationalTree = getPlan(query);

    TridentPlanCreator planCreator = new TridentPlanCreator(sources, new RexBuilder(typeFactory));
    relationalTree.tridentPlan(planCreator);

    // Snapshot the planning results so the anonymous processor can return them.
    final TridentTopology builtTopology = planCreator.getTopology();
    final IAggregatableStream resultStream = planCreator.pop();
    final DataContext dataContext = planCreator.getDataContext();
    final List<CompilingClassLoader> classLoaders = planCreator.getClassLoaders();

    return new AbstractTridentProcessor() {
        @Override
        public TridentTopology build() {
            return builtTopology;
        }

        @Override
        public Stream outputStream() {
            return resultStream.toStream();
        }

        @Override
        public DataContext getDataContext() {
            return dataContext;
        }

        @Override
        public List<CompilingClassLoader> getClassLoaders() {
            return classLoaders;
        }
    };
}
Usage of org.apache.storm.trident.fluent.IAggregatableStream in the Apache Storm project: the tridentPlan method of the TridentFilterRel class.
@Override
public void tridentPlan(TridentPlanCreator planCreator) throws Exception {
// SingleRel
RelNode input = getInput();
StormRelUtils.getStormRelInput(input).tridentPlan(planCreator);
Stream inputStream = planCreator.pop().toStream();
String stageName = StormRelUtils.getStageName(this);
List<RexNode> childExps = getChildExps();
RelDataType inputRowType = getInput(0).getRowType();
String filterClassName = StormRelUtils.getClassName(this);
ExecutableExpression filterInstance = planCreator.createScalarInstance(childExps, inputRowType, filterClassName);
IAggregatableStream finalStream = inputStream.filter(new EvaluationFilter(filterInstance, planCreator.getDataContext())).name(stageName);
planCreator.addStream(finalStream);
}
Usage of org.apache.storm.trident.fluent.IAggregatableStream in the Apache Storm project: the tridentPlan method of the TridentStreamInsertRel class.
@Override
public void tridentPlan(TridentPlanCreator planCreator) throws Exception {
// SingleRel
RelNode input = getInput();
StormRelUtils.getStormRelInput(input).tridentPlan(planCreator);
Stream inputStream = planCreator.pop().toStream();
String stageName = StormRelUtils.getStageName(this);
Preconditions.checkArgument(isInsert(), "Only INSERT statement is supported.");
List<String> inputFields = this.input.getRowType().getFieldNames();
List<String> outputFields = getRowType().getFieldNames();
// FIXME: this should be really different...
String tableName = Joiner.on('.').join(getTable().getQualifiedName());
ISqlTridentDataSource.SqlTridentConsumer consumer = planCreator.getSources().get(tableName).getConsumer();
// In fact this is normally the end of stream, but I'm still not sure so I open new streams based on State values
IAggregatableStream finalStream = inputStream.partitionPersist(consumer.getStateFactory(), new Fields(inputFields), consumer.getStateUpdater(), new Fields(outputFields)).newValuesStream().name(stageName);
planCreator.addStream(finalStream);
}
Usage of org.apache.storm.trident.fluent.IAggregatableStream in the Apache Storm project: the tridentPlan method of the TridentStreamScanRel class.
@Override
public void tridentPlan(TridentPlanCreator planCreator) throws Exception {
String sourceName = Joiner.on('.').join(getTable().getQualifiedName());
// FIXME: this should be really different...
Map<String, ISqlTridentDataSource> sources = planCreator.getSources();
if (!sources.containsKey(sourceName)) {
throw new RuntimeException("Cannot find table " + sourceName);
}
String stageName = StormRelUtils.getStageName(this);
IAggregatableStream finalStream = planCreator.getTopology().newStream(stageName, sources.get(sourceName).getProducer()).parallelismHint(parallelismHint);
planCreator.addStream(finalStream);
}
Aggregations