use of org.apache.storm.tuple.Fields in project storm by apache.
the class TridentMinMaxOfVehiclesTopology method buildVehiclesTopology.
/**
 * Creates a topology which demonstrates min/max operations on a stream of tuples
 * containing vehicle and driver fields, with values {@link TridentMinMaxOfVehiclesTopology.Vehicle}
 * and {@link TridentMinMaxOfVehiclesTopology.Driver} respectively.
 */
public static StormTopology buildVehiclesTopology() {
    Fields driverField = new Fields(Driver.FIELD_NAME);
    Fields vehicleField = new Fields(Vehicle.FIELD_NAME);
    Fields allFields = new Fields(Vehicle.FIELD_NAME, Driver.FIELD_NAME);
    FixedBatchSpout spout = new FixedBatchSpout(allFields, 10, Vehicle.generateVehicles(20));
    spout.setCycle(true);
    TridentTopology topology = new TridentTopology();
    Stream vehiclesStream = topology.newStream("spout1", spout)
            .each(allFields, new Debug("##### vehicles"));
    Stream slowVehiclesStream = vehiclesStream.min(new SpeedComparator())
            .each(vehicleField, new Debug("#### slowest vehicle"));
    Stream slowDriversStream = slowVehiclesStream.project(driverField)
            .each(driverField, new Debug("##### slowest driver"));
    vehiclesStream.max(new SpeedComparator())
            .each(vehicleField, new Debug("#### fastest vehicle"))
            .project(driverField)
            .each(driverField, new Debug("##### fastest driver"));
    vehiclesStream.minBy(Vehicle.FIELD_NAME, new EfficiencyComparator())
            .each(vehicleField, new Debug("#### least efficient vehicle"));
    vehiclesStream.maxBy(Vehicle.FIELD_NAME, new EfficiencyComparator())
            .each(vehicleField, new Debug("#### most efficient vehicle"));
    return topology.build();
}
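For context, a minimal sketch of how this topology might be run on a local cluster. The topology name, config values, and run duration here are assumptions for illustration, not part of the Storm example:

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;

public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(20); // Trident: cap the number of in-flight batches
    // LocalCluster is AutoCloseable in Storm 2.x; on older versions call cluster.shutdown() instead
    try (LocalCluster cluster = new LocalCluster()) {
        cluster.submitTopology("vehicles-topology", conf, buildVehiclesTopology());
        Thread.sleep(60_000); // let the cycling spout emit a few batches before shutdown
    }
}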
use of org.apache.storm.tuple.Fields in project storm by apache.
the class TridentCalcRel method tridentPlan.
@Override
public void tridentPlan(TridentPlanCreator planCreator) throws Exception {
    // SingleRel
    RelNode input = getInput();
    StormRelUtils.getStormRelInput(input).tridentPlan(planCreator);
    Stream inputStream = planCreator.pop().toStream();
    String stageName = StormRelUtils.getStageName(this);
    RelDataType inputRowType = getInput(0).getRowType();
    List<String> outputFieldNames = getRowType().getFieldNames();
    int outputCount = outputFieldNames.size();
    // filter
    ExecutableExpression filterInstance = null;
    RexLocalRef condition = program.getCondition();
    if (condition != null) {
        RexNode conditionNode = program.expandLocalRef(condition);
        filterInstance = planCreator.createScalarInstance(Lists.newArrayList(conditionNode), inputRowType, StormRelUtils.getClassName(this));
    }
    // projection
    ExecutableExpression projectionInstance = null;
    List<RexLocalRef> projectList = program.getProjectList();
    if (projectList != null && !projectList.isEmpty()) {
        List<RexNode> expandedNodes = new ArrayList<>();
        for (RexLocalRef project : projectList) {
            expandedNodes.add(program.expandLocalRef(project));
        }
        projectionInstance = planCreator.createScalarInstance(expandedNodes, inputRowType, StormRelUtils.getClassName(this));
    }
    if (projectionInstance == null && filterInstance == null) {
        // this shouldn't happen
        throw new IllegalStateException("Either projection or condition, or both should be provided.");
    }
    final Stream finalStream = inputStream
            .flatMap(new EvaluationCalc(filterInstance, projectionInstance, outputCount, planCreator.getDataContext()), new Fields(outputFieldNames))
            .name(stageName);
    planCreator.addStream(finalStream);
}
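EvaluationCalc above is used as a Trident FlatMapFunction, so the Values it emits must line up positionally with the Fields built from outputFieldNames. A hypothetical FlatMapFunction illustrating that contract (UppercaseName and the "NAME" field are invented for this sketch):

import java.util.Collections;
import org.apache.storm.trident.operation.FlatMapFunction;
import org.apache.storm.trident.tuple.TridentTuple;
import org.apache.storm.tuple.Values;

public class UppercaseName implements FlatMapFunction {
    @Override
    public Iterable<Values> execute(TridentTuple input) {
        // Emit one tuple whose single value corresponds to the single declared output field
        return Collections.singletonList(new Values(input.getString(0).toUpperCase()));
    }
}

// usage: stream.flatMap(new UppercaseName(), new Fields("NAME"));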
use of org.apache.storm.tuple.Fields in project storm by apache.
the class TridentStreamInsertRel method tridentPlan.
@Override
public void tridentPlan(TridentPlanCreator planCreator) throws Exception {
    // SingleRel
    RelNode input = getInput();
    StormRelUtils.getStormRelInput(input).tridentPlan(planCreator);
    Stream inputStream = planCreator.pop().toStream();
    String stageName = StormRelUtils.getStageName(this);
    Preconditions.checkArgument(isInsert(), "Only INSERT statement is supported.");
    List<String> inputFields = this.input.getRowType().getFieldNames();
    List<String> outputFields = getRowType().getFieldNames();
    // FIXME: this should be really different...
    String tableName = Joiner.on('.').join(getTable().getQualifiedName());
    ISqlTridentDataSource.SqlTridentConsumer consumer = planCreator.getSources().get(tableName).getConsumer();
    // In fact this is normally the end of the stream, but since that's not certain,
    // we open a new stream based on the State values
    IAggregatableStream finalStream = inputStream
            .partitionPersist(consumer.getStateFactory(), new Fields(inputFields), consumer.getStateUpdater(), new Fields(outputFields))
            .newValuesStream()
            .name(stageName);
    planCreator.addStream(finalStream);
}
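The consumer's StateUpdater is what partitionPersist invokes per batch, and the values it emits are what newValuesStream() carries forward. A hypothetical sketch of that contract (ConsoleState and ConsoleStateUpdater are invented for illustration, not Storm SQL classes):

import java.util.List;
import org.apache.storm.trident.operation.TridentCollector;
import org.apache.storm.trident.state.BaseStateUpdater;
import org.apache.storm.trident.state.State;
import org.apache.storm.trident.tuple.TridentTuple;

class ConsoleState implements State {
    @Override public void beginCommit(Long txid) { }
    @Override public void commit(Long txid) { }
}

public class ConsoleStateUpdater extends BaseStateUpdater<ConsoleState> {
    @Override
    public void updateState(ConsoleState state, List<TridentTuple> tuples, TridentCollector collector) {
        for (TridentTuple tuple : tuples) {
            System.out.println("persisting: " + tuple); // stand-in for a real write to backing state
            collector.emit(tuple.getValues()); // re-emit so newValuesStream() sees the row
        }
    }
}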
use of org.apache.storm.tuple.Fields in project storm by apache.
the class TestPlanCompiler method testCaseStatement.
@Test
public void testCaseStatement() throws Exception {
    int EXPECTED_VALUE_SIZE = 5;
    String sql = "SELECT CASE WHEN NAME IN ('a', 'abc', 'abcde') THEN UPPER('a') "
            + "WHEN UPPER(NAME) = 'AB' THEN 'b' ELSE {fn CONCAT(NAME, '#')} END FROM FOO";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    // Empty Fields: the mock state updater declares no new output fields, so the persist acts as a sink
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    // Names in the IN list map to "A", 'AB' maps to 'b', anything else gets '#' appended
    Assert.assertArrayEquals(new Values[] { new Values("A"), new Values("b"), new Values("A"), new Values("abcd#"), new Values("A") }, getCollectedValues().toArray());
}
use of org.apache.storm.tuple.Fields in project storm by apache.
the class TestPlanCompiler method testCompile.
@Test
public void testCompile() throws Exception {
    final int EXPECTED_VALUE_SIZE = 2;
    String sql = "SELECT ID FROM FOO WHERE ID > 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    // Only rows with ID > 2 pass the filter
    Assert.assertArrayEquals(new Values[] { new Values(3), new Values(4) }, getCollectedValues().toArray());
}
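Since every example on this page revolves around org.apache.storm.tuple.Fields, here is a small standalone sketch of the Fields API itself; the field names and values are invented, while the methods shown are part of Fields:

import java.util.Arrays;
import java.util.List;
import org.apache.storm.tuple.Fields;

public class FieldsDemo {
    public static void main(String[] args) {
        Fields schema = new Fields("ID", "NAME");      // an ordered list of field names
        System.out.println(schema.size());             // 2
        System.out.println(schema.fieldIndex("NAME")); // 1
        List<Object> tuple = Arrays.asList(3, "abc");
        // select() picks the values of a sub-schema out of a tuple, by position
        System.out.println(schema.select(new Fields("ID"), tuple)); // [3]
    }
}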