Usage of org.apache.flink.api.common.functions.FilterFunction in the Apache Flink project:
class NFAITCase, method testSimplePatternWithTimeoutHandling.
/**
 * Tests that the NFA successfully returns partially matched event sequences when they've timed
 * out.
 */
@Test
public void testSimplePatternWithTimeoutHandling() {
    // Input: two overlapping "start" events, one shared "middle", one non-matching event,
    // and two "end" events that arrive only after the 10ms window has elapsed.
    final List<StreamRecord<Event>> inputs = new ArrayList<>();
    inputs.add(new StreamRecord<Event>(new Event(1, "start", 1.0), 1));
    inputs.add(new StreamRecord<Event>(new Event(2, "start", 1.0), 2));
    inputs.add(new StreamRecord<Event>(new Event(3, "middle", 1.0), 3));
    inputs.add(new StreamRecord<Event>(new Event(4, "foobar", 1.0), 4));
    inputs.add(new StreamRecord<Event>(new Event(5, "end", 1.0), 11));
    inputs.add(new StreamRecord<Event>(new Event(6, "end", 1.0), 13));

    // Expected timeouts: for each "start" event both partial matches — (start, middle) and
    // (start) alone — tagged with the timestamp at which the timeout was detected.
    final Map<String, Event> partialStartMiddle1 = new HashMap<>();
    partialStartMiddle1.put("start", new Event(1, "start", 1.0));
    partialStartMiddle1.put("middle", new Event(3, "middle", 1.0));

    final Map<String, Event> partialStartMiddle2 = new HashMap<>();
    partialStartMiddle2.put("start", new Event(2, "start", 1.0));
    partialStartMiddle2.put("middle", new Event(3, "middle", 1.0));

    final Map<String, Event> partialStart1 = new HashMap<>();
    partialStart1.put("start", new Event(1, "start", 1.0));

    final Map<String, Event> partialStart2 = new HashMap<>();
    partialStart2.put("start", new Event(2, "start", 1.0));

    final Set<Tuple2<Map<String, Event>, Long>> expectedTimeouts = new HashSet<>();
    expectedTimeouts.add(Tuple2.of(partialStartMiddle1, 11L));
    expectedTimeouts.add(Tuple2.of(partialStartMiddle2, 13L));
    expectedTimeouts.add(Tuple2.of(partialStart1, 11L));
    expectedTimeouts.add(Tuple2.of(partialStart2, 13L));

    // Pattern: start -> middle -> end, all within 10ms.
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = 7907391379273505897L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = -3268741540234334074L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("middle");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = -8995174172182138608L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    }).within(Time.milliseconds(10));

    // Compile with timeout handling enabled (third argument = true).
    NFA<Event> nfa = NFACompiler.compile(pattern, Event.createTypeSerializer(), true);

    final List<Map<String, Event>> completedMatches = new ArrayList<>();
    final Set<Tuple2<Map<String, Event>, Long>> actualTimeouts = new HashSet<>();

    for (StreamRecord<Event> record : inputs) {
        Tuple2<Collection<Map<String, Event>>, Collection<Tuple2<Map<String, Event>, Long>>> result =
            nfa.process(record.getValue(), record.getTimestamp());
        completedMatches.addAll(result.f0);
        actualTimeouts.addAll(result.f1);
    }

    // Exactly one full match plus the four timed-out partial matches above.
    assertEquals(1, completedMatches.size());
    assertEquals(expectedTimeouts.size(), actualTimeouts.size());
    assertEquals(expectedTimeouts, actualTimeouts);
}
Usage of org.apache.flink.api.common.functions.FilterFunction in the Apache Flink project:
class NFAITCase, method testBranchingPattern.
@Test
public void testBranchingPattern() {
    // One start, three high-volume SubEvents eligible for "middle-first", two "next-one"
    // SubEvents for "middle-second", and one end event: every (middle-first, middle-second)
    // combination should match, giving 3 * 2 = 6 result patterns.
    final Event start = new Event(40, "start", 1.0);
    final SubEvent middle1 = new SubEvent(41, "foo1", 1.0, 10.0);
    final SubEvent middle2 = new SubEvent(42, "foo2", 1.0, 10.0);
    final SubEvent middle3 = new SubEvent(43, "foo3", 1.0, 10.0);
    final SubEvent next1 = new SubEvent(44, "next-one", 1.0, 2.0);
    final SubEvent next2 = new SubEvent(45, "next-one", 1.0, 2.0);
    final Event end = new Event(46, "end", 1.0);

    final List<StreamRecord<Event>> inputs = new ArrayList<>();
    inputs.add(new StreamRecord<Event>(start, 1));
    inputs.add(new StreamRecord<Event>(middle1, 3));
    inputs.add(new StreamRecord<Event>(middle2, 4));
    inputs.add(new StreamRecord<Event>(middle3, 5));
    inputs.add(new StreamRecord<Event>(next1, 6));
    inputs.add(new StreamRecord<Event>(next2, 7));
    inputs.add(new StreamRecord<Event>(end, 8));

    // Pattern: start -> SubEvent with volume > 5.0 -> SubEvent named "next-one" -> end.
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle-first").subtype(SubEvent.class).where(new FilterFunction<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getVolume() > 5.0;
        }
    }).followedBy("middle-second").subtype(SubEvent.class).where(new FilterFunction<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getName().equals("next-one");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });

    // No timeout handling needed for this test (third argument = false).
    NFA<Event> nfa = NFACompiler.compile(pattern, Event.createTypeSerializer(), false);

    final List<Map<String, Event>> resultingPatterns = new ArrayList<>();
    for (StreamRecord<Event> record : inputs) {
        resultingPatterns.addAll(nfa.process(record.getValue(), record.getTimestamp()).f0);
    }

    assertEquals(6, resultingPatterns.size());

    // Compare as sets of event sets because the order in which matches are produced
    // is not relevant to this test.
    final Set<Set<Event>> matchedEventSets = new HashSet<>();
    for (Map<String, Event> match : resultingPatterns) {
        matchedEventSets.add(new HashSet<>(match.values()));
    }

    assertEquals(
        Sets.newHashSet(
            Sets.newHashSet(start, middle1, next1, end),
            Sets.newHashSet(start, middle2, next1, end),
            Sets.newHashSet(start, middle3, next1, end),
            Sets.newHashSet(start, middle1, next2, end),
            Sets.newHashSet(start, middle2, next2, end),
            Sets.newHashSet(start, middle3, next2, end)),
        matchedEventSets);
}
Usage of org.apache.flink.api.common.functions.FilterFunction in the Apache Flink project:
class NFAITCase, method testSimplePatternWithTimeWindowNFA.
/**
 * Tests that the NFA successfully filters out expired elements with respect to the window
 * length
 */
@Test
public void testSimplePatternWithTimeWindowNFA() {
    // Events that are expected to form the single surviving match.
    final Event expectedStart = new Event(2, "start", 1.0);
    final Event expectedMiddle = new Event(3, "middle", 1.0);
    final Event expectedEnd = new Event(5, "end", 1.0);

    // Input stream: an earlier "start" at t=1 whose partial match should expire, the
    // matching start/middle pair, a non-matching event, and two "end" events.
    final List<StreamRecord<Event>> inputs = new ArrayList<>();
    inputs.add(new StreamRecord<Event>(new Event(1, "start", 1.0), 1));
    inputs.add(new StreamRecord<Event>(expectedStart, 2));
    inputs.add(new StreamRecord<Event>(expectedMiddle, 3));
    inputs.add(new StreamRecord<Event>(new Event(4, "foobar", 1.0), 4));
    inputs.add(new StreamRecord<Event>(expectedEnd, 11));
    inputs.add(new StreamRecord<Event>(new Event(6, "end", 1.0), 13));

    // Pattern: start -> middle -> end, all within a 10ms window.
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = 7907391379273505897L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = -3268741540234334074L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("middle");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {
        private static final long serialVersionUID = -8995174172182138608L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    }).within(Time.milliseconds(10));

    // Timeout handling disabled: expired partial matches are simply pruned.
    NFA<Event> nfa = NFACompiler.compile(pattern, Event.createTypeSerializer(), false);

    final List<Map<String, Event>> matches = new ArrayList<>();
    for (StreamRecord<Event> record : inputs) {
        matches.addAll(nfa.process(record.getValue(), record.getTimestamp()).f0);
    }

    // Only start@2 -> middle@3 -> end@11 fits inside the window.
    assertEquals(1, matches.size());
    final Map<String, Event> match = matches.get(0);
    assertEquals(expectedStart, match.get("start"));
    assertEquals(expectedMiddle, match.get("middle"));
    assertEquals(expectedEnd, match.get("end"));
}
Usage of org.apache.flink.api.common.functions.FilterFunction in the Apache Flink project:
class CEPITCase, method testTimeoutHandling.
@Test
public void testTimeoutHandling() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    // (Event, timestamp) pairs; timestamps are extracted below and each element emits a
    // punctuated watermark trailing its own timestamp by 5ms.
    DataStream<Event> input = env
        .fromElements(
            Tuple2.of(new Event(1, "start", 1.0), 1L),
            Tuple2.of(new Event(1, "middle", 2.0), 5L),
            Tuple2.of(new Event(1, "start", 2.0), 4L),
            Tuple2.of(new Event(1, "end", 2.0), 6L))
        .assignTimestampsAndWatermarks(new AssignerWithPunctuatedWatermarks<Tuple2<Event, Long>>() {

            @Override
            public long extractTimestamp(Tuple2<Event, Long> element, long currentTimestamp) {
                return element.f1;
            }

            @Override
            public Watermark checkAndGetNextWatermark(Tuple2<Event, Long> lastElement, long extractedTimestamp) {
                return new Watermark(lastElement.f1 - 5);
            }
        })
        .map(new MapFunction<Tuple2<Event, Long>, Event>() {

            @Override
            public Event map(Tuple2<Event, Long> value) throws Exception {
                return value.f0;
            }
        });

    // Pattern: start -> middle -> end within 3ms.
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").where(new FilterFunction<Event>() {

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("middle");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    }).within(Time.milliseconds(3));

    // Left side carries timed-out partial matches (start price only); Right side carries
    // completed matches as "startPrice,middlePrice,endPrice".
    DataStream<Either<String, String>> result = CEP.pattern(input, pattern).select(
        new PatternTimeoutFunction<Event, String>() {

            @Override
            public String timeout(Map<String, Event> pattern, long timeoutTimestamp) throws Exception {
                return pattern.get("start").getPrice() + "";
            }
        },
        new PatternSelectFunction<Event, String>() {

            @Override
            public String select(Map<String, Event> pattern) {
                return pattern.get("start").getPrice()
                    + "," + pattern.get("middle").getPrice()
                    + "," + pattern.get("end").getPrice();
            }
        });

    result.writeAsText(resultPath, FileSystem.WriteMode.OVERWRITE);

    // Expected output: three timed-out partials and one full match.
    expected = "Left(1.0)\nLeft(2.0)\nLeft(2.0)\nRight(2.0,2.0,2.0)";

    env.execute();
}
Usage of org.apache.flink.api.common.functions.FilterFunction in the Apache Flink project:
class CEPITCase, method testSimpleKeyedPatternCEP.
@Test
public void testSimpleKeyedPatternCEP() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(2);

    // Interleaved events for several keys (keyed by event id); only ids 2, 3 and 42 see a
    // complete start -> middle(SubEvent) -> end sequence.
    DataStream<Event> input = env
        .fromElements(
            new Event(1, "barfoo", 1.0),
            new Event(2, "start", 2.0),
            new Event(3, "start", 2.1),
            new Event(3, "foobar", 3.0),
            new SubEvent(4, "foo", 4.0, 1.0),
            new SubEvent(3, "middle", 3.2, 1.0),
            new Event(42, "start", 3.1),
            new SubEvent(42, "middle", 3.3, 1.2),
            new Event(5, "middle", 5.0),
            new SubEvent(2, "middle", 6.0, 2.0),
            new SubEvent(7, "bar", 3.0, 3.0),
            new Event(42, "42", 42.0),
            new Event(3, "end", 2.0),
            new Event(2, "end", 1.0),
            new Event(42, "end", 42.0))
        .keyBy(new KeySelector<Event, Integer>() {

            @Override
            public Integer getKey(Event value) throws Exception {
                return value.getId();
            }
        });

    // Pattern: start -> SubEvent named "middle" -> end, evaluated per key.
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").subtype(SubEvent.class).where(new FilterFunction<SubEvent>() {

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getName().equals("middle");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });

    // Emit each match as "startId,middleId,endId".
    DataStream<String> result = CEP.pattern(input, pattern).select(new PatternSelectFunction<Event, String>() {

        @Override
        public String select(Map<String, Event> pattern) {
            StringBuilder builder = new StringBuilder();
            builder.append(pattern.get("start").getId())
                .append(",").append(pattern.get("middle").getId())
                .append(",").append(pattern.get("end").getId());
            return builder.toString();
        }
    });

    result.writeAsText(resultPath, FileSystem.WriteMode.OVERWRITE);

    // the expected sequences of matching event ids
    expected = "2,2,2\n3,3,3\n42,42,42";

    env.execute();
}
Aggregations