Example usage of org.apache.flink.api.java.tuple.Tuple4 in the apache/flink project:
class SortPartitionTest, method testSortPartitionWithKeySelector1.
/**
 * Verifies that {@code sortPartition} accepts a {@link KeySelector} that extracts an
 * {@code Integer} key from a {@code Tuple4} element, combined with ascending order.
 */
@Test
public void testSortPartitionWithKeySelector1() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple4<Integer, Long, CustomType, Long[]>> tupleDs =
            env.fromCollection(tupleWithCustomData, tupleWithCustomInfo);
    // should work
    try {
        tupleDs.sortPartition(new KeySelector<Tuple4<Integer, Long, CustomType, Long[]>, Integer>() {
            @Override
            public Integer getKey(Tuple4<Integer, Long, CustomType, Long[]> value) throws Exception {
                return value.f0;
            }
        }, Order.ASCENDING);
    } catch (Exception e) {
        // Fail with the exception message instead of a bare fail() so the cause
        // shows up in the test report rather than being silently discarded.
        Assert.fail(e.getMessage());
    }
}
Example usage of org.apache.flink.api.java.tuple.Tuple4 in the apache/flink project:
class SortPartitionTest, method testSortPartitionWithPositionKeys4.
/**
 * Verifies that {@code sortPartition} rejects tuple position 3, whose type is
 * {@code Long[]}: the expected {@link InvalidProgramException} must be thrown.
 */
@Test(expected = InvalidProgramException.class)
public void testSortPartitionWithPositionKeys4() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final DataSet<Tuple4<Integer, Long, CustomType, Long[]>> input =
            env.fromCollection(tupleWithCustomData, tupleWithCustomInfo);
    // must not work: field 3 is an array-typed field
    input.sortPartition(3, Order.ASCENDING);
}
Example usage of org.apache.flink.api.java.tuple.Tuple4 in the apache/flink project:
class TableEnvironmentITCase, method testAsFromTupleToPojo.
/**
 * Converts a {@code Tuple4} DataSet into a Table with temporary field names,
 * renames the fields via {@code select}, and reads the result back as a
 * {@code SmallPojo2} DataSet, comparing the collected rows against the expected text.
 */
@Test
public void testAsFromTupleToPojo() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config());

    final List<Tuple4<String, Integer, Double, String>> input = new ArrayList<>();
    input.add(new Tuple4<>("Rofl", 1, 1.0, "Hi"));
    input.add(new Tuple4<>("lol", 2, 1.0, "Hi"));
    input.add(new Tuple4<>("Test me", 4, 3.33, "Hello world"));

    // Register the tuples under placeholder names, then alias them to the POJO field names.
    final Table aliased =
            tableEnv.fromDataSet(env.fromCollection(input), "q, w, e, r")
                    .select("q as a, w as b, e as c, r as d");

    final DataSet<SmallPojo2> pojoSet = tableEnv.toDataSet(aliased, SmallPojo2.class);
    final List<SmallPojo2> results = pojoSet.collect();
    final String expected = "Rofl,1,1.0,Hi\n" + "lol,2,1.0,Hi\n" + "Test me,4,3.33,Hello world\n";
    compareResultAsText(results, expected);
}
Example usage of org.apache.flink.api.java.tuple.Tuple4 in the apache/flink project:
class BroadcastBranchingITCase, method testProgram.
// Topology under test: two sources joined (Jn1), the result joined again with a
// third source (Jn2), while a branch of Jn1's output (Mp1) is fed into the final
// map (Mp2) as a broadcast set ("<=BC" edge below).
//
// Sc1(id,a,b,c) --
// \
// Sc2(id,x) -------- Jn2(id) -- Mp2 -- Sk
// \ / / <=BC
// Jn1(id) -- Mp1 ----
// /
// Sc3(id,y) --------
@Override
protected void testProgram() throws Exception {
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
// Parallelism 1 keeps the plan deterministic for the text comparison below.
env.setParallelism(1);
// Sc1 generates M parameters a,b,c for second degree polynomials P(x) = ax^2 + bx + c identified by id
DataSet<Tuple4<String, Integer, Integer, Integer>> sc1 = env.fromElements(new Tuple4<>("1", 61, 6, 29), new Tuple4<>("2", 7, 13, 10), new Tuple4<>("3", 8, 13, 27));
// Sc2 generates N x values to be evaluated with the polynomial identified by id
DataSet<Tuple2<String, Integer>> sc2 = env.fromElements(new Tuple2<>("1", 5), new Tuple2<>("2", 3), new Tuple2<>("3", 6));
// Sc3 generates N y values to be evaluated with the polynomial identified by id
DataSet<Tuple2<String, Integer>> sc3 = env.fromElements(new Tuple2<>("1", 2), new Tuple2<>("2", 3), new Tuple2<>("3", 7));
// Jn1 matches x and y values on id and emits (id, x, y) triples
JoinOperator<Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Integer, Integer>> jn1 = sc2.join(sc3).where(0).equalTo(0).with(new Jn1());
// Jn2 matches polynomial and arguments by id, computes p = min(P(x),P(y)) and emits (id, p) tuples
JoinOperator<Tuple3<String, Integer, Integer>, Tuple4<String, Integer, Integer, Integer>, Tuple2<String, Integer>> jn2 = jn1.join(sc1).where(0).equalTo(0).with(new Jn2());
// Mp1 selects (id, x, y) triples where x = y and broadcasts z (=x=y) to Mp2
FlatMapOperator<Tuple3<String, Integer, Integer>, Tuple2<String, Integer>> mp1 = jn1.flatMap(new Mp1());
// Mp2 filters out all p values which can be divided by z; mp1's output is attached
// as a broadcast set under the name "z" (the <=BC edge in the diagram above)
List<Tuple2<String, Integer>> result = jn2.flatMap(new Mp2()).withBroadcastSet(mp1, "z").collect();
JavaProgramTestBase.compareResultAsText(result, RESULT);
}
Example usage of org.apache.flink.api.java.tuple.Tuple4 in the apache/flink project:
class AbstractEventTimeWindowCheckpointingITCase, method testPreAggregatedTumblingTimeWindow.
// Runs a pre-aggregated (reduce + window function) tumbling event-time window job
// against a failing source to exercise checkpoint/restore, and validates the
// emitted (key, windowStart, windowEnd, sum) records in a singleton sink.
@Test
public void testPreAggregatedTumblingTimeWindow() {
final int NUM_ELEMENTS_PER_KEY = numElementsPerKey();
final int WINDOW_SIZE = windowSize();
final int NUM_KEYS = numKeys();
// FailingSource keeps static state across runs; reset it so the induced
// failure happens in this test execution too.
FailingSource.reset();
try {
StreamExecutionEnvironment env = StreamExecutionEnvironment.createRemoteEnvironment("localhost", cluster.getLeaderRPCPort());
env.setParallelism(PARALLELISM);
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
env.enableCheckpointing(100);
// Restart up to 3 times with no delay — the source fails on purpose
// (after NUM_ELEMENTS_PER_KEY / 3 elements, see the source constructor below).
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 0));
env.getConfig().disableSysoutLogging();
// State backend is supplied by the concrete subclass of this abstract test.
env.setStateBackend(this.stateBackend);
// Pipeline: failing source -> rebalance -> key by tuple field 0 -> tumbling
// event-time window -> incremental reduce (sums IntType values per key) +
// window function that stamps each record with the window bounds.
env.addSource(new FailingSource(NUM_KEYS, NUM_ELEMENTS_PER_KEY, NUM_ELEMENTS_PER_KEY / 3)).rebalance().keyBy(0).timeWindow(Time.of(WINDOW_SIZE, MILLISECONDS)).reduce(new ReduceFunction<Tuple2<Long, IntType>>() {
@Override
public Tuple2<Long, IntType> reduce(Tuple2<Long, IntType> a, Tuple2<Long, IntType> b) {
return new Tuple2<>(a.f0, new IntType(a.f1.value + b.f1.value));
}
}, new RichWindowFunction<Tuple2<Long, IntType>, Tuple4<Long, Long, Long, IntType>, Tuple, TimeWindow>() {
private boolean open = false;
@Override
public void open(Configuration parameters) {
// Also asserts the job actually runs with the configured parallelism.
assertEquals(PARALLELISM, getRuntimeContext().getNumberOfParallelSubtasks());
open = true;
}
@Override
public void apply(Tuple tuple, TimeWindow window, Iterable<Tuple2<Long, IntType>> input, Collector<Tuple4<Long, Long, Long, IntType>> out) {
// validate that the function has been opened properly
assertTrue(open);
// With pre-aggregation the iterable holds the single reduced value per key.
for (Tuple2<Long, IntType> in : input) {
out.collect(new Tuple4<>(in.f0, window.getStart(), window.getEnd(), in.f1));
}
}
// Sink runs with parallelism 1 so it sees every window result for validation.
}).addSink(new ValidatingSink(NUM_KEYS, NUM_ELEMENTS_PER_KEY / WINDOW_SIZE)).setParallelism(1);
tryExecute(env, "Tumbling Window Test");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
Aggregations