Example usage of org.apache.flink.api.java.tuple.Tuple3 in the Apache Flink project:
class ReduceWithCombinerITCase, method testReduceOnKeyedDataset().
@Test
public void testReduceOnKeyedDataset() throws Exception {
    // Prepare the execution environment with a fixed parallelism of 4.
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);

    // Build the keyed input, distributed evenly over the downstream tasks.
    final DataSet<Tuple3<String, Integer, Boolean>> source = createKeyedInput(env);

    // Group by the first tuple field and apply the combinable group reducer.
    final List<Tuple3<String, Integer, Boolean>> results =
            source.groupBy(0)
                    .reduceGroup(new KeyedCombReducer())
                    .collect();

    final String expectedResult = "k1,6,true\nk2,4,true\n";
    compareResultAsTuples(results, expectedResult);
}
Example usage of org.apache.flink.api.java.tuple.Tuple3 in the Apache Flink project:
class ReduceWithCombinerITCase, method testReduceOnKeyedDatasetWithSelector().
@Test
public void testReduceOnKeyedDatasetWithSelector() throws Exception {
    // Prepare the execution environment with a fixed parallelism of 4.
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);

    // Build the keyed input, distributed evenly over the downstream tasks.
    final DataSet<Tuple3<String, Integer, Boolean>> source = createKeyedInput(env);

    // Group via a key-selector function instead of a field position,
    // then apply the same combinable group reducer.
    final List<Tuple3<String, Integer, Boolean>> results =
            source.groupBy(new KeySelectorX())
                    .reduceGroup(new KeyedCombReducer())
                    .collect();

    final String expectedResult = "k1,6,true\nk2,4,true\n";
    compareResultAsTuples(results, expectedResult);
}
Example usage of org.apache.flink.api.java.tuple.Tuple3 in the Apache Flink project:
class PartitionITCase, method testRangePartitionByKeyFieldAndDifferentParallelism().
@Test
public void testRangePartitionByKeyFieldAndDifferentParallelism() throws Exception {
    // Verifies range partitioning on a key field while the partitioned
    // operator runs at a different parallelism than the environment default.
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(3);

    final DataSet<Tuple3<Integer, Long, String>> tuples = CollectionDataSets.get3TupleDataSet(env);

    // Range-partition on field 1 with parallelism 4, then emit each
    // distinct Long key exactly once per partition.
    final DataSet<Long> distinctKeys =
            tuples.partitionByRange(1)
                    .setParallelism(4)
                    .mapPartition(new UniqueTupleLongMapper());

    final List<Long> result = distinctKeys.collect();
    final String expected = "1\n" + "2\n" + "3\n" + "4\n" + "5\n" + "6\n";
    compareResultAsText(result, expected);
}
Example usage of org.apache.flink.api.java.tuple.Tuple3 in the Apache Flink project:
class PartitionITCase, method testHashPartitionByKeyField().
@Test
public void testHashPartitionByKeyField() throws Exception {
    // Verifies hash partitioning on a key field.
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    final DataSet<Tuple3<Integer, Long, String>> tuples = CollectionDataSets.get3TupleDataSet(env);

    // Hash-partition on field 1, then emit each distinct Long key
    // exactly once per partition.
    final DataSet<Long> distinctKeys =
            tuples.partitionByHash(1)
                    .mapPartition(new UniqueTupleLongMapper());

    final List<Long> result = distinctKeys.collect();
    final String expected = "1\n" + "2\n" + "3\n" + "4\n" + "5\n" + "6\n";
    compareResultAsText(result, expected);
}
Example usage of org.apache.flink.api.java.tuple.Tuple3 in the Apache Flink project:
class PartitionITCase, method testRangePartitionByKeySelector().
@Test
public void testRangePartitionByKeySelector() throws Exception {
    // Verifies range partitioning driven by a key-selector function
    // rather than a field position.
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    final DataSet<Tuple3<Integer, Long, String>> tuples = CollectionDataSets.get3TupleDataSet(env);

    // Range-partition via the selector, then emit each distinct Long key
    // exactly once per partition.
    final DataSet<Long> distinctKeys =
            tuples.partitionByRange(new KeySelector1())
                    .mapPartition(new UniqueTupleLongMapper());

    final List<Long> result = distinctKeys.collect();
    final String expected = "1\n" + "2\n" + "3\n" + "4\n" + "5\n" + "6\n";
    compareResultAsText(result, expected);
}
Aggregations