Example usage of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO in the Apache Flink project, taken from the class OuterJoinITCase, method testNestedIntoTupleIntoPojo.
@Test
public void testNestedIntoTupleIntoPojo() throws Exception {
	/*
	 * Full outer join where the left-side keys live in a tuple nested
	 * inside a POJO field, addressed via expression keys on both inputs.
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<POJO> pojos = CollectionDataSets.getSmallPojoDataSet(env);
	DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> tuples =
			CollectionDataSets.getSmallTuplebasedDataSet(env);

	// Join on the fields of the custom type nested inside the POJO's tuple.
	DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> joined =
			pojos.fullOuterJoin(tuples)
					.where("nestedTupleWithCustom.f0", "nestedTupleWithCustom.f1.myInt", "nestedTupleWithCustom.f1.myLong")
					.equalTo("f2", "f3", "f4")
					.with(new ProjectBothFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>());

	env.setParallelism(1);
	List<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> result = joined.collect();

	String expected = "1 First (10,100,1000,One) 10000,(1,First,10,100,1000,One,10000)\n"
			+ "2 Second (20,200,2000,Two) 20000,(2,Second,20,200,2000,Two,20000)\n"
			+ "3 Third (30,300,3000,Three) 30000,(3,Third,30,300,3000,Three,30000)\n";

	compareResultAsTuples(result, expected);
}
Example usage of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO in the Apache Flink project, taken from the class PartitionITCase, method testRangePartitionWithKeyExpression.
@Test
public void testRangePartitionWithKeyExpression() throws Exception {
	/*
	 * Range partitioning keyed by an expression on a nested POJO field;
	 * correctness is checked by emitting the distinct longs per partition.
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(3);

	DataSet<POJO> duplicates = CollectionDataSets.getDuplicatePojoDataSet(env);

	// Partition by the nested long field, then deduplicate per partition.
	DataSet<Long> uniqueLongs = duplicates
			.partitionByRange("nestedPojo.longNumber")
			.setParallelism(4)
			.mapPartition(new UniqueNestedPojoLongMapper());

	List<Long> result = uniqueLongs.collect();

	String expected = "10000\n"
			+ "20000\n"
			+ "30000\n";

	compareResultAsText(result, expected);
}
Example usage of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO in the Apache Flink project, taken from the class CoGroupITCase, method testCoGroupFieldSelectorAndKeySelector.
@Test
public void testCoGroupFieldSelectorAndKeySelector() throws Exception {
	/*
	 * CoGroup that mixes key styles: a key selector function on the POJO
	 * side and a positional field selector on the tuple side.
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<POJO> pojos = CollectionDataSets.getSmallPojoDataSet(env);
	DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> tuples =
			CollectionDataSets.getSmallTuplebasedDataSet(env);

	// Left key via KeySelector1, right key via tuple position 6.
	DataSet<CustomType> coGrouped = pojos
			.coGroup(tuples)
			.where(new KeySelector1())
			.equalTo(6)
			.with(new CoGroup2());

	List<CustomType> result = coGrouped.collect();

	String expected = "-1,20000,Flink\n"
			+ "-1,10000,Flink\n"
			+ "-1,30000,Flink\n";

	compareResultAsText(result, expected);
}
Example usage of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO in the Apache Flink project, taken from the class DistinctITCase, method testCorrectnessOfDistinctOnPojos.
@Test
public void testCorrectnessOfDistinctOnPojos() throws Exception {
	/*
	 * Distinct on a POJO data set, keyed by an expression on a nested
	 * field; the mapper projects the deduplicated values for comparison.
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<POJO> duplicates = CollectionDataSets.getDuplicatePojoDataSet(env);

	// Deduplicate by the nested long field, then map to the value to check.
	DataSet<Integer> distinctValues = duplicates
			.distinct("nestedPojo.longNumber")
			.map(new Mapper2());

	List<Integer> result = distinctValues.collect();

	String expected = "10000\n20000\n30000\n";

	compareResultAsText(result, expected);
}
Example usage of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO in the Apache Flink project, taken from the class JoinITCase, method testSelectingMultipleFieldsUsingExpressionLanguage.
@Test
public void testSelectingMultipleFieldsUsingExpressionLanguage() throws Exception {
	/*
	 * Inner join selecting several key fields at once via the expression
	 * language: nested POJO field plus flat fields against tuple positions.
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<POJO> pojos = CollectionDataSets.getSmallPojoDataSet(env);
	DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> tuples =
			CollectionDataSets.getSmallTuplebasedDataSet(env);

	// Composite key: (nestedPojo.longNumber, number, str) <-> (f6, f0, f1).
	DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> joined =
			pojos.join(tuples)
					.where("nestedPojo.longNumber", "number", "str")
					.equalTo("f6", "f0", "f1");

	env.setParallelism(1);
	List<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> result = joined.collect();

	String expected = "1 First (10,100,1000,One) 10000,(1,First,10,100,1000,One,10000)\n"
			+ "2 Second (20,200,2000,Two) 20000,(2,Second,20,200,2000,Two,20000)\n"
			+ "3 Third (30,300,3000,Three) 30000,(3,Third,30,300,3000,Three,30000)\n";

	compareResultAsTuples(result, expected);
}
Aggregations