Use of org.apache.flink.test.operators.util.CollectionDataSets.CustomType in project flink by apache.
From the class GroupReduceITCase, method testCorrectnessOfGroupReduceOnCustomTypeWithKeyExtractorAndCombine.
@Test
public void testCorrectnessOfGroupReduceOnCustomTypeWithKeyExtractorAndCombine() throws Exception {
    /*
     * check correctness of groupReduce on custom type with key extractor and combine
     */
    org.junit.Assume.assumeTrue(mode != TestExecutionMode.COLLECTION);
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> reduceDs =
            ds.groupBy(new KeySelector3()).reduceGroup(new CustomTypeGroupReduceWithCombine());
    List<CustomType> result = reduceDs.collect();
    String expected =
            "1,0,test1\n" + "2,3,test2\n" + "3,12,test3\n" + "4,30,test4\n" + "5,60,test5\n" + "6,105,test6\n";
    compareResultAsText(result, expected);
}
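KeySelector3 and CustomTypeGroupReduceWithCombine are inner classes of GroupReduceITCase and are not shown here. Judging from the expected output, the key selector groups by the CustomType's int field and the group-reduce sums the long field per group while tagging the result string. Below is a minimal sketch under those assumptions (field names myInt/myLong/myString and the no-arg constructor follow CollectionDataSets.CustomType); it is not the project's actual implementation:

import org.apache.flink.api.common.functions.GroupCombineFunction;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.util.Collector;

// Hypothetical sketch: group CustomType records by their int field.
public static class KeySelector3 implements KeySelector<CustomType, Integer> {
    @Override
    public Integer getKey(CustomType value) {
        return value.myInt;
    }
}

// Hypothetical sketch of a combinable group-reduce: the combiner pre-sums the long
// field per key, the reducer adds up the partial sums and tags the result string.
public static class CustomTypeGroupReduceWithCombine
        implements GroupReduceFunction<CustomType, CustomType>, GroupCombineFunction<CustomType, CustomType> {

    @Override
    public void combine(Iterable<CustomType> values, Collector<CustomType> out) {
        CustomType o = new CustomType();
        for (CustomType c : values) {
            o.myInt = c.myInt;
            o.myLong += c.myLong;
        }
        out.collect(o);
    }

    @Override
    public void reduce(Iterable<CustomType> values, Collector<CustomType> out) {
        CustomType o = new CustomType();
        for (CustomType c : values) {
            o.myInt = c.myInt;
            o.myLong += c.myLong;
            o.myString = "test" + c.myInt;
        }
        out.collect(o);
    }
}

The assumeTrue guard skips the test in COLLECTION execution mode, presumably because no separate combine phase runs there.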
Use of org.apache.flink.test.operators.util.CollectionDataSets.CustomType in project flink by apache.
From the class MapITCase, method testMapperOnCustomLowercaseString.
@Test
public void testMapperOnCustomLowercaseString() throws Exception {
    /*
     * Test mapper on Custom - lowercase myString
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> customMapDs = ds.map(new Mapper6());
    List<CustomType> result = customMapDs.collect();
    String expected =
            "1,0,hi\n" + "2,1,hello\n" + "2,2,hello world\n"
                    + "3,3,hello world, how are you?\n" + "3,4,i am fine.\n" + "3,5,luke skywalker\n"
                    + "4,6,comment#1\n" + "4,7,comment#2\n" + "4,8,comment#3\n" + "4,9,comment#4\n"
                    + "5,10,comment#5\n" + "5,11,comment#6\n" + "5,12,comment#7\n" + "5,13,comment#8\n"
                    + "5,14,comment#9\n" + "6,15,comment#10\n" + "6,16,comment#11\n" + "6,17,comment#12\n"
                    + "6,18,comment#13\n" + "6,19,comment#14\n" + "6,20,comment#15\n";
    compareResultAsText(result, expected);
}
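Mapper6 is an inner class of MapITCase and its implementation is not shown. The expected output suggests it simply lowercases the string field. A hypothetical sketch:

import org.apache.flink.api.common.functions.MapFunction;

// Hypothetical sketch: lowercase the string field, leave the numeric fields untouched.
public static class Mapper6 implements MapFunction<CustomType, CustomType> {
    @Override
    public CustomType map(CustomType value) {
        value.myString = value.myString.toLowerCase();
        return value;
    }
}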
Use of org.apache.flink.test.operators.util.CollectionDataSets.CustomType in project flink by apache.
From the class ReduceITCase, method testAllReduceForCustomTypes.
@Test
public void testAllReduceForCustomTypes() throws Exception {
    /*
     * All-reduce for custom types
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> reduceDs = ds.reduce(new AllAddingCustomTypeReduce());
    List<CustomType> result = reduceDs.collect();
    String expected = "91,210,Hello!";
    compareResultAsText(result, expected);
}
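AllAddingCustomTypeReduce is likewise not shown. Because the DataSet is reduced without a preceding groupBy, the reduce runs over all 21 records and the expected result 91,210,Hello! is the sum of the int and long fields. A hypothetical sketch, assuming CustomType has an (int, long, String) constructor:

import org.apache.flink.api.common.functions.ReduceFunction;

// Hypothetical sketch: add both numeric fields and emit a fixed string.
public static class AllAddingCustomTypeReduce implements ReduceFunction<CustomType> {
    @Override
    public CustomType reduce(CustomType in1, CustomType in2) {
        return new CustomType(in1.myInt + in2.myInt, in1.myLong + in2.myLong, "Hello!");
    }
}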
Use of org.apache.flink.test.operators.util.CollectionDataSets.CustomType in project flink by apache.
From the class CoGroupITCase, method testCoGroupOnTwoCustomTypeInputsWithExpressionKeyAndFieldSelector.
@Test
public void testCoGroupOnTwoCustomTypeInputsWithExpressionKeyAndFieldSelector() throws Exception {
    /*
     * CoGroup on two custom type inputs using an expression key and a field position selector
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 =
            CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<CustomType> coGroupDs =
            ds.coGroup(ds2).where("nestedPojo.longNumber").equalTo(6).with(new CoGroup1());
    List<CustomType> result = coGroupDs.collect();
    String expected = "-1,20000,Flink\n" + "-1,10000,Flink\n" + "-1,30000,Flink\n";
    compareResultAsText(result, expected);
}
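CoGroup1 is an inner class of CoGroupITCase. Given that equalTo(6) keys the tuple side on its last Long field (10000, 20000, 30000 in the small tuple-based data set) and that exactly those values appear in the output, the function presumably emits one marker record per matched tuple. A hypothetical sketch:

import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.java.tuple.Tuple7;
import org.apache.flink.util.Collector;

// Hypothetical sketch: for every tuple in the matched group, emit a CustomType that
// carries the tuple's long key and a fixed marker string.
public static class CoGroup1
        implements CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType> {
    @Override
    public void coGroup(
            Iterable<POJO> first,
            Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
            Collector<CustomType> out) {
        for (Tuple7<Integer, String, Integer, Integer, Long, String, Long> t : second) {
            out.collect(new CustomType(-1, t.f6, "Flink"));
        }
    }
}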
Use of org.apache.flink.test.operators.util.CollectionDataSets.CustomType in project flink by apache.
From the class CoGroupITCase, method testCoGroupFieldSelectorAndComplicatedKeySelector.
@Test
public void testCoGroupFieldSelectorAndComplicatedKeySelector() throws Exception {
    /*
     * CoGroup field selector (expression keys) + key selector function.
     * The key selector is unnecessarily complicated (Tuple1) ;)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 =
            CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<CustomType> coGroupDs =
            ds.coGroup(ds2).where(new KeySelector6()).equalTo(6).with(new CoGroup3());
    List<CustomType> result = coGroupDs.collect();
    String expected = "-1,20000,Flink\n" + "-1,10000,Flink\n" + "-1,30000,Flink\n";
    compareResultAsText(result, expected);
}
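KeySelector6 and CoGroup3 are not shown either. The comment hints that the key selector wraps the nested POJO's long key in a Tuple1, which then matches the tuple side's position-6 Long key; since the output matches the previous test, CoGroup3 plausibly mirrors CoGroup1. A hypothetical sketch of the key selector:

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple1;

// Hypothetical sketch of the "unnecessarily complicated" selector: wrap the nested
// POJO's long key in a Tuple1 instead of returning the Long directly.
public static class KeySelector6 implements KeySelector<POJO, Tuple1<Long>> {
    @Override
    public Tuple1<Long> getKey(POJO value) {
        return new Tuple1<>(value.nestedPojo.longNumber);
    }
}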