Use of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType in project flink by apache.
The class FilterITCase, method testFilterOnCustomType.
@Test
public void testFilterOnCustomType() throws Exception {
    /*
     * Test filter on custom type
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> filterDs = ds.filter(new Filter6());
    List<CustomType> result = filterDs.collect();
    String expected = "3,3,Hello world, how are you?\n" + "3,4,I am fine.\n" + "3,5,Luke Skywalker\n";
    compareResultAsText(result, expected);
}
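The Filter6 function is defined elsewhere in FilterITCase and is not part of this snippet. A minimal sketch of what it could look like, assuming CustomType is the CollectionDataSets POJO with public myInt, myLong and myString fields, and inferring the predicate from the expected output (only the three records "3,3,...", "3,4,..." and "3,5,..." survive):

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType;

// Hypothetical reconstruction of Filter6. Keeping records whose string contains
// an 'a' matches the expected output; filtering on myInt == 3 would produce the
// same result for this dataset, so the exact predicate is an assumption.
public class Filter6 implements FilterFunction<CustomType> {

    @Override
    public boolean filter(CustomType value) throws Exception {
        return value.myString.contains("a");
    }
}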
Use of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType in project flink by apache.
The class MapITCase, method testMapperOnCustomLowercaseString.
@Test
public void testMapperOnCustomLowercaseString() throws Exception {
    /*
     * Test mapper on Custom - lowercase myString
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> customMapDs = ds.map(new Mapper6());
    List<CustomType> result = customMapDs.collect();
    String expected = "1,0,hi\n" + "2,1,hello\n" + "2,2,hello world\n" + "3,3,hello world, how are you?\n"
            + "3,4,i am fine.\n" + "3,5,luke skywalker\n" + "4,6,comment#1\n" + "4,7,comment#2\n"
            + "4,8,comment#3\n" + "4,9,comment#4\n" + "5,10,comment#5\n" + "5,11,comment#6\n"
            + "5,12,comment#7\n" + "5,13,comment#8\n" + "5,14,comment#9\n" + "6,15,comment#10\n"
            + "6,16,comment#11\n" + "6,17,comment#12\n" + "6,18,comment#13\n" + "6,19,comment#14\n"
            + "6,20,comment#15\n";
    compareResultAsText(result, expected);
}
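Mapper6 is likewise defined outside this snippet. Judging from the test comment ("lowercase myString") and the all-lowercase strings in the expected output, a sketch could be:

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType;

// Hypothetical reconstruction of Mapper6: lowercases the string field and
// leaves myInt and myLong untouched, as the expected output suggests.
public class Mapper6 implements MapFunction<CustomType, CustomType> {

    @Override
    public CustomType map(CustomType value) throws Exception {
        value.myString = value.myString.toLowerCase();
        return value;
    }
}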
Use of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType in project flink by apache.
The class ReduceITCase, method testReduceOnCustomTypeWithKeyExtractor.
@Test
public void testReduceOnCustomTypeWithKeyExtractor() throws Exception {
    /*
     * Reduce on custom type with key extractor
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> reduceDs = ds.groupBy(new KeySelector2()).reduce(new CustomTypeReduce());
    List<CustomType> result = reduceDs.collect();
    String expected = "1,0,Hi\n" + "2,3,Hello!\n" + "3,12,Hello!\n" + "4,30,Hello!\n" + "5,60,Hello!\n" + "6,105,Hello!\n";
    compareResultAsText(result, expected);
}
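KeySelector2 and CustomTypeReduce are not shown here. A sketch consistent with the expected output, assuming records are grouped by myInt, the myLong values of each group are summed, and merged records get the string "Hello!" (single-element groups such as key 1 pass through unchanged, which is why "1,0,Hi" keeps its original string):

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType;

// Hypothetical key extractor: group records by the integer field.
class KeySelector2 implements KeySelector<CustomType, Integer> {

    @Override
    public Integer getKey(CustomType value) throws Exception {
        return value.myInt;
    }
}

// Hypothetical reduce function: sum the long fields and replace the string.
class CustomTypeReduce implements ReduceFunction<CustomType> {

    @Override
    public CustomType reduce(CustomType in1, CustomType in2) throws Exception {
        CustomType out = new CustomType();
        out.myInt = in1.myInt;
        out.myLong = in1.myLong + in2.myLong;
        out.myString = "Hello!";
        return out;
    }
}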
Use of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType in project flink by apache.
The class JoinITCase, method testDefaultJoinOnTwoCustomTypeInputsWithInnerClassKeyExtractorsDisabledClosureCleaner.
@Test
public void testDefaultJoinOnTwoCustomTypeInputsWithInnerClassKeyExtractorsDisabledClosureCleaner() throws Exception {
    /*
     * (Default) Join on two custom type inputs with key extractors, check if disabling closure cleaning works
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableClosureCleaner();
    DataSet<CustomType> ds1 = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> ds2 = CollectionDataSets.getSmallCustomTypeDataSet(env);
    boolean correctExceptionTriggered = false;
    try {
        DataSet<Tuple2<CustomType, CustomType>> joinDs = ds1.join(ds2).where(new KeySelector<CustomType, Integer>() {

            @Override
            public Integer getKey(CustomType value) {
                return value.myInt;
            }
        }).equalTo(new KeySelector<CustomType, Integer>() {

            @Override
            public Integer getKey(CustomType value) throws Exception {
                return value.myInt;
            }
        });
    } catch (InvalidProgramException ex) {
        correctExceptionTriggered = (ex.getCause() instanceof java.io.NotSerializableException);
    }
    Assert.assertTrue(correctExceptionTriggered);
}
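The anonymous KeySelectors above capture a reference to the enclosing test class, which is not serializable. With the closure cleaner disabled, Flink cannot null out that reference, so serializing the key selector fails and an InvalidProgramException wrapping a NotSerializableException is exactly what the test expects. A hedged sketch of an alternative that stays serializable even without the closure cleaner is a standalone key selector class (MyIntKeySelector is a hypothetical name, not part of the test):

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType;

// Hypothetical top-level key selector: it holds no reference to an enclosing
// instance, so it serializes even when the closure cleaner is disabled.
public class MyIntKeySelector implements KeySelector<CustomType, Integer> {

    @Override
    public Integer getKey(CustomType value) throws Exception {
        return value.myInt;
    }
}

With such a class, ds1.join(ds2).where(new MyIntKeySelector()).equalTo(new MyIntKeySelector()) would build the same join without tripping the serializability check.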
Use of org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType in project flink by apache.
The class JoinITCase, method testDefaultJoinOnTwoCustomTypeInputsWithKeyExtractors.
@Test
public void testDefaultJoinOnTwoCustomTypeInputsWithKeyExtractors() throws Exception {
    /*
     * (Default) Join on two custom type inputs with key extractors
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds1 = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> ds2 = CollectionDataSets.getSmallCustomTypeDataSet(env);
    DataSet<Tuple2<CustomType, CustomType>> joinDs = ds1.join(ds2).where(new KeySelector5()).equalTo(new KeySelector6());
    List<Tuple2<CustomType, CustomType>> result = joinDs.collect();
    String expected = "1,0,Hi,1,0,Hi\n" + "2,1,Hello,2,1,Hello\n" + "2,1,Hello,2,2,Hello world\n"
            + "2,2,Hello world,2,1,Hello\n" + "2,2,Hello world,2,2,Hello world\n";
    compareResultAsTuples(result, expected);
}
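KeySelector5 and KeySelector6 are defined elsewhere in JoinITCase. Given that the expected output pairs records whenever their first fields match, both plausibly key on myInt; a sketch under that assumption:

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType;

// Hypothetical key extractors for the two join inputs; both return myInt.
class KeySelector5 implements KeySelector<CustomType, Integer> {

    @Override
    public Integer getKey(CustomType value) throws Exception {
        return value.myInt;
    }
}

class KeySelector6 implements KeySelector<CustomType, Integer> {

    @Override
    public Integer getKey(CustomType value) throws Exception {
        return value.myInt;
    }
}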