Use of org.apache.flink.api.java.ExecutionEnvironment in project flink by apache.
In class ReduceWithCombinerITCase, method testReduceOnKeyedDatasetWithSelector:
@Test
public void testReduceOnKeyedDatasetWithSelector() throws Exception {
    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);
    // create the input data and distribute it evenly among the available downstream tasks
    DataSet<Tuple3<String, Integer, Boolean>> input = createKeyedInput(env);
    List<Tuple3<String, Integer, Boolean>> actual =
            input.groupBy(new KeySelectorX()).reduceGroup(new KeyedCombReducer()).collect();
    String expected = "k1,6,true\nk2,4,true\n";
    compareResultAsTuples(actual, expected);
}
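The KeySelectorX and KeyedCombReducer helpers are defined elsewhere in ReduceWithCombinerITCase and are not shown on this page. A minimal sketch of what they plausibly look like, inferred from the expected output (the Integer fields are summed per key, and the Boolean flag ends up true once the combine phase has run); the real implementations may differ:

import org.apache.flink.api.common.functions.GroupCombineFunction;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.util.Collector;

// Selects the String field as the grouping key.
public static class KeySelectorX implements KeySelector<Tuple3<String, Integer, Boolean>, String> {
    @Override
    public String getKey(Tuple3<String, Integer, Boolean> t) {
        return t.f0;
    }
}

// Sums the Integer field per key. The combine phase sets the Boolean flag to
// true and the reduce phase ANDs the flags, so the final flag is true only if
// every partial result passed through the combiner.
public static class KeyedCombReducer implements
        GroupCombineFunction<Tuple3<String, Integer, Boolean>, Tuple3<String, Integer, Boolean>>,
        GroupReduceFunction<Tuple3<String, Integer, Boolean>, Tuple3<String, Integer, Boolean>> {

    @Override
    public void combine(Iterable<Tuple3<String, Integer, Boolean>> values,
            Collector<Tuple3<String, Integer, Boolean>> out) {
        String key = null;
        int sum = 0;
        for (Tuple3<String, Integer, Boolean> t : values) {
            key = t.f0;
            sum += t.f1;
        }
        out.collect(new Tuple3<>(key, sum, true)); // mark that the combiner ran
    }

    @Override
    public void reduce(Iterable<Tuple3<String, Integer, Boolean>> values,
            Collector<Tuple3<String, Integer, Boolean>> out) {
        String key = null;
        int sum = 0;
        boolean combined = true;
        for (Tuple3<String, Integer, Boolean> t : values) {
            key = t.f0;
            sum += t.f1;
            combined &= t.f2; // stays true only if all partials were combined
        }
        out.collect(new Tuple3<>(key, sum, combined));
    }
}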
Use of org.apache.flink.api.java.ExecutionEnvironment in project flink by apache.
In class RemoteEnvironmentITCase, method testInvalidAkkaConfiguration:
/**
 * Ensures that Akka configuration parameters are actually applied: an invalid
 * startup timeout must cause program execution to fail.
 */
@Test(expected = IllegalArgumentException.class)
public void testInvalidAkkaConfiguration() throws Throwable {
    Configuration config = new Configuration();
    config.setString(ConfigConstants.AKKA_STARTUP_TIMEOUT, INVALID_STARTUP_TIMEOUT);
    final ExecutionEnvironment env = ExecutionEnvironment.createRemoteEnvironment(
            cluster.hostname(), cluster.getLeaderRPCPort(), config);
    env.getConfig().disableSysoutLogging();
    DataSet<String> result = env.createInput(new TestNonRichInputFormat());
    result.output(new LocalCollectionOutputFormat<String>(new ArrayList<String>()));
    try {
        env.execute();
        Assert.fail("Program should not run successfully because of invalid Akka settings.");
    } catch (IOException ex) {
        throw ex.getCause();
    }
}
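TestNonRichInputFormat and INVALID_STARTUP_TIMEOUT are fixtures of the test class and are not shown here. A minimal sketch of what a non-rich input format could look like, assuming a GenericInputFormat base; the record count and payload are made up for illustration:

import org.apache.flink.api.common.io.GenericInputFormat;

// A plain (non-rich) input format: it extends GenericInputFormat directly,
// so it has no access to a RuntimeContext.
public class TestNonRichInputFormat extends GenericInputFormat<String> {

    private static final int NUM_RECORDS = 100; // assumed size
    private int emitted;

    @Override
    public boolean reachedEnd() {
        return emitted >= NUM_RECORDS;
    }

    @Override
    public String nextRecord(String reuse) {
        return "record-" + emitted++;
    }
}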
Use of org.apache.flink.api.java.ExecutionEnvironment in project flink by apache.
In class ReplicatingDataSourceITCase, method testReplicatedSourceToJoin:
@Test
public void testReplicatedSourceToJoin() throws Exception {
    /*
     * Test replicated source going into join
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple1<Long>> source1 = env.createInput(
            new ReplicatingInputFormat<Long, GenericInputSplit>(
                    new ParallelIteratorInputFormat<Long>(new NumberSequenceIterator(0L, 1000L))),
            BasicTypeInfo.LONG_TYPE_INFO)
        .map(new ToTuple());
    DataSet<Tuple1<Long>> source2 = env.generateSequence(0L, 1000L).map(new ToTuple());
    DataSet<Tuple> pairs = source1.join(source2).where(0).equalTo(0).projectFirst(0).sum(0);
    List<Tuple> result = pairs.collect();
    String expectedResult = "(500500)";
    compareResultAsText(result, expectedResult);
}
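ToTuple wraps each Long in a Tuple1 so the replicated source and the generated sequence share a joinable tuple type. A minimal sketch:

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple1;

public static class ToTuple implements MapFunction<Long, Tuple1<Long>> {
    @Override
    public Tuple1<Long> map(Long value) {
        return new Tuple1<>(value);
    }
}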
Use of org.apache.flink.api.java.ExecutionEnvironment in project flink by apache.
In class PartitionITCase, method testRangePartitionByKeyFieldAndDifferentParallelism:
@Test
public void testRangePartitionByKeyFieldAndDifferentParallelism() throws Exception {
    /*
     * Test range partition by key field and different parallelism
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(3);
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Long> uniqLongs = ds.partitionByRange(1).setParallelism(4)
            .mapPartition(new UniqueTupleLongMapper());
    List<Long> result = uniqLongs.collect();
    String expected = "1\n" + "2\n" + "3\n" + "4\n" + "5\n" + "6\n";
    compareResultAsText(result, expected);
}
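UniqueTupleLongMapper is shared by the partition tests in this class (this one and testHashPartitionByKeyField below). A plausible sketch: it emits each distinct Long key once per partition, which yields globally unique values only because the preceding partitionByRange/partitionByHash on field 1 routes all records with the same key to the same partition:

import java.util.HashSet;
import java.util.Set;
import org.apache.flink.api.common.functions.MapPartitionFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.util.Collector;

public static class UniqueTupleLongMapper
        implements MapPartitionFunction<Tuple3<Integer, Long, String>, Long> {

    @Override
    public void mapPartition(Iterable<Tuple3<Integer, Long, String>> records, Collector<Long> out) {
        Set<Long> seen = new HashSet<>();
        for (Tuple3<Integer, Long, String> t : records) {
            if (seen.add(t.f1)) {
                out.collect(t.f1); // emit each Long key once per partition
            }
        }
    }
}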
Use of org.apache.flink.api.java.ExecutionEnvironment in project flink by apache.
In class PartitionITCase, method testHashPartitionByKeyField:
@Test
public void testHashPartitionByKeyField() throws Exception {
    /*
     * Test hash partition by key field
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Long> uniqLongs = ds.partitionByHash(1).mapPartition(new UniqueTupleLongMapper());
    List<Long> result = uniqLongs.collect();
    String expected = "1\n" + "2\n" + "3\n" + "4\n" + "5\n" + "6\n";
    compareResultAsText(result, expected);
}
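Both partition tests read the same fixture: CollectionDataSets.get3TupleDataSet produces a small set of Tuple3<Integer, Long, String> records whose Long field ranges over 1..6, which is why each test expects exactly the longs 1 through 6. A sketch of how such a fixture is assembled (the String payloads here are illustrative):

import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;

public static DataSet<Tuple3<Integer, Long, String>> get3TupleDataSet(ExecutionEnvironment env) {
    List<Tuple3<Integer, Long, String>> data = new ArrayList<>();
    data.add(new Tuple3<>(1, 1L, "Hi"));
    data.add(new Tuple3<>(2, 2L, "Hello"));
    data.add(new Tuple3<>(3, 2L, "Hello world"));
    // ... further records with Long keys up to 6L
    return env.fromCollection(data);
}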