Uses of org.apache.flink.api.common.InvalidProgramException in project flink by splunk.
From class CoGroupCustomPartitioningTest, method testCoGroupWithTuplesWrongType:
@Test
public void testCoGroupWithTuplesWrongType() {
    try {
        final Partitioner<Integer> partitioner = new TestPartitionerInt();

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<Long, Long>> input1 = env.fromElements(new Tuple2<Long, Long>(0L, 0L));
        DataSet<Tuple3<Long, Long, Long>> input2 = env.fromElements(new Tuple3<Long, Long, Long>(0L, 0L, 0L));

        try {
            // where(1) selects a Long key, which does not match the Partitioner<Integer>
            input1.coGroup(input2).where(1).equalTo(0).withPartitioner(partitioner);
            fail("should throw an exception");
        } catch (InvalidProgramException e) {
            // expected
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
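The helper partitioners used throughout these tests are small test-local classes whose bodies are not part of this listing. The following is a minimal sketch, assuming only what the usages above imply (the key-type parameters Integer and Long); the partition logic itself is a placeholder:

import org.apache.flink.api.common.functions.Partitioner;

// Sketch of the helper partitioners; only the type parameters matter to these tests.
private static class TestPartitionerInt implements Partitioner<Integer> {
    @Override
    public int partition(Integer key, int numPartitions) {
        return 0; // trivial placement; the tests only exercise key-type validation
    }
}

private static class TestPartitionerLong implements Partitioner<Long> {
    @Override
    public int partition(Long key, int numPartitions) {
        return 0;
    }
}

In the test above, where(1) on a Tuple2<Long, Long> yields a Long key, so pairing it with a Partitioner<Integer> is exactly the mismatch that withPartitioner() rejects with InvalidProgramException.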
From class CustomPartitioningTest, method testPartitionPojoInvalidType:
@Test
public void testPartitionPojoInvalidType() {
    try {
        final int parallelism = 4;

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(parallelism);

        DataSet<Pojo> data = env.fromElements(new Pojo()).rebalance();

        try {
            data.partitionCustom(new TestPartitionerLong(), "a");
            fail("Should throw an exception");
        } catch (InvalidProgramException e) {
            // expected
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
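The Pojo type is defined in the test class and not shown in this listing. A minimal sketch, assuming field "a" is an int; any non-Long field type produces the same rejection when it is paired with TestPartitionerLong:

public static class Pojo {
    public int a; // assumed non-Long field; partitionCustom(new TestPartitionerLong(), "a") must reject it
    public int b;
}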
From class GroupingPojoTranslationTest, method testCustomPartitioningTupleInvalidTypeSorted:
@Test
public void testCustomPartitioningTupleInvalidTypeSorted() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Pojo3> data = env.fromElements(new Pojo3()).rebalance().setParallelism(4);

        try {
            data.groupBy("a").sortGroup("b", Order.ASCENDING).withPartitioner(new TestPartitionerLong());
            fail("Should throw an exception");
        } catch (InvalidProgramException e) {
            // expected
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
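Pojo3 is likewise a test-local type whose fields are not listed here. A minimal sketch, assuming plain int fields, which is enough to make the Partitioner<Long> invalid for the grouping key before the sortGroup ordering is even considered:

public static class Pojo3 {
    public int a; // grouping key; not a Long, so the Partitioner<Long> is rejected
    public int b; // sort key
    public int c;
}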
From class JoinCustomPartitioningTest, method testJoinWithKeySelectorsWrongType:
@Test
public void testJoinWithKeySelectorsWrongType() {
    try {
        final Partitioner<Long> partitioner = new TestPartitionerLong();

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Pojo2> input1 = env.fromElements(new Pojo2());
        DataSet<Pojo3> input2 = env.fromElements(new Pojo3());

        try {
            input1.join(input2, JoinHint.REPARTITION_HASH_FIRST)
                    .where(new Pojo2KeySelector())
                    .equalTo(new Pojo3KeySelector())
                    .withPartitioner(partitioner);
            fail("should throw an exception");
        } catch (InvalidProgramException e) {
            // expected
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
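Pojo2KeySelector and Pojo3KeySelector are also test-local helpers not shown in this listing. The sketch below assumes both extract an Integer key (and that Pojo2 mirrors Pojo3 with an int field "a"), which is one way to make the Partitioner<Long> invalid; any key type other than Long would trigger the same InvalidProgramException:

import org.apache.flink.api.java.functions.KeySelector;

private static class Pojo2KeySelector implements KeySelector<Pojo2, Integer> {
    @Override
    public Integer getKey(Pojo2 value) {
        return value.a; // assumes an int field "a"
    }
}

private static class Pojo3KeySelector implements KeySelector<Pojo3, Integer> {
    @Override
    public Integer getKey(Pojo3 value) {
        return value.a;
    }
}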
From class JoinCustomPartitioningTest, method testJoinWithTuplesWrongType:
@Test
public void testJoinWithTuplesWrongType() {
    try {
        final Partitioner<Integer> partitioner = new TestPartitionerInt();

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<Long, Long>> input1 = env.fromElements(new Tuple2<Long, Long>(0L, 0L));
        DataSet<Tuple3<Long, Long, Long>> input2 = env.fromElements(new Tuple3<Long, Long, Long>(0L, 0L, 0L));

        try {
            input1.join(input2, JoinHint.REPARTITION_HASH_FIRST).where(1).equalTo(0).withPartitioner(partitioner);
            fail("should throw an exception");
        } catch (InvalidProgramException e) {
            // expected
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
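For contrast, a minimal sketch of the accepted case (not taken from the listed tests; the method name is illustrative): when the selected key type and the partitioner's type parameter agree, withPartitioner() succeeds and no InvalidProgramException is thrown.

@Test
public void exampleMatchingKeyAndPartitionerTypes() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple2<Long, Long>> left = env.fromElements(new Tuple2<Long, Long>(0L, 0L));
    DataSet<Tuple3<Long, Long, Long>> right = env.fromElements(new Tuple3<Long, Long, Long>(0L, 0L, 0L));

    // Both where(1) and equalTo(0) select Long keys, so the Partitioner<Long> is accepted.
    left.join(right, JoinHint.REPARTITION_HASH_FIRST)
            .where(1)
            .equalTo(0)
            .withPartitioner(new TestPartitionerLong());
}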