
Example 11 with Tuple7

Use of org.apache.flink.api.java.tuple.Tuple7 in project flink by apache.

From the class OuterJoinITCase, method testJoinWithNestedKeyExpression1.

@Test
public void testJoinWithNestedKeyExpression1() throws Exception {
    /*
     * Join nested pojo against tuple (selected using a string)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds1 = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> joinDs =
            ds1.fullOuterJoin(ds2)
                    .where("nestedPojo.longNumber")
                    .equalTo("f6")
                    .with(new ProjectBothFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>());
    List<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> result = joinDs.collect();
    String expected = "1 First (10,100,1000,One) 10000,(1,First,10,100,1000,One,10000)\n" + "2 Second (20,200,2000,Two) 20000,(2,Second,20,200,2000,Two,20000)\n" + "3 Third (30,300,3000,Three) 30000,(3,Third,30,300,3000,Three,30000)\n";
    compareResultAsTuples(result, expected);
}
Also used: POJO (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO), ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), Tuple7 (org.apache.flink.api.java.tuple.Tuple7), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Test (org.junit.Test)
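
The ProjectBothFunction used in this outer join is an inner class of the test and is not shown on this page. Below is a minimal sketch of such a function, assuming it simply wraps both join sides in a Tuple2 (the class name matches the test, but the body and signature are an assumption, not copied from the Flink sources):

// imports assumed available in the test class file
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.java.tuple.Tuple2;

// Assumed shape of a "project both" join function: it emits the two joined elements
// as a Tuple2, which is the result type the test collects.
public static class ProjectBothFunction<IN1, IN2> implements JoinFunction<IN1, IN2, Tuple2<IN1, IN2>> {

    @Override
    public Tuple2<IN1, IN2> join(IN1 first, IN2 second) {
        // With a full outer join, either side may be null when there is no matching element.
        return new Tuple2<>(first, second);
    }
}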

Example 12 with Tuple7

Use of org.apache.flink.api.java.tuple.Tuple7 in project flink by apache.

From the class JoinITCase, method testNestedIntoTuple.

@Test
public void testNestedIntoTuple() throws Exception {
    /*
     * nested into tuple
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds1 = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> joinDs =
            ds1.join(ds2)
                    .where("nestedPojo.longNumber", "number", "nestedTupleWithCustom.f0")
                    .equalTo("f6", "f0", "f2");
    env.setParallelism(1);
    List<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> result = joinDs.collect();
    String expected = "1 First (10,100,1000,One) 10000,(1,First,10,100,1000,One,10000)\n" + "2 Second (20,200,2000,Two) 20000,(2,Second,20,200,2000,Two,20000)\n" + "3 Third (30,300,3000,Three) 30000,(3,Third,30,300,3000,Three,30000)\n";
    compareResultAsTuples(result, expected);
}
Also used: POJO (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO), ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), Tuple7 (org.apache.flink.api.java.tuple.Tuple7), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Test (org.junit.Test)
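
The composite expression keys above ("nestedPojo.longNumber", "number", "nestedTupleWithCustom.f0" against "f6", "f0", "f2") can also be written with key selector functions. The following is a hedged sketch of an equivalent formulation, reusing ds1 and ds2 from the test above; the POJO field accesses assume fields named exactly like the expression keys and are not taken from the actual CollectionDataSets source:

// imports assumed available in the test class file
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;

// Illustrative only: the same three-part key expressed as KeySelector functions.
DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> selectorJoinDs =
        ds1.join(ds2)
                .where(new KeySelector<POJO, Tuple3<Long, Integer, Integer>>() {
                    @Override
                    public Tuple3<Long, Integer, Integer> getKey(POJO p) {
                        // assumes POJO fields matching the expression keys used in the test
                        return new Tuple3<>(p.nestedPojo.longNumber, p.number, p.nestedTupleWithCustom.f0);
                    }
                })
                .equalTo(new KeySelector<Tuple7<Integer, String, Integer, Integer, Long, String, Long>, Tuple3<Long, Integer, Integer>>() {
                    @Override
                    public Tuple3<Long, Integer, Integer> getKey(Tuple7<Integer, String, Integer, Integer, Long, String, Long> t) {
                        return new Tuple3<>(t.f6, t.f0, t.f2);
                    }
                });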

Example 13 with Tuple7

Use of org.apache.flink.api.java.tuple.Tuple7 in project flink by apache.

From the class JoinITCase, method testFullPojoWithFullTuple.

@Test
public void testFullPojoWithFullTuple() throws Exception {
    /*
     * full pojo with full tuple
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds1 = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Long, Integer, Integer, Long, String, Integer, String>> ds2 = CollectionDataSets.getSmallTuplebasedDataSetMatchingPojo(env);
    DataSet<Tuple2<POJO, Tuple7<Long, Integer, Integer, Long, String, Integer, String>>> joinDs =
            ds1.join(ds2).where("*").equalTo("*");
    env.setParallelism(1);
    List<Tuple2<POJO, Tuple7<Long, Integer, Integer, Long, String, Integer, String>>> result = joinDs.collect();
    String expected = "1 First (10,100,1000,One) 10000,(10000,10,100,1000,One,1,First)\n" + "2 Second (20,200,2000,Two) 20000,(20000,20,200,2000,Two,2,Second)\n" + "3 Third (30,300,3000,Three) 30000,(30000,30,300,3000,Three,3,Third)\n";
    compareResultAsTuples(result, expected);
}
Also used: POJO (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO), ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), Tuple7 (org.apache.flink.api.java.tuple.Tuple7), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Test (org.junit.Test)
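
In this example the "*" wildcard expression selects all (flattened) fields of both the POJO and the tuple as the join key, and without a .with(...) the join emits Tuple2 pairs by default. A hedged sketch of chaining an explicit JoinFunction to project a custom result instead, reusing ds1 and ds2 from the test above; the result type and formatting here are illustrative, not part of the test:

// import assumed available in the test class file
import org.apache.flink.api.common.functions.JoinFunction;

// Illustrative variant: project the matched pairs into a String instead of a Tuple2.
DataSet<String> projectedDs =
        ds1.join(ds2)
                .where("*")
                .equalTo("*")
                .with(new JoinFunction<POJO, Tuple7<Long, Integer, Integer, Long, String, Integer, String>, String>() {
                    @Override
                    public String join(POJO pojo, Tuple7<Long, Integer, Integer, Long, String, Integer, String> t) {
                        // relies on the toString() of both sides for the output format
                        return pojo + "," + t;
                    }
                });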

Example 14 with Tuple7

Use of org.apache.flink.api.java.tuple.Tuple7 in project flink by apache.

From the class OuterJoinITCase, method testNestedIntoTupleIntoPojo.

@Test
public void testNestedIntoTupleIntoPojo() throws Exception {
    /*
     * nested into tuple into pojo
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds1 = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> joinDs =
            ds1.fullOuterJoin(ds2)
                    .where("nestedTupleWithCustom.f0", "nestedTupleWithCustom.f1.myInt", "nestedTupleWithCustom.f1.myLong")
                    .equalTo("f2", "f3", "f4")
                    .with(new ProjectBothFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>());
    env.setParallelism(1);
    List<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> result = joinDs.collect();
    String expected = "1 First (10,100,1000,One) 10000,(1,First,10,100,1000,One,10000)\n" + "2 Second (20,200,2000,Two) 20000,(2,Second,20,200,2000,Two,20000)\n" + "3 Third (30,300,3000,Three) 30000,(3,Third,30,300,3000,Three,30000)\n";
    compareResultAsTuples(result, expected);
}
Also used: POJO (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO), ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), Tuple7 (org.apache.flink.api.java.tuple.Tuple7), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Test (org.junit.Test)
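
The expression keys used across these examples ("number", "nestedPojo.longNumber", "nestedTupleWithCustom.f0", "nestedTupleWithCustom.f1.myInt", "nestedTupleWithCustom.f1.myLong") imply roughly the following POJO shape. This is a sketch reconstructed from the key expressions and the printed output, not a copy of CollectionDataSets.POJO, so names and details beyond the keyed fields may differ:

// import assumed available in the test class file
import org.apache.flink.api.java.tuple.Tuple2;

// Sketch of the POJO shape implied by the expression keys in these tests.
// Field names come from the key expressions; types are inferred from the tuple
// fields they are matched against.
public static class POJO {
    public int number;                                        // matched against Integer tuple fields such as f0
    public Tuple2<Integer, CustomType> nestedTupleWithCustom; // keys: f0, f1.myInt, f1.myLong
    public NestedPojo nestedPojo;                             // key: nestedPojo.longNumber, matched against f6 (Long)
    // the printed form "1 First (10,100,1000,One) 10000" suggests an additional String field

    public POJO() {
        // Flink POJOs need a public no-argument constructor
    }
}

public static class NestedPojo {
    public long longNumber;

    public NestedPojo() {
    }
}

public static class CustomType {
    public int myInt;
    public long myLong;
    public String myString;

    public CustomType() {
    }
}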

Example 15 with Tuple7

Use of org.apache.flink.api.java.tuple.Tuple7 in project flink by apache.

From the class CoGroupITCase, method testCoGroupFieldSelectorAndKeySelector.

@Test
public void testCoGroupFieldSelectorAndKeySelector() throws Exception {
    /*
     * CoGroup field-selector (expression keys) + key selector function
     * The key selector is simple here
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where(new KeySelector1()).equalTo(6).with(new CoGroup2());
    List<CustomType> result = coGroupDs.collect();
    String expected = "-1,20000,Flink\n" + "-1,10000,Flink\n" + "-1,30000,Flink\n";
    compareResultAsText(result, expected);
}
Also used: CustomType (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType), POJO (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO), ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), Tuple7 (org.apache.flink.api.java.tuple.Tuple7), Test (org.junit.Test)
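
KeySelector1 and CoGroup2 are inner classes of the test and are not reproduced on this page. The following is a hedged sketch of implementations that would be consistent with .equalTo(6) (the Long field of the Tuple7) and with expected lines such as "-1,20000,Flink"; the actual Flink test classes may differ, and the field accesses assume the POJO/CustomType sketch shown earlier:

// imports assumed available in the test class file
import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple7;
import org.apache.flink.util.Collector;

// Illustrative key selector: extracts a long key from the POJO side so that it can be
// matched against tuple field 6 (a Long).
public static class KeySelector1 implements KeySelector<POJO, Long> {
    @Override
    public Long getKey(POJO value) {
        return value.nestedPojo.longNumber;
    }
}

// Illustrative co-group function: for every tuple on the second input it emits a
// CustomType whose fields match the expected output lines ("-1,<long>,Flink").
public static class CoGroup2 implements CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType> {
    @Override
    public void coGroup(Iterable<POJO> first,
                        Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
                        Collector<CustomType> out) {
        for (Tuple7<Integer, String, Integer, Integer, Long, String, Long> t : second) {
            CustomType c = new CustomType();
            c.myInt = -1;
            c.myLong = t.f6;
            c.myString = "Flink";
            out.collect(c);
        }
    }
}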

Aggregations

ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 16
Tuple7 (org.apache.flink.api.java.tuple.Tuple7): 16
Test (org.junit.Test): 16
POJO (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.POJO): 15
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 12
CustomType (org.apache.flink.test.javaApiOperators.util.CollectionDataSets.CustomType): 3
Plan (org.apache.flink.api.common.Plan): 1
FieldList (org.apache.flink.api.common.operators.util.FieldList): 1
DiscardingOutputFormat (org.apache.flink.api.java.io.DiscardingOutputFormat): 1
Channel (org.apache.flink.optimizer.plan.Channel): 1
DualInputPlanNode (org.apache.flink.optimizer.plan.DualInputPlanNode): 1
OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan): 1
SinkPlanNode (org.apache.flink.optimizer.plan.SinkPlanNode): 1
IdentityCoGrouper (org.apache.flink.optimizer.testfunctions.IdentityCoGrouper): 1