Flink: connecting two streams with connect, a Java example


package connect;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Date;
import java.util.Random;
import java.util.concurrent.TimeUnit;

/**
 * @Author you guess
 * @Date 2020/6/17 09:45
 * @Version 1.0
 * @Desc
 */
public class ConnectDataStreamTest {
    private static final Logger LOG = LoggerFactory.getLogger(ConnectDataStreamTest.class);
    private static final String[] TYPE = {"a", "b", "c", "d"};

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Custom source that emits one order record per second: (product name, quantity)
        DataStreamSource<Tuple2<String, Integer>> orderSource1 = env.addSource(new SourceFunction<Tuple2<String, Integer>>() {
            private volatile boolean isRunning = true;
            private final Random random = new Random();

            @Override
            public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
                while (isRunning) {
                    TimeUnit.SECONDS.sleep(1);
                    Tuple2<String, Integer> tuple2 = Tuple2.of(TYPE[random.nextInt(TYPE.length)], random.nextInt(10));
                    System.out.println(new Date() + ", orderSource1 emitted: " + tuple2);
                    ctx.collect(tuple2);
                }
            }

            @Override
            public void cancel() {
                isRunning = false;
            }

        }, "orderSource1");

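        // A second source with the same Tuple2 schema; connect does not require both sides to share a type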
        DataStreamSource<Tuple2<String, Integer>> orderSource2 = env.addSource(new SourceFunction<Tuple2<String, Integer>>() {
            private volatile boolean isRunning = true;
            private final Random random = new Random();

            @Override
            public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
                while (isRunning) {
                    TimeUnit.SECONDS.sleep(1);
                    Tuple2<String, Integer> tuple2 = Tuple2.of(TYPE[random.nextInt(TYPE.length)], random.nextInt(10));
                    System.out.println(new Date() + ", orderSource2 emitted: " + tuple2);
                    ctx.collect(tuple2);
                }
            }

            @Override
            public void cancel() {
                isRunning = false;
            }

        }, "orderSource2");


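        // connect keeps each stream's element type; flatMap1 handles elements from orderSource1,
        // flatMap2 handles elements from orderSource2, and both emit to the same downstream operator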
        orderSource1.connect(orderSource2).flatMap(

                /**
                 * @param <IN1> Type of the first input.
                 * @param <IN2> Type of the second input.
                 * @param <OUT> Output type.
                 */
                new CoFlatMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, Object>() {

                    /**
                     * This method is called for each element in the first of the connected streams.
                     *
                     * @param value The stream element
                     * @param out The collector to emit resulting elements to
                     * @throws Exception The function may throw exceptions which cause the streaming program
                     *                   to fail and go into recovery.
                     */
                    @Override
                    public void flatMap1(Tuple2<String, Integer> value, Collector<Object> out) throws Exception {
                        out.collect(value);
                    }

                    /**
                     * This method is called for each element in the second of the connected streams.
                     *
                     * @param value The stream element
                     * @param out The collector to emit resulting elements to
                     * @throws Exception The function may throw exceptions which cause the streaming program
                     *                   to fail and go into recovery.
                     */
                    @Override
                    public void flatMap2(Tuple2<String, Integer> value, Collector<Object> out) throws Exception {
                        out.collect(value);
                    }
                }).print();

        env.execute("Flink ConnectDataStreamTest by Java ");
    }
}

Output (the N> prefix is the index of the parallel subtask that printed the record):

Wed Jun 17 10:36:06 CST 2020, orderSource2 emitted: (a,4)
Wed Jun 17 10:36:06 CST 2020, orderSource1 emitted: (b,1)
2> (a,4)
11> (b,1)
Wed Jun 17 10:36:07 CST 2020, orderSource1 emitted: (b,7)
Wed Jun 17 10:36:07 CST 2020, orderSource2 emitted: (c,5)
3> (c,5)
12> (b,7)
Wed Jun 17 10:36:08 CST 2020, orderSource1 emitted: (d,0)
Wed Jun 17 10:36:08 CST 2020, orderSource2 emitted: (a,6)
4> (a,6)
1> (d,0)

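Both sources above emit the same Tuple2 type, so this particular job could just as well use union. The point of connect is that the two inputs may carry different element types. A minimal sketch (not part of the original example) that connects a hypothetical control stream of product names, built with env.fromElements, to orderSource1:

        // Hypothetical control stream: product names we want to react to
        DataStreamSource<String> controlSource = env.fromElements("a", "c");

        controlSource.connect(orderSource1).flatMap(
                new CoFlatMapFunction<String, Tuple2<String, Integer>, String>() {

                    @Override
                    public void flatMap1(String control, Collector<String> out) {
                        // Elements from the control stream (type String)
                        out.collect("control rule received: " + control);
                    }

                    @Override
                    public void flatMap2(Tuple2<String, Integer> order, Collector<String> out) {
                        // Elements from the order stream (type Tuple2<String, Integer>)
                        out.collect("order received: " + order);
                    }
                }).print();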

Using CoMapFunction:

        // Requires: import org.apache.flink.streaming.api.functions.co.CoMapFunction;
        orderSource1.connect(orderSource2).map(new CoMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, Object>() {
            @Override
            public Object map1(Tuple2<String, Integer> value) throws Exception {
                return value; // forward elements from the first stream unchanged
            }

            @Override
            public Object map2(Tuple2<String, Integer> value) throws Exception {
                return value; // forward elements from the second stream unchanged
            }
        }).print();
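
Unlike CoFlatMapFunction, CoMapFunction must return exactly one output element per input element. In practice the two streams are often keyed before being connected, so that records with the same key land on the same parallel subtask and can share keyed state (for example via a RichCoFlatMapFunction). A minimal sketch (not in the original post) that keys both sources by the product-name field, position 0 of the tuple:

        orderSource1.keyBy(0)
                .connect(orderSource2.keyBy(0))
                .flatMap(new CoFlatMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, String>() {

                    @Override
                    public void flatMap1(Tuple2<String, Integer> value, Collector<String> out) {
                        out.collect("keyed stream1: " + value);
                    }

                    @Override
                    public void flatMap2(Tuple2<String, Integer> value, Collector<String> out) {
                        out.collect("keyed stream2: " + value);
                    }
                }).print();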


Flink version 1.9.2, Java version 1.8
