Search in sources:

Example 1 with HasOffsetRanges

use of org.apache.spark.streaming.kafka010.HasOffsetRanges in project lambda-arch by apssouza22.

the class TrafficOffsetCommitCallback method commitOffset.

/**
 * Commit the ack to Kafka after processing has completed
 *
 * @param directKafkaStream the direct Kafka input stream whose offsets are committed
 */
private void commitOffset(JavaInputDStream<ConsumerRecord<String, IoTData>> directKafkaStream) {
    directKafkaStream.foreachRDD((JavaRDD<ConsumerRecord<String, IoTData>> trafficRdd) -> {
        if (!trafficRdd.isEmpty()) {
            // Offset ranges covering the records in the batch just processed
            OffsetRange[] offsetRanges = ((HasOffsetRanges) trafficRdd.rdd()).offsetRanges();
            CanCommitOffsets canCommitOffsets = (CanCommitOffsets) directKafkaStream.inputDStream();
            // Commit asynchronously; the callback reports success or failure
            canCommitOffsets.commitAsync(offsetRanges, new TrafficOffsetCommitCallback());
        }
    });
}
Also used : OffsetRange(org.apache.spark.streaming.kafka010.OffsetRange) CanCommitOffsets(org.apache.spark.streaming.kafka010.CanCommitOffsets) IoTData(com.apssouza.iot.dto.IoTData) HasOffsetRanges(org.apache.spark.streaming.kafka010.HasOffsetRanges) JavaRDD(org.apache.spark.api.java.JavaRDD)
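
The body of TrafficOffsetCommitCallback is not shown in this result. A minimal sketch of what such a callback could look like, assuming it implements Kafka's OffsetCommitCallback (the type commitAsync expects) and only logs the outcome; the real class in lambda-arch may differ:

import java.util.Map;

import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.common.TopicPartition;

// Hypothetical sketch, not the project's actual implementation
final class TrafficOffsetCommitCallback implements OffsetCommitCallback {

    @Override
    public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) {
        if (exception != null) {
            // The commit failed; the offsets stay uncommitted and are retried with a later batch
            System.err.println("Offset commit failed: " + exception.getMessage());
        } else {
            offsets.forEach((partition, metadata) ->
                    System.out.println("Committed " + partition + " at offset " + metadata.offset()));
        }
    }
}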

Example 2 with HasOffsetRanges

use of org.apache.spark.streaming.kafka010.HasOffsetRanges in project lambda-arch by apssouza22.

the class TrafficOffsetCommitCallback method commitOffset.

/**
 * Commit the ack to Kafka after processing has completed.
 * This is our fault-tolerance implementation.
 *
 * @param directKafkaStream the direct Kafka input stream whose offsets are committed
 */
private void commitOffset(JavaInputDStream<ConsumerRecord<String, IoTData>> directKafkaStream) {
    directKafkaStream.foreachRDD((JavaRDD<ConsumerRecord<String, IoTData>> trafficRdd) -> {
        if (!trafficRdd.isEmpty()) {
            // Offset ranges covering the records in the batch just processed
            OffsetRange[] offsetRanges = ((HasOffsetRanges) trafficRdd.rdd()).offsetRanges();
            CanCommitOffsets canCommitOffsets = (CanCommitOffsets) directKafkaStream.inputDStream();
            // Commit asynchronously; the callback reports success or failure
            canCommitOffsets.commitAsync(offsetRanges, new TrafficOffsetCommitCallback());
        }
    });
}
Also used : OffsetRange(org.apache.spark.streaming.kafka010.OffsetRange) CanCommitOffsets(org.apache.spark.streaming.kafka010.CanCommitOffsets) IoTData(com.apssouza.iot.common.dto.IoTData) HasOffsetRanges(org.apache.spark.streaming.kafka010.HasOffsetRanges) JavaRDD(org.apache.spark.api.java.JavaRDD)
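
Neither example shows how directKafkaStream is built. A sketch of a typical construction with the kafka010 API; the broker address, topic name, group ID, and IoTDataDeserializer class are assumptions for illustration, not taken from the project:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

static JavaInputDStream<ConsumerRecord<String, IoTData>> buildStream(JavaStreamingContext streamingContext) {
    Map<String, Object> kafkaParams = new HashMap<>();
    kafkaParams.put("bootstrap.servers", "localhost:9092"); // assumed broker address
    kafkaParams.put("key.deserializer", StringDeserializer.class);
    kafkaParams.put("value.deserializer", IoTDataDeserializer.class); // hypothetical deserializer for IoTData
    kafkaParams.put("group.id", "iot-traffic"); // assumed group ID
    kafkaParams.put("auto.offset.reset", "latest");
    // Auto-commit stays off so commitOffset above controls exactly when offsets are acknowledged
    kafkaParams.put("enable.auto.commit", false);
    return KafkaUtils.createDirectStream(
            streamingContext,
            LocationStrategies.PreferConsistent(),
            ConsumerStrategies.<String, IoTData>Subscribe(
                    Collections.singletonList("iot-data-event"), kafkaParams)); // assumed topic name
}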

Example 3 with HasOffsetRanges

use of org.apache.spark.streaming.kafka010.HasOffsetRanges in project beijingThirdPeriod by weidongcao.

the class SparkStreamingKafkaDemo method main.

public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("demo").setMaster("local[4]");
    JavaStreamingContext streamingContext = new JavaStreamingContext(conf, Durations.seconds(8));
    Map<String, Object> kafkaParams = new HashMap<>();
    // Address and port the Kafka broker listens on
    kafkaParams.put("bootstrap.servers", "cm02.spark.com:9092");
    // Deserializer for record keys (strings decoded as UTF-8)
    kafkaParams.put("key.deserializer", StringDeserializer.class);
    // Deserializer for record values (strings decoded as UTF-8)
    kafkaParams.put("value.deserializer", StringDeserializer.class);
    // Consumer group ID; any value works for this demo
    kafkaParams.put("group.id", "jis");
    // Where to start when no committed offset exists: "latest" (newest) or "earliest" (oldest);
    // the old consumer's "largest"/"smallest" values are not valid here
    kafkaParams.put("auto.offset.reset", "latest");
    // If true, the consumer would periodically commit each partition's offset automatically
    // (to Kafka with this client, not ZooKeeper); disabled here for manual offset handling
    kafkaParams.put("enable.auto.commit", false);
    // Topics to subscribe to
    Collection<String> topics = Arrays.asList("topicA", "topicB");
    // Offset ranges for the batch read below (note: topic "test", unrelated to the subscribed topics)
    OffsetRange[] offsetRanges = { OffsetRange.create("test", 0, 0, 3), OffsetRange.create("test", 1, 0, 3) };
    final JavaInputDStream<ConsumerRecord<String, String>> stream = KafkaUtils.createDirectStream(streamingContext, LocationStrategies.PreferConsistent(), ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams));
    // Batch (non-streaming) read of the given offset ranges; left unused in this demo
    JavaRDD<ConsumerRecord<String, String>> rdd = KafkaUtils.createRDD(streamingContext.sparkContext(), kafkaParams, offsetRanges, LocationStrategies.PreferConsistent());
    stream.foreachRDD((VoidFunction<JavaRDD<ConsumerRecord<String, String>>>) rdd1 -> {
        final OffsetRange[] offsetRanges1 = ((HasOffsetRanges) (rdd1.rdd())).offsetRanges();
        rdd1.foreachPartition((VoidFunction<Iterator<ConsumerRecord<String, String>>>) consumerRecordIterator -> {
            // Each partition of the RDD maps one-to-one to a Kafka topic partition
            OffsetRange o = offsetRanges1[TaskContext.get().partitionId()];
            System.out.println(o.topic() + " " + o.partition() + " " + o.fromOffset() + " " + o.untilOffset());
        });
    });
    // stream.mapToPair((PairFunction<ConsumerRecord<String, String>, String, String>) record -> new Tuple2<>(record.key(), record.value()));
    // Without start() the foreachRDD above never runs
    streamingContext.start();
    streamingContext.awaitTermination();
}
Also used : org.apache.spark.streaming.kafka010(org.apache.spark.streaming.kafka010) java.util(java.util) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) TaskContext(org.apache.spark.TaskContext) JavaInputDStream(org.apache.spark.streaming.api.java.JavaInputDStream) JavaStreamingContext(org.apache.spark.streaming.api.java.JavaStreamingContext) SparkConf(org.apache.spark.SparkConf) Durations(org.apache.spark.streaming.Durations) VoidFunction(org.apache.spark.api.java.function.VoidFunction) JavaRDD(org.apache.spark.api.java.JavaRDD)
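
With enable.auto.commit set to false, nothing in this demo ever commits the offsets it prints. A minimal sketch of closing that loop with the same CanCommitOffsets pattern as Examples 1 and 2, here using the single-argument commitAsync overload (no callback):

// Sketch: commit the ranges after the batch has been processed; runs on the driver
stream.foreachRDD(batch -> {
    if (!batch.isEmpty()) {
        OffsetRange[] ranges = ((HasOffsetRanges) batch.rdd()).offsetRanges();
        // ... process the batch here, then acknowledge it:
        ((CanCommitOffsets) stream.inputDStream()).commitAsync(ranges);
    }
});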

Aggregations

JavaRDD (org.apache.spark.api.java.JavaRDD) 3
CanCommitOffsets (org.apache.spark.streaming.kafka010.CanCommitOffsets) 2
HasOffsetRanges (org.apache.spark.streaming.kafka010.HasOffsetRanges) 2
OffsetRange (org.apache.spark.streaming.kafka010.OffsetRange) 2
IoTData (com.apssouza.iot.common.dto.IoTData) 1
IoTData (com.apssouza.iot.dto.IoTData) 1
java.util (java.util) 1
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord) 1
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer) 1
SparkConf (org.apache.spark.SparkConf) 1
TaskContext (org.apache.spark.TaskContext) 1
VoidFunction (org.apache.spark.api.java.function.VoidFunction) 1
Durations (org.apache.spark.streaming.Durations) 1
JavaInputDStream (org.apache.spark.streaming.api.java.JavaInputDStream) 1
JavaStreamingContext (org.apache.spark.streaming.api.java.JavaStreamingContext) 1
org.apache.spark.streaming.kafka010 (org.apache.spark.streaming.kafka010) 1