Use of org.apache.spark.api.java.JavaRDD in project cxf by apache.
The class StreamingService, method processStream:
private void processStream(AsyncResponse async, List<String> inputStrings) {
    try {
        SparkConf sparkConf = new SparkConf().setMaster("local[*]")
            .setAppName("JAX-RS Spark Connect " + SparkUtils.getRandomId());
        JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
        SparkStreamingOutput streamOut = new SparkStreamingOutput(jssc);
        SparkStreamingListener sparkListener = new SparkStreamingListener(streamOut);
        jssc.addStreamingListener(sparkListener);
        JavaDStream<String> receiverStream = null;
        if ("queue".equals(receiverType)) {
            Queue<JavaRDD<String>> rddQueue = new LinkedList<>();
            for (int i = 0; i < 30; i++) {
                rddQueue.add(jssc.sparkContext().parallelize(inputStrings));
            }
            receiverStream = jssc.queueStream(rddQueue);
        } else {
            receiverStream = jssc.receiverStream(new StringListReceiver(inputStrings));
        }
        JavaPairDStream<String, Integer> wordCounts = SparkUtils.createOutputDStream(receiverStream, false);
        wordCounts.foreachRDD(new OutputFunction(streamOut));
        jssc.start();
        executor.execute(new SparkJob(async, sparkListener));
    } catch (Exception ex) {
        // the compiler does not allow catching SparkException directly here,
        // so catch Exception and check the type at runtime
        if (ex instanceof SparkException) {
            async.cancel(60);
        } else {
            async.resume(new WebApplicationException(ex));
        }
    }
}
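SparkUtils.createOutputDStream and OutputFunction are helpers from the CXF demo and are not shown above. As a rough orientation only, a classic word-count pipeline over the same receiverStream, sketched against the Spark 2.x Java API rather than the CXF helper's actual implementation, might look like this:

    // Hypothetical stand-in for SparkUtils.createOutputDStream(receiverStream, false):
    // split each line into words, pair each word with 1, and sum the counts per word.
    // Requires java.util.Arrays and scala.Tuple2 in addition to the Spark streaming imports.
    JavaPairDStream<String, Integer> counts = receiverStream
        .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
        .mapToPair(word -> new Tuple2<>(word, 1))
        .reduceByKey(Integer::sum);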
Use of org.apache.spark.api.java.JavaRDD in project cxf by apache.
The class StreamingService, method processStreamOneWay:
private void processStreamOneWay(List<String> inputStrings) {
    try {
        SparkConf sparkConf = new SparkConf().setMaster("local[*]")
            .setAppName("JAX-RS Spark Connect OneWay " + SparkUtils.getRandomId());
        JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
        JavaDStream<String> receiverStream = null;
        if ("queue".equals(receiverType)) {
            Queue<JavaRDD<String>> rddQueue = new LinkedList<>();
            for (int i = 0; i < 30; i++) {
                rddQueue.add(jssc.sparkContext().parallelize(inputStrings));
            }
            receiverStream = jssc.queueStream(rddQueue);
        } else {
            receiverStream = jssc.receiverStream(new StringListReceiver(inputStrings));
        }
        JavaPairDStream<String, Integer> wordCounts = SparkUtils.createOutputDStream(receiverStream, false);
        wordCounts.foreachRDD(new PrintOutputFunction(jssc));
        jssc.start();
    } catch (Exception ex) {
        // ignore
    }
}
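StringListReceiver is another CXF demo class that is not shown above. A minimal sketch of such a custom receiver, assuming it only needs to push the supplied strings into Spark once (the actual CXF class may differ), could look like this:

    import java.util.List;
    import org.apache.spark.storage.StorageLevel;
    import org.apache.spark.streaming.receiver.Receiver;

    // Hypothetical sketch: a receiver that stores a fixed list of strings.
    public class StringListReceiver extends Receiver<String> {
        private final List<String> records;

        public StringListReceiver(List<String> records) {
            super(StorageLevel.MEMORY_ONLY());
            this.records = records;
        }

        @Override
        public void onStart() {
            // Push the records to Spark on a worker thread so onStart() returns quickly.
            new Thread(() -> {
                for (String record : records) {
                    store(record);
                }
            }).start();
        }

        @Override
        public void onStop() {
            // Nothing to clean up; the worker thread exits after storing the records.
        }
    }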
Use of org.apache.spark.api.java.JavaRDD in project rocketmq-externals by apache.
The class RocketMqUtilsTest, method testGetOffsets:
@Test
public void testGetOffsets() throws MQBrokerException, MQClientException,
        InterruptedException, UnsupportedEncodingException {
    Map<String, String> optionParams = new HashMap<>();
    optionParams.put(RocketMQConfig.NAME_SERVER_ADDR, NAME_SERVER);
    SparkConf sparkConf = new SparkConf().setAppName("JavaCustomReceiver").setMaster("local[*]");
    JavaStreamingContext sc = new JavaStreamingContext(sparkConf, new Duration(1000));
    List<String> topics = new ArrayList<>();
    topics.add(TOPIC_DEFAULT);
    LocationStrategy locationStrategy = LocationStrategy.PreferConsistent();
    JavaInputDStream<MessageExt> dStream = RocketMqUtils.createJavaMQPullStream(sc,
        UUID.randomUUID().toString(), topics, ConsumerStrategy.earliest(),
        false, false, false, locationStrategy, optionParams);

    // hold a reference to the current offset ranges, so it can be used downstream
    final AtomicReference<Map<TopicQueueId, OffsetRange[]>> offsetRanges = new AtomicReference<>();
    final Set<MessageExt> result = Collections.synchronizedSet(new HashSet<MessageExt>());

    dStream.transform(new Function<JavaRDD<MessageExt>, JavaRDD<MessageExt>>() {
        @Override
        public JavaRDD<MessageExt> call(JavaRDD<MessageExt> v1) throws Exception {
            Map<TopicQueueId, OffsetRange[]> offsets = ((HasOffsetRanges) v1.rdd()).offsetRanges();
            offsetRanges.set(offsets);
            return v1;
        }
    }).foreachRDD(new VoidFunction<JavaRDD<MessageExt>>() {
        @Override
        public void call(JavaRDD<MessageExt> messageExtJavaRDD) throws Exception {
            result.addAll(messageExtJavaRDD.collect());
        }
    });

    sc.start();
    long startTime = System.currentTimeMillis();
    boolean matches = false;
    while (!matches && System.currentTimeMillis() - startTime < 20000) {
        matches = MESSAGE_NUM == result.size();
        Thread.sleep(50);
    }
    sc.stop();
}
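The polling loop above bounds the test at roughly 20 seconds while it waits for the collected message count to reach MESSAGE_NUM. If bounding the run time were the only concern, JavaStreamingContext also offers a built-in timeout; a sketch of that alternative (not what this test does) would be:

    // Alternative: block until the streaming context terminates or 20 seconds elapse,
    // then stop it together with the underlying SparkContext, letting in-flight
    // batches finish (graceful stop).
    sc.awaitTerminationOrTimeout(20000);
    sc.stop(true, true);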