use of com.hazelcast.jet.JetInstance in project hazelcast-jet-reference-manual by hazelcast.
the class Configuration method s1.
static void s1() {
    // tag::s1[]
    JetConfig config = new JetConfig();
    // limit the number of cooperative worker threads on this member to 2
    config.getInstanceConfig().setCooperativeThreadCount(2);
    JetInstance jet = Jet.newJetInstance(config);
    // end::s1[]
}
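For context, a minimal sketch of submitting a trivial job to the instance configured above. The pipeline, the "input"/"output" list names, and the printing step are illustrative additions rather than part of the original snippet; they assume the same pre-4.0 Jet pipeline API (drawFrom/drainTo) used elsewhere on this page.
// illustrative only: build a pass-through pipeline and run it on the configured instance
Pipeline p = Pipeline.create();
p.drawFrom(Sources.<String>list("input"))   // hypothetical IList source
 .drainTo(Sinks.list("output"));            // hypothetical IList sink
jet.getList("input").add("hello jet");
jet.newJob(p).join();                       // submit the job and wait for completion
jet.getList("output").forEach(System.out::println);
Jet.shutdownAll();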
use of com.hazelcast.jet.JetInstance in project gora by apache.
the class LogAnalyticsJet method main.
/**
 * In the main method, pageviews are fetched through the Jet source connector,
 * grouped by URL and day, and a counting aggregator computes the daily
 * pageview totals. The result is written through the Jet sink connector to a
 * Gora-compatible data store.
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    inStore = DataStoreFactory.getDataStore(Long.class, Pageview.class, conf);
    outStore = DataStoreFactory.getDataStore(String.class, MetricDatum.class, conf);
    Query<Long, Pageview> query = inStore.newQuery();
    JetEngine<Long, Pageview, String, MetricDatum> jetEngine = new JetEngine<>();
    Pipeline p = Pipeline.create();
    p.drawFrom(jetEngine.createDataSource(inStore, query))
        // group the pageviews by URL
        .groupingKey(e -> e.getValue().getUrl().toString())
        // within each URL group, count the pageviews per day
        .aggregate(groupingBy(e -> getDay(e.getValue().getTimestamp()), counting()))
        .map(e -> {
            MetricDatum metricDatum = new MetricDatum();
            String url = e.getKey();
            for (Map.Entry<Long, Long> item : e.getValue().entrySet()) {
                long timeStamp = item.getKey();
                long sum = item.getValue();
                metricDatum.setTimestamp(timeStamp);
                metricDatum.setMetric(sum);
            }
            metricDatum.setMetricDimension(url);
            return new JetInputOutputFormat<String, MetricDatum>(url + "_" + "ip", metricDatum);
        })
        .peek()
        .drainTo(jetEngine.createDataSink(outStore));
    JetInstance jet = Jet.newJetInstance();
    try {
        jet.newJob(p).join();
    } finally {
        Jet.shutdownAll();
    }
}
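The getDay helper used in the aggregation step is not shown here. A plausible sketch, assuming the Pageview timestamp is in epoch milliseconds and the helper truncates it to the start of its day (java.time.Instant and java.time.temporal.ChronoUnit imports implied):
// hypothetical helper: truncate an epoch-millis timestamp to the start of its day
private static long getDay(long timestampMillis) {
    return Instant.ofEpochMilli(timestampMillis).truncatedTo(ChronoUnit.DAYS).toEpochMilli();
}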
use of com.hazelcast.jet.JetInstance in project gora by apache.
the class JetTest method jetWordCount.
@Test
public void jetWordCount() throws GoraException {
    dataStoreOut = DataStoreFactory.getDataStore(Long.class, ResultPageView.class, utility.getConfiguration());
    Query<Long, ResultPageView> query = dataStoreOut.newQuery();
    JetEngine<Long, ResultPageView, Long, ResultPageView> jetEngine = new JetEngine<>();
    Pattern delimiter = Pattern.compile("\\W+");
    Pipeline p = Pipeline.create();
    p.drawFrom(jetEngine.createDataSource(dataStoreOut, query))
        // split each stored URL into words
        .flatMap(e -> traverseArray(delimiter.split(e.getValue().getUrl().toString())))
        .filter(word -> !word.isEmpty())
        // count the occurrences of each word
        .groupingKey(wholeItem())
        .aggregate(counting())
        .drainTo(Sinks.map("COUNTS"));
    JetInstance jet = Jet.newJetInstance();
    jet.newJob(p).join();
    IMap<String, Long> counts = jet.getMap("COUNTS");
    assertEquals(3L, (long) counts.get("the"));
}
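The assertion presupposes that the store was seeded so that the word "the" occurs three times across the stored URLs. A sketch of such setup, where the ResultPageView setters and values are assumed (following the usual Gora generated-bean pattern) rather than taken from the original test:
// illustrative setup: store a URL in which "the" occurs three times
ResultPageView view = new ResultPageView();
view.setUrl("the brown fox jumped over the fence near the river");
view.setIp("127.0.0.1");
view.setTimestamp(123L);
dataStoreOut.put(1L, view);
dataStoreOut.flush();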
use of com.hazelcast.jet.JetInstance in project beam by apache.
the class JetRunner method run.
private JetPipelineResult run(DAG dag) {
    startClusterIfNeeded(options);
    // todo: we use single client for each job, it might be better to have a shared client with refcount
    JetInstance jet = getJetInstance(options);
    Job job = jet.newJob(dag, getJobConfig(options));
    IMap<String, MetricUpdates> metricsAccumulator = jet.getMap(JetMetricsContainer.getMetricsMapName(job.getId()));
    JetPipelineResult pipelineResult = new JetPipelineResult(job, metricsAccumulator);
    CompletableFuture<Void> completionFuture = job.getFuture().whenCompleteAsync((r, f) -> {
        pipelineResult.freeze(f);
        metricsAccumulator.destroy();
        jet.shutdown();
        stopClusterIfNeeded(options);
    });
    pipelineResult.setCompletionFuture(completionFuture);
    return pipelineResult;
}
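For orientation, a minimal sketch of how user code reaches this run method: a Beam pipeline configured to use the Jet runner, where the transform chain is purely illustrative and cluster-location options are omitted (Pipeline here is Beam's org.apache.beam.sdk.Pipeline, not Jet's):
PipelineOptions options = PipelineOptionsFactory.create();
options.setRunner(JetRunner.class);                 // use the Jet runner from beam-runners-jet
Pipeline pipeline = Pipeline.create(options);
pipeline.apply(Create.of("a", "b", "a"))
        .apply(Count.perElement());
pipeline.run().waitUntilFinish();                   // the runner translates the pipeline to a DAG and submits it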