Use of edu.iu.dsc.tws.tset.sets.batch.SourceTSet in project twister2 by DSC-SPIDAL.
The execute method of the class FullGraphRunExample:
@Override
public void execute(WorkerEnvironment workerEnv) {
  BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);
  SourceTSet<Integer> src = dummySource(env, COUNT, PARALLELISM);

  src.direct()
      .flatmap((FlatMapFunc<Integer, Object>) (integer, collector) -> LOG.info("dir= " + integer));

  src.reduce(Integer::sum)
      .flatmap((FlatMapFunc<Integer, Object>) (integer, collector) -> LOG.info("red= " + integer));
  // env.run();
}
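The dummySource helper is defined elsewhere in the example class and is not part of this snippet. A minimal sketch of what it plausibly looks like, assuming it wraps env.createSource with a counting SourceFunc (COUNT and PARALLELISM are constants of the surrounding class); the actual implementation in twister2 may differ:

// Hypothetical reconstruction of the dummySource helper used above.
private SourceTSet<Integer> dummySource(BatchEnvironment env, int count, int parallelism) {
  return env.createSource(new SourceFunc<Integer>() {
    private int current = 0;

    @Override
    public boolean hasNext() {
      return current < count;
    }

    @Override
    public Integer next() {
      return current++;
    }
  }, parallelism);
}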
Use of edu.iu.dsc.tws.tset.sets.batch.SourceTSet in project twister2 by DSC-SPIDAL.
The execute method of the class PartitionExample:
@Override
public void execute(WorkerEnvironment workerEnv) {
  BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);
  int start = env.getWorkerID() * 100;
  SourceTSet<Integer> src = dummySource(env, start, COUNT, PARALLELISM);

  LOG.info("test foreach");
  src.partition(new LoadBalancePartitioner<>())
      .forEach(i -> LOG.info("foreach: " + i));

  LOG.info("test map");
  src.partition(new LoadBalancePartitioner<>())
      .map(i -> i.toString() + "$$")
      .direct()
      .forEach(s -> LOG.info("map: " + s));

  LOG.info("test flat map");
  src.partition(new LoadBalancePartitioner<>())
      .flatmap((i, c) -> c.collect(i.toString() + "##"))
      .direct()
      .forEach(s -> LOG.info("flat:" + s));

  LOG.info("test compute");
  src.partition(new LoadBalancePartitioner<>())
      .compute((ComputeFunc<Iterator<Integer>, Integer>) input -> {
        int sum = 0;
        while (input.hasNext()) {
          sum += input.next();
        }
        return sum;
      })
      .direct()
      .forEach(i -> LOG.info("comp: " + i));

  LOG.info("test computec");
  src.partition(new LoadBalancePartitioner<>())
      .compute((ComputeCollectorFunc<Iterator<Integer>, String>) (input, output) -> {
        int sum = 0;
        while (input.hasNext()) {
          sum += input.next();
        }
        output.collect("sum" + sum);
      })
      .direct()
      .forEach(s -> LOG.info("computec: " + s));
}
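PartitionExample calls a four-argument dummySource(env, start, COUNT, PARALLELISM) so that each worker emits a distinct range of integers (start = workerID * 100). A sketch of that overload, under the same assumptions as the one above:

// Hypothetical overload with a start offset, mirroring the call
// dummySource(env, start, COUNT, PARALLELISM) in the snippet.
private SourceTSet<Integer> dummySource(BatchEnvironment env, int start, int count, int parallelism) {
  return env.createSource(new SourceFunc<Integer>() {
    private int current = start;

    @Override
    public boolean hasNext() {
      return current < start + count;
    }

    @Override
    public Integer next() {
      return current++;
    }
  }, parallelism);
}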
Use of edu.iu.dsc.tws.tset.sets.batch.SourceTSet in project twister2 by DSC-SPIDAL.
The execute method of the class TSetCommunicationExample:
@Override
public void execute(WorkerEnvironment workerEnv) {
  BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);
  LOG.info(String.format("Hello from worker %d", env.getWorkerID()));

  SourceTSet<Integer> sourceX = env.createSource(new SourceFunc<Integer>() {
    private int count = 0;

    @Override
    public boolean hasNext() {
      return count < 10;
    }

    @Override
    public Integer next() {
      return count++;
    }
  }, 4);

  sourceX.direct()
      .compute((itr, collector) -> {
        itr.forEachRemaining(i -> collector.collect(i * 5));
      })
      .direct()
      .compute((itr, collector) -> {
        itr.forEachRemaining(i -> collector.collect((int) i + 2));
      })
      .reduce((i1, i2) -> (int) i1 + (int) i2)
      .forEach(i -> LOG.info("SUM=" + i));
}
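Since each of the 4 source instances emits the integers 0..9 and the two compute stages map each value i to i * 5 + 2 before the reduce, the logged result can be checked by hand:

// Plain-Java check of the value the pipeline above should produce.
int expected = 0;
for (int p = 0; p < 4; p++) {     // 4 parallel source instances
  for (int i = 0; i < 10; i++) {  // each emits 0..9
    expected += i * 5 + 2;        // the two compute stages combined
  }
}
// expected == 980, so the sink should log "SUM=980".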
Use of edu.iu.dsc.tws.tset.sets.batch.SourceTSet in project twister2 by DSC-SPIDAL.
The execute method of the class TSetGatherExample:
@Override
public void execute(WorkerEnvironment workerEnv) {
  super.execute(workerEnv);
  BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);

  // set the parallelism of source to task stage 0
  int srcPara = jobParameters.getTaskStages().get(0);
  int sinkPara = jobParameters.getTaskStages().get(1);

  SourceTSet<int[]> source = env.createSource(new TestBaseSource(), srcPara).setName("Source");
  GatherTLink<int[]> gather = source.gather();

  SinkTSet<Iterator<Tuple<Integer, int[]>>> sink =
      gather.sink((SinkFunc<Iterator<Tuple<Integer, int[]>>>) val -> {
        int[] value = null;
        while (val.hasNext()) {
          value = val.next().getValue();
        }
        experimentData.setOutput(value);
        LOG.info("Results " + Arrays.toString(value));
        try {
          verify(OperationNames.GATHER);
        } catch (VerificationException e) {
          LOG.info("Exception Message : " + e.getMessage());
        }
        return true;
      });

  env.run(sink);
}
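Note that the sink above overwrites value on each iteration, so it keeps only the last gathered tuple. If every gathered partition were needed, the iterator could be drained into a list instead; a sketch:

// Sketch: retain all gathered values rather than only the last one.
List<int[]> gathered = new ArrayList<>();
while (val.hasNext()) {
  Tuple<Integer, int[]> t = val.next();  // key is presumably the producing task's index
  gathered.add(t.getValue());
}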
Use of edu.iu.dsc.tws.tset.sets.batch.SourceTSet in project twister2 by DSC-SPIDAL.
The execute method of the class HadoopTSet:
@Override
public void execute(Config config, JobAPI.Job job, IWorkerController workerController,
    IPersistentVolume persistentVolume, IVolatileVolume volatileVolume) {
  int workerId = workerController.getWorkerInfo().getWorkerID();
  WorkerEnvironment workerEnv =
      WorkerEnvironment.init(config, job, workerController, persistentVolume, volatileVolume);
  BatchEnvironment tSetEnv = TSetEnvironment.initBatch(workerEnv);

  Configuration configuration = new Configuration();
  configuration.addResource(new Path(HdfsDataContext.getHdfsConfigDirectory(config)));
  configuration.set(TextInputFormat.INPUT_DIR, "/input4");

  SourceTSet<String> source = tSetEnv.createHadoopSource(configuration, TextInputFormat.class, 4,
      new MapFunc<Tuple<LongWritable, Text>, String>() {
        @Override
        public String map(Tuple<LongWritable, Text> input) {
          return input.getKey().toString() + " : " + input.getValue().toString();
        }
      });

  SinkTSet<Iterator<String>> sink = source.direct().sink((SinkFunc<Iterator<String>>) value -> {
    while (value.hasNext()) {
      String next = value.next();
      LOG.info("Received value: " + next);
    }
    return true;
  });

  tSetEnv.run(sink);
}
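HadoopTSet implements twister2's IWorker, so it still needs a driver that submits it as a job. A minimal sketch of such an entry point, assuming the standard Twister2Job/Twister2Submitter submission path; the job name and resource values here are placeholders, not taken from the original example:

// Hypothetical submission entry point for the HadoopTSet worker.
public static void main(String[] args) {
  Config config = ResourceAllocator.loadConfig(new HashMap<>());
  Twister2Job twister2Job = Twister2Job.newBuilder()
      .setJobName("hadoop-tset")        // placeholder job name
      .setWorkerClass(HadoopTSet.class)
      .addComputeResource(1, 512, 4)    // 1 CPU, 512 MB, 4 workers (placeholders)
      .build();
  Twister2Submitter.submitJob(twister2Job, config);
}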