Use of edu.iu.dsc.tws.tset.env.BatchEnvironment in project twister2 by DSC-SPIDAL:
class TSetAllReduceExample, method execute.
/**
 * Builds and runs a batch TSet all-reduce pipeline: a source of int arrays is
 * all-reduced with element-wise addition, and the sink logs and verifies the
 * reduced result.
 *
 * @param workerEnv the per-worker environment supplied by the twister2 runtime
 */
@Override
public void execute(WorkerEnvironment workerEnv) {
  super.execute(workerEnv);
  BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);
  // Source parallelism comes from task stage 0. (The stage-1 value was
  // previously read into an unused local; all-reduce sinks run at the same
  // parallelism as the link, so it is not needed here.)
  int srcPara = jobParameters.getTaskStages().get(0);
  SourceTSet<int[]> source =
      env.createSource(new TestBaseSource(), srcPara).setName("Source");

  // All-reduce combines partial results by element-wise integer addition;
  // both input arrays are assumed to have equal length.
  AllReduceTLink<int[]> reduce = source.allReduce((left, right) -> {
    int[] sum = new int[left.length];
    for (int i = 0; i < left.length; i++) {
      sum[i] = left[i] + right[i];
    }
    return sum;
  });

  // Sink records the reduced array and verifies it against the expected
  // ALLREDUCE result; verification failures are logged, not rethrown.
  SinkTSet<int[]> sink = reduce.sink(value -> {
    experimentData.setOutput(value);
    try {
      LOG.info("Results " + Arrays.toString(value));
      verify(OperationNames.ALLREDUCE);
    } catch (VerificationException e) {
      LOG.info("Exception Message : " + e.getMessage());
    }
    return true;
  });
  env.run(sink);
}
Use of edu.iu.dsc.tws.tset.env.BatchEnvironment in project twister2 by DSC-SPIDAL:
class TSetFileAccessExample, method execute.
/**
 * Initializes the batch TSet environment. The original file read/write
 * pipeline (data generation, FileSource, partition, FileSink) is disabled
 * below, so only the environment setup currently runs.
 */
@Override
public void execute(WorkerEnvironment workerEnv) {
super.execute(workerEnv);
// NOTE(review): 'env' is created for its initialization side effects only;
// the commented-out pipeline that would use it has not been migrated to the
// new BatchEnvironment API yet.
BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);
// Disabled legacy pipeline (pre-BatchEnvironment API): worker 0 generates
// text input files, then a FileSource is partitioned one-to-one into a
// FileSink that overwrites the output directory.
/* super.execute(tc);
String inputDirectory = config.getStringValue(Constants.ARGS_FNAME,
"/tmp/twister2");
int numFiles = config.getIntegerValue(Constants.ARGS_WORKERS, 4);
int size = config.getIntegerValue(Constants.ARGS_SIZE, 1000);
String input = inputDirectory + "/input";
String output = inputDirectory + "/output";
if (workerId == 0) {
try {
new File(input).mkdirs();
new File(output).mkdirs();
DataGenerator.generateData("txt", new Path(input),
numFiles, size, 10);
} catch (IOException e) {
throw new RuntimeException("Failed to create data: " + input);
}
}
BatchSourceTSet<String> textSource = tc.createSource(new FileSource<>(
new SharedTextInputPartitioner(new Path(input))), jobParameters.getTaskStages().get(0));
textSource.partition(new OneToOnePartitioner<>()).sink(
new FileSink<>(new TextOutputWriter(
FileSystem.WriteMode.OVERWRITE,
new Path(output))), jobParameters.getTaskStages().get(0)); */
}
Use of edu.iu.dsc.tws.tset.env.BatchEnvironment in project twister2 by DSC-SPIDAL:
class TSetKeyedGatherExample, method execute.
/**
 * Initializes the batch TSet environment. The keyed-gather (groupBy) pipeline
 * is disabled below, so only the environment setup currently runs.
 */
@Override
public void execute(WorkerEnvironment workerEnv) {
super.execute(workerEnv);
// NOTE(review): 'env' is created for its initialization side effects only;
// the commented-out pipeline below still targets the old tSetBuilder API and
// has not been migrated to BatchEnvironment.
BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);
// set the parallelism of source to task stage 0
// Disabled legacy pipeline: source -> groupBy(load-balance partitioner,
// identity selector) -> sink that records and verifies a REDUCE result.
/*TSet<int[]> source = tSetBuilder.createSource(new TestBaseSource()).setName("Source").
setParallelism(jobParameters.getTaskStages().get(0));
TSet<int[]> reduce = source.groupBy(new LoadBalancePartitioner<>(), new IdentitySelector<>()).
setParallelism(10);
reduce.sink(new Sink<int[]>() {
@Override
public boolean add(int[] value) {
experimentData.setOutput(value);
try {
verify(OperationNames.REDUCE);
} catch (VerificationException e) {
LOG.info("Exception Message : " + e.getMessage());
}
return true;
}
@Override
public void prepare(TSetContext context) {
}
});
*/
}
Use of edu.iu.dsc.tws.tset.env.BatchEnvironment in project twister2 by DSC-SPIDAL:
class PartitionMtoNExample, method execute.
/**
 * Runs the M-to-N partition test twice: once fanning out to more target
 * partitions than source instances, once collapsing to a single target.
 * M = source parallelism (PARALLELISM), N = partition target count (n).
 */
@Override
public void execute(WorkerEnvironment workerEnv) {
BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);
int n = 4;
SourceTSet<Integer> src = dummySource(env, COUNT, PARALLELISM);
// test M < N
runPartition(src, n);
// test M > N -- n = 1 exercises the opposite case (the original comment
// duplicated "M < N" here, which contradicts setting n to 1 below)
n = 1;
runPartition(src, n);
}
Use of edu.iu.dsc.tws.tset.env.BatchEnvironment in project twister2 by DSC-SPIDAL:
class SetSchemaExample, method execute.
/**
 * Demonstrates schema propagation through a TSet pipeline. Each stage logs
 * the input/output schemas it sees, both with the default schemas and after
 * an explicit withSchema(...) override.
 *
 * @param workerEnv the per-worker environment supplied by the twister2 runtime
 */
@Override
public void execute(WorkerEnvironment workerEnv) {
  BatchEnvironment env = TSetEnvironment.initBatch(workerEnv);

  // Source with 2 instances; each emits exactly one integer (1) and then
  // reports exhaustion. prepare() logs the schemas visible at the source.
  SourceTSet<Integer> src = env.createSource(new BaseSourceFunc<Integer>() {
    private int emitted = 0;

    @Override
    public void prepare(TSetContext ctx) {
      super.prepare(ctx);
      LOG.info("schemas0: " + ctx.getInputSchema() + " -> " + ctx.getOutputSchema());
    }

    @Override
    public boolean hasNext() {
      return emitted == 0;
    }

    @Override
    public Integer next() {
      return ++emitted;
    }
  }, 2).setName("src");

  // Default schema vs. explicit INTEGER schema on the same source.
  src.direct().forEach(v -> LOG.info("out0: " + v));
  src.withSchema(PrimitiveSchemas.INTEGER).direct().forEach(v -> LOG.info("out1: " + v));

  // All-reduce the integers, then map to String; prepare() logs the schemas
  // seen by the map stage.
  ComputeTSet<String> mapped = src.allReduce(Integer::sum)
      .map(new BaseMapFunc<Integer, String>() {
        @Override
        public void prepare(TSetContext ctx) {
          super.prepare(ctx);
          LOG.info("schemas1: " + ctx.getInputSchema() + " -> " + ctx.getOutputSchema());
        }

        @Override
        public String map(Integer input) {
          return input.toString();
        }
      });

  // Default schema vs. explicit STRING schema on the mapped set.
  mapped.direct().forEach(v -> LOG.info("out2: " + v));
  mapped.withSchema(PrimitiveSchemas.STRING).direct().forEach(v -> LOG.info("out3: " + v));

  // Turn each string back into a (string, int) tuple; prepare() logs the
  // schemas seen by the keyed stage.
  KeyedTSet<String, Integer> tuples =
      mapped.mapToTuple(new BaseMapFunc<String, Tuple<String, Integer>>() {
        @Override
        public void prepare(TSetContext ctx) {
          super.prepare(ctx);
          LOG.info("schemas2: " + ctx.getInputSchema() + " -> " + ctx.getOutputSchema());
        }

        @Override
        public Tuple<String, Integer> map(String input) {
          return new Tuple<>(input, Integer.parseInt(input));
        }
      });

  // Default keyed schema vs. explicit (STRING, INTEGER) keyed schema.
  tuples.keyedDirect().forEach(v -> LOG.info("out4: " + v));
  tuples.withSchema(new KeyedSchema(MessageTypes.STRING, MessageTypes.INTEGER))
      .keyedDirect().forEach(v -> LOG.info("out5: " + v));
}
Aggregations