use of org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor in project asterixdb by apache.
the class ThreadCountingOperatorDescriptor method testScanUnion.
@Test
public void testScanUnion() throws Exception {
    JobSpecification spec = new JobSpecification();

    DummySourceOperatorDescriptor ets1 = new DummySourceOperatorDescriptor(spec);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ets1, NC1_ID);

    DummySourceOperatorDescriptor ets2 = new DummySourceOperatorDescriptor(spec);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ets2, NC1_ID);

    DummySourceOperatorDescriptor ets3 = new DummySourceOperatorDescriptor(spec);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ets3, NC1_ID);

    ThreadCountingOperatorDescriptor tc = new ThreadCountingOperatorDescriptor(spec, 3);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, tc, NC1_ID);

    spec.connect(new OneToOneConnectorDescriptor(spec), ets1, 0, tc, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), ets2, 0, tc, 1);
    spec.connect(new OneToOneConnectorDescriptor(spec), ets3, 0, tc, 2);

    spec.addRoot(tc);
    runTest(spec);
}
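Each OneToOneConnectorDescriptor above pipes output 0 of one source into a distinct input index (0, 1, 2) of the three-input consumer, partition to partition, with no repartitioning. As a minimal sketch (not part of the project's test suite), the same pattern with a single source and a single-input consumer, reusing the helper operators and scaffolding from the snippet above, would look like this:

    // Minimal sketch, assuming the same test scaffolding (NC1_ID, runTest) and that
    // ThreadCountingOperatorDescriptor's second argument is its input arity, as the
    // three-input example above suggests.
    JobSpecification spec = new JobSpecification();

    DummySourceOperatorDescriptor src = new DummySourceOperatorDescriptor(spec);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, src, NC1_ID);

    ThreadCountingOperatorDescriptor tc = new ThreadCountingOperatorDescriptor(spec, 1);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, tc, NC1_ID);

    // One-to-one: partition i of src feeds partition i of tc, at input index 0.
    spec.connect(new OneToOneConnectorDescriptor(spec), src, 0, tc, 0);

    spec.addRoot(tc);
    runTest(spec);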
use of org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor in project asterixdb by apache.
the class ReplicateOperatorTest method test.
@Test
public void test() throws Exception {
    final int outputArity = 2;

    JobSpecification spec = new JobSpecification();

    String inputFileName = "data" + File.separator + "nc1" + File.separator + "words.txt";
    File[] outputFile = new File[outputArity];
    for (int i = 0; i < outputArity; i++) {
        outputFile[i] = File.createTempFile("replicateop", null);
        outputFile[i].deleteOnExit();
    }

    FileSplit[] inputSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, inputFileName) };

    String[] locations = new String[] { NC1_ID };

    DelimitedDataTupleParserFactory stringParser = new DelimitedDataTupleParserFactory(
            new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, '\u0000'); // delimiter assumed; the original line is truncated here
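The test body is cut off above. As a hedged sketch of how the remaining wiring would typically look (assumed, not the project's actual continuation), the file scan feeds a ReplicateOperatorDescriptor over a OneToOneConnectorDescriptor, and each of the outputArity replica outputs is then piped onward through its own one-to-one connector:

    // Sketch only (assumed continuation): wire scan -> replicate with one-to-one connectors,
    // reusing spec, inputSplits, stringParser, locations, and outputArity from above.
    RecordDescriptor stringRec = new RecordDescriptor(
            new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });

    FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec,
            new ConstantFileSplitProvider(inputSplits), stringParser, stringRec);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanOp, locations);

    ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, stringRec, outputArity);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, replicateOp, locations);

    spec.connect(new OneToOneConnectorDescriptor(spec), scanOp, 0, replicateOp, 0);

    // Each replica output i would then be connected to a writer over outputFile[i] in the
    // same way: spec.connect(new OneToOneConnectorDescriptor(spec), replicateOp, i, writerOp, 0);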