
Example 1 with WaitCondition

Use of com.datatorrent.stram.support.StramTestSupport.WaitCondition in project apex-core by apache.

From the class PartitioningTest, the method assertNumberPartitions:

private static List<PTOperator> assertNumberPartitions(final int count, final StramLocalCluster lc, final LogicalPlan.OperatorMeta ow) throws Exception {
    WaitCondition c = new WaitCondition() {

        @Override
        public boolean isComplete() {
            List<PTOperator> operators = lc.getPlanOperators(ow);
            LOG.debug("Number of operators {}, expected number {}", operators.size(), count);
            return (operators.size() == count);
        }
    };
    StramTestSupport.awaitCompletion(c, 10000);
    Assert.assertTrue("Number partitions match " + ow, c.isComplete());
    return lc.getPlanOperators(ow);
}
Also used: WaitCondition(com.datatorrent.stram.support.StramTestSupport.WaitCondition), PTOperator(com.datatorrent.stram.plan.physical.PTOperator)
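
All four examples share the same idiom: implement WaitCondition.isComplete() with the predicate to be polled, then pass the condition and a timeout in milliseconds to StramTestSupport.awaitCompletion. As a rough illustration of that idiom only (the helper name pollingAwait and the 500 ms poll interval are assumptions, not the actual StramTestSupport internals):

// Minimal sketch of the polling idiom behind awaitCompletion; requires
// import com.datatorrent.stram.support.StramTestSupport.WaitCondition;
private static boolean pollingAwait(WaitCondition condition, long timeoutMillis) throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    while (System.currentTimeMillis() < deadline) {
        if (condition.isComplete()) {
            return true;           // condition satisfied before the timeout
        }
        Thread.sleep(500);         // assumed poll interval; back off before re-checking
    }
    return condition.isComplete(); // one final check once the deadline has passed
}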

Example 2 with WaitCondition

Use of com.datatorrent.stram.support.StramTestSupport.WaitCondition in project apex-core by apache.

From the class TupleRecorderTest, the method testRecordingOnOperator:

private void testRecordingOnOperator(final StramLocalCluster localCluster, final PTOperator op) throws Exception {
    String id = "xyz";
    localCluster.getStreamingContainerManager().startRecording(id, op.getId(), null, 0);
    WaitCondition c = new WaitCondition() {

        @Override
        public boolean isComplete() {
            return null != getTupleRecorder(localCluster, op);
        }
    };
    Assert.assertTrue("Should get a tuple recorder within 10 seconds", StramTestSupport.awaitCompletion(c, 10000));
    final TupleRecorder tupleRecorder = getTupleRecorder(localCluster, op);
    long startTime = tupleRecorder.getStartTime();
    String line;
    File dir = new File(testWorkDir, "recordings/" + op.getId() + "/" + id);
    File file;
    file = new File(dir, FSPartFileCollection.META_FILE);
    Assert.assertTrue("meta file should exist", file.exists());
    int numPorts = tupleRecorder.getSinkMap().size();
    try (BufferedReader br = new BufferedReader(new FileReader(file))) {
        line = br.readLine();
        Assert.assertEquals("version should be 1.2", "1.2", line);
        line = br.readLine();
        JSONObject json = new JSONObject(line);
        Assert.assertEquals("Start time verification", startTime, json.getLong("startTime"));
        Assert.assertTrue(numPorts > 0);
        for (int i = 0; i < numPorts; i++) {
            line = br.readLine();
            Assert.assertTrue("should contain name, streamName, type and id", line != null && line.contains("\"name\"") && line.contains("\"streamName\"") && line.contains("\"type\"") && line.contains("\"id\""));
        }
    }
    c = new WaitCondition() {

        @Override
        public boolean isComplete() {
            return (tupleRecorder.getTotalTupleCount() >= testTupleCount);
        }
    };
    Assert.assertTrue("Should record more than " + testTupleCount + " tuples within 15 seconds", StramTestSupport.awaitCompletion(c, 15000));
    localCluster.getStreamingContainerManager().stopRecording(op.getId(), null);
    c = new WaitCondition() {

        @Override
        public boolean isComplete() {
            TupleRecorder tupleRecorder = getTupleRecorder(localCluster, op);
            return (tupleRecorder == null);
        }
    };
    Assert.assertTrue("Tuple recorder shouldn't exist any more after stopping", StramTestSupport.awaitCompletion(c, 5000));
    file = new File(dir, FSPartFileCollection.INDEX_FILE);
    Assert.assertTrue("index file should exist", file.exists());
    ArrayList<String> partFiles = new ArrayList<>();
    int indexCount = 0;
    try (BufferedReader br = new BufferedReader(new FileReader(file))) {
        while ((line = br.readLine()) != null) {
            String partFile = "part" + indexCount + ".txt";
            if (line.startsWith("F:" + partFile + ":")) {
                partFiles.add(partFile);
                indexCount++;
            } else if (line.startsWith("E")) {
                Assert.assertEquals("index file should end after E line", br.readLine(), null);
                break;
            } else {
                Assert.fail("index file line is not starting with F or E");
            }
        }
    }
    int[] tupleCount = new int[numPorts];
    boolean beginWindowExists = false;
    boolean endWindowExists = false;
    for (String partFile : partFiles) {
        file = new File(dir, partFile);
        if (!partFile.equals(partFiles.get(partFiles.size() - 1))) {
            Assert.assertTrue(partFile + " should be greater than 1KB", file.length() >= 1024);
        }
        Assert.assertTrue(partFile + " should exist", file.exists());
        try (BufferedReader br = new BufferedReader(new FileReader(file))) {
            while ((line = br.readLine()) != null) {
                if (line.startsWith("B:")) {
                    beginWindowExists = true;
                } else if (line.startsWith("E:")) {
                    endWindowExists = true;
                } else if (line.startsWith("T:")) {
                    String[] parts = line.split(":");
                    tupleCount[Integer.valueOf(parts[2])]++;
                }
            }
        }
    }
    Assert.assertTrue("begin window should exist", beginWindowExists);
    Assert.assertTrue("end window should exist", endWindowExists);
    int sum = 0;
    for (int i = 0; i < numPorts; i++) {
        Assert.assertTrue("tuple exists for port " + i, tupleCount[i] > 0);
        sum += tupleCount[i];
    }
    Assert.assertTrue("total tuple count >= " + testTupleCount, sum >= testTupleCount);
}
Also used: WaitCondition(com.datatorrent.stram.support.StramTestSupport.WaitCondition), ArrayList(java.util.ArrayList), JSONObject(org.codehaus.jettison.json.JSONObject), BufferedReader(java.io.BufferedReader), FileReader(java.io.FileReader), File(java.io.File)
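
The test above also documents the on-disk layout of a recording: the meta file starts with the version line "1.2" followed by a JSON header and one line per port; the index file lists lines of the form "F:partN.txt:..." terminated by an "E" line; and each part file contains lines prefixed "B:" (begin window), "E:" (end window), or "T:" with the port index in the third colon-separated field. Assuming that format, a per-port tuple tally for one part file could be computed with a small helper like the hypothetical countTuplesPerPort below (not part of apex-core):

// Hypothetical helper: counts "T:" lines per port index for a single part file.
// Requires java.io.BufferedReader, java.io.File, java.io.FileReader,
// java.io.IOException, java.util.HashMap and java.util.Map.
private static Map<Integer, Integer> countTuplesPerPort(File partFile) throws IOException {
    Map<Integer, Integer> counts = new HashMap<>();
    try (BufferedReader br = new BufferedReader(new FileReader(partFile))) {
        String line;
        while ((line = br.readLine()) != null) {
            if (line.startsWith("T:")) {
                String[] parts = line.split(":");
                int portIndex = Integer.parseInt(parts[2]); // third field carries the port index
                counts.merge(portIndex, 1, Integer::sum);
            }
        }
    }
    return counts;
}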

Example 3 with WaitCondition

Use of com.datatorrent.stram.support.StramTestSupport.WaitCondition in project apex-core by apache.

From the class InputOperatorTest, the method testSomeMethod:

@Test
public void testSomeMethod() throws Exception {
    LogicalPlan dag = new LogicalPlan();
    String testWorkDir = new File("target").getAbsolutePath();
    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testWorkDir, null));
    EvenOddIntegerGeneratorInputOperator generator = dag.addOperator("NumberGenerator", EvenOddIntegerGeneratorInputOperator.class);
    final CollectorModule<Number> collector = dag.addOperator("NumberCollector", new CollectorModule<Number>());
    dag.addStream("EvenIntegers", generator.even, collector.even).setLocality(Locality.CONTAINER_LOCAL);
    dag.addStream("OddIntegers", generator.odd, collector.odd).setLocality(Locality.CONTAINER_LOCAL);
    final StramLocalCluster lc = new StramLocalCluster(dag);
    lc.setHeartbeatMonitoringEnabled(false);
    lc.runAsync();
    WaitCondition c = new WaitCondition() {

        @Override
        public boolean isComplete() {
            return tupleCount.get() > 2;
        }
    };
    StramTestSupport.awaitCompletion(c, 2000);
    lc.shutdown();
    Assert.assertEquals("Collections size", 2, collections.size());
    Assert.assertFalse("Zero tuple count", collections.get(collector.even.id).isEmpty() && collections.get(collector.odd.id).isEmpty());
    Assert.assertTrue("Tuple count", collections.get(collector.even.id).size() - collections.get(collector.odd.id).size() <= 1);
}
Also used: WaitCondition(com.datatorrent.stram.support.StramTestSupport.WaitCondition), LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan), AsyncFSStorageAgent(com.datatorrent.common.util.AsyncFSStorageAgent), File(java.io.File), StramLocalCluster(com.datatorrent.stram.StramLocalCluster), Test(org.junit.Test)
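
WaitCondition is used here only through its single method, isComplete(). Assuming it is declared as an interface (and is therefore a valid lambda target on Java 8+), the anonymous class above could be written more compactly, for example:

// Assumption: WaitCondition is a single-method interface, so a lambda can implement it.
WaitCondition moreThanTwoTuples = () -> tupleCount.get() > 2;
StramTestSupport.awaitCompletion(moreThanTwoTuples, 2000);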

Example 4 with WaitCondition

Use of com.datatorrent.stram.support.StramTestSupport.WaitCondition in project apex-core by apache.

From the class InlineStreamTest, the method test:

@Test
@SuppressWarnings("SleepWhileInLoop")
public void test() throws Exception {
    final int totalTupleCount = 5000;
    final PassThroughNode<Object> operator1 = new PassThroughNode<>();
    final GenericNode node1 = new GenericNode(operator1, new OperatorContext(1, "operator1", new DefaultAttributeMap(), null));
    node1.setId(1);
    operator1.setup(node1.context);
    final PassThroughNode<Object> operator2 = new PassThroughNode<>();
    final GenericNode node2 = new GenericNode(operator2, new OperatorContext(2, "operator2", new DefaultAttributeMap(), null));
    node2.setId(2);
    operator2.setup(node2.context);
    StreamContext streamContext = new StreamContext("node1->node2");
    final InlineStream stream = new InlineStream(1024);
    stream.setup(streamContext);
    node1.connectOutputPort("output", stream);
    node2.connectInputPort("input", stream.getReservoir());
    prev = null;
    Sink<Object> sink = new Sink<Object>() {

        @Override
        public void put(Object payload) {
            if (payload instanceof Tuple) {
                return;
            }
            if (prev == null) {
                prev = payload;
            } else {
                if (Integer.valueOf(payload.toString()) - Integer.valueOf(prev.toString()) != 1) {
                    synchronized (InlineStreamTest.this) {
                        InlineStreamTest.this.notify();
                    }
                }
                prev = payload;
            }
            if (Integer.valueOf(prev.toString()) == totalTupleCount - 1) {
                synchronized (InlineStreamTest.this) {
                    InlineStreamTest.this.notify();
                }
            }
        }

        @Override
        public int getCount(boolean reset) {
            return 0;
        }
    };
    node2.connectOutputPort("output", sink);
    AbstractReservoir reservoir1 = AbstractReservoir.newReservoir("input", 1024 * 5);
    node1.connectInputPort("input", reservoir1);
    Map<Integer, Node<?>> activeNodes = new ConcurrentHashMap<>();
    launchNodeThread(node1, activeNodes);
    launchNodeThread(node2, activeNodes);
    stream.activate(streamContext);
    reservoir1.put(StramTestSupport.generateBeginWindowTuple("irrelevant", 0));
    for (int i = 0; i < totalTupleCount; i++) {
        reservoir1.put(i);
    }
    reservoir1.put(StramTestSupport.generateEndWindowTuple("irrelevant", 0));
    synchronized (this) {
        this.wait(200);
    }
    Assert.assertNotNull(prev);
    Assert.assertEquals("processing complete", totalTupleCount, Integer.valueOf(prev.toString()) + 1);
    Assert.assertEquals("active operators", 2, activeNodes.size());
    WaitCondition c = new WaitCondition() {

        @Override
        public boolean isComplete() {
            final SweepableReservoir reservoir = stream.getReservoir();
            logger.debug("stream {} empty {}, size {}", stream, reservoir.isEmpty(), reservoir.size(false));
            return reservoir.isEmpty();
        }
    };
    Assert.assertTrue("operator should finish processing all events within 1 second", StramTestSupport.awaitCompletion(c, 1000));
    stream.deactivate();
    for (Node<?> node : activeNodes.values()) {
        node.shutdown();
    }
    for (int i = 0; i < 10; i++) {
        Thread.sleep(20);
        if (activeNodes.isEmpty()) {
            break;
        }
    }
    stream.teardown();
    operator2.teardown();
    operator1.teardown();
    Assert.assertEquals("active operators", 0, activeNodes.size());
}
Also used: WaitCondition(com.datatorrent.stram.support.StramTestSupport.WaitCondition), AbstractReservoir(com.datatorrent.stram.engine.AbstractReservoir), SweepableReservoir(com.datatorrent.stram.engine.SweepableReservoir), StreamContext(com.datatorrent.stram.engine.StreamContext), Node(com.datatorrent.stram.engine.Node), GenericNode(com.datatorrent.stram.engine.GenericNode), DefaultAttributeMap(com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap), AtomicInteger(java.util.concurrent.atomic.AtomicInteger), Sink(com.datatorrent.api.Sink), OperatorContext(com.datatorrent.stram.engine.OperatorContext), ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap), Tuple(com.datatorrent.stram.tuple.Tuple), Test(org.junit.Test)
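
The manual shutdown wait near the end of the test (ten 20 ms sleeps until activeNodes drains) checks the same kind of predicate as the WaitCondition used a few lines earlier. As a sketch only, it could be folded into the same await pattern with a comparable ~200 ms budget:

// Sketch: the manual sleep loop expressed with the await pattern used above.
WaitCondition allNodesStopped = new WaitCondition() {
    @Override
    public boolean isComplete() {
        return activeNodes.isEmpty(); // same predicate the original loop polls
    }
};
StramTestSupport.awaitCompletion(allNodesStopped, 200);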

Aggregations

WaitCondition (com.datatorrent.stram.support.StramTestSupport.WaitCondition): 4
File (java.io.File): 2
Test (org.junit.Test): 2
DefaultAttributeMap (com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap): 1
Sink (com.datatorrent.api.Sink): 1
AsyncFSStorageAgent (com.datatorrent.common.util.AsyncFSStorageAgent): 1
StramLocalCluster (com.datatorrent.stram.StramLocalCluster): 1
AbstractReservoir (com.datatorrent.stram.engine.AbstractReservoir): 1
GenericNode (com.datatorrent.stram.engine.GenericNode): 1
Node (com.datatorrent.stram.engine.Node): 1
OperatorContext (com.datatorrent.stram.engine.OperatorContext): 1
StreamContext (com.datatorrent.stram.engine.StreamContext): 1
SweepableReservoir (com.datatorrent.stram.engine.SweepableReservoir): 1
LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan): 1
PTOperator (com.datatorrent.stram.plan.physical.PTOperator): 1
Tuple (com.datatorrent.stram.tuple.Tuple): 1
BufferedReader (java.io.BufferedReader): 1
FileReader (java.io.FileReader): 1
ArrayList (java.util.ArrayList): 1
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1