Example 6 with SparkManager

Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.

From the class DataStreamsTest, method testAlertPublisher.

@Test
public void testAlertPublisher() throws Exception {
    String sinkName = "alertSink";
    String topic = "alertTopic";
    Schema schema = Schema.recordOf("x", Schema.Field.of("id", Schema.nullableOf(Schema.of(Schema.Type.LONG))));
    StructuredRecord record1 = StructuredRecord.builder(schema).set("id", 1L).build();
    StructuredRecord record2 = StructuredRecord.builder(schema).set("id", 2L).build();
    StructuredRecord alertRecord = StructuredRecord.builder(schema).build();
    /*
     * source --> nullAlert --> sink
     *               |
     *               |--> TMS publisher
     */
    DataStreamsConfig config = DataStreamsConfig.builder()
        .setBatchInterval("5s")
        .addStage(new ETLStage("source", MockSource.getPlugin(schema, ImmutableList.of(record1, record2, alertRecord))))
        .addStage(new ETLStage("nullAlert", NullAlertTransform.getPlugin("id")))
        .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
        .addStage(new ETLStage("tms", TMSAlertPublisher.getPlugin(topic, NamespaceId.DEFAULT.getNamespace())))
        .addConnection("source", "nullAlert")
        .addConnection("nullAlert", "sink")
        .addConnection("nullAlert", "tms")
        .setCheckpointDir(checkpointDir)
        .build();
    AppRequest<DataStreamsConfig> appRequest = new AppRequest<>(APP_ARTIFACT, config);
    ApplicationId appId = NamespaceId.DEFAULT.app("AlertTest");
    ApplicationManager appManager = deployApplication(appId, appRequest);
    SparkManager sparkManager = appManager.getSparkManager(DataStreamsSparkLauncher.NAME);
    sparkManager.start();
    sparkManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
    final Set<StructuredRecord> expectedRecords = ImmutableSet.of(record1, record2);
    final Set<Alert> expectedMessages = ImmutableSet.of(new Alert("nullAlert", new HashMap<String, String>()));
    final DataSetManager<Table> sinkTable = getDataset(sinkName);
    Tasks.waitFor(true, () -> {
        // get alerts from TMS
        try {
            getMessagingAdmin(NamespaceId.DEFAULT.getNamespace()).getTopicProperties(topic);
        } catch (TopicNotFoundException e) {
            return false;
        }
        MessageFetcher messageFetcher = getMessagingContext().getMessageFetcher();
        Set<Alert> actualMessages = new HashSet<>();
        try (CloseableIterator<Message> iter = messageFetcher.fetch(NamespaceId.DEFAULT.getNamespace(), topic, 5, 0)) {
            while (iter.hasNext()) {
                Message message = iter.next();
                Alert alert = message.decodePayload(r -> GSON.fromJson(r, Alert.class));
                actualMessages.add(alert);
            }
        }
        // get records from sink
        sinkTable.flush();
        Set<StructuredRecord> outputRecords = new HashSet<>(MockSink.readOutput(sinkTable));
        return expectedRecords.equals(outputRecords) && expectedMessages.equals(actualMessages);
    }, 4, TimeUnit.MINUTES);
    sparkManager.stop();
    sparkManager.waitForStopped(10, TimeUnit.SECONDS);
    validateMetric(appId, "source.records.out", 3);
    validateMetric(appId, "nullAlert.records.in", 3);
    validateMetric(appId, "nullAlert.records.out", 2);
    validateMetric(appId, "nullAlert.records.alert", 1);
    validateMetric(appId, "sink.records.in", 2);
    validateMetric(appId, "tms.records.in", 1);
}
Also used : ApplicationManager(io.cdap.cdap.test.ApplicationManager) MessageFetcher(io.cdap.cdap.api.messaging.MessageFetcher) SparkManager(io.cdap.cdap.test.SparkManager) Table(io.cdap.cdap.api.dataset.table.Table) Message(io.cdap.cdap.api.messaging.Message) HashMap(java.util.HashMap) TopicNotFoundException(io.cdap.cdap.api.messaging.TopicNotFoundException) Schema(io.cdap.cdap.api.data.schema.Schema) StructuredRecord(io.cdap.cdap.api.data.format.StructuredRecord) DataStreamsConfig(io.cdap.cdap.etl.proto.v2.DataStreamsConfig) AppRequest(io.cdap.cdap.proto.artifact.AppRequest) ETLStage(io.cdap.cdap.etl.proto.v2.ETLStage) Alert(io.cdap.cdap.etl.api.Alert) ApplicationId(io.cdap.cdap.proto.id.ApplicationId) HashSet(java.util.HashSet) Test(org.junit.Test)
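
Stripped of its assertions, the SparkManager lifecycle this example relies on is small. Below is a minimal sketch of just that lifecycle, under the same assumptions as the test above (a CDAP test base providing deployApplication, the APP_ARTIFACT constant from DataStreamsTest, and an already-built DataStreamsConfig named config):

// Minimal sketch, assuming the test-base helpers used above (deployApplication,
// APP_ARTIFACT) and a previously built DataStreamsConfig named config.
AppRequest<DataStreamsConfig> appRequest = new AppRequest<>(APP_ARTIFACT, config);
ApplicationId appId = NamespaceId.DEFAULT.app("AlertTest");
ApplicationManager appManager = deployApplication(appId, appRequest);
SparkManager sparkManager = appManager.getSparkManager(DataStreamsSparkLauncher.NAME);
sparkManager.start();
sparkManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
// ... poll sink datasets and TMS with Tasks.waitFor, as in the test body ...
sparkManager.stop();
sparkManager.waitForStopped(10, TimeUnit.SECONDS);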

Example 7 with SparkManager

Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.

From the class TestFrameworkTestRun, method testAppWithPlugin.

@Test
public void testAppWithPlugin() throws Exception {
    ArtifactId artifactId = NamespaceId.DEFAULT.artifact("app-with-plugin", "1.0.0-SNAPSHOT");
    addAppArtifact(artifactId, AppWithPlugin.class);
    ArtifactId pluginArtifactId = NamespaceId.DEFAULT.artifact("test-plugin", "1.0.0-SNAPSHOT");
    addPluginArtifact(pluginArtifactId, artifactId, ToStringPlugin.class);
    ApplicationId appId = NamespaceId.DEFAULT.app("AppWithPlugin");
    AppRequest createRequest = new AppRequest(new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion()));
    ApplicationManager appManager = deployApplication(appId, createRequest);
    final WorkerManager workerManager = appManager.getWorkerManager(AppWithPlugin.WORKER);
    workerManager.start();
    workerManager.waitForRun(ProgramRunStatus.COMPLETED, 10, TimeUnit.SECONDS);
    final ServiceManager serviceManager = appManager.getServiceManager(AppWithPlugin.SERVICE);
    serviceManager.start();
    serviceManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
    URL serviceURL = serviceManager.getServiceURL(5, TimeUnit.SECONDS);
    callServiceGet(serviceURL, "dummy");
    serviceManager.stop();
    serviceManager.waitForStopped(10, TimeUnit.SECONDS);
    WorkflowManager workflowManager = appManager.getWorkflowManager(AppWithPlugin.WORKFLOW);
    workflowManager.start();
    workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
    List<RunRecord> runRecords = workflowManager.getHistory();
    Assert.assertNotEquals(ProgramRunStatus.FAILED, runRecords.get(0).getStatus());
    DataSetManager<KeyValueTable> workflowTableManager = getDataset(AppWithPlugin.WORKFLOW_TABLE);
    String value = Bytes.toString(workflowTableManager.get().read("val"));
    Assert.assertEquals(AppWithPlugin.TEST, value);
    Map<String, String> workflowTags = ImmutableMap.of(
        Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getNamespace(),
        Constants.Metrics.Tag.APP, "AppWithPlugin",
        Constants.Metrics.Tag.WORKFLOW, AppWithPlugin.WORKFLOW,
        Constants.Metrics.Tag.RUN_ID, runRecords.get(0).getPid());
    getMetricsManager().waitForTotalMetricCount(workflowTags, String.format("user.destroy.%s", AppWithPlugin.WORKFLOW), 1, 60, TimeUnit.SECONDS);
    // Testing Spark Plugins. First send some data to fileset for the Spark program to process
    DataSetManager<FileSet> fileSetManager = getDataset(AppWithPlugin.SPARK_INPUT);
    FileSet fileSet = fileSetManager.get();
    try (PrintStream out = new PrintStream(fileSet.getLocation("input").append("file.txt").getOutputStream(), true, "UTF-8")) {
        for (int i = 0; i < 5; i++) {
            out.println("Message " + i);
        }
    }
    Map<String, String> sparkArgs = new HashMap<>();
    FileSetArguments.setInputPath(sparkArgs, "input");
    SparkManager sparkManager = appManager.getSparkManager(AppWithPlugin.SPARK).start(sparkArgs);
    sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);
    // Verify the Spark result.
    DataSetManager<Table> dataSetManager = getDataset(AppWithPlugin.SPARK_TABLE);
    Table table = dataSetManager.get();
    try (Scanner scanner = table.scan(null, null)) {
        for (int i = 0; i < 5; i++) {
            Row row = scanner.next();
            Assert.assertNotNull(row);
            String expected = "Message " + i + " " + AppWithPlugin.TEST;
            Assert.assertEquals(expected, Bytes.toString(row.getRow()));
            Assert.assertEquals(expected, Bytes.toString(row.get(expected)));
        }
        // There shouldn't be any more rows in the table.
        Assert.assertNull(scanner.next());
    }
}
Also used : Scanner(io.cdap.cdap.api.dataset.table.Scanner) ApplicationManager(io.cdap.cdap.test.ApplicationManager) ArtifactId(io.cdap.cdap.proto.id.ArtifactId) HashMap(java.util.HashMap) WorkflowManager(io.cdap.cdap.test.WorkflowManager) URL(java.net.URL) ServiceManager(io.cdap.cdap.test.ServiceManager) PrintStream(java.io.PrintStream) SparkManager(io.cdap.cdap.test.SparkManager) Table(io.cdap.cdap.api.dataset.table.Table) KeyValueTable(io.cdap.cdap.api.dataset.lib.KeyValueTable) FileSet(io.cdap.cdap.api.dataset.lib.FileSet) AppRequest(io.cdap.cdap.proto.artifact.AppRequest) WorkerManager(io.cdap.cdap.test.WorkerManager) RunRecord(io.cdap.cdap.proto.RunRecord) ArtifactSummary(io.cdap.cdap.api.artifact.ArtifactSummary) KeyValueTable(io.cdap.cdap.api.dataset.lib.KeyValueTable) Row(io.cdap.cdap.api.dataset.table.Row) ApplicationId(io.cdap.cdap.proto.id.ApplicationId) Test(org.junit.Test)
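
The plugin wiring is the interesting part of this example: the app artifact is registered first, the plugin artifact is then registered against it, and the Spark program later receives its input path through FileSet runtime arguments. A condensed sketch of just those steps, assuming the same test-base helpers (addAppArtifact, addPluginArtifact) and the AppWithPlugin/ToStringPlugin test classes used above:

// Condensed sketch of the plugin and Spark-argument setup shown above.
ArtifactId artifactId = NamespaceId.DEFAULT.artifact("app-with-plugin", "1.0.0-SNAPSHOT");
addAppArtifact(artifactId, AppWithPlugin.class);
// Register the plugin against the app artifact so the app can resolve it at runtime.
ArtifactId pluginArtifactId = NamespaceId.DEFAULT.artifact("test-plugin", "1.0.0-SNAPSHOT");
addPluginArtifact(pluginArtifactId, artifactId, ToStringPlugin.class);
// Point the Spark program at the "input" path of its input FileSet and run it.
Map<String, String> sparkArgs = new HashMap<>();
FileSetArguments.setInputPath(sparkArgs, "input");
SparkManager sparkManager = appManager.getSparkManager(AppWithPlugin.SPARK).start(sparkArgs);
sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);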

Example 8 with SparkManager

Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.

From the class TestFrameworkTestRun, method testClusterName.

@Test
public void testClusterName() throws Exception {
    String clusterName = getConfiguration().get(Constants.CLUSTER_NAME);
    ApplicationManager appManager = deployApplication(ClusterNameTestApp.class);
    final DataSetManager<KeyValueTable> datasetManager = getDataset(ClusterNameTestApp.CLUSTER_NAME_TABLE);
    final KeyValueTable clusterNameTable = datasetManager.get();
    // A callable for reading the cluster name from the ClusterNameTable.
    // It is used by the Tasks.waitFor calls below.
    final AtomicReference<String> key = new AtomicReference<>();
    Callable<String> readClusterName = new Callable<String>() {

        @Nullable
        @Override
        public String call() throws Exception {
            datasetManager.flush();
            byte[] bytes = clusterNameTable.read(key.get());
            return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
        }
    };
    // Service
    ServiceManager serviceManager = appManager.getServiceManager(ClusterNameTestApp.ClusterNameServiceHandler.class.getSimpleName()).start();
    Assert.assertEquals(clusterName, callServiceGet(serviceManager.getServiceURL(10, TimeUnit.SECONDS), "clusterName"));
    serviceManager.stop();
    // Worker
    WorkerManager workerManager = appManager.getWorkerManager(ClusterNameTestApp.ClusterNameWorker.class.getSimpleName()).start();
    key.set("worker.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // The worker will stop by itself. No need to call stop
    workerManager.waitForRun(ProgramRunStatus.COMPLETED, 10, TimeUnit.SECONDS);
    // MapReduce
    // Set up the input file used by the MapReduce program
    Location location = this.<FileSet>getDataset(ClusterNameTestApp.INPUT_FILE_SET).get().getLocation("input");
    try (PrintStream printer = new PrintStream(location.getOutputStream(), true, "UTF-8")) {
        for (int i = 0; i < 10; i++) {
            printer.println("Hello World " + i);
        }
    }
    // Set up the input and output dataset arguments
    Map<String, String> inputArgs = new HashMap<>();
    FileSetArguments.setInputPath(inputArgs, "input");
    Map<String, String> outputArgs = new HashMap<>();
    FileSetArguments.setOutputPath(outputArgs, "output");
    Map<String, String> args = new HashMap<>();
    args.putAll(RuntimeArguments.addScope(Scope.DATASET, ClusterNameTestApp.INPUT_FILE_SET, inputArgs));
    args.putAll(RuntimeArguments.addScope(Scope.DATASET, ClusterNameTestApp.OUTPUT_FILE_SET, outputArgs));
    MapReduceManager mrManager = appManager.getMapReduceManager(ClusterNameTestApp.ClusterNameMapReduce.class.getSimpleName()).start(args);
    key.set("mr.client.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set("mapper.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set("reducer.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    mrManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
    // Spark
    SparkManager sparkManager = appManager.getSparkManager(ClusterNameTestApp.ClusterNameSpark.class.getSimpleName()).start();
    key.set("spark.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
    // Workflow
    // Clean up the output path for the MR job in the workflow first
    this.<FileSet>getDataset(ClusterNameTestApp.OUTPUT_FILE_SET).get().getLocation("output").delete(true);
    args = RuntimeArguments.addScope(Scope.MAPREDUCE, ClusterNameTestApp.ClusterNameMapReduce.class.getSimpleName(), args);
    WorkflowManager workflowManager = appManager.getWorkflowManager(ClusterNameTestApp.ClusterNameWorkflow.class.getSimpleName()).start(args);
    String prefix = ClusterNameTestApp.ClusterNameWorkflow.class.getSimpleName() + ".";
    key.set(prefix + "mr.client.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "mapper.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "reducer.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "spark.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "action.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 120, TimeUnit.SECONDS);
}
Also used : PrintStream(java.io.PrintStream) ApplicationManager(io.cdap.cdap.test.ApplicationManager) SparkManager(io.cdap.cdap.test.SparkManager) FileSet(io.cdap.cdap.api.dataset.lib.FileSet) MapReduceManager(io.cdap.cdap.test.MapReduceManager) HashMap(java.util.HashMap) WorkflowManager(io.cdap.cdap.test.WorkflowManager) AtomicReference(java.util.concurrent.atomic.AtomicReference) Callable(java.util.concurrent.Callable) WorkerManager(io.cdap.cdap.test.WorkerManager) KeyValueTable(io.cdap.cdap.api.dataset.lib.KeyValueTable) ServiceManager(io.cdap.cdap.test.ServiceManager) Location(org.apache.twill.filesystem.Location) Test(org.junit.Test)
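
One detail worth calling out from this example is how a single argument map serves several programs: the input and output FileSet paths are namespaced with RuntimeArguments.addScope so that each dataset only sees its own arguments. A minimal sketch of that scoping, reusing the constants from the test above:

// Minimal sketch of dataset-scoped runtime arguments, as used in the test above.
Map<String, String> inputArgs = new HashMap<>();
FileSetArguments.setInputPath(inputArgs, "input");
Map<String, String> outputArgs = new HashMap<>();
FileSetArguments.setOutputPath(outputArgs, "output");
Map<String, String> args = new HashMap<>();
// Prefix each map with its dataset name so only that dataset receives the arguments.
args.putAll(RuntimeArguments.addScope(Scope.DATASET, ClusterNameTestApp.INPUT_FILE_SET, inputArgs));
args.putAll(RuntimeArguments.addScope(Scope.DATASET, ClusterNameTestApp.OUTPUT_FILE_SET, outputArgs));
MapReduceManager mrManager = appManager
    .getMapReduceManager(ClusterNameTestApp.ClusterNameMapReduce.class.getSimpleName())
    .start(args);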

Example 9 with SparkManager

Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.

From the class TestFrameworkTestRun, method testAppWithTxTimeout.

// CDAP-18061 is open for tracking the fix of this flaky test case
@Ignore
@Category(SlowTests.class)
@Test
public void testAppWithTxTimeout() throws Exception {
    int txDefaulTimeoutService = 17;
    int txDefaulTimeoutWorker = 18;
    int txDefaulTimeoutWorkflow = 19;
    int txDefaulTimeoutAction = 20;
    int txDefaulTimeoutMapReduce = 23;
    int txDefaulTimeoutSpark = 24;
    final ApplicationManager appManager = deployApplication(testSpace, AppWithCustomTx.class);
    try {
        // attempt to start with a tx timeout that exceeds the max tx timeout
        appManager.getServiceManager(AppWithCustomTx.SERVICE).start(txTimeoutArguments(100000));
        // wait for the failed status of AppWithCustomTx.SERVICE to be persisted, so that it can be started again
        Tasks.waitFor(1, () -> appManager.getServiceManager(AppWithCustomTx.SERVICE).getHistory(ProgramRunStatus.FAILED).size(), 30L, TimeUnit.SECONDS, 1, TimeUnit.SECONDS);
        ServiceManager serviceManager = appManager.getServiceManager(AppWithCustomTx.SERVICE).start(txTimeoutArguments(txDefaulTimeoutService));
        WorkerManager notxWorkerManager = appManager.getWorkerManager(AppWithCustomTx.WORKER_NOTX).start(txTimeoutArguments(txDefaulTimeoutWorker));
        WorkerManager txWorkerManager = appManager.getWorkerManager(AppWithCustomTx.WORKER_TX).start(txTimeoutArguments(txDefaulTimeoutWorker));
        WorkflowManager txWFManager = appManager.getWorkflowManager(AppWithCustomTx.WORKFLOW_TX).start(txTimeoutArguments(txDefaulTimeoutWorkflow));
        WorkflowManager notxWFManager = appManager.getWorkflowManager(AppWithCustomTx.WORKFLOW_NOTX).start(txTimeoutArguments(txDefaulTimeoutWorkflow, txDefaulTimeoutAction, "action", AppWithCustomTx.ACTION_NOTX));
        MapReduceManager txMRManager = appManager.getMapReduceManager(AppWithCustomTx.MAPREDUCE_TX).start(txTimeoutArguments(txDefaulTimeoutMapReduce));
        MapReduceManager notxMRManager = appManager.getMapReduceManager(AppWithCustomTx.MAPREDUCE_NOTX).start(txTimeoutArguments(txDefaulTimeoutMapReduce));
        SparkManager txSparkManager = appManager.getSparkManager(AppWithCustomTx.SPARK_TX).start(txTimeoutArguments(txDefaulTimeoutSpark));
        SparkManager notxSparkManager = appManager.getSparkManager(AppWithCustomTx.SPARK_NOTX).start(txTimeoutArguments(txDefaulTimeoutSpark));
        serviceManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
        callServicePut(serviceManager.getServiceURL(), "tx", "hello");
        callServicePut(serviceManager.getServiceURL(), "tx", AppWithCustomTx.FAIL_PRODUCER, 200);
        callServicePut(serviceManager.getServiceURL(), "tx", AppWithCustomTx.FAIL_CONSUMER, 500);
        callServicePut(serviceManager.getServiceURL(), "notx", "hello");
        callServicePut(serviceManager.getServiceURL(), "notx", AppWithCustomTx.FAIL_PRODUCER, 200);
        callServicePut(serviceManager.getServiceURL(), "notx", AppWithCustomTx.FAIL_CONSUMER, 500);
        serviceManager.stop();
        serviceManager.waitForRun(ProgramRunStatus.KILLED, 10, TimeUnit.SECONDS);
        txMRManager.waitForRun(ProgramRunStatus.FAILED, 10L, TimeUnit.SECONDS);
        notxMRManager.waitForRun(ProgramRunStatus.FAILED, 10L, TimeUnit.SECONDS);
        txSparkManager.waitForRun(ProgramRunStatus.COMPLETED, 10L, TimeUnit.SECONDS);
        notxSparkManager.waitForRun(ProgramRunStatus.COMPLETED, 10L, TimeUnit.SECONDS);
        notxWorkerManager.waitForRun(ProgramRunStatus.COMPLETED, 10L, TimeUnit.SECONDS);
        txWorkerManager.waitForRun(ProgramRunStatus.COMPLETED, 10L, TimeUnit.SECONDS);
        txWFManager.waitForRun(ProgramRunStatus.COMPLETED, 10L, TimeUnit.SECONDS);
        notxWFManager.waitForRun(ProgramRunStatus.COMPLETED, 10L, TimeUnit.SECONDS);
        DataSetManager<TransactionCapturingTable> dataset = getDataset(testSpace.dataset(AppWithCustomTx.CAPTURE));
        Table t = dataset.get().getTable();
        // all programs attempt to write to the table in different transactional contexts
        // - if it is outside a transaction, then the expected value is null
        // - if it is in an attempt of a nested transaction, then the expected value is null
        // - if it is in an implicit transaction, then the expected value is "default"
        // - if it is in an explicit transaction, then the expected value is the transaction timeout
        Object[][] writesToValidate = new Object[][] { // transactions attempted by the workers
        { AppWithCustomTx.WORKER_TX, AppWithCustomTx.INITIALIZE, txDefaulTimeoutWorker }, { AppWithCustomTx.WORKER_TX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKER_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutWorker }, { AppWithCustomTx.WORKER_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKER_TX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.WORKER_TX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutWorker }, { AppWithCustomTx.WORKER_TX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_WORKER_RUNTIME }, { AppWithCustomTx.WORKER_TX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.INITIALIZE, null }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.INITIALIZE_TX, AppWithCustomTx.TIMEOUT_WORKER_INITIALIZE }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.INITIALIZE_TX_D, txDefaulTimeoutWorker }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.DESTROY, null }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_WORKER_DESTROY }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutWorker }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_WORKER_RUNTIME }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutWorker }, { AppWithCustomTx.WORKER_NOTX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, // transactions attempted by the service
        { AppWithCustomTx.HANDLER_TX, AppWithCustomTx.INITIALIZE, txDefaulTimeoutService }, { AppWithCustomTx.HANDLER_TX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.HANDLER_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutService }, { AppWithCustomTx.HANDLER_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.HANDLER_TX, AppWithCustomTx.RUNTIME, txDefaulTimeoutService }, { AppWithCustomTx.HANDLER_TX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_CONSUMER_RUNTIME }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME_TX_T, AppWithCustomTx.TIMEOUT_CONSUMER_RUNTIME }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME_NEST_T, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME_NEST_CT, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.RUNTIME_NEST_TC, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutService }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.ONERROR, txDefaulTimeoutService }, { AppWithCustomTx.CONSUMER_TX, AppWithCustomTx.ONERROR_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_PRODUCER_RUNTIME }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME_TX_T, AppWithCustomTx.TIMEOUT_PRODUCER_RUNTIME }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME_NEST_T, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME_NEST_CT, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.RUNTIME_NEST_TC, AppWithCustomTx.FAILED }, // { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutService },
        { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.ONERROR, txDefaulTimeoutService }, { AppWithCustomTx.PRODUCER_TX, AppWithCustomTx.ONERROR_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.INITIALIZE, null }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.INITIALIZE_TX, AppWithCustomTx.TIMEOUT_HANDLER_INITIALIZE }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.INITIALIZE_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.DESTROY, null }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_HANDLER_DESTROY }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_HANDLER_RUNTIME }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.HANDLER_NOTX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_CONSUMER_RUNTIME }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME_TX_T, AppWithCustomTx.TIMEOUT_CONSUMER_RUNTIME }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME_NEST_T, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME_NEST_CT, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.RUNTIME_NEST_TC, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.DESTROY, null }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_CONSUMER_DESTROY }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.ONERROR, null }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.ONERROR_TX, AppWithCustomTx.TIMEOUT_CONSUMER_ERROR }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.ONERROR_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.CONSUMER_NOTX, AppWithCustomTx.ONERROR_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_PRODUCER_RUNTIME }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME_TX_T, AppWithCustomTx.TIMEOUT_PRODUCER_RUNTIME }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME_NEST_T, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME_NEST_CT, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.RUNTIME_NEST_TC, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.DESTROY, null }, { 
AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_PRODUCER_DESTROY }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.ONERROR, null }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.ONERROR_TX, AppWithCustomTx.TIMEOUT_PRODUCER_ERROR }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.ONERROR_TX_D, txDefaulTimeoutService }, { AppWithCustomTx.PRODUCER_NOTX, AppWithCustomTx.ONERROR_NEST, AppWithCustomTx.FAILED }, // transactions attempted by the workflows
        { AppWithCustomTx.WORKFLOW_TX, AppWithCustomTx.INITIALIZE, txDefaulTimeoutWorkflow }, { AppWithCustomTx.WORKFLOW_TX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKFLOW_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutWorkflow }, { AppWithCustomTx.WORKFLOW_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.INITIALIZE, txDefaulTimeoutWorkflow }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_ACTION_RUNTIME }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutWorkflow }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutWorkflow }, { AppWithCustomTx.ACTION_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.INITIALIZE, null }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.INITIALIZE_TX, AppWithCustomTx.TIMEOUT_WORKFLOW_INITIALIZE }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.INITIALIZE_TX_D, txDefaulTimeoutWorkflow }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.DESTROY, null }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_WORKFLOW_DESTROY }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutWorkflow }, { AppWithCustomTx.WORKFLOW_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.INITIALIZE, null }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.INITIALIZE_TX, AppWithCustomTx.TIMEOUT_ACTION_INITIALIZE }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.INITIALIZE_TX_D, txDefaulTimeoutAction }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.RUNTIME, null }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.RUNTIME_TX, AppWithCustomTx.TIMEOUT_ACTION_RUNTIME }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.RUNTIME_TX_D, txDefaulTimeoutAction }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.RUNTIME_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.DESTROY, null }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_ACTION_DESTROY }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutAction }, { AppWithCustomTx.ACTION_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, // transactions attempted by the mapreduce's
        { AppWithCustomTx.MAPREDUCE_TX, AppWithCustomTx.INITIALIZE, txDefaulTimeoutMapReduce }, { AppWithCustomTx.MAPREDUCE_TX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.MAPREDUCE_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutMapReduce }, { AppWithCustomTx.MAPREDUCE_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.INITIALIZE, null }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.INITIALIZE_TX, AppWithCustomTx.TIMEOUT_MAPREDUCE_INITIALIZE }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.INITIALIZE_TX_D, txDefaulTimeoutMapReduce }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.DESTROY, null }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_MAPREDUCE_DESTROY }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutMapReduce }, { AppWithCustomTx.MAPREDUCE_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, // transactions attempted by the spark's
        { AppWithCustomTx.SPARK_TX, AppWithCustomTx.INITIALIZE, txDefaulTimeoutSpark }, { AppWithCustomTx.SPARK_TX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.SPARK_TX, AppWithCustomTx.DESTROY, txDefaulTimeoutSpark }, { AppWithCustomTx.SPARK_TX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.INITIALIZE, null }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.INITIALIZE_TX, AppWithCustomTx.TIMEOUT_SPARK_INITIALIZE }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.INITIALIZE_TX_D, txDefaulTimeoutSpark }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.INITIALIZE_NEST, AppWithCustomTx.FAILED }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.DESTROY, null }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.DESTROY_TX, AppWithCustomTx.TIMEOUT_SPARK_DESTROY }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.DESTROY_TX_D, txDefaulTimeoutSpark }, { AppWithCustomTx.SPARK_NOTX, AppWithCustomTx.DESTROY_NEST, AppWithCustomTx.FAILED } };
        for (Object[] writeToValidate : writesToValidate) {
            String row = (String) writeToValidate[0];
            String column = (String) writeToValidate[1];
            String expectedValue = writeToValidate[2] == null ? null : String.valueOf(writeToValidate[2]);
            Tasks.waitFor(expectedValue, () -> t.get(new Get(row, column)).getString(column), 30L, TimeUnit.SECONDS, 1, TimeUnit.SECONDS, String.format("Error getting value for %s.%s. Expected: %s, Got: %s", row, column, expectedValue, t.get(new Get(row, column)).getString(column)));
        }
    } finally {
        appManager.stopAll();
    }
}
Also used : ApplicationManager(io.cdap.cdap.test.ApplicationManager) SparkManager(io.cdap.cdap.test.SparkManager) Table(io.cdap.cdap.api.dataset.table.Table) KeyValueTable(io.cdap.cdap.api.dataset.lib.KeyValueTable) MapReduceManager(io.cdap.cdap.test.MapReduceManager) WorkflowManager(io.cdap.cdap.test.WorkflowManager) WorkerManager(io.cdap.cdap.test.WorkerManager) ServiceManager(io.cdap.cdap.test.ServiceManager) Get(io.cdap.cdap.api.dataset.table.Get) Ignore(org.junit.Ignore) Category(org.junit.experimental.categories.Category) Test(org.junit.Test)
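
The reusable piece at the end of this test is the validation loop: every program writes the transaction timeout it observed into the capture table, and the test polls each expected cell with Tasks.waitFor. A trimmed sketch of that loop, where t is the capture Table and writesToValidate holds the { row, column, expectedValue } triples built above:

// Trimmed sketch of the validation loop above; t is the capture Table and
// writesToValidate holds { row, column, expectedValue } triples.
for (Object[] writeToValidate : writesToValidate) {
    String row = (String) writeToValidate[0];
    String column = (String) writeToValidate[1];
    String expectedValue = writeToValidate[2] == null ? null : String.valueOf(writeToValidate[2]);
    // Poll until the program has written the expected value, or fail after 30 seconds.
    Tasks.waitFor(expectedValue,
                  () -> t.get(new Get(row, column)).getString(column),
                  30L, TimeUnit.SECONDS, 1, TimeUnit.SECONDS);
}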

Example 10 with SparkManager

Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.

From the class SparkStreamingTestRun, method test.

@Test
public void test() throws Exception {
    File checkpointDir = TEMP_FOLDER.newFolder();
    KafkaPublisher publisher = KAFKA_TESTER.getKafkaClient().getPublisher(KafkaPublisher.Ack.LEADER_RECEIVED, Compression.NONE);
    ApplicationManager appManager = deployApplication(TestSparkApp.class);
    Map<String, String> args = ImmutableMap.of(
        "checkpoint.path", checkpointDir.getAbsolutePath(),
        "kafka.brokers", KAFKA_TESTER.getBrokerService().getBrokerList(),
        "kafka.topics", "testtopic",
        "result.dataset", "TimeSeriesResult");
    SparkManager manager = appManager.getSparkManager(KafkaSparkStreaming.class.getSimpleName());
    manager.start(args);
    // Send 100 messages over 5 seconds
    for (int i = 0; i < 100; i++) {
        publisher.prepare("testtopic").add(Charsets.UTF_8.encode("Message " + i), "1").send();
        TimeUnit.MILLISECONDS.sleep(50);
    }
    // Sum up everything from the TimeseriesTable. "Message" should have a count of 100,
    // while each number (0-99) should have a count of 1.
    final DataSetManager<TimeseriesTable> tsTableManager = getDataset("TimeSeriesResult");
    final TimeseriesTable tsTable = tsTableManager.get();
    Tasks.waitFor(100L, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            tsTableManager.flush();
            return getCounts("Message", tsTable);
        }
    }, 1, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);
    for (int i = 0; i < 100; i++) {
        final int finalI = i;
        Tasks.waitFor(1L, new Callable<Long>() {

            @Override
            public Long call() throws Exception {
                tsTableManager.flush();
                return getCounts(Integer.toString(finalI), tsTable);
            }
        }, 1, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);
    }
    manager.stop();
    manager.waitForRun(ProgramRunStatus.KILLED, 60, TimeUnit.SECONDS);
    // Send 100 more messages without pause
    for (int i = 100; i < 200; i++) {
        publisher.prepare("testtopic").add(Charsets.UTF_8.encode("Message " + i), "1").send();
    }
    // Start the streaming app again. It should resume from where it left off because of checkpoint
    manager.start(args);
    // Expects "Message" having count = 200.
    Tasks.waitFor(100L, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            tsTableManager.flush();
            return getCounts("Message", tsTable);
        }
    }, 1, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);
    // Expect each number (0-199) to have a count of 1.
    for (int i = 0; i < 200; i++) {
        final int finalI = i;
        Tasks.waitFor(1L, new Callable<Long>() {

            @Override
            public Long call() throws Exception {
                tsTableManager.flush();
                return getCounts(Integer.toString(finalI), tsTable);
            }
        }, 1, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);
    }
    manager.stop();
    manager.waitForRuns(ProgramRunStatus.KILLED, 2, 60, TimeUnit.SECONDS);
}
Also used : ApplicationManager(io.cdap.cdap.test.ApplicationManager) SparkManager(io.cdap.cdap.test.SparkManager) TimeseriesTable(io.cdap.cdap.api.dataset.lib.TimeseriesTable) KafkaSparkStreaming(io.cdap.cdap.spark.app.KafkaSparkStreaming) File(java.io.File) KafkaPublisher(org.apache.twill.kafka.client.KafkaPublisher) Test(org.junit.Test)
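
The polling blocks in this test use anonymous Callable classes; with Java 8 they read more compactly as lambdas, exactly as the DataStreamsTest example earlier on this page does. A sketch of the first wait written that way, assuming the same tsTableManager, tsTable, and getCounts helper from the test:

// Same wait as above, written as a lambda instead of an anonymous Callable.
Tasks.waitFor(100L, () -> {
    tsTableManager.flush();
    return getCounts("Message", tsTable);
}, 1, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);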

Aggregations

SparkManager (io.cdap.cdap.test.SparkManager): 41
ApplicationManager (io.cdap.cdap.test.ApplicationManager): 32
Test (org.junit.Test): 29
KeyValueTable (io.cdap.cdap.api.dataset.lib.KeyValueTable): 17
HashMap (java.util.HashMap): 15
StructuredRecord (io.cdap.cdap.api.data.format.StructuredRecord): 12
ApplicationId (io.cdap.cdap.proto.id.ApplicationId): 12
HashSet (java.util.HashSet): 12
Table (io.cdap.cdap.api.dataset.table.Table): 11
AppRequest (io.cdap.cdap.proto.artifact.AppRequest): 11
Schema (io.cdap.cdap.api.data.schema.Schema): 10
DataStreamsConfig (io.cdap.cdap.etl.proto.v2.DataStreamsConfig): 10
ETLStage (io.cdap.cdap.etl.proto.v2.ETLStage): 10
FileSet (io.cdap.cdap.api.dataset.lib.FileSet): 9
Location (org.apache.twill.filesystem.Location): 9
File (java.io.File): 8
URL (java.net.URL): 7
PrintStream (java.io.PrintStream): 6
HttpURLConnection (java.net.HttpURLConnection): 6
WorkflowManager (io.cdap.cdap.test.WorkflowManager): 5