
Example 26 with ApplicationWithPrograms

Use of co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.

In class FlowTest, the method testFlowPendingMetric:

@Test
public void testFlowPendingMetric() throws Exception {
    final ApplicationWithPrograms app = AppFabricTestHelper.deployApplicationWithManager(PendingMetricTestApp.class, TEMP_FOLDER_SUPPLIER);
    File tempFolder = TEMP_FOLDER_SUPPLIER.get();
    ProgramController controller = null;
    for (ProgramDescriptor programDescriptor : app.getPrograms()) {
        // running mapreduce is out of scope of this test (there's a separate unit test for that)
        if (programDescriptor.getProgramId().getType() == ProgramType.FLOW) {
            Arguments args = new BasicArguments(ImmutableMap.of("temp", tempFolder.getAbsolutePath(), "count", "4"));
            controller = AppFabricTestHelper.submit(app, programDescriptor.getSpecification().getClassName(), args, TEMP_FOLDER_SUPPLIER);
        }
    }
    Assert.assertNotNull(controller);
    Map<String, String> tagsForSourceToOne = metricTagsForQueue("source", "ints", "forward-one");
    Map<String, String> tagsForSourceToTwo = metricTagsForQueue("source", null, "forward-two");
    Map<String, String> tagsForSourceToTwoInts = metricTagsForQueue("source", "ints", "forward-two");
    Map<String, String> tagsForSourceToTwoStrings = metricTagsForQueue("source", "strings", "forward-two");
    Map<String, String> tagsForOneToSink = metricTagsForQueue("forward-one", "queue", "sink");
    Map<String, String> tagsForTwoToSink = metricTagsForQueue("forward-two", "queue", "sink");
    Map<String, String> tagsForAllToOne = metricTagsForQueue(null, null, "forward-one");
    Map<String, String> tagsForAllToTwo = metricTagsForQueue(null, null, "forward-two");
    Map<String, String> tagsForAllToSink = metricTagsForQueue(null, null, "sink");
    Map<String, String> tagsForAll = metricTagsForQueue(null, null, null);
    try {
        // source emits 4, then forward-one reads 1, hence 3 should be pending
        // wait a little longer as flow needs to start
        waitForPending(tagsForSourceToOne, 3, 5000);
        // the aggregate across all producers into forward-one should show the same pending count
        waitForPending(tagsForAllToOne, 3, 100);
        // forward-two receives each of the 4 as a string and an int, but could have read 1 at most per each queue
        // so there should be either 3 + 4 = 7 pending or 3 + 3 = 6 pending, or 4 + 4 = 8 pending
        // but we don't know whether the queue pending count will be 4, 3 or 3, 4 or 3, 3 or 4, 4
        long intPending = waitForPending(tagsForSourceToTwoInts, 3, 4L, 1000);
        long stringPending = waitForPending(tagsForSourceToTwoStrings, 3, 4L, 1000);
        long totalPending = intPending + stringPending;
        Assert.assertTrue(String.format("Expected the pending events count to be 6, 7 or 8. But it was %d", totalPending), totalPending == 6 || totalPending == 7 || totalPending == 8);
        waitForPending(tagsForSourceToTwo, 7, 6L, 500);
        waitForPending(tagsForAllToTwo, 7, 6L, 100);
        // neither forward-one nor forward-two has emitted downstream yet, so the total pending should be
        // 12 - 1 (forward-one) - 1 or 2 (forward-two) => 10 or 9 events
        waitForPending(tagsForAll, 10, 9L, 100);
        // kick on forward-one, it should now consume all its events
        Assert.assertTrue(new File(tempFolder, "one").createNewFile());
        waitForPending(tagsForSourceToOne, 0, 2000);
        waitForPending(tagsForAllToOne, 0, 100);
        // sink has received 4 but started to read 1, so it has 3 pending
        waitForPending(tagsForOneToSink, 3, 1000);
        waitForPending(tagsForAllToSink, 3, 100);
        // kick-off forward-two, it should now consume all its integer and string events
        Assert.assertTrue(new File(tempFolder, "two-i").createNewFile());
        Assert.assertTrue(new File(tempFolder, "two-s").createNewFile());
        // pending events for all of forward-two's queues should go to zero
        waitForPending(tagsForSourceToTwoInts, 0, 2000);
        waitForPending(tagsForSourceToTwoStrings, 0, 1000);
        waitForPending(tagsForSourceToTwo, 0, 1000);
        waitForPending(tagsForAllToTwo, 0, 100);
        // but now sink should have 8 more events waiting
        waitForPending(tagsForOneToSink, 3, 1000);
        waitForPending(tagsForTwoToSink, 8, 1000);
        waitForPending(tagsForAllToSink, 11, 100);
        // kick off sink, its pending events should now go to zero
        Assert.assertTrue(new File(tempFolder, "three").createNewFile());
        waitForPending(tagsForOneToSink, 0, 2000);
        waitForPending(tagsForTwoToSink, 0, 2000);
        waitForPending(tagsForAllToSink, 0, 100);
    } finally {
        controller.stop();
    }
}
Also used : ProgramController(co.cask.cdap.app.runtime.ProgramController) ApplicationWithPrograms(co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms) Arguments(co.cask.cdap.app.runtime.Arguments) BasicArguments(co.cask.cdap.internal.app.runtime.BasicArguments) ProgramDescriptor(co.cask.cdap.app.program.ProgramDescriptor) BasicArguments(co.cask.cdap.internal.app.runtime.BasicArguments) File(java.io.File) Test(org.junit.Test)
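The helpers metricTagsForQueue and waitForPending are defined elsewhere in FlowTest and are not shown here; from their usage, the first builds the metric-context tags for a producer/queue/consumer triple and the second polls the pending-events metric until it reaches the expected count. A minimal sketch of such a polling helper, written against a plain Callable<Long> rather than the CDAP metric store (the class and method names below are illustrative, not the real FlowTest helpers):

import java.util.concurrent.Callable;
import java.util.concurrent.TimeoutException;

public final class PendingWait {

    private PendingWait() {
    }

    // Polls the counter until it reports the expected value or the timeout expires.
    // Illustrative stand-in for FlowTest.waitForPending, not the real helper.
    public static long waitForValue(Callable<Long> pendingCount, long expected, long timeoutMillis)
            throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        long last = -1;
        while (System.currentTimeMillis() < deadline) {
            last = pendingCount.call();
            if (last == expected) {
                return last;
            }
            Thread.sleep(50);
        }
        throw new TimeoutException("expected pending count " + expected + " but last observed " + last);
    }
}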

Example 27 with ApplicationWithPrograms

Use of co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.

In class MultiConsumerTest, the method testMulti:

@Test
public void testMulti() throws Exception {
    // TODO: Fix this test case to really test with numGroups settings.
    final ApplicationWithPrograms app = AppFabricTestHelper.deployApplicationWithManager(MultiApp.class, TEMP_FOLDER_SUPPLIER);
    List<ProgramController> controllers = Lists.newArrayList();
    for (ProgramDescriptor programDescriptor : app.getPrograms()) {
        controllers.add(AppFabricTestHelper.submit(app, programDescriptor.getSpecification().getClassName(), new BasicArguments(), TEMP_FOLDER_SUPPLIER));
    }
    DatasetFramework datasetFramework = AppFabricTestHelper.getInjector().getInstance(DatasetFramework.class);
    DynamicDatasetCache datasetCache = new SingleThreadDatasetCache(new SystemDatasetInstantiator(datasetFramework, getClass().getClassLoader(), null), AppFabricTestHelper.getInjector().getInstance(TransactionSystemClient.class), NamespaceId.DEFAULT, DatasetDefinition.NO_ARGUMENTS, null, null);
    final KeyValueTable accumulated = datasetCache.getDataset("accumulated");
    TransactionExecutorFactory txExecutorFactory = AppFabricTestHelper.getInjector().getInstance(TransactionExecutorFactory.class);
    // Try to get the accumulated result and verify it. Expect the result to appear within at most 60 seconds.
    int trial = 0;
    while (trial < 60) {
        try {
            Transactions.createTransactionExecutor(txExecutorFactory, accumulated).execute(new TransactionExecutor.Subroutine() {

                @Override
                public void apply() throws Exception {
                    byte[] value = accumulated.read(MultiApp.KEY);
                    // ((1 + 99) * 99 / 2) * 3 = Sum(1..99) * 3 = 14850, matching the asserted value
                    Assert.assertEquals(((1 + 99) * 99 / 2) * 3, Longs.fromByteArray(value));
                }
            });
            break;
        } catch (TransactionFailureException e) {
            // No-op
            trial++;
            TimeUnit.SECONDS.sleep(1);
        }
    }
    Assert.assertTrue(trial < 60);
    for (ProgramController controller : controllers) {
        controller.stop().get();
    }
}
Also used : ProgramController(co.cask.cdap.app.runtime.ProgramController) DynamicDatasetCache(co.cask.cdap.data2.dataset2.DynamicDatasetCache) TransactionExecutor(org.apache.tephra.TransactionExecutor) SingleThreadDatasetCache(co.cask.cdap.data2.dataset2.SingleThreadDatasetCache) TransactionFailureException(org.apache.tephra.TransactionFailureException) IOException(java.io.IOException) TransactionExecutorFactory(org.apache.tephra.TransactionExecutorFactory) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) TransactionFailureException(org.apache.tephra.TransactionFailureException) ApplicationWithPrograms(co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms) SystemDatasetInstantiator(co.cask.cdap.data.dataset.SystemDatasetInstantiator) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) ProgramDescriptor(co.cask.cdap.app.program.ProgramDescriptor) BasicArguments(co.cask.cdap.internal.app.runtime.BasicArguments) Test(org.junit.Test)
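The while loop above retries a transactional read for up to 60 seconds because the flowlets may still be accumulating when the first check runs; the loop treats a TransactionFailureException as "not ready yet" and retries after a one-second sleep. A stripped-down sketch of the same bounded-retry idea in plain Java, independent of CDAP and Tephra (the BoundedRetry name and signature are assumptions for illustration):

import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;

public final class BoundedRetry {

    private BoundedRetry() {
    }

    // Runs the check once per second until it stops throwing, or rethrows the last
    // failure after maxAttempts tries. Mirrors the trial counter used in testMulti.
    public static <T> T retry(Callable<T> check, int maxAttempts) throws Exception {
        if (maxAttempts <= 0) {
            throw new IllegalArgumentException("maxAttempts must be positive");
        }
        Exception last = null;
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            try {
                return check.call();
            } catch (Exception e) {
                last = e;
                TimeUnit.SECONDS.sleep(1);
            }
        }
        throw last;
    }
}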

Example 28 with ApplicationWithPrograms

Use of co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.

In class WorkerProgramRunnerTest, the method testWorkerDatasetWithMetrics:

@Test
public void testWorkerDatasetWithMetrics() throws Throwable {
    final ApplicationWithPrograms app = AppFabricTestHelper.deployApplicationWithManager(AppWithWorker.class, TEMP_FOLDER_SUPPLIER);
    ProgramController controller = startProgram(app, AppWithWorker.TableWriter.class);
    // validate worker wrote the "initialize" and "run" rows
    final TransactionExecutor executor = txExecutorFactory.createExecutor(datasetCache);
    // wait at most 5 seconds until the "RUN" row is set (indicates the worker has started running)
    Tasks.waitFor(AppWithWorker.RUN, new Callable<String>() {

        @Override
        public String call() throws Exception {
            return executor.execute(new Callable<String>() {

                @Override
                public String call() throws Exception {
                    KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
                    return Bytes.toString(kvTable.read(AppWithWorker.RUN));
                }
            });
        }
    }, 5, TimeUnit.SECONDS);
    stopProgram(controller);
    txExecutorFactory.createExecutor(datasetCache.getTransactionAwares()).execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
            Assert.assertEquals(AppWithWorker.RUN, Bytes.toString(kvTable.read(AppWithWorker.RUN)));
            Assert.assertEquals(AppWithWorker.INITIALIZE, Bytes.toString(kvTable.read(AppWithWorker.INITIALIZE)));
            Assert.assertEquals(AppWithWorker.STOP, Bytes.toString(kvTable.read(AppWithWorker.STOP)));
        }
    });
    // validate that the table emitted metrics
    Tasks.waitFor(3L, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            Collection<MetricTimeSeries> metrics = metricStore.query(new MetricDataQuery(
                0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                ImmutableMap.of(
                    Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getEntityName(),
                    Constants.Metrics.Tag.APP, AppWithWorker.NAME,
                    Constants.Metrics.Tag.WORKER, AppWithWorker.WORKER,
                    Constants.Metrics.Tag.DATASET, AppWithWorker.DATASET),
                Collections.<String>emptyList()));
            if (metrics.isEmpty()) {
                return 0L;
            }
            Assert.assertEquals(1, metrics.size());
            MetricTimeSeries ts = metrics.iterator().next();
            Assert.assertEquals(1, ts.getTimeValues().size());
            return ts.getTimeValues().get(0).getValue();
        }
    }, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
Also used : ProgramController(co.cask.cdap.app.runtime.ProgramController) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) TransactionExecutor(org.apache.tephra.TransactionExecutor) AppWithWorker(co.cask.cdap.AppWithWorker) IOException(java.io.IOException) Callable(java.util.concurrent.Callable) ApplicationWithPrograms(co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) Collection(java.util.Collection) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) Test(org.junit.Test)
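Assuming a Java 8 source level, the two nested anonymous Callables in the Tasks.waitFor call above can be flattened into lambdas with identical behavior; the fragment below is a sketch of the same wait, reusing the executor and datasetCache variables from the test:

    // Same wait as above, expressed with lambdas; polls until the "run" row appears, for at most 5 seconds.
    Callable<String> readRun = () -> {
        KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
        return Bytes.toString(kvTable.read(AppWithWorker.RUN));
    };
    Tasks.waitFor(AppWithWorker.RUN, () -> executor.execute(readRun), 5, TimeUnit.SECONDS);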

Example 29 with ApplicationWithPrograms

Use of co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.

In class PreviewRunnerModule, the method configure:

@Override
protected void configure() {
    bind(ArtifactRepository.class).toInstance(artifactRepository);
    expose(ArtifactRepository.class);
    bind(ArtifactStore.class).toInstance(artifactStore);
    expose(ArtifactStore.class);
    bind(AuthorizerInstantiator.class).toInstance(authorizerInstantiator);
    expose(AuthorizerInstantiator.class);
    bind(AuthorizationEnforcer.class).toInstance(authorizationEnforcer);
    expose(AuthorizationEnforcer.class);
    bind(PrivilegesManager.class).toInstance(privilegesManager);
    expose(PrivilegesManager.class);
    bind(StreamConsumerFactory.class).to(InMemoryStreamConsumerFactory.class).in(Scopes.SINGLETON);
    expose(StreamConsumerFactory.class);
    bind(StreamCoordinatorClient.class).toInstance(streamCoordinatorClient);
    expose(StreamCoordinatorClient.class);
    bind(PreferencesStore.class).toInstance(preferencesStore);
    // bind explore client to mock.
    bind(ExploreClient.class).to(MockExploreClient.class);
    expose(ExploreClient.class);
    bind(StorageProviderNamespaceAdmin.class).to(LocalStorageProviderNamespaceAdmin.class);
    bind(PipelineFactory.class).to(SynchronousPipelineFactory.class);
    install(new FactoryModuleBuilder().implement(new TypeLiteral<Manager<AppDeploymentInfo, ApplicationWithPrograms>>() {
    }, new TypeLiteral<PreviewApplicationManager<AppDeploymentInfo, ApplicationWithPrograms>>() {
    }).build(new TypeLiteral<ManagerFactory<AppDeploymentInfo, ApplicationWithPrograms>>() {
    }));
    bind(Store.class).to(DefaultStore.class);
    bind(RouteStore.class).to(LocalRouteStore.class).in(Scopes.SINGLETON);
    bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
    expose(UGIProvider.class);
    bind(RuntimeStore.class).to(DefaultStore.class);
    expose(RuntimeStore.class);
    // we don't delete namespaces in preview as we just delete the preview directory when it's done
    bind(NamespaceResourceDeleter.class).to(NoopNamespaceResourceDeleter.class).in(Scopes.SINGLETON);
    bind(NamespaceAdmin.class).to(DefaultNamespaceAdmin.class).in(Scopes.SINGLETON);
    bind(NamespaceQueryAdmin.class).to(DefaultNamespaceAdmin.class).in(Scopes.SINGLETON);
    expose(NamespaceAdmin.class);
    expose(NamespaceQueryAdmin.class);
    bind(PreviewRunner.class).to(DefaultPreviewRunner.class).in(Scopes.SINGLETON);
    expose(PreviewRunner.class);
    bind(PreviewStore.class).to(DefaultPreviewStore.class).in(Scopes.SINGLETON);
    bind(Scheduler.class).to(NoOpScheduler.class);
    bind(co.cask.cdap.internal.app.runtime.schedule.Scheduler.class).to(co.cask.cdap.internal.app.runtime.schedule.NoOpScheduler.class);
    bind(DataTracerFactory.class).to(DefaultDataTracerFactory.class);
    expose(DataTracerFactory.class);
    bind(OwnerStore.class).to(DefaultOwnerStore.class);
    expose(OwnerStore.class);
    bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
    expose(OwnerAdmin.class);
}
Also used : ExploreClient(co.cask.cdap.explore.client.ExploreClient) MockExploreClient(co.cask.cdap.explore.client.MockExploreClient) AuthorizerInstantiator(co.cask.cdap.security.authorization.AuthorizerInstantiator) SynchronousPipelineFactory(co.cask.cdap.internal.pipeline.SynchronousPipelineFactory) PipelineFactory(co.cask.cdap.pipeline.PipelineFactory) FactoryModuleBuilder(com.google.inject.assistedinject.FactoryModuleBuilder) Scheduler(co.cask.cdap.scheduler.Scheduler) NoOpScheduler(co.cask.cdap.scheduler.NoOpScheduler) UGIProvider(co.cask.cdap.security.impersonation.UGIProvider) UnsupportedUGIProvider(co.cask.cdap.security.impersonation.UnsupportedUGIProvider) RuntimeStore(co.cask.cdap.app.store.RuntimeStore) PreferencesStore(co.cask.cdap.config.PreferencesStore) DefaultPreviewStore(co.cask.cdap.internal.app.store.preview.DefaultPreviewStore) PreviewStore(co.cask.cdap.app.store.preview.PreviewStore) Store(co.cask.cdap.app.store.Store) LocalRouteStore(co.cask.cdap.route.store.LocalRouteStore) OwnerStore(co.cask.cdap.security.impersonation.OwnerStore) DefaultOwnerStore(co.cask.cdap.store.DefaultOwnerStore) RouteStore(co.cask.cdap.route.store.RouteStore) RuntimeStore(co.cask.cdap.app.store.RuntimeStore) DefaultStore(co.cask.cdap.internal.app.store.DefaultStore) ArtifactStore(co.cask.cdap.internal.app.runtime.artifact.ArtifactStore) AuthorizationEnforcer(co.cask.cdap.security.spi.authorization.AuthorizationEnforcer) PrivilegesManager(co.cask.cdap.security.spi.authorization.PrivilegesManager) Manager(co.cask.cdap.app.deploy.Manager) DefaultNamespaceAdmin(co.cask.cdap.internal.app.namespace.DefaultNamespaceAdmin) TypeLiteral(com.google.inject.TypeLiteral) AppDeploymentInfo(co.cask.cdap.internal.app.deploy.pipeline.AppDeploymentInfo) StorageProviderNamespaceAdmin(co.cask.cdap.internal.app.namespace.StorageProviderNamespaceAdmin) LocalStorageProviderNamespaceAdmin(co.cask.cdap.internal.app.namespace.LocalStorageProviderNamespaceAdmin) ApplicationWithPrograms(co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms) PreferencesStore(co.cask.cdap.config.PreferencesStore) LocalRouteStore(co.cask.cdap.route.store.LocalRouteStore) NoopNamespaceResourceDeleter(co.cask.cdap.internal.app.namespace.NoopNamespaceResourceDeleter) DefaultPreviewStore(co.cask.cdap.internal.app.store.preview.DefaultPreviewStore) DefaultOwnerAdmin(co.cask.cdap.security.impersonation.DefaultOwnerAdmin) OwnerAdmin(co.cask.cdap.security.impersonation.OwnerAdmin) PrivilegesManager(co.cask.cdap.security.spi.authorization.PrivilegesManager) StreamCoordinatorClient(co.cask.cdap.data.stream.StreamCoordinatorClient) ArtifactRepository(co.cask.cdap.internal.app.runtime.artifact.ArtifactRepository) OwnerStore(co.cask.cdap.security.impersonation.OwnerStore) DefaultOwnerStore(co.cask.cdap.store.DefaultOwnerStore) ArtifactStore(co.cask.cdap.internal.app.runtime.artifact.ArtifactStore) InMemoryStreamConsumerFactory(co.cask.cdap.data2.transaction.stream.inmemory.InMemoryStreamConsumerFactory) DefaultPreviewRunner(co.cask.cdap.internal.app.preview.DefaultPreviewRunner) DefaultDataTracerFactory(co.cask.cdap.internal.app.preview.DefaultDataTracerFactory)
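The install(new FactoryModuleBuilder() ...) call above uses Guice assisted injection so that the generic ManagerFactory produces PreviewApplicationManager instances behind the Manager<AppDeploymentInfo, ApplicationWithPrograms> interface. A self-contained sketch of the same FactoryModuleBuilder + TypeLiteral pattern, with hypothetical Report/CsvReport/ReportFactory types standing in for Manager, PreviewApplicationManager, and ManagerFactory:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.TypeLiteral;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.FactoryModuleBuilder;

public class AssistedInjectSketch {

    // Generic interface, analogous to Manager<I, O>.
    interface Report<T> {
        String render(T input);
    }

    // Concrete implementation, analogous to PreviewApplicationManager.
    static class CsvReport<T> implements Report<T> {
        private final String title;

        @Inject
        CsvReport(@Assisted String title) {
            this.title = title;
        }

        @Override
        public String render(T input) {
            return title + "," + input;
        }
    }

    // Factory interface that Guice implements for us, analogous to ManagerFactory.
    interface ReportFactory {
        Report<String> create(String title);
    }

    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                install(new FactoryModuleBuilder()
                    .implement(new TypeLiteral<Report<String>>() { }, new TypeLiteral<CsvReport<String>>() { })
                    .build(ReportFactory.class));
            }
        });
        ReportFactory factory = injector.getInstance(ReportFactory.class);
        // Prints "header,value": the factory supplies the assisted title, Guice wires the rest.
        System.out.println(factory.create("header").render("value"));
    }
}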

Example 30 with ApplicationWithPrograms

Use of co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.

In class WorkflowTest, the method testWorkflow:

@Test(timeout = 120 * 1000L)
public void testWorkflow() throws Exception {
    final ApplicationWithPrograms app = AppFabricTestHelper.deployApplicationWithManager(WorkflowApp.class, TEMP_FOLDER_SUPPLIER);
    final Injector injector = AppFabricTestHelper.getInjector();
    final ProgramDescriptor programDescriptor = Iterators.filter(app.getPrograms().iterator(), new Predicate<ProgramDescriptor>() {

        @Override
        public boolean apply(ProgramDescriptor input) {
            return input.getProgramId().getType() == ProgramType.WORKFLOW;
        }
    }).next();
    String inputPath = createInput();
    String outputPath = new File(tmpFolder.newFolder(), "output").getAbsolutePath();
    BasicArguments userArgs = new BasicArguments(ImmutableMap.of("inputPath", inputPath, "outputPath", outputPath));
    final SettableFuture<String> completion = SettableFuture.create();
    final ProgramController controller = AppFabricTestHelper.submit(app, programDescriptor.getSpecification().getClassName(), userArgs, TEMP_FOLDER_SUPPLIER);
    controller.addListener(new AbstractListener() {

        @Override
        public void init(ProgramController.State currentState, @Nullable Throwable cause) {
            LOG.info("Starting");
            injector.getInstance(Store.class).setStart(controller.getProgramRunId().getParent(), controller.getProgramRunId().getRun(), System.currentTimeMillis());
        }

        @Override
        public void completed() {
            LOG.info("Completed");
            completion.set("Completed");
        }

        @Override
        public void error(Throwable cause) {
            LOG.info("Error", cause);
            completion.setException(cause);
        }
    }, Threads.SAME_THREAD_EXECUTOR);
    completion.get();
}
Also used : ProgramController(co.cask.cdap.app.runtime.ProgramController) Predicate(com.google.common.base.Predicate) ApplicationWithPrograms(co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms) Injector(com.google.inject.Injector) AbstractListener(co.cask.cdap.internal.app.runtime.AbstractListener) ProgramDescriptor(co.cask.cdap.app.program.ProgramDescriptor) BasicArguments(co.cask.cdap.internal.app.runtime.BasicArguments) File(java.io.File) Test(org.junit.Test)
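The listener registered in testWorkflow bridges CDAP's callback-style program lifecycle to a blocking wait by completing a Guava SettableFuture: completed() completes the future, error() fails it, and completion.get() blocks the test thread until either happens. A self-contained sketch of that callback-to-future pattern, with a hypothetical Job/JobListener pair standing in for ProgramController and AbstractListener:

import com.google.common.util.concurrent.SettableFuture;
import java.util.concurrent.ExecutionException;

public class CallbackToFutureSketch {

    // Hypothetical callback interface, standing in for the controller listener.
    interface JobListener {
        void completed();

        void error(Throwable cause);
    }

    // Hypothetical job that reports its outcome through a listener.
    interface Job {
        void addListener(JobListener listener);
    }

    // Blocks until the job reports completion, rethrowing any failure wrapped in ExecutionException.
    static void awaitCompletion(Job job) throws InterruptedException, ExecutionException {
        final SettableFuture<String> completion = SettableFuture.create();
        job.addListener(new JobListener() {
            @Override
            public void completed() {
                completion.set("Completed");
            }

            @Override
            public void error(Throwable cause) {
                completion.setException(cause);
            }
        });
        // get() blocks until set() or setException() is called by the listener.
        completion.get();
    }

    public static void main(String[] args) throws Exception {
        // A trivially successful job for demonstration: it completes immediately on registration.
        Job immediate = listener -> listener.completed();
        awaitCompletion(immediate);
        System.out.println("Job finished");
    }
}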

Aggregations

ApplicationWithPrograms (co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms): 30 usages
Test (org.junit.Test): 23 usages
BasicArguments (co.cask.cdap.internal.app.runtime.BasicArguments): 18 usages
TransactionExecutor (org.apache.tephra.TransactionExecutor): 11 usages
KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable): 10 usages
ProgramController (co.cask.cdap.app.runtime.ProgramController): 8 usages
ProgramDescriptor (co.cask.cdap.app.program.ProgramDescriptor): 7 usages
Location (org.apache.twill.filesystem.Location): 6 usages
IOException (java.io.IOException): 5 usages
TransactionAware (org.apache.tephra.TransactionAware): 5 usages
Table (co.cask.cdap.api.dataset.table.Table): 4 usages
File (java.io.File): 4 usages
RandomEndpointStrategy (co.cask.cdap.common.discovery.RandomEndpointStrategy): 3 usages
AppDeploymentInfo (co.cask.cdap.internal.app.deploy.pipeline.AppDeploymentInfo): 3 usages
Discoverable (org.apache.twill.discovery.Discoverable): 3 usages
DiscoveryServiceClient (org.apache.twill.discovery.DiscoveryServiceClient): 3 usages
DatasetInstantiationException (co.cask.cdap.api.data.DatasetInstantiationException): 2 usages
FileSet (co.cask.cdap.api.dataset.lib.FileSet): 2 usages
StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent): 2 usages
ApplicationNotFoundException (co.cask.cdap.common.ApplicationNotFoundException): 2 usages