
Example 6 with TransactionExecutorFactory

Use of co.cask.cdap.data2.transaction.TransactionExecutorFactory in project cdap by caskdata.

From class UpgradeTool, method createInjector.

@VisibleForTesting
Injector createInjector() throws Exception {
    return Guice.createInjector(
        new ConfigModule(cConf, hConf),
        new LocationRuntimeModule().getDistributedModules(),
        new ZKClientModule(),
        new DiscoveryRuntimeModule().getDistributedModules(),
        new MessagingClientModule(),
        Modules.override(new DataSetsModules().getDistributedModules()).with(new AbstractModule() {

        @Override
        protected void configure() {
            bind(DatasetFramework.class).to(InMemoryDatasetFramework.class).in(Scopes.SINGLETON);
            // DataSetsModules().getDistributedModules() binds DatasetFramework to RemoteDatasetFramework,
            // so override that binding with the same InMemoryDatasetFramework singleton
            bind(DatasetFramework.class).annotatedWith(Names.named(DataSetsModules.BASE_DATASET_FRAMEWORK)).to(DatasetFramework.class);
            install(new FactoryModuleBuilder().implement(DatasetDefinitionRegistry.class, DefaultDatasetDefinitionRegistry.class).build(DatasetDefinitionRegistryFactory.class));
            // CDAP-5954 Upgrade tool does not need to record lineage and metadata changes for now.
            bind(LineageWriter.class).to(NoOpLineageWriter.class);
        }
        }),
        new ViewAdminModules().getDistributedModules(),
        new StreamAdminModules().getDistributedModules(),
        new NotificationFeedClientModule(),
        new TwillModule(),
        new ExploreClientModule(),
        new ProgramRunnerRuntimeModule().getDistributedModules(),
        new ServiceStoreModules().getDistributedModules(),
        new SystemDatasetRuntimeModule().getDistributedModules(),
        // don't need real notifications for upgrade, so use the in-memory implementations
        new NotificationServiceRuntimeModule().getInMemoryModules(),
        new KafkaClientModule(),
        new NamespaceStoreModule().getDistributedModules(),
        new AuthenticationContextModules().getMasterModule(),
        new AuthorizationModule(),
        new AuthorizationEnforcementModule().getMasterModule(),
        new SecureStoreModules().getDistributedModules(),
        new DataFabricModules(UpgradeTool.class.getName()).getDistributedModules(),
        new AppFabricServiceRuntimeModule().getDistributedModules(),
        new AbstractModule() {

        @Override
        protected void configure() {
            // DataFabricDistributedModule needs a MetricsCollectionService binding; since the upgrade tool
            // does not do anything with metrics, bind it to NoOpMetricsCollectionService
            bind(MetricsCollectionService.class).to(NoOpMetricsCollectionService.class).in(Scopes.SINGLETON);
            bind(MetricDatasetFactory.class).to(DefaultMetricDatasetFactory.class).in(Scopes.SINGLETON);
            bind(MetricStore.class).to(DefaultMetricStore.class);
        }

        @Provides
        @Singleton
        @Named("datasetInstanceManager")
        @SuppressWarnings("unused")
        public DatasetInstanceManager getDatasetInstanceManager(TransactionSystemClientService txClient, TransactionExecutorFactory txExecutorFactory, @Named("datasetMDS") DatasetFramework framework) {
            return new DatasetInstanceManager(txClient, txExecutorFactory, framework);
        }

        // Needed because LocalApplicationManager expects a DatasetFramework
        // injection named "datasetMDS"
        @Provides
        @Singleton
        @Named("datasetMDS")
        @SuppressWarnings("unused")
        public DatasetFramework getInDsFramework(DatasetFramework dsFramework) {
            return dsFramework;
        }
    });
}
Also used : MessagingClientModule(co.cask.cdap.messaging.guice.MessagingClientModule) ConfigModule(co.cask.cdap.common.guice.ConfigModule) FactoryModuleBuilder(com.google.inject.assistedinject.FactoryModuleBuilder) NamespaceStoreModule(co.cask.cdap.store.guice.NamespaceStoreModule) NotificationServiceRuntimeModule(co.cask.cdap.notifications.guice.NotificationServiceRuntimeModule) ViewAdminModules(co.cask.cdap.data.view.ViewAdminModules) TransactionExecutorFactory(co.cask.cdap.data2.transaction.TransactionExecutorFactory) MetricDatasetFactory(co.cask.cdap.metrics.store.MetricDatasetFactory) DefaultMetricDatasetFactory(co.cask.cdap.metrics.store.DefaultMetricDatasetFactory) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) InMemoryDatasetFramework(co.cask.cdap.data2.dataset2.InMemoryDatasetFramework) ZKClientModule(co.cask.cdap.common.guice.ZKClientModule) DatasetDefinitionRegistryFactory(co.cask.cdap.data2.dataset2.DatasetDefinitionRegistryFactory) KafkaClientModule(co.cask.cdap.common.guice.KafkaClientModule) TransactionSystemClientService(co.cask.cdap.data2.transaction.TransactionSystemClientService) SystemDatasetRuntimeModule(co.cask.cdap.data.runtime.SystemDatasetRuntimeModule) DiscoveryRuntimeModule(co.cask.cdap.common.guice.DiscoveryRuntimeModule) AuthorizationModule(co.cask.cdap.app.guice.AuthorizationModule) InMemoryDatasetFramework(co.cask.cdap.data2.dataset2.InMemoryDatasetFramework) Named(com.google.inject.name.Named) TwillModule(co.cask.cdap.app.guice.TwillModule) DatasetInstanceManager(co.cask.cdap.data2.datafabric.dataset.instance.DatasetInstanceManager) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) NoOpMetricsCollectionService(co.cask.cdap.common.metrics.NoOpMetricsCollectionService) AuthenticationContextModules(co.cask.cdap.security.auth.context.AuthenticationContextModules) DataSetsModules(co.cask.cdap.data.runtime.DataSetsModules) SecureStoreModules(co.cask.cdap.security.guice.SecureStoreModules) LocationRuntimeModule(co.cask.cdap.common.guice.LocationRuntimeModule) DefaultMetricStore(co.cask.cdap.metrics.store.DefaultMetricStore) Provides(com.google.inject.Provides) AbstractModule(com.google.inject.AbstractModule) StreamAdminModules(co.cask.cdap.data.stream.StreamAdminModules) ProgramRunnerRuntimeModule(co.cask.cdap.app.guice.ProgramRunnerRuntimeModule) LineageWriter(co.cask.cdap.data2.metadata.writer.LineageWriter) NoOpLineageWriter(co.cask.cdap.data2.metadata.writer.NoOpLineageWriter) ExploreClientModule(co.cask.cdap.explore.guice.ExploreClientModule) Singleton(com.google.inject.Singleton) NotificationFeedClientModule(co.cask.cdap.notifications.feeds.client.NotificationFeedClientModule) DataFabricModules(co.cask.cdap.data.runtime.DataFabricModules) ServiceStoreModules(co.cask.cdap.app.guice.ServiceStoreModules) AuthorizationEnforcementModule(co.cask.cdap.security.authorization.AuthorizationEnforcementModule) AppFabricServiceRuntimeModule(co.cask.cdap.app.guice.AppFabricServiceRuntimeModule) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
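
The createInjector() method above exposes DatasetInstanceManager and the metadata DatasetFramework only through @Named provider methods. The sketch below is illustrative and not part of UpgradeTool: it shows how such named bindings can be looked up from the resulting Injector with standard Guice APIs (Key.get plus Names.named); the helper class and its method names are assumptions made for the example.

import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.name.Names;

import co.cask.cdap.data2.datafabric.dataset.instance.DatasetInstanceManager;
import co.cask.cdap.data2.dataset2.DatasetFramework;

// Hypothetical helper (not part of UpgradeTool): resolves the @Named bindings
// declared in createInjector() above via standard Guice keys.
final class NamedBindingLookup {

    // Resolves the provider method annotated @Named("datasetInstanceManager").
    static DatasetInstanceManager datasetInstanceManager(Injector injector) {
        return injector.getInstance(
            Key.get(DatasetInstanceManager.class, Names.named("datasetInstanceManager")));
    }

    // Resolves the "datasetMDS" DatasetFramework binding that LocalApplicationManager expects.
    static DatasetFramework datasetMDS(Injector injector) {
        return injector.getInstance(
            Key.get(DatasetFramework.class, Names.named("datasetMDS")));
    }
}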

Example 7 with TransactionExecutorFactory

Use of co.cask.cdap.data2.transaction.TransactionExecutorFactory in project cdap by caskdata.

From class ProgramScheduleStoreDatasetTest, method testFindSchedulesByEventAndUpdateSchedule.

@Test
public void testFindSchedulesByEventAndUpdateSchedule() throws Exception {
    DatasetFramework dsFramework = getInjector().getInstance(DatasetFramework.class);
    TransactionSystemClient txClient = getInjector().getInstance(TransactionSystemClient.class);
    TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txClient);
    dsFramework.truncateInstance(Schedulers.STORE_DATASET_ID);
    final ProgramScheduleStoreDataset store = dsFramework.getDataset(Schedulers.STORE_DATASET_ID, new HashMap<String, String>(), null);
    Assert.assertNotNull(store);
    TransactionExecutor txExecutor = txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) store));
    final ProgramSchedule sched11 = new ProgramSchedule("sched11", "one partition schedule", PROG1_ID, ImmutableMap.of("prop3", "abc"), new PartitionTrigger(DS1_ID, 1), ImmutableList.<Constraint>of());
    final ProgramSchedule sched12 = new ProgramSchedule("sched12", "two partition schedule", PROG1_ID, ImmutableMap.of("propper", "popper"), new PartitionTrigger(DS2_ID, 2), ImmutableList.<Constraint>of());
    final ProgramSchedule sched22 = new ProgramSchedule("sched22", "twentytwo partition schedule", PROG2_ID, ImmutableMap.of("nn", "4"), new PartitionTrigger(DS2_ID, 22), ImmutableList.<Constraint>of());
    final ProgramSchedule sched31 = new ProgramSchedule("sched31", "a program status trigger", PROG3_ID, ImmutableMap.of("propper", "popper"), new ProgramStatusTrigger(PROG1_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED), ImmutableList.<Constraint>of());
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // events for DS1 or DS2 should trigger nothing; validate that an empty collection is returned
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID)).isEmpty());
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID)).isEmpty());
            // program status events for PROG1 should trigger nothing; they should also return empty collections
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED)).isEmpty());
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED)).isEmpty());
            Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED)).isEmpty());
        }
    });
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            store.addSchedules(ImmutableList.of(sched11, sched12, sched22, sched31));
        }
    });
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // event for ProgramStatus should trigger only sched31
            Assert.assertEquals(ImmutableSet.of(sched31), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
            Assert.assertEquals(ImmutableSet.of(sched31), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
            Assert.assertEquals(ImmutableSet.of(sched31), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
            // event for DS1 should trigger only sched11
            Assert.assertEquals(ImmutableSet.of(sched11), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
            // event for DS2 triggers only sched12 and sched22
            Assert.assertEquals(ImmutableSet.of(sched12, sched22), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID))));
        }
    });
    final ProgramSchedule sched11New = new ProgramSchedule(sched11.getName(), "time schedule", PROG1_ID, ImmutableMap.of("timeprop", "time"), new TimeTrigger("* * * * *"), ImmutableList.<Constraint>of());
    final ProgramSchedule sched12New = new ProgramSchedule(sched12.getName(), "one partition schedule", PROG1_ID, ImmutableMap.of("pp", "p"), new PartitionTrigger(DS1_ID, 2), ImmutableList.<Constraint>of());
    final ProgramSchedule sched22New = new ProgramSchedule(sched22.getName(), "program3 failed schedule", PROG2_ID, ImmutableMap.of("ss", "s"), new ProgramStatusTrigger(PROG3_ID, ProgramStatus.FAILED), ImmutableList.<Constraint>of());
    final ProgramSchedule sched31New = new ProgramSchedule(sched31.getName(), "program1 failed schedule", PROG3_ID, ImmutableMap.of("abcd", "efgh"), new ProgramStatusTrigger(PROG1_ID, ProgramStatus.FAILED), ImmutableList.<Constraint>of());
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            store.updateSchedule(sched11New);
            store.updateSchedule(sched12New);
            store.updateSchedule(sched22New);
            store.updateSchedule(sched31New);
        }
    });
    txExecutor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // event for DS1 should trigger only sched12New after update
            Assert.assertEquals(ImmutableSet.of(sched12New), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
            // event for DS2 triggers no schedule after update
            Assert.assertEquals(ImmutableSet.<ProgramSchedule>of(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID))));
            // program status events for PROG1 trigger only on FAILED, not on COMPLETED or KILLED
            Assert.assertEquals(ImmutableSet.of(sched31New), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
            Assert.assertEquals(ImmutableSet.of(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
            Assert.assertEquals(ImmutableSet.of(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
        }
    });
}
Also used : TimeTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger) DynamicTransactionExecutorFactory(co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory) TransactionExecutor(org.apache.tephra.TransactionExecutor) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) TransactionExecutorFactory(co.cask.cdap.data2.transaction.TransactionExecutorFactory) DynamicTransactionExecutorFactory(co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) ProgramSchedule(co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) TransactionAware(org.apache.tephra.TransactionAware) ProgramStatusTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger) PartitionTrigger(co.cask.cdap.internal.app.runtime.schedule.trigger.PartitionTrigger) Test(org.junit.Test)
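
Each transactional block in the test above is an anonymous TransactionExecutor.Subroutine, whose single apply() method makes it lambda-friendly on Java 8+. A minimal sketch of the same pattern written with lambdas follows; it is not fully standalone, it assumes the variables from the test (txExecutor, store, DS1_ID, DS2_ID, sched11 through sched31) and casts to TransactionExecutor.Subroutine to keep the overload choice explicit.

// Sketch only: the first two anonymous Subroutine blocks from the test rewritten as lambdas.
// Assumes Java 8+ and the local variables declared in the test method above.
txExecutor.execute((TransactionExecutor.Subroutine) () -> {
    // before any schedule is added, partition events for DS1 and DS2 match nothing
    Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID)).isEmpty());
    Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID)).isEmpty());
});

txExecutor.execute((TransactionExecutor.Subroutine) () ->
    store.addSchedules(ImmutableList.of(sched11, sched12, sched22, sched31)));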

Aggregations

TransactionExecutorFactory (co.cask.cdap.data2.transaction.TransactionExecutorFactory)7 DynamicTransactionExecutorFactory (co.cask.cdap.data.runtime.DynamicTransactionExecutorFactory)5 DatasetFramework (co.cask.cdap.data2.dataset2.DatasetFramework)5 MetricsCollectionService (co.cask.cdap.api.metrics.MetricsCollectionService)3 ConfigModule (co.cask.cdap.common.guice.ConfigModule)3 DiscoveryRuntimeModule (co.cask.cdap.common.guice.DiscoveryRuntimeModule)3 NoOpMetricsCollectionService (co.cask.cdap.common.metrics.NoOpMetricsCollectionService)3 DatasetInstanceManager (co.cask.cdap.data2.datafabric.dataset.instance.DatasetInstanceManager)3 InMemoryDatasetFramework (co.cask.cdap.data2.dataset2.InMemoryDatasetFramework)3 TransactionSystemClientService (co.cask.cdap.data2.transaction.TransactionSystemClientService)3 TransactionSystemClient (org.apache.tephra.TransactionSystemClient)3 DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException)2 DatasetModule (co.cask.cdap.api.dataset.module.DatasetModule)2 SystemDatasetInstantiatorFactory (co.cask.cdap.data.dataset.SystemDatasetInstantiatorFactory)2 SystemDatasetRuntimeModule (co.cask.cdap.data.runtime.SystemDatasetRuntimeModule)2 DatasetAdminOpHTTPHandler (co.cask.cdap.data2.datafabric.dataset.service.executor.DatasetAdminOpHTTPHandler)2 AuthenticationContextModules (co.cask.cdap.security.auth.context.AuthenticationContextModules)2 AuthorizationEnforcementModule (co.cask.cdap.security.authorization.AuthorizationEnforcementModule)2 AbstractModule (com.google.inject.AbstractModule)2 Singleton (com.google.inject.Singleton)2