Use of org.apache.tephra.TransactionManager in project cdap by caskdata.
In the class BaseHiveExploreServiceTest, the method initialize:
protected static void initialize(CConfiguration cConf, TemporaryFolder tmpFolder, boolean useStandalone,
                                 boolean enableAuthorization) throws Exception {
  if (!runBefore) {
    return;
  }
  Configuration hConf = new Configuration();
  if (enableAuthorization) {
    LocationFactory locationFactory = new LocalLocationFactory(tmpFolder.newFolder());
    Location authExtensionJar = AppJarHelper.createDeploymentJar(locationFactory, InMemoryAuthorizer.class);
    cConf.setBoolean(Constants.Security.ENABLED, true);
    cConf.setBoolean(Constants.Security.Authorization.ENABLED, true);
    cConf.set(Constants.Security.Authorization.EXTENSION_JAR_PATH, authExtensionJar.toURI().getPath());
    cConf.setBoolean(Constants.Security.KERBEROS_ENABLED, false);
    cConf.setInt(Constants.Security.Authorization.CACHE_MAX_ENTRIES, 0);
  }
  List<Module> modules = useStandalone
    ? createStandaloneModules(cConf, hConf, tmpFolder)
    : createInMemoryModules(cConf, hConf, tmpFolder);
  injector = Guice.createInjector(modules);
  if (enableAuthorization) {
    injector.getInstance(AuthorizationBootstrapper.class).run();
  }
  transactionManager = injector.getInstance(TransactionManager.class);
  transactionManager.startAndWait();
  transactionSystemClient = injector.getInstance(TransactionSystemClient.class);
  dsOpService = injector.getInstance(DatasetOpExecutor.class);
  dsOpService.startAndWait();
  datasetService = injector.getInstance(DatasetService.class);
  datasetService.startAndWait();
  exploreExecutorService = injector.getInstance(ExploreExecutorService.class);
  exploreExecutorService.startAndWait();
  datasetFramework = injector.getInstance(DatasetFramework.class);
  exploreClient = injector.getInstance(DiscoveryExploreClient.class);
  exploreService = injector.getInstance(ExploreService.class);
  exploreClient.ping();
  notificationService = injector.getInstance(NotificationService.class);
  notificationService.startAndWait();
  streamService = injector.getInstance(StreamService.class);
  streamService.startAndWait();
  streamHttpService = injector.getInstance(StreamHttpService.class);
  streamHttpService.startAndWait();
  exploreTableManager = injector.getInstance(ExploreTableManager.class);
  streamAdmin = injector.getInstance(StreamAdmin.class);
  streamMetaStore = injector.getInstance(StreamMetaStore.class);
  namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
  namespacedLocationFactory = injector.getInstance(NamespacedLocationFactory.class);
  // Create namespaces. This normally happens when a namespace is created via the REST APIs; since
  // AppFabricServer is not started in Explore tests, that scenario is simulated here by calling the
  // DatasetFramework APIs explicitly.
  createNamespace(NamespaceId.DEFAULT);
  createNamespace(NAMESPACE_ID);
  createNamespace(OTHER_NAMESPACE_ID);
}
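Every fixture on this page follows the same core pattern: obtain a TransactionManager (either from the Guice injector, as above, or by constructing it directly), call startAndWait() before any dataset work, and pair it with a transaction system client. The sketch below shows that lifecycle in isolation, using only the Tephra classes these tests already reference. Note the assumptions: startShort/canCommit/commit/abort are the boolean-returning client API of the Tephra versions CDAP bundled, and startAndWait/stopAndWait come from the older Guava Service API, so treat both as version-dependent.

import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.tephra.Transaction;
import org.apache.tephra.TransactionManager;
import org.apache.tephra.inmemory.InMemoryTxSystemClient;

public class TransactionManagerLifecycleSketch {
  public static void main(String[] args) throws Exception {
    // Start an in-process transaction manager, as the test fixtures on this page do.
    TransactionManager txManager = new TransactionManager(new Configuration());
    txManager.startAndWait();
    try {
      // Talk to it through the in-memory client rather than over RPC.
      InMemoryTxSystemClient txClient = new InMemoryTxSystemClient(txManager);
      Transaction tx = txClient.startShort();
      // No data was changed, so an empty change set is declared before committing.
      if (txClient.canCommit(tx, Collections.<byte[]>emptyList()) && txClient.commit(tx)) {
        System.out.println("committed transaction " + tx.getTransactionId());
      } else {
        txClient.abort(tx);
      }
    } finally {
      txManager.stopAndWait();
    }
  }
}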
Use of org.apache.tephra.TransactionManager in project cdap by caskdata.
In the class ExploreDisabledTest, the method start:
@BeforeClass
public static void start() throws Exception {
  Injector injector = Guice.createInjector(createInMemoryModules(CConfiguration.create(), new Configuration()));
  transactionManager = injector.getInstance(TransactionManager.class);
  transactionManager.startAndWait();
  dsOpExecutor = injector.getInstance(DatasetOpExecutor.class);
  dsOpExecutor.startAndWait();
  datasetService = injector.getInstance(DatasetService.class);
  datasetService.startAndWait();
  exploreClient = injector.getInstance(DiscoveryExploreClient.class);
  try {
    exploreClient.ping();
    Assert.fail("Expected not to be able to ping explore client.");
  } catch (ServiceUnavailableException e) {
    // expected
  }
  datasetFramework = injector.getInstance(DatasetFramework.class);
  namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
  NamespacedLocationFactory namespacedLocationFactory = injector.getInstance(NamespacedLocationFactory.class);
  NamespaceMeta namespaceMeta = new NamespaceMeta.Builder().setName(namespaceId).build();
  namespaceAdmin.create(namespaceMeta);
  // Creating the namespace directory normally happens when a namespace is created via the REST APIs;
  // since AppFabricServer is not started in Explore tests, that step is performed here explicitly.
  namespacedLocationFactory.get(namespaceId).mkdirs();
  exploreClient.addNamespace(namespaceMeta);
}
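The excerpt ends with the setup; the matching teardown is not shown. As a rough sketch of the @AfterClass such a fixture typically pairs with (an assumption about this test, not code quoted from the CDAP repository), the started services are stopped in reverse order of startup:

@AfterClass
public static void stop() throws Exception {
  // Fields are the ones initialized in start() above; stop in reverse order of startup.
  exploreClient.close();              // assumes ExploreClient is Closeable
  datasetService.stopAndWait();
  dsOpExecutor.stopAndWait();
  transactionManager.stopAndWait();
}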
Use of org.apache.tephra.TransactionManager in project cdap by caskdata.
In the class StorageProviderNamespaceAdminTest, the method setup:
@BeforeClass
public static void setup() throws IOException {
  CConfiguration cConf = CConfiguration.create();
  cConf.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().getAbsolutePath());
  cConf.setBoolean(Constants.Explore.EXPLORE_ENABLED, true);
  Injector injector = Guice.createInjector(
    Modules.override(new AppFabricTestModule(cConf)).with(new AbstractModule() {
      @Override
      protected void configure() {
        // Use DefaultNamespacedLocationFactory here to test proper namespace creation in the storage
        // handler, rather than the NamespacedLocationFactoryTestClient.
        bind(NamespacedLocationFactory.class).to(DefaultNamespacedLocationFactory.class);
      }
    }));
  namespacedLocationFactory = injector.getInstance(NamespacedLocationFactory.class);
  storageProviderNamespaceAdmin = injector.getInstance(StorageProviderNamespaceAdmin.class);
  // Start the dataset service so that the namespace store works.
  transactionManager = injector.getInstance(TransactionManager.class);
  transactionManager.startAndWait();
  datasetService = injector.getInstance(DatasetService.class);
  datasetService.startAndWait();
  // We use the store rather than NamespaceAdmin here, because NamespaceAdmin would try to create the
  // home directory for the namespace, which we don't want. We just want to record the namespace meta
  // in the store so it can be looked up during the delete.
  namespaceStore = injector.getInstance(NamespaceStore.class);
}
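The Modules.override(...).with(...) call above is plain Guice: the test reuses the whole AppFabricTestModule and swaps a single binding for the test. A minimal, self-contained sketch of the same pattern follows; the Storage interface and its two implementations are invented for illustration.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.util.Modules;

public class OverrideSketch {
  // Hypothetical types, standing in for NamespacedLocationFactory and its implementations.
  interface Storage { String describe(); }
  static class ProductionStorage implements Storage {
    public String describe() { return "production"; }
  }
  static class TestStorage implements Storage {
    public String describe() { return "test"; }
  }

  public static void main(String[] args) {
    AbstractModule base = new AbstractModule() {
      @Override
      protected void configure() {
        bind(Storage.class).to(ProductionStorage.class);
      }
    };
    AbstractModule testOverride = new AbstractModule() {
      @Override
      protected void configure() {
        bind(Storage.class).to(TestStorage.class);
      }
    };
    // Wherever both modules bind the same key, the override module wins.
    Injector injector = Guice.createInjector(Modules.override(base).with(testOverride));
    System.out.println(injector.getInstance(Storage.class).describe()); // prints "test"
  }
}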
Use of org.apache.tephra.TransactionManager in project cdap by caskdata.
In the class MDSViewStoreTest, the method init:
@BeforeClass
public static void init() throws Exception {
  Injector injector = Guice.createInjector(
    new ConfigModule(CConfiguration.create(), new Configuration()),
    new DataSetServiceModules().getInMemoryModules(),
    new DataSetsModules().getStandaloneModules(),
    new DataFabricModules().getInMemoryModules(),
    new DiscoveryRuntimeModule().getInMemoryModules(),
    new NamespaceClientRuntimeModule().getInMemoryModules(),
    new LocationRuntimeModule().getInMemoryModules(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(AbstractNamespaceClient.class).to(InMemoryNamespaceClient.class);
        bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
        bind(MetricsCollectionService.class).to(NoOpMetricsCollectionService.class).in(Singleton.class);
        bind(ExploreClient.class).to(MockExploreClient.class);
        bind(ViewStore.class).to(MDSViewStore.class).in(Scopes.SINGLETON);
        bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
      }
    });
  viewStore = injector.getInstance(ViewStore.class);
  transactionManager = injector.getInstance(TransactionManager.class);
  transactionManager.startAndWait();
  datasetService = injector.getInstance(DatasetService.class);
  datasetService.startAndWait();
}
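The startAndWait() calls that run through all of these fixtures come from the Guava Service API (the older, synchronous form that CDAP's dependency versions still expose): TransactionManager, DatasetService and the rest are Guava Services that are started and blocked on before the test body runs, and stopped the same way afterwards. A small sketch of that contract with a toy AbstractIdleService; the ToyService class is invented for illustration.

import com.google.common.util.concurrent.AbstractIdleService;

public class ServiceLifecycleSketch {
  // A toy service; the real services started above follow the same contract.
  static class ToyService extends AbstractIdleService {
    @Override
    protected void startUp() throws Exception {
      System.out.println("starting up");
    }

    @Override
    protected void shutDown() throws Exception {
      System.out.println("shutting down");
    }
  }

  public static void main(String[] args) {
    ToyService service = new ToyService();
    service.startAndWait(); // blocks until startUp() has completed (older Guava Service API)
    // ... test body would run here ...
    service.stopAndWait();  // blocks until shutDown() has completed
  }
}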
Use of org.apache.tephra.TransactionManager in project cdap by caskdata.
In the class RemoteDatasetFrameworkTest, the method before:
@Before
public void before() throws Exception {
  cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, "localhost");
  cConf.setBoolean(Constants.Dangerous.UNRECOVERABLE_RESET, true);
  Configuration txConf = HBaseConfiguration.create();
  CConfigurationUtil.copyTxProperties(cConf, txConf);
  // OK to pass null, since the impersonator won't actually be called if Kerberos security is not enabled.
  Impersonator impersonator = new DefaultImpersonator(cConf, null);
  // TODO: Refactor to use injector for everything
  Injector injector = Guice.createInjector(
    new ConfigModule(cConf, txConf),
    new DiscoveryRuntimeModule().getInMemoryModules(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(),
    new TransactionInMemoryModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(MetricsCollectionService.class).to(NoOpMetricsCollectionService.class).in(Singleton.class);
        install(new FactoryModuleBuilder()
                  .implement(DatasetDefinitionRegistry.class, DefaultDatasetDefinitionRegistry.class)
                  .build(DatasetDefinitionRegistryFactory.class));
        // Through the injector, we only need RemoteDatasetFramework in these tests.
        bind(RemoteDatasetFramework.class);
      }
    });
  // Tx manager to support working with datasets
  txManager = new TransactionManager(txConf);
  txManager.startAndWait();
  InMemoryTxSystemClient txSystemClient = new InMemoryTxSystemClient(txManager);
  TransactionSystemClientService txSystemClientService = new DelegatingTransactionSystemClientService(txSystemClient);
  DiscoveryService discoveryService = injector.getInstance(DiscoveryService.class);
  DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  MetricsCollectionService metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
  AuthenticationContext authenticationContext = injector.getInstance(AuthenticationContext.class);
  framework = new RemoteDatasetFramework(cConf, discoveryServiceClient, registryFactory, authenticationContext);
  SystemDatasetInstantiatorFactory datasetInstantiatorFactory =
    new SystemDatasetInstantiatorFactory(locationFactory, framework, cConf);
  DatasetAdminService datasetAdminService =
    new DatasetAdminService(framework, cConf, locationFactory, datasetInstantiatorFactory,
                            new NoOpMetadataStore(), impersonator);
  ImmutableSet<HttpHandler> handlers =
    ImmutableSet.<HttpHandler>of(new DatasetAdminOpHTTPHandler(datasetAdminService));
  opExecutorService = new DatasetOpExecutorService(cConf, discoveryService, metricsCollectionService, handlers);
  opExecutorService.startAndWait();
  ImmutableMap<String, DatasetModule> modules = ImmutableMap.<String, DatasetModule>builder()
    .put("memoryTable", new InMemoryTableModule())
    .put("core", new CoreDatasetsModule())
    .putAll(DatasetMetaTableUtil.getModules())
    .build();
  InMemoryDatasetFramework mdsFramework = new InMemoryDatasetFramework(registryFactory, modules);
  DiscoveryExploreClient exploreClient = new DiscoveryExploreClient(discoveryServiceClient, authenticationContext);
  ExploreFacade exploreFacade = new ExploreFacade(exploreClient, cConf);
  TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txSystemClient);
  AuthorizationEnforcer authorizationEnforcer = injector.getInstance(AuthorizationEnforcer.class);
  DatasetTypeManager typeManager =
    new DatasetTypeManager(cConf, locationFactory, txSystemClientService, txExecutorFactory, mdsFramework,
                           impersonator);
  DatasetInstanceManager instanceManager =
    new DatasetInstanceManager(txSystemClientService, txExecutorFactory, mdsFramework);
  PrivilegesManager privilegesManager = injector.getInstance(PrivilegesManager.class);
  DatasetTypeService typeService =
    new DatasetTypeService(typeManager, namespaceQueryAdmin, namespacedLocationFactory, authorizationEnforcer,
                           privilegesManager, authenticationContext, cConf, impersonator, txSystemClientService,
                           mdsFramework, txExecutorFactory, DEFAULT_MODULES);
  DatasetOpExecutor opExecutor =
    new LocalDatasetOpExecutor(cConf, discoveryServiceClient, opExecutorService, authenticationContext);
  DatasetInstanceService instanceService =
    new DatasetInstanceService(typeService, instanceManager, opExecutor, exploreFacade, namespaceQueryAdmin,
                               ownerAdmin, authorizationEnforcer, privilegesManager, authenticationContext);
  instanceService.setAuditPublisher(inMemoryAuditPublisher);
  service = new DatasetService(cConf, discoveryService, discoveryServiceClient, metricsCollectionService,
                               new InMemoryDatasetOpExecutor(framework), new HashSet<DatasetMetricsReporter>(),
                               typeService, instanceService);
  // Start the dataset service and wait for it to be discoverable.
  service.startAndWait();
  EndpointStrategy endpointStrategy =
    new RandomEndpointStrategy(discoveryServiceClient.discover(Constants.Service.DATASET_MANAGER));
  Preconditions.checkNotNull(endpointStrategy.pick(5, TimeUnit.SECONDS),
                             "%s service is not up after 5 seconds", service);
  createNamespace(NamespaceId.SYSTEM);
  createNamespace(NAMESPACE_ID);
}
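Unlike the other fixtures, this one builds the TransactionManager by hand and wraps it in an InMemoryTxSystemClient, which is also the simplest setting in which to watch Tephra's optimistic conflict detection. A rough sketch under the same API assumptions as the earlier one (the boolean-returning canCommit/commit of the Tephra versions CDAP bundled):

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.tephra.Transaction;
import org.apache.tephra.TransactionManager;
import org.apache.tephra.inmemory.InMemoryTxSystemClient;

public class ConflictDetectionSketch {
  public static void main(String[] args) throws Exception {
    TransactionManager txManager = new TransactionManager(new Configuration());
    txManager.startAndWait();
    try {
      InMemoryTxSystemClient txClient = new InMemoryTxSystemClient(txManager);
      byte[] changedRow = "row1".getBytes(StandardCharsets.UTF_8);

      // Two concurrent transactions declare a change to the same row.
      Transaction tx1 = txClient.startShort();
      Transaction tx2 = txClient.startShort();

      // tx1 gets its change set accepted and commits first.
      if (txClient.canCommit(tx1, Collections.singleton(changedRow))) {
        txClient.commit(tx1);
      }
      // tx2 now conflicts on the same change and has to abort.
      if (!txClient.canCommit(tx2, Collections.singleton(changedRow))) {
        txClient.abort(tx2);
        System.out.println("tx2 aborted due to a write conflict");
      }
    } finally {
      txManager.stopAndWait();
    }
  }
}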