Use of org.apache.twill.discovery.DiscoveryService in project phoenix by apache.
In class BaseTest, method setupTxManager:
protected static void setupTxManager() throws SQLException, IOException {
  ConnectionInfo connInfo = ConnectionInfo.create(getUrl());
  // Build a ZK client with re-watch-on-expire and retry behaviour, then start it.
  zkClient = ZKClientServices.delegate(
    ZKClients.reWatchOnExpire(
      ZKClients.retryOnFailure(
        ZKClientService.Builder.of(connInfo.getZookeeperConnectionString())
          .setSessionTimeout(config.getInt(HConstants.ZK_SESSION_TIMEOUT, HConstants.DEFAULT_ZK_SESSION_TIMEOUT))
          .build(),
        RetryStrategies.exponentialDelay(500, 2000, TimeUnit.MILLISECONDS))));
  zkClient.startAndWait();
  // ZooKeeper-backed discovery that the transaction service registers itself with on start.
  DiscoveryService discovery = new ZKDiscoveryService(zkClient);
  txManager = new TransactionManager(config,
    new HDFSTransactionStateStorage(config, new SnapshotCodecProvider(config), new TxMetricsCollector()),
    new TxMetricsCollector());
  txService = new TransactionService(config, zkClient, discovery, Providers.of(txManager));
  txService.startAndWait();
}
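For reference, the core Twill pattern behind this setup is register-then-discover: a DiscoveryService announces endpoints under a name, and a DiscoveryServiceClient looks them up. Below is a minimal sketch using InMemoryDiscoveryService (which implements both interfaces); the service name "tx-service" and the port are made up for illustration, and it assumes a Twill version in which Discoverable is a concrete class with a (name, address) constructor.
import java.net.InetSocketAddress;
import org.apache.twill.common.Cancellable;
import org.apache.twill.discovery.Discoverable;
import org.apache.twill.discovery.InMemoryDiscoveryService;
import org.apache.twill.discovery.ServiceDiscovered;

public class DiscoverySketch {
  public static void main(String[] args) {
    // InMemoryDiscoveryService implements both DiscoveryService and DiscoveryServiceClient.
    InMemoryDiscoveryService discovery = new InMemoryDiscoveryService();
    // Register an endpoint under a service name (illustrative name and port).
    Cancellable cancellable = discovery.register(
      new Discoverable("tx-service", new InetSocketAddress("localhost", 15165)));
    // Discover endpoints for the same name; ServiceDiscovered is Iterable<Discoverable>.
    ServiceDiscovered discovered = discovery.discover("tx-service");
    for (Discoverable d : discovered) {
      System.out.println(d.getName() + " -> " + d.getSocketAddress());
    }
    // Remove the registration when the service stops.
    cancellable.cancel();
  }
}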
Use of org.apache.twill.discovery.DiscoveryService in project cdap by caskdata.
In class ResourceCoordinatorTest, method testAssignment:
@Test
public void testAssignment() throws InterruptedException, ExecutionException {
  CConfiguration cConf = CConfiguration.create();
  cConf.set(Constants.Zookeeper.QUORUM, zkServer.getConnectionStr());
  String serviceName = "test-assignment";
  Injector injector = Guice.createInjector(new ConfigModule(cConf), new ZKClientModule(),
                                           new DiscoveryRuntimeModule().getDistributedModules());
  ZKClientService zkClient = injector.getInstance(ZKClientService.class);
  zkClient.startAndWait();
  DiscoveryService discoveryService = injector.getInstance(DiscoveryService.class);
  try {
    ResourceCoordinator coordinator = new ResourceCoordinator(zkClient,
                                                              injector.getInstance(DiscoveryServiceClient.class),
                                                              new BalancedAssignmentStrategy());
    coordinator.startAndWait();
    try {
      ResourceCoordinatorClient client = new ResourceCoordinatorClient(zkClient);
      client.startAndWait();
      try {
        // Create a requirement.
        ResourceRequirement requirement = ResourceRequirement.builder(serviceName).addPartitions("p", 5, 1).build();
        client.submitRequirement(requirement).get();
        // Fetch the requirement, just to verify it is the same as the one that was submitted.
        Assert.assertEquals(requirement, client.fetchRequirement(requirement.getName()).get());
        // Register a discovery endpoint.
        final Discoverable discoverable1 = createDiscoverable(serviceName, 10000);
        Cancellable cancelDiscoverable1 = discoveryService.register(ResolvingDiscoverable.of(discoverable1));
        // Add a change handler for this discoverable.
        final BlockingQueue<Collection<PartitionReplica>> assignmentQueue = new SynchronousQueue<>();
        final Semaphore finishSemaphore = new Semaphore(0);
        Cancellable cancelSubscribe1 = subscribe(client, discoverable1, assignmentQueue, finishSemaphore);
        // Assert that it received the changes.
        Collection<PartitionReplica> assigned = assignmentQueue.poll(30, TimeUnit.SECONDS);
        Assert.assertNotNull(assigned);
        Assert.assertEquals(5, assigned.size());
        // Unregister from discovery; the handler should receive a change with an empty collection.
        cancelDiscoverable1.cancel();
        Assert.assertTrue(assignmentQueue.poll(30, TimeUnit.SECONDS).isEmpty());
        // Register with discovery again and expect to receive changes.
        cancelDiscoverable1 = discoveryService.register(ResolvingDiscoverable.of(discoverable1));
        assigned = assignmentQueue.poll(30, TimeUnit.SECONDS);
        Assert.assertNotNull(assigned);
        Assert.assertEquals(5, assigned.size());
        // Register another discoverable.
        final Discoverable discoverable2 = createDiscoverable(serviceName, 10001);
        Cancellable cancelDiscoverable2 = discoveryService.register(ResolvingDiscoverable.of(discoverable2));
        // Changes should be received by the handler with only 3 resources,
        // as 2 out of 5 should get moved to the new discoverable.
        assigned = assignmentQueue.poll(30, TimeUnit.SECONDS);
        Assert.assertNotNull(assigned);
        Assert.assertEquals(3, assigned.size());
        // Cancel the first discoverable again; expect an empty result.
        // This also makes sure the latest assignment gets cached in the ResourceCoordinatorClient,
        // which the next test step relies on.
        cancelDiscoverable1.cancel();
        Assert.assertTrue(assignmentQueue.poll(30, TimeUnit.SECONDS).isEmpty());
        // Cancel the handler.
        cancelSubscribe1.cancel();
        Assert.assertTrue(finishSemaphore.tryAcquire(2, TimeUnit.SECONDS));
        // Subscribe to changes for the second discoverable;
        // it should see the latest assignment even though no new fetch from ZK is triggered.
        Cancellable cancelSubscribe2 = subscribe(client, discoverable2, assignmentQueue, finishSemaphore);
        assigned = assignmentQueue.poll(30, TimeUnit.SECONDS);
        Assert.assertNotNull(assigned);
        Assert.assertEquals(5, assigned.size());
        // Update the requirement to be empty; the handler should receive an empty collection.
        client.submitRequirement(ResourceRequirement.builder(serviceName).build());
        Assert.assertTrue(assignmentQueue.poll(30, TimeUnit.SECONDS).isEmpty());
        // Update the requirement to have one partition; the handler should receive one resource.
        client.submitRequirement(ResourceRequirement.builder(serviceName).addPartitions("p", 1, 1).build());
        assigned = assignmentQueue.poll(30, TimeUnit.SECONDS);
        Assert.assertNotNull(assigned);
        Assert.assertEquals(1, assigned.size());
        // Delete the requirement; the handler should receive an empty collection.
        client.deleteRequirement(requirement.getName());
        Assert.assertTrue(assignmentQueue.poll(30, TimeUnit.SECONDS).isEmpty());
        // Cancel the second handler.
        cancelSubscribe2.cancel();
        Assert.assertTrue(finishSemaphore.tryAcquire(2, TimeUnit.SECONDS));
        cancelDiscoverable2.cancel();
      } finally {
        client.stopAndWait();
      }
    } finally {
      coordinator.stopAndWait();
    }
  } finally {
    zkClient.stopAndWait();
  }
}
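The subscribe(...) helper above is part of the CDAP test class; the coordinator itself reacts to endpoints joining and leaving discovery. In plain Twill terms, such membership changes can be observed with ServiceDiscovered.watchChanges. Below is a minimal sketch of that watch pattern, not the CDAP helper, using InMemoryDiscoveryService, Threads.SAME_THREAD_EXECUTOR from org.apache.twill.common, and an illustrative service name and port (same Discoverable assumption as the sketch above).
import java.net.InetSocketAddress;
import org.apache.twill.common.Cancellable;
import org.apache.twill.common.Threads;
import org.apache.twill.discovery.Discoverable;
import org.apache.twill.discovery.InMemoryDiscoveryService;
import org.apache.twill.discovery.ServiceDiscovered;

public class WatchChangesSketch {
  public static void main(String[] args) {
    InMemoryDiscoveryService discovery = new InMemoryDiscoveryService();
    // Watch membership of the "test-assignment" service; the listener fires on every change.
    Cancellable watch = discovery.discover("test-assignment").watchChanges(
      new ServiceDiscovered.ChangeListener() {
        @Override
        public void onChange(ServiceDiscovered serviceDiscovered) {
          int endpoints = 0;
          for (Discoverable d : serviceDiscovered) {
            endpoints++;
          }
          System.out.println(serviceDiscovered.getName() + " now has " + endpoints + " endpoint(s)");
        }
      }, Threads.SAME_THREAD_EXECUTOR);
    // Registering and cancelling endpoints triggers the listener.
    Cancellable endpoint = discovery.register(
      new Discoverable("test-assignment", new InetSocketAddress("localhost", 10000)));
    endpoint.cancel();
    watch.cancel();
  }
}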
Use of org.apache.twill.discovery.DiscoveryService in project cdap by caskdata.
In class DatasetServiceTestBase, method initializeAndStartService:
protected static void initializeAndStartService(CConfiguration cConf) throws Exception {
  // TODO: this whole method is a mess. Streamline it!
  injector = Guice.createInjector(
    new ConfigModule(cConf),
    new DiscoveryRuntimeModule().getInMemoryModules(),
    new NonCustomLocationUnitTestModule().getModule(),
    new NamespaceClientRuntimeModule().getInMemoryModules(),
    new SystemDatasetRuntimeModule().getInMemoryModules(),
    new TransactionInMemoryModule(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(MetricsCollectionService.class).to(NoOpMetricsCollectionService.class).in(Singleton.class);
        install(new FactoryModuleBuilder()
                  .implement(DatasetDefinitionRegistry.class, DefaultDatasetDefinitionRegistry.class)
                  .build(DatasetDefinitionRegistryFactory.class));
        // Through the injector we only need RemoteDatasetFramework in these tests.
        bind(RemoteDatasetFramework.class);
        bind(OwnerStore.class).to(InMemoryOwnerStore.class);
        bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
      }
    });
  AuthorizationEnforcer authEnforcer = injector.getInstance(AuthorizationEnforcer.class);
  AuthenticationContext authenticationContext = injector.getInstance(AuthenticationContext.class);
  DiscoveryService discoveryService = injector.getInstance(DiscoveryService.class);
  discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  dsFramework = injector.getInstance(RemoteDatasetFramework.class);
  // Tx manager to support working with datasets.
  txManager = injector.getInstance(TransactionManager.class);
  txManager.startAndWait();
  TransactionSystemClient txSystemClient = injector.getInstance(TransactionSystemClient.class);
  TransactionSystemClientService txSystemClientService = new DelegatingTransactionSystemClientService(txSystemClient);
  NamespacedLocationFactory namespacedLocationFactory = injector.getInstance(NamespacedLocationFactory.class);
  SystemDatasetInstantiatorFactory datasetInstantiatorFactory =
    new SystemDatasetInstantiatorFactory(locationFactory, dsFramework, cConf);
  // OK to pass null, since the impersonator won't actually be called if Kerberos security is not enabled.
  Impersonator impersonator = new DefaultImpersonator(cConf, null);
  DatasetAdminService datasetAdminService =
    new DatasetAdminService(dsFramework, cConf, locationFactory, datasetInstantiatorFactory,
                            new NoOpMetadataStore(), impersonator);
  ImmutableSet<HttpHandler> handlers = ImmutableSet.<HttpHandler>of(new DatasetAdminOpHTTPHandler(datasetAdminService));
  MetricsCollectionService metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
  opExecutorService = new DatasetOpExecutorService(cConf, discoveryService, metricsCollectionService, handlers);
  opExecutorService.startAndWait();
  Map<String, DatasetModule> defaultModules = injector.getInstance(
    Key.get(new TypeLiteral<Map<String, DatasetModule>>() { },
            Constants.Dataset.Manager.DefaultDatasetModules.class));
  ImmutableMap<String, DatasetModule> modules = ImmutableMap.<String, DatasetModule>builder()
    .putAll(defaultModules)
    .putAll(DatasetMetaTableUtil.getModules())
    .build();
  registryFactory = injector.getInstance(DatasetDefinitionRegistryFactory.class);
  inMemoryDatasetFramework = new InMemoryDatasetFramework(registryFactory, modules);
  DiscoveryExploreClient exploreClient = new DiscoveryExploreClient(discoveryServiceClient, authenticationContext);
  ExploreFacade exploreFacade = new ExploreFacade(exploreClient, cConf);
  namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
  namespaceAdmin.create(NamespaceMeta.DEFAULT);
  ownerAdmin = injector.getInstance(OwnerAdmin.class);
  NamespaceQueryAdmin namespaceQueryAdmin = injector.getInstance(NamespaceQueryAdmin.class);
  TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txSystemClient);
  DatasetTypeManager typeManager = new DatasetTypeManager(cConf, locationFactory, txSystemClientService,
                                                          txExecutorFactory, inMemoryDatasetFramework, impersonator);
  DatasetOpExecutor opExecutor = new InMemoryDatasetOpExecutor(dsFramework);
  DatasetInstanceManager instanceManager =
    new DatasetInstanceManager(txSystemClientService, txExecutorFactory, inMemoryDatasetFramework);
  PrivilegesManager privilegesManager = injector.getInstance(PrivilegesManager.class);
  DatasetTypeService typeService =
    new DatasetTypeService(typeManager, namespaceAdmin, namespacedLocationFactory, authEnforcer, privilegesManager,
                           authenticationContext, cConf, impersonator, txSystemClientService,
                           inMemoryDatasetFramework, txExecutorFactory, defaultModules);
  instanceService = new DatasetInstanceService(typeService, instanceManager, opExecutor, exploreFacade,
                                               namespaceQueryAdmin, ownerAdmin, authEnforcer, privilegesManager,
                                               authenticationContext);
  service = new DatasetService(cConf, discoveryService, discoveryServiceClient, metricsCollectionService, opExecutor,
                               new HashSet<DatasetMetricsReporter>(), typeService, instanceService);
  // Start the dataset service and wait for it to be discoverable.
  service.startAndWait();
  waitForService(Constants.Service.DATASET_EXECUTOR);
  waitForService(Constants.Service.DATASET_MANAGER);
  // This usually happens while creating a namespace; we do not do that in data fabric tests.
  Locations.mkdirsIfNotExists(namespacedLocationFactory.get(NamespaceId.DEFAULT));
}
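waitForService(...) is a helper of this test base class; presumably it just blocks until the named service has registered at least one endpoint in discovery. Below is a hypothetical version of such a wait written directly against the Twill DiscoveryServiceClient API (the class name, method name, and polling interval are assumptions, not CDAP code).
import java.util.Iterator;
import java.util.concurrent.TimeUnit;
import org.apache.twill.discovery.Discoverable;
import org.apache.twill.discovery.DiscoveryServiceClient;
import org.apache.twill.discovery.ServiceDiscovered;

public final class DiscoveryWaitSketch {
  // Polls until at least one endpoint of the named service is registered, or the timeout elapses.
  public static Discoverable waitForEndpoint(DiscoveryServiceClient client, String serviceName,
                                             long timeout, TimeUnit unit) throws InterruptedException {
    ServiceDiscovered discovered = client.discover(serviceName);
    long deadline = System.nanoTime() + unit.toNanos(timeout);
    while (System.nanoTime() < deadline) {
      Iterator<Discoverable> endpoints = discovered.iterator();
      if (endpoints.hasNext()) {
        return endpoints.next();
      }
      TimeUnit.MILLISECONDS.sleep(100);
    }
    return null;  // Service did not become discoverable within the timeout.
  }

  private DiscoveryWaitSketch() { }
}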
Use of org.apache.twill.discovery.DiscoveryService in project cdap by caskdata.
In class RemoteDatasetFrameworkTest, method before:
@Before
public void before() throws Exception {
  cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, "localhost");
  cConf.setBoolean(Constants.Dangerous.UNRECOVERABLE_RESET, true);
  Configuration txConf = HBaseConfiguration.create();
  CConfigurationUtil.copyTxProperties(cConf, txConf);
  // OK to pass null, since the impersonator won't actually be called if Kerberos security is not enabled.
  Impersonator impersonator = new DefaultImpersonator(cConf, null);
  // TODO: Refactor to use injector for everything.
  Injector injector = Guice.createInjector(
    new ConfigModule(cConf, txConf),
    new DiscoveryRuntimeModule().getInMemoryModules(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(),
    new TransactionInMemoryModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(MetricsCollectionService.class).to(NoOpMetricsCollectionService.class).in(Singleton.class);
        install(new FactoryModuleBuilder()
                  .implement(DatasetDefinitionRegistry.class, DefaultDatasetDefinitionRegistry.class)
                  .build(DatasetDefinitionRegistryFactory.class));
        // Through the injector we only need RemoteDatasetFramework in these tests.
        bind(RemoteDatasetFramework.class);
      }
    });
  // Tx manager to support working with datasets.
  txManager = new TransactionManager(txConf);
  txManager.startAndWait();
  InMemoryTxSystemClient txSystemClient = new InMemoryTxSystemClient(txManager);
  TransactionSystemClientService txSystemClientService = new DelegatingTransactionSystemClientService(txSystemClient);
  DiscoveryService discoveryService = injector.getInstance(DiscoveryService.class);
  DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  MetricsCollectionService metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
  AuthenticationContext authenticationContext = injector.getInstance(AuthenticationContext.class);
  framework = new RemoteDatasetFramework(cConf, discoveryServiceClient, registryFactory, authenticationContext);
  SystemDatasetInstantiatorFactory datasetInstantiatorFactory =
    new SystemDatasetInstantiatorFactory(locationFactory, framework, cConf);
  DatasetAdminService datasetAdminService =
    new DatasetAdminService(framework, cConf, locationFactory, datasetInstantiatorFactory,
                            new NoOpMetadataStore(), impersonator);
  ImmutableSet<HttpHandler> handlers = ImmutableSet.<HttpHandler>of(new DatasetAdminOpHTTPHandler(datasetAdminService));
  opExecutorService = new DatasetOpExecutorService(cConf, discoveryService, metricsCollectionService, handlers);
  opExecutorService.startAndWait();
  ImmutableMap<String, DatasetModule> modules = ImmutableMap.<String, DatasetModule>builder()
    .put("memoryTable", new InMemoryTableModule())
    .put("core", new CoreDatasetsModule())
    .putAll(DatasetMetaTableUtil.getModules())
    .build();
  InMemoryDatasetFramework mdsFramework = new InMemoryDatasetFramework(registryFactory, modules);
  DiscoveryExploreClient exploreClient = new DiscoveryExploreClient(discoveryServiceClient, authenticationContext);
  ExploreFacade exploreFacade = new ExploreFacade(exploreClient, cConf);
  TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txSystemClient);
  AuthorizationEnforcer authorizationEnforcer = injector.getInstance(AuthorizationEnforcer.class);
  DatasetTypeManager typeManager = new DatasetTypeManager(cConf, locationFactory, txSystemClientService,
                                                          txExecutorFactory, mdsFramework, impersonator);
  DatasetInstanceManager instanceManager =
    new DatasetInstanceManager(txSystemClientService, txExecutorFactory, mdsFramework);
  PrivilegesManager privilegesManager = injector.getInstance(PrivilegesManager.class);
  DatasetTypeService typeService =
    new DatasetTypeService(typeManager, namespaceQueryAdmin, namespacedLocationFactory, authorizationEnforcer,
                           privilegesManager, authenticationContext, cConf, impersonator, txSystemClientService,
                           mdsFramework, txExecutorFactory, DEFAULT_MODULES);
  DatasetOpExecutor opExecutor =
    new LocalDatasetOpExecutor(cConf, discoveryServiceClient, opExecutorService, authenticationContext);
  DatasetInstanceService instanceService =
    new DatasetInstanceService(typeService, instanceManager, opExecutor, exploreFacade, namespaceQueryAdmin,
                               ownerAdmin, authorizationEnforcer, privilegesManager, authenticationContext);
  instanceService.setAuditPublisher(inMemoryAuditPublisher);
  service = new DatasetService(cConf, discoveryService, discoveryServiceClient, metricsCollectionService,
                               new InMemoryDatasetOpExecutor(framework), new HashSet<DatasetMetricsReporter>(),
                               typeService, instanceService);
  // Start the dataset service and wait for it to be discoverable.
  service.startAndWait();
  EndpointStrategy endpointStrategy =
    new RandomEndpointStrategy(discoveryServiceClient.discover(Constants.Service.DATASET_MANAGER));
  Preconditions.checkNotNull(endpointStrategy.pick(5, TimeUnit.SECONDS),
                             "%s service is not up after 5 seconds", service);
  createNamespace(NamespaceId.SYSTEM);
  createNamespace(NAMESPACE_ID);
}
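DiscoveryRuntimeModule().getInMemoryModules() is what lets the injector hand out both DiscoveryService and DiscoveryServiceClient here. As a rough illustration of that kind of wiring, and not the actual CDAP module, a Guice module can bind both interfaces to one shared InMemoryDiscoveryService so registrations made through one interface are visible through the other (service name, port, and the Discoverable constructor are assumptions).
import java.net.InetSocketAddress;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Singleton;
import org.apache.twill.discovery.Discoverable;
import org.apache.twill.discovery.DiscoveryService;
import org.apache.twill.discovery.DiscoveryServiceClient;
import org.apache.twill.discovery.InMemoryDiscoveryService;

public class InMemoryDiscoveryModuleSketch extends AbstractModule {
  @Override
  protected void configure() {
    // One shared in-memory instance backs both interfaces, so whatever is registered
    // through DiscoveryService is visible through DiscoveryServiceClient.
    bind(InMemoryDiscoveryService.class).in(Singleton.class);
    bind(DiscoveryService.class).to(InMemoryDiscoveryService.class);
    bind(DiscoveryServiceClient.class).to(InMemoryDiscoveryService.class);
  }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new InMemoryDiscoveryModuleSketch());
    DiscoveryService service = injector.getInstance(DiscoveryService.class);
    DiscoveryServiceClient client = injector.getInstance(DiscoveryServiceClient.class);
    service.register(new Discoverable("dataset.service", new InetSocketAddress("localhost", 10002)));
    // Prints true: the client sees the registration made through the service interface.
    System.out.println(client.discover("dataset.service").iterator().hasNext());
  }
}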
Use of org.apache.twill.discovery.DiscoveryService in project cdap by caskdata.
In class RoutingToDataSetsTest, method before:
@BeforeClass
public static void before() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  Injector injector = Guice.createInjector(new SecurityModules().getInMemoryModules(),
                                           new DiscoveryRuntimeModule().getInMemoryModules(),
                                           new AppFabricTestModule(cConf));
  // Starting router
  DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  AccessTokenTransformer accessTokenTransformer = injector.getInstance(AccessTokenTransformer.class);
  RouteStore routeStore = injector.getInstance(RouteStore.class);
  SConfiguration sConf = SConfiguration.create();
  cConf.set(Constants.Router.ADDRESS, "localhost");
  port = Networks.getRandomPort();
  cConf.setInt(Constants.Router.ROUTER_PORT, port);
  nettyRouter = new NettyRouter(cConf, sConf, InetAddresses.forString("127.0.0.1"),
                                new RouterServiceLookup(cConf, discoveryServiceClient, new RouterPathLookup(), routeStore),
                                new SuccessTokenValidator(), accessTokenTransformer, discoveryServiceClient);
  nettyRouter.startAndWait();
  // Starting mock DataSet service
  DiscoveryService discoveryService = injector.getInstance(DiscoveryService.class);
  mockService = new MockHttpService(discoveryService, Constants.Service.DATASET_MANAGER,
                                    new MockDatasetTypeHandler(), new MockDatasetInstanceHandler());
  mockService.startAndWait();
}
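The distributed counterpart of the in-memory wiring used above is ZooKeeper-backed discovery, as in the Phoenix example at the top of this page. Below is a standalone sketch, assuming a ZooKeeper ensemble at localhost:2181, an illustrative service name and port, and the same Discoverable constructor as the earlier sketches; the ZK-backed view is refreshed asynchronously, hence the short sleep.
import java.net.InetSocketAddress;
import java.util.concurrent.TimeUnit;
import org.apache.twill.common.Cancellable;
import org.apache.twill.discovery.Discoverable;
import org.apache.twill.discovery.ZKDiscoveryService;
import org.apache.twill.zookeeper.ZKClientService;

public class ZkDiscoverySketch {
  public static void main(String[] args) throws InterruptedException {
    // Assumes a ZooKeeper ensemble reachable at localhost:2181.
    ZKClientService zkClient = ZKClientService.Builder.of("localhost:2181").build();
    zkClient.startAndWait();
    try {
      ZKDiscoveryService discovery = new ZKDiscoveryService(zkClient);
      // Register an endpoint under an illustrative service name, then look it up.
      Cancellable cancellable = discovery.register(
        new Discoverable("mock.dataset.manager", new InetSocketAddress("localhost", 10003)));
      TimeUnit.SECONDS.sleep(1);  // The discovered view is refreshed asynchronously from ZK watches.
      for (Discoverable d : discovery.discover("mock.dataset.manager")) {
        System.out.println(d.getName() + " -> " + d.getSocketAddress());
      }
      cancellable.cancel();
    } finally {
      zkClient.stopAndWait();
    }
  }
}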