Use of io.cdap.cdap.data.runtime.DataFabricModules in project cdap by cdapio.
The class ExploreDisabledTest, method createInMemoryModules:
private static List<Module> createInMemoryModules(CConfiguration configuration, Configuration hConf) {
configuration.set(Constants.CFG_DATA_INMEMORY_PERSISTENCE, Constants.InMemoryPersistenceType.MEMORY.name());
configuration.setBoolean(Constants.Explore.EXPLORE_ENABLED, false);
configuration.set(Constants.Explore.LOCAL_DATA_DIR, new File(System.getProperty("java.io.tmpdir"), "hive").getAbsolutePath());
return ImmutableList.of(
    new ConfigModule(configuration, hConf),
    RemoteAuthenticatorModules.getNoOpModule(),
    new IOModule(),
    new InMemoryDiscoveryModule(),
    new NonCustomLocationUnitTestModule(),
    new DataFabricModules().getInMemoryModules(),
    new DataSetsModules().getStandaloneModules(),
    new DataSetServiceModules().getInMemoryModules(),
    new MetricsClientRuntimeModule().getInMemoryModules(),
    new ExploreRuntimeModule().getInMemoryModules(),
    new ExploreClientModule(),
    new NamespaceAdminTestModule(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
        bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
        bind(MetadataServiceClient.class).to(NoOpMetadataServiceClient.class);
      }
    });
}
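For orientation, the returned module list is typically handed straight to Guice. Below is a minimal, hypothetical usage sketch (it assumes the same imports as the snippet above; the injector variable and the DatasetFramework lookup are illustrative and not part of the original test):
// Hypothetical usage sketch: build an injector from the in-memory modules
// and look up a binding. Not taken from the original test class.
CConfiguration cConf = CConfiguration.create();
Configuration hConf = new Configuration();
Injector injector = Guice.createInjector(createInMemoryModules(cConf, hConf));
DatasetFramework dsFramework = injector.getInstance(DatasetFramework.class);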
Use of io.cdap.cdap.data.runtime.DataFabricModules in project cdap by cdapio.
The class DatasetOpExecutorServiceTest, method setUp:
@Before
public void setUp() throws Exception {
Configuration hConf = new Configuration();
CConfiguration cConf = CConfiguration.create();
File datasetDir = new File(TMP_FOLDER.newFolder(), "datasetUser");
Assert.assertTrue(datasetDir.mkdirs());
cConf.set(Constants.Dataset.Manager.OUTPUT_DIR, datasetDir.getAbsolutePath());
cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, "localhost");
cConf.set(Constants.Dataset.Executor.ADDRESS, "localhost");
cConf.setInt(Constants.Dataset.Executor.PORT, Networks.getRandomPort());
Injector injector = Guice.createInjector(
    new ConfigModule(cConf, hConf),
    RemoteAuthenticatorModules.getNoOpModule(),
    new IOModule(),
    new ZKClientModule(),
    new KafkaClientModule(),
    new InMemoryDiscoveryModule(),
    new NonCustomLocationUnitTestModule(),
    new DataFabricModules().getInMemoryModules(),
    new DataSetsModules().getStandaloneModules(),
    new DataSetServiceModules().getInMemoryModules(),
    new TransactionMetricsModule(),
    new ExploreClientModule(),
    new NamespaceAdminTestModule(),
    new AuthenticationContextModules().getMasterModule(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
        bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
        bind(MetadataServiceClient.class).to(NoOpMetadataServiceClient.class);
      }
    });
txManager = injector.getInstance(TransactionManager.class);
txManager.startAndWait();
StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
dsOpExecService = injector.getInstance(DatasetOpExecutorService.class);
dsOpExecService.startAndWait();
managerService = injector.getInstance(DatasetService.class);
managerService.startAndWait();
dsFramework = injector.getInstance(DatasetFramework.class);
// find host
DiscoveryServiceClient discoveryClient = injector.getInstance(DiscoveryServiceClient.class);
endpointStrategy = new RandomEndpointStrategy(() -> discoveryClient.discover(Constants.Service.DATASET_MANAGER));
namespaceAdmin = injector.getInstance(NamespaceAdmin.class);
namespaceAdmin.create(NamespaceMeta.DEFAULT);
namespaceAdmin.create(new NamespaceMeta.Builder().setName(bob.getParent()).build());
}
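The matching cleanup is not shown here; a hedged sketch of an @After method that stops these services in reverse order of startup (the method name and ordering are assumptions, not taken from the original test):
@After
public void tearDown() throws Exception {
  // Assumed cleanup: stop the services started in setUp, in reverse order.
  managerService.stopAndWait();
  dsOpExecService.stopAndWait();
  txManager.stopAndWait();
}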
Use of io.cdap.cdap.data.runtime.DataFabricModules in project cdap by caskdata.
The class HBaseMetricsTableTest, method setup:
@BeforeClass
public static void setup() throws Exception {
CConfiguration cConf = CConfiguration.create();
cConf.set(Constants.CFG_HDFS_USER, System.getProperty("user.name"));
Injector injector = Guice.createInjector(
    new DataFabricModules().getDistributedModules(),
    new ConfigModule(cConf, TEST_HBASE.getConfiguration()),
    new ZKClientModule(),
    new ZKDiscoveryModule(),
    new TransactionMetricsModule(),
    new DFSLocationModule(),
    new NamespaceAdminTestModule(),
    new SystemDatasetRuntimeModule().getDistributedModules(),
    new DataSetsModules().getInMemoryModules(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getNoOpModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
        bind(OwnerAdmin.class).to(DefaultOwnerAdmin.class);
      }
    });
dsFramework = injector.getInstance(DatasetFramework.class);
tableUtil = injector.getInstance(HBaseTableUtil.class);
ddlExecutor = new HBaseDDLExecutorFactory(cConf, TEST_HBASE.getHBaseAdmin().getConfiguration()).get();
ddlExecutor.createNamespaceIfNotExists(tableUtil.getHBaseNamespace(NamespaceId.SYSTEM));
}
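The setup above creates the HBase namespace for the CDAP system namespace; a matching class-level teardown would normally remove it again. A hedged sketch, assuming HBaseDDLExecutor exposes deleteNamespaceIfExists and that the namespace has been emptied first (verify against the actual interface before relying on this):
@AfterClass
public static void tearDown() throws Exception {
  // Assumed cleanup: drop the namespace created in setup (only valid once it is empty).
  ddlExecutor.deleteNamespaceIfExists(tableUtil.getHBaseNamespace(NamespaceId.SYSTEM));
}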
Use of io.cdap.cdap.data.runtime.DataFabricModules in project cdap by caskdata.
The class LocalLogAppenderResilientTest, method testResilientLogging:
@Test
public void testResilientLogging() throws Exception {
Configuration hConf = new Configuration();
CConfiguration cConf = CConfiguration.create();
File datasetDir = new File(tmpFolder.newFolder(), "datasetUser");
// noinspection ResultOfMethodCallIgnored
datasetDir.mkdirs();
cConf.set(Constants.Dataset.Manager.OUTPUT_DIR, datasetDir.getAbsolutePath());
cConf.set(Constants.Service.MASTER_SERVICES_BIND_ADDRESS, "localhost");
cConf.set(Constants.Dataset.Executor.ADDRESS, "localhost");
cConf.setInt(Constants.Dataset.Executor.PORT, Networks.getRandomPort());
cConf.set(Constants.CFG_LOCAL_DATA_DIR, tmpFolder.newFolder().getAbsolutePath());
Injector injector = Guice.createInjector(
    new ConfigModule(cConf, hConf),
    RemoteAuthenticatorModules.getNoOpModule(),
    new IOModule(),
    new ZKClientModule(),
    new KafkaClientModule(),
    new InMemoryDiscoveryModule(),
    new NonCustomLocationUnitTestModule(),
    new DataFabricModules().getInMemoryModules(),
    new DataSetsModules().getStandaloneModules(),
    new DataSetServiceModules().getInMemoryModules(),
    new TransactionMetricsModule(),
    new ExploreClientModule(),
    new LocalLogAppenderModule(),
    new NamespaceAdminTestModule(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
        bind(OwnerAdmin.class).to(NoOpOwnerAdmin.class);
        bind(MetadataServiceClient.class).to(NoOpMetadataServiceClient.class);
      }
    });
TransactionManager txManager = injector.getInstance(TransactionManager.class);
txManager.startAndWait();
StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
DatasetOpExecutorService opExecutorService = injector.getInstance(DatasetOpExecutorService.class);
opExecutorService.startAndWait();
// Start the logging before starting the service.
LoggingContextAccessor.setLoggingContext(new WorkerLoggingContext("TRL_ACCT_1", "APP_1", "WORKER_1", "RUN", "INSTANCE"));
String logBaseDir = "trl-log/log_files_" + new Random(System.currentTimeMillis()).nextLong();
cConf.set(LoggingConfiguration.LOG_BASE_DIR, logBaseDir);
cConf.setInt(LoggingConfiguration.LOG_MAX_FILE_SIZE_BYTES, 20 * 1024);
final LogAppender appender = injector.getInstance(LocalLogAppender.class);
new LogAppenderInitializer(appender).initialize("TestResilientLogging");
int failureMsgCount = 3;
final CountDownLatch failureLatch = new CountDownLatch(failureMsgCount);
LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
loggerContext.getStatusManager().add(new StatusListener() {
@Override
public void addStatusEvent(Status status) {
if (status.getLevel() != Status.ERROR || status.getOrigin() != appender) {
return;
}
Throwable cause = status.getThrowable();
if (cause != null) {
Throwable rootCause = Throwables.getRootCause(cause);
if (rootCause instanceof ServiceUnavailableException) {
String serviceName = ((ServiceUnavailableException) rootCause).getServiceName();
if (Constants.Service.DATASET_MANAGER.equals(serviceName)) {
failureLatch.countDown();
}
}
}
}
});
Logger logger = LoggerFactory.getLogger("TestResilientLogging");
for (int i = 0; i < failureMsgCount; ++i) {
Exception e1 = new Exception("Test Exception1");
Exception e2 = new Exception("Test Exception2", e1);
logger.warn("Test log message " + i + " {} {}", "arg1", "arg2", e2);
}
// Wait for the three append failures to happen.
// The wait time has to be > 3 seconds because DatasetServiceClient has a 1 second timeout on discovery.
failureLatch.await(5, TimeUnit.SECONDS);
// Start dataset service, wait for it to be discoverable
DatasetService dsService = injector.getInstance(DatasetService.class);
dsService.startAndWait();
final CountDownLatch startLatch = new CountDownLatch(1);
DiscoveryServiceClient discoveryClient = injector.getInstance(DiscoveryServiceClient.class);
discoveryClient.discover(Constants.Service.DATASET_MANAGER).watchChanges(new ServiceDiscovered.ChangeListener() {
@Override
public void onChange(ServiceDiscovered serviceDiscovered) {
if (!Iterables.isEmpty(serviceDiscovered)) {
startLatch.countDown();
}
}
}, Threads.SAME_THREAD_EXECUTOR);
startLatch.await(5, TimeUnit.SECONDS);
// Do some more logging after the service is started.
for (int i = 5; i < 10; ++i) {
Exception e1 = new Exception("Test Exception1");
Exception e2 = new Exception("Test Exception2", e1);
logger.warn("Test log message " + i + " {} {}", "arg1", "arg2", e2);
}
appender.stop();
// Verify - we should have at least 5 events.
LoggingContext loggingContext = new WorkerLoggingContext("TRL_ACCT_1", "APP_1", "WORKER_1", "RUN", "INSTANCE");
FileLogReader logTail = injector.getInstance(FileLogReader.class);
LoggingTester.LogCallback logCallback1 = new LoggingTester.LogCallback();
logTail.getLogPrev(loggingContext, ReadRange.LATEST, 10, Filter.EMPTY_FILTER, logCallback1);
List<LogEvent> allEvents = logCallback1.getEvents();
Assert.assertTrue(allEvents.toString(), allEvents.size() >= 5);
// Finally - stop all services
Services.chainStop(dsService, opExecutorService, txManager);
}
Use of io.cdap.cdap.data.runtime.DataFabricModules in project cdap by caskdata.
The class MetricsSuiteTestBase, method startMetricsService:
public static Injector startMetricsService(CConfiguration conf) throws Exception {
Injector injector = Guice.createInjector(Modules.override(
    new ConfigModule(conf),
    RemoteAuthenticatorModules.getNoOpModule(),
    new NonCustomLocationUnitTestModule(),
    new InMemoryDiscoveryModule(),
    new MetricsHandlerModule(),
    new MetricsClientRuntimeModule().getInMemoryModules(),
    new DataFabricModules().getInMemoryModules(),
    new DataSetsModules().getStandaloneModules(),
    new DataSetServiceModules().getInMemoryModules(),
    new ExploreClientModule(),
    new NamespaceAdminTestModule(),
    new AuthorizationTestModule(),
    new AuthorizationEnforcementModule().getInMemoryModules(),
    new AuthenticationContextModules().getMasterModule()
).with(new AbstractModule() {
  @Override
  protected void configure() {
    bind(Store.class).to(DefaultStore.class);
    bind(UGIProvider.class).to(UnsupportedUGIProvider.class);
    bind(OwnerAdmin.class).to(NoOpOwnerAdmin.class);
    // TODO (CDAP-14677): find a better way to inject metadata publisher
    bind(MetadataServiceClient.class).to(NoOpMetadataServiceClient.class);
  }
}));
transactionManager = injector.getInstance(TransactionManager.class);
transactionManager.startAndWait();
StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
dsOpService = injector.getInstance(DatasetOpExecutorService.class);
dsOpService.startAndWait();
datasetService = injector.getInstance(DatasetService.class);
datasetService.startAndWait();
metrics = injector.getInstance(MetricsQueryService.class);
metrics.startAndWait();
collectionService = injector.getInstance(MetricsCollectionService.class);
collectionService.startAndWait();
// initialize the dataset instantiator
DiscoveryServiceClient discoveryClient = injector.getInstance(DiscoveryServiceClient.class);
EndpointStrategy metricsEndPoints = new RandomEndpointStrategy(() -> discoveryClient.discover(Constants.Service.METRICS));
discoverable = metricsEndPoints.pick(1L, TimeUnit.SECONDS);
Assert.assertNotNull("Could not discover metrics service", discoverable);
return injector;
}
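A test base like this would normally pair startMetricsService with a shutdown helper. A hedged sketch that stops the same services in reverse order of startup (the method name and signature are assumptions, not confirmed from the original class):
public static void stopMetricsService(CConfiguration conf) {
  // Assumed counterpart to startMetricsService: stop services in reverse order.
  collectionService.stopAndWait();
  metrics.stopAndWait();
  datasetService.stopAndWait();
  dsOpService.stopAndWait();
  transactionManager.stopAndWait();
}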