Use of co.cask.cdap.notifications.feeds.client.NotificationFeedClientModule in project cdap by caskdata.
The class StreamTailer, method main.
public static void main(String[] args) throws Exception {
  if (args.length < 1) {
    System.out.println(String.format("Usage: java %s [streamName]", StreamTailer.class.getName()));
    return;
  }
  String streamName = args[0];
  CConfiguration cConf = CConfiguration.create();
  Configuration hConf = new Configuration();
  String txClientId = StreamTailer.class.getName();
  Injector injector = Guice.createInjector(
    new ConfigModule(cConf, hConf),
    new DataFabricModules(txClientId).getDistributedModules(),
    new DataSetsModules().getDistributedModules(),
    new LocationRuntimeModule().getDistributedModules(),
    new ExploreClientModule(),
    new ViewAdminModules().getDistributedModules(),
    new StreamAdminModules().getDistributedModules(),
    new AuthorizationEnforcementModule().getDistributedModules(),
    new AuthenticationContextModules().getMasterModule(),
    new NotificationFeedClientModule());
  StreamAdmin streamAdmin = injector.getInstance(StreamAdmin.class);
  // TODO: get namespace from commandline arguments
  StreamId streamId = NamespaceId.DEFAULT.stream(streamName);
  StreamConfig streamConfig = streamAdmin.getConfig(streamId);
  Location streamLocation = streamConfig.getLocation();
  List<Location> eventFiles = Lists.newArrayList();
  for (Location partition : streamLocation.list()) {
    if (!partition.isDirectory()) {
      continue;
    }
    for (Location file : partition.list()) {
      if (StreamFileType.EVENT.isMatched(file.getName())) {
        eventFiles.add(file);
      }
    }
  }
  int generation = StreamUtils.getGeneration(streamConfig);
  MultiLiveStreamFileReader reader = new MultiLiveStreamFileReader(
    streamConfig, ImmutableList.copyOf(Iterables.transform(eventFiles, createOffsetConverter(generation))));
  List<StreamEvent> events = Lists.newArrayList();
  while (reader.read(events, 10, 100, TimeUnit.MILLISECONDS) >= 0) {
    for (StreamEvent event : events) {
      System.out.println(event.getTimestamp() + " " + Charsets.UTF_8.decode(event.getBody()));
    }
    events.clear();
  }
  reader.close();
}
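The createOffsetConverter helper referenced above is not shown in this snippet. A minimal sketch of what such a converter could look like, assuming StreamFileOffset exposes a (Location, offset, generation) constructor and that each event file should be read from offset 0:

// Sketch only (not part of the snippet above): turn each event-file Location into a starting offset.
// Assumes the StreamFileOffset(Location, long, int) constructor; verify against the CDAP version in use.
// Function is com.google.common.base.Function, matching the Guava Iterables.transform call above.
private static Function<Location, StreamFileOffset> createOffsetConverter(final int generation) {
  return new Function<Location, StreamFileOffset>() {
    @Override
    public StreamFileOffset apply(Location eventLocation) {
      return new StreamFileOffset(eventLocation, 0L, generation);
    }
  };
}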
Use of co.cask.cdap.notifications.feeds.client.NotificationFeedClientModule in project cdap by caskdata.
The class HBaseQueueDebugger, method createInjector.
private static Injector createInjector(boolean disableAuthorization) throws Exception {
  CConfiguration cConf = CConfiguration.create();
  if (disableAuthorization && cConf.getBoolean(Constants.Security.Authorization.ENABLED)) {
    System.out.println(String.format("Disabling authorization for %s.", HBaseQueueDebugger.class.getSimpleName()));
    cConf.setBoolean(Constants.Security.Authorization.ENABLED, false);
  }
  // Note: login has to happen before any objects that need Kerberos credentials are instantiated.
  SecurityUtil.loginForMasterService(cConf);
  return Guice.createInjector(
    new ConfigModule(cConf, HBaseConfiguration.create()), new IOModule(), new ZKClientModule(),
    new LocationRuntimeModule().getDistributedModules(), new DiscoveryRuntimeModule().getDistributedModules(),
    new ViewAdminModules().getDistributedModules(), new StreamAdminModules().getDistributedModules(),
    new NotificationFeedClientModule(), new TwillModule(), new ExploreClientModule(),
    new DataFabricModules(HBaseQueueDebugger.class.getName()).getDistributedModules(),
    new ServiceStoreModules().getDistributedModules(), new DataSetsModules().getDistributedModules(),
    new AppFabricServiceRuntimeModule().getDistributedModules(), new ProgramRunnerRuntimeModule().getDistributedModules(),
    new SystemDatasetRuntimeModule().getDistributedModules(), new NotificationServiceRuntimeModule().getDistributedModules(),
    new MetricsClientRuntimeModule().getDistributedModules(), new MetricsStoreModule(), new KafkaClientModule(),
    new NamespaceStoreModule().getDistributedModules(), new AuthorizationModule(),
    new AuthorizationEnforcementModule().getMasterModule(), new SecureStoreModules().getDistributedModules(),
    new MessagingClientModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(QueueClientFactory.class).to(HBaseQueueClientFactory.class).in(Singleton.class);
        bind(QueueAdmin.class).to(HBaseQueueAdmin.class).in(Singleton.class);
        bind(HBaseTableUtil.class).toProvider(HBaseTableUtilFactory.class);
        bind(Store.class).annotatedWith(Names.named("defaultStore")).to(DefaultStore.class).in(Singleton.class);
        // This is needed because the LocalApplicationManager
        // expects a dsframework injection named datasetMDS
        bind(DatasetFramework.class).annotatedWith(Names.named("datasetMDS")).to(DatasetFramework.class).in(Singleton.class);
      }
    });
}
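A brief usage sketch (not taken from the source): once the injector is built, the bindings declared in the inline AbstractModule resolve as shown below; a real run would also need the supporting services (ZooKeeper, transactions, and so on) to be started.

// Hypothetical usage of the injector above; Key is com.google.inject.Key.
Injector injector = createInjector(true);
QueueAdmin queueAdmin = injector.getInstance(QueueAdmin.class);                          // -> HBaseQueueAdmin
QueueClientFactory clientFactory = injector.getInstance(QueueClientFactory.class);       // -> HBaseQueueClientFactory
Store store = injector.getInstance(Key.get(Store.class, Names.named("defaultStore")));   // -> DefaultStore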
Use of co.cask.cdap.notifications.feeds.client.NotificationFeedClientModule in project cdap by caskdata.
The class JobQueueDebugger, method createInjector.
private static Injector createInjector() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  if (cConf.getBoolean(Constants.Security.Authorization.ENABLED)) {
    System.out.println(String.format("Disabling authorization for %s.", JobQueueDebugger.class.getSimpleName()));
    cConf.setBoolean(Constants.Security.Authorization.ENABLED, false);
  }
  // Note: login has to happen before any objects that need Kerberos credentials are instantiated.
  SecurityUtil.loginForMasterService(cConf);
  return Guice.createInjector(
    new ConfigModule(cConf, HBaseConfiguration.create()), new IOModule(), new ZKClientModule(),
    new LocationRuntimeModule().getDistributedModules(), new DiscoveryRuntimeModule().getDistributedModules(),
    new ViewAdminModules().getDistributedModules(), new StreamAdminModules().getDistributedModules(),
    new NotificationFeedClientModule(), new TwillModule(), new ExploreClientModule(),
    new DataFabricModules().getDistributedModules(), new ServiceStoreModules().getDistributedModules(),
    new DataSetsModules().getDistributedModules(), new AppFabricServiceRuntimeModule().getDistributedModules(),
    new ProgramRunnerRuntimeModule().getDistributedModules(), new SystemDatasetRuntimeModule().getDistributedModules(),
    new NotificationServiceRuntimeModule().getDistributedModules(), new MetricsClientRuntimeModule().getDistributedModules(),
    new MetricsStoreModule(), new KafkaClientModule(), new NamespaceStoreModule().getDistributedModules(),
    new AuthorizationModule(), new AuthorizationEnforcementModule().getMasterModule(),
    new SecureStoreModules().getDistributedModules(), new MessagingClientModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        bind(HBaseTableUtil.class).toProvider(HBaseTableUtilFactory.class);
        bind(Store.class).annotatedWith(Names.named("defaultStore")).to(DefaultStore.class).in(Singleton.class);
        // This is needed because the LocalApplicationManager
        // expects a dsframework injection named datasetMDS
        bind(DatasetFramework.class).annotatedWith(Names.named("datasetMDS")).to(DatasetFramework.class).in(Singleton.class);
      }
    });
}
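As above, a hedged usage sketch (not from the source) of the bindings this injector provides:

// Hypothetical: resolving the bindings declared in the inline AbstractModule above; Key is com.google.inject.Key.
Injector injector = createInjector();
HBaseTableUtil tableUtil = injector.getInstance(HBaseTableUtil.class);                         // via HBaseTableUtilFactory
Store defaultStore = injector.getInstance(Key.get(Store.class, Names.named("defaultStore")));  // -> DefaultStore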
Use of co.cask.cdap.notifications.feeds.client.NotificationFeedClientModule in project cdap by caskdata.
The class UpgradeTool, method createInjector.
@VisibleForTesting
Injector createInjector() throws Exception {
  return Guice.createInjector(
    new ConfigModule(cConf, hConf),
    new LocationRuntimeModule().getDistributedModules(),
    new ZKClientModule(),
    new DiscoveryRuntimeModule().getDistributedModules(),
    new MessagingClientModule(),
    Modules.override(new DataSetsModules().getDistributedModules()).with(new AbstractModule() {
      @Override
      protected void configure() {
        bind(DatasetFramework.class).to(InMemoryDatasetFramework.class).in(Scopes.SINGLETON);
        // DataSetsModules().getDistributedModules() binds to RemoteDatasetFramework, so override that to
        // the same InMemoryDatasetFramework
        bind(DatasetFramework.class).annotatedWith(Names.named(DataSetsModules.BASE_DATASET_FRAMEWORK))
          .to(DatasetFramework.class);
        install(new FactoryModuleBuilder()
                  .implement(DatasetDefinitionRegistry.class, DefaultDatasetDefinitionRegistry.class)
                  .build(DatasetDefinitionRegistryFactory.class));
        // CDAP-5954: the upgrade tool does not need to record lineage and metadata changes for now.
        bind(LineageWriter.class).to(NoOpLineageWriter.class);
      }
    }),
    new ViewAdminModules().getDistributedModules(),
    new StreamAdminModules().getDistributedModules(),
    new NotificationFeedClientModule(),
    new TwillModule(),
    new ExploreClientModule(),
    new ProgramRunnerRuntimeModule().getDistributedModules(),
    new ServiceStoreModules().getDistributedModules(),
    new SystemDatasetRuntimeModule().getDistributedModules(),
    // don't need real notifications for upgrade, so use the in-memory implementations
    new NotificationServiceRuntimeModule().getInMemoryModules(),
    new KafkaClientModule(),
    new NamespaceStoreModule().getDistributedModules(),
    new AuthenticationContextModules().getMasterModule(),
    new AuthorizationModule(),
    new AuthorizationEnforcementModule().getMasterModule(),
    new SecureStoreModules().getDistributedModules(),
    new DataFabricModules(UpgradeTool.class.getName()).getDistributedModules(),
    new AppFabricServiceRuntimeModule().getDistributedModules(),
    new AbstractModule() {
      @Override
      protected void configure() {
        // DataFabricDistributedModule needs a MetricsCollectionService binding; since the upgrade tool
        // does not do anything with metrics, we just bind it to NoOpMetricsCollectionService
        bind(MetricsCollectionService.class).to(NoOpMetricsCollectionService.class).in(Scopes.SINGLETON);
        bind(MetricDatasetFactory.class).to(DefaultMetricDatasetFactory.class).in(Scopes.SINGLETON);
        bind(MetricStore.class).to(DefaultMetricStore.class);
      }

      @Provides
      @Singleton
      @Named("datasetInstanceManager")
      @SuppressWarnings("unused")
      public DatasetInstanceManager getDatasetInstanceManager(TransactionSystemClientService txClient,
                                                              TransactionExecutorFactory txExecutorFactory,
                                                              @Named("datasetMDS") DatasetFramework framework) {
        return new DatasetInstanceManager(txClient, txExecutorFactory, framework);
      }

      // This is needed because the LocalApplicationManager
      // expects a dsframework injection named datasetMDS
      @Provides
      @Singleton
      @Named("datasetMDS")
      @SuppressWarnings("unused")
      public DatasetFramework getInDsFramework(DatasetFramework dsFramework) {
        return dsFramework;
      }
    });
}
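A minimal sketch of how a caller might exercise this injector, assuming only the dataset bindings are needed; the instance name below is illustrative, since createInjector is an instance method that relies on the tool's cConf and hConf fields.

// Hypothetical usage: the overridden DataSetsModules bindings resolve to the in-memory framework.
// Key is com.google.inject.Key; upgradeTool is an illustrative UpgradeTool instance.
Injector injector = upgradeTool.createInjector();
DatasetFramework dsFramework = injector.getInstance(DatasetFramework.class);  // -> InMemoryDatasetFramework
DatasetInstanceManager instanceManager =
  injector.getInstance(Key.get(DatasetInstanceManager.class, Names.named("datasetInstanceManager")));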