Usage example of co.cask.cdap.api.data.stream.StreamSpecification in project cdap by caskdata:
class InMemoryStreamMetaStore, method listStreams.
@Override
public synchronized Multimap<NamespaceId, StreamSpecification> listStreams() throws Exception {
  ImmutableMultimap.Builder<NamespaceId, StreamSpecification> builder = ImmutableMultimap.builder();
  // Hold the lock on the backing multimap for the entire iteration. The previous version only
  // synchronized inside the loop body, which left the keySet() iteration itself unguarded and
  // open to ConcurrentModificationException if another thread mutated 'streams' between
  // iterations of the loop.
  synchronized (streams) {
    for (String namespaceId : streams.keySet()) {
      Collection<String> streamNames = streams.get(namespaceId);
      // Lazily wrap each stream name into a minimal StreamSpecification (name only).
      builder.putAll(new NamespaceId(namespaceId),
                     Collections2.transform(streamNames, new Function<String, StreamSpecification>() {
        @Nullable
        @Override
        public StreamSpecification apply(String input) {
          return new StreamSpecification.Builder().setName(input).create();
        }
      }));
    }
  }
  return builder.build();
}
Usage example of co.cask.cdap.api.data.stream.StreamSpecification in project cdap by caskdata:
class LocalStreamService, method runOneIteration.
@Override
protected void runOneIteration() throws Exception {
  // For every known stream, compute its aggregated size and let the aggregator decide
  // whether the notification threshold has been crossed.
  for (Map.Entry<NamespaceId, StreamSpecification> entry : streamMetaStore.listStreams().entries()) {
    StreamId streamId = entry.getKey().stream(entry.getValue().getName());
    try {
      StreamSizeAggregator aggregator = aggregators.get(streamId);
      if (aggregator == null) {
        // First time this stream shows up: fetch its configuration before aggregating.
        StreamConfig config;
        try {
          config = streamAdmin.getConfig(streamId);
        } catch (FileNotFoundException e) {
          // Stream without a configuration: skip it to avoid flooding the logs with exceptions.
          continue;
        }
        aggregator = createSizeAggregator(streamId, 0, config.getNotificationThresholdMB());
      }
      aggregator.checkAggregatedSize();
    } catch (Exception e) {
      // Log and swallow: propagating would terminate this scheduled service. The next
      // iteration retries and should succeed once the underlying problem is resolved.
      LOG.warn("Exception in aggregating stream size for {}", streamId, e);
    }
  }
}
Usage example of co.cask.cdap.api.data.stream.StreamSpecification in project cdap by caskdata:
class ApplicationDetail, method fromSpec.
/**
 * Builds an {@code ApplicationDetail} view from an application specification.
 *
 * @param spec the application specification to flatten
 * @param ownerPrincipal the owner principal of the application, or {@code null} if none
 * @return the detail record listing programs, streams, datasets, plugins and artifact summary
 */
public static ApplicationDetail fromSpec(ApplicationSpecification spec, @Nullable String ownerPrincipal) {
  // Flatten every program category of the spec into one flat list of ProgramRecords.
  List<ProgramRecord> programs = new ArrayList<>();
  addPrograms(programs, ProgramType.FLOW, spec.getName(), spec.getFlows().values());
  addPrograms(programs, ProgramType.MAPREDUCE, spec.getName(), spec.getMapReduce().values());
  addPrograms(programs, ProgramType.SERVICE, spec.getName(), spec.getServices().values());
  addPrograms(programs, ProgramType.SPARK, spec.getName(), spec.getSpark().values());
  addPrograms(programs, ProgramType.WORKER, spec.getName(), spec.getWorkers().values());
  addPrograms(programs, ProgramType.WORKFLOW, spec.getName(), spec.getWorkflows().values());
  List<StreamDetail> streams = new ArrayList<>();
  for (StreamSpecification streamSpec : spec.getStreams().values()) {
    streams.add(new StreamDetail(streamSpec.getName()));
  }
  List<DatasetDetail> datasets = new ArrayList<>();
  for (DatasetCreationSpec datasetSpec : spec.getDatasets().values()) {
    datasets.add(new DatasetDetail(datasetSpec.getInstanceName(), datasetSpec.getTypeName()));
  }
  List<PluginDetail> plugins = new ArrayList<>();
  for (Map.Entry<String, Plugin> pluginEntry : spec.getPlugins().entrySet()) {
    plugins.add(new PluginDetail(pluginEntry.getKey(),
                                 pluginEntry.getValue().getPluginClass().getName(),
                                 pluginEntry.getValue().getPluginClass().getType()));
  }
  // this is only required if there are old apps lying around that failed to get upgraded during
  // the upgrade to v3.2 for some reason. In those cases artifact id will be null until they
  // re-deploy the app. In the meantime, we don't want this api call to null pointer exception.
  ArtifactSummary summary = spec.getArtifactId() == null
    ? new ArtifactSummary(spec.getName(), null)
    : ArtifactSummary.from(spec.getArtifactId());
  return new ApplicationDetail(spec.getName(), spec.getAppVersion(), spec.getDescription(),
                               spec.getConfiguration(), streams, datasets, programs, plugins,
                               summary, ownerPrincipal);
}

/**
 * Appends one {@link ProgramRecord} per program specification to {@code programs}.
 * Replaces six copy-pasted loops that differed only in program type and source collection.
 *
 * @param programs the output list to append to
 * @param type the program type recorded for every spec in {@code specs}
 * @param appName the application name recorded on each record
 * @param specs the program specifications to convert
 */
private static void addPrograms(List<ProgramRecord> programs, ProgramType type, String appName,
                                Iterable<? extends ProgramSpecification> specs) {
  for (ProgramSpecification programSpec : specs) {
    programs.add(new ProgramRecord(type, appName, programSpec.getName(), programSpec.getDescription()));
  }
}
Usage example of co.cask.cdap.api.data.stream.StreamSpecification in project cdap by caskdata:
class CDAPEntities, method collect.
@Override
public void collect() throws Exception {
  reset();
  // Count namespaces, then accumulate per-namespace entity totals into the counter fields.
  List<NamespaceMeta> allNamespaces = nsQueryAdmin.list();
  namespaces = allNamespaces.size();
  // Loop-invariant: the set of program types we tally is the same for every namespace.
  Set<ProgramType> countedTypes = EnumSet.of(ProgramType.FLOW, ProgramType.MAPREDUCE,
                                             ProgramType.SERVICE, ProgramType.SPARK,
                                             ProgramType.WORKER, ProgramType.WORKFLOW);
  for (NamespaceMeta meta : allNamespaces) {
    apps += appLifecycleService.getApps(meta.getNamespaceId(),
                                        Predicates.<ApplicationRecord>alwaysTrue()).size();
    for (ProgramType countedType : countedTypes) {
      programs += programLifecycleService.list(meta.getNamespaceId(), countedType).size();
    }
    artifacts += artifactRepository.getArtifactSummaries(meta.getNamespaceId(), true).size();
    datasets += dsFramework.getInstances(meta.getNamespaceId()).size();
    for (StreamSpecification streamSpec : streamAdmin.listStreams(meta.getNamespaceId())) {
      streams++;
      streamViews += streamAdmin.listViews(meta.getNamespaceId().stream(streamSpec.getName())).size();
    }
  }
}
Aggregations