Use of co.cask.cdap.proto.ProgramType in project cdap by caskdata.
From the class InMemoryProgramRunnerModule, method configure().
/**
* Configures a {@link com.google.inject.Binder} via the exposed methods.
*/
@Override
protected void configure() {
  // Bind ServiceAnnouncer for services.
  bind(ServiceAnnouncer.class).to(DiscoveryServiceAnnouncer.class);
  // For binding queue handling.
  bind(QueueReaderFactory.class).in(Scopes.SINGLETON);
  // Bind one ProgramRunner per ProgramType.
  MapBinder<ProgramType, ProgramRunner> runnerFactoryBinder =
    MapBinder.newMapBinder(binder(), ProgramType.class, ProgramRunner.class);
  runnerFactoryBinder.addBinding(ProgramType.FLOW).to(FlowProgramRunner.class);
  runnerFactoryBinder.addBinding(ProgramType.MAPREDUCE).to(MapReduceProgramRunner.class);
  runnerFactoryBinder.addBinding(ProgramType.WORKFLOW).to(WorkflowProgramRunner.class);
  runnerFactoryBinder.addBinding(ProgramType.WEBAPP).to(WebappProgramRunner.class);
  runnerFactoryBinder.addBinding(ProgramType.WORKER).to(InMemoryWorkerRunner.class);
  runnerFactoryBinder.addBinding(ProgramType.SERVICE).to(InMemoryServiceProgramRunner.class);
  // Bind these three program runners in private scope.
  // They should only be used by the ProgramRunners in the runnerFactoryBinder.
  bind(FlowletProgramRunner.class);
  bind(ServiceProgramRunner.class);
  bind(WorkerProgramRunner.class);
  // The ProgramRunnerFactory should run in local mode.
  bind(ProgramRuntimeProvider.Mode.class).toInstance(ProgramRuntimeProvider.Mode.LOCAL);
  bind(ProgramRunnerFactory.class).to(DefaultProgramRunnerFactory.class).in(Scopes.SINGLETON);
  // Note: Exposed for test cases. Need to refactor test cases.
  expose(ProgramRunnerFactory.class);
  // Bind and expose the runtime service.
  bind(ProgramRuntimeService.class).to(InMemoryProgramRuntimeService.class).in(Scopes.SINGLETON);
  expose(ProgramRuntimeService.class);
  // For binding DataSet transaction handling.
  install(new DataFabricFacadeModule());
  // Create the webapp HTTP handler factory.
  install(new FactoryModuleBuilder()
            .implement(JarHttpHandler.class, IntactJarHttpHandler.class)
            .build(WebappHttpHandlerFactory.class));
  // Create the StreamWriter factory.
  install(new FactoryModuleBuilder()
            .implement(StreamWriter.class, streamWriterClass)
            .build(StreamWriterFactory.class));
}
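For context, the following is a minimal, self-contained sketch of the MapBinder dispatch pattern the module above relies on: Guice assembles a Map keyed by ProgramType from the individual addBinding calls, and a factory-style consumer looks up a runner per program type. The Runner interface, WorkerRunner class, and RunnerFactory class are hypothetical stand-ins for illustration only, not CDAP's ProgramRunner or DefaultProgramRunnerFactory.

import co.cask.cdap.proto.ProgramType;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Provider;
import com.google.inject.multibindings.MapBinder;

import java.util.Map;

public class RunnerDispatchSketch {

  // Hypothetical stand-in for CDAP's ProgramRunner interface.
  interface Runner {
    void run(String programName);
  }

  static class WorkerRunner implements Runner {
    @Override
    public void run(String programName) {
      System.out.println("Running worker " + programName);
    }
  }

  static class SketchModule extends AbstractModule {
    @Override
    protected void configure() {
      // Same MapBinder pattern as InMemoryProgramRunnerModule: one binding per ProgramType.
      MapBinder<ProgramType, Runner> runnerBinder =
        MapBinder.newMapBinder(binder(), ProgramType.class, Runner.class);
      runnerBinder.addBinding(ProgramType.WORKER).to(WorkerRunner.class);
    }
  }

  // Factory-style consumer: Guice injects the map assembled by the MapBinder,
  // and the Provider map yields a fresh Runner per lookup.
  static class RunnerFactory {
    private final Map<ProgramType, Provider<Runner>> runners;

    @Inject
    RunnerFactory(Map<ProgramType, Provider<Runner>> runners) {
      this.runners = runners;
    }

    Runner create(ProgramType type) {
      Provider<Runner> provider = runners.get(type);
      if (provider == null) {
        throw new IllegalArgumentException("Unsupported program type: " + type);
      }
      return provider.get();
    }
  }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new SketchModule());
    injector.getInstance(RunnerFactory.class).create(ProgramType.WORKER).run("my-worker");
  }
}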
Use of co.cask.cdap.proto.ProgramType in project cdap by caskdata.
From the class LogHandler, method getRunIdLogs().
@GET
@Path("/namespaces/{namespace-id}/apps/{app-id}/{program-type}/{program-id}/runs/{run-id}/logs")
public void getRunIdLogs(HttpRequest request, HttpResponder responder,
                         @PathParam("namespace-id") String namespaceId,
                         @PathParam("app-id") String appId,
                         @PathParam("program-type") String programType,
                         @PathParam("program-id") String programId,
                         @PathParam("run-id") String runId,
                         @QueryParam("start") @DefaultValue("-1") long fromTimeSecsParam,
                         @QueryParam("stop") @DefaultValue("-1") long toTimeSecsParam,
                         @QueryParam("escape") @DefaultValue("true") boolean escape,
                         @QueryParam("filter") @DefaultValue("") String filterStr,
                         @QueryParam("format") @DefaultValue("text") String format,
                         @QueryParam("suppress") List<String> suppress) throws NotFoundException {
  ProgramType type = ProgramType.valueOfCategoryName(programType);
  RunRecordMeta runRecord = getRunRecordMeta(namespaceId, appId, type, programId, runId);
  LoggingContext loggingContext =
    LoggingContextHelper.getLoggingContextWithRunId(namespaceId, appId, programId, type, runId,
                                                    runRecord.getSystemArgs());
  doGetLogs(responder, loggingContext, fromTimeSecsParam, toTimeSecsParam, escape, filterStr,
            runRecord, format, suppress);
}
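As a small illustration of how the {program-type} path segment is resolved, the sketch below round-trips a handful of types through their category names. It assumes ProgramType exposes the REST-path category via a getCategoryName() accessor (an assumption, not confirmed by the snippet above) and limits itself to the types listed elsewhere on this page.

import co.cask.cdap.proto.ProgramType;

import java.util.EnumSet;

public class ProgramTypeResolutionSketch {
  public static void main(String[] args) {
    // Round-trip each type through its category name (assumed accessor: getCategoryName()),
    // the same kind of string that arrives in the {program-type} path segment.
    for (ProgramType type : EnumSet.of(ProgramType.FLOW, ProgramType.MAPREDUCE,
                                       ProgramType.SERVICE, ProgramType.SPARK,
                                       ProgramType.WORKER, ProgramType.WORKFLOW)) {
      String categoryName = type.getCategoryName();
      System.out.println(categoryName + " -> " + ProgramType.valueOfCategoryName(categoryName));
    }
  }
}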
Use of co.cask.cdap.proto.ProgramType in project cdap by caskdata.
From the class CDAPEntities, method collect().
@Override
public void collect() throws Exception {
  reset();
  // Count entities across all namespaces.
  List<NamespaceMeta> namespaceMetas = nsQueryAdmin.list();
  namespaces = namespaceMetas.size();
  for (NamespaceMeta meta : namespaceMetas) {
    List<ApplicationRecord> appRecords =
      appLifecycleService.getApps(meta.getNamespaceId(), Predicates.<ApplicationRecord>alwaysTrue());
    apps += appRecords.size();
    // Count programs of every countable type.
    Set<ProgramType> programTypes = EnumSet.of(ProgramType.FLOW, ProgramType.MAPREDUCE,
                                               ProgramType.SERVICE, ProgramType.SPARK,
                                               ProgramType.WORKER, ProgramType.WORKFLOW);
    for (ProgramType programType : programTypes) {
      programs += programLifecycleService.list(meta.getNamespaceId(), programType).size();
    }
    artifacts += artifactRepository.getArtifactSummaries(meta.getNamespaceId(), true).size();
    datasets += dsFramework.getInstances(meta.getNamespaceId()).size();
    List<StreamSpecification> streamSpecs = streamAdmin.listStreams(meta.getNamespaceId());
    streams += streamSpecs.size();
    // Count the views defined on each stream.
    for (StreamSpecification streamSpec : streamSpecs) {
      StreamId streamId = meta.getNamespaceId().stream(streamSpec.getName());
      streamViews += streamAdmin.listViews(streamId).size();
    }
  }
}
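The per-type counting loop above can be isolated into a small standalone sketch. The ProgramLister interface below is a hypothetical stand-in for the programLifecycleService.list(namespaceId, type) call; only the EnumSet of program types comes from the snippet itself.

import co.cask.cdap.proto.ProgramType;

import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

public class ProgramCountSketch {

  // Hypothetical stand-in for programLifecycleService.list(namespaceId, type).
  interface ProgramLister {
    List<String> list(String namespace, ProgramType type);
  }

  static int countPrograms(ProgramLister lister, String namespace) {
    // Same set of countable types as in CDAPEntities.collect().
    Set<ProgramType> programTypes = EnumSet.of(ProgramType.FLOW, ProgramType.MAPREDUCE,
                                               ProgramType.SERVICE, ProgramType.SPARK,
                                               ProgramType.WORKER, ProgramType.WORKFLOW);
    int programs = 0;
    for (ProgramType programType : programTypes) {
      programs += lister.list(namespace, programType).size();
    }
    return programs;
  }

  public static void main(String[] args) {
    ProgramLister empty = (namespace, type) -> Collections.emptyList();
    System.out.println(countPrograms(empty, "default"));  // prints 0
  }
}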
Use of co.cask.cdap.proto.ProgramType in project cdap by caskdata.
From the class DistributedSparkProgramRunner, method validateOptions().
@Override
protected void validateOptions(Program program, ProgramOptions options) {
  super.validateOptions(program, options);
  // Extract and verify parameters.
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification for %s", program.getId());
  ProgramType processorType = program.getType();
  Preconditions.checkNotNull(processorType, "Missing processor type for %s", program.getId());
  Preconditions.checkArgument(processorType == ProgramType.SPARK,
                              "Only SPARK process type is supported. Program type is %s for %s",
                              processorType, program.getId());
  SparkSpecification spec = appSpec.getSpark().get(program.getName());
  Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getId());
}
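The Preconditions calls above fail fast with formatted messages. The following standalone sketch reproduces just the program-type check using Guava's Preconditions, with a plain programId string standing in for program.getId(); the method and example identifiers are hypothetical.

import co.cask.cdap.proto.ProgramType;
import com.google.common.base.Preconditions;

public class SparkTypeCheckSketch {

  // Rejects anything other than a SPARK program, mirroring the check in validateOptions.
  static void checkSparkType(ProgramType processorType, String programId) {
    Preconditions.checkNotNull(processorType, "Missing processor type for %s", programId);
    Preconditions.checkArgument(processorType == ProgramType.SPARK,
                                "Only SPARK process type is supported. Program type is %s for %s",
                                processorType, programId);
  }

  public static void main(String[] args) {
    checkSparkType(ProgramType.SPARK, "MyApp.MySparkProgram");   // passes
    checkSparkType(ProgramType.WORKER, "MyApp.MyWorker");        // throws IllegalArgumentException
  }
}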