use of io.cdap.cdap.proto.ProgramType in project cdap by caskdata.
the class RemoteExecutionDiscoveryService method discover.
@Override
public ServiceDiscovered discover(String name) {
  // In remote runtime, we don't support program discovery except for programs in the SYSTEM namespace.
  for (ProgramType programType : ProgramType.values()) {
    if (programType.isDiscoverable()
        && name.startsWith(programType.getDiscoverableTypeName() + ".")
        && !name.startsWith(programType.getDiscoverableTypeName() + "."
                              + NamespaceId.SYSTEM.getNamespace() + ".")) {
      return new DefaultServiceDiscovered(name);
    }
  }
  // Discovery for system services
  DefaultServiceDiscovered serviceDiscovered = services.get(name);
  String key = Constants.RuntimeMonitor.DISCOVERY_SERVICE_PREFIX + name;
  // Use a while loop to resolve races between discover and the cancellable returned by register
  while (serviceDiscovered == null) {
    // Try to add the ServiceDiscovered object
    serviceDiscovered = new DefaultServiceDiscovered(name);
    synchronized (this) {
      updateServiceDiscovered(serviceDiscovered, cConf, key);
      services.putIfAbsent(name, serviceDiscovered);
    }
    serviceDiscovered = services.get(name);
  }
  return serviceDiscovered;
}
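The while loop is the usual pattern for resolving the race between a lazy get-or-create and a concurrent removal: put the entry if absent, then re-read until a live entry is observed. A minimal, self-contained sketch of the same pattern using a plain ConcurrentHashMap (LazyRegistry and Entry are hypothetical names, not CDAP classes):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class LazyRegistry {

  private final ConcurrentMap<String, Entry> entries = new ConcurrentHashMap<>();

  /**
   * Returns the entry for the given name, creating it if absent. The loop guards against
   * a concurrent removal happening between putIfAbsent and the subsequent get.
   */
  public Entry getOrCreate(String name) {
    Entry entry = entries.get(name);
    while (entry == null) {
      entries.putIfAbsent(name, new Entry(name));
      // Re-read: another thread may have removed the entry that was just added
      entry = entries.get(name);
    }
    return entry;
  }

  public static final class Entry {
    private final String name;

    Entry(String name) {
      this.name = name;
    }

    public String getName() {
      return name;
    }
  }
}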
use of io.cdap.cdap.proto.ProgramType in project cdap by caskdata.
the class DistributedMapReduceProgramRunner method validateOptions.
@Override
protected void validateOptions(Program program, ProgramOptions options) {
  super.validateOptions(program, options);
  // Extract and verify parameters
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification.");
  ProgramType processorType = program.getType();
  Preconditions.checkNotNull(processorType, "Missing processor type.");
  Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE,
                              "Only MapReduce process type is supported.");
  MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
  Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName());
}
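The checks rely on Guava's Preconditions: checkNotNull throws NullPointerException and checkArgument throws IllegalArgumentException, and both support %s message templates. A minimal sketch of the same fail-fast validation style (JobOptionsValidator and validateJobName are hypothetical, not part of CDAP):

import com.google.common.base.Preconditions;

public class JobOptionsValidator {

  /**
   * Validates a job name the same way the program runner validates its options:
   * fail fast with a descriptive message instead of letting a bad value propagate.
   */
  public static String validateJobName(String jobName) {
    Preconditions.checkNotNull(jobName, "Missing job name.");
    Preconditions.checkArgument(!jobName.isEmpty(), "Job name must not be empty.");
    Preconditions.checkArgument(jobName.matches("[A-Za-z0-9_-]+"),
                                "Invalid job name %s", jobName);
    return jobName;
  }

  public static void main(String[] args) {
    System.out.println(validateJobName("word-count"));
    // validateJobName("bad name!") would throw IllegalArgumentException: Invalid job name bad name!
  }
}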
use of io.cdap.cdap.proto.ProgramType in project cdap by caskdata.
the class DistributedProgramRuntimeService method list.
@Override
public Map<RunId, RuntimeInfo> list(ProgramType type) {
  Map<RunId, RuntimeInfo> result = new HashMap<>(super.list(type));
  // Table holds the Twill RunId and TwillController associated with programs matching the input type
  Table<ProgramId, RunId, TwillController> twillProgramInfo = HashBasedTable.create();
  List<RuntimeInfo> runtimeInfos = getRuntimeInfos();
  // Go through all live applications and fill the twillProgramInfo table
  for (TwillRunner.LiveInfo liveInfo : twillRunner.lookupLive()) {
    String appName = liveInfo.getApplicationName();
    ProgramId programId = TwillAppNames.fromTwillAppName(appName, false);
    if (programId == null) {
      continue;
    }
    if (!type.equals(programId.getType())) {
      continue;
    }
    for (TwillController controller : liveInfo.getControllers()) {
      RunId twillRunId = controller.getRunId();
      // If it is already in the runtime infos, there is no need to look it up
      if (runtimeInfos.stream().anyMatch(info -> twillRunId.equals(info.getTwillRunId()))) {
        continue;
      }
      twillProgramInfo.put(programId, twillRunId, controller);
    }
  }
  if (twillProgramInfo.isEmpty()) {
    return ImmutableMap.copyOf(result);
  }
  final Set<RunId> twillRunIds = twillProgramInfo.columnKeySet();
  Collection<RunRecordDetail> activeRunRecords;
  synchronized (this) {
    activeRunRecords = store.getRuns(ProgramRunStatus.RUNNING,
                                     record -> record.getTwillRunId() != null
                                       && twillRunIds.contains(
                                         org.apache.twill.internal.RunIds.fromString(record.getTwillRunId()))).values();
  }
  for (RunRecordDetail record : activeRunRecords) {
    String twillRunId = record.getTwillRunId();
    if (twillRunId == null) {
      // This is unexpected. Just log and ignore the run record
      LOG.warn("No twill runId for in run record {}.", record);
      continue;
    }
    RunId twillRunIdFromRecord = org.apache.twill.internal.RunIds.fromString(twillRunId);
    // Get the CDAP RunId from the RunRecord
    RunId runId = RunIds.fromString(record.getPid());
    // Get the ProgramId and TwillController for the current twillRunId
    Map<ProgramId, TwillController> mapForTwillId = twillProgramInfo.columnMap().get(twillRunIdFromRecord);
    Map.Entry<ProgramId, TwillController> entry = mapForTwillId.entrySet().iterator().next();
    // Create a RuntimeInfo for the current Twill RunId
    if (result.computeIfAbsent(runId, rid -> createRuntimeInfo(entry.getKey(), rid, entry.getValue())) == null) {
      LOG.warn("Unable to create runtime info for program {} with run id {}", entry.getKey(), runId);
    }
  }
  return ImmutableMap.copyOf(result);
}
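The method leans on Guava's Table, a map with two keys, used here with ProgramId as the row key and the Twill RunId as the column key. A minimal, self-contained sketch of the Table operations used above, columnKeySet() and columnMap() (the program names and run ids below are made up for illustration):

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import java.util.Map;

public class TableExample {

  public static void main(String[] args) {
    // Row key: program name, column key: run id, value: a state string
    Table<String, String, String> runs = HashBasedTable.create();
    runs.put("WordCountWorkflow", "run-1", "RUNNING");
    runs.put("WordCountWorkflow", "run-2", "RUNNING");
    runs.put("LogAnalyticsWorker", "run-3", "RUNNING");

    // All column keys across every row (analogous to twillProgramInfo.columnKeySet())
    System.out.println(runs.columnKeySet());  // e.g. [run-1, run-2, run-3], order not guaranteed

    // Every (row, value) pair for one column (analogous to columnMap().get(twillRunIdFromRecord))
    Map<String, String> byProgram = runs.columnMap().get("run-3");
    System.out.println(byProgram);            // {LogAnalyticsWorker=RUNNING}
  }
}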
use of io.cdap.cdap.proto.ProgramType in project cdap by caskdata.
the class WorkflowTwillRunnable method createModule.
@Override
protected Module createModule(CConfiguration cConf, Configuration hConf,
                              ProgramOptions programOptions, ProgramRunId programRunId) {
  List<Module> modules = new ArrayList<>();
  modules.add(super.createModule(cConf, hConf, programOptions, programRunId));
  if (ProgramRunners.getClusterMode(programOptions) == ClusterMode.ON_PREMISE) {
    modules.add(new DistributedArtifactManagerModule());
  } else {
    modules.add(new AbstractModule() {
      @Override
      protected void configure() {
        bind(PluginFinder.class).to(UnsupportedPluginFinder.class);
      }
    });
  }
  modules.add(new PrivateModule() {
    @Override
    protected void configure() {
      // Bind the ProgramRunner for MapReduce, which is used by Workflow.
      // The ProgramRunner for Spark is provided by the DefaultProgramRunnerFactory through the extension mechanism.
      MapBinder<ProgramType, ProgramRunner> runnerFactoryBinder =
        MapBinder.newMapBinder(binder(), ProgramType.class, ProgramRunner.class);
      runnerFactoryBinder.addBinding(ProgramType.MAPREDUCE).to(MapReduceProgramRunner.class);
      // It uses the local mode factory because for Workflow we launch the job from the Workflow container directly.
      // The actual execution mode of the job is governed by the framework configuration:
      // for MapReduce it is in mapred-site.xml, for Spark it is in the hConf shipped from DistributedWorkflowProgramRunner.
      bind(ProgramRuntimeProvider.Mode.class).toInstance(ProgramRuntimeProvider.Mode.LOCAL);
      bind(ProgramRunnerFactory.class).to(DefaultProgramRunnerFactory.class).in(Scopes.SINGLETON);
      expose(ProgramRunnerFactory.class);
    }
  });
  return Modules.combine(modules);
}
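The MapBinder contributes entries to an injectable Map<ProgramType, ProgramRunner>, which a factory such as DefaultProgramRunnerFactory can have injected in order to pick a runner by program type. A minimal, self-contained sketch of that MapBinder pattern, using hypothetical JobType and Runner types rather than CDAP classes:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.multibindings.MapBinder;
import java.util.Map;

public class MapBinderExample {

  enum JobType { BATCH, STREAMING }

  interface Runner { String run(); }

  static class BatchRunner implements Runner {
    @Override
    public String run() { return "batch"; }
  }

  static class StreamingRunner implements Runner {
    @Override
    public String run() { return "streaming"; }
  }

  // The factory receives the aggregated map built from all MapBinder contributions
  static class RunnerFactory {
    private final Map<JobType, Runner> runners;

    @Inject
    RunnerFactory(Map<JobType, Runner> runners) {
      this.runners = runners;
    }

    Runner create(JobType type) {
      return runners.get(type);
    }
  }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        MapBinder<JobType, Runner> runnerBinder =
          MapBinder.newMapBinder(binder(), JobType.class, Runner.class);
        runnerBinder.addBinding(JobType.BATCH).to(BatchRunner.class);
        runnerBinder.addBinding(JobType.STREAMING).to(StreamingRunner.class);
      }
    });
    RunnerFactory factory = injector.getInstance(RunnerFactory.class);
    System.out.println(factory.create(JobType.BATCH).run());  // batch
  }
}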
use of io.cdap.cdap.proto.ProgramType in project cdap by caskdata.
the class DefaultRuntimeJob method createModules.
/**
 * Returns the list of Guice modules used to start the program run.
 */
@VisibleForTesting
List<Module> createModules(RuntimeJobEnvironment runtimeJobEnv, CConfiguration cConf,
                           ProgramRunId programRunId, ProgramOptions programOpts) {
  List<Module> modules = new ArrayList<>();
  modules.add(new ConfigModule(cConf));
  RuntimeMonitorType runtimeMonitorType = SystemArguments.getRuntimeMonitorType(cConf, programOpts);
  modules.add(RuntimeMonitors.getRemoteAuthenticatorModule(runtimeMonitorType, programOpts));
  modules.add(new IOModule());
  modules.add(new TMSLogAppenderModule());
  modules.add(new RemoteExecutionDiscoveryModule());
  modules.add(new AuthenticationContextModules().getProgramContainerModule(cConf));
  modules.add(new MetricsClientRuntimeModule().getDistributedModules());
  modules.add(new MessagingServerRuntimeModule().getStandaloneModules());
  modules.add(new AbstractModule() {
    @Override
    protected void configure() {
      bind(ClusterMode.class).toInstance(ClusterMode.ISOLATED);
      bind(UGIProvider.class).to(CurrentUGIProvider.class).in(Scopes.SINGLETON);
      // Bindings from the environment
      bind(TwillRunner.class).annotatedWith(Constants.AppFabric.ProgramRunner.class)
        .toInstance(runtimeJobEnv.getTwillRunner());
      bind(LocationFactory.class).toInstance(runtimeJobEnv.getLocationFactory());
      MapBinder<ProgramType, ProgramRunner> defaultProgramRunnerBinder =
        MapBinder.newMapBinder(binder(), ProgramType.class, ProgramRunner.class);
      bind(ProgramRuntimeProvider.Mode.class).toInstance(ProgramRuntimeProvider.Mode.DISTRIBUTED);
      bind(ProgramRunnerFactory.class).annotatedWith(Constants.AppFabric.ProgramRunner.class)
        .to(DefaultProgramRunnerFactory.class).in(Scopes.SINGLETON);
      bind(ProgramStateWriter.class).to(MessagingProgramStateWriter.class).in(Scopes.SINGLETON);
      defaultProgramRunnerBinder.addBinding(ProgramType.MAPREDUCE).to(DistributedMapReduceProgramRunner.class);
      defaultProgramRunnerBinder.addBinding(ProgramType.WORKFLOW).to(DistributedWorkflowProgramRunner.class);
      defaultProgramRunnerBinder.addBinding(ProgramType.WORKER).to(DistributedWorkerProgramRunner.class);
      bind(ProgramRunnerFactory.class).to(DefaultProgramRunnerFactory.class).in(Scopes.SINGLETON);
      bind(ProgramRunId.class).toInstance(programRunId);
      bind(RuntimeMonitorType.class).toInstance(runtimeMonitorType);
      install(new FactoryModuleBuilder()
                .implement(Configurator.class, InMemoryConfigurator.class)
                .build(ConfiguratorFactory.class));
      bind(String.class).annotatedWith(Names.named(RemoteIsolatedPluginFinder.ISOLATED_PLUGIN_DIR))
        .toInstance(programOpts.getArguments().getOption(ProgramOptionConstants.PLUGIN_DIR,
                                                         DistributedProgramRunner.PLUGIN_DIR));
      bind(PluginFinder.class).to(RemoteIsolatedPluginFinder.class);
      bind(ArtifactRepositoryReader.class).to(RemoteArtifactRepositoryReader.class).in(Scopes.SINGLETON);
      bind(ArtifactRepository.class).to(RemoteArtifactRepository.class);
    }
  });
  return modules;
}
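The install(new FactoryModuleBuilder()...) call uses Guice assisted injection: Guice generates an implementation of the factory interface and forwards the factory-method arguments to constructor parameters annotated with @Assisted. A minimal, self-contained sketch of that pattern, using hypothetical Report types rather than CDAP classes:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.FactoryModuleBuilder;

public class AssistedInjectExample {

  interface Report { String render(); }

  // The runtime argument (title) is @Assisted; other dependencies could come from the injector.
  static class TextReport implements Report {
    private final String title;

    @Inject
    TextReport(@Assisted String title) {
      this.title = title;
    }

    @Override
    public String render() {
      return "Report: " + title;
    }
  }

  // Guice generates the implementation of this factory interface.
  interface ReportFactory {
    Report create(String title);
  }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        install(new FactoryModuleBuilder()
                  .implement(Report.class, TextReport.class)
                  .build(ReportFactory.class));
      }
    });
    ReportFactory factory = injector.getInstance(ReportFactory.class);
    System.out.println(factory.create("daily").render());  // Report: daily
  }
}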