
Example 11 with ServiceSpecification

Use of co.cask.cdap.api.service.ServiceSpecification in project cdap by caskdata.

From the class DistributedServiceProgramRunner, method validateOptions.

@Override
protected void validateOptions(Program program, ProgramOptions options) {
    super.validateOptions(program, options);
    // Extract and verify parameters
    ApplicationSpecification appSpec = program.getApplicationSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");
    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.SERVICE, "Only SERVICE process type is supported.");
    ServiceSpecification serviceSpec = appSpec.getServices().get(program.getName());
    Preconditions.checkNotNull(serviceSpec, "Missing ServiceSpecification for %s", program.getName());
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) ServiceSpecification(co.cask.cdap.api.service.ServiceSpecification) ProgramType(co.cask.cdap.proto.ProgramType)
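The lookup-and-validate pattern above can also be factored out on its own. The following is a minimal sketch, not CDAP source: the helper name requireServiceSpec is hypothetical, and it assumes Guava's Preconditions plus the ApplicationSpecification and ServiceSpecification accessors shown in the example.

import co.cask.cdap.api.app.ApplicationSpecification;
import co.cask.cdap.api.service.ServiceSpecification;
import com.google.common.base.Preconditions;

final class ServiceSpecs {

    private ServiceSpecs() {
    }

    // Looks up the ServiceSpecification for the given service name and fails fast if it is
    // missing, mirroring the checks in DistributedServiceProgramRunner.validateOptions.
    static ServiceSpecification requireServiceSpec(ApplicationSpecification appSpec, String serviceName) {
        Preconditions.checkNotNull(appSpec, "Missing application specification.");
        ServiceSpecification serviceSpec = appSpec.getServices().get(serviceName);
        Preconditions.checkNotNull(serviceSpec, "Missing ServiceSpecification for %s", serviceName);
        return serviceSpec;
    }
}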

Example 12 with ServiceSpecification

Use of co.cask.cdap.api.service.ServiceSpecification in project cdap by caskdata.

From the class ServiceClientTestRun, method testGetServiceSpecification.

@Test
public void testGetServiceSpecification() throws Exception {
    ServiceSpecification serviceSpecification = serviceClient.get(service);
    // JUnit convention: expected value first, then actual.
    assertEquals(PingService.NAME, serviceSpecification.getName());
    assertEquals(1, serviceSpecification.getHandlers().size());
}
Also used : ServiceSpecification(co.cask.cdap.api.service.ServiceSpecification) Test(org.junit.Test)
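As a follow-up, the same specification exposes a few more accessors that can be asserted on in the same style. This is a hedged sketch, not part of the test above: it reuses the serviceClient and service fixtures from the test, and getResources() is the accessor used when a spec is copied in Example 13 below.

    ServiceSpecification spec = serviceClient.get(service);
    Assert.assertEquals(PingService.NAME, spec.getName());
    Assert.assertEquals(1, spec.getHandlers().size());
    // getResources() is the same accessor used in DefaultStore.setServiceInstances (Example 13).
    Assert.assertNotNull(spec.getResources());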

Example 13 with ServiceSpecification

Use of co.cask.cdap.api.service.ServiceSpecification in project cdap by caskdata.

From the class DefaultStore, method setServiceInstances.

@Override
public void setServiceInstances(final ProgramId id, final int instances) {
    Preconditions.checkArgument(instances > 0, "Cannot change number of service instances to %s", instances);
    Transactions.executeUnchecked(transactional, new TxRunnable() {

        @Override
        public void run(DatasetContext context) throws Exception {
            AppMetadataStore metaStore = getAppMetadataStore(context);
            ApplicationSpecification appSpec = getAppSpecOrFail(metaStore, id);
            ServiceSpecification serviceSpec = getServiceSpecOrFail(id, appSpec);
            // Create a new spec copy from the old one, except with updated instances number
            serviceSpec = new ServiceSpecification(serviceSpec.getClassName(), serviceSpec.getName(), serviceSpec.getDescription(), serviceSpec.getHandlers(), serviceSpec.getResources(), instances);
            ApplicationSpecification newAppSpec = replaceServiceSpec(appSpec, id.getProgram(), serviceSpec);
            metaStore.updateAppSpec(id.getNamespace(), id.getApplication(), id.getVersion(), newAppSpec);
        }
    });
    LOG.trace("Setting program instances: namespace: {}, application: {}, service: {}, new instances count: {}", id.getNamespaceId(), id.getApplication(), id.getProgram(), instances);
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) ForwardingApplicationSpecification(co.cask.cdap.internal.app.ForwardingApplicationSpecification) ServiceSpecification(co.cask.cdap.api.service.ServiceSpecification) TxRunnable(co.cask.cdap.api.TxRunnable) DatasetContext(co.cask.cdap.api.data.DatasetContext) TransactionFailureException(org.apache.tephra.TransactionFailureException) ProgramNotFoundException(co.cask.cdap.common.ProgramNotFoundException) ApplicationNotFoundException(co.cask.cdap.common.ApplicationNotFoundException) TransactionNotInProgressException(org.apache.tephra.TransactionNotInProgressException) TransactionConflictException(org.apache.tephra.TransactionConflictException) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) NoSuchElementException(java.util.NoSuchElementException) IOException(java.io.IOException)
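The copy-and-replace step inside the TxRunnable can be isolated as well. This is a sketch only, assuming the ServiceSpecification constructor has exactly the signature used above (className, name, description, handlers, resources, instances); the helper name withInstances is hypothetical.

import co.cask.cdap.api.service.ServiceSpecification;

final class ServiceSpecCopies {

    private ServiceSpecCopies() {
    }

    // Returns a copy of the given specification that differs only in its instance count,
    // mirroring the copy made inside DefaultStore.setServiceInstances.
    static ServiceSpecification withInstances(ServiceSpecification spec, int instances) {
        return new ServiceSpecification(spec.getClassName(), spec.getName(), spec.getDescription(),
                                        spec.getHandlers(), spec.getResources(), instances);
    }
}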

Example 14 with ServiceSpecification

Use of co.cask.cdap.api.service.ServiceSpecification in project cdap by caskdata.

From the class ApplicationRegistrationStage, method registerDatasets.

// Register dataset usage based on the program specifications.
// Note that datasets declared in worker specifications are not registered at app deploy time, because the
// useDataset method of WorkerConfigurer is deprecated: worker access to datasets is meant to be completely
// dynamic, and other program types are moving in the same direction.
// SparkSpecifications are treated the same way, since a Spark program's dataset access is also completely dynamic.
private void registerDatasets(ApplicationWithPrograms input) {
    ApplicationSpecification appSpec = input.getSpecification();
    ApplicationId appId = input.getApplicationId();
    NamespaceId namespaceId = appId.getParent();
    for (FlowSpecification flow : appSpec.getFlows().values()) {
        ProgramId programId = appId.flow(flow.getName());
        for (FlowletConnection connection : flow.getConnections()) {
            if (connection.getSourceType().equals(FlowletConnection.Type.STREAM)) {
                usageRegistry.register(programId, namespaceId.stream(connection.getSourceName()));
            }
        }
        for (FlowletDefinition flowlet : flow.getFlowlets().values()) {
            for (String dataset : flowlet.getDatasets()) {
                usageRegistry.register(programId, namespaceId.dataset(dataset));
            }
        }
    }
    for (MapReduceSpecification program : appSpec.getMapReduce().values()) {
        ProgramId programId = appId.mr(program.getName());
        for (String dataset : program.getDataSets()) {
            usageRegistry.register(programId, namespaceId.dataset(dataset));
        }
    }
    for (SparkSpecification sparkSpec : appSpec.getSpark().values()) {
        ProgramId programId = appId.spark(sparkSpec.getName());
        for (String dataset : sparkSpec.getDatasets()) {
            usageRegistry.register(programId, namespaceId.dataset(dataset));
        }
    }
    for (ServiceSpecification serviceSpecification : appSpec.getServices().values()) {
        ProgramId programId = appId.service(serviceSpecification.getName());
        for (HttpServiceHandlerSpecification handlerSpecification : serviceSpecification.getHandlers().values()) {
            for (String dataset : handlerSpecification.getDatasets()) {
                usageRegistry.register(programId, namespaceId.dataset(dataset));
            }
        }
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) DefaultApplicationSpecification(co.cask.cdap.internal.app.DefaultApplicationSpecification) FlowletDefinition(co.cask.cdap.api.flow.FlowletDefinition) SparkSpecification(co.cask.cdap.api.spark.SparkSpecification) ServiceSpecification(co.cask.cdap.api.service.ServiceSpecification) FlowSpecification(co.cask.cdap.api.flow.FlowSpecification) FlowletConnection(co.cask.cdap.api.flow.FlowletConnection) MapReduceSpecification(co.cask.cdap.api.mapreduce.MapReduceSpecification) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ProgramId(co.cask.cdap.proto.id.ProgramId) HttpServiceHandlerSpecification(co.cask.cdap.api.service.http.HttpServiceHandlerSpecification)
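The service branch of the loop above is the part that involves ServiceSpecification: it walks every HTTP handler of every service and gathers the declared dataset names. A minimal sketch of just that part (the helper name collectServiceDatasets is hypothetical; the accessors are the ones used in the example):

import java.util.HashSet;
import java.util.Set;

import co.cask.cdap.api.app.ApplicationSpecification;
import co.cask.cdap.api.service.ServiceSpecification;
import co.cask.cdap.api.service.http.HttpServiceHandlerSpecification;

final class ServiceDatasets {

    private ServiceDatasets() {
    }

    // Collects the names of all datasets declared by the HTTP handlers of every service in the app.
    static Set<String> collectServiceDatasets(ApplicationSpecification appSpec) {
        Set<String> datasets = new HashSet<>();
        for (ServiceSpecification serviceSpec : appSpec.getServices().values()) {
            for (HttpServiceHandlerSpecification handlerSpec : serviceSpec.getHandlers().values()) {
                datasets.addAll(handlerSpec.getDatasets());
            }
        }
        return datasets;
    }
}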

Example 15 with ServiceSpecification

Use of co.cask.cdap.api.service.ServiceSpecification in project cdap by caskdata.

From the class ApplicationSpecificationCodec, method deserialize.

@Override
public ApplicationSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
    JsonObject jsonObj = json.getAsJsonObject();
    String name = jsonObj.get("name").getAsString();
    String appVersion = ApplicationId.DEFAULT_VERSION;
    if (jsonObj.has("appVersion")) {
        appVersion = jsonObj.get("appVersion").getAsString();
    }
    String description = jsonObj.get("description").getAsString();
    String configuration = null;
    if (jsonObj.has("configuration")) {
        configuration = jsonObj.get("configuration").getAsString();
    }
    ArtifactId artifactId = context.deserialize(jsonObj.get("artifactId"), ArtifactId.class);
    Map<String, StreamSpecification> streams = deserializeMap(jsonObj.get("streams"), context, StreamSpecification.class);
    Map<String, String> datasetModules = deserializeMap(jsonObj.get("datasetModules"), context, String.class);
    Map<String, DatasetCreationSpec> datasetInstances = deserializeMap(jsonObj.get("datasetInstances"), context, DatasetCreationSpec.class);
    Map<String, FlowSpecification> flows = deserializeMap(jsonObj.get("flows"), context, FlowSpecification.class);
    Map<String, MapReduceSpecification> mapReduces = deserializeMap(jsonObj.get("mapReduces"), context, MapReduceSpecification.class);
    Map<String, SparkSpecification> sparks = deserializeMap(jsonObj.get("sparks"), context, SparkSpecification.class);
    Map<String, WorkflowSpecification> workflows = deserializeMap(jsonObj.get("workflows"), context, WorkflowSpecification.class);
    Map<String, ServiceSpecification> services = deserializeMap(jsonObj.get("services"), context, ServiceSpecification.class);
    Map<String, ScheduleSpecification> schedules = deserializeMap(jsonObj.get("schedules"), context, ScheduleSpecification.class);
    Map<String, ScheduleCreationSpec> programSchedules = deserializeMap(jsonObj.get("programSchedules"), context, ScheduleCreationSpec.class);
    Map<String, WorkerSpecification> workers = deserializeMap(jsonObj.get("workers"), context, WorkerSpecification.class);
    Map<String, Plugin> plugins = deserializeMap(jsonObj.get("plugins"), context, Plugin.class);
    return new DefaultApplicationSpecification(name, appVersion, description, configuration, artifactId, streams, datasetModules, datasetInstances, flows, mapReduces, sparks, workflows, services, schedules, programSchedules, workers, plugins);
}
Also used : ServiceSpecification(co.cask.cdap.api.service.ServiceSpecification) ArtifactId(co.cask.cdap.api.artifact.ArtifactId) JsonObject(com.google.gson.JsonObject) SparkSpecification(co.cask.cdap.api.spark.SparkSpecification) FlowSpecification(co.cask.cdap.api.flow.FlowSpecification) WorkflowSpecification(co.cask.cdap.api.workflow.WorkflowSpecification) ScheduleSpecification(co.cask.cdap.api.schedule.ScheduleSpecification) StreamSpecification(co.cask.cdap.api.data.stream.StreamSpecification) WorkerSpecification(co.cask.cdap.api.worker.WorkerSpecification) MapReduceSpecification(co.cask.cdap.api.mapreduce.MapReduceSpecification) ScheduleCreationSpec(co.cask.cdap.internal.schedule.ScheduleCreationSpec) DatasetCreationSpec(co.cask.cdap.internal.dataset.DatasetCreationSpec) Plugin(co.cask.cdap.api.plugin.Plugin)
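To put the codec to work, it has to be registered with Gson as a type adapter for ApplicationSpecification. The sketch below is an assumption-laden usage example, not CDAP source: it assumes ApplicationSpecificationCodec has a no-argument constructor and can be registered directly, and jsonString is a hypothetical variable holding a serialized specification.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import co.cask.cdap.api.app.ApplicationSpecification;

// Sketch only: wire the codec into a Gson instance, then deserialize an app spec from JSON.
Gson gson = new GsonBuilder()
    .registerTypeAdapter(ApplicationSpecification.class, new ApplicationSpecificationCodec())
    .create();
ApplicationSpecification appSpec = gson.fromJson(jsonString, ApplicationSpecification.class);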

Aggregations

ServiceSpecification (co.cask.cdap.api.service.ServiceSpecification): 17
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 7
HttpServiceHandlerSpecification (co.cask.cdap.api.service.http.HttpServiceHandlerSpecification): 5
ServiceHttpEndpoint (co.cask.cdap.api.service.http.ServiceHttpEndpoint): 4
ProgramId (co.cask.cdap.proto.id.ProgramId): 4
Resources (co.cask.cdap.api.Resources): 3
ProgramType (co.cask.cdap.proto.ProgramType): 3
JsonObject (com.google.gson.JsonObject): 3
Test (org.junit.Test): 3
FlowSpecification (co.cask.cdap.api.flow.FlowSpecification): 2
MapReduceSpecification (co.cask.cdap.api.mapreduce.MapReduceSpecification): 2
SparkSpecification (co.cask.cdap.api.spark.SparkSpecification): 2
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 2
RunId (org.apache.twill.api.RunId): 2
AppWithServices (co.cask.cdap.AppWithServices): 1
TxRunnable (co.cask.cdap.api.TxRunnable): 1
ArtifactId (co.cask.cdap.api.artifact.ArtifactId): 1
DatasetContext (co.cask.cdap.api.data.DatasetContext): 1
StreamSpecification (co.cask.cdap.api.data.stream.StreamSpecification): 1
DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException): 1