Example usage of io.cdap.cdap.internal.dataset.DatasetCreationSpec in the cdap project (by caskdata):
the createLocalDataset method of the DefaultWorkflowConfigurer class.
/**
 * Registers a local dataset to be created for each run of this Workflow.
 *
 * <p>If a local dataset with the same name was already registered, the registration is a no-op
 * when the specifications match, and fails when they differ, so a name maps to exactly one spec.
 *
 * @param datasetName name of the local dataset instance; must not be null
 * @param typeName name of the dataset type; must not be null
 * @param properties properties for the dataset instance; must not be null
 * @throws IllegalArgumentException if any argument is null, or if {@code datasetName} was
 *     previously registered with a different specification
 */
@Override
public void createLocalDataset(String datasetName, String typeName, DatasetProperties properties) {
  checkArgument(datasetName != null, "Dataset instance name cannot be null.");
  checkArgument(typeName != null, "Dataset type name cannot be null.");
  // Fixed message: was "Instance properties name cannot be null." — the properties themselves,
  // not a name, are being validated here.
  checkArgument(properties != null, "Instance properties cannot be null.");
  DatasetCreationSpec spec = new DatasetCreationSpec(datasetName, typeName, properties);
  DatasetCreationSpec existingSpec = localDatasetSpecs.get(datasetName);
  // Re-registering the same name is only allowed with an identical specification.
  if (existingSpec != null && !existingSpec.equals(spec)) {
    throw new IllegalArgumentException(String.format("DatasetInstance '%s' was added multiple times with" + " different specifications. Please resolve the conflict so" + " that there is only one specification for the local dataset" + " instance in the Workflow.", datasetName));
  }
  localDatasetSpecs.put(datasetName, spec);
}
Example usage of io.cdap.cdap.internal.dataset.DatasetCreationSpec in the cdap project (by caskdata):
the getWorkflowLocalDatasets method of the WorkflowHttpHandler class.
/**
 * Returns, as JSON, the local datasets that exist for the given workflow run, keyed by the
 * dataset name declared in the workflow specification.
 */
@GET
@Path("/apps/{app-id}/workflows/{workflow-id}/runs/{run-id}/localdatasets")
public void getWorkflowLocalDatasets(HttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespaceId, @PathParam("app-id") String applicationId, @PathParam("workflow-id") String workflowId, @PathParam("run-id") String runId) throws NotFoundException, DatasetManagementException, UnauthorizedException {
  // Resolves the spec for a valid run of this workflow; throws NotFoundException otherwise.
  WorkflowSpecification workflowSpec = getWorkflowSpecForValidRun(namespaceId, applicationId, workflowId, runId);
  Map<String, DatasetSpecificationSummary> summaries = new HashMap<>();
  for (Map.Entry<String, DatasetCreationSpec> entry : workflowSpec.getLocalDatasetSpecs().entrySet()) {
    // Local datasets are materialized per run under the name "<declared-name>.<run-id>".
    String mappedDatasetName = entry.getKey() + "." + runId;
    DatasetCreationSpec creationSpec = entry.getValue();
    String datasetType = creationSpec.getTypeName();
    Map<String, String> datasetProperties = creationSpec.getProperties().getProperties();
    // Only include datasets that were actually instantiated for this run.
    if (datasetFramework.hasInstance(new DatasetId(namespaceId, mappedDatasetName))) {
      summaries.put(entry.getKey(), new DatasetSpecificationSummary(mappedDatasetName, datasetType, datasetProperties));
    }
  }
  responder.sendJson(HttpResponseStatus.OK, GSON.toJson(summaries));
}
Example usage of io.cdap.cdap.internal.dataset.DatasetCreationSpec in the cdap project (by caskdata):
the createInstances method of the DatasetInstanceCreator class.
/**
 * Creates (or, when permitted, updates) the given dataset instances in the given namespace,
 * performing each dataset-framework call as the authorizing user.
 *
 * <p>For each entry: if no instance exists, a new one is added with the owner principal; if one
 * exists with a different type, the call fails; if one exists with the same type, it is updated
 * only when unchecked upgrades are allowed.
 *
 * @param namespaceId the namespace to create the dataset instances in
 * @param datasets the dataset creation specifications, keyed by instance name
 * @param ownerPrincipal the principal of the owner for the datasets to be created, or null
 * @param authorizingUser the authorizing user who will be making the call
 * @throws IncompatibleUpdateException if an existing instance has a different dataset type
 */
void createInstances(NamespaceId namespaceId, Map<String, DatasetCreationSpec> datasets, @Nullable final KerberosPrincipalId ownerPrincipal, String authorizingUser) throws Exception {
// create dataset instances
for (Map.Entry<String, DatasetCreationSpec> instanceEntry : datasets.entrySet()) {
String instanceName = instanceEntry.getKey();
final DatasetId instanceId = namespaceId.dataset(instanceName);
final DatasetCreationSpec instanceSpec = instanceEntry.getValue();
// Look up any existing instance under the authorizing user's identity, so the
// existence check is subject to the same authorization as the create/update below.
DatasetSpecification existingSpec = AuthorizationUtil.authorizeAs(authorizingUser, new Callable<DatasetSpecification>() {

@Override
public DatasetSpecification call() throws Exception {
return datasetFramework.getDatasetSpec(instanceId);
}
});
if (existingSpec == null) {
// No instance yet: create it with the requested type, properties and owner.
LOG.info("Adding dataset instance: {}", instanceName);
AuthorizationUtil.authorizeAs(authorizingUser, new Callable<Void>() {

@Override
public Void call() throws Exception {
datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId, instanceSpec.getProperties(), ownerPrincipal);
return null;
}
});
} else {
// An instance already exists: its type may never change across deployments.
if (!existingSpec.getType().equals(instanceSpec.getTypeName())) {
throw new IncompatibleUpdateException(String.format("Existing dataset '%s' of type '%s' may not be updated to type '%s'", instanceName, existingSpec.getType(), instanceSpec.getTypeName()));
}
// Same type: only re-apply properties when unchecked upgrades are enabled;
// otherwise the existing instance is left untouched.
if (allowDatasetUncheckedUpgrade) {
LOG.info("Updating dataset instance: {}", instanceName);
AuthorizationUtil.authorizeAs(authorizingUser, new Callable<Void>() {

@Override
public Void call() throws Exception {
datasetFramework.updateInstance(instanceId, instanceSpec.getProperties());
return null;
}
});
}
}
}
}
Example usage of io.cdap.cdap.internal.dataset.DatasetCreationSpec in the cdap project (by caskdata):
the deserialize method of the ApplicationSpecificationCodec class.
/**
 * Reconstructs an {@link ApplicationSpecification} from its JSON form, tolerating the absence
 * of fields that were introduced after older specs were persisted.
 */
@Override
public ApplicationSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
  JsonObject obj = json.getAsJsonObject();
  String name = obj.get("name").getAsString();
  // Older persisted specs predate "appVersion"; fall back to the default version.
  String appVersion = obj.has("appVersion") ? obj.get("appVersion").getAsString() : ApplicationId.DEFAULT_VERSION;
  String appCDAPVersion = null;
  if (obj.has("appCDAPVersion")) {
    appCDAPVersion = obj.get("appCDAPVersion").getAsString();
  }
  String description = obj.get("description").getAsString();
  // "configuration" is optional; null means the app was deployed without one.
  String configuration = obj.has("configuration") ? obj.get("configuration").getAsString() : null;
  ArtifactId artifactId = context.deserialize(obj.get("artifactId"), ArtifactId.class);
  // Each program/dataset category is stored as a name-keyed map of its spec type.
  Map<String, String> datasetModules = deserializeMap(obj.get("datasetModules"), context, String.class);
  Map<String, DatasetCreationSpec> datasetInstances = deserializeMap(obj.get("datasetInstances"), context, DatasetCreationSpec.class);
  Map<String, MapReduceSpecification> mapReduces = deserializeMap(obj.get("mapReduces"), context, MapReduceSpecification.class);
  Map<String, SparkSpecification> sparks = deserializeMap(obj.get("sparks"), context, SparkSpecification.class);
  Map<String, WorkflowSpecification> workflows = deserializeMap(obj.get("workflows"), context, WorkflowSpecification.class);
  Map<String, ServiceSpecification> services = deserializeMap(obj.get("services"), context, ServiceSpecification.class);
  Map<String, ScheduleCreationSpec> programSchedules = deserializeMap(obj.get("programSchedules"), context, ScheduleCreationSpec.class);
  Map<String, WorkerSpecification> workers = deserializeMap(obj.get("workers"), context, WorkerSpecification.class);
  Map<String, Plugin> plugins = deserializeMap(obj.get("plugins"), context, Plugin.class);
  return new DefaultApplicationSpecification(name, appVersion, appCDAPVersion, description, configuration, artifactId, datasetModules, datasetInstances, mapReduces, sparks, workflows, services, programSchedules, workers, plugins);
}
Example usage of io.cdap.cdap.internal.dataset.DatasetCreationSpec in the cdap project (by cdapio):
the verifyWorkflowWithLocalDatasetSpecification method of the WorkflowVerificationTest class.
/**
 * Verifies that the "WorkflowWithLocalDatasets" workflow spec declares the expected two action
 * nodes (an MR and a Spark program), all five local datasets with their types and properties,
 * and that the app spec carries the corresponding dataset modules.
 *
 * @param appSpec the deployed application specification under test
 */
private void verifyWorkflowWithLocalDatasetSpecification(ApplicationSpecification appSpec) {
  WorkflowSpecification spec = appSpec.getWorkflows().get("WorkflowWithLocalDatasets");
  List<WorkflowNode> nodes = spec.getNodes();
  // Use assertEquals/assertSame instead of assertTrue(a == b) / assertTrue(a.equals(b))
  // so failures report the actual and expected values.
  Assert.assertEquals(2, nodes.size());
  WorkflowNode node = nodes.get(0);
  Assert.assertSame(WorkflowNodeType.ACTION, node.getType());
  WorkflowActionNode actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.MAPREDUCE, "MR1"), actionNode.getProgram());
  node = nodes.get(1);
  Assert.assertSame(WorkflowNodeType.ACTION, node.getType());
  actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.SPARK, "SP1"), actionNode.getProgram());
  Map<String, DatasetCreationSpec> localDatasetSpecs = spec.getLocalDatasetSpecs();
  Assert.assertEquals(5, localDatasetSpecs.size());
  // Datasets declared without properties must carry an empty property map.
  DatasetCreationSpec datasetCreationSpec = localDatasetSpecs.get("mytable");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());
  datasetCreationSpec = localDatasetSpecs.get("myfile");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());
  datasetCreationSpec = localDatasetSpecs.get("myfile_with_properties");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("prop_value", datasetCreationSpec.getProperties().getProperties().get("prop_key"));
  datasetCreationSpec = localDatasetSpecs.get("mytablefromtype");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());
  datasetCreationSpec = localDatasetSpecs.get("myfilefromtype");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("another_prop_value", datasetCreationSpec.getProperties().getProperties().get("another_prop_key"));
  // Check if the application specification has correct modules
  Map<String, String> datasetModules = appSpec.getDatasetModules();
  Assert.assertEquals(2, datasetModules.size());
  Assert.assertTrue(datasetModules.containsKey(FileSet.class.getName()));
  Assert.assertTrue(datasetModules.containsKey(Table.class.getName()));
}
Aggregations