Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata — from class WorkflowVerificationTest, method verifyWorkflowWithLocalDatasetSpecification.
/**
 * Verifies the "WorkflowWithLocalDatasets" workflow of the given application spec:
 * its two action nodes (MapReduce "MR1" followed by Spark "SP1"), the five local
 * dataset specifications it declares, and the dataset modules registered on the app.
 *
 * @param appSpec the application specification under test
 */
private void verifyWorkflowWithLocalDatasetSpecification(ApplicationSpecification appSpec) {
  WorkflowSpecification spec = appSpec.getWorkflows().get("WorkflowWithLocalDatasets");
  List<WorkflowNode> nodes = spec.getNodes();
  // Use assertEquals instead of assertTrue(x == y) / assertTrue(a.equals(b)):
  // on failure it reports the expected and actual values instead of just "false".
  Assert.assertEquals(2, nodes.size());

  WorkflowNode node = nodes.get(0);
  Assert.assertEquals(WorkflowNodeType.ACTION, node.getType());
  WorkflowActionNode actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.MAPREDUCE, "MR1"),
                      actionNode.getProgram());

  node = nodes.get(1);
  Assert.assertEquals(WorkflowNodeType.ACTION, node.getType());
  actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.SPARK, "SP1"),
                      actionNode.getProgram());

  Map<String, DatasetCreationSpec> localDatasetSpecs = spec.getLocalDatasetSpecs();
  Assert.assertEquals(5, localDatasetSpecs.size());

  // "mytable": a Table with no extra properties.
  DatasetCreationSpec datasetCreationSpec = localDatasetSpecs.get("mytable");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());

  // "myfile": a FileSet with no extra properties.
  datasetCreationSpec = localDatasetSpecs.get("myfile");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());

  // "myfile_with_properties": a FileSet carrying a custom property.
  datasetCreationSpec = localDatasetSpecs.get("myfile_with_properties");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("prop_value",
                      datasetCreationSpec.getProperties().getProperties().get("prop_key"));

  // "mytablefromtype": a Table declared via its type name.
  datasetCreationSpec = localDatasetSpecs.get("mytablefromtype");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());

  // "myfilefromtype": a FileSet declared via its type name, carrying a custom property.
  datasetCreationSpec = localDatasetSpecs.get("myfilefromtype");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("another_prop_value",
                      datasetCreationSpec.getProperties().getProperties().get("another_prop_key"));

  // Check if the application specification has correct modules
  Map<String, String> datasetModules = appSpec.getDatasetModules();
  Assert.assertEquals(2, datasetModules.size());
  Assert.assertTrue(datasetModules.containsKey(FileSet.class.getName()));
  Assert.assertTrue(datasetModules.containsKey(Table.class.getName()));
}
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata — from class WorkflowSpecificationCodec, method deserialize.
/**
 * Reconstructs a {@link WorkflowSpecification} from its JSON form: the scalar fields
 * plus the node list and the local-dataset spec map.
 */
@Override
public WorkflowSpecification deserialize(JsonElement json, Type typeOfT,
                                         JsonDeserializationContext context) throws JsonParseException {
  JsonObject obj = json.getAsJsonObject();
  Map<String, String> workflowProperties =
      deserializeMap(obj.get("properties"), context, String.class);
  List<WorkflowNode> workflowNodes =
      deserializeList(obj.get("nodes"), context, WorkflowNode.class);
  Map<String, DatasetCreationSpec> localDatasetSpecs =
      deserializeMap(obj.get("localDatasetSpecs"), context, DatasetCreationSpec.class);
  return new WorkflowSpecification(obj.get("className").getAsString(),
                                   obj.get("name").getAsString(),
                                   obj.get("description").getAsString(),
                                   workflowProperties, workflowNodes, localDatasetSpecs);
}
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata — from class ApplicationSpecificationCodec, method deserialize.
/**
 * Reconstructs an {@link ApplicationSpecification} from its JSON form. The
 * "appVersion" and "configuration" fields are optional; when absent they default to
 * {@code ApplicationId.DEFAULT_VERSION} and {@code null} respectively.
 */
@Override
public ApplicationSpecification deserialize(JsonElement json, Type typeOfT,
                                            JsonDeserializationContext context) throws JsonParseException {
  JsonObject obj = json.getAsJsonObject();
  String appName = obj.get("name").getAsString();
  // Optional fields: use the stored value when present, otherwise a default.
  String appVersion = obj.has("appVersion")
      ? obj.get("appVersion").getAsString()
      : ApplicationId.DEFAULT_VERSION;
  String appDescription = obj.get("description").getAsString();
  String configuration = obj.has("configuration")
      ? obj.get("configuration").getAsString()
      : null;
  ArtifactId artifactId = context.deserialize(obj.get("artifactId"), ArtifactId.class);

  // Each program/resource category is stored as a name-keyed map.
  Map<String, StreamSpecification> streams =
      deserializeMap(obj.get("streams"), context, StreamSpecification.class);
  Map<String, String> datasetModules =
      deserializeMap(obj.get("datasetModules"), context, String.class);
  Map<String, DatasetCreationSpec> datasetInstances =
      deserializeMap(obj.get("datasetInstances"), context, DatasetCreationSpec.class);
  Map<String, FlowSpecification> flows =
      deserializeMap(obj.get("flows"), context, FlowSpecification.class);
  Map<String, MapReduceSpecification> mapReduces =
      deserializeMap(obj.get("mapReduces"), context, MapReduceSpecification.class);
  Map<String, SparkSpecification> sparks =
      deserializeMap(obj.get("sparks"), context, SparkSpecification.class);
  Map<String, WorkflowSpecification> workflows =
      deserializeMap(obj.get("workflows"), context, WorkflowSpecification.class);
  Map<String, ServiceSpecification> services =
      deserializeMap(obj.get("services"), context, ServiceSpecification.class);
  Map<String, ScheduleSpecification> schedules =
      deserializeMap(obj.get("schedules"), context, ScheduleSpecification.class);
  Map<String, ScheduleCreationSpec> programSchedules =
      deserializeMap(obj.get("programSchedules"), context, ScheduleCreationSpec.class);
  Map<String, WorkerSpecification> workers =
      deserializeMap(obj.get("workers"), context, WorkerSpecification.class);
  Map<String, Plugin> plugins =
      deserializeMap(obj.get("plugins"), context, Plugin.class);

  return new DefaultApplicationSpecification(appName, appVersion, appDescription,
      configuration, artifactId, streams, datasetModules, datasetInstances, flows,
      mapReduces, sparks, workflows, services, schedules, programSchedules, workers,
      plugins);
}
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata — from class ApplicationVerificationStage, method verifyData.
/**
 * Verifies the datasets and streams declared by the application: every dataset spec
 * must pass its verifier, must not clash with an already-deployed dataset of a
 * different type, and — when the dataset or stream already exists — its owner must
 * match the principal being deployed with.
 *
 * @param specifiedOwnerPrincipal owner to check existing entities against; may be null
 * @throws RuntimeException if a verifier rejects a dataset or stream spec
 * @throws DataSetException if a dataset name is already taken by a different type
 */
protected void verifyData(ApplicationId appId, ApplicationSpecification specification,
                          @Nullable KerberosPrincipalId specifiedOwnerPrincipal)
    throws DatasetManagementException, UnauthorizedException {
  // NOTE: no special restrictions on dataset module names, etc
  for (DatasetCreationSpec creationSpec : specification.getDatasets().values()) {
    VerifyResult verifyResult = getVerifier(DatasetCreationSpec.class).verify(appId, creationSpec);
    if (!verifyResult.isSuccess()) {
      throw new RuntimeException(verifyResult.getMessage());
    }
    String instanceName = creationSpec.getInstanceName();
    DatasetId datasetInstanceId = appId.getParent().dataset(instanceName);
    DatasetSpecification deployedSpec = dsFramework.getDatasetSpec(datasetInstanceId);
    if (deployedSpec != null) {
      if (!deployedSpec.getType().equals(creationSpec.getTypeName())) {
        // New app trying to deploy an dataset with same instanceName but different Type than that of existing.
        throw new DataSetException(String.format(
            "Cannot Deploy Dataset : %s with Type : %s : Dataset with different Type Already Exists",
            instanceName, creationSpec.getTypeName()));
      }
      // if the dataset existed verify its owner is same.
      verifyOwner(datasetInstanceId, specifiedOwnerPrincipal);
    }
  }
  for (StreamSpecification streamSpec : specification.getStreams().values()) {
    VerifyResult verifyResult = getVerifier(StreamSpecification.class).verify(appId, streamSpec);
    if (!verifyResult.isSuccess()) {
      throw new RuntimeException(verifyResult.getMessage());
    }
    // if the stream existed verify the owner to be the same
    if (store.getStream(appId.getNamespaceId(), streamSpec.getName()) != null) {
      verifyOwner(appId.getParent().stream(streamSpec.getName()), specifiedOwnerPrincipal);
    }
  }
}
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata — from class DefaultDatasetConfigurer, method createDataset.
/**
 * Registers a dataset instance to be created with the given type and properties.
 * Re-registering the same instance name is allowed only when the new specification
 * is identical to the one already recorded.
 *
 * @throws IllegalArgumentException if any argument is null, or if the instance name
 *     was previously registered with a different specification
 */
@Override
public void createDataset(String datasetInstanceName, String typeName, DatasetProperties properties) {
  checkArgument(datasetInstanceName != null, "Dataset instance name cannot be null.");
  checkArgument(typeName != null, "Dataset type name cannot be null.");
  checkArgument(properties != null, "Instance properties name cannot be null.");

  DatasetCreationSpec newSpec = new DatasetCreationSpec(datasetInstanceName, typeName, properties);
  DatasetCreationSpec previousSpec = datasetSpecs.get(datasetInstanceName);
  // A conflicting re-registration is an error; an identical one is a no-op overwrite.
  if (previousSpec != null && !previousSpec.equals(newSpec)) {
    throw new IllegalArgumentException(String.format(
        "DatasetInstance '%s' was added multiple times with different specifications. "
            + "Please resolve the conflict so that there is only one specification for "
            + "the dataset instance.",
        datasetInstanceName));
  }
  datasetSpecs.put(datasetInstanceName, newSpec);
}
Aggregations