Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata.
From the class WorkflowHttpHandler, method getWorkflowLocalDatasets:
@GET
@Path("/apps/{app-id}/workflows/{workflow-id}/runs/{run-id}/localdatasets")
public void getWorkflowLocalDatasets(HttpRequest request, HttpResponder responder,
                                     @PathParam("namespace-id") String namespaceId,
                                     @PathParam("app-id") String applicationId,
                                     @PathParam("workflow-id") String workflowId,
                                     @PathParam("run-id") String runId)
  throws NotFoundException, DatasetManagementException {
  WorkflowSpecification workflowSpec = getWorkflowSpecForValidRun(namespaceId, applicationId, workflowId, runId);
  Map<String, DatasetSpecificationSummary> localDatasetSummaries = new HashMap<>();
  for (Map.Entry<String, DatasetCreationSpec> localDatasetEntry : workflowSpec.getLocalDatasetSpecs().entrySet()) {
    // Local dataset instances are scoped to a run as "<dataset-name>.<run-id>"
    String mappedDatasetName = localDatasetEntry.getKey() + "." + runId;
    String datasetType = localDatasetEntry.getValue().getTypeName();
    Map<String, String> datasetProperties = localDatasetEntry.getValue().getProperties().getProperties();
    if (datasetFramework.hasInstance(new DatasetId(namespaceId, mappedDatasetName))) {
      localDatasetSummaries.put(localDatasetEntry.getKey(),
                                new DatasetSpecificationSummary(mappedDatasetName, datasetType, datasetProperties));
    }
  }
  responder.sendJson(HttpResponseStatus.OK, localDatasetSummaries);
}
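For context, a minimal client-side sketch of calling this endpoint with java.net.HttpURLConnection. The router address, port, and the /v3/namespaces/{namespace-id} prefix below are assumptions for illustration; the real prefix comes from the class-level @Path on WorkflowHttpHandler.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class ListWorkflowLocalDatasets {
  // Assumed router address and API prefix; adjust to your deployment.
  private static final String BASE = "http://localhost:11015/v3/namespaces/default";

  static void printLocalDatasets(String appId, String workflowId, String runId) throws Exception {
    URL url = new URL(BASE + "/apps/" + appId + "/workflows/" + workflowId
                        + "/runs/" + runId + "/localdatasets");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    try (BufferedReader reader = new BufferedReader(
           new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
      // The handler responds with a JSON map of local dataset name to
      // DatasetSpecificationSummary (mapped name, type name, properties).
      reader.lines().forEach(System.out::println);
    } finally {
      conn.disconnect();
    }
  }
}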
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata.
From the class WorkflowHttpHandler, method deleteWorkflowLocalDatasets:
@DELETE
@Path("/apps/{app-id}/workflows/{workflow-id}/runs/{run-id}/localdatasets")
public void deleteWorkflowLocalDatasets(HttpRequest request, HttpResponder responder,
                                        @PathParam("namespace-id") String namespaceId,
                                        @PathParam("app-id") String applicationId,
                                        @PathParam("workflow-id") String workflowId,
                                        @PathParam("run-id") String runId) throws NotFoundException {
  WorkflowSpecification workflowSpec = getWorkflowSpecForValidRun(namespaceId, applicationId, workflowId, runId);
  Set<String> errorOnDelete = new HashSet<>();
  for (Map.Entry<String, DatasetCreationSpec> localDatasetEntry : workflowSpec.getLocalDatasetSpecs().entrySet()) {
    String mappedDatasetName = localDatasetEntry.getKey() + "." + runId;
    // Best-effort deletion of the local datasets.
    try {
      datasetFramework.deleteInstance(new DatasetId(namespaceId, mappedDatasetName));
    } catch (InstanceNotFoundException e) {
      // Dataset instance is already deleted, so this is a no-op.
    } catch (Throwable t) {
      errorOnDelete.add(mappedDatasetName);
      LOG.error("Failed to delete the Workflow local dataset {}. Reason - {}", mappedDatasetName, t.getMessage());
    }
  }
  if (errorOnDelete.isEmpty()) {
    responder.sendStatus(HttpResponseStatus.OK);
    return;
  }
  String errorMessage = "Failed to delete Workflow local datasets - " + Joiner.on(",").join(errorOnDelete);
  throw new RuntimeException(errorMessage);
}
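A corresponding client-side sketch for the delete endpoint, again with an assumed router address and API prefix:

import java.net.HttpURLConnection;
import java.net.URL;

public class DeleteWorkflowLocalDatasets {
  // Assumed router address and API prefix, as in the previous sketch.
  private static final String BASE = "http://localhost:11015/v3/namespaces/default";

  static int deleteLocalDatasets(String appId, String workflowId, String runId) throws Exception {
    URL url = new URL(BASE + "/apps/" + appId + "/workflows/" + workflowId
                        + "/runs/" + runId + "/localdatasets");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("DELETE");
    try {
      // 200 OK means every local dataset of the run was deleted or was already absent;
      // any failure to delete surfaces as an error response instead.
      return conn.getResponseCode();
    } finally {
      conn.disconnect();
    }
  }
}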
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata.
From the class WorkflowDriver, method createLocalDatasets:
private void createLocalDatasets() throws IOException, DatasetManagementException {
  String principal = programOptions.getArguments().getOption(ProgramOptionConstants.PRINCIPAL);
  final KerberosPrincipalId principalId = principal == null ? null : new KerberosPrincipalId(principal);
  for (final Map.Entry<String, String> entry : datasetFramework.getDatasetNameMapping().entrySet()) {
    final String localInstanceName = entry.getValue();
    final DatasetId instanceId = new DatasetId(workflowRunId.getNamespace(), localInstanceName);
    final DatasetCreationSpec instanceSpec = workflowSpec.getLocalDatasetSpecs().get(entry.getKey());
    LOG.debug("Adding Workflow local dataset instance: {}", localInstanceName);
    try {
      Retries.callWithRetries(new Retries.Callable<Void, Exception>() {
        @Override
        public Void call() throws Exception {
          datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId,
                                       addLocalDatasetProperty(instanceSpec.getProperties()), principalId);
          return null;
        }
      }, RetryStrategies.fixDelay(Constants.Retry.LOCAL_DATASET_OPERATION_RETRY_DELAY_SECONDS, TimeUnit.SECONDS));
    } catch (IOException | DatasetManagementException e) {
      throw e;
    } catch (Exception e) {
      // The callable can only throw IOException or DatasetManagementException, so this should never happen.
      throw new IllegalStateException(e);
    }
  }
}
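The snippet relies on CDAP's Retries and RetryStrategies utilities to retry local dataset creation with a fixed delay. As a rough illustration of that pattern only, a standalone fixed-delay retry helper might look like the following; FixedDelayRetry is hypothetical and not the CDAP API.

import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;

// Hypothetical stand-in for a fixed-delay retry helper; not the CDAP Retries/RetryStrategies API.
public final class FixedDelayRetry {
  private FixedDelayRetry() { }

  public static <T> T call(Callable<T> task, int maxAttempts, long delaySeconds) throws Exception {
    for (int attempt = 1; ; attempt++) {
      try {
        return task.call();
      } catch (Exception e) {
        if (attempt >= maxAttempts) {
          throw e;
        }
        // Wait a fixed delay before the next attempt, mirroring the idea behind RetryStrategies.fixDelay.
        TimeUnit.SECONDS.sleep(delaySeconds);
      }
    }
  }
}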
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata.
From the class DatasetInstanceCreator, method createInstances:
/**
 * Creates the given dataset instances in the specified namespace, or updates existing instances
 * when unchecked upgrades are allowed.
 *
 * @param namespaceId the namespace to create the dataset instances in
 * @param datasets the datasets to create
 * @param ownerPrincipal the principal of the owner for the datasets to be created
 */
void createInstances(NamespaceId namespaceId, Map<String, DatasetCreationSpec> datasets,
                     @Nullable KerberosPrincipalId ownerPrincipal) throws Exception {
  // create dataset instances
  for (Map.Entry<String, DatasetCreationSpec> instanceEntry : datasets.entrySet()) {
    String instanceName = instanceEntry.getKey();
    DatasetId instanceId = namespaceId.dataset(instanceName);
    DatasetCreationSpec instanceSpec = instanceEntry.getValue();
    DatasetSpecification existingSpec = datasetFramework.getDatasetSpec(instanceId);
    if (existingSpec == null) {
      LOG.info("Adding dataset instance: {}", instanceName);
      datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId, instanceSpec.getProperties(), ownerPrincipal);
    } else {
      // An existing instance may never change its type.
      if (!existingSpec.getType().equals(instanceSpec.getTypeName())) {
        throw new IncompatibleUpdateException(
          String.format("Existing dataset '%s' of type '%s' may not be updated to type '%s'",
                        instanceName, existingSpec.getType(), instanceSpec.getTypeName()));
      }
      if (allowDatasetUncheckedUpgrade) {
        LOG.info("Updating dataset instance: {}", instanceName);
        datasetFramework.updateInstance(instanceId, instanceSpec.getProperties());
      }
    }
  }
}
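A rough sketch of how a caller might drive this method. It assumes the code sits in the same package as DatasetInstanceCreator (createInstances is package-private), that a constructed creator is available, and that the "table" type name with empty properties is just an illustrative spec:

import java.util.HashMap;
import java.util.Map;

import co.cask.cdap.api.dataset.DatasetProperties;
import co.cask.cdap.internal.dataset.DatasetCreationSpec;
import co.cask.cdap.proto.id.NamespaceId;

class CreateInstancesExample {
  // 'creator' is assumed to be constructed elsewhere with a DatasetFramework.
  static void createAppDatasets(DatasetInstanceCreator creator) throws Exception {
    Map<String, DatasetCreationSpec> datasets = new HashMap<>();
    // Illustrative spec: a "table"-typed instance with default properties.
    datasets.put("purchases", new DatasetCreationSpec("purchases", "table", DatasetProperties.EMPTY));
    // Pass a null owner principal when the datasets are not Kerberos-owned.
    creator.createInstances(new NamespaceId("default"), datasets, null);
  }
}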
Use of co.cask.cdap.internal.dataset.DatasetCreationSpec in project cdap by caskdata.
From the class DefaultWorkflowConfigurer, method createLocalDataset:
@Override
public void createLocalDataset(String datasetName, String typeName, DatasetProperties properties) {
  checkArgument(datasetName != null, "Dataset instance name cannot be null.");
  checkArgument(typeName != null, "Dataset type name cannot be null.");
  checkArgument(properties != null, "Instance properties cannot be null.");
  DatasetCreationSpec spec = new DatasetCreationSpec(datasetName, typeName, properties);
  DatasetCreationSpec existingSpec = localDatasetSpecs.get(datasetName);
  if (existingSpec != null && !existingSpec.equals(spec)) {
    throw new IllegalArgumentException(String.format(
      "DatasetInstance '%s' was added multiple times with different specifications. Please resolve the conflict "
        + "so that there is only one specification for the local dataset instance in the Workflow.", datasetName));
  }
  localDatasetSpecs.put(datasetName, spec);
}
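This configurer method is what backs the createLocalDataset call available to Workflows. A minimal sketch of a workflow declaring a run-scoped local dataset in its configure() method, assuming the standard AbstractWorkflow and KeyValueTable APIs; the class and program names are illustrative:

import co.cask.cdap.api.dataset.DatasetProperties;
import co.cask.cdap.api.dataset.lib.KeyValueTable;
import co.cask.cdap.api.workflow.AbstractWorkflow;

public class WordCountWorkflow extends AbstractWorkflow {
  @Override
  protected void configure() {
    setName("WordCountWorkflow");
    setDescription("Counts words using a run-scoped local dataset");
    // Declare a local dataset; it is created as "<name>.<run-id>" when a run starts
    // and cleaned up after the run, as shown in the handler and driver snippets above.
    createLocalDataset("intermediateCounts", KeyValueTable.class.getName(), DatasetProperties.EMPTY);
    // "WordCounter" is an illustrative MapReduce program that would use the local dataset.
    addMapReduce("WordCounter");
  }
}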