Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
The class DatasetAdminOpHTTPHandler, method update:
@POST
@Path("/data/datasets/{name}/admin/update")
public void update(HttpRequest request, HttpResponder responder,
                   @PathParam("namespace-id") String namespaceId, @PathParam("name") String name) {
  propagateUserId(request);
  InternalDatasetUpdateParams params =
    GSON.fromJson(request.getContent().toString(Charsets.UTF_8), InternalDatasetUpdateParams.class);
  Preconditions.checkArgument(params.getProperties() != null, "Missing required 'instanceProps' parameter.");
  Preconditions.checkArgument(params.getTypeMeta() != null, "Missing required 'typeMeta' parameter.");
  Preconditions.checkArgument(params.getExistingSpec() != null, "Missing required 'existingSpec' parameter.");
  DatasetProperties props = params.getProperties();
  DatasetSpecification existing = params.getExistingSpec();
  DatasetTypeMeta typeMeta = params.getTypeMeta();
  try {
    DatasetId instanceId = new DatasetId(namespaceId, name);
    DatasetSpecification spec = datasetAdminService.createOrUpdate(instanceId, typeMeta, props, existing);
    responder.sendJson(HttpResponseStatus.OK, spec);
  } catch (NotFoundException e) {
    LOG.debug("Got handler exception", e);
    responder.sendString(HttpResponseStatus.NOT_FOUND, StringUtils.defaultIfEmpty(e.getMessage(), ""));
  } catch (BadRequestException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (IncompatibleUpdateException e) {
    responder.sendString(HttpResponseStatus.CONFLICT, e.getMessage());
  } catch (Exception e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
The class DatasetInstanceCreator, method createInstances:
/**
 * Creates the given dataset instances in the given namespace, adding each instance that does not
 * exist yet and, when unchecked upgrades are allowed, updating the properties of instances that do.
 *
 * @param namespaceId the namespace to create the dataset instances in
 * @param datasets the datasets to create
 * @param ownerPrincipal the principal of the owner for the datasets to be created
 */
void createInstances(NamespaceId namespaceId, Map<String, DatasetCreationSpec> datasets,
                     @Nullable KerberosPrincipalId ownerPrincipal) throws Exception {
  // create dataset instances
  for (Map.Entry<String, DatasetCreationSpec> instanceEntry : datasets.entrySet()) {
    String instanceName = instanceEntry.getKey();
    DatasetId instanceId = namespaceId.dataset(instanceName);
    DatasetCreationSpec instanceSpec = instanceEntry.getValue();
    DatasetSpecification existingSpec = datasetFramework.getDatasetSpec(instanceId);
    if (existingSpec == null) {
      LOG.info("Adding dataset instance: {}", instanceName);
      datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId, instanceSpec.getProperties(), ownerPrincipal);
    } else {
      if (!existingSpec.getType().equals(instanceSpec.getTypeName())) {
        throw new IncompatibleUpdateException(
          String.format("Existing dataset '%s' of type '%s' may not be updated to type '%s'",
                        instanceName, existingSpec.getType(), instanceSpec.getTypeName()));
      }
      if (allowDatasetUncheckedUpgrade) {
        LOG.info("Updating dataset instance: {}", instanceName);
        datasetFramework.updateInstance(instanceId, instanceSpec.getProperties());
      }
    }
  }
}
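For context, a minimal caller-side sketch of the rule createInstances enforces: an existing dataset may have its properties updated, but reusing its name with a different type is rejected with IncompatibleUpdateException. The namespace, dataset name, type names, and property key below are illustrative, not taken from the project; only the DatasetFramework calls mirror the snippet above.

// Hypothetical names throughout; the calls mirror createInstances above.
DatasetId ordersId = new NamespaceId("default").dataset("orders");

// First deployment: the instance does not exist yet, so it is added (no owner principal here).
datasetFramework.addInstance("table", ordersId, DatasetProperties.EMPTY, null);

// Redeployment with the same type: only the properties change, which is allowed.
datasetFramework.updateInstance(ordersId,
  DatasetProperties.builder().add("some.property", "new-value").build());

// Redeploying the same name with a different type would instead fail inside createInstances with
// IncompatibleUpdateException ("Existing dataset 'orders' of type 'table' may not be updated to type '...'").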
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
The class DatasetDefinitionRegistryWithDefaultModules, method testCompositeDatasetReconfigure:
// tests that CompositeDatasetDefinition correctly delegates reconfigure() to its embedded types
@Test
public void testCompositeDatasetReconfigure() throws IncompatibleUpdateException {
  CompositeDatasetDefinition composite =
    new CompositeDatasetDefinition("composite", "pedantic", new PedanticDatasetDefinition("pedantic")) {

      @Override
      public Dataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                Map arguments, ClassLoader classLoader) throws IOException {
        return null;
      }
    };
  DatasetSpecification spec = composite.configure("nn", DatasetProperties.EMPTY);
  DatasetSpecification respec = composite.reconfigure("nn", DatasetProperties.EMPTY, spec);
  Assert.assertEquals(spec, respec);
  try {
    composite.reconfigure("nn", DatasetProperties.builder().add("immutable", "x").build(), spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }
}
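The test relies on the embedded "pedantic" definition rejecting any change to its "immutable" property. PedanticDatasetDefinition itself is test-support code not shown here; the fragment below is only a sketch of the kind of reconfigure() override that produces this behavior (the property name comes from the test, everything else is illustrative).

// Sketch only, not the real PedanticDatasetDefinition: a reconfigure() that refuses to change
// the value of the "immutable" property and otherwise behaves like configure().
@Override
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                        DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  String oldValue = currentSpec.getProperty("immutable");
  String newValue = newProperties.getProperties().get("immutable");
  if (!Objects.equals(oldValue, newValue)) {   // java.util.Objects
    throw new IncompatibleUpdateException(
      String.format("Cannot change property 'immutable' from '%s' to '%s'", oldValue, newValue));
  }
  return configure(instanceName, newProperties);
}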
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
The class InMemoryDatasetFramework, method updateInstance:
@Override
public void updateInstance(DatasetId datasetInstanceId, DatasetProperties props)
  throws DatasetManagementException, IOException {
  writeLock.lock();
  try {
    DatasetSpecification oldSpec = instances.get(datasetInstanceId.getParent(), datasetInstanceId);
    if (oldSpec == null) {
      throw new InstanceNotFoundException(datasetInstanceId.getEntityName());
    }
    DatasetDefinition def = getDefinitionForType(datasetInstanceId.getParent(), oldSpec.getType());
    if (def == null) {
      throw new DatasetManagementException(
        String.format("Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
                      oldSpec.getType(), datasetInstanceId.getParent()));
    }
    DatasetSpecification spec = AbstractDatasetDefinition
      .reconfigure(def, datasetInstanceId.getEntityName(), props, oldSpec)
      .setOriginalProperties(props);
    if (props.getDescription() != null) {
      spec = spec.setDescription(props.getDescription());
    }
    instances.put(datasetInstanceId.getParent(), datasetInstanceId, spec);
    DatasetAdmin admin = def.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), spec, null);
    if (admin instanceof Updatable) {
      ((Updatable) admin).update(oldSpec);
    } else {
      admin.upgrade();
    }
    publishAudit(datasetInstanceId, AuditType.UPDATE);
  } catch (IncompatibleUpdateException e) {
    throw new InstanceConflictException("Update failed for dataset instance " + datasetInstanceId, e);
  } finally {
    writeLock.unlock();
  }
}
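Because updateInstance wraps IncompatibleUpdateException in InstanceConflictException, callers of the framework see an incompatible reconfiguration as a conflict. A minimal sketch of that caller side follows; the dataset name and property key are hypothetical, only the call and the exception type mirror the method above.

// Hypothetical dataset id and property; the call and the caught exception mirror updateInstance above.
DatasetId instanceId = new NamespaceId("default").dataset("purchases");
DatasetProperties newProps = DatasetProperties.builder()
  .add("immutable", "changed")   // a property the dataset's definition may reject on reconfigure
  .build();
try {
  datasetFramework.updateInstance(instanceId, newProps);
} catch (InstanceConflictException e) {
  // IncompatibleUpdateException thrown by reconfigure() surfaces here, wrapped as a conflict.
  LOG.warn("Incompatible update for dataset {}: {}", instanceId, e.getMessage());
}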
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
The class DatasetAdminOpHTTPHandler, method update (a second variant of the handler shown above, built against the newer Netty HTTP types):
@POST
@Path("/data/datasets/{name}/admin/update")
public void update(FullHttpRequest request, HttpResponder responder,
                   @PathParam("namespace-id") String namespaceId, @PathParam("name") String name) {
  propagateUserId(request);
  InternalDatasetUpdateParams params =
    GSON.fromJson(request.content().toString(StandardCharsets.UTF_8), InternalDatasetUpdateParams.class);
  Preconditions.checkArgument(params.getProperties() != null, "Missing required 'instanceProps' parameter.");
  Preconditions.checkArgument(params.getTypeMeta() != null, "Missing required 'typeMeta' parameter.");
  Preconditions.checkArgument(params.getExistingSpec() != null, "Missing required 'existingSpec' parameter.");
  DatasetProperties props = params.getProperties();
  DatasetSpecification existing = params.getExistingSpec();
  DatasetTypeMeta typeMeta = params.getTypeMeta();
  try {
    DatasetId instanceId = new DatasetId(namespaceId, name);
    DatasetSpecification spec = datasetAdminService.createOrUpdate(instanceId, typeMeta, props, existing);
    responder.sendJson(HttpResponseStatus.OK, GSON.toJson(spec));
  } catch (NotFoundException e) {
    LOG.debug("Got handler exception", e);
    responder.sendString(HttpResponseStatus.NOT_FOUND, StringUtils.defaultIfEmpty(e.getMessage(), ""));
  } catch (BadRequestException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (IncompatibleUpdateException e) {
    responder.sendString(HttpResponseStatus.CONFLICT, e.getMessage());
  } catch (Exception e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}