Use of co.cask.cdap.api.dataset.DatasetManagementException in project cdap by caskdata.
From the class DatasetServiceClient: the addInstance method.
public void addInstance(String datasetInstanceName, String datasetType, DatasetProperties props,
                        @Nullable KerberosPrincipalId owner) throws DatasetManagementException {
  String ownerPrincipal = owner == null ? null : owner.getPrincipal();
  DatasetInstanceConfiguration creationProperties =
    new DatasetInstanceConfiguration(datasetType, props.getProperties(), props.getDescription(), ownerPrincipal);
  HttpResponse response = doPut("datasets/" + datasetInstanceName, GSON.toJson(creationProperties));
  if (HttpResponseStatus.CONFLICT.getCode() == response.getResponseCode()) {
    throw new InstanceConflictException(
      String.format("Failed to add instance %s due to conflict, details: %s", datasetInstanceName, response));
  }
  if (HttpResponseStatus.OK.getCode() != response.getResponseCode()) {
    throw new DatasetManagementException(
      String.format("Failed to add instance %s, details: %s", datasetInstanceName, response));
  }
}
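For orientation, here is a minimal sketch of how a caller might invoke this method. It is not part of the snippet above: the client reference, logger, and dataset name are assumptions for illustration, and the handling simply separates the 409-conflict case from other failures.

// Assumed: "client" is an already-configured DatasetServiceClient and LOG is a logger;
// the dataset name "purchases" is illustrative only.
try {
  client.addInstance("purchases", Table.class.getName(), DatasetProperties.EMPTY, null); // null: no owner principal
} catch (InstanceConflictException e) {
  // dataset service answered 409: an instance with this name already exists
  LOG.warn("Dataset 'purchases' already exists", e);
} catch (DatasetManagementException e) {
  // any other non-OK response from the dataset service
  LOG.error("Failed to create dataset 'purchases'", e);
}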
Use of co.cask.cdap.api.dataset.DatasetManagementException in project cdap by caskdata.
From the class InMemoryDatasetFramework: the updateInstance method.
@Override
public void updateInstance(DatasetId datasetInstanceId, DatasetProperties props)
  throws DatasetManagementException, IOException {
  writeLock.lock();
  try {
    DatasetSpecification oldSpec = instances.get(datasetInstanceId.getParent(), datasetInstanceId);
    if (oldSpec == null) {
      throw new InstanceNotFoundException(datasetInstanceId.getEntityName());
    }
    DatasetDefinition def = getDefinitionForType(datasetInstanceId.getParent(), oldSpec.getType());
    if (def == null) {
      throw new DatasetManagementException(String.format(
        "Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
        oldSpec.getType(), datasetInstanceId.getParent()));
    }
    // recompute the specification from the new properties, based on the existing spec
    DatasetSpecification spec = AbstractDatasetDefinition
      .reconfigure(def, datasetInstanceId.getEntityName(), props, oldSpec)
      .setOriginalProperties(props);
    if (props.getDescription() != null) {
      spec = spec.setDescription(props.getDescription());
    }
    instances.put(datasetInstanceId.getParent(), datasetInstanceId, spec);
    // let the dataset admin apply the change to physical storage
    DatasetAdmin admin = def.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), spec, null);
    if (admin instanceof Updatable) {
      ((Updatable) admin).update(oldSpec);
    } else {
      admin.upgrade();
    }
    publishAudit(datasetInstanceId, AuditType.UPDATE);
  } catch (IncompatibleUpdateException e) {
    throw new InstanceConflictException("Update failed for dataset instance " + datasetInstanceId, e);
  } finally {
    writeLock.unlock();
  }
}
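A hedged usage sketch of this method through the DatasetFramework interface: the framework reference, namespace, dataset name, and the "ttl" property are illustrative assumptions, not taken from the code above.

// Assumed: "framework" is a DatasetFramework (e.g. an InMemoryDatasetFramework)
// that already holds an instance named "events" in the "demo" namespace.
DatasetId events = new NamespaceId("demo").dataset("events");
try {
  framework.updateInstance(events, DatasetProperties.builder().add("ttl", "86400").build());
} catch (InstanceConflictException e) {
  // reconfigure() judged the new properties an incompatible update
} catch (DatasetManagementException | IOException e) {
  // unregistered type, admin upgrade failure, etc.
}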
Use of co.cask.cdap.api.dataset.DatasetManagementException in project cdap by caskdata.
From the class InMemoryDatasetFramework: the truncateInstance method.
@Override
public void truncateInstance(DatasetId instanceId) throws DatasetManagementException, IOException {
  writeLock.lock();
  try {
    DatasetSpecification spec = instances.get(instanceId.getParent(), instanceId);
    if (spec == null) {
      throw new InstanceNotFoundException(instanceId.getEntityName());
    }
    DatasetDefinition def = getDefinitionForType(instanceId.getParent(), spec.getType());
    if (def == null) {
      throw new DatasetManagementException(String.format(
        "Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
        spec.getType(), instanceId.getParent()));
    }
    def.getAdmin(DatasetContext.from(instanceId.getNamespace()), spec, null).truncate();
    publishAudit(instanceId, AuditType.TRUNCATE);
  } finally {
    writeLock.unlock();
  }
}
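A short sketch of calling this method, mirroring the lookup-then-truncate flow above; the framework reference and dataset id are assumptions for illustration.

// Assumed: "framework" is a DatasetFramework with an instance named "events" in "demo".
DatasetId events = new NamespaceId("demo").dataset("events");
try {
  framework.truncateInstance(events); // drops all data but keeps the instance and its spec
} catch (InstanceNotFoundException e) {
  // no such instance in the namespace
} catch (DatasetManagementException | IOException e) {
  // unregistered type, or the dataset admin failed while truncating
}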
Use of co.cask.cdap.api.dataset.DatasetManagementException in project cdap by caskdata.
From the class AbstractDatasetFrameworkTest: the testBasicManagement method.
@Test
public void testBasicManagement() throws Exception {
  DatasetTypeId tableType = NAMESPACE_ID.datasetType(Table.class.getName());
  // Adding modules
  DatasetFramework framework = getFramework();
  framework.addModule(IN_MEMORY, new InMemoryTableModule());
  framework.addModule(CORE, new CoreDatasetsModule());
  framework.addModule(FILE, new FileSetModule());
  framework.addModule(KEY_VALUE, new SingleTypeModule(SimpleKVTable.class));
  // keyvalue has been added in the system namespace
  Assert.assertTrue(framework.hasSystemType(Table.class.getName()));
  Assert.assertFalse(framework.hasSystemType(SimpleKVTable.class.getName()));
  Assert.assertTrue(framework.hasType(tableType));
  Assert.assertTrue(framework.hasType(SIMPLE_KV_TYPE));
  // Creating instances
  framework.addInstance(Table.class.getName(), MY_TABLE, DatasetProperties.EMPTY);
  Assert.assertTrue(framework.hasInstance(MY_TABLE));
  DatasetSpecification spec = framework.getDatasetSpec(MY_TABLE);
  Assert.assertNotNull(spec);
  Assert.assertEquals(MY_TABLE.getEntityName(), spec.getName());
  Assert.assertEquals(Table.class.getName(), spec.getType());
  framework.addInstance(Table.class.getName(), MY_TABLE2, DatasetProperties.EMPTY);
  Assert.assertTrue(framework.hasInstance(MY_TABLE2));
  // Update instances
  File baseDir = TMP_FOLDER.newFolder();
  framework.addInstance(FileSet.class.getName(), MY_DS,
                        FileSetProperties.builder().setBasePath(baseDir.getPath()).setDataExternal(true).build());
  // this should fail because it would "internalize" external data
  try {
    framework.updateInstance(MY_DS, DatasetProperties.EMPTY);
    Assert.fail("update should have thrown instance conflict");
  } catch (InstanceConflictException e) {
    // expected
  }
  baseDir = TMP_FOLDER.newFolder();
  // this should succeed because it simply changes the external path
  framework.updateInstance(MY_DS,
                           FileSetProperties.builder().setBasePath(baseDir.getPath()).setDataExternal(true).build());
  spec = framework.getDatasetSpec(MY_DS);
  Assert.assertNotNull(spec);
  Assert.assertEquals(baseDir.getPath(), FileSetProperties.getBasePath(spec.getProperties()));
  // cleanup
  try {
    framework.deleteAllModules(NAMESPACE_ID);
    Assert.fail("should not delete modules: there are datasets using their types");
  } catch (DatasetManagementException e) {
    // expected
  }
  // types are still there
  Assert.assertTrue(framework.hasType(tableType));
  Assert.assertTrue(framework.hasType(SIMPLE_KV_TYPE));
  framework.deleteAllInstances(NAMESPACE_ID);
  Assert.assertEquals(0, framework.getInstances(NAMESPACE_ID).size());
  Assert.assertFalse(framework.hasInstance(MY_TABLE));
  Assert.assertNull(framework.getDatasetSpec(MY_TABLE));
  Assert.assertFalse(framework.hasInstance(MY_TABLE2));
  Assert.assertNull(framework.getDatasetSpec(MY_TABLE2));
  // now it should succeed
  framework.deleteAllModules(NAMESPACE_ID);
  Assert.assertTrue(framework.hasSystemType(Table.class.getName()));
  Assert.assertFalse(framework.hasType(tableType));
  Assert.assertFalse(framework.hasType(SIMPLE_KV_TYPE));
}
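The test encodes the lifecycle rule that modules cannot be deleted while dataset instances still use their types. A condensed sketch of that rule follows, with the framework, namespace constant, and instance name assumed for illustration.

// Assumed: "framework" already has the table module registered for NAMESPACE_ID.
DatasetId myTable = NAMESPACE_ID.dataset("myTable"); // hypothetical instance id
framework.addInstance(Table.class.getName(), myTable, DatasetProperties.EMPTY);
try {
  framework.deleteAllModules(NAMESPACE_ID); // fails: an instance still uses the Table type
} catch (DatasetManagementException e) {
  // expected while "myTable" exists
}
framework.deleteAllInstances(NAMESPACE_ID); // remove the instances first...
framework.deleteAllModules(NAMESPACE_ID);   // ...then module deletion succeeds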
Use of co.cask.cdap.api.dataset.DatasetManagementException in project cdap by caskdata.
From the class MetricsDataMigrator: the getOrCreateMetricsTable method.
private MetricsTable getOrCreateMetricsTable(String tableName, DatasetProperties empty)
  throws DataMigrationException {
  MetricsTable table = null;
  // for default namespace, we have to provide the complete table name.
  tableName = "system." + tableName;
  // metrics tables are in the system namespace
  DatasetId metricsDatasetInstanceId = NamespaceId.DEFAULT.dataset(tableName);
  try {
    table = DatasetsUtil.getOrCreateDataset(dsFramework, metricsDatasetInstanceId,
                                            MetricsTable.class.getName(), empty, null);
  } catch (DatasetManagementException | ServiceUnavailableException e) {
    String msg = String.format("Cannot access or create table %s.", tableName) + " " + e.getMessage();
    LOG.warn(msg);
    throw new DataMigrationException(msg);
  } catch (IOException e) {
    String msg = String.format("Exception while creating table %s", tableName);
    LOG.error(msg, e);
    throw new DataMigrationException(msg);
  }
  return table;
}