Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
From the class DatasetDefinitionRegistryWithDefaultModules, method testIndexedTableReconfigure.
@Test
public void testIndexedTableReconfigure() throws IncompatibleUpdateException {
  DatasetDefinition indexedTableDef = registry.get(IndexedTable.class.getName());
  Assert.assertTrue(indexedTableDef instanceof Reconfigurable);

  DatasetProperties props = TableProperties.builder()
    .setReadlessIncrementSupport(false)
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "a,b,c")
    .build();
  DatasetSpecification spec = indexedTableDef.configure("idxtb", props);

  // turning readless increment support on is ok, as is reordering the index columns
  DatasetProperties compat = TableProperties.builder()
    .setReadlessIncrementSupport(true)
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "c,b,a")
    .build();
  spec = ((Reconfigurable) indexedTableDef).reconfigure("idxtb", compat, spec);

  // changing the set of index columns is not ok
  DatasetProperties incompat = TableProperties.builder()
    .setReadlessIncrementSupport(true)
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "a,d")
    .build();
  try {
    ((Reconfigurable) indexedTableDef).reconfigure("idxtb", incompat, spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }

  // turning readless increment support off is not ok
  incompat = TableProperties.builder()
    .setReadlessIncrementSupport(false)
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "a,b,c")
    .build();
  try {
    ((Reconfigurable) indexedTableDef).reconfigure("idxtb", incompat, spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }
}
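The test above exercises the Reconfigurable contract from co.cask.cdap.api.dataset. For readers unfamiliar with that contract, here is a minimal sketch of a reconfigure implementation that enforces the index-column rule the test implies. The class name and the exact compatibility logic are illustrative assumptions, not CDAP's actual IndexedTableDefinition (which, as the test shows, also rejects disabling readless increments); a real definition class would implement DatasetDefinition as well, omitted here for brevity.

import java.util.Arrays;
import java.util.HashSet;

import co.cask.cdap.api.dataset.DatasetProperties;
import co.cask.cdap.api.dataset.DatasetSpecification;
import co.cask.cdap.api.dataset.IncompatibleUpdateException;
import co.cask.cdap.api.dataset.Reconfigurable;
import co.cask.cdap.api.dataset.lib.IndexedTable;

// hypothetical sketch -- not CDAP's actual definition class
public class SketchIndexedTableDefinition implements Reconfigurable {

  @Override
  public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProps,
                                          DatasetSpecification currentSpec)
    throws IncompatibleUpdateException {
    String oldColumns = currentSpec.getProperty(IndexedTable.INDEX_COLUMNS_CONF_KEY);
    String newColumns = newProps.getProperties().get(IndexedTable.INDEX_COLUMNS_CONF_KEY);
    // the test shows that reordering the columns ("a,b,c" -> "c,b,a") is compatible,
    // while changing the set of columns ("a,b,c" -> "a,d") is not
    if (oldColumns != null && newColumns != null
        && !new HashSet<>(Arrays.asList(oldColumns.split(","))).equals(
              new HashSet<>(Arrays.asList(newColumns.split(","))))) {
      throw new IncompatibleUpdateException(
        String.format("Cannot change index columns from '%s' to '%s'", oldColumns, newColumns));
    }
    // compatible change: build a new specification from the new properties
    return DatasetSpecification.builder(instanceName, currentSpec.getType())
      .properties(newProps.getProperties())
      .build();
  }
}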
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
From the class DatasetDefinitionRegistryWithDefaultModules, method testTimeSeriesReconfigure.
private void testTimeSeriesReconfigure(DatasetDefinition def) throws IncompatibleUpdateException {
  DatasetProperties props = DatasetProperties.builder()
    .add(TimeseriesDataset.ATTR_TIME_INTERVAL_TO_STORE_PER_ROW, String.valueOf(TimeUnit.HOURS.toMillis(1)))
    .build();
  // adding a TTL is a compatible change
  DatasetProperties compatProps = TableProperties.builder()
    .setTTL(TimeUnit.DAYS.toSeconds(1))
    .add(TimeseriesDataset.ATTR_TIME_INTERVAL_TO_STORE_PER_ROW, String.valueOf(TimeUnit.HOURS.toMillis(1)))
    .build();
  // changing the time interval stored per row is incompatible with already-written data
  DatasetProperties incompatProps = TableProperties.builder()
    .setTTL(TimeUnit.DAYS.toSeconds(1))
    .add(TimeseriesDataset.ATTR_TIME_INTERVAL_TO_STORE_PER_ROW, String.valueOf(TimeUnit.HOURS.toMillis(2)))
    .build();

  DatasetSpecification spec = def.configure("tt", props);
  Assert.assertTrue(def instanceof Reconfigurable);
  ((Reconfigurable) def).reconfigure("tt", compatProps, spec);
  try {
    ((Reconfigurable) def).reconfigure("tt", incompatProps, spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }
}
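This helper is private; a plausible @Test entry point (a sketch — the actual test method is not shown in this excerpt) would run it against both of CDAP's built-in time series dataset types, since both store a fixed time interval per row:

@Test
public void testTimeSeriesReconfigure() throws IncompatibleUpdateException {
  // TimeseriesTable and CounterTimeseriesTable are the built-in time series datasets;
  // the same row-interval compatibility rule applies to each
  testTimeSeriesReconfigure(registry.get(TimeseriesTable.class.getName()));
  testTimeSeriesReconfigure(registry.get(CounterTimeseriesTable.class.getName()));
}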
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
From the class DatasetDefinitionRegistryWithDefaultModules, method testPFSReconfigure.
@Test
public void testPFSReconfigure() {
  DatasetDefinition pfsDef = registry.get(PartitionedFileSet.class.getName());
  Assert.assertTrue(pfsDef instanceof Reconfigurable);

  DatasetProperties props = PartitionedFileSetProperties.builder()
    .setPartitioning(Partitioning.builder().addIntField("i").addStringField("s").build())
    .build();
  DatasetSpecification spec = pfsDef.configure("pfs", props);

  // dropping a partitioning field is not ok
  DatasetProperties noIprops = PartitionedFileSetProperties.builder()
    .setPartitioning(Partitioning.builder().addStringField("s").build())
    .build();
  try {
    ((Reconfigurable) pfsDef).reconfigure("pfs", noIprops, spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }

  // changing a field's type (int -> long) is not ok
  DatasetProperties longIprops = PartitionedFileSetProperties.builder()
    .setPartitioning(Partitioning.builder().addLongField("i").addStringField("s").build())
    .build();
  try {
    ((Reconfigurable) pfsDef).reconfigure("pfs", longIprops, spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }

  // reversing the order of the partitioning fields is not ok either
  DatasetProperties revProps = PartitionedFileSetProperties.builder()
    .setPartitioning(Partitioning.builder().addStringField("s").addIntField("i").build())
    .build();
  try {
    ((Reconfigurable) pfsDef).reconfigure("pfs", revProps, spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }
}
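All three failures reduce to one rule: the partitioning of an existing PartitionedFileSet cannot change at all — not the field set, not a field's type, not even the field order. A sketch of such a check (a hypothetical helper illustrating the rule the test implies, not CDAP's implementation) might compare the ordered field maps directly:

// hypothetical helper; Partitioning comes from co.cask.cdap.api.dataset.lib
static void validatePartitioningUnchanged(Partitioning oldPartitioning, Partitioning newPartitioning)
  throws IncompatibleUpdateException {
  // Partitioning.getFields() preserves insertion order; comparing entry *lists*
  // (rather than the maps, whose equals() ignores order) makes the check
  // sensitive to field names, field types, and field order alike
  if (!new ArrayList<>(oldPartitioning.getFields().entrySet())
         .equals(new ArrayList<>(newPartitioning.getFields().entrySet()))) {
    throw new IncompatibleUpdateException(
      String.format("Partitioning cannot be changed. Existing: %s, new: %s",
                    oldPartitioning.getFields(), newPartitioning.getFields()));
  }
}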
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
From the class DatasetDefinitionRegistryWithDefaultModules, method testFileSetReconfigure.
private void testFileSetReconfigure(boolean expectSuccess, DatasetDefinition def,
                                    Boolean wasExternal, String path,
                                    Boolean newExternal, String newPath,
                                    DatasetProperties extraProps) throws IncompatibleUpdateException {
  Assert.assertTrue(def instanceof Reconfigurable);
  DatasetProperties props = buildFileSetProps(extraProps, wasExternal, path);
  DatasetProperties newProps = buildFileSetProps(extraProps, newExternal, newPath);
  DatasetSpecification spec = def.configure("fs", props);
  if (expectSuccess) {
    ((Reconfigurable) def).reconfigure("fs", newProps, spec);
  } else {
    try {
      ((Reconfigurable) def).reconfigure("fs", newProps, spec);
      Assert.fail("reconfigure should have thrown exception");
    } catch (IncompatibleUpdateException e) {
      // expected
    }
  }
}
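buildFileSetProps is a helper of the same test class that is not shown in this excerpt. A plausible implementation (a sketch under that assumption — the original may differ) would fold the external flag and base path into FileSetProperties along with the caller's extra properties:

private DatasetProperties buildFileSetProps(DatasetProperties extraProps, Boolean external, String path) {
  FileSetProperties.Builder builder = FileSetProperties.builder();
  // carry over any additional properties supplied by the caller
  builder.addAll(extraProps.getProperties());
  if (external != null) {
    // an external file set points at data that CDAP does not manage itself
    builder.setDataExternal(external);
  }
  if (path != null) {
    builder.setBasePath(path);
  }
  return builder.build();
}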
Use of co.cask.cdap.api.dataset.IncompatibleUpdateException in project cdap by caskdata.
From the class DatasetAdminService, method createOrUpdate.
/**
 * Configures and creates or updates a dataset.
 *
 * @param datasetInstanceId dataset instance to be created or updated
 * @param typeMeta type meta for the dataset
 * @param props dataset instance properties
 * @param existing the existing specification if the dataset already exists (i.e., this is an update), or null
 * @return the dataset specification
 * @throws IncompatibleUpdateException if the new properties are incompatible with the existing dataset
 */
public DatasetSpecification createOrUpdate(final DatasetId datasetInstanceId, final DatasetTypeMeta typeMeta,
                                           final DatasetProperties props,
                                           @Nullable final DatasetSpecification existing) throws Exception {
  if (existing == null) {
    LOG.info("Creating dataset instance {}, type meta: {}", datasetInstanceId, typeMeta);
  } else {
    LOG.info("Updating dataset instance {}, type meta: {}, existing: {}", datasetInstanceId, typeMeta, existing);
  }
  try (DatasetClassLoaderProvider classLoaderProvider =
         new DirectoryClassLoaderProvider(cConf, locationFactory)) {
    final DatasetContext context = DatasetContext.from(datasetInstanceId.getNamespace());
    UserGroupInformation ugi = getUgiForDataset(impersonator, datasetInstanceId);

    // instantiate the dataset type as the impersonated user
    final DatasetType type = ImpersonationUtils.doAs(ugi, new Callable<DatasetType>() {
      @Override
      public DatasetType call() throws Exception {
        DatasetType type = dsFramework.getDatasetType(typeMeta, null, classLoaderProvider);
        if (type == null) {
          throw new BadRequestException(
            String.format("Cannot instantiate dataset type using provided type meta: %s", typeMeta));
        }
        return type;
      }
    });

    // configure (or reconfigure) the dataset and create or update its physical storage
    DatasetSpecification spec = ImpersonationUtils.doAs(ugi, new Callable<DatasetSpecification>() {
      @Override
      public DatasetSpecification call() throws Exception {
        DatasetSpecification spec = existing == null
          ? type.configure(datasetInstanceId.getEntityName(), props)
          : type.reconfigure(datasetInstanceId.getEntityName(), props, existing);
        DatasetAdmin admin = type.getAdmin(context, spec);
        if (existing != null) {
          if (admin instanceof Updatable) {
            ((Updatable) admin).update(existing);
          } else {
            admin.upgrade();
          }
        } else {
          admin.create();
        }
        return spec;
      }
    });

    // Writing system metadata should be done without impersonation, since the user
    // may not have access to the system tables.
    writeSystemMetadata(datasetInstanceId, spec, props, typeMeta, type, context, existing != null, ugi);
    return spec;
  } catch (Exception e) {
    if (e instanceof IncompatibleUpdateException) {
      // expected to happen if the user provides bad update properties, so log at debug level
      LOG.debug("Incompatible update for dataset '{}'", datasetInstanceId, e);
    } else {
      LOG.error("Error {} dataset '{}': {}",
                existing == null ? "creating" : "updating", datasetInstanceId, e.getMessage(), e);
    }
    throw e;
  }
}
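A sketch of how a caller might drive createOrUpdate for both paths (the identifiers below are illustrative; only createOrUpdate itself comes from the code above). Passing null for existing creates the dataset; passing the previously returned specification performs an update, which propagates IncompatibleUpdateException for the caller to translate into a client-facing error:

DatasetId id = new NamespaceId("default").dataset("events");  // illustrative id

// create: no existing specification yet
DatasetSpecification created = adminService.createOrUpdate(id, typeMeta, initialProps, null);

// update: pass the spec from the create call; incompatible properties surface
// as IncompatibleUpdateException, which the caller can map to a 4xx response
try {
  adminService.createOrUpdate(id, typeMeta, updatedProps, created);
} catch (IncompatibleUpdateException e) {
  // reject the request instead of failing the service
}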