Use of co.cask.cdap.api.dataset.DatasetContext in project cdap by caskdata.
From class DatasetDefinitionRegistryWithDefaultModules, method testCompositeDatasetReconfigure.
// Tests that CompositeDatasetDefinition correctly delegates reconfigure() to its embedded types.
@Test
public void testCompositeDatasetReconfigure() throws IncompatibleUpdateException {
  CompositeDatasetDefinition<Dataset> composite =
    new CompositeDatasetDefinition<Dataset>("composite", "pedantic", new PedanticDatasetDefinition("pedantic")) {
      @Override
      public Dataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                Map<String, String> arguments, ClassLoader classLoader) throws IOException {
        return null;
      }
    };
  DatasetSpecification spec = composite.configure("nn", DatasetProperties.EMPTY);
  DatasetSpecification respec = composite.reconfigure("nn", DatasetProperties.EMPTY, spec);
  Assert.assertEquals(spec, respec);
  try {
    composite.reconfigure("nn", DatasetProperties.builder().add("immutable", "x").build(), spec);
    Assert.fail("reconfigure should have thrown exception");
  } catch (IncompatibleUpdateException e) {
    // expected
  }
}
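The PedanticDatasetDefinition used above is not shown in this listing. A minimal sketch of what it plausibly looks like, assuming it simply refuses any property change on reconfigure; the class body, supertype, and message text here are assumptions, not the actual CDAP test helper:

// Hypothetical sketch: a definition that rejects any property change during reconfigure.
public class PedanticDatasetDefinition extends AbstractDatasetDefinition<Dataset, DatasetAdmin>
  implements Reconfigurable {

  public PedanticDatasetDefinition(String name) {
    super(name);
  }

  @Override
  public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
    return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .build();
  }

  @Override
  public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                          DatasetSpecification currentSpec) throws IncompatibleUpdateException {
    // any difference from the existing properties is treated as incompatible
    if (!newProperties.getProperties().equals(currentSpec.getProperties())) {
      throw new IncompatibleUpdateException("Properties may not change");
    }
    return configure(instanceName, newProperties);
  }

  @Override
  public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec, ClassLoader classLoader) {
    return null; // not needed for the reconfigure test
  }

  @Override
  public Dataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                            Map<String, String> arguments, ClassLoader classLoader) {
    return null; // not needed for the reconfigure test
  }
}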
Use of co.cask.cdap.api.dataset.DatasetContext in project cdap by caskdata.
From class SingleTypeModule, method register.
@Override
public void register(DatasetDefinitionRegistry registry) {
  final Constructor<?> ctor = findSuitableCtorOrFail(dataSetClass);
  DatasetType typeAnn = dataSetClass.getAnnotation(DatasetType.class);
  // default the type name to the dataset class name
  String typeName = typeAnn != null ? typeAnn.value() : dataSetClass.getName();
  // The ordering is important: it is the same order as the constructor parameters.
  final Map<String, DatasetDefinition> embeddedDefinitions = Maps.newLinkedHashMap();
  final Class<?>[] paramTypes = ctor.getParameterTypes();
  Annotation[][] paramAnns = ctor.getParameterAnnotations();
  // Gather name and type information for each @EmbeddedDataset parameter
  // (parameter 0 is the DatasetSpecification, so start at 1).
  for (int i = 1; i < paramTypes.length; i++) {
    // Must have the @EmbeddedDataset annotation; this is the contract of findSuitableCtorOrFail.
    EmbeddedDataset anno = Iterables.filter(Arrays.asList(paramAnns[i]), EmbeddedDataset.class).iterator().next();
    String type = anno.type();
    // default to the parameter's class name if the type name is not specified through the annotation
    if (EmbeddedDataset.DEFAULT_TYPE_NAME.equals(type)) {
      type = paramTypes[i].getName();
    }
    DatasetDefinition embeddedDefinition = registry.get(type);
    if (embeddedDefinition == null) {
      throw new IllegalStateException(String.format(
        "Unknown Dataset type '%s', specified by parameter number %d of the %s Dataset",
        type, i, dataSetClass.getName()));
    }
    embeddedDefinitions.put(anno.value(), embeddedDefinition);
  }
  registry.add(new CompositeDatasetDefinition<Dataset>(typeName, embeddedDefinitions) {
    @Override
    public Dataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader) throws IOException {
      // Instantiate the dataset, passing the spec followed by the embedded datasets
      // in the same order as the constructor parameters.
      List<Object> params = new ArrayList<>();
      params.add(spec);
      for (Map.Entry<String, DatasetDefinition> entry : embeddedDefinitions.entrySet()) {
        params.add(entry.getValue().getDataset(datasetContext, spec.getSpecification(entry.getKey()),
                                               arguments, classLoader));
      }
      try {
        return (Dataset) ctor.newInstance(params.toArray());
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
    }
  });
}
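For context, a hypothetical dataset class that this register() method could handle: its constructor takes the DatasetSpecification first, followed by one @EmbeddedDataset parameter per embedded dataset. The class name, type name "myKVTable", and embedded dataset name "data" below are made up for illustration:

// Hypothetical example dataset consumed by SingleTypeModule.
@DatasetType("myKVTable")
public class MyKeyValueTable extends AbstractDataset {

  private final Table table;

  public MyKeyValueTable(DatasetSpecification spec,
                         @EmbeddedDataset("data") Table table) {
    super(spec.getName(), table);
    this.table = table;
  }
}

// Registering it builds the CompositeDatasetDefinition shown above:
// new SingleTypeModule(MyKeyValueTable.class).register(registry);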
Use of co.cask.cdap.api.dataset.DatasetContext in project cdap by caskdata.
From class NoTxKeyValueTableTest, method test.
@Test
public void test() throws IOException {
  DatasetDefinition<? extends NoTxKeyValueTable, ? extends DatasetAdmin> def = getDefinition();
  DatasetSpecification spec = def.configure("table", DatasetProperties.EMPTY);
  ClassLoader cl = NoTxKeyValueTable.class.getClassLoader();
  DatasetContext datasetContext = DatasetContext.from(NAMESPACE_ID.getEntityName());

  // create & exists
  DatasetAdmin admin = def.getAdmin(datasetContext, spec, cl);
  Assert.assertFalse(admin.exists());
  admin.create();
  Assert.assertTrue(admin.exists());

  // put/get
  NoTxKeyValueTable table = def.getDataset(datasetContext, spec, NO_ARGS, cl);
  Assert.assertNull(table.get(KEY1));
  table.put(KEY1, VALUE1);
  Assert.assertArrayEquals(VALUE1, table.get(KEY1));
  Assert.assertNull(table.get(KEY2));

  // override
  table.put(KEY1, VALUE2);
  Assert.assertArrayEquals(VALUE2, table.get(KEY1));
  Assert.assertNull(table.get(KEY2));

  // delete & truncate
  table.put(KEY2, VALUE1);
  Assert.assertArrayEquals(VALUE2, table.get(KEY1));
  Assert.assertArrayEquals(VALUE1, table.get(KEY2));
  table.put(KEY2, null);
  Assert.assertNull(table.get(KEY2));
  Assert.assertArrayEquals(VALUE2, table.get(KEY1));
  admin.truncate();
  Assert.assertNull(table.get(KEY1));
  Assert.assertNull(table.get(KEY2));
  Assert.assertTrue(admin.exists());
  admin.drop();
  Assert.assertFalse(admin.exists());

  // drop should clean up data
  admin.create();
  Assert.assertTrue(admin.exists());
  Assert.assertNull(table.get(KEY1));
  Assert.assertNull(table.get(KEY2));
  table.put(KEY1, VALUE1);
  Assert.assertArrayEquals(VALUE1, table.get(KEY1));
  admin.drop();
  Assert.assertFalse(admin.exists());
  admin.create();
  Assert.assertTrue(admin.exists());
  Assert.assertNull(table.get(KEY1));
}
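As implied by the assertions above, NoTxKeyValueTable is a plain, non-transactional key-value store where putting a null value deletes the key. A sketch of the interface as the test exercises it; the comment wording is ours:

public interface NoTxKeyValueTable extends Dataset {

  // stores the value for the given key; a null value removes the key
  void put(byte[] key, @Nullable byte[] value);

  // returns the stored value, or null if the key is absent
  @Nullable
  byte[] get(byte[] key);
}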
Use of co.cask.cdap.api.dataset.DatasetContext in project cdap by caskdata.
From class DatasetAdminService, method createOrUpdate.
/**
 * Configures and creates a dataset.
 *
 * @param datasetInstanceId dataset instance to be created
 * @param typeMeta type meta for the dataset
 * @param props dataset instance properties
 * @param existing the existing properties, if the dataset already exists (in case of an update)
 * @return the dataset specification
 */
public DatasetSpecification createOrUpdate(final DatasetId datasetInstanceId, final DatasetTypeMeta typeMeta,
                                           final DatasetProperties props,
                                           @Nullable final DatasetSpecification existing) throws Exception {
  if (existing == null) {
    LOG.info("Creating dataset instance {}, type meta: {}", datasetInstanceId, typeMeta);
  } else {
    LOG.info("Updating dataset instance {}, type meta: {}, existing: {}", datasetInstanceId, typeMeta, existing);
  }
  try (DatasetClassLoaderProvider classLoaderProvider =
         new DirectoryClassLoaderProvider(cConf, locationFactory)) {
    final DatasetContext context = DatasetContext.from(datasetInstanceId.getNamespace());
    UserGroupInformation ugi = getUgiForDataset(impersonator, datasetInstanceId);
    final DatasetType type = ImpersonationUtils.doAs(ugi, new Callable<DatasetType>() {
      @Override
      public DatasetType call() throws Exception {
        DatasetType type = dsFramework.getDatasetType(typeMeta, null, classLoaderProvider);
        if (type == null) {
          throw new BadRequestException(
            String.format("Cannot instantiate dataset type using provided type meta: %s", typeMeta));
        }
        return type;
      }
    });
    DatasetSpecification spec = ImpersonationUtils.doAs(ugi, new Callable<DatasetSpecification>() {
      @Override
      public DatasetSpecification call() throws Exception {
        DatasetSpecification spec = existing == null
          ? type.configure(datasetInstanceId.getEntityName(), props)
          : type.reconfigure(datasetInstanceId.getEntityName(), props, existing);
        DatasetAdmin admin = type.getAdmin(context, spec);
        if (existing != null) {
          if (admin instanceof Updatable) {
            ((Updatable) admin).update(existing);
          } else {
            admin.upgrade();
          }
        } else {
          admin.create();
        }
        return spec;
      }
    });
    // Writing system metadata should be done without impersonation, since the user
    // may not have access to the system tables.
    writeSystemMetadata(datasetInstanceId, spec, props, typeMeta, type, context, existing != null, ugi);
    return spec;
  } catch (Exception e) {
    if (e instanceof IncompatibleUpdateException) {
      // expected when the user provides incompatible update properties, so log at debug level
      LOG.debug("Incompatible update for dataset '{}'", datasetInstanceId, e);
    } else {
      LOG.error("Error {} dataset '{}': {}",
                existing == null ? "creating" : "updating", datasetInstanceId, e.getMessage(), e);
    }
    throw e;
  }
}
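A hypothetical call sequence against this method, assuming a DatasetAdminService instance named adminService and a previously fetched DatasetTypeMeta; the namespace, dataset name, and property key below are made up:

// create: no existing spec is passed
DatasetId id = new DatasetId("myNamespace", "myTable");
DatasetSpecification created =
  adminService.createOrUpdate(id, typeMeta, DatasetProperties.EMPTY, null);

// update: pass the current spec; an incompatible property change
// surfaces as an IncompatibleUpdateException
DatasetProperties newProps = DatasetProperties.builder().add("ttl", "86400").build();
DatasetSpecification updated =
  adminService.createOrUpdate(id, typeMeta, newProps, created);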