Usage of co.cask.cdap.data2.metadata.dataset.Metadata in project cdap by caskdata — class DatasetAdminService, method drop().
/**
 * Drops the given dataset instance and then removes its metadata.
 * <p>
 * The physical drop is performed while impersonating the dataset's user; metadata removal
 * happens afterwards, outside impersonation, and only if the drop did not throw.
 *
 * @param datasetInstanceId id of the dataset instance to drop
 * @param typeMeta type meta used to instantiate the dataset type
 * @param spec specification of the dataset instance being dropped
 * @throws BadRequestException if a dataset type cannot be instantiated from {@code typeMeta}
 * @throws Exception if the drop itself fails
 */
public void drop(final DatasetId datasetInstanceId, final DatasetTypeMeta typeMeta, final DatasetSpecification spec) throws Exception {
LOG.info("Dropping dataset with spec: {}, type meta: {}", spec, typeMeta);
// try-with-resources guarantees the class loader provider is closed even if the drop fails
try (DatasetClassLoaderProvider classLoaderProvider = new DirectoryClassLoaderProvider(cConf, locationFactory)) {
// perform the drop as the impersonated user for this dataset
UserGroupInformation ugi = getUgiForDataset(impersonator, datasetInstanceId);
ImpersonationUtils.doAs(ugi, (Callable<Void>) () -> {
DatasetType type = dsFramework.getDatasetType(typeMeta, null, classLoaderProvider);
if (type == null) {
throw new BadRequestException(String.format("Cannot instantiate dataset type using provided type meta: %s", typeMeta));
}
DatasetAdmin admin = type.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), spec);
try {
admin.drop();
} finally {
// always release the admin's resources, even when drop() throws
Closeables.closeQuietly(admin);
}
return null;
});
}
// Remove metadata for the dataset. Runs only after a successful drop, outside impersonation.
metadataStore.removeMetadata(datasetInstanceId);
}
Usage of co.cask.cdap.data2.metadata.dataset.Metadata in project cdap by caskdata — class SystemDatasetRuntimeModule, method bindDefaultModules().
/**
 * Adds bindings for the dataset modules that are available by default.
 *
 * @param mapBinder binder that each default module is registered with, keyed by module name
 */
private void bindDefaultModules(MapBinder<String, DatasetModule> mapBinder) {
// Each module is registered under its well-known name; registration order is kept
// identical to the historical ordering in case any module relies on an earlier one.
DatasetModule coreModule = new CoreDatasetsModule();
mapBinder.addBinding("core").toInstance(coreModule);
DatasetModule fileSetModule = new FileSetModule();
mapBinder.addBinding("fileSet").toInstance(fileSetModule);
DatasetModule timePartitionedFileSetModule = new TimePartitionedFileSetModule();
mapBinder.addBinding("timePartitionedFileSet").toInstance(timePartitionedFileSetModule);
DatasetModule partitionedFileSetModule = new PartitionedFileSetModule();
mapBinder.addBinding("partitionedFileSet").toInstance(partitionedFileSetModule);
DatasetModule objectMappedTableModule = new ObjectMappedTableModule();
mapBinder.addBinding("objectMappedTable").toInstance(objectMappedTableModule);
DatasetModule cubeModule = new CubeModule();
mapBinder.addBinding("cube").toInstance(cubeModule);
DatasetModule usageModule = new UsageDatasetModule();
mapBinder.addBinding("usage").toInstance(usageModule);
DatasetModule metadataModule = new MetadataDatasetModule();
mapBinder.addBinding("metadata").toInstance(metadataModule);
DatasetModule lineageModule = new LineageDatasetModule();
mapBinder.addBinding("lineage").toInstance(lineageModule);
DatasetModule externalDatasetModule = new ExternalDatasetModule();
mapBinder.addBinding("externalDataset").toInstance(externalDatasetModule);
}
Usage of co.cask.cdap.data2.metadata.dataset.Metadata in project cdap by caskdata — class MetadataStoreTest, method setup().
@BeforeClass
public static void setup() throws IOException {
// Build the test injector from in-memory runtime modules, overriding the
// MetadataStore binding from DataSetsModules so the test exercises the
// concrete DefaultMetadataStore implementation.
Injector injector = Guice.createInjector(new ConfigModule(), Modules.override(new DataSetsModules().getInMemoryModules()).with(new AbstractModule() {
@Override
protected void configure() {
// Bind to the real store implementation rather than whatever DataSetsModules provides by default.
bind(MetadataStore.class).to(DefaultMetadataStore.class);
}
}), new LocationRuntimeModule().getInMemoryModules(), new TransactionInMemoryModule(), new SystemDatasetRuntimeModule().getInMemoryModules(), new NamespaceClientRuntimeModule().getInMemoryModules(), new AuthorizationTestModule(), new AuthorizationEnforcementModule().getInMemoryModules(), new AuthenticationContextModules().getMasterModule(), new AuditModule().getInMemoryModules());
cConf = injector.getInstance(CConfiguration.class);
// The transaction manager must be started before the store can be used.
txManager = injector.getInstance(TransactionManager.class);
txManager.startAndWait();
store = injector.getInstance(MetadataStore.class);
auditPublisher = injector.getInstance(InMemoryAuditPublisher.class);
}
Usage of co.cask.cdap.data2.metadata.dataset.Metadata in project cdap by caskdata — class MdsKey, method getMetadataKey().
/**
 * Extracts the metadata key part from a metadata row key.
 * <p>
 * The row key layout is
 * {@code [rowPrefix][targetType][targetId][key]} for value rows and
 * {@code [rowPrefix][targetType][targetId][key][index]} for value index rows,
 * so everything before {@code [key]} is skipped. The number of string parts that
 * make up {@code [targetId]} depends on the entity type.
 *
 * @param type the target type string (a value from {@code EntityIdKeyHelper.TYPE_MAP})
 * @param rowKey the raw row key to parse
 * @return the metadata key stored in the row key
 * @throws IllegalArgumentException if {@code type} is not a known metadata source type
 */
static String getMetadataKey(String type, byte[] rowKey) {
MDSKey.Splitter keySplitter = new MDSKey(rowKey).split();
// Skip rowPrefix (row type) and targetType before the targetId parts.
keySplitter.skipBytes();
keySplitter.skipString();
// Identical branches in the original were pure duplication; compute the number of
// targetId string parts per entity type once, then skip them in a single loop.
final int targetIdParts;
if (type.equals(EntityIdKeyHelper.TYPE_MAP.get(ProgramId.class))) {
// program ids consist of four string parts (the original skipped four strings)
targetIdParts = 4;
} else if (type.equals(EntityIdKeyHelper.TYPE_MAP.get(ApplicationId.class))
|| type.equals(EntityIdKeyHelper.TYPE_MAP.get(DatasetId.class))
|| type.equals(EntityIdKeyHelper.TYPE_MAP.get(StreamId.class))) {
// namespace + name
targetIdParts = 2;
} else if (type.equals(EntityIdKeyHelper.TYPE_MAP.get(StreamViewId.class))
|| type.equals(EntityIdKeyHelper.TYPE_MAP.get(ArtifactId.class))) {
// namespace + stream + view, or namespace + name + version
targetIdParts = 3;
} else {
throw new IllegalArgumentException("Illegal Type " + type + " of metadata source.");
}
for (int i = 0; i < targetIdParts; i++) {
keySplitter.skipString();
}
// The next string part is the metadata key itself.
return keySplitter.getString();
}
Usage of co.cask.cdap.data2.metadata.dataset.Metadata in project cdap by caskdata — class MetadataDataset, method getMetadata().
/**
 * Returns metadata for a given set of entities.
 *
 * @param targetIds entities for which metadata is required
 * @return map of entityId to set of metadata for that entity, as a set of {@code Metadata},
 *         containing one element per entity that has at least one metadata row
 */
public Set<Metadata> getMetadata(Set<? extends NamespacedEntityId> targetIds) {
if (targetIds.isEmpty()) {
return Collections.emptySet();
}
// Build one fuzzy key pair (key + mask) per requested entity.
List<ImmutablePair<byte[], byte[]>> fuzzyKeys = new ArrayList<>(targetIds.size());
for (NamespacedEntityId targetId : targetIds) {
fuzzyKeys.add(getFuzzyKeyFor(targetId));
}
// Sort so that [first key, stopKeyForPrefix(last key)) is a range covering every entity.
Collections.sort(fuzzyKeys, FUZZY_KEY_COMPARATOR);
byte[] startKey = fuzzyKeys.get(0).getFirst();
byte[] stopKey = Bytes.stopKeyForPrefix(fuzzyKeys.get(fuzzyKeys.size() - 1).getFirst());
// Scan with a fuzzy filter; each returned row is a single property/tag entry,
// so group the entries by the entity they belong to.
Multimap<NamespacedEntityId, MetadataEntry> entriesByEntity = HashMultimap.create();
try (Scanner scanner = indexedTable.scan(new Scan(startKey, stopKey, new FuzzyRowFilter(fuzzyKeys)))) {
for (Row row = scanner.next(); row != null; row = scanner.next()) {
MetadataEntry entry = convertRow(row);
if (entry != null) {
entriesByEntity.put(entry.getTargetId(), entry);
}
}
}
// Fold each entity's grouped entries into a single Metadata object.
Set<Metadata> result = new HashSet<>();
for (Map.Entry<NamespacedEntityId, Collection<MetadataEntry>> grouped : entriesByEntity.asMap().entrySet()) {
Map<String, String> properties = new HashMap<>();
Set<String> tags = Collections.emptySet();
for (MetadataEntry entry : grouped.getValue()) {
if (TAGS_KEY.equals(entry.getKey())) {
// The tags row holds all tags in one value; everything else is a plain property.
tags = splitTags(entry.getValue());
} else {
properties.put(entry.getKey(), entry.getValue());
}
}
result.add(new Metadata(grouped.getKey(), properties, tags));
}
return result;
}
Aggregations