use of co.cask.cdap.proto.metadata.MetadataScope in project cdap by caskdata.
From the class SystemMetadataAuditPublishTest, the method getAllSystemMetadata:
private Set<String> getAllSystemMetadata() {
  Set<String> allMetadata = new HashSet<>();
  for (AuditMessage auditMessage : getMetadataUpdateMessages()) {
    AuditPayload payload = auditMessage.getPayload();
    Assert.assertTrue(payload instanceof MetadataPayload);
    MetadataPayload metadataPayload = (MetadataPayload) payload;
    // Collect SYSTEM-scope property keys and tags from the additions
    Map<MetadataScope, Metadata> additions = metadataPayload.getAdditions();
    if (additions.containsKey(MetadataScope.SYSTEM)) {
      allMetadata.addAll(additions.get(MetadataScope.SYSTEM).getProperties().keySet());
      allMetadata.addAll(additions.get(MetadataScope.SYSTEM).getTags());
    }
    // Collect SYSTEM-scope property keys and tags from the deletions
    Map<MetadataScope, Metadata> deletions = metadataPayload.getDeletions();
    if (deletions.containsKey(MetadataScope.SYSTEM)) {
      allMetadata.addAll(deletions.get(MetadataScope.SYSTEM).getProperties().keySet());
      allMetadata.addAll(deletions.get(MetadataScope.SYSTEM).getTags());
    }
  }
  return allMetadata;
}
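A brief usage sketch, assuming the helper above is called from a test body; the variable expectedSystemMetadataKeys is a hypothetical placeholder introduced here for illustration, not something defined in the original test.

// Illustrative only: the concrete expected keys and tags depend on the entities the test creates;
// expectedSystemMetadataKeys is a hypothetical placeholder.
Set<String> publishedSystemMetadata = getAllSystemMetadata();
Assert.assertFalse(publishedSystemMetadata.isEmpty());
Assert.assertTrue(publishedSystemMetadata.containsAll(expectedSystemMetadataKeys));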
use of co.cask.cdap.proto.metadata.MetadataScope in project cdap by caskdata.
From the class DefaultMetadataStore, the method search:
private MetadataSearchResponse search(Set<MetadataScope> scopes, String namespaceId, String searchQuery,
                                      Set<EntityTypeSimpleName> types, SortInfo sortInfo, int offset, int limit,
                                      int numCursors, String cursor, boolean showHidden,
                                      Set<EntityScope> entityScope) throws BadRequestException {
  if (offset < 0) {
    throw new IllegalArgumentException("offset must not be negative");
  }
  if (limit < 0) {
    throw new IllegalArgumentException("limit must not be negative");
  }
  List<MetadataEntry> results = new LinkedList<>();
  List<String> cursors = new LinkedList<>();
  for (MetadataScope scope : scopes) {
    SearchResults searchResults = getSearchResults(scope, namespaceId, searchQuery, types, sortInfo, offset, limit,
                                                   numCursors, cursor, showHidden, entityScope);
    results.addAll(searchResults.getResults());
    cursors.addAll(searchResults.getCursors());
  }
  // Sort if required
  Set<NamespacedEntityId> sortedEntities = getSortedEntities(results, sortInfo);
  int total = sortedEntities.size();
  // Pagination is not performed at the dataset level because:
  // 1. the DEFAULT sort order requires scoring, which is done here for now;
  // 2. even with a custom sort order, elements before the offset and the cursors at the end must be dropped.
  // TODO: Figure out how all of this can be done server (HBase) side
  int startIndex = Math.min(offset, sortedEntities.size());
  // Compute the end index as a long first to guard against integer overflow of offset + limit
  int endIndex = (int) Math.min(Integer.MAX_VALUE, (long) offset + limit);
  endIndex = Math.min(endIndex, sortedEntities.size());
  // subList's end index is exclusive
  sortedEntities = new LinkedHashSet<>(ImmutableList.copyOf(sortedEntities).subList(startIndex, endIndex));
  // Fetch metadata for entities in the result list.
  // Note: since the fetch happens in a different transaction, the metadata for some entities may have been
  // removed in the meantime. It is okay for those results to have no metadata.
  Map<NamespacedEntityId, Metadata> systemMetadata = fetchMetadata(sortedEntities, MetadataScope.SYSTEM);
  Map<NamespacedEntityId, Metadata> userMetadata = fetchMetadata(sortedEntities, MetadataScope.USER);
  return new MetadataSearchResponse(sortInfo.getSortBy() + " " + sortInfo.getSortOrder(), offset, limit, numCursors,
                                    total, addMetadataToEntities(sortedEntities, systemMetadata, userMetadata),
                                    cursors, showHidden, entityScope);
}
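The index arithmetic above widens to long before adding so that a large offset plus limit cannot overflow int. A minimal standalone sketch of the same clamping; the helper name endIndexFor is introduced here only for illustration:

// Standalone sketch of the clamping above: widen to long before adding so offset + limit
// cannot wrap around, then clamp to the number of available results.
static int endIndexFor(int offset, int limit, int resultSize) {
  int end = (int) Math.min(Integer.MAX_VALUE, (long) offset + limit);
  return Math.min(end, resultSize);
}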
use of co.cask.cdap.proto.metadata.MetadataScope in project cdap by caskdata.
From the class DefaultMetadataStore, the method addMetadataToEntities:
private Set<MetadataSearchResultRecord> addMetadataToEntities(Set<NamespacedEntityId> entities,
                                                              Map<NamespacedEntityId, Metadata> systemMetadata,
                                                              Map<NamespacedEntityId, Metadata> userMetadata) {
  Set<MetadataSearchResultRecord> result = new LinkedHashSet<>();
  for (NamespacedEntityId entity : entities) {
    ImmutableMap.Builder<MetadataScope, co.cask.cdap.proto.metadata.Metadata> builder = ImmutableMap.builder();
    // Add system metadata
    Metadata metadata = systemMetadata.get(entity);
    if (metadata != null) {
      builder.put(MetadataScope.SYSTEM,
                  new co.cask.cdap.proto.metadata.Metadata(metadata.getProperties(), metadata.getTags()));
    }
    // Add user metadata
    metadata = userMetadata.get(entity);
    if (metadata != null) {
      builder.put(MetadataScope.USER,
                  new co.cask.cdap.proto.metadata.Metadata(metadata.getProperties(), metadata.getTags()));
    }
    // Create result
    result.add(new MetadataSearchResultRecord(entity, builder.build()));
  }
  return result;
}
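The loop above copies the store's Metadata into the fully qualified co.cask.cdap.proto.metadata.Metadata that search results expose. A minimal sketch of that per-scope conversion pulled out as a helper; the name toProtoMetadata is introduced here only for illustration:

// Hypothetical helper illustrating the conversion done inline above; only the
// properties and tags are copied into the proto representation.
private static co.cask.cdap.proto.metadata.Metadata toProtoMetadata(Metadata metadata) {
  return new co.cask.cdap.proto.metadata.Metadata(metadata.getProperties(), metadata.getTags());
}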
use of co.cask.cdap.proto.metadata.MetadataScope in project cdap by caskdata.
From the class AuditMessageTest, the method testMetadataChange:
@Test
public void testMetadataChange() throws Exception {
  String metadataJson =
    "{\"version\":1,\"time\":3000,\"entityId\":{\"namespace\":\"ns1\",\"application\":\"app1\",\"version\":\"v1\","
      + "\"entity\":\"APPLICATION\"},\"user\":\"user1\",\"type\":\"METADATA_CHANGE\",\"payload\":{"
      + "\"previous\":{\"USER\":{\"properties\":{\"uk\":\"uv\",\"uk1\":\"uv2\"},\"tags\":[\"ut1\",\"ut2\"]},"
      + "\"SYSTEM\":{\"properties\":{\"sk\":\"sv\"},\"tags\":[]}},"
      + "\"additions\":{\"SYSTEM\":{\"properties\":{\"sk\":\"sv\"},\"tags\":[\"t1\",\"t2\"]}},"
      + "\"deletions\":{\"USER\":{\"properties\":{\"uk\":\"uv\"},\"tags\":[\"ut1\"]}}}}";
  // Previous state: USER properties and tags plus a SYSTEM property with no tags
  Map<String, String> userProperties = new HashMap<>();
  userProperties.put("uk", "uv");
  userProperties.put("uk1", "uv2");
  Map<String, String> systemProperties = new HashMap<>();
  systemProperties.put("sk", "sv");
  Set<String> userTags = new LinkedHashSet<>();
  userTags.add("ut1");
  userTags.add("ut2");
  Map<MetadataScope, Metadata> previous = new LinkedHashMap<>();
  previous.put(MetadataScope.USER,
               new Metadata(Collections.unmodifiableMap(userProperties), Collections.unmodifiableSet(userTags)));
  previous.put(MetadataScope.SYSTEM,
               new Metadata(Collections.unmodifiableMap(systemProperties),
                            Collections.unmodifiableSet(new LinkedHashSet<String>())));
  // Additions: one SYSTEM property and two SYSTEM tags
  Map<String, String> sysPropertiesAdded = new HashMap<>();
  sysPropertiesAdded.put("sk", "sv");
  Set<String> systemTagsAdded = new LinkedHashSet<>();
  systemTagsAdded.add("t1");
  systemTagsAdded.add("t2");
  Map<MetadataScope, Metadata> additions = new HashMap<>();
  additions.put(MetadataScope.SYSTEM,
                new Metadata(Collections.unmodifiableMap(sysPropertiesAdded),
                             Collections.unmodifiableSet(systemTagsAdded)));
  // Deletions: one USER property and one USER tag
  Map<String, String> userPropertiesDeleted = new HashMap<>();
  userPropertiesDeleted.put("uk", "uv");
  Set<String> userTagsDeleted = new LinkedHashSet<>();
  userTagsDeleted.add("ut1");
  Map<MetadataScope, Metadata> deletions = new HashMap<>();
  deletions.put(MetadataScope.USER,
                new Metadata(Collections.unmodifiableMap(userPropertiesDeleted),
                             Collections.unmodifiableSet(userTagsDeleted)));
  AuditMessage metadataChange = new AuditMessage(3000L, new NamespaceId("ns1").app("app1", "v1"), "user1",
                                                 AuditType.METADATA_CHANGE,
                                                 new MetadataPayload(previous, additions, deletions));
  // Serialization: compare as maps so the check is insensitive to JSON key order
  Assert.assertEquals(jsonToMap(metadataJson), jsonToMap(GSON.toJson(metadataChange)));
  // Deserialization: the JSON parses back to an equal AuditMessage
  Assert.assertEquals(metadataChange, GSON.fromJson(metadataJson, AuditMessage.class));
}
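The jsonToMap helper referenced above is not shown in this snippet. A plausible sketch, assuming it simply parses the JSON string into a Map with the test's GSON instance (imports of java.lang.reflect.Type and com.google.gson.reflect.TypeToken are assumed), so that key ordering does not affect the comparison:

// Hedged sketch of the jsonToMap helper used above (not part of the original snippet);
// parsing both strings into maps makes the equality check order-insensitive.
private Map<String, Object> jsonToMap(String json) {
  Type mapType = new TypeToken<Map<String, Object>>() { }.getType();
  return GSON.fromJson(json, mapType);
}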
use of co.cask.cdap.proto.metadata.MetadataScope in project cdap by caskdata.
From the class MetadataStoreTest, the method testSearchWeight:
@Test
public void testSearchWeight() throws Exception {
  ProgramId flow1 = new ProgramId("ns1", "app1", ProgramType.FLOW, "flow1");
  StreamId stream1 = new StreamId("ns1", "s1");
  DatasetId dataset1 = new DatasetId("ns1", "ds1");
  // Add metadata
  String multiWordValue = "aV1 av2 , - , av3 - av4_av5 av6";
  Map<String, String> flowUserProps = ImmutableMap.of("key1", "value1", "key2", "value2",
                                                      "multiword", multiWordValue);
  Map<String, String> flowSysProps = ImmutableMap.of("sysKey1", "sysValue1");
  Set<String> flowUserTags = ImmutableSet.of("tag1", "tag2");
  Set<String> streamUserTags = ImmutableSet.of("tag3", "tag4");
  Set<String> flowSysTags = ImmutableSet.of("sysTag1");
  store.setProperties(MetadataScope.USER, flow1, flowUserProps);
  store.setProperties(MetadataScope.SYSTEM, flow1, flowSysProps);
  store.addTags(MetadataScope.USER, flow1, flowUserTags.toArray(new String[flowUserTags.size()]));
  store.addTags(MetadataScope.SYSTEM, flow1, flowSysTags.toArray(new String[flowSysTags.size()]));
  // Add and then remove the stream's tags and initial properties before setting its final properties
  store.addTags(MetadataScope.USER, stream1, streamUserTags.toArray(new String[streamUserTags.size()]));
  store.removeTags(MetadataScope.USER, stream1, streamUserTags.toArray(new String[streamUserTags.size()]));
  store.setProperties(MetadataScope.USER, stream1, flowUserProps);
  store.removeProperties(MetadataScope.USER, stream1, "key1", "key2", "multiword");
  Map<String, String> streamUserProps = ImmutableMap.of("sKey1", "sValue1 sValue2", "Key1", "Value1");
  store.setProperties(MetadataScope.USER, stream1, streamUserProps);
  Map<String, String> datasetUserProps = ImmutableMap.of("sKey1", "sValuee1 sValuee2");
  store.setProperties(MetadataScope.USER, dataset1, datasetUserProps);
  // Test score and metadata match: flow1 matches both query terms, so it ranks ahead of stream1
  MetadataSearchResponse response = search("ns1", "value1 multiword:av2");
  Assert.assertEquals(2, response.getTotal());
  List<MetadataSearchResultRecord> actual = Lists.newArrayList(response.getResults());
  Map<MetadataScope, Metadata> expectedFlowMetadata =
    ImmutableMap.of(MetadataScope.USER, new Metadata(flowUserProps, flowUserTags),
                    MetadataScope.SYSTEM, new Metadata(flowSysProps, flowSysTags));
  Map<MetadataScope, Metadata> expectedStreamMetadata =
    ImmutableMap.of(MetadataScope.USER, new Metadata(streamUserProps, Collections.<String>emptySet()));
  Map<MetadataScope, Metadata> expectedDatasetMetadata =
    ImmutableMap.of(MetadataScope.USER, new Metadata(datasetUserProps, Collections.<String>emptySet()));
  List<MetadataSearchResultRecord> expected =
    Lists.newArrayList(new MetadataSearchResultRecord(flow1, expectedFlowMetadata),
                       new MetadataSearchResultRecord(stream1, expectedStreamMetadata));
  Assert.assertEquals(expected, actual);
  // stream1 matches both "value1" and "sValue*", so it ranks first in this query
  response = search("ns1", "value1 sValue*");
  Assert.assertEquals(3, response.getTotal());
  actual = Lists.newArrayList(response.getResults());
  expected = Lists.newArrayList(new MetadataSearchResultRecord(stream1, expectedStreamMetadata),
                                new MetadataSearchResultRecord(dataset1, expectedDatasetMetadata),
                                new MetadataSearchResultRecord(flow1, expectedFlowMetadata));
  Assert.assertEquals(expected, actual);
  // A bare wildcard matches everything; only membership is asserted since relative order is not significant here
  response = search("ns1", "*");
  Assert.assertEquals(3, response.getTotal());
  actual = Lists.newArrayList(response.getResults());
  Assert.assertTrue(actual.containsAll(expected));
}
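A generic illustration of the weighting behaviour this test exercises, written in plain Java and deliberately not using the CDAP API: entries that match more query terms sort ahead of entries that match fewer. The class and method names here are introduced only for this sketch.

// Generic sketch, not CDAP code: rank documents by how many query terms they contain.
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

class TermMatchRanker {
  static List<String> rankByTermMatches(List<String> docs, Set<String> terms) {
    return docs.stream()
        .sorted(Comparator.comparingLong(
            (String doc) -> terms.stream().filter(doc::contains).count()).reversed())
        .collect(Collectors.toList());
  }
}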