Use of co.cask.cdap.api.dataset.table.Scanner in project cdap by caskdata.
The class MetadataStoreDataset, method listKV.
private <T> Map<MDSKey, T> listKV(Scan runScan, Type typeOfT, int limit,
                                  @Nullable Predicate<MDSKey> keyFilter,
                                  @Nullable Predicate<T> valueFilter) {
  try {
    Map<MDSKey, T> map = Maps.newLinkedHashMap();
    try (Scanner scan = table.scan(runScan)) {
      Row next;
      while ((limit > 0) && (next = scan.next()) != null) {
        MDSKey key = new MDSKey(next.getRow());
        byte[] columnValue = next.get(COLUMN);
        if (columnValue == null) {
          continue;
        }
        T value = deserialize(key, columnValue, typeOfT);
        // Skip rows whose key does not pass the key filter
        if (keyFilter != null && !keyFilter.test(key)) {
          continue;
        }
        // Skip rows whose value does not pass the value filter
        if (valueFilter != null && !valueFilter.test(value)) {
          continue;
        }
        map.put(key, value);
        limit--;
      }
      return map;
    }
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
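For context, a hedged sketch of how a caller might invoke listKV. The Scan bounds, value type, and predicates below are illustrative assumptions, not taken from the project; only listKV's signature and its filtering semantics (rows skipped by a filter do not count against the limit) come from the code above.

// Illustrative only: scan between two assumed byte[] bounds, deserialize values as
// String, and keep at most 100 entries whose value is non-empty.
Scan scan = new Scan(startRowBytes, stopRowBytes);          // assumed byte[] bounds in scope
Predicate<MDSKey> keyFilter = key -> true;                  // accept every key
Predicate<String> valueFilter = value -> !value.isEmpty();  // drop empty values
Map<MDSKey, String> entries = listKV(scan, String.class, 100, keyFilter, valueFilter);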
Use of co.cask.cdap.api.dataset.table.Scanner in project cdap by caskdata.
The class MetadataStoreDataset, method listCombinedFilterKV.
private <T> Map<MDSKey, T> listCombinedFilterKV(Scan runScan, Type typeOfT, int limit,
                                                @Nullable Predicate<KeyValue<T>> combinedFilter) {
  try {
    Map<MDSKey, T> map = Maps.newLinkedHashMap();
    try (Scanner scan = table.scan(runScan)) {
      Row next;
      while ((limit > 0) && (next = scan.next()) != null) {
        MDSKey key = new MDSKey(next.getRow());
        byte[] columnValue = next.get(COLUMN);
        if (columnValue == null) {
          continue;
        }
        T value = deserialize(key, columnValue, typeOfT);
        KeyValue<T> kv = new KeyValue<>(key, value);
        // Skip rows that do not pass the combined key/value filter
        if (combinedFilter != null && !combinedFilter.test(kv)) {
          continue;
        }
        map.put(kv.getKey(), kv.getValue());
        limit--;
      }
      return map;
    }
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
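The combined variant pushes a single predicate over both key and value. Below is a minimal sketch of such a filter, assuming a String value type and a Scan already in scope (here called scan); the KeyValue getKey()/getValue() accessors are the ones used in the method above.

// Illustrative only: keep at most 50 entries whose deserialized value is non-null and
// non-empty, deciding on the key and value together through one KeyValue predicate.
Predicate<KeyValue<String>> combinedFilter = kv ->
    kv.getValue() != null && !kv.getValue().isEmpty();
Map<MDSKey, String> entries = listCombinedFilterKV(scan, String.class, 50, combinedFilter);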
Use of co.cask.cdap.api.dataset.table.Scanner in project cdap by caskdata.
The class HBaseMetricsTableTest, method testCombinedTablePut.
@Test
public void testCombinedTablePut() throws Exception {
  MetricsTable v2Table = getTable("v2Table");
  MetricsTable v3Table = getTable("v3Table");
  MetricsTable combinedMetricsTable = new CombinedHBaseMetricsTable(v2Table, v3Table, 1, cConf, dsFramework);
  // Pre-existing data in the v2 table
  v2Table.put(ImmutableSortedMap.<byte[], SortedMap<byte[], Long>>orderedBy(Bytes.BYTES_COMPARATOR)
                .put(A, mapOf(A, Bytes.toLong(A), B, Bytes.toLong(B)))
                .put(B, mapOf(A, Bytes.toLong(A), B, Bytes.toLong(B)))
                .build());
  Assert.assertEquals(Bytes.toLong(A), Bytes.toLong(v2Table.get(A, A)));
  Assert.assertEquals(Bytes.toLong(B), Bytes.toLong(v2Table.get(A, B)));
  Assert.assertEquals(Bytes.toLong(A), Bytes.toLong(v2Table.get(B, A)));
  Assert.assertEquals(Bytes.toLong(B), Bytes.toLong(v2Table.get(B, B)));
  // Add some gauge metrics to the v3 table through the combined table
  combinedMetricsTable.put(ImmutableSortedMap.<byte[], SortedMap<byte[], Long>>orderedBy(Bytes.BYTES_COMPARATOR)
                             .put(B, mapOf(B, Bytes.toLong(B), C, Bytes.toLong(C)))
                             .put(C, mapOf(P, Bytes.toLong(P), X, Bytes.toLong(X)))
                             .build());
  Assert.assertEquals(Bytes.toLong(B), Bytes.toLong(combinedMetricsTable.get(B, B)));
  Assert.assertEquals(Bytes.toLong(C), Bytes.toLong(combinedMetricsTable.get(B, C)));
  Assert.assertEquals(Bytes.toLong(X), Bytes.toLong(combinedMetricsTable.get(C, X)));
  Assert.assertEquals(Bytes.toLong(P), Bytes.toLong(combinedMetricsTable.get(C, P)));
  Scanner combinedScanner = combinedMetricsTable.scan(null, null, null);
  Row firstRow = combinedScanner.next();
  Assert.assertEquals(1L, Bytes.toLong(firstRow.getRow()));
  Iterator<Map.Entry<byte[], byte[]>> colIterator = firstRow.getColumns().entrySet().iterator();
  Map.Entry<byte[], byte[]> column = colIterator.next();
  Assert.assertEquals(1L, Bytes.toLong(column.getKey()));
  Assert.assertEquals(1L, Bytes.toLong(column.getValue()));
  column = colIterator.next();
  Assert.assertEquals(2L, Bytes.toLong(column.getKey()));
  Assert.assertEquals(2L, Bytes.toLong(column.getValue()));
  Row secondRow = combinedScanner.next();
  Assert.assertEquals(2L, Bytes.toLong(secondRow.getRow()));
  colIterator = secondRow.getColumns().entrySet().iterator();
  column = colIterator.next();
  Assert.assertEquals(1L, Bytes.toLong(column.getKey()));
  Assert.assertEquals(1L, Bytes.toLong(column.getValue()));
  column = colIterator.next();
  Assert.assertEquals(2L, Bytes.toLong(column.getKey()));
  // this should be the latest value, which is 2
  Assert.assertEquals(2L, Bytes.toLong(column.getValue()));
  column = colIterator.next();
  Assert.assertEquals(3L, Bytes.toLong(column.getKey()));
  Assert.assertEquals(3L, Bytes.toLong(column.getValue()));
  Row thirdRow = combinedScanner.next();
  Assert.assertEquals(3L, Bytes.toLong(thirdRow.getRow()));
  colIterator = thirdRow.getColumns().entrySet().iterator();
  column = colIterator.next();
  Assert.assertEquals(4L, Bytes.toLong(column.getKey()));
  Assert.assertEquals(4L, Bytes.toLong(column.getValue()));
  column = colIterator.next();
  Assert.assertEquals(7L, Bytes.toLong(column.getKey()));
  Assert.assertEquals(7L, Bytes.toLong(column.getValue()));
}
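The assertions above walk the Scanner row by row with hard-coded expectations. The same traversal can be written generically; this is a hedged sketch, assuming (as the test does) that row keys, column qualifiers, and values are all 8-byte long encodings, and using only the scan, Row, and Bytes calls that appear in the test.

// Generic walk over the combined table: iterate every row and every column the
// Scanner returns, decoding the 8-byte keys and values as longs.
try (Scanner scanner = combinedMetricsTable.scan(null, null, null)) {
  Row row;
  while ((row = scanner.next()) != null) {
    long rowKey = Bytes.toLong(row.getRow());
    for (Map.Entry<byte[], byte[]> column : row.getColumns().entrySet()) {
      System.out.printf("row=%d column=%d value=%d%n",
                        rowKey, Bytes.toLong(column.getKey()), Bytes.toLong(column.getValue()));
    }
  }
}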
Use of co.cask.cdap.api.dataset.table.Scanner in project cdap by caskdata.
The class KeyValueTable, method scan.
/**
* Scans the table.
* @param startRow start row inclusive. {@code null} means start from first row of the table
* @param stopRow stop row exclusive. {@code null} means scan all rows to the end of the table
* @return {@link co.cask.cdap.api.dataset.lib.CloseableIterator} of
* {@link KeyValue KeyValue<byte[], byte[]>}
*/
public CloseableIterator<KeyValue<byte[], byte[]>> scan(byte[] startRow, byte[] stopRow) {
  final Scanner scanner = table.scan(startRow, stopRow);
  return new AbstractCloseableIterator<KeyValue<byte[], byte[]>>() {
    private boolean closed = false;

    @Override
    protected KeyValue<byte[], byte[]> computeNext() {
      if (closed) {
        return endOfData();
      }
      Row next = scanner.next();
      if (next != null) {
        return new KeyValue<>(next.getRow(), next.get(KEY_COLUMN));
      }
      // Scanner is exhausted: release it and signal end of iteration
      close();
      return null;
    }

    @Override
    public void close() {
      scanner.close();
      endOfData();
      closed = true;
    }
  };
}
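A hedged usage sketch of the iterator returned by this method: consuming it in try-with-resources guarantees the underlying Scanner is closed even if the caller stops early. The kvTable variable and the string decoding are illustrative assumptions; keys and values are stored as raw byte[].

// Illustrative caller: null bounds scan the whole table, per the javadoc above.
try (CloseableIterator<KeyValue<byte[], byte[]>> rows = kvTable.scan(null, null)) {
  while (rows.hasNext()) {
    KeyValue<byte[], byte[]> row = rows.next();
    System.out.println(Bytes.toString(row.getKey()) + " -> " + Bytes.toString(row.getValue()));
  }
}

Closing the iterator early simply closes the Scanner; as shown above, computeNext() also closes it automatically once the scan is exhausted.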
Use of co.cask.cdap.api.dataset.table.Scanner in project cdap by caskdata.
The class ArtifactStore, method getPluginClasses.
/**
* Get all plugin classes of the given type that extend the given parent artifact.
* Results are returned as a map from plugin artifact to plugins in that artifact.
*
* @param namespace the namespace to search for plugins. The system namespace is always included
* @param parentArtifactId the id of the artifact to find plugins for
* @param type the type of plugin to look for, or {@code null} to match any type
* @return an unmodifiable map of plugin artifact to plugin classes for all plugin classes accessible by the
* given artifact. The map will never be null. If there are no plugin classes, an empty map will be returned.
* @throws ArtifactNotFoundException if the artifact to find plugins for does not exist
* @throws IOException if there was an exception reading metadata from the metastore
*/
public SortedMap<ArtifactDescriptor, Set<PluginClass>> getPluginClasses(NamespaceId namespace,
                                                                        Id.Artifact parentArtifactId,
                                                                        @Nullable String type)
  throws ArtifactNotFoundException, IOException {
  return Transactionals.execute(transactional, context -> {
    Table metaTable = getMetaTable(context);
    SortedMap<ArtifactDescriptor, Set<PluginClass>> plugins =
      getPluginsInArtifact(metaTable, parentArtifactId, input -> type == null || type.equals(input.getType()));
    List<Scan> scans = Arrays.asList(scanPlugins(parentArtifactId, type),
                                     scanUniversalPlugin(namespace.getNamespace(), type),
                                     scanUniversalPlugin(NamespaceId.SYSTEM.getNamespace(), type));
    for (Scan scan : scans) {
      try (Scanner scanner = metaTable.scan(scan)) {
        Row row;
        while ((row = scanner.next()) != null) {
          addPluginsToMap(namespace, parentArtifactId, plugins, row);
        }
      }
    }
    return Collections.unmodifiableSortedMap(plugins);
  }, ArtifactNotFoundException.class, IOException.class);
}
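A hedged sketch of calling getPluginClasses from client code. The artifactStore instance, the namespace and parentArtifactId variables, and the plugin type string are illustrative assumptions, and the fragment assumes the enclosing method declares or handles ArtifactNotFoundException and IOException.

// Illustrative only: list plugin classes of an assumed type visible to a parent artifact.
SortedMap<ArtifactDescriptor, Set<PluginClass>> plugins =
    artifactStore.getPluginClasses(namespace, parentArtifactId, "batchsource");
for (Map.Entry<ArtifactDescriptor, Set<PluginClass>> entry : plugins.entrySet()) {
  System.out.println(entry.getKey() + ": " + entry.getValue().size() + " plugin classes");
}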