Use of com.palantir.nexus.db.sql.AgnosticLightResultRow in project atlasdb by palantir.
The class DbKvs, method extractResults.
@SuppressWarnings("deprecation")
private Map<Cell, Value> extractResults(DbReadTable table, TableReference tableRef, ClosableIterator<AgnosticLightResultRow> rows) {
    Map<Cell, Value> results = Maps.newHashMap();
    Map<Cell, OverflowValue> overflowResults = Maps.newHashMap();
    try (ClosableIterator<AgnosticLightResultRow> iter = rows) {
        boolean hasOverflow = table.hasOverflowValues();
        while (iter.hasNext()) {
            AgnosticLightResultRow row = iter.next();
            Cell cell = Cell.create(row.getBytes(ROW), row.getBytes(COL));
            Long overflowId = hasOverflow ? row.getLongObject("overflow") : null;
            if (overflowId == null) {
                Value value = Value.create(row.getBytes(VAL), row.getLong(TIMESTAMP));
                Value oldValue = results.put(cell, value);
                if (oldValue != null && oldValue.getTimestamp() > value.getTimestamp()) {
                    results.put(cell, oldValue);
                }
            } else {
                OverflowValue ov = ImmutableOverflowValue.of(row.getLong(TIMESTAMP), overflowId);
                OverflowValue oldOv = overflowResults.put(cell, ov);
                if (oldOv != null && oldOv.ts() > ov.ts()) {
                    overflowResults.put(cell, oldOv);
                }
            }
        }
    }
    fillOverflowValues(table.getConnectionSupplier(), tableRef, overflowResults, results);
    return results;
}
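The merge in extractResults keeps, for each cell, the value with the greatest timestamp regardless of the order in which the database returns rows: the new value is put first, and the old one is restored if it turns out to be newer. A minimal, self-contained sketch of that pattern using plain JDK types (LatestWinsMerge, TimestampedValue, and putLatest are illustrative names, not AtlasDB API; records require Java 16+):

import java.util.HashMap;
import java.util.Map;

final class LatestWinsMerge {
    // Hypothetical stand-in for AtlasDB's Value (contents plus write timestamp).
    record TimestampedValue(byte[] contents, long timestamp) {}

    // Keep the entry with the greatest timestamp per key, mirroring the
    // put-then-restore logic in extractResults above.
    static void putLatest(Map<String, TimestampedValue> results, String cell, TimestampedValue value) {
        TimestampedValue oldValue = results.put(cell, value);
        if (oldValue != null && oldValue.timestamp() > value.timestamp()) {
            results.put(cell, oldValue); // the previous entry was newer; restore it
        }
    }

    public static void main(String[] args) {
        Map<String, TimestampedValue> results = new HashMap<>();
        putLatest(results, "cell-1", new TimestampedValue(new byte[] {1}, 10L));
        putLatest(results, "cell-1", new TimestampedValue(new byte[] {2}, 5L));
        System.out.println(results.get("cell-1").timestamp()); // prints 10
    }
}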
Use of com.palantir.nexus.db.sql.AgnosticLightResultRow in project atlasdb by palantir.
The class DbKvs, method extractRowColumnRangePageInternal.
private Map<byte[], List<Map.Entry<Cell, Value>>> extractRowColumnRangePageInternal(DbReadTable table, TableReference tableRef, Supplier<ClosableIterator<AgnosticLightResultRow>> rowLoader, Collection<byte[]> allRows) {
    Map<Sha256Hash, byte[]> hashesToBytes = Maps.newHashMapWithExpectedSize(allRows.size());
    Map<Sha256Hash, List<Cell>> cellsByRow = Maps.newHashMap();
    for (byte[] row : allRows) {
        Sha256Hash rowHash = Sha256Hash.computeHash(row);
        hashesToBytes.put(rowHash, row);
        cellsByRow.put(rowHash, Lists.newArrayList());
    }
    boolean hasOverflow = table.hasOverflowValues();
    Map<Cell, Value> values = Maps.newHashMap();
    Map<Cell, OverflowValue> overflowValues = Maps.newHashMap();
    try (ClosableIterator<AgnosticLightResultRow> iter = rowLoader.get()) {
        while (iter.hasNext()) {
            AgnosticLightResultRow row = iter.next();
            Cell cell = Cell.create(row.getBytes(ROW), row.getBytes(COL));
            Sha256Hash rowHash = Sha256Hash.computeHash(cell.getRowName());
            cellsByRow.get(rowHash).add(cell);
            Long overflowId = hasOverflow ? row.getLongObject("overflow") : null;
            if (overflowId == null) {
                Value value = Value.create(row.getBytes(VAL), row.getLong(TIMESTAMP));
                Value oldValue = values.put(cell, value);
                if (oldValue != null && oldValue.getTimestamp() > value.getTimestamp()) {
                    values.put(cell, oldValue);
                }
            } else {
                OverflowValue ov = ImmutableOverflowValue.of(row.getLong(TIMESTAMP), overflowId);
                OverflowValue oldOv = overflowValues.put(cell, ov);
                if (oldOv != null && oldOv.ts() > ov.ts()) {
                    overflowValues.put(cell, oldOv);
                }
            }
        }
    }
    fillOverflowValues(table.getConnectionSupplier(), tableRef, overflowValues, values);
    Map<byte[], List<Map.Entry<Cell, Value>>> results = Maps.newHashMapWithExpectedSize(allRows.size());
    for (Entry<Sha256Hash, List<Cell>> e : cellsByRow.entrySet()) {
        List<Map.Entry<Cell, Value>> fullResults = Lists.newArrayListWithExpectedSize(e.getValue().size());
        for (Cell c : e.getValue()) {
            fullResults.add(Iterables.getOnlyElement(ImmutableMap.of(c, values.get(c)).entrySet()));
        }
        results.put(hashesToBytes.get(e.getKey()), fullResults);
    }
    return results;
}
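extractRowColumnRangePageInternal keys its intermediate maps by Sha256Hash rather than by the raw byte[] row name, because Java arrays use identity-based equals/hashCode and therefore do not behave as value keys in a HashMap. A small JDK-only sketch of that pitfall and the wrapper-key fix (ByteBuffer stands in for the hash wrapper here; RowKeyGrouping is an illustrative name, not part of atlasdb):

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class RowKeyGrouping {
    public static void main(String[] args) {
        byte[] rowA = {0x01};
        byte[] sameRowA = {0x01};

        // Raw byte[] keys compare by identity, so logically equal rows end up in separate buckets.
        Map<byte[], List<String>> byArray = new HashMap<>();
        byArray.computeIfAbsent(rowA, k -> new ArrayList<>()).add("cell-1");
        System.out.println(byArray.containsKey(sameRowA)); // false

        // Wrapping the bytes in a key type with value equality (ByteBuffer here,
        // Sha256Hash in DbKvs) groups all cells of the same row together.
        Map<ByteBuffer, List<String>> byWrappedKey = new HashMap<>();
        byWrappedKey.computeIfAbsent(ByteBuffer.wrap(rowA), k -> new ArrayList<>()).add("cell-1");
        System.out.println(byWrappedKey.containsKey(ByteBuffer.wrap(sameRowA))); // true
    }
}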
Use of com.palantir.nexus.db.sql.AgnosticLightResultRow in project atlasdb by palantir.
The class TimestampsByCellResultWithToken, method checkNextEntryAndCreateToken.
private TimestampsByCellResultWithToken checkNextEntryAndCreateToken() {
    boolean singleRow = finishCellIfNoRowsYet();
    if (iterator.hasNext()) {
        moreResults = true;
        AgnosticLightResultRow nextEntry = iterator.peek();
        if (Arrays.equals(nextEntry.getBytes(DbKvs.ROW), currentRow)) {
            token = ImmutableToken.builder().row(currentRow).col(currentCol).shouldSkip(singleRow).build();
        } else {
            flushRowBuffer();
            token = ImmutableToken.builder().row(nextEntry.getBytes(DbKvs.ROW)).shouldSkip(false).build();
        }
    } else {
        flushRowBuffer();
        if (currentRow != null) {
            byte[] nextRow = RangeRequests.getNextStartRowUnlessTerminal(reverse, currentRow);
            if (nextRow != null) {
                moreResults = true;
                token = ImmutableToken.builder().row(nextRow).shouldSkip(false).build();
            }
        }
    }
    return this;
}
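checkNextEntryAndCreateToken peeks at the next result row without consuming it, which lets it decide whether the continuation token should resume inside the current row or at the start of the next one. A minimal sketch of that look-ahead pattern using Guava's PeekingIterator (the rows here are plain "row:column" strings and PeekAheadPaging is an illustrative name; this is not the atlasdb token type):

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import java.util.List;

final class PeekAheadPaging {
    public static void main(String[] args) {
        List<String> rows = List.of("row1:colA", "row1:colB", "row2:colA");
        PeekingIterator<String> iterator = Iterators.peekingIterator(rows.iterator());

        String current = iterator.next();
        String currentRow = current.split(":")[0];

        // Peek at the next entry to decide whether the page ends mid-row
        // (resume within the same row) or at a row boundary (resume at the next row).
        if (iterator.hasNext()) {
            String nextRow = iterator.peek().split(":")[0];
            if (nextRow.equals(currentRow)) {
                System.out.println("resume within row " + currentRow);
            } else {
                System.out.println("resume at next row " + nextRow);
            }
        }
    }
}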
Use of com.palantir.nexus.db.sql.AgnosticLightResultRow in project atlasdb by palantir.
The class TimestampsByCellResultWithToken, method getBatchOfTimestamps.
private TimestampsByCellResultWithToken getBatchOfTimestamps(long batchSize) {
    while (iterator.hasNext() && entries.size() + rowBuffer.size() < batchSize) {
        AgnosticLightResultRow cellResult = iterator.next();
        store(cellResult);
    }
    return this;
}
Use of com.palantir.nexus.db.sql.AgnosticLightResultRow in project atlasdb by palantir.
The class DbKvs, method doGetAllTimestamps.
private static Multimap<Cell, Long> doGetAllTimestamps(DbReadTable table, Iterable<Cell> cells, long timestamp) {
    try (ClosableIterator<AgnosticLightResultRow> iter = table.getAllCells(cells, timestamp, false)) {
        Multimap<Cell, Long> results = ArrayListMultimap.create();
        while (iter.hasNext()) {
            AgnosticLightResultRow row = iter.next();
            Cell cell = Cell.create(row.getBytes(ROW), row.getBytes(COL));
            long ts = row.getLong(TIMESTAMP);
            results.put(cell, ts);
        }
        return results;
    }
}
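doGetAllTimestamps can see several timestamps for the same cell, so it accumulates into a Multimap rather than a plain Map. A JDK-only sketch of the same accumulation, using Map.computeIfAbsent in place of Guava's ArrayListMultimap (TimestampsByCell and the string keys are illustrative only):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class TimestampsByCell {
    public static void main(String[] args) {
        // Each cell may have several timestamps; computeIfAbsent gives multimap-like behaviour.
        Map<String, List<Long>> timestampsByCell = new HashMap<>();
        timestampsByCell.computeIfAbsent("cell-1", k -> new ArrayList<>()).add(5L);
        timestampsByCell.computeIfAbsent("cell-1", k -> new ArrayList<>()).add(7L);
        timestampsByCell.computeIfAbsent("cell-2", k -> new ArrayList<>()).add(3L);
        System.out.println(timestampsByCell); // {cell-1=[5, 7], cell-2=[3]}
    }
}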