Use of java.util.NavigableMap in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testWriteApplicationToHBase:
@Test
public void testWriteApplicationToHBase() throws Exception {
TimelineEntities te = new TimelineEntities();
ApplicationEntity entity = new ApplicationEntity();
String appId = "application_1000178881110_2002";
entity.setId(appId);
Long cTime = 1425016501000L;
entity.setCreatedTime(cTime);
// add the info map in Timeline Entity
Map<String, Object> infoMap = new HashMap<String, Object>();
infoMap.put("infoMapKey1", "infoMapValue1");
infoMap.put("infoMapKey2", 10);
entity.addInfo(infoMap);
// add the isRelatedToEntity info
String key = "task";
String value = "is_related_to_entity_id_here";
Set<String> isRelatedToSet = new HashSet<String>();
isRelatedToSet.add(value);
Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
isRelatedTo.put(key, isRelatedToSet);
entity.setIsRelatedToEntities(isRelatedTo);
// add the relatesTo info
key = "container";
value = "relates_to_entity_id_here";
Set<String> relatesToSet = new HashSet<String>();
relatesToSet.add(value);
value = "relates_to_entity_id_here_Second";
relatesToSet.add(value);
Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
relatesTo.put(key, relatesToSet);
entity.setRelatesToEntities(relatesTo);
// add some config entries
Map<String, String> conf = new HashMap<String, String>();
conf.put("config_param1", "value1");
conf.put("config_param2", "value2");
entity.addConfigs(conf);
// add metrics
Set<TimelineMetric> metrics = new HashSet<>();
TimelineMetric m1 = new TimelineMetric();
m1.setId("MAP_SLOT_MILLIS");
Map<Long, Number> metricValues = new HashMap<Long, Number>();
long ts = System.currentTimeMillis();
metricValues.put(ts - 120000, 100000000);
metricValues.put(ts - 100000, 200000000);
metricValues.put(ts - 80000, 300000000);
metricValues.put(ts - 60000, 400000000);
metricValues.put(ts - 40000, 50000000000L);
metricValues.put(ts - 20000, 60000000000L);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
entity.addMetrics(metrics);
// add aggregated metrics
TimelineEntity aggEntity = new TimelineEntity();
String type = TimelineEntityType.YARN_APPLICATION.toString();
aggEntity.setId(appId);
aggEntity.setType(type);
long cTime2 = 1425016502000L;
aggEntity.setCreatedTime(cTime2);
TimelineMetric aggMetric = new TimelineMetric();
aggMetric.setId("MEM_USAGE");
Map<Long, Number> aggMetricValues = new HashMap<Long, Number>();
long aggTs = ts;
aggMetricValues.put(aggTs - 120000, 102400000L);
aggMetric.setType(Type.SINGLE_VALUE);
aggMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
aggMetric.setValues(aggMetricValues);
Set<TimelineMetric> aggMetrics = new HashSet<>();
aggMetrics.add(aggMetric);
entity.addMetrics(aggMetrics);
te.addEntity(entity);
HBaseTimelineWriterImpl hbi = null;
try {
Configuration c1 = util.getConfiguration();
hbi = new HBaseTimelineWriterImpl();
hbi.init(c1);
hbi.start();
String cluster = "cluster_test_write_app";
String user = "user1";
String flow = "s!ome_f\tlow _n am!e";
String flowVersion = "AB7822C10F1111";
long runid = 1002345678919L;
hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
// Write entity again, this time without created time.
entity = new ApplicationEntity();
appId = "application_1000178881110_2002";
entity.setId(appId);
// add the info map in Timeline Entity
Map<String, Object> infoMap1 = new HashMap<>();
infoMap1.put("infoMapKey3", "infoMapValue1");
entity.addInfo(infoMap1);
te = new TimelineEntities();
te.addEntity(entity);
hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
hbi.stop();
infoMap.putAll(infoMap1);
// retrieve the row
ApplicationRowKey applicationRowKey = new ApplicationRowKey(cluster, user, flow, runid, appId);
byte[] rowKey = applicationRowKey.getRowKey();
Get get = new Get(rowKey);
get.setMaxVersions(Integer.MAX_VALUE);
Connection conn = ConnectionFactory.createConnection(c1);
Result result = new ApplicationTable().getResult(c1, conn, get);
assertTrue(result != null);
assertEquals(17, result.size());
// check the row key
byte[] row1 = result.getRow();
assertTrue(isApplicationRowKeyCorrect(row1, cluster, user, flow, runid, appId));
// check info column family
String id1 = ApplicationColumn.ID.readResult(result).toString();
assertEquals(appId, id1);
Long cTime1 = (Long) ApplicationColumn.CREATED_TIME.readResult(result);
assertEquals(cTime, cTime1);
Map<String, Object> infoColumns = ApplicationColumnPrefix.INFO.readResults(result, new StringKeyConverter());
assertEquals(infoMap, infoColumns);
// Remember isRelatedTo is of type Map<String, Set<String>>
for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo.entrySet()) {
Object isRelatedToValue = ApplicationColumnPrefix.IS_RELATED_TO.readResult(result, isRelatedToEntry.getKey());
String compoundValue = isRelatedToValue.toString();
// id7?id9?id6
Set<String> isRelatedToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(), isRelatedToValues.size());
for (String v : isRelatedToEntry.getValue()) {
assertTrue(isRelatedToValues.contains(v));
}
}
// RelatesTo
for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo.entrySet()) {
String compoundValue = ApplicationColumnPrefix.RELATES_TO.readResult(result, relatesToEntry.getKey()).toString();
// id3?id4?id5
Set<String> relatesToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
assertEquals(relatesTo.get(relatesToEntry.getKey()).size(), relatesToValues.size());
for (String v : relatesToEntry.getValue()) {
assertTrue(relatesToValues.contains(v));
}
}
KeyConverter<String> stringKeyConverter = new StringKeyConverter();
// Configuration
Map<String, Object> configColumns = ApplicationColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
assertEquals(conf, configColumns);
NavigableMap<String, NavigableMap<Long, Number>> metricsResult = ApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result, stringKeyConverter);
NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
matchMetrics(metricValues, metricMap);
// read the timeline entity using the reader this time. In metrics limit
// specify Integer.MAX_VALUE. A TIME_SERIES will be returned (if more than
// one value exists for a metric).
TimelineEntity e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), Integer.MAX_VALUE));
assertNotNull(e1);
// verify attributes
assertEquals(appId, e1.getId());
assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
assertEquals(cTime, e1.getCreatedTime());
Map<String, Object> infoMap2 = e1.getInfo();
assertEquals(infoMap, infoMap2);
Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
assertEquals(isRelatedTo, isRelatedTo2);
Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
assertEquals(relatesTo, relatesTo2);
Map<String, String> conf2 = e1.getConfigs();
assertEquals(conf, conf2);
Set<TimelineMetric> metrics2 = e1.getMetrics();
assertEquals(2, metrics2.size());
for (TimelineMetric metric2 : metrics2) {
Map<Long, Number> metricValues2 = metric2.getValues();
assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") || metric2.getId().equals("MEM_USAGE"));
if (metric2.getId().equals("MAP_SLOT_MILLIS")) {
assertEquals(6, metricValues2.size());
matchMetrics(metricValues, metricValues2);
}
if (metric2.getId().equals("MEM_USAGE")) {
assertEquals(1, metricValues2.size());
matchMetrics(aggMetricValues, metricValues2);
}
}
// In metrics limit specify a value of 3. No more than 3 values for a
// metric will be returned.
e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), 3));
assertNotNull(e1);
assertEquals(appId, e1.getId());
assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
assertEquals(conf, e1.getConfigs());
metrics2 = e1.getMetrics();
assertEquals(2, metrics2.size());
for (TimelineMetric metric2 : metrics2) {
Map<Long, Number> metricValues2 = metric2.getValues();
assertTrue(metricValues2.size() <= 3);
assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") || metric2.getId().equals("MEM_USAGE"));
}
// Check if a single value (the latest value) instead of a time series is returned
// if metricslimit is not set (null), irrespective of the number of metric
// values.
e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), null));
assertNotNull(e1);
assertEquals(appId, e1.getId());
assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
assertEquals(cTime, e1.getCreatedTime());
assertEquals(infoMap, e1.getInfo());
assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
assertEquals(relatesTo, e1.getRelatesToEntities());
assertEquals(conf, e1.getConfigs());
assertEquals(2, e1.getMetrics().size());
for (TimelineMetric metric : e1.getMetrics()) {
assertEquals(1, metric.getValues().size());
assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
assertTrue(metric.getId().equals("MAP_SLOT_MILLIS") || metric.getId().equals("MEM_USAGE"));
assertEquals(1, metric.getValues().size());
if (metric.getId().equals("MAP_SLOT_MILLIS")) {
assertTrue(metric.getValues().containsKey(ts - 20000));
assertEquals(metricValues.get(ts - 20000), metric.getValues().get(ts - 20000));
}
if (metric.getId().equals("MEM_USAGE")) {
assertTrue(metric.getValues().containsKey(aggTs - 120000));
assertEquals(aggMetricValues.get(aggTs - 120000), metric.getValues().get(aggTs - 120000));
}
}
} finally {
if (hbi != null) {
hbi.stop();
hbi.close();
}
}
}
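Both writes above are verified with matchMetrics, a helper defined elsewhere in the test class that compares the values originally written against the NavigableMap<Long, Number> read back through readResultsWithTimestamps. Its exact implementation is not shown here; a minimal sketch of what such a comparison might look like (the helper name and its semantics are assumptions, not the project's code):

    private static void matchMetricsSketch(Map<Long, Number> expected,
        Map<Long, Number> actual) {
      // same number of timestamped samples
      assertEquals(expected.size(), actual.size());
      for (Map.Entry<Long, Number> entry : expected.entrySet()) {
        Number read = actual.get(entry.getKey());
        assertNotNull("no value read back for timestamp " + entry.getKey(), read);
        // compare as longs so Integer values written and Long values read back
        // still match
        assertEquals(entry.getValue().longValue(), read.longValue());
      }
    }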
Use of java.util.NavigableMap in project hadoop by apache.
From the class TestHBaseTimelineStorageEntities, method testWriteEntityToHBase:
@Test
public void testWriteEntityToHBase() throws Exception {
TimelineEntities te = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
String id = "hello";
String type = "world";
entity.setId(id);
entity.setType(type);
Long cTime = 1425016501000L;
entity.setCreatedTime(cTime);
// add the info map in Timeline Entity
Map<String, Object> infoMap = new HashMap<String, Object>();
infoMap.put("infoMapKey1", "infoMapValue1");
infoMap.put("infoMapKey2", 10);
entity.addInfo(infoMap);
// add the isRelatedToEntity info
String key = "task";
String value = "is_related_to_entity_id_here";
Set<String> isRelatedToSet = new HashSet<String>();
isRelatedToSet.add(value);
Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
isRelatedTo.put(key, isRelatedToSet);
entity.setIsRelatedToEntities(isRelatedTo);
// add the relatesTo info
key = "container";
value = "relates_to_entity_id_here";
Set<String> relatesToSet = new HashSet<String>();
relatesToSet.add(value);
value = "relates_to_entity_id_here_Second";
relatesToSet.add(value);
Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
relatesTo.put(key, relatesToSet);
entity.setRelatesToEntities(relatesTo);
// add some config entries
Map<String, String> conf = new HashMap<String, String>();
conf.put("config_param1", "value1");
conf.put("config_param2", "value2");
entity.addConfigs(conf);
// add metrics
Set<TimelineMetric> metrics = new HashSet<>();
TimelineMetric m1 = new TimelineMetric();
m1.setId("MAP_SLOT_MILLIS");
Map<Long, Number> metricValues = new HashMap<Long, Number>();
long ts = System.currentTimeMillis();
metricValues.put(ts - 120000, 100000000);
metricValues.put(ts - 100000, 200000000);
metricValues.put(ts - 80000, 300000000);
metricValues.put(ts - 60000, 400000000);
metricValues.put(ts - 40000, 50000000000L);
metricValues.put(ts - 20000, 60000000000L);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
entity.addMetrics(metrics);
te.addEntity(entity);
HBaseTimelineWriterImpl hbi = null;
try {
Configuration c1 = util.getConfiguration();
hbi = new HBaseTimelineWriterImpl();
hbi.init(c1);
hbi.start();
String cluster = "cluster_test_write_entity";
String user = "user1";
String flow = "some_flow_name";
String flowVersion = "AB7822C10F1111";
long runid = 1002345678919L;
String appName = ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1).toString();
hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
hbi.stop();
// scan the table and see that entity exists
Scan s = new Scan();
byte[] startRow = new EntityRowKeyPrefix(cluster, user, flow, runid, appName).getRowKeyPrefix();
s.setStartRow(startRow);
s.setMaxVersions(Integer.MAX_VALUE);
Connection conn = ConnectionFactory.createConnection(c1);
ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
int rowCount = 0;
int colCount = 0;
KeyConverter<String> stringKeyConverter = new StringKeyConverter();
for (Result result : scanner) {
if (result != null && !result.isEmpty()) {
rowCount++;
colCount += result.size();
byte[] row1 = result.getRow();
assertTrue(isRowKeyCorrect(row1, cluster, user, flow, runid, appName, entity));
// check info column family
String id1 = EntityColumn.ID.readResult(result).toString();
assertEquals(id, id1);
String type1 = EntityColumn.TYPE.readResult(result).toString();
assertEquals(type, type1);
Long cTime1 = (Long) EntityColumn.CREATED_TIME.readResult(result);
assertEquals(cTime1, cTime);
Map<String, Object> infoColumns = EntityColumnPrefix.INFO.readResults(result, new StringKeyConverter());
assertEquals(infoMap, infoColumns);
// Remember isRelatedTo is of type Map<String, Set<String>>
for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo.entrySet()) {
Object isRelatedToValue = EntityColumnPrefix.IS_RELATED_TO.readResult(result, isRelatedToEntry.getKey());
String compoundValue = isRelatedToValue.toString();
// id7?id9?id6
Set<String> isRelatedToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(), isRelatedToValues.size());
for (String v : isRelatedToEntry.getValue()) {
assertTrue(isRelatedToValues.contains(v));
}
}
// RelatesTo
for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo.entrySet()) {
String compoundValue = EntityColumnPrefix.RELATES_TO.readResult(result, relatesToEntry.getKey()).toString();
// id3?id4?id5
Set<String> relatesToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
assertEquals(relatesTo.get(relatesToEntry.getKey()).size(), relatesToValues.size());
for (String v : relatesToEntry.getValue()) {
assertTrue(relatesToValues.contains(v));
}
}
// Configuration
Map<String, Object> configColumns = EntityColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
assertEquals(conf, configColumns);
NavigableMap<String, NavigableMap<Long, Number>> metricsResult = EntityColumnPrefix.METRIC.readResultsWithTimestamps(result, stringKeyConverter);
NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
matchMetrics(metricValues, metricMap);
}
}
assertEquals(1, rowCount);
assertEquals(16, colCount);
// read the timeline entity using the reader this time
TimelineEntity e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), Integer.MAX_VALUE));
Set<TimelineEntity> es1 = reader.getEntities(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), null), new TimelineEntityFilters(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), Integer.MAX_VALUE));
assertNotNull(e1);
assertEquals(1, es1.size());
// verify attributes
assertEquals(id, e1.getId());
assertEquals(type, e1.getType());
assertEquals(cTime, e1.getCreatedTime());
Map<String, Object> infoMap2 = e1.getInfo();
assertEquals(infoMap, infoMap2);
Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
assertEquals(isRelatedTo, isRelatedTo2);
Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
assertEquals(relatesTo, relatesTo2);
Map<String, String> conf2 = e1.getConfigs();
assertEquals(conf, conf2);
Set<TimelineMetric> metrics2 = e1.getMetrics();
assertEquals(metrics, metrics2);
for (TimelineMetric metric2 : metrics2) {
Map<Long, Number> metricValues2 = metric2.getValues();
matchMetrics(metricValues, metricValues2);
}
e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertNotNull(e1);
assertEquals(id, e1.getId());
assertEquals(type, e1.getType());
assertEquals(cTime, e1.getCreatedTime());
assertEquals(infoMap, e1.getInfo());
assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
assertEquals(relatesTo, e1.getRelatesToEntities());
assertEquals(conf, e1.getConfigs());
for (TimelineMetric metric : e1.getMetrics()) {
assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
assertEquals(1, metric.getValues().size());
assertTrue(metric.getValues().containsKey(ts - 20000));
assertEquals(metricValues.get(ts - 20000), metric.getValues().get(ts - 20000));
}
} finally {
if (hbi != null) {
hbi.stop();
hbi.close();
}
}
}
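Because readResultsWithTimestamps hands each metric's time series back as a NavigableMap<Long, Number>, callers can navigate the samples by timestamp without sorting them again. A small, self-contained sketch using only java.util (it assumes natural ascending ordering of the Long timestamps, which a plain TreeMap provides):

    NavigableMap<Long, Number> series = new TreeMap<>(metricValues);
    Map.Entry<Long, Number> newest = series.lastEntry();   // most recent sample
    Map.Entry<Long, Number> oldest = series.firstEntry();  // earliest sample
    // walk the series newest-to-oldest without copying it
    for (Map.Entry<Long, Number> sample : series.descendingMap().entrySet()) {
      System.out.println(sample.getKey() + " -> " + sample.getValue());
    }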
Use of java.util.NavigableMap in project hadoop by apache.
From the class TimelineEntityReader, method readMetrics:
/**
* Helper method for reading and deserializing {@link TimelineMetric} objects
* using the specified column prefix. The timeline metrics then are added to
* the given timeline entity.
*
* @param entity {@link TimelineEntity} object.
* @param result {@link Result} object retrieved from backend.
* @param columnPrefix Metric column prefix
* @throws IOException if any exception is encountered while reading metrics.
*/
protected void readMetrics(TimelineEntity entity, Result result, ColumnPrefix<?> columnPrefix) throws IOException {
NavigableMap<String, NavigableMap<Long, Number>> metricsResult = columnPrefix.readResultsWithTimestamps(result, stringKeyConverter);
for (Map.Entry<String, NavigableMap<Long, Number>> metricResult : metricsResult.entrySet()) {
TimelineMetric metric = new TimelineMetric();
metric.setId(metricResult.getKey());
// Simply assume that if the value set contains more than one element, the
// metric is a TIME_SERIES metric; otherwise it is a SINGLE_VALUE metric.
TimelineMetric.Type metricType = metricResult.getValue().size() > 1 ? TimelineMetric.Type.TIME_SERIES : TimelineMetric.Type.SINGLE_VALUE;
metric.setType(metricType);
metric.addValues(metricResult.getValue());
entity.addMetric(metric);
}
}
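The readResultsWithTimestamps call is what gathers every stored cell version for a metric qualifier into a single NavigableMap<Long, Number>; the number of entries in that map then drives the metric type. The same decision, shown on a plain NavigableMap for illustration (the timestamp and value below are made up):

    NavigableMap<Long, Number> values = new TreeMap<Long, Number>();
    values.put(1425016501000L, 100000000L);   // a single sample
    TimelineMetric metric = new TimelineMetric();
    metric.setId("MAP_SLOT_MILLIS");
    // one sample -> SINGLE_VALUE, more than one -> TIME_SERIES
    metric.setType(values.size() > 1
        ? TimelineMetric.Type.TIME_SERIES
        : TimelineMetric.Type.SINGLE_VALUE);
    metric.addValues(values);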
Use of java.util.NavigableMap in project hadoop by apache.
From the class FlowRunCoprocessor, method prePut:
/*
* (non-Javadoc)
*
* This method adds the tags onto the cells in the Put. It is presumed that
* all the cells in one Put have the same set of Tags. The existing cell
* timestamp is overwritten for non-metric cells and each such cell gets a new
* unique timestamp generated by {@link TimestampGenerator}
*
* @see
* org.apache.hadoop.hbase.coprocessor.BaseRegionObserver#prePut(org.apache
* .hadoop.hbase.coprocessor.ObserverContext,
* org.apache.hadoop.hbase.client.Put,
* org.apache.hadoop.hbase.regionserver.wal.WALEdit,
* org.apache.hadoop.hbase.client.Durability)
*/
@Override
public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put put, WALEdit edit, Durability durability) throws IOException {
Map<String, byte[]> attributes = put.getAttributesMap();
if (!isFlowRunRegion) {
return;
}
// Assumption is that all the cells in a put are the same operation.
List<Tag> tags = new ArrayList<>();
if ((attributes != null) && (attributes.size() > 0)) {
for (Map.Entry<String, byte[]> attribute : attributes.entrySet()) {
Tag t = HBaseTimelineStorageUtils.getTagFromAttribute(attribute);
tags.add(t);
}
byte[] tagByteArray = Tag.fromList(tags);
NavigableMap<byte[], List<Cell>> newFamilyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], List<Cell>> entry : put.getFamilyCellMap().entrySet()) {
List<Cell> newCells = new ArrayList<>(entry.getValue().size());
for (Cell cell : entry.getValue()) {
// for each cell in the put add the tags
// Assumption is that all the cells in
// one put are the same operation
// also, get a unique cell timestamp for non-metric cells
// this way we don't inadvertently overwrite cell versions
long cellTimestamp = getCellTimestamp(cell.getTimestamp(), tags);
newCells.add(CellUtil.createCell(CellUtil.cloneRow(cell), CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), cellTimestamp, KeyValue.Type.Put, CellUtil.cloneValue(cell), tagByteArray));
}
newFamilyMap.put(entry.getKey(), newCells);
}
// for each entry
// Update the family map for the Put
put.setFamilyCellMap(newFamilyMap);
}
}
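Note that the rebuilt family map is a NavigableMap keyed by byte[]. byte[] does not implement Comparable, so the TreeMap must be given an explicit comparator; the code above uses Bytes.BYTES_COMPARATOR, HBase's lexicographic byte-array ordering. A minimal sketch of that pattern (family names here are illustrative):

    // byte[] keys need an explicit comparator: without one, TreeMap throws
    // ClassCastException because byte[] is not Comparable.
    NavigableMap<byte[], List<Cell>> familyMap =
        new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
    familyMap.put(Bytes.toBytes("i"), new ArrayList<Cell>());
    familyMap.put(Bytes.toBytes("c"), new ArrayList<Cell>());
    // iteration order follows the lexicographic byte ordering: "c" before "i"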
Use of java.util.NavigableMap in project hbase by apache.
From the class AsyncAggregationClient, method findMedian:
private static <R, S, P extends Message, Q extends Message, T extends Message> void findMedian(CompletableFuture<R> future, RawAsyncTable table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan, NavigableMap<byte[], S> sumByRegion) {
double halfSum = ci.divideForAvg(sumByRegion.values().stream().reduce(ci::add).get(), 2L);
S movingSum = null;
byte[] startRow = null;
for (Map.Entry<byte[], S> entry : sumByRegion.entrySet()) {
startRow = entry.getKey();
S newMovingSum = ci.add(movingSum, entry.getValue());
if (ci.divideForAvg(newMovingSum, 1L) > halfSum) {
break;
}
movingSum = newMovingSum;
}
if (startRow != null) {
scan.withStartRow(startRow);
}
// we cannot pass movingSum directly to the anonymous class as it is not final.
S baseSum = movingSum;
byte[] family = scan.getFamilies()[0];
NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(family);
byte[] weightQualifier = qualifiers.last();
byte[] valueQualifier = qualifiers.first();
table.scan(scan, new RawScanResultConsumer() {
private S sum = baseSum;
private R value = null;
@Override
public void onNext(Result[] results, ScanController controller) {
try {
for (Result result : results) {
Cell weightCell = result.getColumnLatestCell(family, weightQualifier);
R weight = ci.getValue(family, weightQualifier, weightCell);
sum = ci.add(sum, ci.castToReturnType(weight));
if (ci.divideForAvg(sum, 1L) > halfSum) {
if (value != null) {
future.complete(value);
} else {
future.completeExceptionally(new NoSuchElementException());
}
controller.terminate();
return;
}
Cell valueCell = result.getColumnLatestCell(family, valueQualifier);
value = ci.getValue(family, valueQualifier, valueCell);
}
} catch (IOException e) {
future.completeExceptionally(e);
controller.terminate();
}
}
@Override
public void onError(Throwable error) {
future.completeExceptionally(error);
}
@Override
public void onComplete() {
if (!future.isDone()) {
// we should not reach here as the future should be completed in onNext.
future.completeExceptionally(new NoSuchElementException());
}
}
});
}
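findMedian works in two passes: the NavigableMap of per-region sums locates the region that must contain the weighted median, and the scan that follows walks individual rows in that region until the accumulated weight crosses half of the total. The first pass, simplified with the generic ColumnInterpreter arithmetic replaced by plain long addition (an illustration, not the library code):

    static byte[] locateMedianRegion(NavigableMap<byte[], Long> sumByRegion) {
      long total = sumByRegion.values().stream().mapToLong(Long::longValue).sum();
      double halfSum = total / 2.0;
      long movingSum = 0L;
      byte[] startRow = null;
      for (Map.Entry<byte[], Long> entry : sumByRegion.entrySet()) {
        startRow = entry.getKey();
        long newMovingSum = movingSum + entry.getValue();
        if (newMovingSum > halfSum) {
          // the median falls inside the region that starts at startRow
          break;
        }
        movingSum = newMovingSum;
      }
      return startRow;  // null only if sumByRegion was empty
    }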