
Example 6 with Field

Use of org.apache.hadoop.hbase.hbtop.field.Field in project hbase by apache.

The class FieldScreenPresenter, method arrowDown:

public void arrowDown() {
    if (currentPosition < fields.size() - 1) {
        currentPosition += 1;
        if (moveMode) {
            Field tmp = fields.remove(currentPosition - 1);
            fields.add(currentPosition, tmp);
        }
        showField(currentPosition);
        showField(currentPosition - 1);
        fieldScreenView.refreshTerminal();
    }
}
Also used : Field(org.apache.hadoop.hbase.hbtop.field.Field)
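
Below is a standalone sketch (not part of the HBase sources) of the same move-with-cursor pattern, using plain strings instead of hbtop's Field enum; the class and variable names are illustrative only. When moveMode is on, the entry the cursor just left is removed from its old slot and re-inserted one position lower, so it travels with the cursor.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ArrowDownSketch {
    private final List<String> fields = new ArrayList<>(Arrays.asList("A", "B", "C"));
    private int currentPosition = 0;
    private boolean moveMode = true;

    public void arrowDown() {
        if (currentPosition < fields.size() - 1) {
            currentPosition += 1;
            if (moveMode) {
                // The selected entry travels with the cursor: remove it from its
                // old slot and re-insert it one position lower.
                String tmp = fields.remove(currentPosition - 1);
                fields.add(currentPosition, tmp);
            }
        }
    }

    public static void main(String[] args) {
        ArrowDownSketch sketch = new ArrowDownSketch();
        sketch.arrowDown();
        System.out.println(sketch.fields); // [B, A, C]: "A" has moved down one slot
    }
}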

Example 7 with Field

Use of org.apache.hadoop.hbase.hbtop.field.Field in project hbase by apache.

The class RegionServerModeStrategy, method getRecords:

@Override
public List<Record> getRecords(ClusterMetrics clusterMetrics, List<RecordFilter> pushDownFilters) {
    // Get records from RegionModeStrategy and add REGION_COUNT field
    List<Record> records = regionModeStrategy.selectModeFieldsAndAddCountField(fieldInfos,
        regionModeStrategy.getRecords(clusterMetrics, pushDownFilters), Field.REGION_COUNT);
    // Aggregation by LONG_REGION_SERVER field
    Map<String, Record> retMap =
        ModeStrategyUtils.aggregateRecords(records, Field.LONG_REGION_SERVER).stream()
            .collect(Collectors.toMap(r -> r.get(Field.LONG_REGION_SERVER).asString(), r -> r));
    // Add USED_HEAP_SIZE field and MAX_HEAP_SIZE field
    for (ServerMetrics sm : clusterMetrics.getLiveServerMetrics().values()) {
        Record record = retMap.get(sm.getServerName().getServerName());
        if (record == null) {
            continue;
        }
        Record newRecord = Record.builder().putAll(record)
            .put(Field.USED_HEAP_SIZE, sm.getUsedHeapSize())
            .put(Field.MAX_HEAP_SIZE, sm.getMaxHeapSize())
            .build();
        retMap.put(sm.getServerName().getServerName(), newRecord);
    }
    return new ArrayList<>(retMap.values());
}
Also used : RecordFilter(org.apache.hadoop.hbase.hbtop.RecordFilter) Arrays(java.util.Arrays) ServerMetrics(org.apache.hadoop.hbase.ServerMetrics) ClusterMetrics(org.apache.hadoop.hbase.ClusterMetrics) Collectors(java.util.stream.Collectors) ArrayList(java.util.ArrayList) List(java.util.List) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) Field(org.apache.hadoop.hbase.hbtop.field.Field) FieldInfo(org.apache.hadoop.hbase.hbtop.field.FieldInfo) Map(java.util.Map) Record(org.apache.hadoop.hbase.hbtop.Record) Collections(java.util.Collections)
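
A minimal standalone sketch of the enrich step above, assuming nothing beyond the JDK: for each live server the aggregated record is copied into a new map and the heap metric is attached, while servers without an aggregated record are skipped. Plain maps stand in for hbtop's immutable Record; all names and values are illustrative.

import java.util.HashMap;
import java.util.Map;

public class EnrichRecordsSketch {
    public static void main(String[] args) {
        // Aggregated records keyed by server name (stand-in for retMap above).
        Map<String, Map<String, Object>> retMap = new HashMap<>();
        retMap.put("rs1,16020,1", new HashMap<>(Map.of("REGION_COUNT", 42)));

        // Per-server heap metrics (stand-in for ClusterMetrics.getLiveServerMetrics()).
        Map<String, Long> usedHeapMb = Map.of("rs1,16020,1", 512L, "rs2,16020,1", 256L);

        for (Map.Entry<String, Long> e : usedHeapMb.entrySet()) {
            Map<String, Object> record = retMap.get(e.getKey());
            if (record == null) {
                // Same guard as in getRecords(): servers with no aggregated record are skipped.
                continue;
            }
            Map<String, Object> newRecord = new HashMap<>(record);
            newRecord.put("USED_HEAP_SIZE_MB", e.getValue());
            retMap.put(e.getKey(), newRecord);
        }
        System.out.println(retMap); // only rs1 is present, now with USED_HEAP_SIZE_MB attached
    }
}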

Example 8 with Field

Use of org.apache.hadoop.hbase.hbtop.field.Field in project hbase by apache.

The class FieldScreenView, method showFieldScreen:

public void showFieldScreen(String sortFieldHeader, List<Field> fields,
    EnumMap<Field, Boolean> fieldDisplayMap, int currentPosition, int headerMaxLength,
    int descriptionMaxLength, boolean moveMode) {
    showScreenDescription(sortFieldHeader);
    for (int i = 0; i < fields.size(); i++) {
        Field field = fields.get(i);
        showField(i, field, fieldDisplayMap.get(field), i == currentPosition, headerMaxLength,
            descriptionMaxLength, moveMode);
    }
}
    }
}
Also used : Field(org.apache.hadoop.hbase.hbtop.field.Field)
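
A minimal console sketch of the rendering loop above, JDK only: each field is printed in order and the one at currentPosition is marked, roughly what showField() does against the terminal in the real view. Strings stand in for Field and the display flags; everything here is illustrative.

import java.util.Arrays;
import java.util.List;

public class FieldScreenSketch {
    static void showFieldScreen(List<String> fields, int currentPosition) {
        for (int i = 0; i < fields.size(); i++) {
            // Mark the row the cursor is on, mirroring the "i == currentPosition" check above.
            String marker = (i == currentPosition) ? ">" : " ";
            System.out.println(marker + " " + fields.get(i));
        }
    }

    public static void main(String[] args) {
        showFieldScreen(Arrays.asList("REGION_COUNT", "USED_HEAP_SIZE", "MAX_HEAP_SIZE"), 1);
    }
}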

Example 9 with Field

Use of org.apache.hadoop.hbase.hbtop.field.Field in project hbase by apache.

The class ClientModeStrategy, method aggregateRecordsAndAddDistinct:

/**
 * Aggregate the records and count the unique values of the given distinctField.
 *
 * @param records               records to be processed
 * @param groupBy               field to group the records by
 * @param distinctField         field whose unique values are counted
 * @param uniqueCountAssignedTo target field to which the unique count is assigned
 * @return aggregated records
 */
List<Record> aggregateRecordsAndAddDistinct(List<Record> records, Field groupBy, Field distinctField, Field uniqueCountAssignedTo) {
    List<Record> result = new ArrayList<>();
    records.stream().collect(Collectors.groupingBy(r -> r.get(groupBy))).values().forEach(val -> {
        Set<FieldValue> distinctValues = new HashSet<>();
        Map<Field, FieldValue> map = new HashMap<>();
        for (Record record : val) {
            for (Map.Entry<Field, FieldValue> field : record.entrySet()) {
                if (distinctField.equals(field.getKey())) {
                    // We will not be adding the field in the new record whose distinct count is required
                    distinctValues.add(record.get(distinctField));
                } else {
                    if (field.getKey().getFieldValueType() == FieldValueType.STRING) {
                        map.put(field.getKey(), field.getValue());
                    } else {
                        if (map.get(field.getKey()) == null) {
                            map.put(field.getKey(), field.getValue());
                        } else {
                            map.put(field.getKey(), map.get(field.getKey()).plus(field.getValue()));
                        }
                    }
                }
            }
        }
        // Add unique count field
        map.put(uniqueCountAssignedTo, uniqueCountAssignedTo.newValue(distinctValues.size()));
        result.add(Record.ofEntries(map.entrySet().stream().map(k -> Record.entry(k.getKey(), k.getValue()))));
    });
    return result;
}
Also used : RecordFilter(org.apache.hadoop.hbase.hbtop.RecordFilter) Arrays(java.util.Arrays) UserMetrics(org.apache.hadoop.hbase.UserMetrics) Set(java.util.Set) ServerMetrics(org.apache.hadoop.hbase.ServerMetrics) HashMap(java.util.HashMap) ClusterMetrics(org.apache.hadoop.hbase.ClusterMetrics) Collectors(java.util.stream.Collectors) FieldValueType(org.apache.hadoop.hbase.hbtop.field.FieldValueType) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) List(java.util.List) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) Field(org.apache.hadoop.hbase.hbtop.field.Field) FieldInfo(org.apache.hadoop.hbase.hbtop.field.FieldInfo) FieldValue(org.apache.hadoop.hbase.hbtop.field.FieldValue) Map(java.util.Map) Record(org.apache.hadoop.hbase.hbtop.Record) Collections(java.util.Collections)
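
A standalone sketch of the same group-by, distinct-count, and sum idea using plain maps and the JDK only: rows are grouped by one key, the unique values of another key are counted, and a numeric column is summed per group. Column names and values are illustrative and do not correspond to real hbtop fields.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class DistinctCountSketch {
    public static void main(String[] args) {
        // Each row: client address, user name, read request count.
        List<Map<String, Object>> rows = List.of(
            Map.of("CLIENT", "10.0.0.1", "USER", "alice", "READS", 3L),
            Map.of("CLIENT", "10.0.0.1", "USER", "bob", "READS", 5L),
            Map.of("CLIENT", "10.0.0.2", "USER", "alice", "READS", 1L));

        List<Map<String, Object>> result = new ArrayList<>();
        rows.stream().collect(Collectors.groupingBy(r -> r.get("CLIENT"))).forEach((client, group) -> {
            // Sum the numeric column, like the plus() accumulation above.
            long reads = group.stream().mapToLong(r -> (Long) r.get("READS")).sum();
            // Count unique users, like the distinctValues set above.
            long userCount = group.stream().map(r -> r.get("USER")).distinct().count();
            result.add(Map.of("CLIENT", client, "READS", reads, "USER_COUNT", userCount));
        });
        System.out.println(result);
    }
}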

Example 10 with Field

Use of org.apache.hadoop.hbase.hbtop.field.Field in project hbase by apache.

The class RecordFilter, method parse:

/*
   * Parse a filter string and build a RecordFilter instance.
   */
public static RecordFilter parse(String filterString, List<Field> fields, boolean ignoreCase) {
    int index = 0;
    boolean not = isNot(filterString);
    if (not) {
        index += 1;
    }
    StringBuilder fieldString = new StringBuilder();
    while (filterString.length() > index && filterString.charAt(index) != '<' && filterString.charAt(index) != '>' && filterString.charAt(index) != '=') {
        fieldString.append(filterString.charAt(index++));
    }
    if (fieldString.length() == 0 || filterString.length() == index) {
        return null;
    }
    Field field = getField(fields, fieldString.toString());
    if (field == null) {
        return null;
    }
    StringBuilder operatorString = new StringBuilder();
    while (filterString.length() > index && (filterString.charAt(index) == '<' || filterString.charAt(index) == '>' || filterString.charAt(index) == '=')) {
        operatorString.append(filterString.charAt(index++));
    }
    Operator operator = getOperator(operatorString.toString());
    if (operator == null) {
        return null;
    }
    String value = filterString.substring(index);
    FieldValue fieldValue = getFieldValue(field, value);
    if (fieldValue == null) {
        return null;
    }
    return new RecordFilter(ignoreCase, not, field, operator, fieldValue);
}
Also used : Field(org.apache.hadoop.hbase.hbtop.field.Field) FieldValue(org.apache.hadoop.hbase.hbtop.field.FieldValue)
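
A standalone sketch of the scanning logic above, JDK only: peel off an optional leading '!' (what isNot() checks in the real parser), then read the field name up to the first comparison character, then the operator, then the remaining text as the value. The filter string and field name here are illustrative; in the real code the field name must match one of the supplied fields and the value must parse into a FieldValue, otherwise parse() returns null.

public class FilterScanSketch {
    public static void main(String[] args) {
        String filterString = "!REGION_COUNT>=10";
        int index = 0;
        // Optional negation prefix.
        boolean not = filterString.charAt(index) == '!';
        if (not) {
            index += 1;
        }
        // Field name: everything up to the first '<', '>' or '='.
        StringBuilder fieldString = new StringBuilder();
        while (index < filterString.length() && "<>=".indexOf(filterString.charAt(index)) < 0) {
            fieldString.append(filterString.charAt(index++));
        }
        // Operator: the run of '<', '>' and '=' characters.
        StringBuilder operatorString = new StringBuilder();
        while (index < filterString.length() && "<>=".indexOf(filterString.charAt(index)) >= 0) {
            operatorString.append(filterString.charAt(index++));
        }
        // Value: whatever is left.
        String value = filterString.substring(index);
        System.out.println(not + " " + fieldString + " " + operatorString + " " + value);
        // prints: true REGION_COUNT >= 10
    }
}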

Aggregations

Field (org.apache.hadoop.hbase.hbtop.field.Field): 14
ArrayList (java.util.ArrayList): 4
List (java.util.List): 4
Collectors (java.util.stream.Collectors): 4
FieldInfo (org.apache.hadoop.hbase.hbtop.field.FieldInfo): 4
InterfaceAudience (org.apache.yetus.audience.InterfaceAudience): 3
Arrays (java.util.Arrays): 2
Collections (java.util.Collections): 2
Map (java.util.Map): 2
ClusterMetrics (org.apache.hadoop.hbase.ClusterMetrics): 2
ServerMetrics (org.apache.hadoop.hbase.ServerMetrics): 2
Record (org.apache.hadoop.hbase.hbtop.Record): 2
RecordFilter (org.apache.hadoop.hbase.hbtop.RecordFilter): 2
FieldValue (org.apache.hadoop.hbase.hbtop.field.FieldValue): 2
Mode (org.apache.hadoop.hbase.hbtop.mode.Mode): 2
Test (org.junit.Test): 2
EnumMap (java.util.EnumMap): 1
HashMap (java.util.HashMap): 1
HashSet (java.util.HashSet): 1
Objects (java.util.Objects): 1