Example 51 with TreeSet

Use of java.util.TreeSet in project hbase by apache.

In class ReplicationQueuesZKImpl, the method moveQueueUsingMulti:

/**
   * "Atomically" copies one peer's WAL queue from a dead region server and returns the WALs
   * all sorted. The new peer id is the old peer id appended with the dead server's znode name.
   * @param znode znode of the region server to copy the queues from
   * @param peerId id of the peer whose queue needs to be copied
   */
private Pair<String, SortedSet<String>> moveQueueUsingMulti(String znode, String peerId) {
    try {
        // hbase/replication/rs/deadrs
        String deadRSZnodePath = ZKUtil.joinZNode(this.queuesZNode, znode);
        List<ZKUtilOp> listOfOps = new ArrayList<>();
        ReplicationQueueInfo replicationQueueInfo = new ReplicationQueueInfo(peerId);
        String newPeerId = peerId + "-" + znode;
        String newPeerZnode = ZKUtil.joinZNode(this.myQueuesZnode, newPeerId);
        // check the logs queue for the old peer cluster
        String oldClusterZnode = ZKUtil.joinZNode(deadRSZnodePath, peerId);
        List<String> wals = ZKUtil.listChildrenNoWatch(this.zookeeper, oldClusterZnode);
        if (!peerExists(replicationQueueInfo.getPeerId())) {
            LOG.warn("Peer " + replicationQueueInfo.getPeerId() + " didn't exist, will move its queue to avoid the failure of multi op");
            for (String wal : wals) {
                String oldWalZnode = ZKUtil.joinZNode(oldClusterZnode, wal);
                listOfOps.add(ZKUtilOp.deleteNodeFailSilent(oldWalZnode));
            }
            listOfOps.add(ZKUtilOp.deleteNodeFailSilent(oldClusterZnode));
            ZKUtil.multiOrSequential(this.zookeeper, listOfOps, false);
            return null;
        }
        SortedSet<String> logQueue = new TreeSet<>();
        if (wals == null || wals.isEmpty()) {
            listOfOps.add(ZKUtilOp.deleteNodeFailSilent(oldClusterZnode));
        } else {
            // create the new cluster znode
            ZKUtilOp op = ZKUtilOp.createAndFailSilent(newPeerZnode, HConstants.EMPTY_BYTE_ARRAY);
            listOfOps.add(op);
            // get the offset of the logs and set it to new znodes
            for (String wal : wals) {
                String oldWalZnode = ZKUtil.joinZNode(oldClusterZnode, wal);
                byte[] logOffset = ZKUtil.getData(this.zookeeper, oldWalZnode);
                LOG.debug("Creating " + wal + " with data " + Bytes.toString(logOffset));
                String newLogZnode = ZKUtil.joinZNode(newPeerZnode, wal);
                listOfOps.add(ZKUtilOp.createAndFailSilent(newLogZnode, logOffset));
                listOfOps.add(ZKUtilOp.deleteNodeFailSilent(oldWalZnode));
                logQueue.add(wal);
            }
            // add delete op for peer
            listOfOps.add(ZKUtilOp.deleteNodeFailSilent(oldClusterZnode));
            if (LOG.isTraceEnabled())
                LOG.trace(" The multi list size is: " + listOfOps.size());
        }
        ZKUtil.multiOrSequential(this.zookeeper, listOfOps, false);
        LOG.info("Atomically moved " + znode + "/" + peerId + "'s WALs to my queue");
        return new Pair<>(newPeerId, logQueue);
    } catch (KeeperException e) {
        // Multi call failed; it looks like some other regionserver took away the logs.
        LOG.warn("Got exception in copyQueuesFromRSUsingMulti: ", e);
    } catch (InterruptedException e) {
        LOG.warn("Got exception in copyQueuesFromRSUsingMulti: ", e);
        Thread.currentThread().interrupt();
    }
    return null;
}
Also used: TreeSet (java.util.TreeSet), ArrayList (java.util.ArrayList), ZKUtilOp (org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp), KeeperException (org.apache.zookeeper.KeeperException), Pair (org.apache.hadoop.hbase.util.Pair)
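
For context, the core data-structure move in this example is collecting WAL names into a SortedSet so the caller gets them back in replay order. Below is a minimal, self-contained sketch of that pattern; the class name and WAL names are hypothetical, not taken from HBase:

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedWalQueueDemo {
    public static void main(String[] args) {
        // WAL names come back from ZooKeeper in arbitrary listing order;
        // a TreeSet keeps them in lexicographic order, like logQueue above.
        SortedSet<String> logQueue = new TreeSet<>();
        logQueue.add("wal.1487419382000");
        logQueue.add("wal.1487419381000");
        logQueue.add("wal.1487419383000");
        // Iteration is ascending, so the oldest WAL is replayed first.
        for (String wal : logQueue) {
            System.out.println(wal);
        }
    }
}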

Example 52 with TreeSet

Use of java.util.TreeSet in project hbase by apache.

In class MessageLiteToString, the method reflectivePrintWithIndent:

/**
   * Reflectively prints the {@link MessageLite} to the buffer at given {@code indent} level.
   *
   * @param buffer the buffer to write to
   * @param indent the number of spaces to indent the proto by
   */
private static void reflectivePrintWithIndent(MessageLite messageLite, StringBuilder buffer, int indent) {
    // Build a map of method name to method. We're looking for methods like getFoo(), hasFoo(), and
    // getFooList() which might be useful for building an object's string representation.
    Map<String, Method> nameToNoArgMethod = new HashMap<String, Method>();
    Map<String, Method> nameToMethod = new HashMap<String, Method>();
    Set<String> getters = new TreeSet<String>();
    for (Method method : messageLite.getClass().getDeclaredMethods()) {
        nameToMethod.put(method.getName(), method);
        if (method.getParameterTypes().length == 0) {
            nameToNoArgMethod.put(method.getName(), method);
            if (method.getName().startsWith("get")) {
                getters.add(method.getName());
            }
        }
    }
    for (String getter : getters) {
        String suffix = getter.replaceFirst("get", "");
        if (suffix.endsWith(LIST_SUFFIX) && !suffix.endsWith(BUILDER_LIST_SUFFIX)) {
            String camelCase = suffix.substring(0, 1).toLowerCase() + suffix.substring(1, suffix.length() - LIST_SUFFIX.length());
            // Try to reflectively get the value and toString() the field as if it were repeated. This
            // only works if the method names have not been renamed or stripped by ProGuard.
            Method listMethod = nameToNoArgMethod.get("get" + suffix);
            if (listMethod != null) {
                printField(buffer, indent, camelCaseToSnakeCase(camelCase), GeneratedMessageLite.invokeOrDie(listMethod, messageLite));
                continue;
            }
        }
        Method setter = nameToMethod.get("set" + suffix);
        if (setter == null) {
            continue;
        }
        if (suffix.endsWith(BYTES_SUFFIX) && nameToNoArgMethod.containsKey("get" + suffix.substring(0, suffix.length() - "Bytes".length()))) {
            // Heuristic to skip bytes based accessors for string fields.
            continue;
        }
        String camelCase = suffix.substring(0, 1).toLowerCase() + suffix.substring(1);
        // Try to reflectively get the value and toString() the field as if it were optional. This
        // only works if the method names have not been renamed or stripped by ProGuard.
        Method getMethod = nameToNoArgMethod.get("get" + suffix);
        Method hasMethod = nameToNoArgMethod.get("has" + suffix);
        // TODO(dweis): Fix proto3 semantics.
        if (getMethod != null) {
            Object value = GeneratedMessageLite.invokeOrDie(getMethod, messageLite);
            final boolean hasValue = hasMethod == null ? !isDefaultValue(value) : (Boolean) GeneratedMessageLite.invokeOrDie(hasMethod, messageLite);
            // TODO(dweis): This doesn't stop printing oneof case twice: value and enum style.
            if (hasValue) {
                printField(buffer, indent, camelCaseToSnakeCase(camelCase), value);
            }
            continue;
        }
    }
    if (messageLite instanceof GeneratedMessageLite.ExtendableMessage) {
        Iterator<Map.Entry<GeneratedMessageLite.ExtensionDescriptor, Object>> iter = ((GeneratedMessageLite.ExtendableMessage<?, ?>) messageLite).extensions.iterator();
        while (iter.hasNext()) {
            Map.Entry<GeneratedMessageLite.ExtensionDescriptor, Object> entry = iter.next();
            printField(buffer, indent, "[" + entry.getKey().getNumber() + "]", entry.getValue());
        }
    }
    if (((GeneratedMessageLite<?, ?>) messageLite).unknownFields != null) {
        ((GeneratedMessageLite<?, ?>) messageLite).unknownFields.printWithIndent(buffer, indent);
    }
}
Also used: HashMap (java.util.HashMap), Method (java.lang.reflect.Method), TreeSet (java.util.TreeSet), Map (java.util.Map)
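
The TreeSet here exists to make the reflective output deterministic: getDeclaredMethods() returns methods in no particular order, so sorting the getter names fixes the printed field order. A minimal sketch of the same trick, using String.class as a stand-in for the generated message class:

import java.lang.reflect.Method;
import java.util.Set;
import java.util.TreeSet;

public class SortedGettersDemo {
    public static void main(String[] args) {
        // Collect the names of no-arg getters into a TreeSet so iteration
        // order is alphabetical and stable across JVM runs.
        Set<String> getters = new TreeSet<>();
        for (Method method : String.class.getDeclaredMethods()) {
            if (method.getParameterTypes().length == 0
                    && method.getName().startsWith("get")) {
                getters.add(method.getName());
            }
        }
        getters.forEach(System.out::println);
    }
}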

Example 53 with TreeSet

Use of java.util.TreeSet in project hbase by apache.

In class TextSortReducer, the method reduce:

@Override
protected void reduce(ImmutableBytesWritable rowKey, java.lang.Iterable<Text> lines, Reducer<ImmutableBytesWritable, Text, ImmutableBytesWritable, KeyValue>.Context context) throws java.io.IOException, InterruptedException {
    // although reduce() is called per-row, handle pathological case
    long threshold = context.getConfiguration().getLong("reducer.row.threshold", 1L * (1 << 30));
    Iterator<Text> iter = lines.iterator();
    while (iter.hasNext()) {
        Set<KeyValue> kvs = new TreeSet<>(CellComparator.COMPARATOR);
        long curSize = 0;
        // stop at the end or the RAM threshold
        while (iter.hasNext() && curSize < threshold) {
            Text line = iter.next();
            byte[] lineBytes = line.getBytes();
            try {
                ImportTsv.TsvParser.ParsedLine parsed = parser.parse(lineBytes, line.getLength());
                // Retrieve timestamp if exists
                ts = parsed.getTimestamp(ts);
                cellVisibilityExpr = parsed.getCellVisibility();
                ttl = parsed.getCellTTL();
                // create tags for the parsed line
                List<Tag> tags = new ArrayList<>();
                if (cellVisibilityExpr != null) {
                    tags.addAll(kvCreator.getVisibilityExpressionResolver().createVisibilityExpTags(cellVisibilityExpr));
                }
                // Add TTL directly to the KV so we can vary them when packing more than one KV
                // into puts
                if (ttl > 0) {
                    tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
                }
                for (int i = 0; i < parsed.getColumnCount(); i++) {
                    if (i == parser.getRowKeyColumnIndex() || i == parser.getTimestampKeyColumnIndex() || i == parser.getAttributesKeyColumnIndex() || i == parser.getCellVisibilityColumnIndex() || i == parser.getCellTTLColumnIndex()) {
                        continue;
                    }
                    // Creating the KV which needs to be directly written to HFiles. Using the Facade
                    // KVCreator for creation of kvs.
                    Cell cell = this.kvCreator.create(lineBytes, parsed.getRowKeyOffset(), parsed.getRowKeyLength(), parser.getFamily(i), 0, parser.getFamily(i).length, parser.getQualifier(i), 0, parser.getQualifier(i).length, ts, lineBytes, parsed.getColumnOffset(i), parsed.getColumnLength(i), tags);
                    KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
                    kvs.add(kv);
                    curSize += kv.heapSize();
                }
            } catch (ImportTsv.TsvParser.BadTsvLineException | IllegalArgumentException | InvalidLabelException badLine) {
                if (skipBadLines) {
                    System.err.println("Bad line: " + badLine.getMessage());
                    incrementBadLineCount(1);
                    continue;
                }
                throw new IOException(badLine);
            }
        }
        context.setStatus("Read " + kvs.size() + " entries of " + kvs.getClass() + "(" + StringUtils.humanReadableInt(curSize) + ")");
        int index = 0;
        for (KeyValue kv : kvs) {
            context.write(rowKey, kv);
            if (++index > 0 && index % 100 == 0)
                context.setStatus("Wrote " + index + " key values.");
        }
        // if we have more entries to process
        if (iter.hasNext()) {
            // force flush because we cannot guarantee intra-row sorted order
            context.write(null, null);
        }
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), Text (org.apache.hadoop.io.Text), IOException (java.io.IOException), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), InvalidLabelException (org.apache.hadoop.hbase.security.visibility.InvalidLabelException), TreeSet (java.util.TreeSet), Tag (org.apache.hadoop.hbase.Tag), Cell (org.apache.hadoop.hbase.Cell)
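
The load-bearing detail is the TreeSet built with an explicit Comparator (CellComparator.COMPARATOR), which sorts cells on insert so they can be written out in key order without a separate sort pass. A dependency-free sketch of the same idea, with a hypothetical "row/column/value" string format standing in for KeyValue:

import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;

public class ComparatorSortDemo {
    public static void main(String[] args) {
        // Sort entries by their row prefix, the way the reducer sorts
        // KeyValues by cell key while accumulating them.
        Set<String> kvs = new TreeSet<>(
                Comparator.comparing((String s) -> s.substring(0, s.indexOf('/'))));
        kvs.add("row2/cf:b/v2");
        kvs.add("row1/cf:a/v1");
        kvs.add("row3/cf:c/v3");
        for (String kv : kvs) {
            System.out.println(kv);  // row1..., then row2..., then row3...
        }
    }
}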

Example 54 with TreeSet

Use of java.util.TreeSet in project hbase by apache.

In class HFileOutputFormat2, the method writePartitions:

/**
   * Write out a {@link SequenceFile} that can be read by
   * {@link TotalOrderPartitioner} that contains the split points in startKeys.
   */
@SuppressWarnings("deprecation")
private static void writePartitions(Configuration conf, Path partitionsPath, List<ImmutableBytesWritable> startKeys) throws IOException {
    LOG.info("Writing partition information to " + partitionsPath);
    if (startKeys.isEmpty()) {
        throw new IllegalArgumentException("No regions passed");
    }
    // We're generating a list of split points, and we don't ever
    // have keys < the first region (which has an empty start key)
    // so we need to remove it. Otherwise we would end up with an
    // empty reducer with index 0
    TreeSet<ImmutableBytesWritable> sorted = new TreeSet<>(startKeys);
    ImmutableBytesWritable first = sorted.first();
    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {
        throw new IllegalArgumentException("First region of table should have empty start key. Instead has: " + Bytes.toStringBinary(first.get()));
    }
    sorted.remove(first);
    // Write the actual file
    FileSystem fs = partitionsPath.getFileSystem(conf);
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, partitionsPath, ImmutableBytesWritable.class, NullWritable.class);
    try {
        for (ImmutableBytesWritable startKey : sorted) {
            writer.append(startKey, NullWritable.get());
        }
    } finally {
        writer.close();
    }
}
Also used: ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable), SequenceFile (org.apache.hadoop.io.SequenceFile), TreeSet (java.util.TreeSet), FileSystem (org.apache.hadoop.fs.FileSystem), HFileSystem (org.apache.hadoop.hbase.fs.HFileSystem)
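
Here the TreeSet constructor doubles as sort-and-dedupe for the region start keys, and first()/remove() drops the first region's empty start key. A self-contained sketch with hypothetical string keys in place of ImmutableBytesWritable:

import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

public class SplitPointsDemo {
    public static void main(String[] args) {
        // Start keys arrive unsorted and include the first region's empty
        // start key, which must not become a split point.
        List<String> startKeys = Arrays.asList("", "ccc", "aaa", "bbb");
        TreeSet<String> sorted = new TreeSet<>(startKeys);
        String first = sorted.first();
        if (!first.isEmpty()) {
            throw new IllegalArgumentException(
                    "First region should have an empty start key, got: " + first);
        }
        sorted.remove(first);
        System.out.println(sorted);  // [aaa, bbb, ccc]
    }
}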

Example 55 with TreeSet

Use of java.util.TreeSet in project hadoop by apache.

In class RumenToSLSConverter, the method createSLSTasks:

@SuppressWarnings("unchecked")
private static List createSLSTasks(String taskType, List rumenTasks, long offset) {
    int priority = taskType.equals("reduce") ? 10 : 20;
    List array = new ArrayList();
    for (Object e : rumenTasks) {
        Map rumenTask = (Map) e;
        for (Object ee : (List) rumenTask.get("attempts")) {
            Map rumenAttempt = (Map) ee;
            long taskStart = (Long) rumenAttempt.get("startTime");
            long taskFinish = (Long) rumenAttempt.get("finishTime");
            String hostname = (String) rumenAttempt.get("hostName");
            taskStart = taskStart - baseline + offset;
            taskFinish = taskFinish - baseline + offset;
            Map task = new LinkedHashMap();
            task.put("container.host", hostname);
            task.put("container.start.ms", taskStart);
            task.put("container.end.ms", taskFinish);
            task.put("container.priority", priority);
            task.put("container.type", taskType);
            array.add(task);
            String[] rackHost = SLSUtils.getRackHostName(hostname);
            if (rackNodeMap.containsKey(rackHost[0])) {
                rackNodeMap.get(rackHost[0]).add(rackHost[1]);
            } else {
                Set<String> hosts = new TreeSet<String>();
                hosts.add(rackHost[1]);
                rackNodeMap.put(rackHost[0], hosts);
            }
        }
    }
    return array;
}
Also used: TreeSet (java.util.TreeSet), ArrayList (java.util.ArrayList), List (java.util.List), LinkedHashMap (java.util.LinkedHashMap), Map (java.util.Map), TreeMap (java.util.TreeMap)
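
rackNodeMap groups hosts by rack, with one TreeSet per rack so each rack's host set stays sorted and duplicate-free. A compact sketch of the same pattern with made-up rack/host data; on Java 8+, computeIfAbsent can replace the containsKey branch:

import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

public class RackNodeMapDemo {
    public static void main(String[] args) {
        Map<String, Set<String>> rackNodeMap = new TreeMap<>();
        String[][] rackHosts = {
                {"rack1", "node2"}, {"rack1", "node1"},
                {"rack2", "node3"}, {"rack1", "node2"}  // duplicate, dropped
        };
        for (String[] rh : rackHosts) {
            // Create the rack's sorted host set on first sight, then add.
            rackNodeMap.computeIfAbsent(rh[0], k -> new TreeSet<>()).add(rh[1]);
        }
        System.out.println(rackNodeMap);  // {rack1=[node1, node2], rack2=[node3]}
    }
}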

Aggregations

TreeSet (java.util.TreeSet): 3785 usages
ArrayList (java.util.ArrayList): 833
Test (org.junit.Test): 544
HashMap (java.util.HashMap): 500
HashSet (java.util.HashSet): 428
Set (java.util.Set): 422
Map (java.util.Map): 401
IOException (java.io.IOException): 374
File (java.io.File): 339
List (java.util.List): 320
TreeMap (java.util.TreeMap): 229
Iterator (java.util.Iterator): 189
SortedSet (java.util.SortedSet): 186
LinkedList (java.util.LinkedList): 110
LinkedHashSet (java.util.LinkedHashSet): 106
Date (java.util.Date): 94
Collection (java.util.Collection): 90
Comparator (java.util.Comparator): 85
Test (org.testng.annotations.Test): 81
Text (org.apache.hadoop.io.Text): 79