Example 1 with TreeSet

Use of java.util.TreeSet in project dropwizard by dropwizard.

The class DropwizardResourceConfig, method getEndpointsInfo.

public String getEndpointsInfo() {
    final StringBuilder msg = new StringBuilder(1024);
    final Set<EndpointLogLine> endpointLogLines = new TreeSet<>(new EndpointComparator());
    msg.append("The following paths were found for the configured resources:");
    msg.append(NEWLINE).append(NEWLINE);
    final Set<Class<?>> allResourcesClasses = new HashSet<>();
    for (Class<?> clazz : allClasses()) {
        if (!clazz.isInterface() && Resource.from(clazz) != null) {
            allResourcesClasses.add(clazz);
        }
    }
    for (Class<?> klass : allResourcesClasses) {
        new EndpointLogger(urlPattern, klass).populate(endpointLogLines);
    }
    final Set<Resource> allResources = this.getResources();
    for (Resource res : allResources) {
        for (Resource childRes : res.getChildResources()) {
            for (Class<?> childResHandlerClass : childRes.getHandlerClasses()) {
                EndpointLogger epl = new EndpointLogger(urlPattern, childResHandlerClass);
                epl.populate(cleanUpPath(res.getPath() + epl.rootPath), epl.klass, false, childRes, endpointLogLines);
            }
        }
    }
    if (!endpointLogLines.isEmpty()) {
        for (EndpointLogLine line : endpointLogLines) {
            msg.append(line).append(NEWLINE);
        }
    } else {
        msg.append("    NONE").append(NEWLINE);
    }
    return msg.toString();
}
Also used: TreeSet (java.util.TreeSet), Resource (org.glassfish.jersey.server.model.Resource), HashSet (java.util.HashSet)
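The TreeSet here does double duty: the EndpointComparator keeps the log lines sorted for display, and set semantics drop endpoints that are discovered more than once. A minimal sketch of the same pattern follows; SortedLogLines and the string-array "log line" shape are made up for illustration and are not part of Dropwizard:

import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;

public class SortedLogLines {
    public static void main(String[] args) {
        // Order lines by path, then by HTTP method; entries comparing
        // equal are treated as duplicates and silently dropped.
        Comparator<String[]> byPathThenMethod = Comparator
                .<String[], String>comparing(line -> line[1])
                .thenComparing(line -> line[0]);
        Set<String[]> lines = new TreeSet<>(byPathThenMethod);
        lines.add(new String[] { "GET", "/users" });
        lines.add(new String[] { "POST", "/users" });
        lines.add(new String[] { "GET", "/users" }); // duplicate, ignored
        for (String[] line : lines) {
            System.out.println(line[0] + " " + line[1]);
        }
        // Prints: GET /users, then POST /users
    }
}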

Example 2 with TreeSet

Use of java.util.TreeSet in project jetty.project by eclipse.

The class AnnotationParser, method parse.

protected void parse(Set<? extends Handler> handlers, Bundle bundle) throws Exception {
    URI uri = _bundleToUri.get(bundle);
    if (!_alreadyParsed.add(uri)) {
        return;
    }
    String bundleClasspath = (String) bundle.getHeaders().get(Constants.BUNDLE_CLASSPATH);
    if (bundleClasspath == null) {
        bundleClasspath = ".";
    }
    //order the paths first by the number of tokens in the path, second alphabetically
    TreeSet<String> paths = new TreeSet<String>(new Comparator<String>() {

        public int compare(String o1, String o2) {
            int paths1 = new StringTokenizer(o1, "/", false).countTokens();
            int paths2 = new StringTokenizer(o2, "/", false).countTokens();
            if (paths1 == paths2) {
                return o1.compareTo(o2);
            }
            return paths2 - paths1;
        }
    });
    boolean hasDotPath = false;
    StringTokenizer tokenizer = new StringTokenizer(bundleClasspath, ",;", false);
    while (tokenizer.hasMoreTokens()) {
        String token = tokenizer.nextToken().trim();
        if (!token.startsWith("/")) {
            token = "/" + token;
        }
        if (token.equals("/.")) {
            hasDotPath = true;
        } else if (!token.endsWith(".jar") && !token.endsWith("/")) {
            paths.add(token + "/");
        } else {
            paths.add(token);
        }
    }
    //support development layouts where the classes live under /bin or /target/classes;
    //not how bundles are deployed, however it makes our life so much easier during development
    if (bundle.getEntry("/.classpath") != null) {
        if (bundle.getEntry("/bin/") != null) {
            paths.add("/bin/");
        } else if (bundle.getEntry("/target/classes/") != null) {
            paths.add("/target/classes/");
        }
    }
    Enumeration classes = bundle.findEntries("/", "*.class", true);
    if (classes == null) {
        return;
    }
    while (classes.hasMoreElements()) {
        URL classUrl = (URL) classes.nextElement();
        String path = classUrl.getPath();
        //remove the longest path possible:
        String name = null;
        for (String prefixPath : paths) {
            if (path.startsWith(prefixPath)) {
                name = path.substring(prefixPath.length());
                break;
            }
        }
        if (name == null && hasDotPath) {
            //remove the starting '/'
            name = path.substring(1);
        }
        if (name == null) {
            //the class is not under any known classpath entry, and the bundle classpath wasn't simply ".", so skip it
            continue;
        }
        //transform into a classname to pass to the resolver
        String shortName = name.replace('/', '.').substring(0, name.length() - 6);
        if (!isParsed(shortName)) {
            try (InputStream classInputStream = classUrl.openStream()) {
                scanClass(handlers, getResource(bundle), classInputStream);
            }
        }
    }
}
Also used: StringTokenizer (java.util.StringTokenizer), Enumeration (java.util.Enumeration), TreeSet (java.util.TreeSet), InputStream (java.io.InputStream), URI (java.net.URI), URL (java.net.URL)
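Because the comparator puts prefixes with more path segments first, the prefix-stripping loop later in the method ("remove the longest path possible") can simply take the first match. A standalone sketch of that ordering, with invented classpath entries:

import java.util.StringTokenizer;
import java.util.TreeSet;

public class PrefixOrder {
    public static void main(String[] args) {
        // Same ordering as the anonymous Comparator above: more path
        // segments first, ties broken alphabetically.
        TreeSet<String> paths = new TreeSet<>((o1, o2) -> {
            int t1 = new StringTokenizer(o1, "/", false).countTokens();
            int t2 = new StringTokenizer(o2, "/", false).countTokens();
            return t1 == t2 ? o1.compareTo(o2) : t2 - t1;
        });
        paths.add("/bin/");
        paths.add("/target/classes/");
        paths.add("/lib/util/");
        paths.forEach(System.out::println);
        // Prints: /lib/util/, /target/classes/, /bin/
    }
}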

Example 3 with TreeSet

Use of java.util.TreeSet in project druid by druid-io.

The class IndexMerger, method makeIndexFiles.

protected File makeIndexFiles(final List<IndexableAdapter> indexes, final AggregatorFactory[] metricAggs, final File outDir, final ProgressIndicator progress, final List<String> mergedDimensions, final List<String> mergedMetrics, final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn, final IndexSpec indexSpec) throws IOException {
    List<Metadata> metadataList = Lists.transform(indexes, new Function<IndexableAdapter, Metadata>() {

        @Nullable
        @Override
        public Metadata apply(IndexableAdapter input) {
            return input.getMetadata();
        }
    });
    Metadata segmentMetadata = null;
    if (metricAggs != null) {
        AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length];
        for (int i = 0; i < metricAggs.length; i++) {
            combiningMetricAggs[i] = metricAggs[i].getCombiningFactory();
        }
        segmentMetadata = Metadata.merge(metadataList, combiningMetricAggs);
    } else {
        segmentMetadata = Metadata.merge(metadataList, null);
    }
    final Map<String, ValueType> valueTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    final Map<String, String> metricTypeNames = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    final Map<String, ColumnCapabilitiesImpl> columnCapabilities = Maps.newHashMap();
    final List<ColumnCapabilitiesImpl> dimCapabilities = new ArrayList<>();
    for (IndexableAdapter adapter : indexes) {
        for (String dimension : adapter.getDimensionNames()) {
            ColumnCapabilitiesImpl mergedCapabilities = columnCapabilities.get(dimension);
            ColumnCapabilities capabilities = adapter.getCapabilities(dimension);
            if (mergedCapabilities == null) {
                mergedCapabilities = new ColumnCapabilitiesImpl();
            }
            columnCapabilities.put(dimension, mergedCapabilities.merge(capabilities));
        }
        for (String metric : adapter.getMetricNames()) {
            ColumnCapabilitiesImpl mergedCapabilities = columnCapabilities.get(metric);
            ColumnCapabilities capabilities = adapter.getCapabilities(metric);
            if (mergedCapabilities == null) {
                mergedCapabilities = new ColumnCapabilitiesImpl();
            }
            columnCapabilities.put(metric, mergedCapabilities.merge(capabilities));
            valueTypes.put(metric, capabilities.getType());
            metricTypeNames.put(metric, adapter.getMetricType(metric));
        }
    }
    for (String dimension : mergedDimensions) {
        dimCapabilities.add(columnCapabilities.get(dimension));
    }
    Closer closer = Closer.create();
    try {
        final Interval dataInterval;
        final File v8OutDir = new File(outDir, "v8-tmp");
        FileUtils.forceMkdir(v8OutDir);
        registerDeleteDirectory(closer, v8OutDir);
        File tmpPeonFilesDir = new File(v8OutDir, "tmpPeonFiles");
        FileUtils.forceMkdir(tmpPeonFilesDir);
        registerDeleteDirectory(closer, tmpPeonFilesDir);
        final IOPeon ioPeon = new TmpFileIOPeon(tmpPeonFilesDir, true);
        closer.register(ioPeon);
        /*************  Main index.drd file **************/
        progress.progress();
        long startTime = System.currentTimeMillis();
        File indexFile = new File(v8OutDir, "index.drd");
        try (FileOutputStream fileOutputStream = new FileOutputStream(indexFile);
            FileChannel channel = fileOutputStream.getChannel()) {
            channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION }));
            GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.STRING_STRATEGY).writeToChannel(channel);
            GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.STRING_STRATEGY).writeToChannel(channel);
            DateTime minTime = new DateTime(JodaUtils.MAX_INSTANT);
            DateTime maxTime = new DateTime(JodaUtils.MIN_INSTANT);
            for (IndexableAdapter index : indexes) {
                minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
                maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
            }
            dataInterval = new Interval(minTime, maxTime);
            serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
            serializerUtils.writeString(channel, mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory()));
        }
        IndexIO.checkFileSize(indexFile);
        log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);
        /************* Setup Dim Conversions **************/
        progress.progress();
        startTime = System.currentTimeMillis();
        final ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size());
        final DimensionHandler[] handlers = makeDimensionHandlers(mergedDimensions, dimCapabilities);
        final List<DimensionMerger> mergers = new ArrayList<>();
        for (int i = 0; i < mergedDimensions.size(); i++) {
            DimensionMergerLegacy merger = handlers[i].makeLegacyMerger(indexSpec, v8OutDir, ioPeon, dimCapabilities.get(i), progress);
            mergers.add(merger);
            merger.writeMergedValueMetadata(indexes);
            FileOutputSupplier dimOut = new FileOutputSupplier(merger.makeDimFile(), true);
            merger.writeValueMetadataToFile(dimOut);
            dimOuts.add(dimOut);
        }
        log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);
        /************* Walk through data sets and merge them *************/
        progress.progress();
        startTime = System.currentTimeMillis();
        Iterable<Rowboat> theRows = makeRowIterable(indexes, mergedDimensions, mergedMetrics, rowMergerFn, dimCapabilities, handlers, mergers);
        LongSupplierSerializer timeWriter = CompressionFactory.getLongSerializer(ioPeon, "little_end_time", IndexIO.BYTE_ORDER, indexSpec.getLongEncoding(), CompressedObjectStrategy.DEFAULT_COMPRESSION_STRATEGY);
        timeWriter.open();
        ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size());
        final CompressedObjectStrategy.CompressionStrategy metCompression = indexSpec.getMetricCompression();
        final CompressionFactory.LongEncodingStrategy longEncoding = indexSpec.getLongEncoding();
        for (String metric : mergedMetrics) {
            ValueType type = valueTypes.get(metric);
            switch(type) {
                case LONG:
                    metWriters.add(new LongMetricColumnSerializer(metric, v8OutDir, ioPeon, metCompression, longEncoding));
                    break;
                case FLOAT:
                    metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon, metCompression));
                    break;
                case COMPLEX:
                    final String typeName = metricTypeNames.get(metric);
                    ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
                    if (serde == null) {
                        throw new ISE("Unknown type[%s]", typeName);
                    }
                    metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde));
                    break;
                default:
                    throw new ISE("Unknown type[%s]", type);
            }
        }
        for (MetricColumnSerializer metWriter : metWriters) {
            metWriter.open();
        }
        int rowCount = 0;
        long time = System.currentTimeMillis();
        List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size());
        for (IndexableAdapter index : indexes) {
            int[] arr = new int[index.getNumRows()];
            Arrays.fill(arr, INVALID_ROW);
            rowNumConversions.add(IntBuffer.wrap(arr));
        }
        for (Rowboat theRow : theRows) {
            progress.progress();
            timeWriter.add(theRow.getTimestamp());
            final Object[] metrics = theRow.getMetrics();
            for (int i = 0; i < metrics.length; ++i) {
                metWriters.get(i).serialize(metrics[i]);
            }
            Object[] dims = theRow.getDims();
            for (int i = 0; i < dims.length; ++i) {
                mergers.get(i).processMergedRow(dims[i]);
            }
            for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
                final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());
                for (Integer rowNum : comprisedRow.getValue()) {
                    while (conversionBuffer.position() < rowNum) {
                        conversionBuffer.put(INVALID_ROW);
                    }
                    conversionBuffer.put(rowCount);
                }
            }
            if ((++rowCount % 500000) == 0) {
                log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount, System.currentTimeMillis() - time);
                time = System.currentTimeMillis();
            }
        }
        for (IntBuffer rowNumConversion : rowNumConversions) {
            rowNumConversion.rewind();
        }
        final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER);
        timeFile.delete();
        ByteSink out = Files.asByteSink(timeFile, FileWriteMode.APPEND);
        timeWriter.closeAndConsolidate(out);
        IndexIO.checkFileSize(timeFile);
        for (MetricColumnSerializer metWriter : metWriters) {
            metWriter.close();
        }
        log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount, System.currentTimeMillis() - startTime);
        /************ Create Inverted Indexes and Finalize Columns *************/
        startTime = System.currentTimeMillis();
        final File invertedFile = new File(v8OutDir, "inverted.drd");
        Files.touch(invertedFile);
        out = Files.asByteSink(invertedFile, FileWriteMode.APPEND);
        final File geoFile = new File(v8OutDir, "spatial.drd");
        Files.touch(geoFile);
        OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true);
        for (int i = 0; i < mergedDimensions.size(); i++) {
            DimensionMergerLegacy legacyMerger = (DimensionMergerLegacy) mergers.get(i);
            legacyMerger.writeIndexes(rowNumConversions, closer);
            legacyMerger.writeIndexesToFiles(out, spatialOut);
            legacyMerger.writeRowValuesToFile(dimOuts.get(i));
        }
        log.info("outDir[%s] completed inverted.drd and wrote dimensions in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);
        final Function<String, String> dimFilenameFunction = new Function<String, String>() {

            @Override
            public String apply(@Nullable String input) {
                String formatString;
                if (columnCapabilities.get(input).isDictionaryEncoded()) {
                    formatString = "dim_%s.drd";
                } else {
                    formatString = String.format("numeric_dim_%%s_%s.drd", IndexIO.BYTE_ORDER);
                }
                return GuavaUtils.formatFunction(formatString).apply(input);
            }
        };
        final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat(Arrays.asList("index.drd", "inverted.drd", "spatial.drd", String.format("time_%s.drd", IndexIO.BYTE_ORDER)), Iterables.transform(mergedDimensions, dimFilenameFunction), Iterables.transform(mergedMetrics, GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER)))));
        if (segmentMetadata != null) {
            writeMetadataToFile(new File(v8OutDir, "metadata.drd"), segmentMetadata);
            log.info("wrote metadata.drd in outDir[%s].", v8OutDir);
            expectedFiles.add("metadata.drd");
        }
        Map<String, File> files = Maps.newLinkedHashMap();
        for (String fileName : expectedFiles) {
            files.put(fileName, new File(v8OutDir, fileName));
        }
        File smooshDir = new File(v8OutDir, "smoosher");
        FileUtils.forceMkdir(smooshDir);
        for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) {
            entry.getValue().delete();
        }
        for (File file : smooshDir.listFiles()) {
            Files.move(file, new File(v8OutDir, file.getName()));
        }
        if (!smooshDir.delete()) {
            log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir, Arrays.asList(smooshDir.listFiles()));
            throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir));
        }
        createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir, GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.STRING_STRATEGY), GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.STRING_STRATEGY), dataInterval, indexSpec.getBitmapSerdeFactory());
        indexIO.getDefaultIndexIOHandler().convertV8toV9(v8OutDir, outDir, indexSpec);
        return outDir;
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}
Also used: ArrayList (java.util.ArrayList), CompressedObjectStrategy (io.druid.segment.data.CompressedObjectStrategy), DateTime (org.joda.time.DateTime), ComplexMetricColumnSerializer (io.druid.segment.serde.ComplexMetricColumnSerializer), TmpFileIOPeon (io.druid.segment.data.TmpFileIOPeon), TreeSet (java.util.TreeSet), FileOutputStream (java.io.FileOutputStream), IntBuffer (java.nio.IntBuffer), File (java.io.File), Map (java.util.Map), Nullable (javax.annotation.Nullable), ColumnCapabilitiesImpl (io.druid.segment.column.ColumnCapabilitiesImpl), Interval (org.joda.time.Interval), ComplexMetricSerde (io.druid.segment.serde.ComplexMetricSerde), IOPeon (io.druid.segment.data.IOPeon), ColumnCapabilities (io.druid.segment.column.ColumnCapabilities), Function (com.google.common.base.Function), ByteSink (com.google.common.io.ByteSink), ISE (io.druid.java.util.common.ISE), Closer (com.google.common.io.Closer), ValueType (io.druid.segment.column.ValueType), FileChannel (java.nio.channels.FileChannel), IOException (java.io.IOException), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), CompressionFactory (io.druid.segment.data.CompressionFactory), FileOutputSupplier (io.druid.common.guava.FileOutputSupplier), LongSupplierSerializer (io.druid.segment.data.LongSupplierSerializer)
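One TreeSet detail the merge loop leans on: theRow.getComprisedRows() maps each source index to a TreeSet<Integer> of row numbers, so iteration is ascending and the IntBuffer's position only ever moves forward while padding with INVALID_ROW. A self-contained sketch of that invariant, with made-up row numbers:

import java.nio.IntBuffer;
import java.util.TreeSet;

public class ConversionFill {
    static final int INVALID_ROW = -1;

    public static void main(String[] args) {
        TreeSet<Integer> comprisedRows = new TreeSet<>();
        comprisedRows.add(7);
        comprisedRows.add(2);
        comprisedRows.add(5);
        IntBuffer conversionBuffer = IntBuffer.wrap(new int[10]);
        int mergedRowNum = 42;
        // Ascending iteration (2, 5, 7) means position() never has to
        // move backwards -- the while loop only pads forward.
        for (int rowNum : comprisedRows) {
            while (conversionBuffer.position() < rowNum) {
                conversionBuffer.put(INVALID_ROW);
            }
            conversionBuffer.put(mergedRowNum);
        }
        // Slots 2, 5 and 7 now hold 42; slots 0-1, 3-4 and 6 hold INVALID_ROW.
    }
}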

Example 4 with TreeSet

Use of java.util.TreeSet in project elasticsearch by elastic.

The class TruncateTranslogIT, method corruptRandomTranslogFiles.

private void corruptRandomTranslogFiles(String indexName) throws IOException {
    Set<Path> translogDirs = getTranslogDirs(indexName);
    // TreeSet makes sure iteration order is deterministic
    Set<Path> files = new TreeSet<>();
    for (Path translogDir : translogDirs) {
        if (Files.isDirectory(translogDir)) {
            logger.info("--> path: {}", translogDir);
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(translogDir)) {
                for (Path item : stream) {
                    logger.info("--> File: {}", item);
                    if (Files.isRegularFile(item) && item.getFileName().toString().startsWith("translog-")) {
                        files.add(item);
                    }
                }
            }
        }
    }
    Path fileToCorrupt = null;
    if (!files.isEmpty()) {
        int corruptions = randomIntBetween(5, 20);
        for (int i = 0; i < corruptions; i++) {
            fileToCorrupt = RandomPicks.randomFrom(random(), files);
            try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
                // read
                raf.position(randomIntBetween(0, (int) Math.min(Integer.MAX_VALUE, raf.size() - 1)));
                long filePointer = raf.position();
                ByteBuffer bb = ByteBuffer.wrap(new byte[1]);
                raf.read(bb);
                bb.flip();
                // corrupt
                byte oldValue = bb.get(0);
                byte newValue = (byte) (oldValue + 1);
                bb.put(0, newValue);
                // rewrite
                raf.position(filePointer);
                raf.write(bb);
                logger.info("--> corrupting file {} --  flipping at position {} from {} to {} file: {}", fileToCorrupt, filePointer, Integer.toHexString(oldValue), Integer.toHexString(newValue), fileToCorrupt);
            }
        }
    }
    assertThat("no file corrupted", fileToCorrupt, notNullValue());
}
Also used: Path (java.nio.file.Path), TreeSet (java.util.TreeSet), FileChannel (java.nio.channels.FileChannel), ByteBuffer (java.nio.ByteBuffer)
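As the inline comment says, the TreeSet is there for determinism: Path implements Comparable, so iteration over the collected translog files is in sorted order rather than whatever order Files.newDirectoryStream happened to return. A small sketch, with invented file names:

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Set;
import java.util.TreeSet;

public class DeterministicPaths {
    public static void main(String[] args) {
        // Path implements Comparable, so the set iterates in sorted
        // order no matter the insertion (i.e. directory-listing) order.
        Set<Path> files = new TreeSet<>();
        files.add(Paths.get("translog-3.tlog"));
        files.add(Paths.get("translog-1.tlog"));
        files.add(Paths.get("translog-2.tlog"));
        files.forEach(System.out::println);
        // Prints translog-1.tlog, translog-2.tlog, translog-3.tlog
    }
}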

Example 5 with TreeSet

Use of java.util.TreeSet in project hbase by apache.

The class TestRSGroupBasedLoadBalancer, method assertRetainedAssignment.

/**
   * Asserts a valid retained assignment plan.
   * <p>
   * Must meet the following conditions:
   * <ul>
   * <li>Every input region has an assignment, and to an online server
   * <li>If a region had an existing assignment to a server with the same
   * address as a currently online server, it will be assigned to it
   * </ul>
   *
   * @param existing the previous assignment of regions to servers
   * @param servers the currently online servers
   * @param assignment the new assignment plan to verify
   * @throws java.io.IOException
   * @throws java.io.FileNotFoundException
   */
private void assertRetainedAssignment(Map<HRegionInfo, ServerName> existing, List<ServerName> servers, Map<ServerName, List<HRegionInfo>> assignment) throws FileNotFoundException, IOException {
    // Verify condition 1, every region assigned, and to online server
    Set<ServerName> onlineServerSet = new TreeSet<>(servers);
    Set<HRegionInfo> assignedRegions = new TreeSet<>();
    for (Map.Entry<ServerName, List<HRegionInfo>> a : assignment.entrySet()) {
        assertTrue("Region assigned to server that was not listed as online", onlineServerSet.contains(a.getKey()));
        for (HRegionInfo r : a.getValue()) assignedRegions.add(r);
    }
    assertEquals(existing.size(), assignedRegions.size());
    // Verify condition 2, every region must be assigned to correct server.
    Set<String> onlineHostNames = new TreeSet<>();
    for (ServerName s : servers) {
        onlineHostNames.add(s.getHostname());
    }
    for (Map.Entry<ServerName, List<HRegionInfo>> a : assignment.entrySet()) {
        ServerName currentServer = a.getKey();
        for (HRegionInfo r : a.getValue()) {
            ServerName oldAssignedServer = existing.get(r);
            TableName tableName = r.getTable();
            String groupName = getMockedGroupInfoManager().getRSGroupOfTable(tableName);
            assertTrue(StringUtils.isNotEmpty(groupName));
            RSGroupInfo gInfo = getMockedGroupInfoManager().getRSGroup(groupName);
            assertTrue("Region is not correctly assigned to group servers.", gInfo.containsServer(currentServer.getAddress()));
            if (oldAssignedServer != null && onlineHostNames.contains(oldAssignedServer.getHostname())) {
                // If the region moved off a former server that is still online,
                // that server must belong to a different group.
                if (!oldAssignedServer.getAddress().equals(currentServer.getAddress())) {
                    assertFalse(gInfo.containsServer(oldAssignedServer.getAddress()));
                }
            }
        }
    }
}
Also used: HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), TableName (org.apache.hadoop.hbase.TableName), RSGroupInfo (org.apache.hadoop.hbase.rsgroup.RSGroupInfo), TreeSet (java.util.TreeSet), ServerName (org.apache.hadoop.hbase.ServerName), ArrayList (java.util.ArrayList), List (java.util.List), HashMap (java.util.HashMap), Map (java.util.Map), TreeMap (java.util.TreeMap)
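A caveat worth remembering when using TreeSet for membership checks like onlineServerSet.contains(...) above: a TreeSet decides equality with compareTo (or the supplied Comparator), not equals. The sketch below uses a made-up Server class (not HBase's ServerName) to show two objects that equals() would distinguish collapsing into one element:

import java.util.TreeSet;

public class TreeSetEquality {
    // Hypothetical element type, ordered by host only.
    static class Server implements Comparable<Server> {
        final String host;
        final int port;

        Server(String host, int port) {
            this.host = host;
            this.port = port;
        }

        @Override
        public int compareTo(Server other) {
            return host.compareTo(other.host);
        }
    }

    public static void main(String[] args) {
        TreeSet<Server> servers = new TreeSet<>();
        servers.add(new Server("rs1.example.com", 16020));
        // compareTo() returns 0 for the same host, so the TreeSet treats
        // this as a duplicate even though the ports differ.
        servers.add(new Server("rs1.example.com", 16030));
        System.out.println(servers.size()); // prints 1
    }
}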

Aggregations

TreeSet (java.util.TreeSet): 3785
ArrayList (java.util.ArrayList): 833
Test (org.junit.Test): 544
HashMap (java.util.HashMap): 500
HashSet (java.util.HashSet): 428
Set (java.util.Set): 422
Map (java.util.Map): 401
IOException (java.io.IOException): 374
File (java.io.File): 339
List (java.util.List): 320
TreeMap (java.util.TreeMap): 229
Iterator (java.util.Iterator): 189
SortedSet (java.util.SortedSet): 186
LinkedList (java.util.LinkedList): 110
LinkedHashSet (java.util.LinkedHashSet): 106
Date (java.util.Date): 94
Collection (java.util.Collection): 90
Comparator (java.util.Comparator): 85
Test (org.testng.annotations.Test): 81
Text (org.apache.hadoop.io.Text): 79