use of java.util.TreeMap in project hbase by apache.
the class TestCatalogJanitor method parentWithSpecifiedEndKeyCleanedEvenIfDaughterGoneFirst.
/**
 * Make sure a parent with the specified end key gets cleaned up even if its
 * daughter is cleaned up before it.
 *
 * @param rootDir the test case name, used as the HBase testing utility root
 * @param lastEndKey the end key of the split parent
 * @throws IOException if filesystem operations fail
 * @throws InterruptedException if a sleep between region creations is interrupted
 */
private void parentWithSpecifiedEndKeyCleanedEvenIfDaughterGoneFirst(
        final String rootDir, final byte[] lastEndKey)
        throws IOException, InterruptedException {
    HBaseTestingUtility htu = new HBaseTestingUtility();
    setRootDirAndCleanIt(htu, rootDir);
    MasterServices services = new MockMasterServices(htu);
    CatalogJanitor janitor = new CatalogJanitor(services);
    final HTableDescriptor htd = createHTableDescriptor();
    // Create regions: aaa->{lastEndKey}, aaa->ccc, aaa->bbb, bbb->ccc, etc.
    // Parent
    HRegionInfo parent = new HRegionInfo(htd.getTableName(), Bytes.toBytes("aaa"), lastEndKey);
    // Sleep a second, else the encoded names on these regions come out the
    // same for regions with the same start key created in the same second.
    Thread.sleep(1001);
    // Daughter a
    HRegionInfo splita = new HRegionInfo(htd.getTableName(), Bytes.toBytes("aaa"), Bytes.toBytes("ccc"));
    Thread.sleep(1001);
    // Make daughters of daughter a: splitaa and splitab.
    HRegionInfo splitaa = new HRegionInfo(htd.getTableName(), Bytes.toBytes("aaa"), Bytes.toBytes("bbb"));
    HRegionInfo splitab = new HRegionInfo(htd.getTableName(), Bytes.toBytes("bbb"), Bytes.toBytes("ccc"));
    // Daughter b
    HRegionInfo splitb = new HRegionInfo(htd.getTableName(), Bytes.toBytes("ccc"), lastEndKey);
    Thread.sleep(1001);
    // Make daughters of daughter b: splitba and splitbb.
    HRegionInfo splitba = new HRegionInfo(htd.getTableName(), Bytes.toBytes("ccc"), Bytes.toBytes("ddd"));
    HRegionInfo splitbb = new HRegionInfo(htd.getTableName(), Bytes.toBytes("ddd"), lastEndKey);
    // First test that our Comparator works right up in CatalogJanitor.
    // Just for kicks.
    SortedMap<HRegionInfo, Result> regions =
            new TreeMap<>(new CatalogJanitor.SplitParentFirstComparator());
    // Now make sure that this regions map sorts as we expect it to.
    regions.put(parent, createResult(parent, splita, splitb));
    regions.put(splitb, createResult(splitb, splitba, splitbb));
    regions.put(splita, createResult(splita, splitaa, splitab));
    // Assert it's properly sorted: the parent first, then its daughters.
    int index = 0;
    for (Map.Entry<HRegionInfo, Result> e : regions.entrySet()) {
        if (index == 0) {
            assertTrue(e.getKey().getEncodedName().equals(parent.getEncodedName()));
        } else if (index == 1) {
            assertTrue(e.getKey().getEncodedName().equals(splita.getEncodedName()));
        } else if (index == 2) {
            assertTrue(e.getKey().getEncodedName().equals(splitb.getEncodedName()));
        }
        index++;
    }
    // Now play around with the cleanParent function. Create a ref from splita
    // up to the parent.
    Path splitaRef = createReferences(services, htd, parent, splita, Bytes.toBytes("ccc"), false);
    // Make sure the actual super parent sticks around because splita has a ref.
    assertFalse(janitor.cleanParent(parent, regions.get(parent)));
    // splitba and splitbb do not have dirs in the fs, which means that if we
    // test splitb, it should get cleaned up.
    assertTrue(janitor.cleanParent(splitb, regions.get(splitb)));
    // Now remove the ref from splita to the parent... so the parent can be
    // let go and so the daughter splita can be split (it can't split while it
    // still has references). BUT make the timing such that the daughter gets
    // cleaned up before we get a chance to let go of the parent.
    FileSystem fs = FileSystem.get(htu.getConfiguration());
    assertTrue(fs.delete(splitaRef, true));
    // Create the refs from the daughters of splita.
    Path splitaaRef = createReferences(services, htd, splita, splitaa, Bytes.toBytes("bbb"), false);
    Path splitabRef = createReferences(services, htd, splita, splitab, Bytes.toBytes("bbb"), true);
    // Test splita. It should stick around because of the references from
    // splitaa and splitab.
    assertFalse(janitor.cleanParent(splita, regions.get(splita)));
    // Now clean up daughter splita first: remove the references from its daughters.
    assertTrue(fs.delete(splitaaRef, true));
    assertTrue(fs.delete(splitabRef, true));
    assertTrue(janitor.cleanParent(splita, regions.get(splita)));
    // The super parent should get cleaned up now that both splita and splitb are gone.
    assertTrue(janitor.cleanParent(parent, regions.get(parent)));
    services.stop("test finished");
    janitor.cancel(true);
}
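The TreeMap here exists purely for its comparator-driven iteration order: CatalogJanitor.SplitParentFirstComparator guarantees that a split parent sorts ahead of its daughters, which is what the assertion loop above verifies. Below is a minimal, self-contained sketch of the same idea, using a hypothetical Region record and a simplified comparator (the real SplitParentFirstComparator also handles HBase's empty "last region" end keys, which this sketch ignores):
import java.util.Comparator;
import java.util.SortedMap;
import java.util.TreeMap;

public class SplitParentFirstDemo {

    // Hypothetical stand-in for HRegionInfo: a region is a [start, end) key range.
    record Region(String name, String start, String end) {}

    public static void main(String[] args) {
        // Regions with the same start key sort wider-range (parent) first,
        // mimicking the intent of CatalogJanitor.SplitParentFirstComparator.
        Comparator<Region> splitParentFirst = Comparator
                .comparing(Region::start)
                .thenComparing(Region::end, Comparator.reverseOrder());

        SortedMap<Region, String> regions = new TreeMap<>(splitParentFirst);
        regions.put(new Region("splita", "aaa", "ccc"), "daughter a");
        regions.put(new Region("parent", "aaa", "eee"), "parent");
        regions.put(new Region("splitb", "ccc", "eee"), "daughter b");

        // The TreeMap iterates in comparator order: parent, splita, splitb.
        regions.keySet().forEach(r -> System.out.println(r.name()));
    }
}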
use of java.util.TreeMap in project tomcat by apache.
the class StatusTransformer method writeVMState.
/**
 * Write the VM state.
 * @param writer The output writer
 * @param mode Mode <code>0</code> will generate HTML.
 *             Mode <code>1</code> will generate XML.
 * @throws Exception Propagated JMX error
 */
public static void writeVMState(PrintWriter writer, int mode) throws Exception {
    SortedMap<String, MemoryPoolMXBean> memoryPoolMBeans = new TreeMap<>();
    for (MemoryPoolMXBean mbean : ManagementFactory.getMemoryPoolMXBeans()) {
        String sortKey = mbean.getType() + ":" + mbean.getName();
        memoryPoolMBeans.put(sortKey, mbean);
    }
    if (mode == 0) {
        writer.print("<h1>JVM</h1>");
        writer.print("<p>");
        writer.print(" Free memory: ");
        writer.print(formatSize(Long.valueOf(Runtime.getRuntime().freeMemory()), true));
        writer.print(" Total memory: ");
        writer.print(formatSize(Long.valueOf(Runtime.getRuntime().totalMemory()), true));
        writer.print(" Max memory: ");
        writer.print(formatSize(Long.valueOf(Runtime.getRuntime().maxMemory()), true));
        writer.print("</p>");
        writer.write("<table border=\"0\"><thead><tr><th>Memory Pool</th><th>Type</th>"
                + "<th>Initial</th><th>Total</th><th>Maximum</th><th>Used</th></tr></thead><tbody>");
        for (MemoryPoolMXBean memoryPoolMBean : memoryPoolMBeans.values()) {
            MemoryUsage usage = memoryPoolMBean.getUsage();
            writer.write("<tr><td>");
            writer.print(memoryPoolMBean.getName());
            writer.write("</td><td>");
            writer.print(memoryPoolMBean.getType());
            writer.write("</td><td>");
            writer.print(formatSize(Long.valueOf(usage.getInit()), true));
            writer.write("</td><td>");
            writer.print(formatSize(Long.valueOf(usage.getCommitted()), true));
            writer.write("</td><td>");
            writer.print(formatSize(Long.valueOf(usage.getMax()), true));
            writer.write("</td><td>");
            writer.print(formatSize(Long.valueOf(usage.getUsed()), true));
            if (usage.getMax() > 0) {
                writer.write(" (" + (usage.getUsed() * 100 / usage.getMax()) + "%)");
            }
            writer.write("</td></tr>");
        }
        writer.write("</tbody></table>");
    } else if (mode == 1) {
        writer.write("<jvm>");
        writer.write("<memory");
        writer.write(" free='" + Runtime.getRuntime().freeMemory() + "'");
        writer.write(" total='" + Runtime.getRuntime().totalMemory() + "'");
        writer.write(" max='" + Runtime.getRuntime().maxMemory() + "'/>");
        for (MemoryPoolMXBean memoryPoolMBean : memoryPoolMBeans.values()) {
            MemoryUsage usage = memoryPoolMBean.getUsage();
            writer.write("<memorypool");
            writer.write(" name='" + memoryPoolMBean.getName() + "'");
            writer.write(" type='" + memoryPoolMBean.getType() + "'");
            writer.write(" usageInit='" + usage.getInit() + "'");
            writer.write(" usageCommitted='" + usage.getCommitted() + "'");
            writer.write(" usageMax='" + usage.getMax() + "'");
            writer.write(" usageUsed='" + usage.getUsed() + "'/>");
        }
        writer.write("</jvm>");
    }
}
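The TreeMap in this method does one job: impose a stable display order on the memory pool MXBeans by keying them with a "type:name" composite string, so heap and non-heap pools group together and each group comes out alphabetized by pool name. A standalone sketch of that sorting trick, using only standard java.lang.management APIs:
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryPoolMXBean;
import java.util.SortedMap;
import java.util.TreeMap;

public class SortedPoolsDemo {

    public static void main(String[] args) {
        // Keying by "type:name" groups HEAP pools together and NON_HEAP pools
        // together, each group alphabetized by pool name.
        SortedMap<String, MemoryPoolMXBean> pools = new TreeMap<>();
        for (MemoryPoolMXBean mbean : ManagementFactory.getMemoryPoolMXBeans()) {
            pools.put(mbean.getType() + ":" + mbean.getName(), mbean);
        }
        // Iteration over a TreeMap follows key order, so output is stable.
        pools.forEach((key, mbean) ->
                System.out.printf("%-40s used=%d%n", key, mbean.getUsage().getUsed()));
    }
}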
use of java.util.TreeMap in project cas by apereo.
the class ReturnMappedAttributeReleasePolicy method getAttributesInternal.
@Override
protected Map<String, Object> getAttributesInternal(final Map<String, Object> attrs, final RegisteredService service) {
    final Map<String, Object> resolvedAttributes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    resolvedAttributes.putAll(attrs);
    final Map<String, Object> attributesToRelease = new HashMap<>(resolvedAttributes.size());
    /*
     * Map each entry in the allowed list into an array of the original key,
     * the resolved value, and the original entry itself. Then process the
     * array to populate the map of allowed attributes.
     */
    this.allowedAttributes.entrySet().stream().map(entry -> {
        final String key = entry.getKey();
        return new Object[] { key, resolvedAttributes.get(key), entry };
    }).filter(entry -> entry[1] != null).forEach(entry -> {
        final String mappedAttributeName = ((Map.Entry<String, String>) entry[2]).getValue();
        final Matcher matcherInline = INLINE_GROOVY_PATTERN.matcher(mappedAttributeName);
        final Matcher matcherFile = FILE_GROOVY_PATTERN.matcher(mappedAttributeName);
        if (matcherInline.find()) {
            processInlineGroovyAttribute(resolvedAttributes, attributesToRelease, matcherInline, entry);
        } else if (matcherFile.find()) {
            processFileBasedGroovyAttributes(resolvedAttributes, attributesToRelease, matcherFile, entry);
        } else {
            LOGGER.debug("Found attribute [{}] in the list of allowed attributes, mapped to the name [{}]", entry[0], mappedAttributeName);
            attributesToRelease.put(mappedAttributeName, entry[1]);
        }
    });
    return attributesToRelease;
}
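The notable TreeMap usage here is the String.CASE_INSENSITIVE_ORDER comparator, which makes every lookup against resolvedAttributes ignore the case of the attribute name. A small sketch of that behavior, including the overwrite caveat that comes with comparator-defined key equality:
import java.util.Map;
import java.util.TreeMap;

public class CaseInsensitiveLookupDemo {

    public static void main(String[] args) {
        Map<String, Object> resolved = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        resolved.put("mail", "casuser@example.org");

        // All three lookups hit the same entry: the comparator, not raw string
        // equality, decides key identity in a TreeMap.
        System.out.println(resolved.get("mail"));
        System.out.println(resolved.get("MAIL"));
        System.out.println(resolved.get("Mail"));

        // Caveat: putting "MAIL" now overwrites the "mail" entry rather than
        // adding a second one, which is exactly what the release policy relies on.
        resolved.put("MAIL", "other@example.org");
        System.out.println(resolved.size()); // 1
    }
}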
use of java.util.TreeMap in project hive by apache.
the class HBaseUtils method hashStorageDescriptor.
/**
 * Produce a hash for the storage descriptor.
 * @param sd storage descriptor to hash
 * @param md message digest to use to generate the hash
 * @return the hash as a byte array
 */
static byte[] hashStorageDescriptor(StorageDescriptor sd, MessageDigest md) {
    // Note all maps and lists have to be absolutely sorted. Otherwise we'll produce different
    // results for hashes based on the OS or JVM being used.
    md.reset();
    for (FieldSchema fs : sd.getCols()) {
        md.update(fs.getName().getBytes(ENCODING));
        md.update(fs.getType().getBytes(ENCODING));
        if (fs.getComment() != null) {
            md.update(fs.getComment().getBytes(ENCODING));
        }
    }
    if (sd.getInputFormat() != null) {
        md.update(sd.getInputFormat().getBytes(ENCODING));
    }
    if (sd.getOutputFormat() != null) {
        md.update(sd.getOutputFormat().getBytes(ENCODING));
    }
    md.update(sd.isCompressed() ? "true".getBytes(ENCODING) : "false".getBytes(ENCODING));
    md.update(Integer.toString(sd.getNumBuckets()).getBytes(ENCODING));
    if (sd.getSerdeInfo() != null) {
        SerDeInfo serde = sd.getSerdeInfo();
        if (serde.getName() != null) {
            md.update(serde.getName().getBytes(ENCODING));
        }
        if (serde.getSerializationLib() != null) {
            md.update(serde.getSerializationLib().getBytes(ENCODING));
        }
        if (serde.getParameters() != null) {
            SortedMap<String, String> params = new TreeMap<>(serde.getParameters());
            for (Map.Entry<String, String> param : params.entrySet()) {
                md.update(param.getKey().getBytes(ENCODING));
                md.update(param.getValue().getBytes(ENCODING));
            }
        }
    }
    if (sd.getBucketCols() != null) {
        SortedSet<String> bucketCols = new TreeSet<>(sd.getBucketCols());
        for (String bucket : bucketCols) {
            md.update(bucket.getBytes(ENCODING));
        }
    }
    if (sd.getSortCols() != null) {
        SortedSet<Order> orders = new TreeSet<>(sd.getSortCols());
        for (Order order : orders) {
            md.update(order.getCol().getBytes(ENCODING));
            md.update(Integer.toString(order.getOrder()).getBytes(ENCODING));
        }
    }
    if (sd.getSkewedInfo() != null) {
        SkewedInfo skewed = sd.getSkewedInfo();
        if (skewed.getSkewedColNames() != null) {
            SortedSet<String> colnames = new TreeSet<>(skewed.getSkewedColNames());
            for (String colname : colnames) {
                md.update(colname.getBytes(ENCODING));
            }
        }
        if (skewed.getSkewedColValues() != null) {
            SortedSet<String> sortedOuterList = new TreeSet<>();
            for (List<String> innerList : skewed.getSkewedColValues()) {
                SortedSet<String> sortedInnerList = new TreeSet<>(innerList);
                sortedOuterList.add(StringUtils.join(sortedInnerList, "."));
            }
            for (String colval : sortedOuterList) {
                md.update(colval.getBytes(ENCODING));
            }
        }
        if (skewed.getSkewedColValueLocationMaps() != null) {
            SortedMap<String, String> sortedMap = new TreeMap<>();
            for (Map.Entry<List<String>, String> smap : skewed.getSkewedColValueLocationMaps().entrySet()) {
                SortedSet<String> sortedKey = new TreeSet<>(smap.getKey());
                sortedMap.put(StringUtils.join(sortedKey, "."), smap.getValue());
            }
            for (Map.Entry<String, String> e : sortedMap.entrySet()) {
                md.update(e.getKey().getBytes(ENCODING));
                md.update(e.getValue().getBytes(ENCODING));
            }
        }
    }
    return md.digest();
}
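The pattern behind every TreeMap and TreeSet in this method is the same: HashMap and HashSet iteration order is unspecified and can differ across JVMs, so hashing entries in that order would produce non-portable digests, while copying into a sorted collection first pins the byte order. A runnable sketch of the technique on a parameter map (MD5 and the sample keys are chosen arbitrarily for the demo):
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class DeterministicDigestDemo {

    public static void main(String[] args) throws Exception {
        Map<String, String> params = new HashMap<>();
        params.put("serialization.format", "1");
        params.put("field.delim", ",");
        params.put("escape.delim", "\\");

        // Copying into a TreeMap pins iteration to key order, so the digest is
        // the same regardless of HashMap capacity, insertion order, or JVM.
        MessageDigest md = MessageDigest.getInstance("MD5");
        for (Map.Entry<String, String> e : new TreeMap<>(params).entrySet()) {
            md.update(e.getKey().getBytes(StandardCharsets.UTF_8));
            md.update(e.getValue().getBytes(StandardCharsets.UTF_8));
        }
        System.out.printf("%032x%n", new BigInteger(1, md.digest()));
    }
}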
use of java.util.TreeMap in project buck by facebook.
the class HaskellDescriptionUtils method createCompileRule.
/**
 * Create a Haskell compile rule that compiles all the given Haskell sources in one step and
 * pulls interface files from all transitive Haskell dependencies.
 */
private static HaskellCompileRule createCompileRule(
        BuildTarget target,
        final BuildRuleParams baseParams,
        final BuildRuleResolver resolver,
        SourcePathRuleFinder ruleFinder,
        ImmutableSet<BuildRule> deps,
        final CxxPlatform cxxPlatform,
        HaskellConfig haskellConfig,
        final Linker.LinkableDepType depType,
        Optional<String> main,
        Optional<HaskellPackageInfo> packageInfo,
        ImmutableList<String> flags,
        HaskellSources sources) throws NoSuchBuildTargetException {
    final Map<BuildTarget, ImmutableList<String>> depFlags = new TreeMap<>();
    final Map<BuildTarget, ImmutableList<SourcePath>> depIncludes = new TreeMap<>();
    final ImmutableSortedMap.Builder<String, HaskellPackage> exposedPackagesBuilder =
            ImmutableSortedMap.naturalOrder();
    final ImmutableSortedMap.Builder<String, HaskellPackage> packagesBuilder =
            ImmutableSortedMap.naturalOrder();
    new AbstractBreadthFirstThrowingTraversal<BuildRule, NoSuchBuildTargetException>(deps) {

        private final ImmutableSet<BuildRule> empty = ImmutableSet.of();

        @Override
        public Iterable<BuildRule> visit(BuildRule rule) throws NoSuchBuildTargetException {
            ImmutableSet<BuildRule> ruleDeps = empty;
            if (rule instanceof HaskellCompileDep) {
                ruleDeps = rule.getDeps();
                HaskellCompileInput compileInput =
                        ((HaskellCompileDep) rule).getCompileInput(cxxPlatform, depType);
                depFlags.put(rule.getBuildTarget(), compileInput.getFlags());
                depIncludes.put(rule.getBuildTarget(), compileInput.getIncludes());
                // We add packages from first-order deps as exposed packages, and
                // transitively included packages as hidden ones.
                boolean firstOrderDep = deps.contains(rule);
                for (HaskellPackage pkg : compileInput.getPackages()) {
                    if (firstOrderDep) {
                        exposedPackagesBuilder.put(pkg.getInfo().getIdentifier(), pkg);
                    } else {
                        packagesBuilder.put(pkg.getInfo().getIdentifier(), pkg);
                    }
                }
            }
            return ruleDeps;
        }
    }.start();
    Collection<CxxPreprocessorInput> cxxPreprocessorInputs =
            CxxPreprocessables.getTransitiveCxxPreprocessorInput(cxxPlatform, deps);
    ExplicitCxxToolFlags.Builder toolFlagsBuilder = CxxToolFlags.explicitBuilder();
    PreprocessorFlags.Builder ppFlagsBuilder = PreprocessorFlags.builder();
    toolFlagsBuilder.setPlatformFlags(
            CxxSourceTypes.getPlatformPreprocessFlags(cxxPlatform, CxxSource.Type.C));
    for (CxxPreprocessorInput input : cxxPreprocessorInputs) {
        ppFlagsBuilder.addAllIncludes(input.getIncludes());
        ppFlagsBuilder.addAllFrameworkPaths(input.getFrameworks());
        toolFlagsBuilder.addAllRuleFlags(input.getPreprocessorFlags().get(CxxSource.Type.C));
    }
    ppFlagsBuilder.setOtherFlags(toolFlagsBuilder.build());
    PreprocessorFlags ppFlags = ppFlagsBuilder.build();
    ImmutableList<String> compileFlags = ImmutableList.<String>builder()
            .addAll(haskellConfig.getCompilerFlags())
            .addAll(flags)
            .addAll(Iterables.concat(depFlags.values()))
            .build();
    ImmutableList<SourcePath> includes = ImmutableList.copyOf(Iterables.concat(depIncludes.values()));
    ImmutableSortedMap<String, HaskellPackage> exposedPackages = exposedPackagesBuilder.build();
    ImmutableSortedMap<String, HaskellPackage> packages = packagesBuilder.build();
    return HaskellCompileRule.from(
            target,
            baseParams,
            ruleFinder,
            haskellConfig.getCompiler().resolve(resolver),
            haskellConfig.getHaskellVersion(),
            compileFlags,
            ppFlags,
            cxxPlatform,
            depType == Linker.LinkableDepType.STATIC
                    ? CxxSourceRuleFactory.PicType.PDC
                    : CxxSourceRuleFactory.PicType.PIC,
            main,
            packageInfo,
            includes,
            exposedPackages,
            packages,
            sources,
            CxxSourceTypes.getPreprocessor(cxxPlatform, CxxSource.Type.C).resolve(resolver));
}
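depFlags and depIncludes are TreeMaps keyed by BuildTarget, so when the per-dependency flag lists are later flattened with Iterables.concat(depFlags.values()), they come out in target-sorted order no matter what order the breadth-first traversal visited the rules in. A minimal sketch of that determinism trick, using plain strings as a hypothetical stand-in for Buck's BuildTarget (any key type with a total order works):
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class StableFlagOrderDemo {

    public static void main(String[] args) {
        // Hypothetical targets standing in for Buck BuildTargets; the technique
        // only requires keys that are Comparable or have a supplied Comparator.
        Map<String, List<String>> depFlags = new TreeMap<>();
        depFlags.put("//third-party:zlib", List.of("-optc-O2"));
        depFlags.put("//third-party:aeson", List.of("-XOverloadedStrings"));
        depFlags.put("//third-party:bytestring", List.of("-optP-DBS"));

        // values() follows key order, so the flattened flag list is identical
        // on every run even though deps were discovered in traversal order.
        List<String> flags = depFlags.values().stream()
                .flatMap(List::stream)
                .toList();
        System.out.println(flags); // aeson flags, then bytestring, then zlib
    }
}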