Use of com.google.common.collect.Multiset in project BloodMagic by WayofTime.
From the class ItemBoundTool, method dropStacks:
protected static void dropStacks(Multiset<ItemStackWrapper> drops, World world, BlockPos posToDrop) {
    for (Multiset.Entry<ItemStackWrapper> entry : drops.entrySet()) {
        int count = entry.getCount();
        ItemStackWrapper stack = entry.getElement();
        int maxStackSize = stack.item.getItemStackLimit(stack.toStack(1));
        // Spawn full stacks until fewer than maxStackSize items remain
        while (count >= maxStackSize) {
            world.spawnEntity(new EntityItem(world, posToDrop.getX(), posToDrop.getY(), posToDrop.getZ(), stack.toStack(maxStackSize)));
            count -= maxStackSize;
        }
        // Spawn the remainder as a partial stack
        if (count > 0)
            world.spawnEntity(new EntityItem(world, posToDrop.getX(), posToDrop.getY(), posToDrop.getZ(), stack.toStack(count)));
    }
}
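The method walks drops.entrySet(), so each distinct ItemStackWrapper is visited once with its total count, which is then split into max-size stacks. Below is a minimal, self-contained sketch of that entrySet pattern, using plain strings in place of the mod's ItemStackWrapper; the item names and the stack limit of 64 are invented for illustration:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class DropStacksDemo {
    public static void main(String[] args) {
        Multiset<String> drops = HashMultiset.create();
        drops.add("cobblestone", 150); // add 150 occurrences at once
        drops.add("coal", 3);

        int maxStackSize = 64; // invented stack limit
        for (Multiset.Entry<String> entry : drops.entrySet()) {
            int count = entry.getCount();
            // Split the total count into full stacks plus a remainder,
            // mirroring the loop in dropStacks above
            while (count >= maxStackSize) {
                System.out.println(entry.getElement() + " x" + maxStackSize);
                count -= maxStackSize;
            }
            if (count > 0) {
                System.out.println(entry.getElement() + " x" + count);
            }
        }
    }
}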
Use of com.google.common.collect.Multiset in project acidisland by tastybento.
From the class LavaCheck, method onCobbleGen:
/**
 * Magic Cobble Generator
 * @param e - event
 */
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
public void onCobbleGen(BlockFromToEvent e) {
    // If the magic cobble generator isn't used
    if (!Settings.useMagicCobbleGen) {
        // plugin.getLogger().info("DEBUG: no magic cobble gen");
        return;
    }
    // Only do this in the ASkyBlock world
    if (!e.getBlock().getWorld().equals(ASkyBlock.getIslandWorld())) {
        // plugin.getLogger().info("DEBUG: wrong world");
        return;
    }
    // Do nothing if a new island is being created
    if (plugin.isNewIsland()) {
        // plugin.getLogger().info("DEBUG: new island in creation");
        return;
    }
    // If restricted to spawn, do nothing when we're not at spawn
    if (Settings.magicCobbleGenOnlyAtSpawn && (!ASkyBlockAPI.getInstance().isAtSpawn(e.getBlock().getLocation()))) {
        return;
    }
    final Block b = e.getBlock();
    if (b.getType().equals(Material.WATER) || b.getType().equals(Material.STATIONARY_WATER) || b.getType().equals(Material.LAVA) || b.getType().equals(Material.STATIONARY_LAVA)) {
        // plugin.getLogger().info("DEBUG: From block is water or lava. To = " + e.getToBlock().getType());
        final Block toBlock = e.getToBlock();
        if (toBlock.getType().equals(Material.AIR) && generatesCobble(b, toBlock)) {
            // plugin.getLogger().info("DEBUG: potential cobble gen");
            // Get the island level, or fall back to the default
            long l = Long.MIN_VALUE;
            Island island = plugin.getGrid().getIslandAt(b.getLocation());
            if (island != null && island.getOwner() != null) {
                l = plugin.getPlayers().getIslandLevel(island.getOwner());
                // plugin.getLogger().info("DEBUG: level " + l);
            }
            final long level = l;
            // To check whether cobble is generated next tick,
            // store the surrounding blocks and their current material types
            final List<Block> prevBlock = new ArrayList<Block>();
            final List<Material> prevMat = new ArrayList<Material>();
            for (BlockFace face : FACES) {
                Block r = toBlock.getRelative(face);
                prevBlock.add(r);
                prevMat.add(r.getType());
                // r = toBlock.getRelative(face, 2);
                // prevBlock.add(r);
                // prevMat.add(r.getType());
            }
            // Check if they became cobblestone on the next tick
            plugin.getServer().getScheduler().runTask(plugin, new Runnable() {
                @Override
                public void run() {
                    Iterator<Block> blockIt = prevBlock.iterator();
                    Iterator<Material> matIt = prevMat.iterator();
                    while (blockIt.hasNext() && matIt.hasNext()) {
                        Block block = blockIt.next();
                        Material material = matIt.next();
                        if (block.getType().equals(Material.COBBLESTONE) && !block.getType().equals(material)) {
                            // plugin.getLogger().info("DEBUG: Cobble generated. Island level = " + level);
                            if (!Settings.magicCobbleGenChances.isEmpty()) {
                                // Pick the chance table for the highest level threshold at or below this island's level
                                Entry<Long, TreeMap<Double, Material>> entry = Settings.magicCobbleGenChances.floorEntry(level);
                                double maxValue = entry.getValue().lastKey();
                                double rnd = Util.randomDouble() * maxValue;
                                Entry<Double, Material> en = entry.getValue().ceilingEntry(rnd);
                                if (en != null) {
                                    // plugin.getLogger().info("DEBUG: material = " + en.getValue());
                                    block.setType(en.getValue());
                                    // Record stats, per level
                                    if (stats.containsKey(entry.getKey())) {
                                        stats.get(entry.getKey()).add(en.getValue());
                                    } else {
                                        Multiset<Material> set = HashMultiset.create();
                                        set.add(en.getValue());
                                        stats.put(entry.getKey(), set);
                                    }
                                }
                            }
                        }
                    }
                }
            });
        }
    }
}
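The stats bookkeeping at the end is a classic Multiset tally: one Multiset per island level, counting which materials were generated. A minimal sketch of that pattern follows; the level keys and material names are made up, and computeIfAbsent stands in for the containsKey/put branching in the original:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

import java.util.HashMap;
import java.util.Map;

public class CobbleStatsDemo {
    static final Map<Long, Multiset<String>> stats = new HashMap<>();

    static void tally(long level, String material) {
        // computeIfAbsent replaces the containsKey/put branching in the original
        stats.computeIfAbsent(level, k -> HashMultiset.create()).add(material);
    }

    public static void main(String[] args) {
        tally(0L, "COBBLESTONE");
        tally(0L, "COBBLESTONE");
        tally(100L, "IRON_ORE");
        // Multiset.count(...) reads the tally back without manual counter bookkeeping
        System.out.println(stats.get(0L).count("COBBLESTONE")); // 2
    }
}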
Use of com.google.common.collect.Multiset in project intellij by bazelbuild.
From the class SourceDirectoryCalculator, method calculateJavaSourceDirectories:
/**
* Adds the java source directories.
*/
private void calculateJavaSourceDirectories(
        BlazeContext context,
        WorkspaceRoot workspaceRoot,
        ArtifactLocationDecoder artifactLocationDecoder,
        WorkspacePath directoryRoot,
        Collection<SourceArtifact> javaArtifacts,
        Collection<JavaPackageReader> javaPackageReaders,
        Collection<BlazeSourceDirectory> result) {
    List<SourceRoot> sourceRootsPerFile = Lists.newArrayList();
    // Get java sources
    List<ListenableFuture<SourceRoot>> sourceRootFutures = Lists.newArrayList();
    for (final SourceArtifact sourceArtifact : javaArtifacts) {
        ListenableFuture<SourceRoot> future = executorService.submit(
                () -> sourceRootForJavaSource(context, artifactLocationDecoder, sourceArtifact, javaPackageReaders));
        sourceRootFutures.add(future);
    }
    try {
        for (SourceRoot sourceRoot : Futures.allAsList(sourceRootFutures).get()) {
            if (sourceRoot != null) {
                sourceRootsPerFile.add(sourceRoot);
            }
        }
    } catch (ExecutionException | InterruptedException e) {
        logger.error(e);
        throw new IllegalStateException("Could not read sources");
    }
    // Sort source roots into their respective directories
    Map<WorkspacePath, Multiset<SourceRoot>> sourceDirectoryToSourceRoots = new HashMap<>();
    for (SourceRoot sourceRoot : sourceRootsPerFile) {
        sourceDirectoryToSourceRoots
                .computeIfAbsent(sourceRoot.workspacePath, k -> HashMultiset.create())
                .add(sourceRoot);
    }
    // Create a mapping from directory to package prefix
    Map<WorkspacePath, SourceRoot> workspacePathToSourceRoot = Maps.newHashMap();
    for (WorkspacePath workspacePath : sourceDirectoryToSourceRoots.keySet()) {
        Multiset<SourceRoot> sources = sourceDirectoryToSourceRoots.get(workspacePath);
        Multiset<String> packages = HashMultiset.create();
        for (Multiset.Entry<SourceRoot> entry : sources.entrySet()) {
            packages.setCount(entry.getElement().packagePrefix, entry.getCount());
        }
        final String directoryPackagePrefix;
        // Common case -- all source files agree on a single package
        if (packages.elementSet().size() == 1) {
            directoryPackagePrefix = packages.elementSet().iterator().next();
        } else {
            String preferredPackagePrefix = PackagePrefixCalculator.packagePrefixOf(workspacePath);
            directoryPackagePrefix = pickMostFrequentlyOccurring(packages, preferredPackagePrefix);
        }
        SourceRoot candidateRoot = new SourceRoot(workspacePath, directoryPackagePrefix);
        workspacePathToSourceRoot.put(workspacePath, candidateRoot);
    }
    // Add content entry base if it doesn't exist
    if (!workspacePathToSourceRoot.containsKey(directoryRoot)) {
        SourceRoot candidateRoot = new SourceRoot(directoryRoot, PackagePrefixCalculator.packagePrefixOf(directoryRoot));
        workspacePathToSourceRoot.put(directoryRoot, candidateRoot);
    }
    // First, create a graph of the directory structure from root to each source file
    Map<WorkspacePath, SourceRootDirectoryNode> sourceRootDirectoryNodeMap = Maps.newHashMap();
    SourceRootDirectoryNode rootNode = new SourceRootDirectoryNode(directoryRoot, null);
    sourceRootDirectoryNodeMap.put(directoryRoot, rootNode);
    for (SourceRoot sourceRoot : workspacePathToSourceRoot.values()) {
        final String sourcePathRelativeToDirectoryRoot = sourcePathRelativeToDirectoryRoot(directoryRoot, sourceRoot.workspacePath);
        List<String> pathComponents = !Strings.isNullOrEmpty(sourcePathRelativeToDirectoryRoot)
                ? PATH_SPLITTER.splitToList(sourcePathRelativeToDirectoryRoot)
                : ImmutableList.of();
        SourceRootDirectoryNode previousNode = rootNode;
        for (int i = 0; i < pathComponents.size(); ++i) {
            final WorkspacePath workspacePath = getWorkspacePathFromPathComponents(directoryRoot, pathComponents, i + 1);
            SourceRootDirectoryNode node = sourceRootDirectoryNodeMap.get(workspacePath);
            if (node == null) {
                node = new SourceRootDirectoryNode(workspacePath, pathComponents.get(i));
                sourceRootDirectoryNodeMap.put(workspacePath, node);
                previousNode.children.add(node);
            }
            previousNode = node;
        }
    }
    // Add package prefix votes at each directory node
    for (SourceRoot sourceRoot : workspacePathToSourceRoot.values()) {
        final String sourcePathRelativeToDirectoryRoot = sourcePathRelativeToDirectoryRoot(directoryRoot, sourceRoot.workspacePath);
        List<String> packageComponents = PACKAGE_SPLITTER.splitToList(sourceRoot.packagePrefix);
        List<String> pathComponents = !Strings.isNullOrEmpty(sourcePathRelativeToDirectoryRoot)
                ? PATH_SPLITTER.splitToList(sourcePathRelativeToDirectoryRoot)
                : ImmutableList.of();
        int packageIndex = packageComponents.size();
        int pathIndex = pathComponents.size();
        while (pathIndex >= 0 && packageIndex >= 0) {
            final WorkspacePath workspacePath = getWorkspacePathFromPathComponents(directoryRoot, pathComponents, pathIndex);
            SourceRootDirectoryNode node = sourceRootDirectoryNodeMap.get(workspacePath);
            String packagePrefix = PACKAGE_JOINER.join(packageComponents.subList(0, packageIndex));
            // Force the package prefix at the source root itself; otherwise just add a vote
            if (sourceRoot.workspacePath.equals(workspacePath)) {
                node.forcedPackagePrefix = packagePrefix;
            } else {
                node.packagePrefixVotes.add(packagePrefix);
            }
            String pathComponent = pathIndex > 0 ? pathComponents.get(pathIndex - 1) : "";
            String packageComponent = packageIndex > 0 ? packageComponents.get(packageIndex - 1) : "";
            if (!pathComponent.equals(packageComponent)) {
                break;
            }
            --packageIndex;
            --pathIndex;
        }
    }
    Map<WorkspacePath, SourceRoot> sourceRoots = Maps.newHashMap();
    SourceRootDirectoryNode root = sourceRootDirectoryNodeMap.get(directoryRoot);
    visitDirectoryNode(sourceRoots, root, null);
    for (SourceRoot sourceRoot : sourceRoots.values()) {
        result.add(
                BlazeSourceDirectory.builder(workspaceRoot.fileForPath(sourceRoot.workspacePath))
                        .setPackagePrefix(sourceRoot.packagePrefix)
                        .setGenerated(false)
                        .build());
    }
}
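The package-voting logic hinges on Multiset.setCount(...) to store exact tallies and on picking the most frequently occurring element. Here is a minimal sketch of that vote-counting idea; pickMostFrequentlyOccurring below is a stand-in written for this example, not the Bazel plugin's actual helper:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class PackageVoteDemo {
    // Stand-in for the plugin's pickMostFrequentlyOccurring: returns the element
    // with the highest count, preferring `preferred` on ties
    static String pickMostFrequentlyOccurring(Multiset<String> votes, String preferred) {
        String best = preferred;
        int bestCount = votes.count(preferred);
        for (Multiset.Entry<String> entry : votes.entrySet()) {
            if (entry.getCount() > bestCount) {
                best = entry.getElement();
                bestCount = entry.getCount();
            }
        }
        return best;
    }

    public static void main(String[] args) {
        Multiset<String> packages = HashMultiset.create();
        // setCount stores an exact tally, as the directory calculator does per package prefix
        packages.setCount("com.example.app", 7);
        packages.setCount("com.example", 2);
        System.out.println(pickMostFrequentlyOccurring(packages, "com.example"));
        // prints com.example.app
    }
}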
Use of com.google.common.collect.Multiset in project presto by prestodb.
From the class BucketBalancer, method computeAssignmentChanges:
private static Multimap<String, BucketAssignment> computeAssignmentChanges(ClusterState clusterState) {
    Multimap<String, BucketAssignment> sourceToAllocationChanges = HashMultimap.create();
    Map<String, Long> allocationBytes = new HashMap<>(clusterState.getAssignedBytes());
    Set<String> activeNodes = clusterState.getActiveNodes();
    for (Distribution distribution : clusterState.getDistributionAssignments().keySet()) {
        // number of buckets in this distribution assigned to each node
        Multiset<String> allocationCounts = HashMultiset.create();
        Collection<BucketAssignment> distributionAssignments = clusterState.getDistributionAssignments().get(distribution);
        distributionAssignments.stream()
                .map(BucketAssignment::getNodeIdentifier)
                .forEach(allocationCounts::add);
        int currentMin = allocationBytes.keySet().stream().mapToInt(allocationCounts::count).min().getAsInt();
        int currentMax = allocationBytes.keySet().stream().mapToInt(allocationCounts::count).max().getAsInt();
        int numBuckets = distributionAssignments.size();
        int targetMin = (int) Math.floor((numBuckets * 1.0) / clusterState.getActiveNodes().size());
        int targetMax = (int) Math.ceil((numBuckets * 1.0) / clusterState.getActiveNodes().size());
        log.info("Distribution %s: Current bucket skew: min %s, max %s. Target bucket skew: min %s, max %s",
                distribution.getId(), currentMin, currentMax, targetMin, targetMax);
        for (String source : ImmutableSet.copyOf(allocationCounts)) {
            List<BucketAssignment> existingAssignments = distributionAssignments.stream()
                    .filter(assignment -> assignment.getNodeIdentifier().equals(source))
                    .collect(toList());
            for (BucketAssignment existingAssignment : existingAssignments) {
                if (activeNodes.contains(source) && allocationCounts.count(source) <= targetMin) {
                    break;
                }
                // identify nodes with bucket counts lower than the computed target, and greedily
                // select from this set based on projected disk utilization.
                // greediness means that this may produce decidedly non-optimal results if one looks
                // at the global distribution of buckets->nodes.
                // also, this assumes that nodes in a cluster have identical storage capacity
                String target = activeNodes.stream()
                        .filter(candidate -> !candidate.equals(source) && allocationCounts.count(candidate) < targetMax)
                        .sorted(comparingInt(allocationCounts::count))
                        .min(Comparator.comparingDouble(allocationBytes::get))
                        .orElseThrow(() -> new VerifyException("unable to find target for rebalancing"));
                long bucketSize = clusterState.getDistributionBucketSize().get(distribution);
                // only move the bucket if doing so reduces the imbalance
                if (activeNodes.contains(source) && (allocationCounts.count(source) == targetMax && allocationCounts.count(target) == targetMin)) {
                    break;
                }
                allocationCounts.remove(source);
                allocationCounts.add(target);
                allocationBytes.compute(source, (k, v) -> v - bucketSize);
                allocationBytes.compute(target, (k, v) -> v + bucketSize);
                sourceToAllocationChanges.put(
                        existingAssignment.getNodeIdentifier(),
                        new BucketAssignment(existingAssignment.getDistributionId(), existingAssignment.getBucketNumber(), target));
            }
        }
    }
    return sourceToAllocationChanges;
}
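The balancer leans on Multiset as a mutable counter: count(...) tracks per-node bucket totals, and remove(...)/add(...) shifts one bucket between nodes while keeping the tallies consistent. A minimal, self-contained sketch of that counter pattern follows; the node names and the balance condition are invented for illustration:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class BucketCounterDemo {
    public static void main(String[] args) {
        Multiset<String> allocationCounts = HashMultiset.create();
        // Seed a skewed allocation: node-a holds 4 buckets, node-b holds 1
        allocationCounts.add("node-a", 4);
        allocationCounts.add("node-b", 1);

        // Move buckets from the loaded node to the light one until balanced,
        // using the same remove/add bookkeeping the balancer performs per move
        while (allocationCounts.count("node-a") - allocationCounts.count("node-b") > 1) {
            allocationCounts.remove("node-a"); // removes a single occurrence
            allocationCounts.add("node-b");
        }
        System.out.println(allocationCounts); // e.g. [node-a x 3, node-b x 2]
    }
}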