Use of it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap in project druid by druid-io.
From the class ListFilteredDimensionSpecDimensionSelectorTest, method createDictionaries:
/**
 * Builds a forward dictionary (value -> id, ids assigned in first-seen order) and the
 * matching reverse dictionary (id -> value) from a list of multi-value rows.
 *
 * @param values rows of (possibly multi-valued) dimension values
 * @return pair of (value -> id map, id -> value map)
 */
private NonnullPair<Object2IntMap<String>, Int2ObjectMap<String>> createDictionaries(List<List<String>> values) {
  final Object2IntMap<String> valueToId = new Object2IntOpenHashMap<>();
  final Int2ObjectMap<String> idToValue = new Int2ObjectOpenHashMap<>();
  final MutableInt idCounter = new MutableInt(0);
  for (final List<String> row : values) {
    for (final String value : row) {
      // Assign a fresh id only on first sight of the value.
      final int id = valueToId.computeIntIfAbsent(value, ignored -> idCounter.getAndIncrement());
      idToValue.putIfAbsent(id, value);
    }
  }
  return new NonnullPair<>(valueToId, idToValue);
}
Use of it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap in project pinot by linkedin.
From the class NoDictionarySingleColumnGroupKeyGenerator, method createGroupKeyMap:
/**
 * Helper method to create the group-key map, depending on the data type.
 * Uses primitive maps when possible.
 *
 * @param keyType DataType for the key
 * @return Map
 */
private Map createGroupKeyMap(FieldSpec.DataType keyType) {
  // Each branch builds the narrowest primitive map for the key type, seeds the
  // sentinel default value, and returns it directly.
  switch (keyType) {
    case INT: {
      Int2IntMap groupKeys = new Int2IntOpenHashMap();
      groupKeys.defaultReturnValue(INVALID_ID);
      return groupKeys;
    }
    case LONG: {
      Long2IntOpenHashMap groupKeys = new Long2IntOpenHashMap();
      groupKeys.defaultReturnValue(INVALID_ID);
      return groupKeys;
    }
    case FLOAT: {
      Float2IntOpenHashMap groupKeys = new Float2IntOpenHashMap();
      groupKeys.defaultReturnValue(INVALID_ID);
      return groupKeys;
    }
    case DOUBLE: {
      Double2IntOpenHashMap groupKeys = new Double2IntOpenHashMap();
      groupKeys.defaultReturnValue(INVALID_ID);
      return groupKeys;
    }
    case STRING: {
      Object2IntOpenHashMap<String> groupKeys = new Object2IntOpenHashMap<>();
      groupKeys.defaultReturnValue(INVALID_ID);
      return groupKeys;
    }
    default:
      throw new IllegalArgumentException("Illegal data type for no-dictionary key generator: " + keyType);
  }
}
Use of it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap in project pinot by linkedin.
From the class SegmentDictionaryCreator, method build:
/**
 * Writes the column dictionary to {@code dictionaryFile} and populates the in-memory
 * value-to-dictionary-index map for this column's data type.
 * Assumes {@code sortedList} holds the distinct column values already sorted
 * (inferred from the per-type casts below — TODO confirm at the call site).
 *
 * @param isSorted single-element in/out flag; only the STRING/BOOLEAN branch reads or
 *                 updates it, clearing it when padding breaks the original sort order
 * @throws Exception if writing the dictionary file fails
 */
public void build(boolean[] isSorted) throws Exception {
switch(spec.getDataType()) {
case INT:
// Fixed-width writer: rowCount rows, one column of INT width.
final FixedByteSingleValueMultiColWriter intDictionaryWrite = new FixedByteSingleValueMultiColWriter(dictionaryFile, rowCount, 1, V1Constants.Dict.INT_DICTIONARY_COL_SIZE);
intValueToIndexMap = new Int2IntOpenHashMap(rowCount);
int[] sortedInts = (int[]) sortedList;
for (int i = 0; i < rowCount; i++) {
final int entry = sortedInts[i];
// Dictionary index i maps to the i-th sorted value, both on disk and in memory.
intDictionaryWrite.setInt(i, 0, entry);
intValueToIndexMap.put(entry, i);
}
intDictionaryWrite.close();
break;
case FLOAT:
final FixedByteSingleValueMultiColWriter floatDictionaryWrite = new FixedByteSingleValueMultiColWriter(dictionaryFile, rowCount, 1, V1Constants.Dict.FLOAT_DICTIONARY_COL_SIZE);
floatValueToIndexMap = new Float2IntOpenHashMap(rowCount);
float[] sortedFloats = (float[]) sortedList;
for (int i = 0; i < rowCount; i++) {
final float entry = sortedFloats[i];
floatDictionaryWrite.setFloat(i, 0, entry);
floatValueToIndexMap.put(entry, i);
}
floatDictionaryWrite.close();
break;
case LONG:
final FixedByteSingleValueMultiColWriter longDictionaryWrite = new FixedByteSingleValueMultiColWriter(dictionaryFile, rowCount, 1, V1Constants.Dict.LONG_DICTIONARY_COL_SIZE);
longValueToIndexMap = new Long2IntOpenHashMap(rowCount);
long[] sortedLongs = (long[]) sortedList;
for (int i = 0; i < rowCount; i++) {
final long entry = sortedLongs[i];
longDictionaryWrite.setLong(i, 0, entry);
longValueToIndexMap.put(entry, i);
}
longDictionaryWrite.close();
break;
case DOUBLE:
final FixedByteSingleValueMultiColWriter doubleDictionaryWrite = new FixedByteSingleValueMultiColWriter(dictionaryFile, rowCount, 1, V1Constants.Dict.DOUBLE_DICTIONARY_COL_SIZE);
doubleValueToIndexMap = new Double2IntOpenHashMap(rowCount);
double[] sortedDoubles = (double[]) sortedList;
for (int i = 0; i < rowCount; i++) {
final double entry = sortedDoubles[i];
doubleDictionaryWrite.setDouble(i, 0, entry);
doubleValueToIndexMap.put(entry, i);
}
doubleDictionaryWrite.close();
break;
case STRING:
case BOOLEAN:
Object[] sortedObjects = (Object[]) sortedList;
// make sure that there is non-zero sized dictionary JIRA:PINOT-2947
// Column width = max UTF-8 byte length over all values (minimum 1 byte).
stringColumnMaxLength = 1;
for (final Object e : sortedObjects) {
String val = e.toString();
int length = val.getBytes(utf8CharSet).length;
if (stringColumnMaxLength < length) {
stringColumnMaxLength = length;
}
}
final FixedByteSingleValueMultiColWriter stringDictionaryWrite = new FixedByteSingleValueMultiColWriter(dictionaryFile, rowCount, 1, new int[] { stringColumnMaxLength });
// Padded forms of every value, plus padded -> original for later reverse lookup.
final String[] revised = new String[rowCount];
Map<String, String> revisedMap = new HashMap<String, String>();
for (int i = 0; i < rowCount; i++) {
final String toWrite = sortedObjects[i].toString();
String entry = getPaddedString(toWrite, stringColumnMaxLength, paddingChar);
revised[i] = entry;
// Padding can reorder values; clear the caller's sorted flag if it does.
if (isSorted[0] && i > 0 && (revised[i - 1].compareTo(entry) > 0)) {
isSorted[0] = false;
}
assert (revised[i].getBytes(utf8CharSet).length == stringColumnMaxLength);
revisedMap.put(revised[i], toWrite);
}
// A size mismatch means two distinct values collapsed to the same padded string.
if (revisedMap.size() != sortedObjects.length) {
// Two strings map to the same padded string in the current column
throw new RuntimeException("Number of entries in dictionary != number of unique values in the data in column " + spec.getName());
}
// The on-disk dictionary is sorted by the PADDED form.
Arrays.sort(revised);
stringValueToIndexMap = new Object2IntOpenHashMap<>(rowCount);
for (int i = 0; i < revised.length; i++) {
stringDictionaryWrite.setString(i, 0, revised[i]);
// No need to store padded value, we can store and lookup by raw value. In certain cases, original sorted order
// may be different from revised sorted order [PINOT-2730], so would need to use the original order in value
// to index map.
String origString = revisedMap.get(revised[i]);
stringValueToIndexMap.put(origString, i);
}
stringDictionaryWrite.close();
break;
default:
throw new RuntimeException("Unhandled type " + spec.getDataType());
}
}
Use of it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap in project gatk-protected by broadinstitute.
From the class ReadCountCollection, method arrangeTargets:
/**
 * Rearrange the targets so that they are in a particular order.
 *
 * @param targetsInOrder the desired target ordering for the returned collection
 * @return a new collection whose rows follow {@code targetsInOrder}.
 * @throws IllegalArgumentException if {@code targetsInOrder} is {@code null}, is empty,
 *         contains {@code null}, or contains any target not present in this collection.
 */
public ReadCountCollection arrangeTargets(final List<Target> targetsInOrder) {
    Utils.nonNull(targetsInOrder);
    Utils.nonEmpty(targetsInOrder, "the input targets list cannot be empty");
    // Row index of each existing target within this collection's count matrix.
    final Object2IntMap<Target> rowIndexByTarget = new Object2IntOpenHashMap<>(targets.size());
    int sourceRow = 0;
    for (final Target existing : targets) {
        rowIndexByTarget.put(existing, sourceRow++);
    }
    // Copy rows into a new matrix following the requested ordering.
    final RealMatrix rearranged = new Array2DRowRealMatrix(targetsInOrder.size(), columnNames.size());
    int destinationRow = 0;
    for (final Target target : targetsInOrder) {
        Utils.validateArg(rowIndexByTarget.containsKey(target), () -> String.format("target '%s' is not present in the collection", target.getName()));
        rearranged.setRow(destinationRow++, this.counts.getRow(rowIndexByTarget.getInt(target)));
    }
    return new ReadCountCollection(new ArrayList<>(targetsInOrder), columnNames, rearranged, false);
}
Use of it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap in project SpongeCommon by SpongePowered.
From the class ChunkSaveHelper, method writeChunks:
/**
 * Writes a JSON report of chunk, entity and tile-entity statistics for every loaded
 * world to {@code file}. Parent directories are created if missing.
 *
 * @param file   destination report file
 * @param logAll when true, additionally emits per-tile detail and entity collision
 *               hot spots (entities with 5+ colliding neighbours)
 */
@SuppressWarnings("rawtypes")
public static void writeChunks(File file, boolean logAll) {
    try {
        if (file.getParentFile() != null) {
            file.getParentFile().mkdirs();
        }
        try (JsonWriter writer = new JsonWriter(new FileWriter(file))) {
            writer.setIndent(" ");
            writer.beginArray();
            for (World spongeWorld : SpongeImpl.getGame().getServer().getWorlds()) {
                WorldServer world = (WorldServer) spongeWorld;
                writer.beginObject();
                writer.name("name").value(((SaveHandler) ((WorldServer) spongeWorld).getSaveHandler()).saveDirectoryName);
                writer.name("dimensionId").value(((IMixinWorldServer) spongeWorld).getDimensionId());
                writer.name("players").value(world.playerEntities.size());
                writer.name("loadedChunks").value(world.getChunkProvider().getLoadedChunks().size());
                writer.name("activeChunks").value(world.getChunkProvider().getLoadedChunkCount());
                writer.name("entities").value(world.loadedEntityList.size());
                writer.name("tiles").value(world.loadedTileEntityList.size());
                // Tallies: entities per chunk, entities per class, and (optionally)
                // per-entity collision counts for crowded spots.
                Object2IntMap<ChunkPos> chunkEntityCounts = new Object2IntOpenHashMap<>();
                chunkEntityCounts.defaultReturnValue(0);
                Object2IntMap<Class> classEntityCounts = new Object2IntOpenHashMap<>();
                classEntityCounts.defaultReturnValue(0);
                Object2IntMap<Entity> entityCollisionCounts = new Object2IntOpenHashMap<>();
                Set<BlockPos> collidingCoords = new HashSet<>();
                for (int i = 0; i < world.loadedEntityList.size(); i++) {
                    Entity entity = world.loadedEntityList.get(i);
                    ChunkPos chunkCoords = new ChunkPos((int) entity.posX >> 4, (int) entity.posZ >> 4);
                    chunkEntityCounts.put(chunkCoords, chunkEntityCounts.getInt(chunkCoords) + 1);
                    classEntityCounts.put(entity.getClass(), classEntityCounts.getInt(entity.getClass()) + 1);
                    if ((entity.getCollisionBoundingBox() != null) && logAll) {
                        BlockPos coords = new BlockPos(GenericMath.floor(entity.posX), GenericMath.floor(entity.posY), GenericMath.floor(entity.posZ));
                        // Count each block position at most once to avoid duplicate reports.
                        if (!collidingCoords.contains(coords)) {
                            collidingCoords.add(coords);
                            int size = entity.world.getEntitiesWithinAABBExcludingEntity(entity, entity.getCollisionBoundingBox().grow(1, 1, 1)).size();
                            // Only crowded spots (5+ neighbours) are worth reporting.
                            if (size < 5) {
                                continue;
                            }
                            entityCollisionCounts.put(entity, size);
                        }
                    }
                }
                // Tallies: tile entities per chunk and per class.
                Object2IntMap<ChunkPos> chunkTileCounts = new Object2IntOpenHashMap<>();
                chunkTileCounts.defaultReturnValue(0);
                Object2IntMap<Class> classTileCounts = new Object2IntOpenHashMap<>();
                classTileCounts.defaultReturnValue(0);
                writer.name("tiles").beginArray();
                for (int i = 0; i < world.loadedTileEntityList.size(); i++) {
                    TileEntity tile = world.loadedTileEntityList.get(i);
                    if (logAll) {
                        writer.beginObject();
                        writer.name("type").value(tile.getClass().toString());
                        writer.name("x").value(tile.getPos().getX());
                        writer.name("y").value(tile.getPos().getY());
                        writer.name("z").value(tile.getPos().getZ());
                        writer.name("isInvalid").value(tile.isInvalid());
                        writer.name("block").value("" + tile.getBlockType());
                        writer.endObject();
                    }
                    ChunkPos chunkCoords = new ChunkPos(tile.getPos().getX() >> 4, tile.getPos().getZ() >> 4);
                    chunkTileCounts.put(chunkCoords, chunkTileCounts.getInt(chunkCoords) + 1);
                    classTileCounts.put(tile.getClass(), classTileCounts.getInt(tile.getClass()) + 1);
                }
                writer.endArray();
                if (logAll) {
                    writeChunkCounts(writer, "topEntityColliders", entityCollisionCounts, 20);
                }
                writeChunkCounts(writer, "entitiesByClass", classEntityCounts);
                writeChunkCounts(writer, "entitiesByChunk", chunkEntityCounts);
                writeChunkCounts(writer, "tilesByClass", classTileCounts);
                writeChunkCounts(writer, "tilesByChunk", chunkTileCounts);
                // Dimension
                writer.endObject();
            }
            // Dimensions
            writer.endArray();
        }
    } catch (Throwable throwable) {
        // Pass the throwable to the logger so the stack trace is preserved
        // (previously only the message was logged and the cause was lost).
        SpongeImpl.getLogger().error("Could not save chunk info report to " + file, throwable);
    }
}
Aggregations