Use of it.unimi.dsi.fastutil.ints.IntList in project druid by druid-io.
The class IntListUtilsTest, method testEmptyRangeIntList.
@Test(expected = IndexOutOfBoundsException.class)
public void testEmptyRangeIntList() {
    final IntList list = IntListUtils.fromTo(10, 10);
    assertEquals(0, list.size());
    list.get(0);
}
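The test pins down the boundary behaviour of IntListUtils.fromTo: a [10, 10) range is empty, so size() is 0 and any get throws IndexOutOfBoundsException. As a rough illustration of how such a range view can be built on fastutil, here is a minimal sketch based on AbstractIntList; the class name RangeIntList and its bounds-check message are illustrative, not Druid's actual implementation.

import it.unimi.dsi.fastutil.ints.AbstractIntList;

// Illustrative sketch only: a read-only IntList view of the range [from, to).
// Druid's IntListUtils.fromTo may be implemented differently.
final class RangeIntList extends AbstractIntList {
    private final int from;
    private final int size;

    RangeIntList(int from, int to) {
        if (from > to) {
            throw new IllegalArgumentException("from > to");
        }
        this.from = from;
        this.size = to - from;
    }

    @Override
    public int getInt(int index) {
        if (index < 0 || index >= size) {
            // matches the behaviour the test expects for an empty range
            throw new IndexOutOfBoundsException("index: " + index + ", size: " + size);
        }
        return from + index;
    }

    @Override
    public int size() {
        return size;
    }
}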
Use of it.unimi.dsi.fastutil.ints.IntList in project presto by prestodb.
The class ParquetReader, method readMap.
private Block readMap(Type type, List<String> path, IntList elementOffsets) throws IOException {
    List<Type> parameters = type.getTypeParameters();
    checkArgument(parameters.size() == 2, "Maps must have two type parameters, found %s", parameters.size());
    Block[] blocks = new Block[parameters.size()];
    IntList keyOffsets = new IntArrayList();
    IntList valueOffsets = new IntArrayList();
    path.add(MAP_TYPE_NAME);
    blocks[0] = readBlock(MAP_KEY_NAME, parameters.get(0), path, keyOffsets);
    blocks[1] = readBlock(MAP_VALUE_NAME, parameters.get(1), path, valueOffsets);
    path.remove(MAP_TYPE_NAME);
    if (blocks[0].getPositionCount() == 0) {
        for (int i = 0; i < batchSize; i++) {
            elementOffsets.add(0);
        }
        return RunLengthEncodedBlock.create(parameters.get(0), null, batchSize);
    }
    InterleavedBlock interleavedBlock = new InterleavedBlock(new Block[] {blocks[0], blocks[1]});
    int[] offsets = new int[batchSize + 1];
    for (int i = 1; i < offsets.length; i++) {
        // each map entry contributes a key and a value to the interleaved block, hence * 2
        int elementPositionCount = keyOffsets.getInt(i - 1) * 2;
        elementOffsets.add(elementPositionCount);
        offsets[i] = offsets[i - 1] + elementPositionCount;
    }
    return new ArrayBlock(batchSize, new boolean[batchSize], offsets, interleavedBlock);
}
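Two things in readMap lean on IntList: keyOffsets and valueOffsets collect per-row counts without boxing, and the final loop turns those counts into a prefix sum of positions in the interleaved key/value block, where each map entry occupies two positions. A standalone sketch of just that offset computation follows; toInterleavedOffsets and entryCountsPerRow are made-up names for illustration, not part of Presto's reader.

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;

final class MapOffsetsSketch {
    // Converts per-row entry counts into cumulative positions in an interleaved
    // key/value block: each entry contributes one key and one value, hence * 2.
    static int[] toInterleavedOffsets(IntList entryCountsPerRow) {
        final int rows = entryCountsPerRow.size();
        final int[] offsets = new int[rows + 1];
        for (int i = 1; i <= rows; i++) {
            offsets[i] = offsets[i - 1] + entryCountsPerRow.getInt(i - 1) * 2;
        }
        return offsets;
    }

    public static void main(String[] args) {
        final IntList counts = new IntArrayList(new int[] {2, 0, 3});
        // prints [0, 4, 4, 10]: row 0 has 2 entries (4 positions), row 1 has none, row 2 has 3
        System.out.println(java.util.Arrays.toString(toInterleavedOffsets(counts)));
    }
}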
Use of it.unimi.dsi.fastutil.ints.IntList in project gatk by broadinstitute.
The class ContextCovariate, method recordValues.
@Override
public void recordValues(final GATKRead read, final SAMFileHeader header, final ReadCovariates values, final boolean recordIndelValues) {
    final int originalReadLength = read.getLength();

    // store the original bases and then write Ns over low quality ones
    // Note: this makes a copy of the read
    final byte[] strandedClippedBases = getStrandedClippedBytes(read, lowQualTail);

    // Note: we're using a non-standard library here because boxing came up on profiling as taking 20% of time in applyBQSR.
    // IntList avoids boxing.
    final IntList mismatchKeys = contextWith(strandedClippedBases, mismatchesContextSize, mismatchesKeyMask);

    final int readLengthAfterClipping = strandedClippedBases.length;

    // due to the clipping of the low quality bases
    if (readLengthAfterClipping != originalReadLength) {
        // don't bother zeroing out if we are going to overwrite the whole array
        for (int i = 0; i < originalReadLength; i++) {
            // this base has been clipped off, so zero out the covariate values here
            values.addCovariate(0, 0, 0, i);
        }
    }

    final boolean negativeStrand = read.isReverseStrand();

    // Note: duplicated the loop to avoid checking recordIndelValues on each iteration
    if (recordIndelValues) {
        final IntList indelKeys = contextWith(strandedClippedBases, indelsContextSize, indelsKeyMask);
        for (int i = 0; i < readLengthAfterClipping; i++) {
            final int readOffset = getStrandedOffset(negativeStrand, i, readLengthAfterClipping);
            final int indelKey = indelKeys.getInt(i);
            values.addCovariate(mismatchKeys.getInt(i), indelKey, indelKey, readOffset);
        }
    } else {
        for (int i = 0; i < readLengthAfterClipping; i++) {
            final int readOffset = getStrandedOffset(negativeStrand, i, readLengthAfterClipping);
            values.addCovariate(mismatchKeys.getInt(i), 0, 0, readOffset);
        }
    }
}
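The note about boxing is the reason fastutil appears throughout this recalibration code: covariate keys are produced per base in a tight loop over every read, and accumulating them in a java.util.List<Integer> would allocate an Integer per base. Below is a minimal sketch of the boxing-free pattern these snippets rely on (an IntArrayList presized to the read length, primitive add and getInt); computeKeys and its key computation are placeholders, and the real key packing is in contextWith, shown next.

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;

final class PerBaseKeysSketch {
    // Placeholder key computation; the actual covariate key packing is done by contextWith.
    static IntList computeKeys(byte[] bases) {
        // presize to the read length so the backing array is allocated once
        final IntList keys = new IntArrayList(bases.length);
        for (byte base : bases) {
            keys.add(base & 0xFF); // primitive add: no Integer allocation per base
        }
        return keys;
    }

    public static void main(String[] args) {
        final IntList keys = computeKeys("ACGT".getBytes(java.nio.charset.StandardCharsets.US_ASCII));
        for (int i = 0; i < keys.size(); i++) {
            System.out.println(keys.getInt(i)); // primitive read: no unboxing
        }
    }
}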
Use of it.unimi.dsi.fastutil.ints.IntList in project gatk by broadinstitute.
The class ContextCovariate, method contextWith.
/**
 * Calculates the context of a base independent of the covariate mode (mismatch, insertion or deletion).
 *
 * @param bases       the bases in the read to build the context from
 * @param contextSize context size to use building the context
 * @param mask        mask for pulling out just the context bits
 * @return one key per base; a key is -1 when there is not enough previous context or the context contains a non-ACGT base
 */
private static IntList contextWith(final byte[] bases, final int contextSize, final int mask) {
    final int readLength = bases.length;

    // Note: we use a specialized collection to avoid the cost of boxing and unboxing that otherwise comes up on the profiler.
    final IntList keys = new IntArrayList(readLength);

    // the first contextSize-1 bases will not have enough previous context
    for (int i = 1; i < contextSize && i <= readLength; i++) {
        keys.add(-1);
    }

    if (readLength < contextSize) {
        return keys;
    }

    final int newBaseOffset = 2 * (contextSize - 1) + LENGTH_BITS;

    // get (and add) the key for the context starting at the first base
    int currentKey = keyFromContext(bases, 0, contextSize);
    keys.add(currentKey);

    // if the first key was -1 then there was an N in the context; figure out how many more consecutive contexts it affects
    int currentNPenalty = 0;
    if (currentKey == -1) {
        currentKey = 0;
        currentNPenalty = contextSize - 1;
        int offset = newBaseOffset;
        while (bases[currentNPenalty] != 'N') {
            final int baseIndex = BaseUtils.simpleBaseToBaseIndex(bases[currentNPenalty]);
            currentKey |= (baseIndex << offset);
            offset -= 2;
            currentNPenalty--;
        }
    }

    for (int currentIndex = contextSize; currentIndex < readLength; currentIndex++) {
        final int baseIndex = BaseUtils.simpleBaseToBaseIndex(bases[currentIndex]);
        if (baseIndex == -1) {
            // ignore non-ACGT bases
            currentNPenalty = contextSize;
            currentKey = 0; // reset the key
        } else {
            // push this base's contribution onto the key: shift everything 2 bits, mask out the non-context bits, and add the new base and the length in
            currentKey = (currentKey >> 2) & mask;
            currentKey |= (baseIndex << newBaseOffset);
            currentKey |= contextSize;
        }
        if (currentNPenalty == 0) {
            keys.add(currentKey);
        } else {
            currentNPenalty--;
            keys.add(-1);
        }
    }
    return keys;
}
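As a rough guide to the key packing above: each A/C/G/T base contributes two bits, and contextWith maintains the packed window incrementally (shift right by two, mask, OR the new base into the high bits, OR in the context length) rather than recomputing it at every position. The sketch below recomputes a packed context from scratch and leaves out GATK's length bits, bit ordering and N bookkeeping, so its keys are not identical to contextWith's; it only illustrates the 2-bit encoding and the -1 convention.

final class ContextKeySketch {
    // Maps A/C/G/T to 0..3 and anything else (e.g. 'N') to -1,
    // mirroring what BaseUtils.simpleBaseToBaseIndex is used for above.
    private static int baseIndex(byte base) {
        switch (base) {
            case 'A': return 0;
            case 'C': return 1;
            case 'G': return 2;
            case 'T': return 3;
            default:  return -1;
        }
    }

    // Packs the contextSize bases ending at position end (inclusive) into an int,
    // two bits per base; returns -1 if the window is incomplete or contains a non-ACGT base.
    static int packedContext(byte[] bases, int end, int contextSize) {
        if (end < contextSize - 1) {
            return -1; // not enough previous context, like the leading -1 keys above
        }
        int key = 0;
        for (int i = end - contextSize + 1; i <= end; i++) {
            final int idx = baseIndex(bases[i]);
            if (idx == -1) {
                return -1;
            }
            key = (key << 2) | idx;
        }
        return key;
    }
}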
Use of it.unimi.dsi.fastutil.ints.IntList in project druid by druid-io.
The class MergeIntIteratorTest, method smokeTest.
@Test
public void smokeTest() {
    ThreadLocalRandom r = ThreadLocalRandom.current();
    for (int i = 0; i < 1000; i++) {
        int numIterators = r.nextInt(1, 11);
        List<IntList> lists = new ArrayList<>(numIterators);
        for (int j = 0; j < numIterators; j++) {
            lists.add(new IntArrayList());
        }
        for (int j = 0; j < 50; j++) {
            lists.get(r.nextInt(numIterators)).add(j);
        }
        for (int j = 0; j < lists.size() + 1; j++) {
            assertAscending(mergeAscending(iteratorsFromLists(lists)));
            Collections.rotate(lists, 1);
        }
        for (int j = 0; j < 10; j++) {
            Collections.shuffle(lists);
            assertAscending(mergeAscending(iteratorsFromLists(lists)));
        }
    }
}
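The smoke test exercises mergeAscending purely through its contract: each input list is ascending (every value j goes to exactly one list, in increasing order), so the merged output must be ascending no matter how the list of iterators is rotated or shuffled. Below is a sketch of a priority-queue k-way merge that satisfies that contract; MergeAscendingSketch and the Head holder are made-up names, and this is not Druid's implementation.

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntList;

import java.util.List;
import java.util.PriorityQueue;

final class MergeAscendingSketch {
    // Holds one input iterator together with its current (not yet emitted) head value.
    private static final class Head {
        final IntIterator iterator;
        int value;

        Head(IntIterator iterator) {
            this.iterator = iterator;
            this.value = iterator.nextInt();
        }
    }

    // K-way merge of ascending iterators: repeatedly emit the smallest head, then advance that iterator.
    static IntList mergeAscendingSketch(List<IntIterator> iterators) {
        PriorityQueue<Head> queue = new PriorityQueue<>((a, b) -> Integer.compare(a.value, b.value));
        for (IntIterator iterator : iterators) {
            if (iterator.hasNext()) {
                queue.add(new Head(iterator));
            }
        }
        IntList merged = new IntArrayList();
        while (!queue.isEmpty()) {
            Head head = queue.poll();
            merged.add(head.value);
            if (head.iterator.hasNext()) {
                head.value = head.iterator.nextInt();
                queue.add(head); // re-insert with the new head value
            }
        }
        return merged;
    }

    public static void main(String[] args) {
        IntList a = new IntArrayList(new int[] {0, 3, 4});
        IntList b = new IntArrayList(new int[] {1, 2, 5});
        // prints [0, 1, 2, 3, 4, 5]
        System.out.println(mergeAscendingSketch(List.<IntIterator>of(a.iterator(), b.iterator())));
    }
}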