use of it.unimi.dsi.fastutil.longs.LongOpenHashSet in project druid by druid-io.
the class InDimFilter method getLongPredicateSupplier.
// As the set of filtered values can be large, parsing them as longs should be done only if needed, and only once.
// Pass in a common long predicate supplier to all filters created by .toFilter(), so that
// we only compute the long hashset/array once per query.
// This supplier must be thread-safe, since this DimFilter will be accessed in the query runners.
private Supplier<DruidLongPredicate> getLongPredicateSupplier() {
  return new Supplier<DruidLongPredicate>() {
    private final Object initLock = new Object();
    // volatile is required for the unsynchronized first read in the double-checked locking below
    private volatile DruidLongPredicate predicate;

    private void initLongValues() {
      if (predicate != null) {
        return;
      }
      synchronized (initLock) {
        if (predicate != null) {
          return;
        }
        // Keep only the values that parse as longs; non-numeric strings can never match a long column.
        LongArrayList longs = new LongArrayList(values.size());
        for (String value : values) {
          Long longValue = GuavaUtils.tryParseLong(value);
          if (longValue != null) {
            longs.add(longValue);
          }
        }
        if (longs.size() > NUMERIC_HASHING_THRESHOLD) {
          // Large value sets: O(1) expected lookups via a hash set.
          final LongOpenHashSet longHashSet = new LongOpenHashSet(longs);
          predicate = new DruidLongPredicate() {
            @Override
            public boolean applyLong(long input) {
              return longHashSet.contains(input);
            }
          };
        } else {
          // Small value sets: a sorted array with binary search is more compact.
          final long[] longArray = longs.toLongArray();
          Arrays.sort(longArray);
          predicate = new DruidLongPredicate() {
            @Override
            public boolean applyLong(long input) {
              return Arrays.binarySearch(longArray, input) >= 0;
            }
          };
        }
      }
    }

    @Override
    public DruidLongPredicate get() {
      initLongValues();
      return predicate;
    }
  };
}
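A minimal standalone sketch of the same memoization pattern, assuming java.util.function.LongPredicate in place of Druid's DruidLongPredicate; the HASHING_THRESHOLD constant and all class/method names here are invented for illustration, not Druid's actual values:

import it.unimi.dsi.fastutil.longs.LongOpenHashSet;

import java.util.Arrays;
import java.util.function.LongPredicate;
import java.util.function.Supplier;

public class LazyLongPredicate {
  // Assumed threshold, for illustration only.
  private static final int HASHING_THRESHOLD = 16;

  public static Supplier<LongPredicate> forValues(final long[] values) {
    return new Supplier<LongPredicate>() {
      private final Object initLock = new Object();
      private volatile LongPredicate predicate; // volatile makes the unsynchronized read safe

      @Override
      public LongPredicate get() {
        LongPredicate result = predicate;
        if (result == null) {
          synchronized (initLock) {
            result = predicate;
            if (result == null) {
              if (values.length > HASHING_THRESHOLD) {
                // Large sets: O(1) expected membership checks.
                final LongOpenHashSet set = new LongOpenHashSet(values);
                result = set::contains;
              } else {
                // Small sets: a sorted array is more compact.
                final long[] sorted = values.clone();
                Arrays.sort(sorted);
                result = input -> Arrays.binarySearch(sorted, input) >= 0;
              }
              predicate = result;
            }
          }
        }
        return result;
      }
    };
  }

  public static void main(String[] args) {
    Supplier<LongPredicate> supplier = forValues(new long[] {1L, 5L, 42L});
    System.out.println(supplier.get().test(42L)); // true
    System.out.println(supplier.get().test(7L)); // false
  }
}

The threshold encodes a space/time trade-off: LongOpenHashSet gives O(1) expected lookups but carries hashing and load-factor overhead, while a sorted long[] with binary search is smaller and fast enough for short value lists.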
use of it.unimi.dsi.fastutil.longs.LongOpenHashSet in project geode by apache.
the class PersistentOplogSet method createOplogs.
public void createOplogs(boolean needsOplogs, Map<File, DirectoryHolder> backupFiles) {
  LongOpenHashSet foundCrfs = new LongOpenHashSet();
  LongOpenHashSet foundDrfs = new LongOpenHashSet();
  for (Map.Entry<File, DirectoryHolder> entry : backupFiles.entrySet()) {
    File file = entry.getKey();
    String absolutePath = file.getAbsolutePath();
    // The oplog id sits between the last '_' and the last '.' of the path.
    int underscorePosition = absolutePath.lastIndexOf("_");
    int pointPosition = absolutePath.lastIndexOf(".");
    String opid = absolutePath.substring(underscorePosition + 1, pointPosition);
    long oplogId = Long.parseLong(opid);
    maxRecoveredOplogId = Math.max(maxRecoveredOplogId, oplogId);
    // If the oplog was deleted, don't process it.
    if (Oplog.isCRFFile(file.getName())) {
      if (!isCrfOplogIdPresent(oplogId)) {
        deleteFileOnRecovery(file);
        try {
          String krfFileName = Oplog.getKRFFilenameFromCRFFilename(file.getAbsolutePath());
          File krfFile = new File(krfFileName);
          deleteFileOnRecovery(krfFile);
        } catch (Exception ex) {
          // ignore
        }
        // We were unable to delete this file earlier; skip it now.
        continue;
      }
    } else if (Oplog.isDRFFile(file.getName())) {
      if (!isDrfOplogIdPresent(oplogId)) {
        deleteFileOnRecovery(file);
        // We were unable to delete this file earlier; skip it now.
        continue;
      }
    }
    Oplog oplog = getChild(oplogId);
    if (oplog == null) {
      oplog = new Oplog(oplogId, this);
      // oplogSet.add(oplog);
      addRecoveredOplog(oplog);
    }
    if (oplog.addRecoveredFile(file, entry.getValue())) {
      foundCrfs.add(oplogId);
    } else {
      foundDrfs.add(oplogId);
    }
  }
  if (needsOplogs) {
    verifyOplogs(foundCrfs, foundDrfs);
  }
}
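A hypothetical, self-contained sketch of the id-extraction step above; the BACKUPds_N file names and suffixes are invented for illustration, and only the '_'/'.' parsing and the found-id bookkeeping mirror the code:

import it.unimi.dsi.fastutil.longs.LongOpenHashSet;

public class OplogIdScan {
  public static void main(String[] args) {
    String[] names = {"BACKUPds_1.crf", "BACKUPds_1.drf", "BACKUPds_2.crf"};
    LongOpenHashSet foundCrfs = new LongOpenHashSet();
    LongOpenHashSet foundDrfs = new LongOpenHashSet();
    long maxRecoveredOplogId = 0;
    for (String name : names) {
      // The id sits between the last '_' and the last '.'.
      int underscore = name.lastIndexOf('_');
      int dot = name.lastIndexOf('.');
      long oplogId = Long.parseLong(name.substring(underscore + 1, dot));
      maxRecoveredOplogId = Math.max(maxRecoveredOplogId, oplogId);
      if (name.endsWith(".crf")) {
        foundCrfs.add(oplogId);
      } else if (name.endsWith(".drf")) {
        foundDrfs.add(oplogId);
      }
    }
    // Prints: crf ids: {1, 2}, drf ids: {1}, max id: 2
    System.out.println("crf ids: " + foundCrfs + ", drf ids: " + foundDrfs
        + ", max id: " + maxRecoveredOplogId);
  }
}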
use of it.unimi.dsi.fastutil.longs.LongOpenHashSet in project presto by prestodb.
the class ArrayUnionFunction method bigintUnion.
@SqlType("array(bigint)")
public static Block bigintUnion(@SqlType("array(bigint)") Block leftArray, @SqlType("array(bigint)") Block rightArray) {
  int leftArrayCount = leftArray.getPositionCount();
  int rightArrayCount = rightArray.getPositionCount();
  // Size the set and the builder for the worst case: no overlap between the two arrays.
  LongSet set = new LongOpenHashSet(leftArrayCount + rightArrayCount);
  BlockBuilder distinctElementBlockBuilder = BIGINT.createBlockBuilder(new BlockBuilderStatus(), leftArrayCount + rightArrayCount);
  AtomicBoolean containsNull = new AtomicBoolean(false);
  appendBigintArray(leftArray, containsNull, set, distinctElementBlockBuilder);
  appendBigintArray(rightArray, containsNull, set, distinctElementBlockBuilder);
  return distinctElementBlockBuilder.build();
}
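appendBigintArray is not shown above; a plausible sketch of its deduplication mechanics, assuming LongSet.add's boolean return value is what filters out duplicates while first-seen order is preserved (plain long arrays stand in for Presto Blocks):

import it.unimi.dsi.fastutil.longs.LongArrayList;
import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
import it.unimi.dsi.fastutil.longs.LongSet;

public class LongUnion {
  static long[] union(long[] left, long[] right) {
    LongSet seen = new LongOpenHashSet(left.length + right.length);
    LongArrayList out = new LongArrayList(left.length + right.length);
    for (long v : left) {
      if (seen.add(v)) { // add() returns false if v was already present
        out.add(v);
      }
    }
    for (long v : right) {
      if (seen.add(v)) {
        out.add(v);
      }
    }
    return out.toLongArray();
  }

  public static void main(String[] args) {
    long[] merged = union(new long[] {1, 2, 3}, new long[] {3, 4});
    System.out.println(java.util.Arrays.toString(merged)); // [1, 2, 3, 4]
  }
}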
use of it.unimi.dsi.fastutil.longs.LongOpenHashSet in project geode by apache.
the class DiskInitFile method verifyOplogs.
public void verifyOplogs(LongOpenHashSet foundCrfs, LongOpenHashSet foundDrfs, LongOpenHashSet expectedCrfIds, LongOpenHashSet expectedDrfIds) {
  LongOpenHashSet missingCrfs = calcMissing(foundCrfs, expectedCrfIds);
  LongOpenHashSet missingDrfs = calcMissing(foundDrfs, expectedDrfIds);
  // Note that finding extra ones is ok; it is possible we died just after
  // creating one but before we could record it in the if file, or died just
  // after deleting it but before we could record it in the if file.
  boolean failed = false;
  String msg = null;
  if (!missingCrfs.isEmpty()) {
    failed = true;
    msg = "*.crf files with these ids: " + Arrays.toString(missingCrfs.toArray());
  }
  if (!missingDrfs.isEmpty()) {
    failed = true;
    if (msg == null) {
      msg = "";
    } else {
      msg += ", ";
    }
    msg += "*.drf files with these ids: " + Arrays.toString(missingDrfs.toArray());
  }
  if (failed) {
    msg = "The following required files could not be found: " + msg + ".";
    throw new IllegalStateException(msg);
  }
}
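calcMissing is likewise not shown above; a plausible sketch, assuming it is a plain set difference (expected ids minus found ids) built on fastutil's removeAll:

import it.unimi.dsi.fastutil.longs.LongOpenHashSet;

public class MissingIds {
  static LongOpenHashSet calcMissing(LongOpenHashSet found, LongOpenHashSet expected) {
    LongOpenHashSet missing = new LongOpenHashSet(expected); // copy, so the input is not mutated
    missing.removeAll(found); // what remains are ids we expected but never saw on disk
    return missing;
  }

  public static void main(String[] args) {
    LongOpenHashSet expected = new LongOpenHashSet(new long[] {1, 2, 3});
    LongOpenHashSet found = new LongOpenHashSet(new long[] {1, 3});
    System.out.println(calcMissing(found, expected)); // {2}
  }
}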
use of it.unimi.dsi.fastutil.longs.LongOpenHashSet in project symja_android_library by axkr.
the class DateTimeColumn method unique.
@Override
public DateTimeColumn unique() {
  // Funnel every packed date-time value through a hash set to drop duplicates.
  LongSet uniqueValues = new LongOpenHashSet(data.size());
  for (long i : data) {
    uniqueValues.add(i);
  }
  DateTimeColumn column = emptyCopy(uniqueValues.size());
  column.setName(name() + " Unique values");
  column.data = LongArrayList.wrap(uniqueValues.toLongArray());
  return column;
}