Use of de.invesdwin.util.collections.iterable.ICloseableIterable in project invesdwin-context-persistence by subes.
The class TimeSeriesStorageCache, method readRangeFiles:
protected ICloseableIterable<MemoryFileSummary> readRangeFiles(final FDate from, final FDate to, final Lock readLock,
        final ISkipFileFunction skipFileFunction) {
    return new ICloseableIterable<MemoryFileSummary>() {
        @Override
        public ICloseableIterator<MemoryFileSummary> iterator() {
            final FDate usedFrom;
            if (from == null) {
                final V firstValue = getFirstValue();
                if (firstValue == null) {
                    return EmptyCloseableIterator.getInstance();
                }
                usedFrom = extractEndTime.apply(firstValue);
            } else {
                usedFrom = from;
            }
            return new ACloseableIterator<MemoryFileSummary>(new TextDescription("%s[%s]: readRangeFiles(%s, %s)",
                    TimeSeriesStorageCache.class.getSimpleName(), hashKey, from, to)) {

                // use latest time available even if delegate iterator has no values
                private RangeTableRow<String, FDate, MemoryFileSummary> latestFirstTime = fileLookupTable_latestRangeKeyCache
                        .get(usedFrom);
                private final ICloseableIterator<RangeTableRow<String, FDate, MemoryFileSummary>> delegate;

                {
                    if (latestFirstTime == null) {
                        delegate = EmptyCloseableIterator.getInstance();
                    } else {
                        delegate = getRangeKeys(hashKey, latestFirstTime.getRangeKey().addMilliseconds(1), to);
                    }
                }

                @Override
                protected boolean innerHasNext() {
                    return latestFirstTime != null || delegate.hasNext();
                }

                private ICloseableIterator<RangeTableRow<String, FDate, MemoryFileSummary>> getRangeKeys(final String hashKey,
                        final FDate from, final FDate to) {
                    readLock.lock();
                    try {
                        final ICloseableIterator<RangeTableRow<String, FDate, MemoryFileSummary>> range = getAllRangeKeys(readLock)
                                .iterator();
                        final GetRangeKeysIterator rangeFiltered = new GetRangeKeysIterator(range, from, to);
                        if (skipFileFunction != null) {
                            return new ASkippingIterator<RangeTableRow<String, FDate, MemoryFileSummary>>(rangeFiltered) {
                                @Override
                                protected boolean skip(final RangeTableRow<String, FDate, MemoryFileSummary> element) {
                                    if (!rangeFiltered.hasNext()) {
                                        /*
                                         * cannot optimize this further for multiple segments because we don't know
                                         * if a segment further back might be empty or not and thus the last segment
                                         * of interest might have been the previous one from which we skipped the
                                         * last file falsely
                                         */
                                        return false;
                                    }
                                    return skipFileFunction.skipFile(element.getValue());
                                }
                            };
                        } else {
                            return rangeFiltered;
                        }
                    } finally {
                        readLock.unlock();
                    }
                }

                @Override
                protected MemoryFileSummary innerNext() {
                    final MemoryFileSummary summary;
                    if (latestFirstTime != null) {
                        summary = latestFirstTime.getValue();
                        latestFirstTime = null;
                    } else {
                        summary = delegate.next().getValue();
                    }
                    return summary;
                }

                @Override
                protected void innerClose() {
                    delegate.close();
                }
            };
        }
    };
}
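
The essential pattern in readRangeFiles is to resolve the latest range key at or before the requested start, hand out that file summary first, and only then lazily delegate to the remaining range rows starting one millisecond later. A minimal, self-contained sketch of this prepend-then-delegate iterator in plain Java follows (hypothetical class and type names, not the invesdwin API):

import java.util.Iterator;
import java.util.NoSuchElementException;

// Simplified stand-in for the anonymous ACloseableIterator above: the cached "latest" value
// is returned exactly once, then iteration falls through to the delegate.
final class PrependingIterator<E> implements Iterator<E>, AutoCloseable {

    private E latest;                   // latest value at or before the requested start, may be null
    private final Iterator<E> delegate; // values strictly after the latest one

    PrependingIterator(final E latest, final Iterator<E> delegate) {
        this.latest = latest;
        this.delegate = delegate;
    }

    @Override
    public boolean hasNext() {
        return latest != null || delegate.hasNext();
    }

    @Override
    public E next() {
        if (latest != null) {
            final E value = latest;
            latest = null; // hand out the prepended value only once
            return value;
        }
        if (!delegate.hasNext()) {
            throw new NoSuchElementException();
        }
        return delegate.next();
    }

    @Override
    public void close() {
        // a real implementation would also release the delegate's resources here
    }
}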
Use of de.invesdwin.util.collections.iterable.ICloseableIterable in project invesdwin-context-persistence by subes.
The class ALiveSegmentedTimeSeriesDBWithCacheTest, method setUp:
@Override
public void setUp() throws Exception {
    super.setUp();
    final AHistoricalCache<TimeRange> segmentFinder = PeriodicalSegmentFinder
            .newCache(new Duration(2, FTimeUnit.YEARS), false);
    table = new ALiveSegmentedTimeSeriesDB<String, FDate>(getClass().getSimpleName()) {

        private FDate curTime = null;

        @Override
        public AHistoricalCache<TimeRange> getSegmentFinder(final String key) {
            return segmentFinder;
        }

        @Override
        protected ISerde<FDate> newValueSerde() {
            return new TypeDelegateSerde<FDate>(FDate.class);
        }

        @Override
        protected Integer newValueFixedLength() {
            return null;
        }

        @Override
        protected String innerHashKeyToString(final String key) {
            return key;
        }

        @Override
        protected FDate extractEndTime(final FDate value) {
            return value;
        }

        @Override
        protected File getBaseDirectory() {
            return ContextProperties.TEMP_DIRECTORY;
        }

        @Override
        protected ICloseableIterable<? extends FDate> downloadSegmentElements(final SegmentedKey<String> segmentedKey) {
            return new ASkippingIterable<FDate>(WrapperCloseableIterable.maybeWrap(entities)) {
                private final FDate from = segmentedKey.getSegment().getFrom();
                private final FDate to = segmentedKey.getSegment().getTo();

                @Override
                protected boolean skip(final FDate element) {
                    return element.isBefore(from) || element.isAfter(to);
                }
            };
        }

        @Override
        public FDate getFirstAvailableHistoricalSegmentFrom(final String key) {
            if (entities.isEmpty() || curTime == null) {
                return null;
            }
            final FDate firstTime = FDates.min(curTime, entities.get(0));
            final TimeRange firstSegment = segmentFinder.query().getValue(firstTime);
            if (firstSegment.getTo().isBeforeOrEqualTo(curTime)) {
                return firstSegment.getFrom();
            } else {
                return segmentFinder.query().getValue(firstSegment.getFrom().addMilliseconds(-1)).getFrom();
            }
        }

        @Override
        public FDate getLastAvailableHistoricalSegmentTo(final String key, final FDate updateTo) {
            if (entities.isEmpty() || curTime == null) {
                return null;
            }
            final TimeRange lastSegment = segmentFinder.query().getValue(curTime);
            if (lastSegment.getTo().isBeforeOrEqualTo(curTime)) {
                return lastSegment.getTo();
            } else {
                return segmentFinder.query().getValue(lastSegment.getFrom().addMilliseconds(-1)).getTo();
            }
        }

        @Override
        public void putNextLiveValue(final String key, final FDate nextLiveValue) {
            curTime = nextLiveValue;
            super.putNextLiveValue(key, nextLiveValue);
        }

        @Override
        protected String getElementsName() {
            return "values";
        }
    };
    for (final FDate entity : entities) {
        table.putNextLiveValue(KEY, entity);
    }
}
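
The notable part of this fixture is how getLastAvailableHistoricalSegmentTo decides whether the segment containing curTime is already complete: if the segment end is at or before curTime it is used directly, otherwise the previous segment (looked up one millisecond before the current segment start) is used instead. A hedged, self-contained sketch of that decision with java.time and calendar-aligned 2-year segments (hypothetical helper names, not the invesdwin API):

import java.time.LocalDate;

// Sketch only: assumes calendar-aligned 2-year segments, e.g. [2000-01-01, 2001-12-31].
final class SegmentBoundaries {

    static LocalDate segmentFrom(final LocalDate key) {
        final int startYear = key.getYear() - (key.getYear() % 2);
        return LocalDate.of(startYear, 1, 1);
    }

    static LocalDate segmentTo(final LocalDate key) {
        return segmentFrom(key).plusYears(2).minusDays(1);
    }

    static LocalDate lastAvailableSegmentTo(final LocalDate curTime) {
        final LocalDate to = segmentTo(curTime);
        if (!to.isAfter(curTime)) {
            return to; // the current segment is already finished
        }
        // current segment still running: fall back to the previous segment
        return segmentTo(segmentFrom(curTime).minusDays(1));
    }
}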
Use of de.invesdwin.util.collections.iterable.ICloseableIterable in project invesdwin-context-persistence by subes.
The class ALiveSegmentedTimeSeriesDBWithNoCacheAndNoQueryCacheTest, method setUp:
@Override
public void setUp() throws Exception {
    super.setUp();
    final AHistoricalCache<TimeRange> segmentFinder = new AHistoricalCache<TimeRange>() {

        private final PeriodicalSegmentFinder calculation = PeriodicalSegmentFinder
                .newInstance(new Duration(2, FTimeUnit.YEARS));

        @Override
        protected Integer getInitialMaximumSize() {
            return 1000;
        }

        @Override
        protected FDate innerExtractKey(final TimeRange value) {
            return value.getFrom();
        }

        @Override
        protected IEvaluateGenericFDate<TimeRange> newLoadValue() {
            return this::loadValue;
        }

        private synchronized TimeRange loadValue(final IFDateProvider pKey) {
            final FDate key = pKey.asFDate();
            final TimeRange value = calculation.getSegment(key);
            final TimeRange upperTimeRange = new TimeRange(value.getFrom().addYears(1), value.getTo().addYears(1));
            if (upperTimeRange.containsInclusive(key)) {
                return upperTimeRange;
            } else {
                return new TimeRange(value.getFrom().addYears(-1), value.getTo().addYears(-1));
            }
        }

        @Override
        protected FDate innerCalculateNextKey(final FDate key) {
            return query().getValue(key).getTo().addMilliseconds(1);
        }

        @Override
        protected FDate innerCalculatePreviousKey(final FDate key) {
            return query().getValue(key).getFrom().addMilliseconds(-1);
        }

        @Override
        public void preloadData(final ExecutorService executor) {
            // noop
        }
    };
    table = new ALiveSegmentedTimeSeriesDB<String, FDate>(getClass().getSimpleName()) {

        private FDate curTime = null;

        @Override
        public AHistoricalCache<TimeRange> getSegmentFinder(final String key) {
            return segmentFinder;
        }

        @Override
        protected ISerde<FDate> newValueSerde() {
            return new TypeDelegateSerde<FDate>(FDate.class);
        }

        @Override
        protected Integer newValueFixedLength() {
            return null;
        }

        @Override
        protected String innerHashKeyToString(final String key) {
            return key;
        }

        @Override
        protected File getBaseDirectory() {
            return ContextProperties.TEMP_DIRECTORY;
        }

        @Override
        protected ICloseableIterable<? extends FDate> downloadSegmentElements(final SegmentedKey<String> segmentedKey) {
            return new ASkippingIterable<FDate>(WrapperCloseableIterable.maybeWrap(entities)) {
                private final FDate from = segmentedKey.getSegment().getFrom();
                private final FDate to = segmentedKey.getSegment().getTo();

                @Override
                protected boolean skip(final FDate element) {
                    return element.isBefore(from) || element.isAfter(to);
                }
            };
        }

        @Override
        protected FDate extractEndTime(final FDate value) {
            return value;
        }

        @Override
        public FDate getFirstAvailableHistoricalSegmentFrom(final String key) {
            if (entities.isEmpty() || curTime == null) {
                return null;
            }
            final FDate firstTime = FDates.min(curTime, entities.get(0));
            final TimeRange firstSegment = segmentFinder.query().getValue(firstTime);
            if (firstSegment.getTo().isBeforeOrEqualTo(curTime)) {
                return firstSegment.getFrom();
            } else {
                return segmentFinder.query().getValue(firstSegment.getFrom().addMilliseconds(-1)).getFrom();
            }
        }

        @Override
        public FDate getLastAvailableHistoricalSegmentTo(final String key, final FDate updateTo) {
            if (entities.isEmpty() || curTime == null) {
                return null;
            }
            final TimeRange lastSegment = segmentFinder.query().getValue(curTime);
            if (lastSegment.getTo().isBeforeOrEqualTo(curTime)) {
                return lastSegment.getTo();
            } else {
                return segmentFinder.query().getValue(lastSegment.getFrom().addMilliseconds(-1)).getTo();
            }
        }

        @Override
        public void putNextLiveValue(final String key, final FDate nextLiveValue) {
            curTime = nextLiveValue;
            super.putNextLiveValue(key, nextLiveValue);
        }

        @Override
        protected String getElementsName() {
            return "values";
        }
    };
    for (final FDate entity : entities) {
        table.putNextLiveValue(KEY, entity);
    }
}
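
This variant replaces the periodical cache with a hand-rolled AHistoricalCache whose loadValue shifts the base 2-year segment forward by one year and only falls back to a one-year backward shift when the key does not fall into the shifted range. A rough java.time sketch of that shifting logic (hypothetical names and simplified calendar-aligned segments, not the invesdwin API):

import java.time.LocalDate;

// Sketch only: mirrors the loadValue decision above with plain java.time types.
final class ShiftedSegmentFinder {

    record Range(LocalDate from, LocalDate to) {
        boolean containsInclusive(final LocalDate key) {
            return !key.isBefore(from) && !key.isAfter(to);
        }
    }

    // base segment: calendar-aligned 2-year range containing the key
    static Range baseSegment(final LocalDate key) {
        final int startYear = key.getYear() - (key.getYear() % 2);
        final LocalDate from = LocalDate.of(startYear, 1, 1);
        return new Range(from, from.plusYears(2).minusDays(1));
    }

    // shifted segment: prefer the base segment moved forward by one year,
    // otherwise move it backward by one year
    static Range shiftedSegment(final LocalDate key) {
        final Range base = baseSegment(key);
        final Range upper = new Range(base.from().plusYears(1), base.to().plusYears(1));
        if (upper.containsInclusive(key)) {
            return upper;
        }
        return new Range(base.from().minusYears(1), base.to().minusYears(1));
    }
}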
Use of de.invesdwin.util.collections.iterable.ICloseableIterable in project invesdwin-context-persistence by subes.
The class ASegmentedTimeSeriesDBWithoutShiftKeysAndQueryInterceptorTest, method setUp:
@Override
public void setUp() throws Exception {
    super.setUp();
    final AHistoricalCache<TimeRange> segmentFinder = PeriodicalSegmentFinder
            .newCache(new Duration(2, FTimeUnit.YEARS), false);
    table = new ASegmentedTimeSeriesDB<String, FDate>(getClass().getSimpleName()) {

        @Override
        public AHistoricalCache<TimeRange> getSegmentFinder(final String key) {
            return segmentFinder;
        }

        @Override
        protected ISerde<FDate> newValueSerde() {
            return new TypeDelegateSerde<FDate>(FDate.class);
        }

        @Override
        protected Integer newValueFixedLength() {
            return null;
        }

        @Override
        protected String innerHashKeyToString(final String key) {
            return key;
        }

        @Override
        protected File getBaseDirectory() {
            return ContextProperties.TEMP_DIRECTORY;
        }

        @Override
        protected ICloseableIterable<? extends FDate> downloadSegmentElements(final SegmentedKey<String> segmentedKey) {
            return new ASkippingIterable<FDate>(WrapperCloseableIterable.maybeWrap(entities)) {
                private final FDate from = segmentedKey.getSegment().getFrom();
                private final FDate to = segmentedKey.getSegment().getTo();

                @Override
                protected boolean skip(final FDate element) {
                    return element.isBefore(from) || element.isAfter(to);
                }
            };
        }

        @Override
        protected FDate extractEndTime(final FDate value) {
            return value;
        }

        @Override
        public FDate getFirstAvailableHistoricalSegmentFrom(final String key) {
            if (entities.isEmpty()) {
                return null;
            }
            return segmentFinder.query().getValue(entities.get(0)).getFrom();
        }

        @Override
        public FDate getLastAvailableHistoricalSegmentTo(final String key, final FDate updateTo) {
            if (entities.isEmpty()) {
                return null;
            }
            return segmentFinder.query().getValue(entities.get(entities.size() - 1)).getTo();
        }

        @Override
        protected String getElementsName() {
            return "values";
        }
    };
}
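
downloadSegmentElements in these test fixtures simply narrows the in-memory entities down to the requested segment, inclusive on both ends. A hypothetical standalone equivalent of that skipping filter using plain Java streams (not the invesdwin ASkippingIterable API):

import java.time.LocalDate;
import java.util.List;
import java.util.stream.Collectors;

// Sketch only: keep the entities that fall inside [from, to], inclusive on both ends.
final class SegmentDownload {

    static List<LocalDate> elementsInSegment(final List<LocalDate> entities, final LocalDate from, final LocalDate to) {
        return entities.stream()
                .filter(e -> !e.isBefore(from) && !e.isAfter(to))
                .collect(Collectors.toList());
    }
}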
Use of de.invesdwin.util.collections.iterable.ICloseableIterable in project invesdwin-context-persistence by subes.
The class ASegmentedTimeSeriesStorageCache, method getSegmentsReverse:
private ICloseableIterable<TimeRange> getSegmentsReverse(final FDate from, final FDate to, final FDate lastAvailableSegmentTo) {
    if (from == null || to == null) {
        return EmptyCloseableIterable.getInstance();
    }
    final TimeRange nextSegment = getSegmentFinder(key).query().getValue(from.addMilliseconds(1));
    final FDate adjFrom;
    if (from.equalsNotNullSafe(lastAvailableSegmentTo) && nextSegment.getFrom().equalsNotNullSafe(from)) {
        // adjust for overlapping segments
        adjFrom = from.addMilliseconds(-1);
    } else {
        adjFrom = from;
    }
    final FDate adjTo = to;
    final ICloseableIterable<TimeRange> segments = new ICloseableIterable<TimeRange>() {
        @Override
        public ICloseableIterator<TimeRange> iterator() {
            return new ICloseableIterator<TimeRange>() {

                private TimeRange nextSegment = getSegmentFinder(key).query().getValue(adjFrom);

                @Override
                public boolean hasNext() {
                    return nextSegment != null && nextSegment.getTo().isAfter(adjTo);
                }

                @Override
                public TimeRange next() {
                    final TimeRange curSegment = nextSegment;
                    if (curSegment == null) {
                        throw new FastNoSuchElementException("ASegmentedTimeSeriesStorageCache getSegments end reached null");
                    }
                    // get one segment earlier
                    nextSegment = getSegmentFinder(key).query()
                            .setFutureNullEnabled()
                            .getValue(nextSegment.getFrom().addMilliseconds(-1));
                    return curSegment;
                }

                @Override
                public void close() {
                    nextSegment = null;
                }
            };
        }
    };
    final ASkippingIterable<TimeRange> filteredSegments = new ASkippingIterable<TimeRange>(segments) {
        @Override
        protected boolean skip(final TimeRange element) {
            // though additionally skip ranges that exceed the available dates
            final FDate segmentTo = element.getTo();
            if (segmentTo.isBefore(adjTo)) {
                // no need to continue going lower
                throw new FastNoSuchElementException("ASegmentedTimeSeriesStorageCache getSegments end reached adjTo");
            }
            // skip last value and continue with earlier ones
            final FDate segmentFrom = element.getFrom();
            return segmentFrom.isAfter(adjFrom);
        }
    };
    return filteredSegments;
}
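
The iterable walks segments in reverse: it starts at the segment containing the (possibly adjusted) from date and repeatedly asks the segment finder for the segment one millisecond before the current segment start, while the skipping wrapper drops a segment that starts after adjFrom and stops once a segment ends before adjTo. A roughly equivalent, self-contained sketch with java.time and calendar-aligned 2-year segments (hypothetical names and simplifications, not the invesdwin API):

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;

// Sketch only: collects the 2-year segments from the one containing adjFrom backwards until
// a segment ends before adjTo (adjFrom is expected to be at or after adjTo).
final class ReverseSegments {

    record Range(LocalDate from, LocalDate to) {
    }

    static Range segmentOf(final LocalDate key) {
        final int startYear = key.getYear() - (key.getYear() % 2);
        final LocalDate from = LocalDate.of(startYear, 1, 1);
        return new Range(from, from.plusYears(2).minusDays(1));
    }

    static List<Range> segmentsReverse(final LocalDate adjFrom, final LocalDate adjTo) {
        final List<Range> segments = new ArrayList<>();
        Range cur = segmentOf(adjFrom);
        while (!cur.to().isBefore(adjTo)) { // stop once the segment ends before adjTo
            if (!cur.from().isAfter(adjFrom)) { // defensive: skip a segment starting after adjFrom
                segments.add(cur);
            }
            cur = segmentOf(cur.from().minusDays(1)); // move one segment earlier
        }
        return segments;
    }
}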