Example usage of org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset in the GeoWave project by LocationTech: the acceptInternal method of the QueryFilterIterator class.
@Override
public boolean acceptInternal(final Key key, final Value value) {
  // With no filter configured there is nothing to evaluate, so every
  // key/value pair is accepted.
  if (!isSet()) {
    return true;
  }
  // Decode the common index fields for this key/value pair; any fields that
  // were not needed for decoding are kept aside as unread (deferred) data.
  final PersistentDataset<Object> decodedCommonData = new MultiFieldPersistentDataset<>();
  final FlattenedUnreadData deferredFields = aggregateFieldData(key, value, decodedCommonData);
  return applyRowFilter(key.getRow(currentRow), decodedCommonData, deferredFields);
}
Example usage of org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset in the GeoWave project by LocationTech: the filter method of the WholeRowAggregationIterator class.
@Override
protected boolean filter(final Text currentRow, final List<Key> keys, final List<Value> values) {
  // Individual rows are never emitted by this iterator; only the final
  // aggregation result is returned, so this method always answers false.
  if ((aggregationIterator == null) || (aggregationIterator.queryFilterIterator == null)) {
    return false;
  }
  // Fold the common-index field data of every key/value pair in the row into a
  // single dataset, collecting any deferred (unread) field data along the way.
  final PersistentDataset<Object> commonFields = new MultiFieldPersistentDataset<>();
  final List<FlattenedUnreadData> deferredFields = new ArrayList<>();
  for (int i = 0; (i < keys.size()) && (i < values.size()); i++) {
    final FlattenedUnreadData deferred =
        aggregationIterator.queryFilterIterator.aggregateFieldData(
            keys.get(i),
            values.get(i),
            commonFields);
    if (deferred != null) {
      deferredFields.add(deferred);
    }
  }
  final CommonIndexedPersistenceEncoding encoding =
      QueryFilterIterator.getEncoding(
          currentRow,
          queryFilterIterator.partitionKeyLength,
          commonFields,
          deferredFields.isEmpty() ? null : new UnreadFieldDataList(deferredFields));
  // The query filter is optional; when absent every row takes part in the
  // aggregation.
  boolean passesFilter = true;
  if (aggregationIterator.queryFilterIterator.isSet()) {
    passesFilter = aggregationIterator.queryFilterIterator.applyRowFilter(encoding);
  }
  if (passesFilter) {
    aggregationIterator.aggregateRow(currentRow, queryFilterIterator.model, encoding);
  }
  return false;
}
Example usage of org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset in the GeoWave project by LocationTech: the aggregateRow method of the AggregationIterator class.
/**
 * Folds a single decoded row into the running aggregation.
 *
 * @param currentRow the row currently being visited; recorded in
 *        {@code endRowOfAggregation} whenever the row is actually aggregated
 * @param model the common index model used to interpret field values
 * @param persistenceEncoding the decoded common-index representation of the row
 */
protected void aggregateRow(final Text currentRow, final CommonIndexModel model, final CommonIndexedPersistenceEncoding persistenceEncoding) {
  if (adapter == null) {
    // No adapter configured: aggregate directly on the common-index encoding.
    aggregationFunction.aggregate(null, persistenceEncoding);
    endRowOfAggregation = currentRow;
  } else if (((Short) (persistenceEncoding.getInternalAdapterId())).equals((adapter.getAdapterId()))) {
    // Only rows belonging to this iterator's adapter are aggregated; rows for
    // other adapters are silently skipped.
    final PersistentDataset<Object> adapterExtendedValues = new MultiFieldPersistentDataset<>();
    if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) {
      // Resolve fields the common model could not classify, then carry over any
      // adapter-extended values that were already decoded.
      ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues(adapter, model);
      final PersistentDataset<Object> existingExtValues = ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData();
      if (existingExtValues != null) {
        adapterExtendedValues.addValues(existingExtValues.getValues());
      }
    }
    // Re-wrap the encoding with the (possibly augmented) adapter-extended data so
    // the adapter can decode a complete entry; unknown data is passed as an empty
    // byte[] dataset.
    final IndexedAdapterPersistenceEncoding encoding = new IndexedAdapterPersistenceEncoding(persistenceEncoding.getInternalAdapterId(), persistenceEncoding.getDataId(), persistenceEncoding.getInsertionPartitionKey(), persistenceEncoding.getInsertionSortKey(), persistenceEncoding.getDuplicateCount(), persistenceEncoding.getCommonData(), new MultiFieldPersistentDataset<byte[]>(), adapterExtendedValues);
    // the data adapter can't use the numeric index strategy and only
    // the common index model to decode which is the case for feature
    // data, we pass along a null strategy to eliminate the necessity to
    // send a serialization of the strategy in the options of this
    // iterator
    final Object row = adapter.decode(encoding, indexMapping, new IndexImpl(null, model));
    if (row != null) {
      // for now ignore field info
      aggregationFunction.aggregate(adapter, row);
      endRowOfAggregation = currentRow;
    }
  }
}
Example usage of org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset in the GeoWave project by LocationTech: the createReader method of the MemoryDataStoreOperations class.
/**
 * Creates a {@link RowReader} over the in-memory entries for the index referenced by
 * {@code readerParams}, honoring partition/sort-key ranges, authorizations, the requested
 * adapter ids and the optional row limit. When server-side filtering is enabled, the query
 * filter is also applied while iterating.
 *
 * @param readerParams query parameters (index, ranges, authorizations, limit, filter)
 * @return a reader over the matching entries
 */
@Override
public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {
  final SortedSet<MemoryStoreEntry> internalData = storeData.get(readerParams.getIndex().getName());
  int counter = 0;
  List<MemoryStoreEntry> retVal = new ArrayList<>();
  // Fix: storeData.get(...) returns null for an index that has never been written to;
  // the previous code dereferenced it unconditionally and threw a NullPointerException.
  // An unknown index now yields an empty reader instead.
  if (internalData != null) {
    final Collection<SinglePartitionQueryRanges> partitionRanges =
        readerParams.getQueryRanges().getPartitionQueryRanges();
    if ((partitionRanges == null) || partitionRanges.isEmpty()) {
      // No range constraints: start from everything, then strip unauthorized entries.
      retVal.addAll(internalData);
      final Iterator<MemoryStoreEntry> it = retVal.iterator();
      while (it.hasNext()) {
        if (!isAuthorized(it.next(), readerParams.getAdditionalAuthorizations())) {
          it.remove();
        }
      }
      if ((readerParams.getLimit() != null)
          && (readerParams.getLimit() > 0)
          && (retVal.size() > readerParams.getLimit())) {
        retVal = retVal.subList(0, readerParams.getLimit());
      }
    } else {
      for (final SinglePartitionQueryRanges p : partitionRanges) {
        for (final ByteArrayRange r : p.getSortKeyRanges()) {
          final SortedSet<MemoryStoreEntry> set;
          if (r.isSingleValue()) {
            // Single sort key: select the narrow slice [start, nextPrefix(start)).
            set =
                Sets.newTreeSet(
                    internalData.subSet(
                        new MemoryStoreEntry(p.getPartitionKey(), r.getStart()),
                        new MemoryStoreEntry(
                            p.getPartitionKey(),
                            ByteArrayUtils.getNextPrefix(r.getStart()))));
          } else {
            set =
                Sets.newTreeSet(
                    internalData.tailSet(
                        new MemoryStoreEntry(p.getPartitionKey(), r.getStart())).headSet(
                            new MemoryStoreEntry(p.getPartitionKey(), r.getEndAsNextPrefix())));
          }
          // Drop entries the caller is not authorized to see, or that belong to
          // adapters outside the requested set.
          final Iterator<MemoryStoreEntry> it = set.iterator();
          while (it.hasNext()) {
            final MemoryStoreEntry entry = it.next();
            if (!isAuthorized(entry, readerParams.getAdditionalAuthorizations())) {
              it.remove();
            } else if (!ArrayUtils.contains(
                readerParams.getAdapterIds(),
                entry.row.getAdapterId())) {
              it.remove();
            }
          }
          if ((readerParams.getLimit() != null)
              && (readerParams.getLimit() > 0)
              && ((counter + set.size()) > readerParams.getLimit())) {
            // This range would push past the limit; take just enough and stop.
            // NOTE(review): this break only exits the inner (sort-key range) loop and
            // counter is not advanced, so a subsequent partition could still add more
            // entries past the limit — confirm whether that is intended.
            final List<MemoryStoreEntry> subset = new ArrayList<>(set);
            retVal.addAll(subset.subList(0, readerParams.getLimit() - counter));
            break;
          } else {
            retVal.addAll(set);
            counter += set.size();
            if ((readerParams.getLimit() != null)
                && (readerParams.getLimit() > 0)
                && (counter >= readerParams.getLimit())) {
              break;
            }
          }
        }
      }
    }
  }
  return new MyIndexReader<>(Iterators.filter(retVal.iterator(), new Predicate<MemoryStoreEntry>() {
    @Override
    public boolean apply(final MemoryStoreEntry input) {
      // Server-side filtering mirrors what a datastore-side iterator would do:
      // decode the common index fields and apply the query filter to them.
      if ((readerParams.getFilter() != null) && options.isServerSideLibraryEnabled()) {
        final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();
        final List<FlattenedUnreadData> unreadData = new ArrayList<>();
        final List<String> commonIndexFieldNames =
            DataStoreUtils.getUniqueDimensionFields(readerParams.getIndex().getIndexModel());
        for (final GeoWaveValue v : input.getRow().getFieldValues()) {
          unreadData.add(
              DataStoreUtils.aggregateFieldData(
                  input.getRow(),
                  v,
                  commonData,
                  readerParams.getIndex().getIndexModel(),
                  commonIndexFieldNames));
        }
        return readerParams.getFilter().accept(
            readerParams.getIndex().getIndexModel(),
            new DeferredReadCommonIndexedPersistenceEncoding(
                input.getRow().getAdapterId(),
                input.getRow().getDataId(),
                input.getRow().getPartitionKey(),
                input.getRow().getSortKey(),
                input.getRow().getNumberOfDuplicates(),
                commonData,
                unreadData.isEmpty() ? null : new UnreadFieldDataList(unreadData)));
      }
      return true;
    }
  }), readerParams.getRowTransformer());
}
Example usage of org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset in the GeoWave project by LocationTech: the testDisjointCasesWithPersistence method of the BasicQueryByClassTest class.
@Test
public void testDisjointCasesWithPersistence() {
  // Expected constraints after merging: dimension one narrowed to the union of its
  // two overlapping intervals, paired with each of the two disjoint dimension-two
  // intervals.
  final List<MultiDimensionalNumericData> expected = new ArrayList<>();
  expected.add(
      new BasicNumericDataset(
          new NumericData[] {
              new ConstrainedIndexValue(0.3, 0.7),
              new ConstrainedIndexValue(0.1, 2.3)}));
  expected.add(
      new BasicNumericDataset(
          new NumericData[] {
              new ConstrainedIndexValue(0.3, 0.7),
              new ConstrainedIndexValue(3.4, 3.7)}));
  // Dimension one: two overlapping intervals in a single constraint set.
  final ConstraintSet dimOneSet = new ConstraintSet();
  dimOneSet.addConstraint(
      ExampleDimensionOne.class,
      new ConstraintData(new ConstrainedIndexValue(0.3, 0.5), true));
  dimOneSet.addConstraint(
      ExampleDimensionOne.class,
      new ConstraintData(new ConstrainedIndexValue(0.4, 0.7), true));
  // Dimension two: two disjoint constraint sets (an 'OR' across sets).
  final ConstraintSet dimTwoSetA = new ConstraintSet();
  dimTwoSetA.addConstraint(
      ExampleDimensionTwo.class,
      new ConstraintData(new ConstrainedIndexValue(0.1, 0.2), true));
  dimTwoSetA.addConstraint(
      ExampleDimensionTwo.class,
      new ConstraintData(new ConstrainedIndexValue(2.1, 2.3), true));
  final ConstraintSet dimTwoSetB = new ConstraintSet();
  dimTwoSetB.addConstraint(
      ExampleDimensionTwo.class,
      new ConstraintData(new ConstrainedIndexValue(3.4, 3.7), true));
  final ConstraintsByClass mergedConstraints =
      new ConstraintsByClass(Arrays.asList(dimTwoSetA, dimTwoSetB)).merge(
          Collections.singletonList(dimOneSet));
  assertEquals(
      expected,
      mergedConstraints.getIndexConstraints(
          new IndexImpl(new ExampleNumericIndexStrategy(), null)));
  // Round-trip the query through its binary form and verify nothing is lost.
  final byte[] serialized = new BasicQueryByClass(mergedConstraints).toBinary();
  final BasicQueryByClass roundTripped = new BasicQueryByClass();
  roundTripped.fromBinary(serialized);
  final Index index =
      new CustomNameIndex(
          new ExampleNumericIndexStrategy(),
          new BasicIndexModel(
              new NumericDimensionField[] {
                  new ExampleDimensionOne(), new ExampleDimensionTwo()}),
          "22");
  assertEquals(expected, roundTripped.getIndexConstraints(index));
  final List<QueryFilter> filters = roundTripped.createFilters(index);
  assertEquals(1, filters.size());
  final Map<String, ConstrainedIndexValue> fieldValues = new HashMap<>();
  fieldValues.put("one", new ConstrainedIndexValue(0.4, 0.4));
  fieldValues.put("two", new ConstrainedIndexValue(0.5, 0.5));
  final CommonIndexModel model = null;
  // Both dimensions inside their constraints -> accepted.
  assertTrue(filters.get(0).accept(model, new CommonIndexedPersistenceEncoding((short) 1, StringUtils.stringToBinary("data"), StringUtils.stringToBinary("partition"), StringUtils.stringToBinary("sort"), // duplicate count
  1, new MultiFieldPersistentDataset(fieldValues), null)));
  // Dimension one outside its constraint -> rejected.
  fieldValues.put("one", new ConstrainedIndexValue(0.1, 0.1));
  assertFalse(filters.get(0).accept(model, new CommonIndexedPersistenceEncoding((short) 1, StringUtils.stringToBinary("data"), StringUtils.stringToBinary("partition"), StringUtils.stringToBinary("sort"), // duplicate count
  1, new MultiFieldPersistentDataset(fieldValues), null)));
  // Dimension two outside both disjoint constraints -> rejected.
  fieldValues.put("one", new ConstrainedIndexValue(0.4, 0.4));
  fieldValues.put("two", new ConstrainedIndexValue(5.0, 5.0));
  assertFalse(filters.get(0).accept(model, new CommonIndexedPersistenceEncoding((short) 1, StringUtils.stringToBinary("data"), StringUtils.stringToBinary("partition"), StringUtils.stringToBinary("sort"), // duplicate count
  1, new MultiFieldPersistentDataset(fieldValues), null)));
  /**
   * Tests the 'OR' Case
   */
  fieldValues.put("two", new ConstrainedIndexValue(3.5, 3.5));
  assertTrue(filters.get(0).accept(model, new CommonIndexedPersistenceEncoding((short) 1, StringUtils.stringToBinary("data"), StringUtils.stringToBinary("partition"), StringUtils.stringToBinary("sort"), // duplicate count
  1, new MultiFieldPersistentDataset(fieldValues), null)));
}
Aggregations