Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.
Class PushRuntimeTest, method scanSortGbySelectWrite.
@Test
public void scanSortGbySelectWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(), FloatSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the sort (by nation id)
RecordDescriptor sortDesc = scannerDesc;
InMemorySortOperatorDescriptor sort = new InMemorySortOperatorDescriptor(spec, new int[] { 3 }, new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, sortDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sort, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the group-by
NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
RecordDescriptor ntsDesc = sortDesc;
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
RecordDescriptor aggDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg }, new RecordDescriptor[] { ntsDesc, aggDesc });
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(new AlgebricksPipeline[] { pipeline }, new int[] { 3 }, new int[] {});
RecordDescriptor gbyDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
PreclusteredGroupOperatorDescriptor gby = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 3 }, new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, npaaf, gbyDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, gby, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the algebricks op.
IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3) /* Canadian customers */, new TupleFieldEvaluatorFactory(0));
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 }, BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
RecordDescriptor selectDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanSortGbySelectWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
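// the meta operator wraps the select and writer micro-ops into a single Hyracks operator (one input, zero outputs)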
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0, new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, sort, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), sort, 0, gby, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), gby, 0, algebricksOp, 0);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("9", buf.toString());
outFile.delete();
}
Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.
Class MillisecondsFromDayTimeDurationDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
return new IScalarEvaluatorFactory() {
private static final long serialVersionUID = 1L;
@Override
public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
return new IScalarEvaluator() {
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private DataOutput out = resultStorage.getDataOutput();
private IPointable argPtr0 = new VoidPointable();
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
AMutableInt64 aInt64 = new AMutableInt64(0);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
resultStorage.reset();
eval0.evaluate(tuple, argPtr0);
byte[] bytes = argPtr0.getByteArray();
int offset = argPtr0.getStartOffset();
if (bytes[offset] != ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
throw new TypeMismatchException(getIdentifier(), 0, bytes[offset], ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG);
}
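// the day-time component of a day-time duration is stored in milliseconds, so it can be serialized directly as an int64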
aInt64.setValue(ADayTimeDurationSerializerDeserializer.getDayTime(bytes, offset + 1));
int64Serde.serialize(aInt64, out);
result.set(resultStorage);
}
};
}
};
}
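Every descriptor in this listing follows the same skeleton: a serializable anonymous IScalarEvaluatorFactory whose createScalarEvaluator builds a stateful IScalarEvaluator that evaluates its argument evaluators into pointables, checks the serialized type tags, and writes the result through an ArrayBackedValueStorage. The standalone sketch below shows just that skeleton with a trivial pass-through payload; the class name is hypothetical and the import paths are assumed from the usual Hyracks module layout, so treat it as an illustration rather than AsterixDB code.
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class PassThroughEvalFactorySketch {
    // Returns a factory whose evaluator copies the first argument's serialized
    // value (type tag included) to the output, mirroring the structure of the
    // descriptors in this listing without any type-specific logic.
    public static IScalarEvaluatorFactory passThrough(final IScalarEvaluatorFactory[] args) {
        return new IScalarEvaluatorFactory() {
            private static final long serialVersionUID = 1L;
            @Override
            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
                return new IScalarEvaluator() {
                    private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                    private final DataOutput out = resultStorage.getDataOutput();
                    private final IPointable argPtr0 = new VoidPointable();
                    private final IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
                    @Override
                    public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                        resultStorage.reset();
                        eval0.evaluate(tuple, argPtr0);
                        try {
                            // copy the argument bytes unchanged into the result storage
                            out.write(argPtr0.getByteArray(), argPtr0.getStartOffset(), argPtr0.getLength());
                        } catch (IOException e) {
                            throw new HyracksDataException(e);
                        }
                        result.set(resultStorage);
                    }
                };
            }
        };
    }
}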
Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.
Class OverlapBinsDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
return new IScalarEvaluatorFactory() {
private static final long serialVersionUID = 1L;
@Override
public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
return new IScalarEvaluator() {
private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private final DataOutput out = resultStorage.getDataOutput();
private final IPointable argPtr0 = new VoidPointable();
private final IPointable argPtr1 = new VoidPointable();
private final IPointable argPtr2 = new VoidPointable();
private final IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
private final IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
private final IScalarEvaluator eval2 = args[2].createScalarEvaluator(ctx);
// for output
private OrderedListBuilder listBuilder = new OrderedListBuilder();
private ArrayBackedValueStorage listStorage = new ArrayBackedValueStorage();
protected final AOrderedListType intListType = new AOrderedListType(BuiltinType.AINTERVAL, null);
private final AMutableInterval aInterval = new AMutableInterval(0, 0, (byte) -1);
@SuppressWarnings("unchecked")
private final ISerializerDeserializer<AInterval> intervalSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
private final GregorianCalendarSystem gregCalSys = GregorianCalendarSystem.getInstance();
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
resultStorage.reset();
eval0.evaluate(tuple, argPtr0);
eval1.evaluate(tuple, argPtr1);
eval2.evaluate(tuple, argPtr2);
byte[] bytes0 = argPtr0.getByteArray();
int offset0 = argPtr0.getStartOffset();
ATypeTag type0 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes0[offset0]);
long intervalStart;
long intervalEnd;
byte intervalTypeTag;
if (type0 == ATypeTag.INTERVAL) {
intervalStart = AIntervalSerializerDeserializer.getIntervalStart(bytes0, offset0 + 1);
intervalEnd = AIntervalSerializerDeserializer.getIntervalEnd(bytes0, offset0 + 1);
intervalTypeTag = AIntervalSerializerDeserializer.getIntervalTimeType(bytes0, offset0 + 1);
if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
intervalStart = intervalStart * GregorianCalendarSystem.CHRONON_OF_DAY;
}
} else {
throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0], ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
}
// get the anchor instance time
byte[] bytes1 = argPtr1.getByteArray();
int offset1 = argPtr1.getStartOffset();
ATypeTag type1 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes1[offset1]);
if (intervalTypeTag != bytes1[offset1]) {
throw new IncompatibleTypeException(getIdentifier(), intervalTypeTag, bytes1[offset1]);
}
long anchorTime;
switch(type1) {
case DATE:
anchorTime = ADateSerializerDeserializer.getChronon(bytes1, offset1 + 1) * GregorianCalendarSystem.CHRONON_OF_DAY;
break;
case TIME:
anchorTime = ATimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
break;
case DATETIME:
anchorTime = ADateTimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
break;
default:
throw new TypeMismatchException(getIdentifier(), 1, bytes1[offset1], ATypeTag.SERIALIZED_DATE_TYPE_TAG, ATypeTag.SERIALIZED_TIME_TYPE_TAG, ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
}
byte[] bytes2 = argPtr2.getByteArray();
int offset2 = argPtr2.getStartOffset();
ATypeTag type2 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes2[offset2]);
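// the third argument is the bin size, either a year-month or a day-time duration; compute the index of the first bin that overlaps the interval start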
int yearMonth = 0;
long dayTime = 0;
long firstBinIndex;
switch(type2) {
case YEARMONTHDURATION:
yearMonth = AYearMonthDurationSerializerDeserializer.getYearMonth(bytes2, offset2 + 1);
int yearStart = gregCalSys.getYear(anchorTime);
int monthStart = gregCalSys.getMonthOfYear(anchorTime, yearStart);
int yearToBin = gregCalSys.getYear(intervalStart);
int monthToBin = gregCalSys.getMonthOfYear(intervalStart, yearToBin);
int totalMonths = (yearToBin - yearStart) * 12 + (monthToBin - monthStart);
firstBinIndex = totalMonths / yearMonth + ((totalMonths < 0 && totalMonths % yearMonth != 0) ? -1 : 0);
if (firstBinIndex > Integer.MAX_VALUE) {
throw new OverflowException(getIdentifier());
}
if (firstBinIndex < Integer.MIN_VALUE) {
throw new UnderflowException(getIdentifier());
}
break;
case DAYTIMEDURATION:
dayTime = ADayTimeDurationSerializerDeserializer.getDayTime(bytes2, offset2 + 1);
long totalChronon = intervalStart - anchorTime;
firstBinIndex = totalChronon / dayTime + ((totalChronon < 0 && totalChronon % dayTime != 0) ? -1 : 0);
break;
default:
throw new TypeMismatchException(getIdentifier(), 2, bytes2[offset2], ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG, ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG);
}
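// starting from the first overlapping bin, emit consecutive bins as intervals into an ordered list until the end of the input interval is covered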
long binStartChronon;
long binEndChronon;
int binOffset;
listBuilder.reset(intListType);
try {
if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
binOffset = 0;
do {
binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), false);
binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), false);
binStartChronon = binStartChronon / GregorianCalendarSystem.CHRONON_OF_DAY + ((binStartChronon < 0 && binStartChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1 : 0);
binEndChronon = binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY + ((binEndChronon < 0 && binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1 : 0);
aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
listStorage.reset();
intervalSerde.serialize(aInterval, listStorage.getDataOutput());
listBuilder.addItem(listStorage);
binOffset++;
} while (binEndChronon < intervalEnd);
} else if (intervalTypeTag == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
if (yearMonth != 0) {
throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
}
binOffset = 0;
binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), true);
binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), true);
if (binStartChronon < 0 || binStartChronon >= GregorianCalendarSystem.CHRONON_OF_DAY) {
// reject the case where a time bin starts before 00:00:00 or no earlier than 24:00:00
throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
}
while (!((binStartChronon < intervalStart && binEndChronon <= intervalStart) || (binStartChronon >= intervalEnd && binEndChronon > intervalEnd))) {
aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
listStorage.reset();
intervalSerde.serialize(aInterval, listStorage.getDataOutput());
listBuilder.addItem(listStorage);
binOffset++;
binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), true);
binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), true);
if (binStartChronon == GregorianCalendarSystem.CHRONON_OF_DAY) {
break;
}
if (binEndChronon < binStartChronon) {
throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
}
}
} else if (intervalTypeTag == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
binOffset = 0;
do {
binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), false);
binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), false);
aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
listStorage.reset();
intervalSerde.serialize(aInterval, listStorage.getDataOutput());
listBuilder.addItem(listStorage);
binOffset++;
} while (binEndChronon < intervalEnd);
} else {
throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0], ATypeTag.SERIALIZED_DATE_TYPE_TAG, ATypeTag.SERIALIZED_TIME_TYPE_TAG, ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
}
listBuilder.write(out, true);
} catch (IOException e1) {
throw new HyracksDataException(e1);
}
result.set(resultStorage);
}
};
}
};
}
Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.
Class ParseTimeDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
return new IScalarEvaluatorFactory() {
private static final long serialVersionUID = 1L;
@Override
public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
return new IScalarEvaluator() {
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private DataOutput out = resultStorage.getDataOutput();
private IPointable argPtr0 = new VoidPointable();
private IPointable argPtr1 = new VoidPointable();
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private AMutableTime aTime = new AMutableTime(0);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
resultStorage.reset();
eval0.evaluate(tuple, argPtr0);
eval1.evaluate(tuple, argPtr1);
byte[] bytes0 = argPtr0.getByteArray();
int offset0 = argPtr0.getStartOffset();
int len0 = argPtr0.getLength();
byte[] bytes1 = argPtr1.getByteArray();
int offset1 = argPtr1.getStartOffset();
int len1 = argPtr1.getLength();
if (bytes0[offset0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0], ATypeTag.SERIALIZED_STRING_TYPE_TAG);
}
if (bytes1[offset1] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new TypeMismatchException(getIdentifier(), 1, bytes1[offset1], ATypeTag.SERIALIZED_STRING_TYPE_TAG);
}
utf8Ptr.set(bytes0, offset0 + 1, len0 - 1);
int start0 = utf8Ptr.getCharStartOffset();
int length0 = utf8Ptr.getUTF8Length();
utf8Ptr.set(bytes1, offset1 + 1, len1 - 1);
int start1 = utf8Ptr.getCharStartOffset();
int length1 = utf8Ptr.getUTF8Length();
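// the format argument may contain several alternative formats separated by '|'; try each one until a parse succeeds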
long chronon = 0;
int formatStart = start1;
int formatLength;
boolean processSuccessfully = false;
while (!processSuccessfully && formatStart < start1 + length1) {
// search for "|"
formatLength = 0;
for (; formatStart + formatLength < start1 + length1; formatLength++) {
if (bytes1[formatStart + formatLength] == '|') {
break;
}
}
try {
chronon = DT_UTILS.parseDateTime(bytes0, start0, length0, bytes1, formatStart, formatLength, DateTimeParseMode.TIME_ONLY);
} catch (AsterixTemporalTypeParseException ex) {
formatStart += formatLength + 1;
continue;
}
processSuccessfully = true;
}
if (!processSuccessfully) {
throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_TIME_TYPE_TAG);
}
aTime.setValue((int) chronon);
timeSerde.serialize(aTime, out);
result.set(resultStorage);
}
};
}
};
}
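The '|'-separated fallback above tries each format in turn until one parses. The isolated sketch below illustrates the same strategy; it is hypothetical and uses plain java.time rather than AsterixDB's internal DT_UTILS parser, so its format tokens differ from the ones parse-time actually accepts.
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
public class MultiFormatTimeParseSketch {
    // Try each '|'-separated pattern until one parses; throw if none match.
    public static LocalTime parseWithFallback(String value, String formats) {
        for (String fmt : formats.split("\\|")) {
            try {
                return LocalTime.parse(value, DateTimeFormatter.ofPattern(fmt));
            } catch (DateTimeParseException e) {
                // fall through and try the next format
            }
        }
        throw new IllegalArgumentException("value does not match any of the formats: " + value);
    }
    public static void main(String[] args) {
        // "HH:mm:ss" rejects the single-digit hour, so the second pattern is used
        System.out.println(parseWithFallback("9:05", "HH:mm:ss|H:mm"));
    }
}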
Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.
Class YearMonthDurationComparatorDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
return new IScalarEvaluatorFactory() {
private static final long serialVersionUID = 1L;
@Override
public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
return new IScalarEvaluator() {
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private DataOutput out = resultStorage.getDataOutput();
private IPointable argPtr0 = new VoidPointable();
private IPointable argPtr1 = new VoidPointable();
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ABoolean> boolSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
resultStorage.reset();
eval0.evaluate(tuple, argPtr0);
eval1.evaluate(tuple, argPtr1);
byte[] bytes0 = argPtr0.getByteArray();
int offset0 = argPtr0.getStartOffset();
byte[] bytes1 = argPtr1.getByteArray();
int offset1 = argPtr1.getStartOffset();
if (bytes0[offset0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0], ATypeTag.SERIALIZED_DURATION_TYPE_TAG);
}
if (bytes1[offset1] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
throw new TypeMismatchException(getIdentifier(), 1, bytes1[offset1], ATypeTag.SERIALIZED_DURATION_TYPE_TAG);
}
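// both operands must be pure year-month durations, i.e. their day-time component must be zero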
if ((ADurationSerializerDeserializer.getDayTime(bytes0, offset0 + 1) != 0) || (ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1) != 0)) {
throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG);
}
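// strict '>' comparison of the year-month components; isGreaterThan selects the result polarity so the same code serves the complementary comparator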
if (ADurationSerializerDeserializer.getYearMonth(bytes0, offset0 + 1) > ADurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1)) {
boolSerde.serialize(isGreaterThan ? ABoolean.TRUE : ABoolean.FALSE, out);
} else {
boolSerde.serialize(isGreaterThan ? ABoolean.FALSE : ABoolean.TRUE, out);
}
result.set(resultStorage);
}
};
}
};
}