Use of com.linkedin.pinot.common.segment.SegmentMetadata in the project pinot by LinkedIn.
In the class RetentionManagerTest, method testRetentionWithHoursTimeUnit.
/**
 * Verifies HOURS-granularity retention: segments stamped far in the past are
 * purged while segments stamped two days in the future are kept.
 *
 * @throws JSONException
 * @throws UnsupportedEncodingException
 * @throws IOException
 * @throws InterruptedException
 */
@Test
public void testRetentionWithHoursTimeUnit() throws JSONException, UnsupportedEncodingException, IOException, InterruptedException {
  _retentionManager = new RetentionManager(_pinotHelixResourceManager, 5);
  _retentionManager.start();

  // Hours since epoch for a point in time two days from now.
  long daysUntilDayAfterTomorrow = System.currentTimeMillis() / 1000 / 60 / 60 / 24 + 2;
  long futureHoursSinceEpoch = daysUntilDayAfterTomorrow * 24;

  String hoursUnit = TimeUnit.HOURS.toString();

  // Register ten stale segments carrying a fixed, long-past hours-since-epoch stamp.
  for (int segmentIndex = 0; segmentIndex < 10; ++segmentIndex) {
    registerSegmentMetadata(getTimeSegmentMetadataImpl("373056", "373056", hoursUnit));
    Thread.sleep(100);
  }

  // Register ten fresh segments stamped two days in the future.
  String futureTimestamp = String.valueOf(futureHoursSinceEpoch);
  for (int segmentIndex = 0; segmentIndex < 10; ++segmentIndex) {
    registerSegmentMetadata(getTimeSegmentMetadataImpl(futureTimestamp, futureTimestamp, hoursUnit));
    Thread.sleep(100);
  }

  // Of the 20 registered segments, only the 10 future-stamped ones should survive.
  validate(20, _offlineTableName, 10, true);
  cleanupSegments(_offlineTableName);
}
Use of com.linkedin.pinot.common.segment.SegmentMetadata in the project pinot by LinkedIn.
In the class ValidationManagerTest, method testTotalDocumentCountOffline.
@Test
public void testTotalDocumentCountOffline() throws Exception {
  // Build a list of dummy segments for the same table holding 10, 20 and 30 documents.
  String testTableName = "TestTableTotalDocCountTest";
  List<SegmentMetadata> segmentMetadataList = new ArrayList<SegmentMetadata>();
  segmentMetadataList.add(new DummyMetadata(testTableName, 10));
  segmentMetadataList.add(new DummyMetadata(testTableName, 20));
  segmentMetadataList.add(new DummyMetadata(testTableName, 30));
  // The offline total document count must be the sum over all segments.
  Assert.assertEquals(ValidationManager.computeOfflineTotalDocumentInSegments(segmentMetadataList), 60);
}
Use of com.linkedin.pinot.common.segment.SegmentMetadata in the project pinot by LinkedIn.
In the class AggregationPlanNode, method run.
@Override
public Operator run() {
  // Run the child plan first: its transform operator feeds the aggregation.
  TransformExpressionOperator transformExpressionOperator =
      (TransformExpressionOperator) _transformPlanNode.run();
  SegmentMetadata segmentMetadata = _indexSegment.getSegmentMetadata();
  // Resolve the aggregation function contexts from the segment metadata and
  // wrap everything in the aggregation operator.
  return new AggregationOperator(
      AggregationFunctionUtils.getAggregationFunctionContexts(_aggregationInfos, segmentMetadata),
      transformExpressionOperator,
      segmentMetadata.getTotalRawDocs());
}
Use of com.linkedin.pinot.common.segment.SegmentMetadata in the project pinot by LinkedIn.
In the class SimpleSegmentMetadata, method load.
/**
 * Builds a {@link SimpleSegmentMetadata} from the given configuration,
 * reading only the segment size (defaulting to 0 when the key is absent).
 *
 * @param properties configuration holding the {@code SEGMENT_SIZE} entry
 * @return the loaded segment metadata
 */
public static SegmentMetadata load(Configuration properties) {
  // Declare the local as the concrete type so no cast is needed to reach
  // setSize(); the return type still exposes only the SegmentMetadata interface.
  final SimpleSegmentMetadata segmentMetadata = new SimpleSegmentMetadata();
  segmentMetadata.setSize(properties.getLong(SEGMENT_SIZE, 0));
  return segmentMetadata;
}
Use of com.linkedin.pinot.common.segment.SegmentMetadata in the project pinot by LinkedIn.
In the class BaseHllStarTreeIndexTest, method testHardCodedQueries.
/**
 * Runs each hard-coded query against the segment through two paths -- HLL
 * aggregation over the raw documents and over the star-tree pre-aggregated
 * documents -- and asserts both paths agree group by group.
 *
 * @param segment index segment under test (every query must be star-tree eligible)
 * @param schema schema, used here only for logging dimension/metric names
 */
void testHardCodedQueries(IndexSegment segment, Schema schema) throws Exception {
  // Only use the metrics corresponding to columnsToDeriveHllFields.
  List<String> metricNames = new ArrayList<>();
  for (String column : columnsToDeriveHllFields) {
    metricNames.add(column + HLL_CONFIG.getHllDeriveColumnSuffix());
  }
  SegmentMetadata segmentMetadata = segment.getSegmentMetadata();
  LOGGER.info("[Schema] Dim: {} Metric: {}", schema.getDimensionNames(), schema.getMetricNames());
  // Hoisted out of the loop: the compiler is loop-invariant (NOTE(review):
  // Pql2Compiler appears stateless across compile calls -- confirm).
  Pql2Compiler compiler = new Pql2Compiler();
  for (int i = 0; i < _hardCodedQueries.length; i++) {
    BrokerRequest brokerRequest = compiler.compileToBrokerRequest(_hardCodedQueries[i]);
    FilterQueryTree filterQueryTree = RequestUtils.generateFilterQueryTree(brokerRequest);
    // Every hard-coded query must be answerable from the star-tree index.
    Assert.assertTrue(RequestUtils.isFitForStarTreeIndex(segmentMetadata, filterQueryTree, brokerRequest));
    // Group -> projected HLL values of each group, computed via both paths.
    Map<String, long[]> expectedResult = computeHllUsingRawDocs(segment, metricNames, brokerRequest);
    Map<String, long[]> actualResult = computeHllUsingAggregatedDocs(segment, metricNames, brokerRequest);
    // TestNG convention: actual first, expected second.
    Assert.assertEquals(actualResult.size(), expectedResult.size(), "Mis-match in number of groups");
    for (Map.Entry<String, long[]> entry : expectedResult.entrySet()) {
      String expectedKey = entry.getKey();
      Assert.assertTrue(actualResult.containsKey(expectedKey));
      long[] expectedSums = entry.getValue();
      long[] actualSums = actualResult.get(expectedKey);
      for (int j = 0; j < expectedSums.length; j++) {
        LOGGER.info("actual hll: {} ", actualSums[j]);
        LOGGER.info("expected hll: {} ", expectedSums[j]);
        Assert.assertEquals(actualSums[j], expectedSums[j], "Mis-match hll for key '" + expectedKey + "', Metric: " + metricNames.get(j) + ", Random Seed: " + _randomSeed);
      }
    }
  }
}
Aggregations