Use of com.linkedin.pinot.core.common.Block in project pinot by linkedin.
From the class ChunkIndexCreationDriverImplTest, method test4:
@Test(enabled = false)
public void test4() throws Exception {
  // Load the first segment under INDEX_DIR and build an equality predicate on
  // "column1" using a random value taken from that column's dictionary.
  final IndexSegmentImpl segment = (IndexSegmentImpl) Loaders.IndexSegment.load(INDEX_DIR.listFiles()[0], ReadMode.mmap);
  final ImmutableDictionaryReader d = segment.getDictionaryFor("column1");
  final List<String> rhs = new ArrayList<String>();
  rhs.add(d.get(new Random().nextInt(d.length())).toString());
  final Predicate p = new EqPredicate("column1", rhs);
  final DataSource ds = segment.getDataSource("column1", p);
  // Iterate the doc ids that match the predicate from the data source's Block.
  final Block bl = ds.nextBlock();
  final BlockDocIdSet idSet = bl.getBlockDocIdSet();
  final BlockDocIdIterator it = idSet.iterator();
  int docId = it.next();
  final StringBuilder b = new StringBuilder();
  while (docId != Constants.EOF) {
    b.append(docId + ",");
    docId = it.next();
  }
  // System.out.println(b.toString());
}
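The same traversal can be packaged as a small reusable method. A minimal sketch follows; the countMatchingDocs helper is hypothetical and not part of the test, but it uses only the Block calls shown above.

// Hypothetical helper: counts the documents matched by the predicate behind a DataSource.
private int countMatchingDocs(DataSource ds) {
  final BlockDocIdIterator it = ds.nextBlock().getBlockDocIdSet().iterator();
  int count = 0;
  for (int docId = it.next(); docId != Constants.EOF; docId = it.next()) {
    count++;
  }
  return count;
}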
Use of com.linkedin.pinot.core.common.Block in project pinot by linkedin.
From the class RealtimeSegmentTest, method test2:
@Test
public void test2() throws Exception {
  // Scan the single-value column "column1" of the realtime segment built without
  // an inverted index; each int read from the iterator is a dictionary id.
  DataSource ds = segmentWithoutInvIdx.getDataSource("column1");
  Block b = ds.nextBlock();
  BlockValSet set = b.getBlockValueSet();
  BlockSingleValIterator it = (BlockSingleValIterator) set.iterator();
  BlockMetadata metadata = b.getMetadata();
  while (it.next()) {
    int dicId = it.nextIntVal();
  }
}
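The ints produced by nextIntVal() are dictionary ids; the block metadata exposes the dictionary that maps them back to raw values, the same getDictionary() call the RealtimeFileBasedReaderTest example further below relies on. A minimal sketch, as a hypothetical variant of the loop above rather than code from the test:

// Hypothetical variant of the scan above: translate each dictionary id back to
// its raw value through the dictionary carried on the block metadata.
while (it.next()) {
  int dictId = it.nextIntVal();
  Object rawValue = metadata.getDictionary().get(dictId);
}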
Use of com.linkedin.pinot.core.common.Block in project pinot by linkedin.
From the class RealtimeTableDataManagerTest, method testSetup:
public void testSetup() throws Exception {
  final HLRealtimeSegmentDataManager manager = new HLRealtimeSegmentDataManager(realtimeSegmentZKMetadata, tableConfig, instanceZKMetadata, null, tableDataManagerConfig.getDataDir(), ReadMode.valueOf(tableDataManagerConfig.getReadMode()), getTestSchema(), new ServerMetrics(new MetricsRegistry()));
  final long start = System.currentTimeMillis();
  // Flip keepOnRunning to false once the segment has been consuming for SEGMENT_CONSUMING_TIME.
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      if (System.currentTimeMillis() - start >= (SEGMENT_CONSUMING_TIME)) {
        keepOnRunning = false;
      }
    }
  }, 1000, 1000 * 60 * 1);
  // Periodically scan the metric column "count" and log the scan time and sum.
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("count");
        BlockValSet valSet = mDs.nextBlock().getBlockValueSet();
        BlockSingleValIterator valIt = (BlockSingleValIterator) valSet.iterator();
        int val = valIt.nextIntVal();
        while (val != Constants.EOF) {
          sum += val;
          val = valIt.nextIntVal();
        }
      } catch (Exception e) {
        LOGGER.info("count column exception");
        e.printStackTrace();
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan metric col count : " + (stop - start) + " sum : " + sum);
    }
  }, 20000, 1000 * 5);
  // Periodically scan the single-value dimension column "viewerId".
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("viewerId");
        BlockValSet valSet = mDs.nextBlock().getBlockValueSet();
        BlockSingleValIterator valIt = (BlockSingleValIterator) valSet.iterator();
        int val = valIt.nextIntVal();
        while (val != Constants.EOF) {
          sum += val;
          val = valIt.nextIntVal();
        }
      } catch (Exception e) {
        LOGGER.info("viewerId column exception");
        e.printStackTrace();
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan SV dimension col viewerId : " + (stop - start) + " sum : " + sum);
    }
  }, 20000, 1000 * 5);
  // Periodically scan the single-value time column "daysSinceEpoch".
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("daysSinceEpoch");
        BlockValSet valSet = mDs.nextBlock().getBlockValueSet();
        BlockSingleValIterator valIt = (BlockSingleValIterator) valSet.iterator();
        int val = valIt.nextIntVal();
        while (val != Constants.EOF) {
          sum += val;
          val = valIt.nextIntVal();
        }
      } catch (Exception e) {
        LOGGER.info("daysSinceEpoch column exception");
        e.printStackTrace();
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan SV time col daysSinceEpoch : " + (stop - start) + " sum : " + sum);
    }
  }, 20000, 1000 * 5);
  // Periodically scan the multi-value dimension column "viewerCompanies" and log
  // the scan time, the sum of the values and the average number of values per row.
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      float sumOfLengths = 0F;
      float counter = 0F;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("viewerCompanies");
        Block b = mDs.nextBlock();
        BlockValSet valSet = b.getBlockValueSet();
        BlockMultiValIterator valIt = (BlockMultiValIterator) valSet.iterator();
        BlockMetadata m = b.getMetadata();
        int maxVams = m.getMaxNumberOfMultiValues();
        while (valIt.hasNext()) {
          int[] vals = new int[maxVams];
          int len = valIt.nextIntVal(vals);
          for (int i = 0; i < len; i++) {
            sum += vals[i];
          }
          sumOfLengths += len;
          counter++;
        }
      } catch (Exception e) {
        LOGGER.info("viewerCompanies column exception");
        e.printStackTrace();
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan MV col viewerCompanies : " + (stop - start) + " sum : " + sum + " average len : " + (sumOfLengths / counter));
    }
  }, 20000, 1000 * 5);
  while (keepOnRunning) {
    // Wait for keepOnRunning to be set to false
  }
}
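The three single-value scan tasks above differ only in the column they read and the label they log. A minimal sketch of how they could share one helper; the scanSingleValueColumn method below is hypothetical and not part of the test, but it is built from the same Block calls used above.

// Hypothetical helper: scans one single-value column of the realtime segment and
// logs the scan time and the running sum, mirroring the timer tasks above.
private void scanSingleValueColumn(HLRealtimeSegmentDataManager manager, String column, String label) {
  long start = System.currentTimeMillis();
  long sum = 0;
  try {
    RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
    RealtimeColumnDataSource ds = (RealtimeColumnDataSource) segment.getDataSource(column);
    BlockSingleValIterator it = (BlockSingleValIterator) ds.nextBlock().getBlockValueSet().iterator();
    int val = it.nextIntVal();
    while (val != Constants.EOF) {
      sum += val;
      val = it.nextIntVal();
    }
  } catch (Exception e) {
    LOGGER.info(label + " column exception");
    e.printStackTrace();
  }
  LOGGER.info("time to scan " + label + " : " + (System.currentTimeMillis() - start) + " sum : " + sum);
}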
Use of com.linkedin.pinot.core.common.Block in project pinot by linkedin.
From the class DefaultGroupKeyGeneratorTest, method setup:
@BeforeClass
private void setup() throws Exception {
  GenericRow[] segmentData = new GenericRow[NUM_ROWS];
  int value = _random.nextInt(MAX_STEP_LENGTH);
  // Generate random values for the segment.
  for (int i = 0; i < UNIQUE_ROWS; i++) {
    Map<String, Object> map = new HashMap<>();
    for (String singleValueColumn : SINGLE_VALUE_COLUMNS) {
      map.put(singleValueColumn, value);
      value += 1 + _random.nextInt(MAX_STEP_LENGTH);
    }
    for (String multiValueColumn : MULTI_VALUE_COLUMNS) {
      int numMultiValues = 1 + _random.nextInt(MAX_NUM_MULTI_VALUES);
      Integer[] values = new Integer[numMultiValues];
      for (int k = 0; k < numMultiValues; k++) {
        values[k] = value;
        value += 1 + _random.nextInt(MAX_STEP_LENGTH);
      }
      map.put(multiValueColumn, values);
    }
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    segmentData[i] = genericRow;
  }
  for (int i = UNIQUE_ROWS; i < NUM_ROWS; i += UNIQUE_ROWS) {
    System.arraycopy(segmentData, 0, segmentData, i, UNIQUE_ROWS);
  }
  // Create an index segment with the random values.
  Schema schema = new Schema();
  for (String singleValueColumn : SINGLE_VALUE_COLUMNS) {
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(singleValueColumn, FieldSpec.DataType.INT, true);
    schema.addField(dimensionFieldSpec);
  }
  for (String multiValueColumn : MULTI_VALUE_COLUMNS) {
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(multiValueColumn, FieldSpec.DataType.INT, false);
    schema.addField(dimensionFieldSpec);
  }
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  FileUtils.deleteQuietly(new File(INDEX_DIR_PATH));
  config.setOutDir(INDEX_DIR_PATH);
  config.setSegmentName(SEGMENT_NAME);
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  driver.init(config, new TestDataRecordReader(schema, segmentData));
  driver.build();
  IndexSegment indexSegment = Loaders.IndexSegment.load(new File(INDEX_DIR_PATH, SEGMENT_NAME), ReadMode.heap);
  // Get a data fetcher for the index segment.
  Map<String, BaseOperator> dataSourceMap = new HashMap<>();
  Map<String, Block> blockMap = new HashMap<>();
  for (String column : indexSegment.getColumnNames()) {
    DataSource dataSource = indexSegment.getDataSource(column);
    dataSourceMap.put(column, dataSource);
    blockMap.put(column, dataSource.getNextBlock());
  }
  // Generate a random test doc id set.
  int num1 = _random.nextInt(50);
  int num2 = num1 + 1 + _random.nextInt(50);
  for (int i = 0; i < 20; i += 2) {
    _testDocIdSet[i] = num1 + 50 * i;
    _testDocIdSet[i + 1] = num2 + 50 * i;
  }
  DataFetcher dataFetcher = new DataFetcher(dataSourceMap);
  DocIdSetBlock docIdSetBlock = new DocIdSetBlock(_testDocIdSet, _testDocIdSet.length);
  ProjectionBlock projectionBlock = new ProjectionBlock(blockMap, new DataBlockCache(dataFetcher), docIdSetBlock);
  _transformBlock = new TransformBlock(projectionBlock, new HashMap<String, BlockValSet>());
}
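For orientation, a minimal sketch of how the blocks collected in blockMap above can be scanned directly; this is hypothetical code, not part of the test, and it simply reuses the single-value iteration pattern from the other examples in this file.

// Hypothetical scan over the single-value columns captured in blockMap: each int
// read from a column's block is a dictionary id for that column.
for (String column : SINGLE_VALUE_COLUMNS) {
  BlockSingleValIterator it = (BlockSingleValIterator) blockMap.get(column).getBlockValueSet().iterator();
  int dictId = it.nextIntVal();
  while (dictId != Constants.EOF) {
    dictId = it.nextIntVal();
  }
}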
Use of com.linkedin.pinot.core.common.Block in project pinot by linkedin.
From the class RealtimeFileBasedReaderTest, method testDataSourceWithoutPredicateForMultiValueDimensionColumns:
private void testDataSourceWithoutPredicateForMultiValueDimensionColumns() {
  // For every multi-value dimension column, verify that the offline and realtime
  // segments expose the same values, entry by entry, via their Block iterators.
  for (FieldSpec spec : schema.getAllFieldSpecs()) {
    if (!spec.isSingleValueField()) {
      DataSource offlineDS = offlineSegment.getDataSource(spec.getName());
      DataSource realtimeDS = realtimeSegment.getDataSource(spec.getName());
      Block offlineBlock = offlineDS.nextBlock();
      Block realtimeBlock = realtimeDS.nextBlock();
      BlockMetadata offlineMetadata = offlineBlock.getMetadata();
      BlockMetadata realtimeMetadata = realtimeBlock.getMetadata();
      BlockMultiValIterator offlineValIterator = (BlockMultiValIterator) offlineBlock.getBlockValueSet().iterator();
      BlockMultiValIterator realtimeValIterator = (BlockMultiValIterator) realtimeBlock.getBlockValueSet().iterator();
      Assert.assertEquals(offlineSegment.getSegmentMetadata().getTotalDocs(), realtimeSegment.getAggregateDocumentCount());
      while (realtimeValIterator.hasNext()) {
        int[] offlineIds = new int[offlineBlock.getMetadata().getMaxNumberOfMultiValues()];
        int[] realtimeIds = new int[realtimeBlock.getMetadata().getMaxNumberOfMultiValues()];
        int Olen = offlineValIterator.nextIntVal(offlineIds);
        int Rlen = realtimeValIterator.nextIntVal(realtimeIds);
        Assert.assertEquals(Olen, Rlen);
        for (int i = 0; i < Olen; i++) {
          Assert.assertEquals(offlineMetadata.getDictionary().get(offlineIds[i]), realtimeMetadata.getDictionary().get(realtimeIds[i]));
        }
      }
    }
  }
}
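One check the loop above does not make is that the offline iterator is also exhausted when the realtime iterator runs out. A hypothetical one-line addition, not in the original test, that could follow the while loop:

// Hypothetical follow-up assertion: the offline column should not contain more
// multi-value entries than the realtime column.
Assert.assertFalse(offlineValIterator.hasNext());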