Use of com.linkedin.pinot.common.metrics.ServerMetrics in project pinot by linkedin.
From the class RealtimeTableDataManagerTest, method testSetup.
public void testSetup() throws Exception {
  final HLRealtimeSegmentDataManager manager =
      new HLRealtimeSegmentDataManager(realtimeSegmentZKMetadata, tableConfig, instanceZKMetadata, null,
          tableDataManagerConfig.getDataDir(), ReadMode.valueOf(tableDataManagerConfig.getReadMode()),
          getTestSchema(), new ServerMetrics(new MetricsRegistry()));
  final long start = System.currentTimeMillis();
  // Flip keepOnRunning to false once the segment has been consuming for SEGMENT_CONSUMING_TIME.
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      if (System.currentTimeMillis() - start >= SEGMENT_CONSUMING_TIME) {
        keepOnRunning = false;
      }
    }
  }, 1000, 1000 * 60);
  // Periodically scan the metric column "count" and log the scan time.
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("count");
        BlockValSet valSet = mDs.nextBlock().getBlockValueSet();
        BlockSingleValIterator valIt = (BlockSingleValIterator) valSet.iterator();
        // Sum every value until the iterator signals end-of-stream with Constants.EOF.
        int val = valIt.nextIntVal();
        while (val != Constants.EOF) {
          sum += val;
          val = valIt.nextIntVal();
        }
      } catch (Exception e) {
        LOGGER.info("count column exception", e);
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan metric col count : " + (stop - start) + " sum : " + sum);
    }
  }, 20000, 1000 * 5);
  // Periodically scan the single-value dimension column "viewerId".
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("viewerId");
        BlockValSet valSet = mDs.nextBlock().getBlockValueSet();
        BlockSingleValIterator valIt = (BlockSingleValIterator) valSet.iterator();
        int val = valIt.nextIntVal();
        while (val != Constants.EOF) {
          sum += val;
          val = valIt.nextIntVal();
        }
      } catch (Exception e) {
        LOGGER.info("viewerId column exception", e);
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan SV dimension col viewerId : " + (stop - start) + " sum : " + sum);
    }
  }, 20000, 1000 * 5);
  // Periodically scan the single-value time column "daysSinceEpoch".
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("daysSinceEpoch");
        BlockValSet valSet = mDs.nextBlock().getBlockValueSet();
        BlockSingleValIterator valIt = (BlockSingleValIterator) valSet.iterator();
        int val = valIt.nextIntVal();
        while (val != Constants.EOF) {
          sum += val;
          val = valIt.nextIntVal();
        }
      } catch (Exception e) {
        LOGGER.info("daysSinceEpoch column exception", e);
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan SV time col daysSinceEpoch : " + (stop - start) + " sum : " + sum);
    }
  }, 20000, 1000 * 5);
  // Periodically scan the multi-value column "viewerCompanies" and track the average entry length.
  TimerService.timer.scheduleAtFixedRate(new TimerTask() {
    @Override
    public void run() {
      long start = System.currentTimeMillis();
      long sum = 0;
      float sumOfLengths = 0F;
      float counter = 0F;
      try {
        RealtimeSegment segment = (RealtimeSegment) manager.getSegment();
        RealtimeColumnDataSource mDs = (RealtimeColumnDataSource) segment.getDataSource("viewerCompanies");
        Block b = mDs.nextBlock();
        BlockValSet valSet = b.getBlockValueSet();
        BlockMultiValIterator valIt = (BlockMultiValIterator) valSet.iterator();
        BlockMetadata m = b.getMetadata();
        int maxNumMultiValues = m.getMaxNumberOfMultiValues();
        while (valIt.hasNext()) {
          int[] vals = new int[maxNumMultiValues];
          int len = valIt.nextIntVal(vals);
          for (int i = 0; i < len; i++) {
            sum += vals[i];
          }
          sumOfLengths += len;
          counter++;
        }
      } catch (Exception e) {
        LOGGER.info("viewerCompanies column exception", e);
      }
      long stop = System.currentTimeMillis();
      LOGGER.info("time to scan MV col viewerCompanies : " + (stop - start) + " sum : " + sum + " average len : " + (sumOfLengths / counter));
    }
  }, 20000, 1000 * 5);
  while (keepOnRunning) {
    // Wait for the first timer task to flip keepOnRunning to false;
    // sleep briefly so the wait does not spin on a full core.
    Thread.sleep(1000);
  }
}
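The same single-value scan loop appears three times above with only the column name changing. A minimal sketch (not part of the original test) of that loop factored into a helper, assuming the same Pinot test classpath (RealtimeSegment, RealtimeColumnDataSource, BlockSingleValIterator, Constants.EOF):

  // Scans one single-value column of a realtime segment and returns the sum of its int values.
  private static long scanSingleValueColumn(RealtimeSegment segment, String column) {
    RealtimeColumnDataSource dataSource = (RealtimeColumnDataSource) segment.getDataSource(column);
    BlockValSet valSet = dataSource.nextBlock().getBlockValueSet();
    BlockSingleValIterator it = (BlockSingleValIterator) valSet.iterator();
    long sum = 0;
    // Accumulate every value until the iterator signals end-of-stream.
    for (int val = it.nextIntVal(); val != Constants.EOF; val = it.nextIntVal()) {
      sum += val;
    }
    return sum;
  }

With such a helper, each timer task body reduces to one call, e.g. sum = scanSingleValueColumn((RealtimeSegment) manager.getSegment(), "count").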
Use of com.linkedin.pinot.common.metrics.ServerMetrics in project pinot by linkedin.
From the class ScheduledRequestHandlerTest, method setupTestMethod.
@BeforeTest
public void setupTestMethod() {
  serverMetrics = new ServerMetrics(new MetricsRegistry());
  // Deep stubs let the channel().remoteAddress() call chain be stubbed in a single step.
  channelHandlerContext = mock(ChannelHandlerContext.class, RETURNS_DEEP_STUBS);
  when(channelHandlerContext.channel().remoteAddress()).thenAnswer(new Answer<InetSocketAddress>() {
    @Override
    public InetSocketAddress answer(InvocationOnMock invocationOnMock) throws Throwable {
      return new InetSocketAddress("localhost", 60000);
    }
  });
  queryScheduler = mock(QueryScheduler.class);
  queryExecutor = new ServerQueryExecutorV1Impl();
}
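Because the stubbed Answer always returns the same address, the same setup can be written more compactly with thenReturn; a sketch assuming the same Mockito deep-stubbed mock:

  when(channelHandlerContext.channel().remoteAddress())
      .thenReturn(new InetSocketAddress("localhost", 60000));

An Answer is only needed when the return value must be computed per invocation.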
Use of com.linkedin.pinot.common.metrics.ServerMetrics in project pinot by linkedin.
From the class RealtimeFileBasedReaderTest, method setUp.
private void setUp(SegmentVersion segmentVersion) throws Exception {
  filePath = RealtimeFileBasedReaderTest.class.getClassLoader().getResource(AVRO_DATA).getFile();
  fieldTypeMap = new HashMap<>();
  fieldTypeMap.put("column1", FieldType.DIMENSION);
  fieldTypeMap.put("column2", FieldType.DIMENSION);
  fieldTypeMap.put("column3", FieldType.DIMENSION);
  fieldTypeMap.put("column4", FieldType.DIMENSION);
  fieldTypeMap.put("column5", FieldType.DIMENSION);
  fieldTypeMap.put("column6", FieldType.DIMENSION);
  fieldTypeMap.put("column7", FieldType.DIMENSION);
  fieldTypeMap.put("column8", FieldType.DIMENSION);
  fieldTypeMap.put("column9", FieldType.DIMENSION);
  fieldTypeMap.put("column10", FieldType.DIMENSION);
  fieldTypeMap.put("weeksSinceEpochSunday", FieldType.DIMENSION);
  fieldTypeMap.put("daysSinceEpoch", FieldType.DIMENSION);
  fieldTypeMap.put("column13", FieldType.TIME);
  fieldTypeMap.put("count", FieldType.METRIC);
  schema = SegmentTestUtils.extractSchemaFromAvro(new File(filePath), fieldTypeMap, TimeUnit.MINUTES);
  StreamProviderConfig config = new FileBasedStreamProviderConfig(FileFormat.AVRO, filePath, schema);
  StreamProvider provider = new FileBasedStreamProviderImpl();
  final String tableName = RealtimeFileBasedReaderTest.class.getSimpleName() + ".noTable";
  provider.init(config, tableName, new ServerMetrics(new MetricsRegistry()));
  realtimeSegment = RealtimeSegmentImplTest.createRealtimeSegmentImpl(schema, 100000, tableName, segmentName,
      AVRO_DATA, new ServerMetrics(new MetricsRegistry()));
  // Drain the file-based stream into the realtime segment.
  GenericRow row = provider.next(new GenericRow());
  while (row != null) {
    realtimeSegment.index(row);
    row = provider.next(row);
  }
  provider.shutdown();
  if (new File("/tmp/realtime").exists()) {
    FileUtils.deleteQuietly(new File("/tmp/realtime"));
  }
  // Convert the in-memory realtime segment to an offline segment on disk, then load it back.
  RealtimeSegmentConverter converter =
      new RealtimeSegmentConverter(realtimeSegment, "/tmp/realtime", schema, tableName, segmentName, null);
  converter.build(segmentVersion);
  offlineSegment = Loaders.IndexSegment.load(new File("/tmp/realtime").listFiles()[0], ReadMode.mmap);
}
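A sketch of the same conversion flow with the hardcoded "/tmp/realtime" path held in one place, using commons-io's temp-directory helper (outputDir is an illustrative name, not from the test):

  File outputDir = new File(FileUtils.getTempDirectory(), "realtime");
  FileUtils.deleteQuietly(outputDir); // clear leftovers from earlier runs
  RealtimeSegmentConverter converter =
      new RealtimeSegmentConverter(realtimeSegment, outputDir.getAbsolutePath(), schema, tableName, segmentName, null);
  converter.build(segmentVersion);
  // The converter writes a single segment directory under outputDir.
  offlineSegment = Loaders.IndexSegment.load(outputDir.listFiles()[0], ReadMode.mmap);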
Use of com.linkedin.pinot.common.metrics.ServerMetrics in project pinot by linkedin.
From the class RealtimeSegmentTest, method before.
@BeforeClass
public static void before() throws Exception {
  filePath = RealtimeFileBasedReaderTest.class.getClassLoader().getResource(AVRO_DATA).getFile();
  fieldTypeMap = new HashMap<String, FieldSpec.FieldType>();
  fieldTypeMap.put("column1", FieldType.DIMENSION);
  fieldTypeMap.put("column2", FieldType.DIMENSION);
  fieldTypeMap.put("column3", FieldType.DIMENSION);
  fieldTypeMap.put("column4", FieldType.DIMENSION);
  fieldTypeMap.put("column5", FieldType.DIMENSION);
  fieldTypeMap.put("column6", FieldType.DIMENSION);
  fieldTypeMap.put("column7", FieldType.DIMENSION);
  fieldTypeMap.put("column8", FieldType.DIMENSION);
  fieldTypeMap.put("column9", FieldType.DIMENSION);
  fieldTypeMap.put("column10", FieldType.DIMENSION);
  fieldTypeMap.put("weeksSinceEpochSunday", FieldType.DIMENSION);
  fieldTypeMap.put("daysSinceEpoch", FieldType.DIMENSION);
  fieldTypeMap.put("column13", FieldType.TIME);
  fieldTypeMap.put("count", FieldType.METRIC);
  schema = SegmentTestUtils.extractSchemaFromAvro(new File(filePath), fieldTypeMap, TimeUnit.MINUTES);
  StreamProviderConfig config = new FileBasedStreamProviderConfig(FileFormat.AVRO, filePath, schema);
  StreamProvider provider = new FileBasedStreamProviderImpl();
  final String tableName = RealtimeSegmentTest.class.getSimpleName() + ".noTable";
  provider.init(config, tableName, new ServerMetrics(new MetricsRegistry()));
  // Build one segment with an inverted index on "count" and one without, from the same stream.
  List<String> invertedIdxCols = new ArrayList<>();
  invertedIdxCols.add("count");
  segmentWithInvIdx = new RealtimeSegmentImpl(schema, 100000, tableName, "noSegment", AVRO_DATA,
      new ServerMetrics(new MetricsRegistry()), invertedIdxCols, 2);
  segmentWithoutInvIdx = RealtimeSegmentImplTest.createRealtimeSegmentImpl(schema, 100000, tableName, "noSegment",
      AVRO_DATA, new ServerMetrics(new MetricsRegistry()));
  GenericRow row = provider.next(new GenericRow());
  while (row != null) {
    segmentWithInvIdx.index(row);
    segmentWithoutInvIdx.index(row);
    row = GenericRow.createOrReuseRow(row);
    row = provider.next(row);
  }
  provider.shutdown();
}
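The consume-and-index loop above is the common pattern for filling realtime segments in these tests. A minimal sketch (the helper name is illustrative, not from the project) that drains a StreamProvider into any number of segments, assuming the same Pinot test classpath:

  // Reads rows from the provider until exhausted and indexes each row into every segment.
  private static void drainInto(StreamProvider provider, RealtimeSegmentImpl... segments) throws Exception {
    GenericRow row = provider.next(new GenericRow());
    while (row != null) {
      for (RealtimeSegmentImpl segment : segments) {
        segment.index(row);
      }
      // Reuse the row object between records to avoid per-record allocation.
      row = GenericRow.createOrReuseRow(row);
      row = provider.next(row);
    }
  }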
Use of com.linkedin.pinot.common.metrics.ServerMetrics in project pinot by linkedin.
From the class RealtimeQueriesSentinelTest, method getRealtimeSegment.
private IndexSegment getRealtimeSegment() throws IOException {
  RealtimeSegmentImpl realtimeSegmentImpl = RealtimeSegmentImplTest.createRealtimeSegmentImpl(PINOT_SCHEMA, 100000,
      "testTable", "testTable_testTable", AVRO_DATA, new ServerMetrics(new MetricsRegistry()));
  realtimeSegmentImpl.setSegmentMetadata(getRealtimeSegmentZKMetadata());
  try {
    // Replay every record from the Avro test file into the realtime segment.
    DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(
        new File(TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA))));
    GenericRow genericRow = null;
    while (avroReader.hasNext()) {
      GenericRecord avroRecord = avroReader.next();
      genericRow = GenericRow.createOrReuseRow(genericRow);
      genericRow = AVRO_RECORD_TRANSFORMER.transform(avroRecord, genericRow);
      realtimeSegmentImpl.index(genericRow);
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
  realtimeSegmentImpl.setSegmentMetadata(getRealtimeSegmentZKMetadata());
  return realtimeSegmentImpl;
}
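The avroReader above is opened but never closed. A sketch of the same loop with try-with-resources (DataFileStream implements Closeable), assuming the same helper classes:

  try (DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(
      new File(TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA))))) {
    GenericRow genericRow = null;
    while (avroReader.hasNext()) {
      genericRow = GenericRow.createOrReuseRow(genericRow);
      genericRow = AVRO_RECORD_TRANSFORMER.transform(avroReader.next(), genericRow);
      realtimeSegmentImpl.index(genericRow);
    }
  } // the Avro stream is released here even if indexing throws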