Example usage of com.linkedin.pinot.common.data.MetricFieldSpec in the LinkedIn Pinot project: class AutoloadPinotMetricsServiceTest, method testRefreshDataset.
@Test(dependsOnMethods = { "testAddNewDataset" })
public void testRefreshDataset() throws Exception {
  // Add a new dimension to the schema, then refresh: the existing dataset
  // config must be updated in place rather than duplicated.
  DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec("newDimension", DataType.STRING, true);
  schema.addField(dimensionFieldSpec);
  testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, datasetConfig);
  Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);
  DatasetConfigDTO newDatasetConfig1 = datasetConfigDAO.findByDataset(dataset);
  Assert.assertEquals(newDatasetConfig1.getDataset(), dataset);
  Assert.assertEquals(Sets.newHashSet(newDatasetConfig1.getDimensions()), Sets.newHashSet(schema.getDimensionNames()));
  // Add a new metric and refresh again; still exactly one dataset config.
  MetricFieldSpec metricFieldSpec = new MetricFieldSpec("newMetric", DataType.LONG);
  schema.addField(metricFieldSpec);
  testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, newDatasetConfig1);
  Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);
  // Every schema metric must have a corresponding metric config.
  List<MetricConfigDTO> metricConfigs = metricConfigDAO.findByDataset(dataset);
  List<String> schemaMetricNames = schema.getMetricNames();
  List<Long> metricIds = new ArrayList<>();
  Assert.assertEquals(metricConfigs.size(), schemaMetricNames.size());
  for (MetricConfigDTO metricConfig : metricConfigs) {
    Assert.assertTrue(schemaMetricNames.contains(metricConfig.getName()));
    metricIds.add(metricConfig.getId());
  }
  DashboardConfigDTO dashboardConfig = dashboardConfigDAO.findByName(DashboardConfigBean.DEFAULT_DASHBOARD_PREFIX + dataset);
  // Compare as sets: the DAO's iteration order is not guaranteed, and the
  // dimension assertion above is already order-insensitive — keep both
  // comparisons consistent and non-brittle.
  Assert.assertEquals(Sets.newHashSet(dashboardConfig.getMetricIds()), Sets.newHashSet(metricIds));
}
Example usage of com.linkedin.pinot.common.data.MetricFieldSpec in the LinkedIn Pinot project: class DataFetcherTest, method setup.
@BeforeClass
private void setup() throws Exception {
  // Builds a single-segment index from randomly generated rows, loads it,
  // and wires a DataFetcher over all of its column data sources.
  GenericRow[] segmentData = new GenericRow[NUM_ROWS];
  // Generate random dimension and metric values.
  for (int i = 0; i < NUM_ROWS; i++) {
    double randomDouble = _random.nextDouble();
    String randomDoubleString = String.valueOf(randomDouble);
    _dimensionValues[i] = randomDoubleString;
    // NOTE(review): nextDouble() is in [0, 1), so the (int)/(long) casts below
    // always truncate to 0 — int and long metric columns end up all zeros.
    // Presumably intentional for this fixture; confirm if non-zero values matter.
    _intMetricValues[i] = (int) randomDouble;
    _longMetricValues[i] = (long) randomDouble;
    _floatMetricValues[i] = (float) randomDouble;
    _doubleMetricValues[i] = randomDouble;
    // One row: the same values go into both dictionary-encoded and
    // raw-index (no-dictionary) metric columns.
    HashMap<String, Object> map = new HashMap<>();
    map.put(DIMENSION_NAME, _dimensionValues[i]);
    map.put(INT_METRIC_NAME, _intMetricValues[i]);
    map.put(LONG_METRIC_NAME, _longMetricValues[i]);
    map.put(FLOAT_METRIC_NAME, _floatMetricValues[i]);
    map.put(DOUBLE_METRIC_NAME, _doubleMetricValues[i]);
    map.put(NO_DICT_INT_METRIC_NAME, _intMetricValues[i]);
    map.put(NO_DICT_LONG_METRIC_NAME, _longMetricValues[i]);
    map.put(NO_DICT_FLOAT_METRIC_NAME, _floatMetricValues[i]);
    map.put(NO_DICT_DOUBLE_METRIC_NAME, _doubleMetricValues[i]);
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    segmentData[i] = genericRow;
  }
  // Create an index segment with the random dimension and metric values.
  final Schema schema = new Schema();
  schema.addField(new DimensionFieldSpec(DIMENSION_NAME, FieldSpec.DataType.STRING, true));
  schema.addField(new MetricFieldSpec(INT_METRIC_NAME, FieldSpec.DataType.INT));
  schema.addField(new MetricFieldSpec(LONG_METRIC_NAME, FieldSpec.DataType.LONG));
  schema.addField(new MetricFieldSpec(FLOAT_METRIC_NAME, FieldSpec.DataType.FLOAT));
  schema.addField(new MetricFieldSpec(DOUBLE_METRIC_NAME, FieldSpec.DataType.DOUBLE));
  schema.addField(new MetricFieldSpec(NO_DICT_INT_METRIC_NAME, FieldSpec.DataType.INT));
  schema.addField(new MetricFieldSpec(NO_DICT_LONG_METRIC_NAME, FieldSpec.DataType.LONG));
  schema.addField(new MetricFieldSpec(NO_DICT_FLOAT_METRIC_NAME, FieldSpec.DataType.FLOAT));
  schema.addField(new MetricFieldSpec(NO_DICT_DOUBLE_METRIC_NAME, FieldSpec.DataType.DOUBLE));
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  // Clear any leftover segment directory from a previous run before building.
  FileUtils.deleteQuietly(new File(INDEX_DIR_PATH));
  config.setOutDir(INDEX_DIR_PATH);
  config.setSegmentName(SEGMENT_NAME);
  // The NO_DICT_* columns are created as raw (non-dictionary-encoded) indexes.
  config.setRawIndexCreationColumns(Arrays.asList(NO_DICT_INT_METRIC_NAME, NO_DICT_LONG_METRIC_NAME, NO_DICT_FLOAT_METRIC_NAME, NO_DICT_DOUBLE_METRIC_NAME));
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  driver.init(config, new TestDataRecordReader(schema, segmentData));
  driver.build();
  // Load the freshly built segment fully into heap memory.
  IndexSegment indexSegment = Loaders.IndexSegment.load(new File(INDEX_DIR_PATH, SEGMENT_NAME), ReadMode.heap);
  Map<String, BaseOperator> dataSourceMap = new HashMap<>();
  for (String column : indexSegment.getColumnNames()) {
    dataSourceMap.put(column, indexSegment.getDataSource(column));
  }
  // Get a data fetcher for the index segment.
  _dataFetcher = new DataFetcher(dataSourceMap);
}
Example usage of com.linkedin.pinot.common.data.MetricFieldSpec in the LinkedIn Pinot project: class PinotSegmentRecordReaderTest, method createPinotSchema.
private Schema createPinotSchema() {
  // Build a minimal test schema covering each field category:
  // a single-value dimension, a multi-value dimension, two metrics,
  // and an hourly LONG time column.
  Schema testSchema = new Schema();
  testSchema.setSchemaName("schema");
  testSchema.addField(new DimensionFieldSpec(D_SV_1, DataType.STRING, true));
  testSchema.addField(new DimensionFieldSpec(D_MV_1, DataType.STRING, false));
  testSchema.addField(new MetricFieldSpec(M1, DataType.INT));
  testSchema.addField(new MetricFieldSpec(M2, DataType.FLOAT));
  testSchema.addField(new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, TIME)));
  return testSchema;
}
Example usage of com.linkedin.pinot.common.data.MetricFieldSpec in the LinkedIn Pinot project: class MetricBuffer, method fromBytes.
public static MetricBuffer fromBytes(byte[] bytes, List<MetricFieldSpec> metricFieldSpecs) {
  // Deserialize one value per metric field spec, in spec order, from the
  // serialized byte array produced by toBytes().
  ByteBuffer buffer = ByteBuffer.wrap(bytes);
  int numMetrics = metricFieldSpecs.size();
  Object[] deserializedValues = new Object[numMetrics];
  for (int index = 0; index < numMetrics; index++) {
    MetricFieldSpec spec = metricFieldSpecs.get(index);
    if (spec.getDerivedMetricType() == DerivedMetricType.HLL) {
      // HLL metrics are stored as a fixed-size byte blob of spec.getFieldSize() bytes.
      // TODO: buffer reuse
      byte[] serializedHll = new byte[spec.getFieldSize()];
      buffer.get(serializedHll);
      deserializedValues[index] = HllUtil.buildHllFromBytes(serializedHll);
    } else {
      // Primitive metrics are read directly at their natural width.
      switch (spec.getDataType()) {
        case SHORT:
          deserializedValues[index] = buffer.getShort();
          break;
        case INT:
          deserializedValues[index] = buffer.getInt();
          break;
        case LONG:
          deserializedValues[index] = buffer.getLong();
          break;
        case FLOAT:
          deserializedValues[index] = buffer.getFloat();
          break;
        case DOUBLE:
          deserializedValues[index] = buffer.getDouble();
          break;
        default:
          throw new IllegalArgumentException("Unsupported metric type " + spec.getDataType());
      }
    }
  }
  return new MetricBuffer(deserializedValues, metricFieldSpecs);
}
Example usage of com.linkedin.pinot.common.data.MetricFieldSpec in the LinkedIn Pinot project: class MetricBuffer, method toBytes.
public byte[] toBytes(int numBytes) throws IOException {
  // Serialize every metric value, in field-spec order, into a fixed-size
  // byte array of exactly numBytes bytes (the caller supplies the total size).
  byte[] serialized = new byte[numBytes];
  ByteBuffer buffer = ByteBuffer.wrap(serialized);
  int index = 0;
  for (MetricFieldSpec spec : metricFieldSpecs) {
    Object value = values[index++];
    if (spec.getDerivedMetricType() == DerivedMetricType.HLL) {
      // HLL metrics are written as their raw serialized byte blob.
      buffer.put(((HyperLogLog) value).getBytes());
    } else {
      // Primitive metrics are written at their natural width.
      switch (spec.getDataType()) {
        case SHORT:
          buffer.putShort(((Number) value).shortValue());
          break;
        case INT:
          buffer.putInt(((Number) value).intValue());
          break;
        case LONG:
          buffer.putLong(((Number) value).longValue());
          break;
        case FLOAT:
          buffer.putFloat(((Number) value).floatValue());
          break;
        case DOUBLE:
          buffer.putDouble(((Number) value).doubleValue());
          break;
        default:
          throw new IllegalArgumentException("Unsupported metric type " + spec.getDataType());
      }
    }
  }
  return serialized;
}
Aggregations