Use of com.linkedin.pinot.common.data.DimensionFieldSpec in project pinot by linkedin.
From the class TransformGroupByTest, method buildSchema:
/**
 * Helper method to build a schema with one string dimension column, one double metric column,
 * and one long time column.
 */
private static Schema buildSchema() {
  Schema schema = new Schema();

  DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(DIMENSION_NAME, FieldSpec.DataType.STRING, true);
  schema.addField(dimensionFieldSpec);

  MetricFieldSpec metricFieldSpec = new MetricFieldSpec(METRIC_NAME, FieldSpec.DataType.DOUBLE);
  schema.addField(metricFieldSpec);

  TimeFieldSpec timeFieldSpec = new TimeFieldSpec(TIME_COLUMN_NAME, FieldSpec.DataType.LONG, TimeUnit.MILLISECONDS);
  schema.setTimeFieldSpec(timeFieldSpec);

  return schema;
}
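A quick usage sketch (not part of the original test): build the schema and sanity-check its contents with Schema accessors that appear elsewhere in these examples (getDimensionNames, getMetricNames, getAllFieldSpecs). The DIMENSION_NAME and METRIC_NAME constants are assumed to be those used in buildSchema() above.

// Hedged usage sketch; assumes only the three fields added in buildSchema().
Schema schema = buildSchema();
Assert.assertEquals(schema.getDimensionNames(), Collections.singletonList(DIMENSION_NAME));
Assert.assertEquals(schema.getMetricNames(), Collections.singletonList(METRIC_NAME));
// One dimension + one metric + one time column.
Assert.assertEquals(schema.getAllFieldSpecs().size(), 3);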
Use of com.linkedin.pinot.common.data.DimensionFieldSpec in project pinot by linkedin.
From the class BitmapInvertedIndexCreatorTest, method testMultiValue:
@Test
public void testMultiValue() throws IOException {
  boolean singleValue = false;
  String colName = "multi_value_col";
  FieldSpec spec = new DimensionFieldSpec(colName, DataType.INT, singleValue);
  int numDocs = 20;
  int[][] data = new int[numDocs][];
  int maxLength = 10;
  int cardinality = 10;

  // forceMkdir() already creates the directories, so no extra mkdirs() call is needed.
  File indexDirHeap = new File("/tmp/indexDirHeap");
  FileUtils.forceMkdir(indexDirHeap);
  File indexDirOffHeap = new File("/tmp/indexDirOffHeap");
  FileUtils.forceMkdir(indexDirOffHeap);
  File bitmapIndexFileOffHeap =
      new File(indexDirOffHeap, colName + V1Constants.Indexes.BITMAP_INVERTED_INDEX_FILE_EXTENSION);
  File bitmapIndexFileHeap =
      new File(indexDirHeap, colName + V1Constants.Indexes.BITMAP_INVERTED_INDEX_FILE_EXTENSION);

  // Generate a random multi-value data set. Row lengths are drawn from
  // [0, maxLength), so a document may have no values at all.
  Random r = new Random();
  Map<Integer, Set<Integer>> postingListMap = new HashMap<>();
  for (int i = 0; i < cardinality; i++) {
    postingListMap.put(i, new LinkedHashSet<Integer>());
  }
  int totalNumberOfEntries = 0;
  for (int docId = 0; docId < numDocs; docId++) {
    int length = r.nextInt(maxLength);
    data[docId] = new int[length];
    totalNumberOfEntries += length;
    for (int j = 0; j < length; j++) {
      data[docId][j] = r.nextInt(cardinality);
      postingListMap.get(data[docId][j]).add(docId);
    }
    // Arrays.toString() prints the row contents rather than the array reference.
    LOGGER.debug("docId:" + docId + " dictIds:" + Arrays.toString(data[docId]));
  }
  for (int i = 0; i < cardinality; i++) {
    LOGGER.debug("Posting list for " + i + " : " + postingListMap.get(i));
  }

  // Generate bitmaps using OffHeapBitmapInvertedIndexCreator and validate.
  OffHeapBitmapInvertedIndexCreator offHeapCreator =
      new OffHeapBitmapInvertedIndexCreator(indexDirOffHeap, cardinality, numDocs, totalNumberOfEntries, spec);
  for (int i = 0; i < numDocs; i++) {
    offHeapCreator.add(i, data[i]);
  }
  offHeapCreator.seal();
  validate(colName, bitmapIndexFileOffHeap, cardinality, postingListMap);

  // Generate bitmaps using HeapBitmapInvertedIndexCreator and validate.
  HeapBitmapInvertedIndexCreator heapCreator =
      new HeapBitmapInvertedIndexCreator(indexDirHeap, cardinality, numDocs, totalNumberOfEntries, spec);
  for (int i = 0; i < numDocs; i++) {
    heapCreator.add(i, data[i]);
  }
  heapCreator.seal();
  validate(colName, bitmapIndexFileHeap, cardinality, postingListMap);

  // Assert that the heap and off-heap creators produce identical index files.
  Assert.assertEquals(bitmapIndexFileHeap.length(), bitmapIndexFileOffHeap.length());
  Assert.assertTrue(FileUtils.contentEquals(bitmapIndexFileHeap, bitmapIndexFileOffHeap));

  FileUtils.deleteQuietly(indexDirHeap);
  FileUtils.deleteQuietly(indexDirOffHeap);
}
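The validate(...) helper invoked above is not shown on this page. Below is a minimal sketch of what it could look like, assuming the on-disk layout written by the creators is a header of (cardinality + 1) int offsets followed by the serialized org.roaringbitmap bitmaps; the layout and the helper body are assumptions here, and the actual helper lives in BitmapInvertedIndexCreatorTest.

// Hedged sketch of validate(); assumes an offset-header file layout and uses
// java.io.RandomAccessFile, java.nio.ByteBuffer, java.nio.channels.FileChannel,
// and org.roaringbitmap.buffer.ImmutableRoaringBitmap.
private static void validate(String colName, File bitmapIndexFile, int cardinality,
    Map<Integer, Set<Integer>> postingListMap) throws IOException {
  try (RandomAccessFile raf = new RandomAccessFile(bitmapIndexFile, "r")) {
    ByteBuffer buffer = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, raf.length());
    // Read the (cardinality + 1) absolute offsets of the serialized bitmaps.
    int[] offsets = new int[cardinality + 1];
    for (int i = 0; i <= cardinality; i++) {
      offsets[i] = buffer.getInt();
    }
    // For each dictionary id, deserialize its bitmap and compare against the expected posting list.
    for (int dictId = 0; dictId < cardinality; dictId++) {
      buffer.position(offsets[dictId]);
      ImmutableRoaringBitmap bitmap = new ImmutableRoaringBitmap(buffer.slice());
      Set<Integer> expectedDocIds = postingListMap.get(dictId);
      Assert.assertEquals(bitmap.getCardinality(), expectedDocIds.size());
      for (int docId : expectedDocIds) {
        Assert.assertTrue(bitmap.contains(docId));
      }
    }
  }
}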
Use of com.linkedin.pinot.common.data.DimensionFieldSpec in project pinot by linkedin.
From the class StringDictionaryPerfTest, method buildSegment:
/**
 * Helper method to build a segment:
 * <ul>
 *   <li> Segment contains one string column </li>
 *   <li> Row values for the column are randomly generated strings of length 1 to 100 </li>
 * </ul>
 *
 * @param dictLength Length of the dictionary
 * @throws Exception
 */
public void buildSegment(int dictLength) throws Exception {
  Schema schema = new Schema();
  String segmentName = "perfTestSegment" + System.currentTimeMillis();
  _indexDir = new File(TMP_DIR + File.separator + segmentName);
  _indexDir.deleteOnExit();

  FieldSpec fieldSpec = new DimensionFieldSpec(COLUMN_NAME, FieldSpec.DataType.STRING, true);
  schema.addField(fieldSpec);

  _dictLength = dictLength;
  _inputStrings = new String[dictLength];

  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  config.setOutDir(_indexDir.getParent());
  config.setFormat(FileFormat.AVRO);
  config.setSegmentName(segmentName);

  // Generate dictLength unique random strings, re-drawing on collision.
  Random random = new Random(System.nanoTime());
  final List<GenericRow> data = new ArrayList<>();
  Set<String> uniqueStrings = new HashSet<>(dictLength);
  int i = 0;
  while (i < dictLength) {
    HashMap<String, Object> map = new HashMap<>();
    String randomString = RandomStringUtils.randomAlphanumeric(1 + random.nextInt(MAX_STRING_LENGTH));
    if (uniqueStrings.contains(randomString)) {
      continue;
    }
    _inputStrings[i] = randomString;
    uniqueStrings.add(randomString);
    // The row key must match the schema column name for the value to land in the column.
    map.put(COLUMN_NAME, _inputStrings[i++]);
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    data.add(genericRow);
  }

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader reader = getGenericRowRecordReader(schema, data);
  driver.init(config, reader);
  driver.build();
}
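The getGenericRowRecordReader(...) helper is not shown here. A plausible list-backed implementation follows; the RecordReader method set (init, rewind, hasNext, next, getSchema, close) is an assumption based on the interface of this Pinot version, not taken from the original source.

// Hedged sketch of a list-backed RecordReader over in-memory GenericRows.
private static RecordReader getGenericRowRecordReader(final Schema schema, final List<GenericRow> data) {
  return new RecordReader() {
    private int _nextRow = 0;

    @Override
    public void init() {
      // Nothing to open: the rows are already in memory.
    }

    @Override
    public void rewind() {
      _nextRow = 0;
    }

    @Override
    public boolean hasNext() {
      return _nextRow < data.size();
    }

    @Override
    public GenericRow next() {
      return data.get(_nextRow++);
    }

    @Override
    public Schema getSchema() {
      return schema;
    }

    @Override
    public void close() {
      // Nothing to release.
    }
  };
}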
Use of com.linkedin.pinot.common.data.DimensionFieldSpec in project pinot by linkedin.
From the class RawIndexBenchmark, method buildSegment:
/**
 * Helper method that builds a segment containing two columns, both populated with data from the input file.
 * The first column has raw indices (no dictionary), whereas the second column is dictionary encoded.
 *
 * @throws Exception
 */
private File buildSegment() throws Exception {
  Schema schema = new Schema();
  for (int i = 0; i < NUM_COLUMNS; i++) {
    String column = "column_" + i;
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(column, FieldSpec.DataType.STRING, true);
    schema.addField(dimensionFieldSpec);
  }

  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  config.setRawIndexCreationColumns(Collections.singletonList(_rawIndexColumn));
  config.setOutDir(SEGMENT_DIR_NAME);
  config.setSegmentName(SEGMENT_NAME);

  final List<GenericRow> rows = new ArrayList<>();
  System.out.println("Reading data...");
  // try-with-resources ensures the reader is closed even if an exception is thrown.
  try (BufferedReader reader = new BufferedReader(new FileReader(_dataFile))) {
    String value;
    while ((value = reader.readLine()) != null) {
      HashMap<String, Object> map = new HashMap<>();
      // Every column of the row gets the same value from the input line.
      for (FieldSpec fieldSpec : schema.getAllFieldSpecs()) {
        map.put(fieldSpec.getName(), value);
      }
      GenericRow genericRow = new GenericRow();
      genericRow.init(map);
      rows.add(genericRow);
      _numRows++;
      if (_numRows % 1000000 == 0) {
        System.out.println("Read rows: " + _numRows);
      }
    }
  }

  System.out.println("Generating segment...");
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader recordReader = new TestRecordReader(rows, schema);
  driver.init(config, recordReader);
  driver.build();
  return new File(SEGMENT_DIR_NAME, SEGMENT_NAME);
}
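Once the segment is built, a simple hedged follow-up is to list the files in the generated segment directory and eyeball the footprint of the raw-index column against the dictionary-encoded one. Only java.io.File APIs are used below; the per-column file naming is whatever this Pinot version's V1Constants defines, so this sketch just prints everything.

// Hypothetical follow-up (not part of the original benchmark): print per-file
// sizes in the generated segment directory.
File segmentDir = buildSegment();
File[] indexFiles = segmentDir.listFiles();
if (indexFiles != null) {
  for (File indexFile : indexFiles) {
    System.out.println(indexFile.getName() + ": " + indexFile.length() + " bytes");
  }
}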
Use of com.linkedin.pinot.common.data.DimensionFieldSpec in project pinot by linkedin.
From the class AutoloadPinotMetricsServiceTest, method testRefreshDataset:
@Test(dependsOnMethods = { "testAddNewDataset" })
public void testRefreshDataset() throws Exception {
  // Add a new dimension to the schema and refresh the dataset.
  DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec("newDimension", DataType.STRING, true);
  schema.addField(dimensionFieldSpec);
  testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, datasetConfig);
  Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);

  DatasetConfigDTO newDatasetConfig1 = datasetConfigDAO.findByDataset(dataset);
  Assert.assertEquals(newDatasetConfig1.getDataset(), dataset);
  Assert.assertEquals(Sets.newHashSet(newDatasetConfig1.getDimensions()), Sets.newHashSet(schema.getDimensionNames()));

  // Add a new metric to the schema and refresh again.
  MetricFieldSpec metricFieldSpec = new MetricFieldSpec("newMetric", DataType.LONG);
  schema.addField(metricFieldSpec);
  testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, newDatasetConfig1);
  Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);

  // Every metric in the schema should have a corresponding metric config.
  List<MetricConfigDTO> metricConfigs = metricConfigDAO.findByDataset(dataset);
  List<String> schemaMetricNames = schema.getMetricNames();
  List<Long> metricIds = new ArrayList<>();
  Assert.assertEquals(metricConfigs.size(), schemaMetricNames.size());
  for (MetricConfigDTO metricConfig : metricConfigs) {
    Assert.assertTrue(schemaMetricNames.contains(metricConfig.getName()));
    metricIds.add(metricConfig.getId());
  }

  DashboardConfigDTO dashboardConfig = dashboardConfigDAO.findByName(DashboardConfigBean.DEFAULT_DASHBOARD_PREFIX + dataset);
  Assert.assertEquals(dashboardConfig.getMetricIds(), metricIds);
}
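As a hypothetical extension (not part of the original test), one could also assert that a refresh with an unchanged schema is a no-op. The fragment below reuses the variables from testRefreshDataset and assumes addPinotDataset(...) is idempotent for identical schemas.

// Hypothetical extra check, assuming addPinotDataset(...) is idempotent when
// the schema has not changed since the last refresh.
testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, newDatasetConfig1);
Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);
Assert.assertEquals(metricConfigDAO.findByDataset(dataset).size(), schema.getMetricNames().size());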