Use of com.linkedin.pinot.core.data.GenericRow in project pinot by LinkedIn:
class DefaultAggregationExecutorTest, method setupSegment.
/**
 * Helper method to set up the index segment on which to perform aggregation tests.
 * - Generates a segment with {@link #NUM_METRIC_COLUMNS} metric columns and {@link #NUM_ROWS} rows.
 * - The metric columns are filled with random 'double' data, which is also copied
 *   into _inputData[] so it can be used to verify the results.
 *
 * @throws Exception
 */
private void setupSegment() throws Exception {
  if (INDEX_DIR.exists()) {
    FileUtils.deleteQuietly(INDEX_DIR);
  }
  SegmentGeneratorConfig config = new SegmentGeneratorConfig();
  config.setSegmentName(SEGMENT_NAME);
  config.setOutDir(INDEX_DIR.getAbsolutePath());
  Schema schema = buildSchema();
  config.setSchema(schema);
  final List<GenericRow> data = new ArrayList<>();
  for (int i = 0; i < NUM_ROWS; i++) {
    Map<String, Object> map = new HashMap<String, Object>();
    for (int j = 0; j < _columns.length; j++) {
      String metricName = _columns[j];
      double value = _random.nextDouble() * MAX_VALUE;
      _inputData[j][i] = value;
      map.put(metricName, value);
    }
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    data.add(genericRow);
    _docIdSet[i] = i;
  }
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader reader = new TestUtils.GenericRowRecordReader(schema, data);
  driver.init(config, reader);
  driver.build();
  _indexSegment = Loaders.IndexSegment.load(new File(INDEX_DIR, driver.getSegmentName()), ReadMode.heap);
}
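Because the random values are mirrored into _inputData, the expected aggregate for any metric column can be derived directly from that array. A minimal sketch of such a check, assuming a SUM aggregation; this helper is hypothetical and not part of the original test:

// Hypothetical helper: computes the expected SUM for metric column j from the
// mirrored input data, for comparison with the aggregation executor's output.
private double expectedSum(int j) {
  double sum = 0.0;
  for (int i = 0; i < NUM_ROWS; i++) {
    sum += _inputData[j][i];
  }
  return sum;
}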
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by LinkedIn:
class StringDictionaryPerfTest, method buildSegment.
/**
 * Helper method to build a segment:
 * <ul>
 *   <li> Segment contains one string column </li>
 *   <li> Row values for the column are randomly generated strings of length 1 to 100 </li>
 * </ul>
 *
 * @param dictLength Length of the dictionary
 * @throws Exception
 */
public void buildSegment(int dictLength) throws Exception {
  Schema schema = new Schema();
  String segmentName = "perfTestSegment" + System.currentTimeMillis();
  _indexDir = new File(TMP_DIR + File.separator + segmentName);
  _indexDir.deleteOnExit();
  FieldSpec fieldSpec = new DimensionFieldSpec(COLUMN_NAME, FieldSpec.DataType.STRING, true);
  schema.addField(fieldSpec);
  _dictLength = dictLength;
  _inputStrings = new String[dictLength];
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  config.setOutDir(_indexDir.getParent());
  config.setFormat(FileFormat.AVRO);
  config.setSegmentName(segmentName);
  Random random = new Random(System.nanoTime());
  final List<GenericRow> data = new ArrayList<>();
  Set<String> uniqueStrings = new HashSet<>(dictLength);
  int i = 0;
  while (i < dictLength) {
    HashMap<String, Object> map = new HashMap<>();
    String randomString = RandomStringUtils.randomAlphanumeric(1 + random.nextInt(MAX_STRING_LENGTH));
    if (uniqueStrings.contains(randomString)) {
      continue;
    }
    _inputStrings[i] = randomString;
    uniqueStrings.add(randomString);
    // Key the value by the same column name that was registered in the schema.
    map.put(COLUMN_NAME, _inputStrings[i++]);
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    data.add(genericRow);
  }
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader reader = getGenericRowRecordReader(schema, data);
  driver.init(config, reader);
  driver.build();
}
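Once the segment is built, the perf test can time lookups against the generated strings. A rough sketch of such a timing helper; the Function parameter is a stand-in for whatever dictionary read path is being benchmarked, not a Pinot API, and the helper itself is hypothetical:

// Hypothetical timing helper: looks up every generated string once and returns
// the elapsed time in milliseconds. The Function parameter is a placeholder for
// the dictionary read path under test (e.g. a value-to-id lookup).
private long timeLookups(java.util.function.Function<String, Integer> lookup) {
  long start = System.nanoTime();
  for (String s : _inputStrings) {
    lookup.apply(s);
  }
  return (System.nanoTime() - start) / 1_000_000;
}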
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by LinkedIn:
class RawIndexBenchmark, method buildSegment.
/**
 * Helper method that builds a segment containing two columns, both with data from the input file.
 * The first column has raw indices (no dictionary), whereas the second column is dictionary encoded.
 *
 * @throws Exception
 */
private File buildSegment() throws Exception {
  Schema schema = new Schema();
  for (int i = 0; i < NUM_COLUMNS; i++) {
    String column = "column_" + i;
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(column, FieldSpec.DataType.STRING, true);
    schema.addField(dimensionFieldSpec);
  }
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  config.setRawIndexCreationColumns(Collections.singletonList(_rawIndexColumn));
  config.setOutDir(SEGMENT_DIR_NAME);
  config.setSegmentName(SEGMENT_NAME);
  BufferedReader reader = new BufferedReader(new FileReader(_dataFile));
  String value;
  final List<GenericRow> rows = new ArrayList<>();
  System.out.println("Reading data...");
  while ((value = reader.readLine()) != null) {
    HashMap<String, Object> map = new HashMap<>();
    for (FieldSpec fieldSpec : schema.getAllFieldSpecs()) {
      map.put(fieldSpec.getName(), value);
    }
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    rows.add(genericRow);
    _numRows++;
    if (_numRows % 1000000 == 0) {
      System.out.println("Read rows: " + _numRows);
    }
  }
  // Close the input file before kicking off segment generation.
  reader.close();
  System.out.println("Generating segment...");
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader recordReader = new TestRecordReader(rows, schema);
  driver.init(config, recordReader);
  driver.build();
  return new File(SEGMENT_DIR_NAME, SEGMENT_NAME);
}
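The returned directory can then be loaded the same way the other examples on this page load their segments. A short sketch reusing the Loaders call shown above, with heap read mode assumed:

// Load the freshly built segment so the raw and dictionary-encoded columns can be scanned.
File segmentDir = buildSegment();
IndexSegment segment = Loaders.IndexSegment.load(segmentDir, ReadMode.heap);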
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by LinkedIn:
class DataFetcherTest, method setup.
@BeforeClass
private void setup() throws Exception {
  GenericRow[] segmentData = new GenericRow[NUM_ROWS];
  // Generate random dimension and metric values.
  for (int i = 0; i < NUM_ROWS; i++) {
    double randomDouble = _random.nextDouble();
    String randomDoubleString = String.valueOf(randomDouble);
    _dimensionValues[i] = randomDoubleString;
    _intMetricValues[i] = (int) randomDouble;
    _longMetricValues[i] = (long) randomDouble;
    _floatMetricValues[i] = (float) randomDouble;
    _doubleMetricValues[i] = randomDouble;
    HashMap<String, Object> map = new HashMap<>();
    map.put(DIMENSION_NAME, _dimensionValues[i]);
    map.put(INT_METRIC_NAME, _intMetricValues[i]);
    map.put(LONG_METRIC_NAME, _longMetricValues[i]);
    map.put(FLOAT_METRIC_NAME, _floatMetricValues[i]);
    map.put(DOUBLE_METRIC_NAME, _doubleMetricValues[i]);
    map.put(NO_DICT_INT_METRIC_NAME, _intMetricValues[i]);
    map.put(NO_DICT_LONG_METRIC_NAME, _longMetricValues[i]);
    map.put(NO_DICT_FLOAT_METRIC_NAME, _floatMetricValues[i]);
    map.put(NO_DICT_DOUBLE_METRIC_NAME, _doubleMetricValues[i]);
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    segmentData[i] = genericRow;
  }
  // Create an index segment with the random dimension and metric values.
  final Schema schema = new Schema();
  schema.addField(new DimensionFieldSpec(DIMENSION_NAME, FieldSpec.DataType.STRING, true));
  schema.addField(new MetricFieldSpec(INT_METRIC_NAME, FieldSpec.DataType.INT));
  schema.addField(new MetricFieldSpec(LONG_METRIC_NAME, FieldSpec.DataType.LONG));
  schema.addField(new MetricFieldSpec(FLOAT_METRIC_NAME, FieldSpec.DataType.FLOAT));
  schema.addField(new MetricFieldSpec(DOUBLE_METRIC_NAME, FieldSpec.DataType.DOUBLE));
  schema.addField(new MetricFieldSpec(NO_DICT_INT_METRIC_NAME, FieldSpec.DataType.INT));
  schema.addField(new MetricFieldSpec(NO_DICT_LONG_METRIC_NAME, FieldSpec.DataType.LONG));
  schema.addField(new MetricFieldSpec(NO_DICT_FLOAT_METRIC_NAME, FieldSpec.DataType.FLOAT));
  schema.addField(new MetricFieldSpec(NO_DICT_DOUBLE_METRIC_NAME, FieldSpec.DataType.DOUBLE));
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  FileUtils.deleteQuietly(new File(INDEX_DIR_PATH));
  config.setOutDir(INDEX_DIR_PATH);
  config.setSegmentName(SEGMENT_NAME);
  config.setRawIndexCreationColumns(
      Arrays.asList(NO_DICT_INT_METRIC_NAME, NO_DICT_LONG_METRIC_NAME, NO_DICT_FLOAT_METRIC_NAME, NO_DICT_DOUBLE_METRIC_NAME));
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  driver.init(config, new TestDataRecordReader(schema, segmentData));
  driver.build();
  IndexSegment indexSegment = Loaders.IndexSegment.load(new File(INDEX_DIR_PATH, SEGMENT_NAME), ReadMode.heap);
  Map<String, BaseOperator> dataSourceMap = new HashMap<>();
  for (String column : indexSegment.getColumnNames()) {
    dataSourceMap.put(column, indexSegment.getDataSource(column));
  }
  // Get a data fetcher for the index segment.
  _dataFetcher = new DataFetcher(dataSourceMap);
}
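The arrays populated in setup() (_intMetricValues, _longMetricValues, and so on) serve as the expected values for later assertions. A minimal sketch of a verification helper; it is hypothetical, and the actual fetch call is omitted because the DataFetcher read API is not shown in this snippet:

// Hypothetical verification helper: compares int values fetched for a block of
// documents against the expected values generated in setup().
private void assertIntBlock(int[] docIds, int[] fetchedValues) {
  for (int i = 0; i < docIds.length; i++) {
    Assert.assertEquals(fetchedValues[i], _intMetricValues[docIds[i]], "Mismatch at docId " + docIds[i]);
  }
}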
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by LinkedIn:
class PinotSegmentRecordReaderTest, method testPinotSegmentRecordReader.
@Test
public void testPinotSegmentRecordReader() throws Exception {
  List<GenericRow> outputRows = new ArrayList<>();
  PinotSegmentRecordReader pinotSegmentRecordReader = new PinotSegmentRecordReader(segmentIndexDir);
  pinotSegmentRecordReader.init();
  while (pinotSegmentRecordReader.hasNext()) {
    outputRows.add(pinotSegmentRecordReader.next());
  }
  pinotSegmentRecordReader.close();
  Assert.assertEquals(outputRows.size(), rows.size(), "Number of rows returned by PinotSegmentRecordReader is incorrect");
  for (int i = 0; i < outputRows.size(); i++) {
    GenericRow outputRow = outputRows.get(i);
    GenericRow row = rows.get(i);
    Assert.assertEquals(outputRow.getValue(D_SV_1), row.getValue(D_SV_1));
    Assert.assertEquals(outputRow.getValue(D_MV_1), row.getValue(D_MV_1));
    Assert.assertEquals(outputRow.getValue(M1), row.getValue(M1));
    Assert.assertEquals(outputRow.getValue(M2), row.getValue(M2));
    Assert.assertEquals(outputRow.getValue(TIME), row.getValue(TIME));
  }
}
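The loop above is the general pattern for reading every row back out of a segment. A small refactoring sketch that wraps it in a reusable helper with a try/finally so the reader is closed even if iteration fails; the helper and its File parameter type are assumptions, not part of the original test:

// Hypothetical helper: reads all rows from a segment directory with
// PinotSegmentRecordReader, closing the reader even when iteration throws.
private static List<GenericRow> readAllRows(File segmentIndexDir) throws Exception {
  PinotSegmentRecordReader reader = new PinotSegmentRecordReader(segmentIndexDir);
  reader.init();
  List<GenericRow> rows = new ArrayList<>();
  try {
    while (reader.hasNext()) {
      rows.add(reader.next());
    }
  } finally {
    reader.close();
  }
  return rows;
}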