Usage of com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig in the pinot project by linkedin:
the buildSegment method of the TransformGroupByTest class.
/**
 * Helper method to build a segment with one dimension column containing values
 * from {@link #_dimensionValues}, and one metric column.
 *
 * Also builds the expected group by result as it builds the segments.
 *
 * @param segmentDirName Name of segment directory
 * @param segmentName Name of segment
 * @param schema Schema for segment
 * @return Record reader over the generated rows (the method returns the reader, not the schema)
 * @throws Exception
 */
private RecordReader buildSegment(String segmentDirName, String segmentName, Schema schema) throws Exception {
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  config.setOutDir(segmentDirName);
  config.setFormat(FileFormat.AVRO);
  config.setTableName(TABLE_NAME);
  config.setSegmentName(segmentName);

  // Fixed seed so generated data (and hence expected group-by results) are reproducible.
  Random random = new Random(RANDOM_SEED);
  long currentTimeMillis = System.currentTimeMillis();
  // Divide the day into fixed parts, and decrement time column value by this delta, so as to get
  // continuous days in the input. This gives about 10 days per 10k rows.
  long timeDelta = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS) / 1000;

  final List<GenericRow> data = new ArrayList<>();
  int numDimValues = _dimensionValues.length;
  for (int row = 0; row < NUM_ROWS; row++) {
    HashMap<String, Object> map = new HashMap<>();
    map.put(DIMENSION_NAME, _dimensionValues[random.nextInt(numDimValues)]);
    map.put(METRIC_NAME, random.nextDouble());
    map.put(TIME_COLUMN_NAME, currentTimeMillis);
    currentTimeMillis -= timeDelta;

    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    data.add(genericRow);
  }

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader reader = new TestUtils.GenericRowRecordReader(schema, data);
  driver.init(config, reader);
  driver.build();
  LOGGER.info("Built segment {} at {}", segmentName, segmentDirName);
  return reader;
}
Usage of com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig in the pinot project by linkedin:
the buildSegment method of the TransformExpressionOperatorTest class.
/**
 * Builds a test segment containing {@link #NUM_METRICS} metric columns populated
 * with random data according to the given schema. Every generated value is also
 * recorded into {@link #_values} so tests can verify results against the raw input.
 *
 * @param segmentDirName Name of segment directory
 * @param segmentName Name of segment
 * @param schema Schema for segment
 * @return The schema the segment was built with
 * @throws Exception
 */
private Schema buildSegment(String segmentDirName, String segmentName, Schema schema) throws Exception {
  SegmentGeneratorConfig generatorConfig = new SegmentGeneratorConfig(schema);
  generatorConfig.setOutDir(segmentDirName);
  generatorConfig.setFormat(FileFormat.AVRO);
  generatorConfig.setSegmentName(segmentName);

  // Fixed seed keeps the generated data deterministic across runs.
  Random rng = new Random(RANDOM_SEED);
  final List<GenericRow> rows = new ArrayList<>();
  _values = new double[NUM_ROWS][NUM_METRICS];

  for (int rowId = 0; rowId < NUM_ROWS; rowId++) {
    HashMap<String, Object> columnMap = new HashMap<>();
    // Metric columns: random value, mirrored into _values for verification.
    for (int metricId = 0; metricId < NUM_METRICS; metricId++) {
      String metricName = schema.getMetricFieldSpecs().get(metricId).getName();
      double metricValue = rng.nextInt(MAX_METRIC_VALUE) + rng.nextDouble() + 1.0;
      columnMap.put(metricName, metricValue);
      _values[rowId][metricId] = metricValue;
    }
    GenericRow row = new GenericRow();
    row.init(columnMap);
    rows.add(row);
  }

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader reader = new TestUtils.GenericRowRecordReader(schema, rows);
  driver.init(generatorConfig, reader);
  driver.build();
  LOGGER.info("Built segment {} at {}", segmentName, segmentDirName);
  return schema;
}
Usage of com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig in the pinot project by linkedin:
the setupSegmentFor method of the QueriesSentinelTest class.
/**
 * Builds a segment from the {@link #AVRO_DATA} test resource for the given table,
 * writing it under {@link #INDEX_DIR}. Any previously built index directory is
 * removed first so each setup starts from a clean state.
 *
 * @param table Name of the table the segment belongs to
 * @throws Exception
 */
private void setupSegmentFor(String table) throws Exception {
  final String filePath = TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
  if (INDEX_DIR.exists()) {
    FileUtils.deleteQuietly(INDEX_DIR);
  }
  // forceMkdir throws if the directory cannot be created, instead of silently
  // ignoring the result the way a bare mkdir() call would.
  FileUtils.forceMkdir(INDEX_DIR);
  final SegmentGeneratorConfig config =
      SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(new File(filePath),
          new File(INDEX_DIR, "segment"), "daysSinceEpoch", TimeUnit.DAYS, table);
  final SegmentIndexCreationDriver driver = new SegmentIndexCreationDriverImpl();
  driver.init(config);
  driver.build();
}
Usage of com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig in the pinot project by linkedin:
the setupSegment method of the DefaultAggregationExecutorTest class.
/**
 * Builds the index segment on which the aggregation tests run, then loads it into
 * {@link #_indexSegment}.
 * - Generates a segment with {@link #NUM_METRIC_COLUMNS} and {@link #NUM_ROWS}.
 * - Random 'double' data is written into the metric columns and mirrored into
 *   _inputData[] so tests can verify results against the raw input.
 * - _docIdSet is filled with consecutive document ids 0..NUM_ROWS-1.
 *
 * @throws Exception
 */
private void setupSegment() throws Exception {
  if (INDEX_DIR.exists()) {
    FileUtils.deleteQuietly(INDEX_DIR);
  }

  SegmentGeneratorConfig config = new SegmentGeneratorConfig();
  config.setSegmentName(SEGMENT_NAME);
  config.setOutDir(INDEX_DIR.getAbsolutePath());

  Schema schema = buildSchema();
  config.setSchema(schema);

  final List<GenericRow> rows = new ArrayList<>();
  for (int docId = 0; docId < NUM_ROWS; docId++) {
    Map<String, Object> valueMap = new HashMap<>();
    for (int col = 0; col < _columns.length; col++) {
      double value = _random.nextDouble() * MAX_VALUE;
      _inputData[col][docId] = value;
      valueMap.put(_columns[col], value);
    }
    GenericRow row = new GenericRow();
    row.init(valueMap);
    rows.add(row);
    _docIdSet[docId] = docId;
  }

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader reader = new TestUtils.GenericRowRecordReader(schema, rows);
  driver.init(config, reader);
  driver.build();
  _indexSegment = Loaders.IndexSegment.load(new File(INDEX_DIR, driver.getSegmentName()), ReadMode.heap);
}
Usage of com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig in the pinot project by linkedin:
the buildSegment method of the StringDictionaryPerfTest class.
/**
 * Helper method to build a segment:
 * <ul>
 * <li> Segment contains one string column </li>
 * <li> Row values for the column are randomly generated alphanumeric strings of
 *      length 1 to MAX_STRING_LENGTH </li>
 * </ul>
 * Generated strings are recorded into {@link #_inputStrings} for later lookups.
 *
 * @param dictLength Length of the dictionary (number of unique strings to generate)
 * @throws Exception
 */
public void buildSegment(int dictLength) throws Exception {
  Schema schema = new Schema();
  String segmentName = "perfTestSegment" + System.currentTimeMillis();
  _indexDir = new File(TMP_DIR + File.separator + segmentName);
  _indexDir.deleteOnExit();

  FieldSpec fieldSpec = new DimensionFieldSpec(COLUMN_NAME, FieldSpec.DataType.STRING, true);
  schema.addField(fieldSpec);

  _dictLength = dictLength;
  _inputStrings = new String[dictLength];

  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  config.setOutDir(_indexDir.getParent());
  config.setFormat(FileFormat.AVRO);
  config.setSegmentName(segmentName);

  Random random = new Random(System.nanoTime());
  final List<GenericRow> data = new ArrayList<>(dictLength);
  Set<String> uniqueStrings = new HashSet<>(dictLength);
  int i = 0;
  while (i < dictLength) {
    String randomString = RandomStringUtils.randomAlphanumeric(1 + random.nextInt(MAX_STRING_LENGTH));
    // Set.add() returns false for duplicates; retry until we draw a fresh string.
    if (!uniqueStrings.add(randomString)) {
      continue;
    }
    _inputStrings[i] = randomString;

    HashMap<String, Object> map = new HashMap<>();
    // Key the row value by the schema's column name. The original hard-coded "test",
    // which only matches the schema when COLUMN_NAME == "test".
    map.put(COLUMN_NAME, _inputStrings[i++]);
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    data.add(genericRow);
  }

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  RecordReader reader = getGenericRowRecordReader(schema, data);
  driver.init(config, reader);
  driver.build();
}
Aggregations