Use of com.linkedin.pinot.core.data.GenericRow in project pinot by linkedin.
The class RealtimeQueriesSentinelTest, method getRealtimeSegment.
private IndexSegment getRealtimeSegment() throws IOException {
  RealtimeSegmentImpl realtimeSegmentImpl = RealtimeSegmentImplTest.createRealtimeSegmentImpl(PINOT_SCHEMA, 100000,
      "testTable", "testTable_testTable", AVRO_DATA, new ServerMetrics(new MetricsRegistry()));
  realtimeSegmentImpl.setSegmentMetadata(getRealtimeSegmentZKMetadata());
  try {
    DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(
        new File(TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA))));
    GenericRow genericRow = null;
    while (avroReader.hasNext()) {
      GenericRecord avroRecord = avroReader.next();
      genericRow = GenericRow.createOrReuseRow(genericRow);
      genericRow = AVRO_RECORD_TRANSFORMER.transform(avroRecord, genericRow);
      // System.out.println(genericRow);
      realtimeSegmentImpl.index(genericRow);
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
  // System.out.println("Current raw events indexed: " + realtimeSegmentImpl.getRawDocumentCount() + ", totalDocs = "
  //     + realtimeSegmentImpl.getSegmentMetadata().getTotalDocs());
  realtimeSegmentImpl.setSegmentMetadata(getRealtimeSegmentZKMetadata());
  return realtimeSegmentImpl;
}
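The same indexing loop can be driven without Avro by populating a reusable GenericRow by hand. The sketch below is illustrative only: the column names are hypothetical, and it assumes GenericRow exposes a putField(String, Object) setter alongside the createOrReuseRow and index calls already used above.

// Minimal sketch: index hand-built rows into a RealtimeSegmentImpl.
// Assumes GenericRow.putField(String, Object); column names are hypothetical.
GenericRow reusableRow = null;
for (int i = 0; i < 10; i++) {
  reusableRow = GenericRow.createOrReuseRow(reusableRow);
  reusableRow.putField("dimension", "value" + i); // hypothetical column
  reusableRow.putField("metric", (double) i);     // hypothetical column
  realtimeSegmentImpl.index(reusableRow);
}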
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by linkedin.
The class TransformGroupByTest, method testGroupByString.
/**
* Test for group-by with transformed string dimension column.
*/
@Test
public void testGroupByString() throws Exception {
  String query =
      String.format("select sum(%s) from xformSegTable group by ToUpper(%s)", METRIC_NAME, DIMENSION_NAME);
  AggregationGroupByResult groupByResult = executeGroupByQuery(_indexSegment, query);
  Assert.assertNotNull(groupByResult);
  // Compute the expected answer for the query.
  Map<String, Double> expectedValuesMap = new HashMap<>();
  _recordReader.rewind();
  for (int row = 0; row < NUM_ROWS; row++) {
    GenericRow genericRow = _recordReader.next();
    String key = ((String) genericRow.getValue(DIMENSION_NAME)).toUpperCase();
    Double value = (Double) genericRow.getValue(METRIC_NAME);
    Double prevValue = expectedValuesMap.get(key);
    if (prevValue == null) {
      expectedValuesMap.put(key, value);
    } else {
      expectedValuesMap.put(key, prevValue + value);
    }
  }
  compareGroupByResults(groupByResult, expectedValuesMap);
}
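The null-check-then-put accumulation above can also be written with Map.merge from java.util.Map (Java 8+). A minimal, behavior-equivalent sketch of the loop body:

// Equivalent accumulation: sums metric values per upper-cased dimension key.
String key = ((String) genericRow.getValue(DIMENSION_NAME)).toUpperCase();
Double value = (Double) genericRow.getValue(METRIC_NAME);
expectedValuesMap.merge(key, value, Double::sum);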
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by linkedin.
The class TransformGroupByTest, method testTimeRollUp.
/**
* Test for group-by with transformed time column from millis to days.
*
* @throws Exception
*/
@Test
public void testTimeRollUp() throws Exception {
  String query = String.format("select sum(%s) from xformSegTable group by timeConvert(%s, 'MILLISECONDS', 'DAYS')",
      METRIC_NAME, TIME_COLUMN_NAME);
  AggregationGroupByResult groupByResult = executeGroupByQuery(_indexSegment, query);
  Assert.assertNotNull(groupByResult);
  Iterator<GroupKeyGenerator.GroupKey> groupKeyIterator = groupByResult.getGroupKeyIterator();
  Assert.assertNotNull(groupKeyIterator);
  // Compute the expected answer for the query.
  Map<String, Double> expectedValuesMap = new HashMap<>();
  _recordReader.rewind();
  for (int row = 0; row < NUM_ROWS; row++) {
    GenericRow genericRow = _recordReader.next();
    long daysSinceEpoch =
        TimeUnit.DAYS.convert(((Long) genericRow.getValue(TIME_COLUMN_NAME)), TimeUnit.MILLISECONDS);
    Double value = (Double) genericRow.getValue(METRIC_NAME);
    String key = String.valueOf(daysSinceEpoch);
    Double prevValue = expectedValuesMap.get(key);
    if (prevValue == null) {
      expectedValuesMap.put(key, value);
    } else {
      expectedValuesMap.put(key, prevValue + value);
    }
  }
  compareGroupByResults(groupByResult, expectedValuesMap);
}
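The expected group keys are just the epoch-millis timestamps rolled up (truncated) to whole days, which is exactly what TimeUnit.DAYS.convert does. A small worked example with a hypothetical timestamp:

// One day plus one millisecond since epoch truncates to day 1.
long millis = 86_400_001L; // hypothetical input timestamp
long days = TimeUnit.DAYS.convert(millis, TimeUnit.MILLISECONDS); // 1
String groupKey = String.valueOf(days); // "1"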
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by linkedin.
The class PinotSegmentToCsvConverter, method convert.
@Override
public void convert() throws Exception {
  PinotSegmentRecordReader recordReader = new PinotSegmentRecordReader(new File(_segmentDir));
  try {
    recordReader.init();
    try (BufferedWriter recordWriter = new BufferedWriter(new FileWriter(_outputFile))) {
      if (_withHeader) {
        GenericRow row = recordReader.next();
        recordWriter.write(StringUtils.join(row.getFieldNames(), _delimiter));
        recordWriter.newLine();
        recordReader.rewind();
      }
      while (recordReader.hasNext()) {
        GenericRow row = recordReader.next();
        String[] fields = row.getFieldNames();
        List<String> record = new ArrayList<>(fields.length);
        for (String field : fields) {
          Object value = row.getValue(field);
          if (value instanceof Object[]) {
            record.add(StringUtils.join((Object[]) value, _listDelimiter));
          } else {
            record.add(value.toString());
          }
        }
        recordWriter.write(StringUtils.join(record, _delimiter));
        recordWriter.newLine();
      }
    }
  } finally {
    recordReader.close();
  }
}
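The same reader usage works for any ad-hoc segment dump. A minimal sketch that relies only on the PinotSegmentRecordReader calls already used in this method; the segment path is hypothetical:

// Print every row of a segment as "field=value" pairs.
PinotSegmentRecordReader reader = new PinotSegmentRecordReader(new File("/path/to/segmentDir")); // hypothetical path
try {
  reader.init();
  while (reader.hasNext()) {
    GenericRow row = reader.next();
    for (String field : row.getFieldNames()) {
      System.out.println(field + "=" + row.getValue(field));
    }
  }
} finally {
  reader.close();
}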
Use of com.linkedin.pinot.core.data.GenericRow in project pinot by linkedin.
The class PinotSegmentToJsonConverter, method convert.
@Override
public void convert() throws Exception {
  PinotSegmentRecordReader recordReader = new PinotSegmentRecordReader(new File(_segmentDir));
  try {
    recordReader.init();
    try (BufferedWriter recordWriter = new BufferedWriter(new FileWriter(_outputFile))) {
      while (recordReader.hasNext()) {
        GenericRow row = recordReader.next();
        JSONObject record = new JSONObject();
        for (String field : row.getFieldNames()) {
          Object value = row.getValue(field);
          if (value instanceof Object[]) {
            record.put(field, new JSONArray(value));
          } else {
            record.put(field, value);
          }
        }
        recordWriter.write(record.toString());
        recordWriter.newLine();
      }
    }
  } finally {
    recordReader.close();
  }
}
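Because each output line is a standalone JSON object, the file can be read back one line at a time with the same org.json classes. A minimal sketch; the output path and field name are hypothetical, and the fragment assumes the enclosing method declares throws Exception (as convert() does):

// Parse the converter's JSON-lines output back into JSONObjects.
try (BufferedReader jsonReader = new BufferedReader(new FileReader("/path/to/output.json"))) { // hypothetical path
  String line;
  while ((line = jsonReader.readLine()) != null) {
    JSONObject record = new JSONObject(line);
    System.out.println(record.optString("someField")); // hypothetical field name
  }
}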