Usage example of org.apache.druid.query.aggregation.hyperloglog.HyperUniquesSerde from the druid-io/druid project.
Class: SegmentGenerator — method: generateIncrementalIndex.
/**
 * Builds an in-memory {@link IncrementalIndex} populated with {@code numRows} synthetic rows
 * derived deterministically from the segment id, schema, and granularity.
 */
public IncrementalIndex generateIncrementalIndex(final DataSegment dataSegment, final GeneratorSchemaInfo schemaInfo, final Granularity granularity, final int numRows) {
  // Register the serde up front in case the schema contains hyperUnique metrics.
  ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());

  // Hash over every input that influences the generated data, so identical
  // parameters always produce an identical identifier.
  final String dataHash = Hashing.sha256()
      .newHasher()
      .putString(dataSegment.getId().toString(), StandardCharsets.UTF_8)
      .putString(schemaInfo.toString(), StandardCharsets.UTF_8)
      .putString(granularity.toString(), StandardCharsets.UTF_8)
      .putInt(numRows)
      .hash()
      .toString();

  // Seed with the segment identifier's hashCode so regeneration is deterministic per segment.
  final DataGenerator generator = new DataGenerator(
      schemaInfo.getColumnSchemas(),
      dataSegment.getId().hashCode(),
      schemaInfo.getDataInterval(),
      numRows
  );

  final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
      .withDimensionsSpec(schemaInfo.getDimensionsSpec())
      .withMetrics(schemaInfo.getAggsArray())
      .withRollup(schemaInfo.isWithRollup())
      .withQueryGranularity(granularity)
      .build();

  final List<InputRow> rows = new ArrayList<>();
  int generated = 0;
  while (generated < numRows) {
    rows.add(generator.nextRow());
    generated++;
    // Periodic progress logging for long-running generation.
    if (generated % 20000 == 0) {
      log.info("%,d/%,d rows generated for[%s].", generated, numRows, dataSegment);
    }
  }
  log.info("%,d/%,d rows generated for[%s].", numRows, numRows, dataSegment);

  return makeIncrementalIndex(dataSegment.getId(), dataHash, 0, rows, indexSchema);
}
Usage example of org.apache.druid.query.aggregation.hyperloglog.HyperUniquesSerde from the druid-io/druid project.
Class: ComplexMetricsTest — method: testRegister.
@Test
public void testRegister() {
  // Registering a serde should make it retrievable under the same type name.
  ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());

  final ComplexMetricSerde retrieved = ComplexMetrics.getSerdeForType("hyperUnique");

  Assert.assertNotNull(retrieved);
  Assert.assertTrue(retrieved instanceof HyperUniquesSerde);
}
Usage example of org.apache.druid.query.aggregation.hyperloglog.HyperUniquesSerde from the druid-io/druid project.
Class: ComplexMetricsTest — method: testRegisterDuplicate.
@Test
public void testRegisterDuplicate() {
  // First registration: the serde must be retrievable under the type name.
  ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());
  ComplexMetricSerde retrieved = ComplexMetrics.getSerdeForType("hyperUnique");
  Assert.assertNotNull(retrieved);
  Assert.assertTrue(retrieved instanceof HyperUniquesSerde);

  // Re-registering the same type name must not break the existing mapping.
  ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());
  retrieved = ComplexMetrics.getSerdeForType("hyperUnique");
  Assert.assertNotNull(retrieved);
  Assert.assertTrue(retrieved instanceof HyperUniquesSerde);
}
Usage example of org.apache.druid.query.aggregation.hyperloglog.HyperUniquesSerde from the druid-io/druid project.
Class: ScanBenchmark — method: setup.
/**
 * Setup everything common for benchmarking both the incremental-index and the queriable-index.
 */
@Setup
public void setup() {
  // Fixed: the original had a stray second unary '+' ("AT " + +System.currentTimeMillis()).
  log.info("SETUP CALLED AT " + System.currentTimeMillis());

  // Register the serde in case the benchmark schema contains hyperUnique metrics.
  ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());

  setupQueries();

  // schemaAndQuery is a JMH @Param of the form "<schemaName>.<queryName>".
  String[] schemaQuery = schemaAndQuery.split("\\.");
  String schemaName = schemaQuery[0];
  String queryName = schemaQuery[1];

  schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schemaName);
  queryBuilder = SCHEMA_QUERY_MAP.get(schemaName).get(queryName);
  queryBuilder.limit(limit);
  query = queryBuilder.build();

  // Time-based seed: each benchmark run generates different (but internally consistent) data.
  generator = new DataGenerator(schemaInfo.getColumnSchemas(), System.currentTimeMillis(), schemaInfo.getDataInterval(), rowsPerSegment);

  final ScanQueryConfig config = new ScanQueryConfig().setLegacy(false);
  // NOTE(review): the toolchest uses 'config' (legacy=false) but the factory receives a fresh
  // default ScanQueryConfig — confirm this divergence is intentional.
  factory = new ScanQueryRunnerFactory(new ScanQueryQueryToolChest(config, DefaultGenericQueryMetricsFactory.instance()), new ScanQueryEngine(), new ScanQueryConfig());
}
Usage example of org.apache.druid.query.aggregation.hyperloglog.HyperUniquesSerde from the druid-io/druid project.
Class: IndexMergeBenchmark — method: setup.
// Prepares the queryable indexes to be merged: generates synthetic rows per segment,
// persists each to a temp dir, and loads it back as a QueryableIndex.
@Setup
public void setup() throws IOException {
log.info("SETUP CALLED AT " + System.currentTimeMillis());
indexMergerV9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, getSegmentWriteOutMediumFactory(factoryType));
// Register the serde in case the benchmark schema contains hyperUnique metrics.
ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());
indexesToMerge = new ArrayList<>();
schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema);
for (int i = 0; i < numSegments; i++) {
// Seed varies per segment (RNG_SEED + i) so segments hold distinct but reproducible data.
DataGenerator gen = new DataGenerator(schemaInfo.getColumnSchemas(), RNG_SEED + i, schemaInfo.getDataInterval(), rowsPerSegment);
IncrementalIndex incIndex = makeIncIndex();
gen.addToIndex(incIndex, rowsPerSegment);
// NOTE(review): tmpDir (a field) is reassigned every iteration, so after the loop it points
// only at the last segment's directory — if teardown deletes just this field, the earlier
// temp dirs leak. Confirm against the benchmark's teardown.
tmpDir = FileUtils.createTempDir();
log.info("Using temp dir: " + tmpDir.getAbsolutePath());
// Persist the in-memory index to disk, then reload it as an immutable QueryableIndex.
File indexFile = indexMergerV9.persist(incIndex, tmpDir, new IndexSpec(), null);
QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
indexesToMerge.add(qIndex);
}
}
Aggregations