Example usage of io.druid.segment.DimensionHandler in the druid project (druid-io):
the loadDimensionIterable method of the IncrementalIndex class.
/*
 * Currently called to initialize IncrementalIndex dimension order during index creation
 * Index dimension ordering could be changed to initialize from DimensionsSpec after resolution of
 * https://github.com/druid-io/druid/issues/2011
 */
public void loadDimensionIterable(Iterable<String> oldDimensionOrder, Map<String, ColumnCapabilitiesImpl> oldColumnCapabilities) {
  synchronized (dimensionDescs) {
    // Dimension order may only be seeded into an empty index; a non-empty order would be clobbered.
    if (!dimensionDescs.isEmpty()) {
      throw new ISE("Cannot load dimension order when existing order[%s] is not empty.", dimensionDescs.keySet());
    }
    for (String dim : oldDimensionOrder) {
      // Skip entries already registered (guards against duplicates in oldDimensionOrder).
      if (dimensionDescs.get(dim) == null) {
        ColumnCapabilitiesImpl capabilities = oldColumnCapabilities.get(dim);
        if (capabilities == null) {
          // Fail fast with a clear message instead of storing a null capability and
          // hitting a confusing NPE later in handler resolution.
          throw new ISE("Missing column capabilities for dimension[%s].", dim);
        }
        columnCapabilities.put(dim, capabilities);
        DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(dim, capabilities, null);
        addNewDimension(dim, capabilities, handler);
      }
    }
  }
}
Example usage of io.druid.segment.DimensionHandler in the druid project (druid-io):
the testBiggerCompare method of the RowboatTest class.
@Test
public void testBiggerCompare() {
  // Two 14-dimension rows that differ only in dimension 6 (552 vs 553) and
  // dimension 9 (25570 vs 25580); comparing them must not yield 0.
  final DimensionHandler[] handlers = getDefaultHandlers(14);
  final int[] leftValues = { 0, 138, 44, 374, 0, 0, 552, 338, 910, 25570, 9, 0, 0, 0 };
  final int[] rightValues = { 0, 138, 44, 374, 0, 0, 553, 338, 910, 25580, 9, 0, 0, 0 };
  final int[][] leftDims = new int[leftValues.length][];
  final int[][] rightDims = new int[rightValues.length][];
  for (int i = 0; i < leftValues.length; i++) {
    leftDims[i] = new int[] { leftValues[i] };
    rightDims[i] = new int[] { rightValues[i] };
  }
  Rowboat left = new Rowboat(0, leftDims, new Object[] { 1.0, 47.0, "someMetric" }, 0, handlers);
  Rowboat right = new Rowboat(0, rightDims, new Object[] { 1.0, 47.0, "someMetric" }, 0, handlers);
  Assert.assertNotEquals(0, left.compareTo(right));
}
Example usage of io.druid.segment.DimensionHandler in the druid project (druid-io):
the testRowboatCompare method of the RowboatTest class.
@Test
public void testRowboatCompare() {
  // Rows with identical timestamp, dims, metrics, and rowNum must compare equal;
  // changing a single dim value must make the comparison non-zero.
  DimensionHandler[] handlers = getDefaultHandlers(3);
  // Integer.valueOf replaces the deprecated boxing constructor new Integer(7);
  // it is also cache-friendly for small values.
  Rowboat rb1 = new Rowboat(12345L, new int[][] { new int[] { 1 }, new int[] { 2 } }, new Object[] { Integer.valueOf(7) }, 5, handlers);
  Rowboat rb2 = new Rowboat(12345L, new int[][] { new int[] { 1 }, new int[] { 2 } }, new Object[] { Integer.valueOf(7) }, 5, handlers);
  Assert.assertEquals(0, rb1.compareTo(rb2));
  Rowboat rb3 = new Rowboat(12345L, new int[][] { new int[] { 3 }, new int[] { 2 } }, new Object[] { Integer.valueOf(7) }, 5, handlers);
  Assert.assertNotEquals(0, rb1.compareTo(rb3));
}
Example usage of io.druid.segment.DimensionHandler in the druid project (druid-io):
the toTimeAndDims method of the IncrementalIndex class.
/**
 * Converts an InputRow into the TimeAndDims key used to index it, registering any
 * previously-unseen dimensions (with freshly discovered capabilities and handlers)
 * as a side effect. The row's timestamp is truncated to the query granularity and
 * clamped to be no earlier than minTimestamp.
 *
 * @param row the input row; reformatted via formatRow before processing
 * @return the TimeAndDims key for this row
 * @throws IndexSizeExceededException declared for callers; not thrown directly here
 *         (presumably thrown by code invoked downstream — TODO confirm)
 */
@VisibleForTesting
TimeAndDims toTimeAndDims(InputRow row) throws IndexSizeExceededException {
  row = formatRow(row);
  // Rows older than the index's minimum timestamp are rejected outright.
  if (row.getTimestampFromEpoch() < minTimestamp) {
    throw new IAE("Cannot add row[%s] because it is below the minTimestamp[%s]", row, new DateTime(minTimestamp));
  }
  final List<String> rowDimensions = row.getDimensions();
  // dims holds encoded key components for dimensions known before this row;
  // overflow collects components for dimensions first seen on this row.
  Object[] dims;
  List<Object> overflow = null;
  // dimensionDescs guards both the dimension registry and columnCapabilities updates.
  synchronized (dimensionDescs) {
    dims = new Object[dimensionDescs.size()];
    for (String dimension : rowDimensions) {
      boolean wasNewDim = false;
      ColumnCapabilitiesImpl capabilities;
      DimensionDesc desc = dimensionDescs.get(dimension);
      if (desc != null) {
        capabilities = desc.getCapabilities();
      } else {
        wasNewDim = true;
        capabilities = columnCapabilities.get(dimension);
        if (capabilities == null) {
          capabilities = new ColumnCapabilitiesImpl();
          // For schemaless type discovery, assume everything is a String for now, can change later.
          capabilities.setType(ValueType.STRING);
          capabilities.setDictionaryEncoded(true);
          capabilities.setHasBitmapIndexes(true);
          columnCapabilities.put(dimension, capabilities);
        }
        DimensionHandler handler = DimensionHandlerUtils.getHandlerFromCapabilities(dimension, capabilities, null);
        desc = addNewDimension(dimension, capabilities, handler);
      }
      DimensionHandler handler = desc.getHandler();
      DimensionIndexer indexer = desc.getIndexer();
      Object dimsKey = indexer.processRowValsToUnsortedEncodedKeyComponent(row.getRaw(dimension));
      // Set column capabilities as data is coming in
      if (!capabilities.hasMultipleValues() && dimsKey != null && handler.getLengthOfEncodedKeyComponent(dimsKey) > 1) {
        capabilities.setHasMultipleValues(true);
      }
      if (wasNewDim) {
        // New-this-row dimensions have indexes beyond dims.length; stash them separately
        // and append after the loop.
        if (overflow == null) {
          overflow = Lists.newArrayList();
        }
        overflow.add(dimsKey);
      } else if (desc.getIndex() > dims.length || dims[desc.getIndex()] != null) {
        /*
         * index > dims.length requires that we saw this dimension and added it to the dimensionOrder map,
         * otherwise index is null. Since dims is initialized based on the size of dimensionOrder on each call to add,
         * it must have been added to dimensionOrder during this InputRow.
         *
         * if we found an index for this dimension it means we've seen it already. If !(index > dims.length) then
         * we saw it on a previous input row (this its safe to index into dims). If we found a value in
         * the dims array for this index, it means we have seen this dimension already on this input row.
         */
        // NOTE(review): the strict '>' looks like it should be '>=' to guard the array access
        // that follows; in practice a pre-existing dim always has index < dims.length (dims is
        // sized from dimensionDescs at entry), so index == dims.length appears unreachable — confirm.
        throw new ISE("Dimension[%s] occurred more than once in InputRow", dimension);
      } else {
        dims[desc.getIndex()] = dimsKey;
      }
    }
  }
  if (overflow != null) {
    // Merge overflow and non-overflow
    Object[] newDims = new Object[dims.length + overflow.size()];
    System.arraycopy(dims, 0, newDims, 0, dims.length);
    for (int i = 0; i < overflow.size(); ++i) {
      newDims[dims.length + i] = overflow.get(i);
    }
    dims = newDims;
  }
  // Truncate the timestamp to the configured granularity bucket; a null timestamp
  // falls back to 0, which the Math.max below clamps up to minTimestamp.
  long truncated = 0;
  if (row.getTimestamp() != null) {
    truncated = gran.bucketStart(row.getTimestamp()).getMillis();
  }
  return new TimeAndDims(Math.max(truncated, minTimestamp), dims, dimensionDescsList);
}
Example usage of io.druid.segment.DimensionHandler in the druid project (druid-io):
the getRows method of the IncrementalIndexAdapter class.
@Override
public Iterable<Rowboat> getRows() {
  return new Iterable<Rowboat>() {
    @Override
    public Iterator<Rowboat> iterator() {
      // Snapshot the dimension descriptors and index their handlers/indexers by position.
      final List<IncrementalIndex.DimensionDesc> dimList = index.getDimensions();
      final DimensionHandler[] handlerArray = new DimensionHandler[dimList.size()];
      final DimensionIndexer[] indexerArray = new DimensionIndexer[dimList.size()];
      for (IncrementalIndex.DimensionDesc desc : dimList) {
        final int idx = desc.getIndex();
        handlerArray[idx] = desc.getHandler();
        indexerArray[idx] = desc.getIndexer();
      }
      /*
       * A fresh transform function (carrying its own row counter) is built on every
       * iterator() call so that Rowboat numbering always restarts at 0.
       */
      return Iterators.transform(
          index.getFacts().entrySet().iterator(),
          new Function<Map.Entry<IncrementalIndex.TimeAndDims, Integer>, Rowboat>() {
            private int rowNum = 0;

            @Override
            public Rowboat apply(Map.Entry<IncrementalIndex.TimeAndDims, Integer> entry) {
              final IncrementalIndex.TimeAndDims key = entry.getKey();
              final Object[] unsortedDims = key.getDims();
              final int rowOffset = entry.getValue();
              // Re-encode each present dimension value from unsorted to sorted encoding.
              final Object[] sortedDims = new Object[unsortedDims.length];
              for (IncrementalIndex.DimensionDesc desc : dimList) {
                final int idx = desc.getIndex();
                if (idx < unsortedDims.length && unsortedDims[idx] != null) {
                  sortedDims[idx] = indexerArray[idx].convertUnsortedEncodedKeyComponentToSortedEncodedKeyComponent(unsortedDims[idx]);
                }
              }
              // Pull this row's metric values by its offset into the fact store.
              final Object[] metrics = new Object[index.getMetricAggs().length];
              for (int i = 0; i < metrics.length; i++) {
                metrics[i] = index.getMetricObjectValue(rowOffset, i);
              }
              return new Rowboat(key.getTimestamp(), sortedDims, metrics, rowNum++, handlerArray);
            }
          }
      );
    }
  };
}
Aggregations