Use of io.druid.java.util.common.ISE in project druid by druid-io.
The class QueryableIndexIndexableAdapter, method getRows.
/**
 * Returns a row-by-row view of this queryable index as {@link Rowboat}s, decoding the
 * timestamp, dimension, and metric columns in lockstep.
 *
 * <p>Each call to {@code iterator()} opens its own column resources (timestamps, one
 * sub-column per dimension handler, one column per metric) and registers them with a
 * {@link Closer}, which is closed the first time {@code hasNext()} observes exhaustion.
 */
@Override
public Iterable<Rowboat> getRows() {
    return new Iterable<Rowboat>() {
        @Override
        public Iterator<Rowboat> iterator() {
            return new Iterator<Rowboat>() {
                // Accessor for the __time column; supplies the long timestamp per row.
                final GenericColumn timestamps = input.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn();
                // Per-metric column handles; concrete type depends on the metric's ValueType.
                final Closeable[] metrics;
                // Per-dimension sub-column handles, parallel to `handlers`.
                final Closeable[] columns;
                // Collects every opened column so they can all be released together.
                final Closer closer = Closer.create();
                final int numMetrics = getMetricNames().size();
                final DimensionHandler[] handlers = new DimensionHandler[availableDimensions.size()];
                Collection<DimensionHandler> handlerSet = input.getDimensionHandlers().values();
                int currRow = 0;
                // Guards the close so resources are released exactly once.
                boolean done = false;

                // Instance initializer: opens one sub-column per dimension handler and one
                // column per metric, registering each with the closer.
                {
                    closer.register(timestamps);
                    handlerSet.toArray(handlers);
                    this.columns = FluentIterable.from(handlerSet).transform(new Function<DimensionHandler, Closeable>() {
                        @Override
                        public Closeable apply(DimensionHandler handler) {
                            Column column = input.getColumn(handler.getDimensionName());
                            return handler.getSubColumn(column);
                        }
                    }).toArray(Closeable.class);
                    for (Closeable column : columns) {
                        closer.register(column);
                    }
                    final Indexed<String> availableMetrics = getMetricNames();
                    metrics = new Closeable[availableMetrics.size()];
                    for (int i = 0; i < metrics.length; ++i) {
                        final Column column = input.getColumn(availableMetrics.get(i));
                        final ValueType type = column.getCapabilities().getType();
                        switch(type) {
                            case FLOAT:
                            case LONG:
                                // Numeric metrics are read through the generic (primitive) column view.
                                metrics[i] = column.getGenericColumn();
                                break;
                            case COMPLEX:
                                metrics[i] = column.getComplexColumn();
                                break;
                            default:
                                // Any other value type is not a valid metric column here.
                                throw new ISE("Cannot handle type[%s]", type);
                        }
                    }
                    for (Closeable metricColumn : metrics) {
                        closer.register(metricColumn);
                    }
                }

                @Override
                public boolean hasNext() {
                    final boolean hasNext = currRow < numRows;
                    // Close all column resources the first time exhaustion is observed.
                    // NOTE(review): if a caller abandons iteration before exhausting the
                    // iterator, these resources are never closed — callers appear to be
                    // expected to iterate to completion; confirm against call sites.
                    if (!hasNext && !done) {
                        CloseQuietly.close(closer);
                        done = true;
                    }
                    return hasNext;
                }

                @Override
                public Rowboat next() {
                    if (!hasNext()) {
                        throw new NoSuchElementException();
                    }
                    // Decode each dimension's encoded key component for the current row.
                    final Object[] dims = new Object[columns.length];
                    int dimIndex = 0;
                    for (final Closeable column : columns) {
                        dims[dimIndex] = handlers[dimIndex].getEncodedKeyComponentFromColumn(column, currRow);
                        dimIndex++;
                    }
                    // Read each metric value, dispatching on the concrete column type
                    // that was opened in the initializer (float/long generic or complex).
                    Object[] metricArray = new Object[numMetrics];
                    for (int i = 0; i < metricArray.length; ++i) {
                        if (metrics[i] instanceof IndexedFloatsGenericColumn) {
                            metricArray[i] = ((GenericColumn) metrics[i]).getFloatSingleValueRow(currRow);
                        } else if (metrics[i] instanceof IndexedLongsGenericColumn) {
                            metricArray[i] = ((GenericColumn) metrics[i]).getLongSingleValueRow(currRow);
                        } else if (metrics[i] instanceof ComplexColumn) {
                            metricArray[i] = ((ComplexColumn) metrics[i]).getRowValue(currRow);
                        }
                    }
                    final Rowboat retVal = new Rowboat(timestamps.getLongSingleValueRow(currRow), dims, metricArray, currRow, handlers);
                    ++currRow;
                    return retVal;
                }

                @Override
                public void remove() {
                    // Read-only view of an immutable segment.
                    throw new UnsupportedOperationException();
                }
            };
        }
    };
}
Use of io.druid.java.util.common.ISE in project druid by druid-io.
The class SelectQueryEngine, method process.
/**
 * Executes a select query against a single segment, producing a lazy sequence of
 * paged results.
 *
 * <p>Resolves the dimensions and metrics to scan (falling back to everything the
 * storage adapter exposes when the query specifies none), then walks one cursor per
 * granularity bucket, emitting events from the query's paging offset forward.
 *
 * @param query   the select query to run; must target exactly one interval
 * @param segment the segment to scan
 * @return a sequence of per-granularity-bucket select results
 * @throws ISE if the segment's storage adapter is null (segment being unmapped)
 */
public Sequence<Result<SelectResultValue>> process(final SelectQuery query, final Segment segment) {
    final StorageAdapter adapter = segment.asStorageAdapter();
    if (adapter == null) {
        throw new ISE("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }
    // at the point where this code is called, only one datasource should exist.
    String dataSource = Iterables.getOnlyElement(query.getDataSource().getNames());
    // No explicit dimensions means "all dimensions the segment has".
    final Iterable<DimensionSpec> dims;
    if (query.getDimensions() == null || query.getDimensions().isEmpty()) {
        dims = DefaultDimensionSpec.toSpec(adapter.getAvailableDimensions());
    } else {
        dims = query.getDimensions();
    }
    // Likewise, no explicit metrics means "all metrics the segment has".
    final Iterable<String> metrics;
    if (query.getMetrics() == null || query.getMetrics().isEmpty()) {
        metrics = adapter.getAvailableMetrics();
    } else {
        metrics = query.getMetrics();
    }
    List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
    Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got[%s]", intervals);
    // should be rewritten with given interval
    final String segmentId = DataSegmentUtils.withInterval(dataSource, segment.getIdentifier(), intervals.get(0));
    final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
    return QueryRunnerHelper.makeCursorBasedQuery(adapter, query.getQuerySegmentSpec().getIntervals(), filter, query.getVirtualColumns(), query.isDescending(), query.getGranularity(), new Function<Cursor, Result<SelectResultValue>>() {
        @Override
        public Result<SelectResultValue> apply(Cursor cursor) {
            final SelectResultValueBuilder builder = new SelectResultValueBuilder(cursor.getTime(), query.getPagingSpec(), query.isDescending());
            final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME);
            // One ColumnSelectorPlus per requested dimension, bound to this cursor.
            final List<ColumnSelectorPlus<SelectColumnSelectorStrategy>> selectorPlusList = Arrays.asList(DimensionHandlerUtils.createColumnSelectorPluses(STRATEGY_FACTORY, Lists.newArrayList(dims), cursor));
            for (DimensionSpec dimSpec : dims) {
                builder.addDimension(dimSpec.getOutputName());
            }
            // Build an object selector per metric so raw values can be copied into events.
            final Map<String, ObjectColumnSelector> metSelectors = Maps.newHashMap();
            for (String metric : metrics) {
                final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric);
                metSelectors.put(metric, metricSelector);
                builder.addMetric(metric);
            }
            // Skip rows already consumed by previous pages, then emit until the
            // cursor is exhausted or the page is full.
            final PagingOffset offset = query.getPagingOffset(segmentId);
            cursor.advanceTo(offset.startDelta());
            int lastOffset = offset.startOffset();
            for (; !cursor.isDone() && offset.hasNext(); cursor.advance(), offset.next()) {
                final Map<String, Object> theEvent = singleEvent(EventHolder.timestampKey, timestampColumnSelector, selectorPlusList, metSelectors);
                builder.addEntry(new EventHolder(segmentId, lastOffset = offset.current(), theEvent));
            }
            // Record where this page ended so the next page can resume from there.
            builder.finished(segmentId, lastOffset);
            return builder.build();
        }
    });
}
Use of io.druid.java.util.common.ISE in project druid by druid-io.
The class StringDimensionMergerLegacy, method writeIndexes.
/**
 * Writes the inverted (bitmap) index — and, when the dimension has spatial
 * capabilities, the spatial R-tree index — for this string dimension.
 *
 * <p>Memory-maps the previously written dictionary file, verifies its header names
 * this dimension, then walks every dictionary id in ascending order, merging the
 * per-adapter bitmaps into a single bitmap per value.
 *
 * @param segmentRowNumConversions row-number remappings from each source segment
 *                                 into the merged segment
 * @param closer registers the dictionary unmap so the mapping is released when the
 *               caller's close scope ends
 * @throws IOException if reading the dictionary or writing an index fails
 * @throws ISE if the dictionary file's header does not name this dimension
 */
@Override
public void writeIndexes(List<IntBuffer> segmentRowNumConversions, Closer closer) throws IOException {
    final SerializerUtils serializerUtils = new SerializerUtils();
    long dimStartTime = System.currentTimeMillis();
    final BitmapSerdeFactory bitmapSerdeFactory = indexSpec.getBitmapSerdeFactory();
    String bmpFilename = String.format("%s.inverted", dimensionName);
    bitmapWriter = new GenericIndexedWriter<>(ioPeon, bmpFilename, bitmapSerdeFactory.getObjectStrategy());
    bitmapWriter.open();
    // Map the dictionary written earlier; the unmap is deferred to the caller's
    // closer so the mapping outlives this method (dimVals reads from it below).
    final MappedByteBuffer dimValsMapped = Files.map(dictionaryFile);
    closer.register(new Closeable() {
        @Override
        public void close() throws IOException {
            ByteBufferUtils.unmap(dimValsMapped);
        }
    });
    // The dictionary file starts with the dimension name; mismatch means the
    // on-disk state is inconsistent with this merger.
    if (!dimensionName.equals(serializerUtils.readString(dimValsMapped))) {
        throw new ISE("dimensions[%s] didn't equate!? This is a major WTF moment.", dimensionName);
    }
    Indexed<String> dimVals = GenericIndexed.read(dimValsMapped, GenericIndexed.STRING_STRATEGY);
    log.info("Starting dimension[%s] with cardinality[%,d]", dimensionName, dimVals.size());
    final BitmapFactory bmpFactory = bitmapSerdeFactory.getBitmapFactory();
    // Spatial index is only built when the dimension's capabilities call for it.
    RTree tree = null;
    spatialWriter = null;
    boolean hasSpatial = capabilities.hasSpatialIndexes();
    if (hasSpatial) {
        String spatialFilename = String.format("%s.spatial", dimensionName);
        spatialWriter = new ByteBufferWriter<>(ioPeon, spatialFilename, new IndexedRTree.ImmutableRTreeObjectStrategy(bmpFactory));
        spatialWriter.open();
        tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50, bmpFactory), bmpFactory);
    }
    // One seeker per source adapter, translating merged dictionary ids back to
    // each adapter's local ids.
    IndexSeeker[] dictIdSeeker = toIndexSeekers(adapters, dimConversions, dimensionName);
    //Iterate all dim values's dictionary id in ascending order which in line with dim values's compare result.
    for (int dictId = 0; dictId < dimVals.size(); dictId++) {
        progress.progress();
        mergeBitmaps(segmentRowNumConversions, dimVals, bmpFactory, tree, hasSpatial, dictIdSeeker, dictId, adapters, dimensionName, nullRowsBitmap, bitmapWriter);
    }
    log.info("Completed dimension[%s] in %,d millis.", dimensionName, System.currentTimeMillis() - dimStartTime);
    if (hasSpatial) {
        spatialWriter.write(ImmutableRTree.newImmutableFromMutable(tree));
    }
}
Use of io.druid.java.util.common.ISE in project druid by druid-io.
The class IndexMergerV9, method makeMetricsColumns.
/**
 * Serializes every merged metric column into the smoosher.
 *
 * <p>Each metric's writer is closed, wrapped in a {@link ColumnDescriptor} whose
 * serde matches the metric's value type, and written out as a named column.
 *
 * @param v9Smoosher        destination for the serialized columns
 * @param progress          progress tracker; brackets the work in a named section
 * @param mergedMetrics     metric names, parallel to {@code metWriters}
 * @param metricsValueTypes value type per metric name
 * @param metricTypeNames   complex type name per metric name (COMPLEX metrics only)
 * @param metWriters        one serializer per metric, in {@code mergedMetrics} order
 * @throws IOException if closing a writer or writing a column fails
 * @throws ISE if a metric has a value type other than LONG, FLOAT, or COMPLEX
 */
private void makeMetricsColumns(final FileSmoosher v9Smoosher, final ProgressIndicator progress, final List<String> mergedMetrics, final Map<String, ValueType> metricsValueTypes, final Map<String, String> metricTypeNames, final List<GenericColumnSerializer> metWriters) throws IOException {
    final String section = "make metric columns";
    progress.startSection(section);
    final long allColumnsStartTime = System.currentTimeMillis();

    for (int idx = 0; idx < mergedMetrics.size(); ++idx) {
        final String metricName = mergedMetrics.get(idx);
        final long columnStartTime = System.currentTimeMillis();

        // Finish the writer before describing it; the serde wraps the closed writer.
        final GenericColumnSerializer serializer = metWriters.get(idx);
        serializer.close();

        final ValueType valueType = metricsValueTypes.get(metricName);
        final ColumnDescriptor.Builder descriptorBuilder = ColumnDescriptor.builder();
        switch(valueType) {
            case FLOAT:
                descriptorBuilder
                    .setValueType(ValueType.FLOAT)
                    .addSerde(FloatGenericColumnPartSerde.serializerBuilder().withByteOrder(IndexIO.BYTE_ORDER).withDelegate((FloatColumnSerializer) serializer).build());
                break;
            case LONG:
                descriptorBuilder
                    .setValueType(ValueType.LONG)
                    .addSerde(LongGenericColumnPartSerde.serializerBuilder().withByteOrder(IndexIO.BYTE_ORDER).withDelegate((LongColumnSerializer) serializer).build());
                break;
            case COMPLEX:
                // Complex columns carry their registered type name so readers can
                // locate the matching complex serde.
                descriptorBuilder
                    .setValueType(ValueType.COMPLEX)
                    .addSerde(ComplexColumnPartSerde.serializerBuilder().withTypeName(metricTypeNames.get(metricName)).withDelegate(serializer).build());
                break;
            default:
                throw new ISE("Unknown type[%s]", valueType);
        }

        makeColumn(v9Smoosher, metricName, descriptorBuilder.build());
        log.info("Completed metric column[%s] in %,d millis.", metricName, System.currentTimeMillis() - columnStartTime);
    }

    log.info("Completed metric columns in %,d millis.", System.currentTimeMillis() - allColumnsStartTime);
    progress.stopSection(section);
}
Use of io.druid.java.util.common.ISE in project druid by druid-io.
The class LoggingProgressIndicator, method startSection.
/**
 * Begins timing a named section of work: logs the start and registers a running
 * {@link Stopwatch} for the section.
 *
 * @param section the name of the section to start
 * @throws ISE if a stopwatch for this section is already running
 */
@Override
public void startSection(String section) {
    log.info("[%s]: Starting [%s]", progressName, section);
    // Starting the same section twice is a caller bug; fail loudly.
    if (sections.get(section) != null) {
        throw new ISE("[%s]: Cannot start progress tracker for [%s]. It is already started.", progressName, section);
    }
    sections.put(section, Stopwatch.createStarted());
}
Aggregations