
Example 61 with ISE

Use of io.druid.java.util.common.ISE in project druid by druid-io.

From the class ScanQueryRunnerTest, method verify().

private static void verify(Iterable<ScanResultValue> expectedResults, Iterable<ScanResultValue> actualResults) {
    Iterator<ScanResultValue> expectedIter = expectedResults.iterator();
    Iterator<ScanResultValue> actualIter = actualResults.iterator();
    while (expectedIter.hasNext()) {
        ScanResultValue expected = expectedIter.next();
        ScanResultValue actual = actualIter.next();
        Assert.assertEquals(expected.getSegmentId(), actual.getSegmentId());
        Set<String> exColumns = Sets.newTreeSet(expected.getColumns());
        Set<String> acColumns = Sets.newTreeSet(actual.getColumns());
        Assert.assertEquals(exColumns, acColumns);
        Iterator<Map<String, Object>> expectedEvts = ((List<Map<String, Object>>) expected.getEvents()).iterator();
        Iterator<Map<String, Object>> actualEvts = ((List<Map<String, Object>>) actual.getEvents()).iterator();
        while (expectedEvts.hasNext()) {
            Map<String, Object> exHolder = expectedEvts.next();
            Map<String, Object> acHolder = actualEvts.next();
            for (Map.Entry<String, Object> ex : exHolder.entrySet()) {
                Object actVal = acHolder.get(ex.getKey());
                // work around current II (IncrementalIndex) limitations:
                // doubles come back where the expected results hold floats
                if (actVal instanceof Double) {
                    actVal = ((Double) actVal).floatValue();
                }
                Assert.assertEquals("invalid value for " + ex.getKey(), ex.getValue(), actVal);
            }
        }
        if (actualEvts.hasNext()) {
            throw new ISE("This event iterator should be exhausted!");
        }
    }
    if (actualIter.hasNext()) {
        throw new ISE("This iterator should be exhausted!");
    }
}
Also used: Set(java.util.Set) List(java.util.List) ISE(io.druid.java.util.common.ISE) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map)
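
Both ISE constructor shapes seen across these examples (a format string with varargs, and a Throwable cause plus a format string) can be exercised in isolation. A minimal sketch; the class and method names are illustrative, not from the druid source:

import com.google.common.io.Files;
import io.druid.java.util.common.ISE;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;

// Illustrative class, not part of the druid source: demonstrates the two
// ISE constructor shapes that appear throughout these examples.
public class IseUsageSketch {

    // Format-only constructor, as in verify() above.
    public static void requireExhausted(Iterator<?> iter) {
        if (iter.hasNext()) {
            throw new ISE("Iterator[%s] should be exhausted!", iter);
        }
    }

    // Cause-plus-format constructor, as in Example 63's readMap(): the
    // checked IOException is wrapped in an unchecked ISE.
    public static String readOrFail(File file) {
        try {
            return Files.asCharSource(file, StandardCharsets.UTF_8).read();
        } catch (IOException e) {
            throw new ISE(e, "Got an exception while reading file [%s]", file);
        }
    }
}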

Example 62 with ISE

Use of io.druid.java.util.common.ISE in project druid by druid-io.

From the class GraphiteEmitter, method emit().

@Override
public void emit(Event event) {
    if (!started.get()) {
        throw new ISE("WTF emit was called while service is not started yet");
    }
    if (event instanceof ServiceMetricEvent) {
        final GraphiteEvent graphiteEvent = graphiteEventConverter.druidEventToGraphite((ServiceMetricEvent) event);
        if (graphiteEvent == null) {
            return;
        }
        try {
            final boolean isSuccessful = eventsQueue.offer(graphiteEvent, graphiteEmitterConfig.getEmitWaitTime(), TimeUnit.MILLISECONDS);
            if (!isSuccessful) {
                if (countLostEvents.getAndIncrement() % 1000 == 0) {
                    log.error("Lost total of [%s] events because of emitter queue is full. Please increase the capacity or/and the consumer frequency", countLostEvents.get());
                }
            }
        } catch (InterruptedException e) {
            log.error(e, "got interrupted with message [%s]", e.getMessage());
            Thread.currentThread().interrupt();
        }
    } else if (!emitterList.isEmpty() && event instanceof AlertEvent) {
        for (Emitter emitter : emitterList) {
            emitter.emit(event);
        }
    } else if (event instanceof AlertEvent) {
        AlertEvent alertEvent = (AlertEvent) event;
        log.error("The following alert is dropped, description is [%s], severity is [%s]", alertEvent.getDescription(), alertEvent.getSeverity());
    } else {
        log.error("unknown event type [%s]", event.getClass());
    }
}
Also used: Emitter(com.metamx.emitter.core.Emitter) AlertEvent(com.metamx.emitter.service.AlertEvent) ISE(io.druid.java.util.common.ISE) ServiceMetricEvent(com.metamx.emitter.service.ServiceMetricEvent)
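
The queueing behavior in emit() (a bounded queue, offer() with a bounded wait, and an error logged only on every 1000th lost event) can be distilled into a few lines. A sketch with hypothetical names, not the actual GraphiteEmitter internals:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical stand-in for the emitter's queueing logic.
public class BoundedEmitQueue<T> {

    private final BlockingQueue<T> queue = new ArrayBlockingQueue<>(10000);
    private final AtomicLong countLostEvents = new AtomicLong();

    public void emit(T event, long emitWaitMillis) throws InterruptedException {
        // offer() blocks for at most emitWaitMillis, then gives up rather
        // than stalling the caller when the consumer falls behind
        if (!queue.offer(event, emitWaitMillis, TimeUnit.MILLISECONDS)) {
            // sample the error log so a persistently full queue cannot
            // flood the logs with one line per lost event
            if (countLostEvents.getAndIncrement() % 1000 == 0) {
                System.err.printf("Lost a total of [%d] events: queue is full%n", countLostEvents.get());
            }
        }
    }
}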

Example 63 with ISE

Use of io.druid.java.util.common.ISE in project druid by druid-io.

From the class WhiteListBasedConverter, method readMap().

private ImmutableSortedMap<String, ImmutableSet<String>> readMap(final String mapPath) {
    String fileContent;
    String actualPath = mapPath;
    try {
        if (Strings.isNullOrEmpty(mapPath)) {
            URL resource = this.getClass().getClassLoader().getResource("defaultWhiteListMap.json");
            actualPath = resource.getFile();
            LOGGER.info("using default whiteList map located at [%s]", actualPath);
            fileContent = Resources.toString(resource, Charset.defaultCharset());
        } else {
            fileContent = Files.asCharSource(new File(mapPath), Charset.forName("UTF-8")).read();
        }
        return mapper.reader(new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>() {
        }).readValue(fileContent);
    } catch (IOException e) {
        throw new ISE(e, "Got an exception while parsing file [%s]", actualPath);
    }
}
Also used: ImmutableSet(com.google.common.collect.ImmutableSet) ISE(io.druid.java.util.common.ISE) TypeReference(com.fasterxml.jackson.core.type.TypeReference) IOException(java.io.IOException) File(java.io.File) URL(java.net.URL)
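
Deserializing JSON into Guava immutable collections, as readMap() does, assumes Jackson's GuavaModule is registered on the mapper (the druid mapper registers it elsewhere). A minimal, self-contained sketch; the class name and JSON literal are illustrative:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.guava.GuavaModule;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;

public class WhiteListMapSketch {

    public static void main(String[] args) throws Exception {
        // GuavaModule teaches Jackson to build Guava immutable types
        ObjectMapper mapper = new ObjectMapper().registerModule(new GuavaModule());
        String json = "{\"druid/broker\": [\"query/time\"]}";
        // the anonymous TypeReference preserves the generic type so Jackson
        // can reconstruct the ImmutableSortedMap<String, ImmutableSet<String>>
        ImmutableSortedMap<String, ImmutableSet<String>> whiteList = mapper.readValue(json, new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>() {
        });
        System.out.println(whiteList);
    }
}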

Example 64 with ISE

Use of io.druid.java.util.common.ISE in project druid by druid-io.

From the class ScanQueryEngine, method process().

public Sequence<ScanResultValue> process(final ScanQuery query, final Segment segment, final Map<String, Object> responseContext) {
    if (responseContext.get(ScanQueryRunnerFactory.CTX_COUNT) != null) {
        int count = (int) responseContext.get(ScanQueryRunnerFactory.CTX_COUNT);
        if (count >= query.getLimit()) {
            return Sequences.empty();
        }
    }
    final Long timeoutAt = (long) responseContext.get(ScanQueryRunnerFactory.CTX_TIMEOUT_AT);
    final long start = System.currentTimeMillis();
    final StorageAdapter adapter = segment.asStorageAdapter();
    if (adapter == null) {
        throw new ISE("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }
    List<String> allDims = Lists.newLinkedList(adapter.getAvailableDimensions());
    List<String> allMetrics = Lists.newLinkedList(adapter.getAvailableMetrics());
    final List<String> allColumns = Lists.newLinkedList();
    if (query.getColumns() != null && !query.getColumns().isEmpty()) {
        if (!query.getColumns().contains(ScanResultValue.timestampKey)) {
            allColumns.add(ScanResultValue.timestampKey);
        }
        allColumns.addAll(query.getColumns());
        allDims.retainAll(query.getColumns());
        allMetrics.retainAll(query.getColumns());
    } else {
        if (!allDims.contains(ScanResultValue.timestampKey)) {
            allColumns.add(ScanResultValue.timestampKey);
        }
        allColumns.addAll(allDims);
        allColumns.addAll(allMetrics);
    }
    final List<DimensionSpec> dims = DefaultDimensionSpec.toSpec(allDims);
    final List<String> metrics = allMetrics;
    final List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
    Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got [%s]", intervals);
    final String segmentId = segment.getIdentifier();
    final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
    if (responseContext.get(ScanQueryRunnerFactory.CTX_COUNT) == null) {
        responseContext.put(ScanQueryRunnerFactory.CTX_COUNT, 0);
    }
    final int limit = query.getLimit() - (int) responseContext.get(ScanQueryRunnerFactory.CTX_COUNT);
    return Sequences.concat(Sequences.map(adapter.makeCursors(filter, intervals.get(0), VirtualColumns.EMPTY, Granularities.ALL, query.isDescending()), new Function<Cursor, Sequence<ScanResultValue>>() {

        @Override
        public Sequence<ScanResultValue> apply(final Cursor cursor) {
            return new BaseSequence<>(new BaseSequence.IteratorMaker<ScanResultValue, Iterator<ScanResultValue>>() {

                @Override
                public Iterator<ScanResultValue> make() {
                    final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME);
                    final List<ColumnSelectorPlus<SelectQueryEngine.SelectColumnSelectorStrategy>> selectorPlusList = Arrays.asList(DimensionHandlerUtils.createColumnSelectorPluses(STRATEGY_FACTORY, Lists.newArrayList(dims), cursor));
                    final Map<String, ObjectColumnSelector> metSelectors = Maps.newHashMap();
                    for (String metric : metrics) {
                        final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric);
                        metSelectors.put(metric, metricSelector);
                    }
                    final int batchSize = query.getBatchSize();
                    return new Iterator<ScanResultValue>() {

                        private int offset = 0;

                        @Override
                        public boolean hasNext() {
                            return !cursor.isDone() && offset < limit;
                        }

                        @Override
                        public ScanResultValue next() {
                            if (System.currentTimeMillis() >= timeoutAt) {
                                throw new QueryInterruptedException(new TimeoutException());
                            }
                            int lastOffset = offset;
                            Object events = null;
                            String resultFormat = query.getResultFormat();
                            if (ScanQuery.RESULT_FORMAT_VALUE_VECTOR.equals(resultFormat)) {
                                throw new UnsupportedOperationException("valueVector is not supported yet");
                            } else if (ScanQuery.RESULT_FORMAT_COMPACTED_LIST.equals(resultFormat)) {
                                events = rowsToCompactedList();
                            } else {
                                events = rowsToList();
                            }
                            responseContext.put(ScanQueryRunnerFactory.CTX_COUNT, (int) responseContext.get(ScanQueryRunnerFactory.CTX_COUNT) + (offset - lastOffset));
                            responseContext.put(ScanQueryRunnerFactory.CTX_TIMEOUT_AT, timeoutAt - (System.currentTimeMillis() - start));
                            return new ScanResultValue(segmentId, allColumns, events);
                        }

                        @Override
                        public void remove() {
                            throw new UnsupportedOperationException();
                        }

                        private Object rowsToCompactedList() {
                            return Lists.transform((List<Map<String, Object>>) rowsToList(), new Function<Map<String, Object>, Object>() {

                                @Override
                                public Object apply(Map<String, Object> input) {
                                    List eventValues = Lists.newArrayListWithExpectedSize(allColumns.size());
                                    for (String expectedColumn : allColumns) {
                                        eventValues.add(input.get(expectedColumn));
                                    }
                                    return eventValues;
                                }
                            });
                        }

                        private Object rowsToList() {
                            List<Map<String, Object>> events = Lists.newArrayListWithCapacity(batchSize);
                            for (int i = 0; !cursor.isDone() && i < batchSize && offset < limit; cursor.advance(), i++, offset++) {
                                final Map<String, Object> theEvent = SelectQueryEngine.singleEvent(ScanResultValue.timestampKey, timestampColumnSelector, selectorPlusList, metSelectors);
                                events.add(theEvent);
                            }
                            return events;
                        }

                        private Object rowsToValueVector() {
                            // only lists are supported for now; ValueVector or Arrow could be added in the future
                            return rowsToList();
                        }
                    };
                }

                @Override
                public void cleanup(Iterator<ScanResultValue> iterFromMake) {
                }
            });
        }
    }));
}
Also used: DimensionSpec(io.druid.query.dimension.DimensionSpec) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) StorageAdapter(io.druid.segment.StorageAdapter) Cursor(io.druid.segment.Cursor) Function(com.google.common.base.Function) Iterator(java.util.Iterator) ISE(io.druid.java.util.common.ISE) LongColumnSelector(io.druid.segment.LongColumnSelector) List(java.util.List) ObjectColumnSelector(io.druid.segment.ObjectColumnSelector) QueryInterruptedException(io.druid.query.QueryInterruptedException) TimeoutException(java.util.concurrent.TimeoutException) BaseSequence(io.druid.java.util.common.guava.BaseSequence) Filter(io.druid.query.filter.Filter) Map(java.util.Map) Interval(org.joda.time.Interval)
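
Two pieces of shared state drive process(): CTX_COUNT, the number of rows already returned (so the overall query limit holds across segments), and CTX_TIMEOUT_AT, a deadline that shrinks as time is spent. A simplified, hypothetical distillation of that bookkeeping, not the actual engine code:

import java.util.Map;
import java.util.concurrent.TimeoutException;

// Hypothetical distillation of the CTX_COUNT / CTX_TIMEOUT_AT pattern.
public class ScanBookkeepingSketch {

    // Rows this segment may still produce; 0 when the limit is already met.
    static int remainingLimit(Map<String, Object> responseContext, String countKey, int queryLimit) {
        Object count = responseContext.get(countKey);
        return Math.max(0, queryLimit - (count == null ? 0 : (int) count));
    }

    // Per batch: fail if past the deadline, then charge the produced rows
    // against the shared count and shrink the remaining deadline.
    static void recordBatch(Map<String, Object> responseContext, String countKey, String timeoutKey, int rowsProduced, long startMillis) throws TimeoutException {
        long timeoutAt = (long) responseContext.get(timeoutKey);
        if (System.currentTimeMillis() >= timeoutAt) {
            throw new TimeoutException();
        }
        responseContext.put(countKey, (int) responseContext.get(countKey) + rowsProduced);
        responseContext.put(timeoutKey, timeoutAt - (System.currentTimeMillis() - startMillis));
    }
}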

Example 65 with ISE

Use of io.druid.java.util.common.ISE in project druid by druid-io.

From the class BatchServerInventoryView, method updateInnerInventory().

@Override
protected DruidServer updateInnerInventory(DruidServer container, String inventoryKey, Set<DataSegment> inventory) {
    Set<DataSegment> filteredInventory = filterInventory(container, inventory);
    Set<DataSegment> existing = zNodes.get(inventoryKey);
    if (existing == null) {
        throw new ISE("Trying to update an inventoryKey[%s] that didn't exist?!", inventoryKey);
    }
    for (DataSegment segment : Sets.difference(filteredInventory, existing)) {
        addSingleInventory(container, segment);
    }
    for (DataSegment segment : Sets.difference(existing, filteredInventory)) {
        removeSingleInventory(container, segment.getIdentifier());
    }
    zNodes.put(inventoryKey, filteredInventory);
    return container;
}
Also used: ISE(io.druid.java.util.common.ISE) DataSegment(io.druid.timeline.DataSegment)
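
The add/remove reconciliation here generalizes to any pair of snapshots: apply additions from the set difference in one direction and removals from the other. A generic sketch with hypothetical names, built on the same Guava Sets.difference used above:

import com.google.common.collect.Sets;

import java.util.Set;
import java.util.function.Consumer;

public class SetReconcilerSketch {

    // Mirrors updateInnerInventory(): onAdd for elements only in desired,
    // onRemove for elements only in existing.
    public static <T> void reconcile(Set<T> existing, Set<T> desired, Consumer<T> onAdd, Consumer<T> onRemove) {
        for (T added : Sets.difference(desired, existing)) {
            onAdd.accept(added);
        }
        for (T removed : Sets.difference(existing, desired)) {
            onRemove.accept(removed);
        }
    }
}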

Aggregations

ISE (io.druid.java.util.common.ISE): 158
IOException (java.io.IOException): 37
Map (java.util.Map): 23
Test (org.junit.Test): 21
File (java.io.File): 20
List (java.util.List): 19
DateTime (org.joda.time.DateTime): 18
ArrayList (java.util.ArrayList): 17
DataSegment (io.druid.timeline.DataSegment): 15
Interval (org.joda.time.Interval): 15
Function (com.google.common.base.Function): 14
TimeoutException (java.util.concurrent.TimeoutException): 12
IAE (io.druid.java.util.common.IAE): 10
HashMap (java.util.HashMap): 10
ExecutionException (java.util.concurrent.ExecutionException): 10
Stopwatch (com.google.common.base.Stopwatch): 9
DimensionSpec (io.druid.query.dimension.DimensionSpec): 9
ImmutableMap (com.google.common.collect.ImmutableMap): 8
ListenableFuture (com.google.common.util.concurrent.ListenableFuture): 8
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 8