Use of javax.annotation.Nullable in project druid by druid-io: the class TimeseriesQueryQueryToolChest, method getCacheStrategy.
@Override
public CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> getCacheStrategy(final TimeseriesQuery query) {
  return new CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery>() {
    private final List<AggregatorFactory> aggs = query.getAggregatorSpecs();

    @Override
    public boolean isCacheable(TimeseriesQuery query, boolean willMergeRunners) {
      return true;
    }

    @Override
    public byte[] computeCacheKey(TimeseriesQuery query) {
      return new CacheKeyBuilder(TIMESERIES_QUERY)
          .appendBoolean(query.isDescending())
          .appendBoolean(query.isSkipEmptyBuckets())
          .appendCacheable(query.getGranularity())
          .appendCacheable(query.getDimensionsFilter())
          .appendCacheablesIgnoringOrder(query.getAggregatorSpecs())
          .appendCacheable(query.getVirtualColumns())
          .build();
    }

    @Override
    public TypeReference<Object> getCacheObjectClazz() {
      return OBJECT_TYPE_REFERENCE;
    }

    @Override
    public Function<Result<TimeseriesResultValue>, Object> prepareForCache() {
      return new Function<Result<TimeseriesResultValue>, Object>() {
        @Override
        public Object apply(final Result<TimeseriesResultValue> input) {
          TimeseriesResultValue results = input.getValue();
          final List<Object> retVal = Lists.newArrayListWithCapacity(1 + aggs.size());
          retVal.add(input.getTimestamp().getMillis());
          for (AggregatorFactory agg : aggs) {
            retVal.add(results.getMetric(agg.getName()));
          }
          return retVal;
        }
      };
    }

    @Override
    public Function<Object, Result<TimeseriesResultValue>> pullFromCache() {
      return new Function<Object, Result<TimeseriesResultValue>>() {
        private final Granularity granularity = query.getGranularity();

        @Override
        public Result<TimeseriesResultValue> apply(@Nullable Object input) {
          List<Object> results = (List<Object>) input;
          Map<String, Object> retVal = Maps.newLinkedHashMap();
          Iterator<AggregatorFactory> aggsIter = aggs.iterator();
          Iterator<Object> resultIter = results.iterator();
          DateTime timestamp = granularity.toDateTime(((Number) resultIter.next()).longValue());
          while (aggsIter.hasNext() && resultIter.hasNext()) {
            final AggregatorFactory factory = aggsIter.next();
            retVal.put(factory.getName(), factory.deserialize(resultIter.next()));
          }
          return new Result<TimeseriesResultValue>(timestamp, new TimeseriesResultValue(retVal));
        }
      };
    }
  };
}
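The two functions form a round trip: prepareForCache flattens each Result into a compact list (timestamp millis first, then one value per aggregator, in spec order), and pullFromCache walks that list positionally to rebuild the result. A minimal sketch of the cached shape, with hypothetical aggregator names and values:

// Hypothetical cached form of one timeseries row, assuming two aggregators
// ("rows", "sum_x") in the order returned by query.getAggregatorSpecs().
List<Object> cached = Arrays.asList(
    1325376000000L, // input.getTimestamp().getMillis()
    42L,            // serialized value of the "rows" aggregator
    3.5             // serialized value of the "sum_x" aggregator
);
// pullFromCache reads the first element as the bucket timestamp, then zips
// the remaining elements with the aggregator factories, calling
// factory.deserialize(...) on each.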
Use of javax.annotation.Nullable in project druid by druid-io: the class IncrementalIndexStorageAdapter, method makeCursors.
@Override
public Sequence<Cursor> makeCursors(final Filter filter, final Interval interval, final VirtualColumns virtualColumns, final Granularity gran, final boolean descending) {
  if (index.isEmpty()) {
    return Sequences.empty();
  }
  Interval actualIntervalTmp = interval;
  final Interval dataInterval = new Interval(getMinTime().getMillis(), gran.bucketEnd(getMaxTime()).getMillis());
  if (!actualIntervalTmp.overlaps(dataInterval)) {
    return Sequences.empty();
  }
  if (actualIntervalTmp.getStart().isBefore(dataInterval.getStart())) {
    actualIntervalTmp = actualIntervalTmp.withStart(dataInterval.getStart());
  }
  if (actualIntervalTmp.getEnd().isAfter(dataInterval.getEnd())) {
    actualIntervalTmp = actualIntervalTmp.withEnd(dataInterval.getEnd());
  }
  final Interval actualInterval = actualIntervalTmp;
  Iterable<Interval> iterable = gran.getIterable(actualInterval);
  if (descending) {
    iterable = Lists.reverse(ImmutableList.copyOf(iterable));
  }
  return Sequences.map(Sequences.simple(iterable), new Function<Interval, Cursor>() {
    EntryHolder currEntry = new EntryHolder();

    @Override
    public Cursor apply(@Nullable final Interval interval) {
      final long timeStart = Math.max(interval.getStartMillis(), actualInterval.getStartMillis());
      return new Cursor() {
        private final ValueMatcher filterMatcher = makeFilterMatcher(filter, this);
        private Iterator<Map.Entry<IncrementalIndex.TimeAndDims, Integer>> baseIter;
        private Iterable<Map.Entry<IncrementalIndex.TimeAndDims, Integer>> cursorIterable;
        private boolean emptyRange;
        final DateTime time;
        int numAdvanced = -1;
        boolean done;

        {
          cursorIterable = index.getFacts().timeRangeIterable(descending, timeStart, Math.min(actualInterval.getEndMillis(), gran.increment(interval.getStart()).getMillis()));
          emptyRange = !cursorIterable.iterator().hasNext();
          time = gran.toDateTime(interval.getStartMillis());
          reset();
        }
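        // The instance initializer above narrows the facts table to just this
        // granularity bucket (clamped to actualInterval) and calls reset() so
        // the cursor starts positioned on the first filter-matching row.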
        @Override
        public DateTime getTime() {
          return time;
        }

        @Override
        public void advance() {
          if (!baseIter.hasNext()) {
            done = true;
            return;
          }
          while (baseIter.hasNext()) {
            BaseQuery.checkInterrupted();
            currEntry.set(baseIter.next());
            if (filterMatcher.matches()) {
              return;
            }
          }
          if (!filterMatcher.matches()) {
            done = true;
          }
        }

        @Override
        public void advanceUninterruptibly() {
          if (!baseIter.hasNext()) {
            done = true;
            return;
          }
          while (baseIter.hasNext()) {
            if (Thread.currentThread().isInterrupted()) {
              return;
            }
            currEntry.set(baseIter.next());
            if (filterMatcher.matches()) {
              return;
            }
          }
          if (!filterMatcher.matches()) {
            done = true;
          }
        }

        @Override
        public void advanceTo(int offset) {
          int count = 0;
          while (count < offset && !isDone()) {
            advance();
            count++;
          }
        }

        @Override
        public boolean isDone() {
          return done;
        }

        @Override
        public boolean isDoneOrInterrupted() {
          return isDone() || Thread.currentThread().isInterrupted();
        }

        @Override
        public void reset() {
          baseIter = cursorIterable.iterator();
          if (numAdvanced == -1) {
            numAdvanced = 0;
          } else {
            Iterators.advance(baseIter, numAdvanced);
          }
          BaseQuery.checkInterrupted();
          boolean foundMatched = false;
          while (baseIter.hasNext()) {
            currEntry.set(baseIter.next());
            if (filterMatcher.matches()) {
              foundMatched = true;
              break;
            }
            numAdvanced++;
          }
          done = !foundMatched && (emptyRange || !baseIter.hasNext());
        }
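        // numAdvanced records how many non-matching rows reset() skipped, so a
        // subsequent reset() can fast-forward a fresh iterator to the same
        // position with Iterators.advance() before resuming the scan.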
        @Override
        public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) {
          if (virtualColumns.exists(dimensionSpec.getDimension())) {
            return virtualColumns.makeDimensionSelector(dimensionSpec, this);
          }
          return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec));
        }

        private DimensionSelector makeDimensionSelectorUndecorated(DimensionSpec dimensionSpec) {
          final String dimension = dimensionSpec.getDimension();
          final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
          if (dimension.equals(Column.TIME_COLUMN_NAME)) {
            return new SingleScanTimeDimSelector(makeLongColumnSelector(dimension), extractionFn, descending);
          }
          final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(dimensionSpec.getDimension());
          if (dimensionDesc == null) {
            // not a dimension, column may be a metric
            ColumnCapabilities capabilities = getColumnCapabilities(dimension);
            if (capabilities == null) {
              return NullDimensionSelector.instance();
            }
            if (capabilities.getType() == ValueType.LONG) {
              return new LongWrappingDimensionSelector(makeLongColumnSelector(dimension), extractionFn);
            }
            if (capabilities.getType() == ValueType.FLOAT) {
              return new FloatWrappingDimensionSelector(makeFloatColumnSelector(dimension), extractionFn);
            }
            // if we can't wrap the base column, just return a column of all nulls
            return NullDimensionSelector.instance();
          } else {
            final DimensionIndexer indexer = dimensionDesc.getIndexer();
            return indexer.makeDimensionSelector(dimensionSpec, currEntry, dimensionDesc);
          }
        }
        @Override
        public FloatColumnSelector makeFloatColumnSelector(String columnName) {
          if (virtualColumns.exists(columnName)) {
            return virtualColumns.makeFloatColumnSelector(columnName, this);
          }
          final Integer dimIndex = index.getDimensionIndex(columnName);
          if (dimIndex != null) {
            final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(columnName);
            final DimensionIndexer indexer = dimensionDesc.getIndexer();
            return indexer.makeFloatColumnSelector(currEntry, dimensionDesc);
          }
          final Integer metricIndexInt = index.getMetricIndex(columnName);
          if (metricIndexInt == null) {
            return ZeroFloatColumnSelector.instance();
          }
          final int metricIndex = metricIndexInt;
          return new FloatColumnSelector() {
            @Override
            public float get() {
              return index.getMetricFloatValue(currEntry.getValue(), metricIndex);
            }

            @Override
            public void inspectRuntimeShape(RuntimeShapeInspector inspector) {
              inspector.visit("index", index);
            }
          };
        }
        @Override
        public LongColumnSelector makeLongColumnSelector(String columnName) {
          if (virtualColumns.exists(columnName)) {
            return virtualColumns.makeLongColumnSelector(columnName, this);
          }
          if (columnName.equals(Column.TIME_COLUMN_NAME)) {
            class TimeLongColumnSelector implements LongColumnSelector {
              @Override
              public long get() {
                return currEntry.getKey().getTimestamp();
              }

              @Override
              public void inspectRuntimeShape(RuntimeShapeInspector inspector) {
              }
            }
            return new TimeLongColumnSelector();
          }
          final Integer dimIndex = index.getDimensionIndex(columnName);
          if (dimIndex != null) {
            final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(columnName);
            final DimensionIndexer indexer = dimensionDesc.getIndexer();
            return indexer.makeLongColumnSelector(currEntry, dimensionDesc);
          }
          final Integer metricIndexInt = index.getMetricIndex(columnName);
          if (metricIndexInt == null) {
            return ZeroLongColumnSelector.instance();
          }
          final int metricIndex = metricIndexInt;
          return new LongColumnSelector() {
            @Override
            public long get() {
              return index.getMetricLongValue(currEntry.getValue(), metricIndex);
            }

            @Override
            public void inspectRuntimeShape(RuntimeShapeInspector inspector) {
              inspector.visit("index", index);
            }
          };
        }
        @Override
        public ObjectColumnSelector makeObjectColumnSelector(String column) {
          if (virtualColumns.exists(column)) {
            return virtualColumns.makeObjectColumnSelector(column, this);
          }
          if (column.equals(Column.TIME_COLUMN_NAME)) {
            return new ObjectColumnSelector<Long>() {
              @Override
              public Class classOfObject() {
                return Long.TYPE;
              }

              @Override
              public Long get() {
                return currEntry.getKey().getTimestamp();
              }
            };
          }
          final Integer metricIndexInt = index.getMetricIndex(column);
          if (metricIndexInt != null) {
            final int metricIndex = metricIndexInt;
            final Class classOfObject = index.getMetricClass(column);
            return new ObjectColumnSelector() {
              @Override
              public Class classOfObject() {
                return classOfObject;
              }

              @Override
              public Object get() {
                return index.getMetricObjectValue(currEntry.getValue(), metricIndex);
              }
            };
          }
          IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(column);
          if (dimensionDesc == null) {
            return null;
          } else {
            final int dimensionIndex = dimensionDesc.getIndex();
            final DimensionIndexer indexer = dimensionDesc.getIndexer();
            return new ObjectColumnSelector<Object>() {
              @Override
              public Class classOfObject() {
                return Object.class;
              }

              @Override
              public Object get() {
                IncrementalIndex.TimeAndDims key = currEntry.getKey();
                if (key == null) {
                  return null;
                }
                Object[] dims = key.getDims();
                if (dimensionIndex >= dims.length) {
                  return null;
                }
                return indexer.convertUnsortedEncodedKeyComponentToActualArrayOrList(dims[dimensionIndex], DimensionIndexer.ARRAY);
              }
            };
          }
        }
        @Nullable
        @Override
        public ColumnCapabilities getColumnCapabilities(String columnName) {
          if (virtualColumns.exists(columnName)) {
            return virtualColumns.getColumnCapabilities(columnName);
          }
          return index.getCapabilities(columnName);
        }
      };
    }
  });
}
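As a usage note, a Sequence<Cursor> produced this way is normally consumed with the standard reset/isDone/advance loop. A minimal sketch, where the cursors variable and the row-counting body are illustrative rather than part of the adapter:

// Count the rows visible through each cursor. Only the loop structure is
// prescribed by the Cursor contract; what happens per row is up to the caller.
Sequence<Integer> rowCounts = Sequences.map(cursors, new Function<Cursor, Integer>() {
  @Override
  public Integer apply(Cursor cursor) {
    int rows = 0;
    while (!cursor.isDone()) {
      // a real caller would read values from selectors made by the cursor here
      rows++;
      cursor.advance();
    }
    return rows;
  }
});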
Use of javax.annotation.Nullable in project druid by druid-io: the class FilteredAggregatorTest, method makeColumnSelector.
private ColumnSelectorFactory makeColumnSelector(final TestFloatColumnSelector selector) {
  return new ColumnSelectorFactory() {
    @Override
    public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) {
      final String dimensionName = dimensionSpec.getDimension();
      final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
      if (dimensionName.equals("dim")) {
        return dimensionSpec.decorate(new DimensionSelector() {
          @Override
          public IndexedInts getRow() {
            if (selector.getIndex() % 3 == 2) {
              return ArrayBasedIndexedInts.of(new int[] { 1 });
            } else {
              return ArrayBasedIndexedInts.of(new int[] { 0 });
            }
          }

          @Override
          public ValueMatcher makeValueMatcher(String value) {
            return DimensionSelectorUtils.makeValueMatcherGeneric(this, value);
          }

          @Override
          public ValueMatcher makeValueMatcher(Predicate<String> predicate) {
            return DimensionSelectorUtils.makeValueMatcherGeneric(this, predicate);
          }

          @Override
          public int getValueCardinality() {
            return 2;
          }

          @Override
          public String lookupName(int id) {
            switch (id) {
              case 0:
                return "a";
              case 1:
                return "b";
              default:
                throw new IllegalArgumentException();
            }
          }

          @Override
          public boolean nameLookupPossibleInAdvance() {
            return true;
          }

          @Nullable
          @Override
          public IdLookup idLookup() {
            return new IdLookup() {
              @Override
              public int lookupId(String name) {
                switch (name) {
                  case "a":
                    return 0;
                  case "b":
                    return 1;
                  default:
                    throw new IllegalArgumentException();
                }
              }
            };
          }

          @Override
          public void inspectRuntimeShape(RuntimeShapeInspector inspector) {
          }
        });
      } else {
        throw new UnsupportedOperationException();
      }
    }

    @Override
    public LongColumnSelector makeLongColumnSelector(String columnName) {
      throw new UnsupportedOperationException();
    }

    @Override
    public FloatColumnSelector makeFloatColumnSelector(String columnName) {
      if (columnName.equals("value")) {
        return selector;
      } else {
        throw new UnsupportedOperationException();
      }
    }

    @Override
    public ObjectColumnSelector makeObjectColumnSelector(String columnName) {
      throw new UnsupportedOperationException();
    }

    @Override
    public ColumnCapabilities getColumnCapabilities(String columnName) {
      ColumnCapabilitiesImpl caps = new ColumnCapabilitiesImpl();
      if (columnName.equals("value")) {
        caps.setType(ValueType.FLOAT);
        caps.setDictionaryEncoded(false);
        caps.setHasBitmapIndexes(false);
      } else {
        caps.setType(ValueType.STRING);
        caps.setDictionaryEncoded(true);
        caps.setHasBitmapIndexes(true);
      }
      return caps;
    }
  };
}
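For context, a factory like this is typically exercised by wrapping a base aggregator in a filtered one, so that only rows matching the dimension filter are aggregated. A hedged sketch along those lines; the "billy" name, the loop bound, and the increment() helper are illustrative of the test setup, not guaranteed API:

// Sum "value" only for rows where dim == "a"; the DimensionSelector above
// reports "b" on every third row, so those rows are skipped by the filter.
FilteredAggregatorFactory factory = new FilteredAggregatorFactory(
    new DoubleSumAggregatorFactory("billy", "value"),
    new SelectorDimFilter("dim", "a", null)
);
Aggregator agg = factory.factorize(makeColumnSelector(selector));
for (int i = 0; i < 10; i++) {
  agg.aggregate();      // consults the filter's ValueMatcher before delegating
  selector.increment(); // hypothetical helper advancing the test selector
}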
Use of javax.annotation.Nullable in project druid by druid-io: the class IndexIOTest, method constructionFeeder.
@Parameterized.Parameters
public static Iterable<Object[]> constructionFeeder() {
  final Map<String, Object> map = ImmutableMap.<String, Object>of();
  final Map<String, Object> map00 = ImmutableMap.<String, Object>of("dim0", ImmutableList.<String>of("dim00", "dim01"));
  final Map<String, Object> map10 = ImmutableMap.<String, Object>of("dim1", "dim10");
  final Map<String, Object> map0null = new HashMap<>();
  map0null.put("dim0", null);
  final Map<String, Object> map1null = new HashMap<>();
  map1null.put("dim1", null);
  final Map<String, Object> mapAll = ImmutableMap.<String, Object>of("dim0", ImmutableList.<String>of("dim00", "dim01"), "dim1", "dim10");
  final List<Map<String, Object>> maps = ImmutableList.of(map, map00, map10, map0null, map1null, mapAll);
  return Iterables.<Object[]>concat(
      // First iterable tests permutations of the maps which are expected to be equal
      Iterables.<Object[]>concat(new Iterable<Iterable<Object[]>>() {
        @Override
        public Iterator<Iterable<Object[]>> iterator() {
          return new Iterator<Iterable<Object[]>>() {
            long nextBitset = 1L;

            @Override
            public boolean hasNext() {
              return nextBitset < (1L << maps.size());
            }

            @Override
            public Iterable<Object[]> next() {
              final BitSet bitset = BitSet.valueOf(new long[] { nextBitset++ });
              final List<Map<String, Object>> myMaps = filterByBitset(maps, bitset);
              return Collections2.transform(Collections2.permutations(myMaps), new Function<List<Map<String, Object>>, Object[]>() {
                @Nullable
                @Override
                public Object[] apply(List<Map<String, Object>> input) {
                  return new Object[] { input, input, null };
                }
              });
            }

            @Override
            public void remove() {
              throw new UOE("Remove not supported");
            }
          };
        }
      }),
      // Second iterable tests combinations of the maps which may or may not be equal
      Iterables.<Object[]>concat(new Iterable<Iterable<Object[]>>() {
        @Override
        public Iterator<Iterable<Object[]>> iterator() {
          return new Iterator<Iterable<Object[]>>() {
            long nextMap1Bits = 1L;

            @Override
            public boolean hasNext() {
              return nextMap1Bits < (1L << maps.size());
            }

            @Override
            public Iterable<Object[]> next() {
              final BitSet bitset1 = BitSet.valueOf(new long[] { nextMap1Bits++ });
              final List<Map<String, Object>> maplist1 = filterByBitset(maps, bitset1);
              return new Iterable<Object[]>() {
                @Override
                public Iterator<Object[]> iterator() {
                  return new Iterator<Object[]>() {
                    long nextMap2Bits = 1L;

                    @Override
                    public boolean hasNext() {
                      return nextMap2Bits < (1L << maps.size());
                    }

                    @Override
                    public Object[] next() {
                      final List<Map<String, Object>> maplist2 = filterByBitset(maps, BitSet.valueOf(new long[] { nextMap2Bits++ }));
                      return new Object[] { maplist1, maplist2, filterNullValues(maplist1).equals(filterNullValues(maplist2)) ? null : SegmentValidationException.class };
                    }

                    @Override
                    public void remove() {
                      throw new UOE("Remove not supported");
                    }
                  };
                }
              };
            }

            @Override
            public void remove() {
              throw new UOE("Remove not supported");
            }
          };
        }
      }));
}
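The filterByBitset helper is not shown in this excerpt; from its use above, its assumed behavior is to select the subset of maps whose indices correspond to set bits, so sweeping the counter from 1 to 2^n - 1 enumerates every non-empty subset. A sketch under that assumption:

// Assumed shape of the filterByBitset helper referenced above: keep
// maps.get(i) exactly when bit i of the bitset is set.
private static List<Map<String, Object>> filterByBitset(List<Map<String, Object>> maps, BitSet bitset) {
  final List<Map<String, Object>> selected = Lists.newArrayListWithCapacity(bitset.cardinality());
  for (int i = 0; i < maps.size(); ++i) {
    if (bitset.get(i)) {
      selected.add(maps.get(i));
    }
  }
  return selected;
}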
Use of javax.annotation.Nullable in project hive by apache: the class DruidStorageHandler, method commitCreateTable.
@Override
public void commitCreateTable(Table table) throws MetaException {
  if (MetaStoreUtils.isExternalTable(table)) {
    return;
  }
  Lifecycle lifecycle = new Lifecycle();
  LOG.info(String.format("Committing table [%s] to the druid metastore", table.getDbName()));
  final Path tableDir = getSegmentDescriptorDir();
  try {
    List<DataSegment> segmentList = DruidStorageHandlerUtils.getPublishedSegments(tableDir, getConf());
    LOG.info(String.format("Found [%d] segments under path [%s]", segmentList.size(), tableDir));
    druidSqlMetadataStorageUpdaterJobHandler.publishSegments(druidMetadataStorageTablesConfig.getSegmentsTable(), segmentList, DruidStorageHandlerUtils.JSON_MAPPER);
    final String coordinatorAddress = HiveConf.getVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_COORDINATOR_DEFAULT_ADDRESS);
    int maxTries = HiveConf.getIntVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_MAX_TRIES);
    final String dataSourceName = table.getParameters().get(Constants.DRUID_DATA_SOURCE);
    LOG.info(String.format("Checking load status from coordinator [%s]", coordinatorAddress));
    // check if the coordinator is up
    httpClient = makeHttpClient(lifecycle);
    try {
      lifecycle.start();
    } catch (Exception e) {
      Throwables.propagate(e);
    }
    String coordinatorResponse = null;
    try {
      coordinatorResponse = RetryUtils.retry(new Callable<String>() {
        @Override
        public String call() throws Exception {
          return DruidStorageHandlerUtils.getURL(httpClient, new URL(String.format("http://%s/status", coordinatorAddress)));
        }
      }, new Predicate<Throwable>() {
        @Override
        public boolean apply(@Nullable Throwable input) {
          return input instanceof IOException;
        }
      }, maxTries);
    } catch (Exception e) {
      console.printInfo("Will skip waiting for data loading");
      return;
    }
    if (Strings.isNullOrEmpty(coordinatorResponse)) {
      console.printInfo("Will skip waiting for data loading");
      return;
    }
    console.printInfo(String.format("Waiting for the loading of [%s] segments", segmentList.size()));
    long passiveWaitTimeMs = HiveConf.getLongVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_PASSIVE_WAIT_TIME);
    ImmutableSet<URL> setOfUrls = FluentIterable.from(segmentList).transform(new Function<DataSegment, URL>() {
      @Override
      public URL apply(DataSegment dataSegment) {
        try {
          // Need to make sure that we are using UTC since most of the druid cluster use UTC by default
          return new URL(String.format(
              "http://%s/druid/coordinator/v1/datasources/%s/segments/%s",
              coordinatorAddress,
              dataSourceName,
              DataSegment.makeDataSegmentIdentifier(
                  dataSegment.getDataSource(),
                  new DateTime(dataSegment.getInterval().getStartMillis(), DateTimeZone.UTC),
                  new DateTime(dataSegment.getInterval().getEndMillis(), DateTimeZone.UTC),
                  dataSegment.getVersion(),
                  dataSegment.getShardSpec())));
        } catch (MalformedURLException e) {
          Throwables.propagate(e);
        }
        return null;
      }
    }).toSet();
    int numRetries = 0;
    while (numRetries++ < maxTries && !setOfUrls.isEmpty()) {
      setOfUrls = ImmutableSet.copyOf(Sets.filter(setOfUrls, new Predicate<URL>() {
        @Override
        public boolean apply(URL input) {
          try {
            String result = DruidStorageHandlerUtils.getURL(httpClient, input);
            LOG.debug(String.format("Checking segment [%s] response is [%s]", input, result));
            // an empty response means the coordinator does not report the segment as loaded yet
            return Strings.isNullOrEmpty(result);
          } catch (IOException e) {
            LOG.error(String.format("Error while checking URL [%s]", input), e);
            return true;
          }
        }
      }));
      try {
        if (!setOfUrls.isEmpty()) {
          Thread.sleep(passiveWaitTimeMs);
        }
      } catch (InterruptedException e) {
        // restore the interrupt flag before propagating; Thread.interrupted() would clear it
        Thread.currentThread().interrupt();
        Throwables.propagate(e);
      }
    }
    if (!setOfUrls.isEmpty()) {
      // We are not throwing an exception since it might be a transient issue that is blocking loading
      console.printError(String.format("Wait time exhausted and we have [%s] out of [%s] segments not loaded yet", setOfUrls.size(), segmentList.size()));
    }
  } catch (IOException e) {
    LOG.error("Exception while commit", e);
    Throwables.propagate(e);
  } finally {
    cleanWorkingDir();
    lifecycle.stop();
  }
}
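The segment wait loop above is a shrinking-set poll: each pass re-checks only the still-pending segment URLs, drops the ones the coordinator now reports as loaded (non-empty response), and sleeps between passes; leftovers after maxTries are reported but not fatal. A generic sketch of the pattern, where waitForSegments and the isLoaded probe are hypothetical names:

// Shrinking-set poll: keep only items still pending, sleep between passes,
// and give up (non-fatally) after maxTries attempts.
static void waitForSegments(Set<URL> allUrls, int maxTries, long passiveWaitTimeMs) throws InterruptedException {
  Set<URL> pending = new HashSet<>(allUrls);
  for (int attempt = 0; attempt < maxTries && !pending.isEmpty(); attempt++) {
    for (Iterator<URL> it = pending.iterator(); it.hasNext(); ) {
      if (isLoaded(it.next())) { // isLoaded: hypothetical probe of the coordinator's segment endpoint
        it.remove();
      }
    }
    if (!pending.isEmpty()) {
      Thread.sleep(passiveWaitTimeMs);
    }
  }
  // anything left in pending would be logged as not yet loaded, but not treated as an error
}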