Example use of org.apache.druid.segment.column.ColumnCapabilities in the Apache Druid project (druid-io): class ExpressionVectorSelectors, method createVectorBindings.
/**
 * Builds a {@link Expr.VectorInputBinding} by attaching a vector selector for every
 * column the analyzed expression requires.
 *
 * Columns whose capabilities are null are skipped entirely; an unbound column is
 * effectively nonexistent and ends up backed by a nil vector selector.
 */
private static Expr.VectorInputBinding createVectorBindings(Expr.BindingAnalysis bindingAnalysis, VectorColumnSelectorFactory vectorColumnSelectorFactory) {
final ExpressionVectorInputBinding binding = new ExpressionVectorInputBinding(vectorColumnSelectorFactory.getReadableVectorInspector());
for (String columnName : bindingAnalysis.getRequiredBindingsList()) {
final ColumnCapabilities capabilities = vectorColumnSelectorFactory.getColumnCapabilities(columnName);
// Unknown column: leave it unbound so it reads as nil.
if (capabilities == null) {
continue;
}
switch (capabilities.getType()) {
case LONG:
// Longs keep their native representation.
binding.addNumeric(columnName, ExpressionType.LONG, vectorColumnSelectorFactory.makeValueSelector(columnName));
break;
case FLOAT:
case DOUBLE:
// Both float and double columns are read as doubles.
binding.addNumeric(columnName, ExpressionType.DOUBLE, vectorColumnSelectorFactory.makeValueSelector(columnName));
break;
default:
// Everything else goes through the object selector as a string binding.
binding.addObjectSelector(columnName, ExpressionType.STRING, vectorColumnSelectorFactory.makeObjectSelector(columnName));
}
}
return binding;
}
Example use of org.apache.druid.segment.column.ColumnCapabilities in the Apache Druid project (druid-io): class ExpressionVirtualColumn, method capabilities.
/**
 * Reports column capabilities for this virtual column, preferring capabilities
 * inferred from the expression plan and falling back to the default capabilities
 * when inference is not possible.
 */
@Override
public ColumnCapabilities capabilities(ColumnInspector inspector, String columnName) {
final ExpressionPlan plan = ExpressionPlanner.plan(inspector, parsedExpression.get());
final ColumnCapabilities inferred = plan.inferColumnCapabilities(outputType);
// Planner could not infer anything: fall back to default capabilities.
if (inferred == null) {
return capabilities(columnName);
}
// The explicit outputType is only a hint; report when the planner disagrees with it.
if (outputType != null && inferred.getType() != outputType.getType()) {
if (inferred.isNumeric() || outputType.isNumeric()) {
// At least one side is numeric; such mismatches are routine, so log quietly.
log.debug("Projected output type %s of expression %s does not match provided type %s", inferred.asTypeString(), expression, outputType);
} else {
// Mismatches involving strings and arrays might be worth knowing about, so warn.
log.warn("Projected output type %s of expression %s does not match provided type %s", inferred.asTypeString(), expression, outputType);
}
}
return inferred;
}
Example use of org.apache.druid.segment.column.ColumnCapabilities in the Apache Druid project (druid-io): class QueryableIndexColumnSelectorFactory, method makeDimensionSelectorUndecorated.
/**
 * Creates a raw (undecorated) dimension selector for the given spec.
 *
 * Fix: reuse the local {@code capabilities} variable instead of calling
 * {@code columnHolder.getCapabilities()} a second time — the original fetched
 * the capabilities into a local and then ignored it on the very next line.
 */
private DimensionSelector makeDimensionSelectorUndecorated(DimensionSpec dimensionSpec) {
final String dimension = dimensionSpec.getDimension();
final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
final ColumnHolder columnHolder = index.getColumnHolder(dimension);
// Missing column: represent it as a constant null dimension.
if (columnHolder == null) {
return DimensionSelector.constant(null, extractionFn);
}
// The time column gets a specialized selector that knows the scan direction.
if (dimension.equals(ColumnHolder.TIME_COLUMN_NAME)) {
return new SingleScanTimeDimensionSelector(makeColumnValueSelector(dimension), extractionFn, descending);
}
final ColumnCapabilities capabilities = columnHolder.getCapabilities();
if (capabilities.isNumeric()) {
return ValueTypes.makeNumericWrappingDimensionSelector(capabilities.getType(), makeColumnValueSelector(dimension), extractionFn);
}
final DictionaryEncodedColumn column = getCachedColumn(dimension, DictionaryEncodedColumn.class);
if (column != null) {
return column.makeDimensionSelector(offset, extractionFn);
} else {
// Not dictionary-encoded and not numeric: treat as a constant null column.
return DimensionSelector.constant(null, extractionFn);
}
}
Example use of org.apache.druid.segment.column.ColumnCapabilities in the Apache Druid project (druid-io): class IncrementalIndexColumnSelectorFactory, method makeDimensionSelectorUndecorated.
/**
 * Creates a raw (undecorated) dimension selector for the given spec against an
 * incremental (in-memory) index.
 *
 * Fix: use the local {@code dimension} variable when looking up the dimension
 * descriptor instead of re-calling {@code dimensionSpec.getDimension()} — the
 * original extracted the name into a local and then bypassed it.
 */
private DimensionSelector makeDimensionSelectorUndecorated(DimensionSpec dimensionSpec) {
final String dimension = dimensionSpec.getDimension();
final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
// The time column gets a specialized selector that knows the scan direction.
if (dimension.equals(ColumnHolder.TIME_COLUMN_NAME)) {
return new SingleScanTimeDimensionSelector(makeColumnValueSelector(dimension), extractionFn, descending);
}
final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(dimension);
if (dimensionDesc == null) {
// not a dimension, column may be a metric
ColumnCapabilities capabilities = getColumnCapabilities(dimension);
if (capabilities == null) {
return DimensionSelector.constant(null, extractionFn);
}
if (capabilities.isNumeric()) {
return ValueTypes.makeNumericWrappingDimensionSelector(capabilities.getType(), makeColumnValueSelector(dimension), extractionFn);
}
// if we can't wrap the base column, just return a column of all nulls
return DimensionSelector.constant(null, extractionFn);
} else {
final DimensionIndexer indexer = dimensionDesc.getIndexer();
return indexer.makeDimensionSelector(dimensionSpec, rowHolder, dimensionDesc);
}
}
Example use of org.apache.druid.segment.column.ColumnCapabilities in the Apache Druid project (druid-io): class LookupSegmentTest, method test_asStorageAdapter_getColumnCapabilitiesV.
@Test
public void test_asStorageAdapter_getColumnCapabilitiesV() {
final ColumnCapabilities vCapabilities = LOOKUP_SEGMENT.asStorageAdapter().getColumnCapabilities("v");
// The "v" column holds single values in practice, but RowBasedStorageAdapter cannot
// report complete single-valued capabilities, so multi-valuedness stays unknown here.
// Improving that would let query engines on lookups use singly-valued optimizations.
Assert.assertEquals(ValueType.STRING, vCapabilities.getType());
Assert.assertTrue(vCapabilities.hasMultipleValues().isUnknown());
Assert.assertFalse(vCapabilities.isDictionaryEncoded().isTrue());
}
Aggregations