Use of org.apache.druid.segment.column.ColumnCapabilities in project druid by apache.
From class RowBasedColumnSelectorFactoryTest, method testCapabilitiesDouble.
@Test
public void testCapabilitiesDouble() {
  ColumnCapabilities caps = RowBasedColumnSelectorFactory.getColumnCapabilities(ROW_SIGNATURE, DOUBLE_COLUMN_NAME);
  Assert.assertEquals(ValueType.DOUBLE, caps.getType());
  Assert.assertFalse(caps.hasBitmapIndexes());
  Assert.assertFalse(caps.isDictionaryEncoded().isTrue());
  Assert.assertFalse(caps.areDictionaryValuesSorted().isTrue());
  Assert.assertFalse(caps.areDictionaryValuesUnique().isTrue());
  Assert.assertFalse(caps.hasMultipleValues().isMaybeTrue());
  Assert.assertFalse(caps.hasSpatialIndexes());
}
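For context, a minimal sketch of the kind of fixture this assertion set relies on; the column name and the signature below are illustrative assumptions, not the actual constants from the test.

// Hypothetical fixture; the real test's ROW_SIGNATURE and column names may differ.
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;

class RowSignatureFixture {
  static final String DOUBLE_COLUMN_NAME = "dbl"; // assumed column name
  static final RowSignature ROW_SIGNATURE = RowSignature.builder()
      .add(DOUBLE_COLUMN_NAME, ColumnType.DOUBLE)
      .build();
}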
Use of org.apache.druid.segment.column.ColumnCapabilities in project druid by apache.
From class RowBasedColumnSelectorFactoryTest, method testCapabilitiesComplex.
@Test
public void testCapabilitiesComplex() {
  ColumnCapabilities caps = RowBasedColumnSelectorFactory.getColumnCapabilities(ROW_SIGNATURE, COMPLEX_COLUMN_NAME);
  Assert.assertEquals(SOME_COMPLEX, caps.toColumnType());
  Assert.assertFalse(caps.hasBitmapIndexes());
  Assert.assertFalse(caps.isDictionaryEncoded().isTrue());
  Assert.assertFalse(caps.areDictionaryValuesSorted().isTrue());
  Assert.assertFalse(caps.areDictionaryValuesUnique().isTrue());
  Assert.assertTrue(caps.hasMultipleValues().isUnknown());
  Assert.assertFalse(caps.hasSpatialIndexes());
}
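The complex case highlights the tri-state ColumnCapabilities.Capable: hasMultipleValues() comes back UNKNOWN rather than plainly false. Below is a hedged sketch of how a complex-typed column might be declared and how calling code can handle the tri-state; the type name "hyperUnique", the column name, and the helper are assumptions for illustration only.

// Illustrative only: declare a complex-typed column and read its capabilities.
static boolean complexColumnMayBeMultiValued() {
  ColumnType someComplex = ColumnType.ofComplex("hyperUnique"); // assumed complex type name
  RowSignature signature = RowSignature.builder()
      .add("cplx", someComplex) // assumed column name
      .build();
  ColumnCapabilities caps = RowBasedColumnSelectorFactory.getColumnCapabilities(signature, "cplx");
  // Capable is tri-state (TRUE / FALSE / UNKNOWN); callers can coerce UNKNOWN to a safe default.
  return caps.hasMultipleValues().coerceUnknownToBoolean(true);
}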
Use of org.apache.druid.segment.column.ColumnCapabilities in project druid by apache.
From class RowBasedColumnSelectorFactoryTest, method testCapabilitiesTime.
@Test
public void testCapabilitiesTime() {
  // time column takes a special path
  ColumnCapabilities caps = RowBasedColumnSelectorFactory.getColumnCapabilities(ROW_SIGNATURE, ColumnHolder.TIME_COLUMN_NAME);
  Assert.assertEquals(ValueType.LONG, caps.getType());
  Assert.assertFalse(caps.hasBitmapIndexes());
  Assert.assertFalse(caps.isDictionaryEncoded().isTrue());
  Assert.assertFalse(caps.areDictionaryValuesSorted().isTrue());
  Assert.assertFalse(caps.areDictionaryValuesUnique().isTrue());
  Assert.assertFalse(caps.hasMultipleValues().isMaybeTrue());
  Assert.assertFalse(caps.hasSpatialIndexes());
}
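As a usage note, callers typically probe these capabilities before wiring up selectors. A hedged sketch follows, under the assumption that getColumnCapabilities returns null for a column that is not part of the signature; the helper name is illustrative.

// Hypothetical helper: decide whether a column can be read through a long selector.
static boolean isLongColumn(RowSignature rowSignature, String columnName) {
  ColumnCapabilities caps = RowBasedColumnSelectorFactory.getColumnCapabilities(rowSignature, columnName);
  // caps is assumed to be null when the column is absent from the signature.
  return caps != null && caps.is(ValueType.LONG);
}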
Use of org.apache.druid.segment.column.ColumnCapabilities in project druid by druid-io.
From class ColumnarLongsEncodeDataFromSegmentBenchmark, method initializeSegmentValueIntermediaryFile.
/**
 * Writes column values to an intermediary text file, one value per line; encoders read from this file as
 * input when writing encoded column files.
 */
private void initializeSegmentValueIntermediaryFile() throws IOException {
  File dir = getTmpDir();
  File dataFile = new File(dir, getColumnDataFileName(segmentName, columnName));
  if (!dataFile.exists()) {
    final IndexIO indexIO = new IndexIO(new DefaultObjectMapper(), () -> 0);
    try (final QueryableIndex index = indexIO.loadIndex(new File(segmentPath))) {
      final Set<String> columnNames = new LinkedHashSet<>();
      columnNames.add(ColumnHolder.TIME_COLUMN_NAME);
      Iterables.addAll(columnNames, index.getColumnNames());
      final ColumnHolder column = index.getColumnHolder(columnName);
      final ColumnCapabilities capabilities = column.getCapabilities();
      try (Writer writer = Files.newBufferedWriter(dataFile.toPath(), StandardCharsets.UTF_8)) {
        if (!capabilities.is(ValueType.LONG)) {
          throw new RuntimeException("Invalid column type, expected 'Long'");
        }
        LongsColumn theColumn = (LongsColumn) column.getColumn();
        for (int i = 0; i < theColumn.length(); i++) {
          long value = theColumn.getLongSingleValueRow(i);
          writer.write(value + "\n");
        }
      }
    }
  }
}
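For completeness, the read side implied by the comment above ("encoders read from this file as input") could look roughly like the hypothetical helper below, using the same java.nio utilities as the writer; the benchmark's actual reader may differ.

// Hypothetical reader for the intermediary file: one long value per line.
private long[] readIntermediaryFile(File dataFile, int numRows) throws IOException {
  long[] values = new long[numRows];
  int i = 0;
  try (BufferedReader reader = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8)) {
    String line;
    while ((line = reader.readLine()) != null) {
      values[i++] = Long.parseLong(line);
    }
  }
  return values;
}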
Use of org.apache.druid.segment.column.ColumnCapabilities in project druid by druid-io.
From class SegmentAnalyzer, method analyze.
public Map<String, ColumnAnalysis> analyze(Segment segment) {
  Preconditions.checkNotNull(segment, "segment");
  // index is null for incremental-index-based segments, but storageAdapter is always available
  final QueryableIndex index = segment.asQueryableIndex();
  final StorageAdapter storageAdapter = segment.asStorageAdapter();
  // get length and column names from storageAdapter
  final int length = storageAdapter.getNumRows();
  Map<String, ColumnAnalysis> columns = new TreeMap<>();
  final RowSignature rowSignature = storageAdapter.getRowSignature();
  for (String columnName : rowSignature.getColumnNames()) {
    final ColumnCapabilities capabilities;
    if (storageAdapter instanceof IncrementalIndexStorageAdapter) {
      // See javadocs for getSnapshotColumnCapabilities for a discussion of why we need to do this.
      capabilities = ((IncrementalIndexStorageAdapter) storageAdapter).getSnapshotColumnCapabilities(columnName);
    } else {
      capabilities = storageAdapter.getColumnCapabilities(columnName);
    }
    final ColumnAnalysis analysis;
    switch (capabilities.getType()) {
      case LONG:
        final int bytesPerRow = ColumnHolder.TIME_COLUMN_NAME.equals(columnName) ? NUM_BYTES_IN_TIMESTAMP : Long.BYTES;
        analysis = analyzeNumericColumn(capabilities, length, bytesPerRow);
        break;
      case FLOAT:
        analysis = analyzeNumericColumn(capabilities, length, NUM_BYTES_IN_TEXT_FLOAT);
        break;
      case DOUBLE:
        analysis = analyzeNumericColumn(capabilities, length, Double.BYTES);
        break;
      case STRING:
        if (index != null) {
          analysis = analyzeStringColumn(capabilities, index.getColumnHolder(columnName));
        } else {
          analysis = analyzeStringColumn(capabilities, storageAdapter, columnName);
        }
        break;
      case COMPLEX:
        final ColumnHolder columnHolder = index != null ? index.getColumnHolder(columnName) : null;
        analysis = analyzeComplexColumn(capabilities, columnHolder);
        break;
      default:
        log.warn("Unknown column type[%s].", capabilities.asTypeString());
        analysis = ColumnAnalysis.error(StringUtils.format("unknown_type_%s", capabilities.asTypeString()));
    }
    columns.put(columnName, analysis);
  }
  return columns;
}
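A hedged note on the per-row byte counts passed to analyzeNumericColumn: the numeric analysis presumably scales a fixed per-row width by the row count, roughly as below; the real method may gate this on the requested analysis types and handle multi-value columns differently.

// Illustrative size estimate only, not the actual analyzeNumericColumn implementation.
long estimatedSize = (long) length * bytesPerRow; // e.g. Long.BYTES or Double.BYTES per row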