Example 46 with FieldSpec

Use of com.linkedin.pinot.common.data.FieldSpec in project pinot by linkedin.

From class BitmapInvertedIndexCreatorTest, method testMultiValue:

@Test
public void testMultiValue() throws IOException {
    boolean singleValue = false;
    String colName = "multi_value_col";
    FieldSpec spec = new DimensionFieldSpec(colName, DataType.INT, singleValue);
    int numDocs = 20;
    int[][] data = new int[numDocs][];
    int maxLength = 10;
    int cardinality = 10;
    File indexDirHeap = new File("/tmp/indexDirHeap");
    FileUtils.forceMkdir(indexDirHeap);
    File indexDirOffHeap = new File("/tmp/indexDirOffHeap");
    FileUtils.forceMkdir(indexDirOffHeap);
    File bitmapIndexFileOffHeap = new File(indexDirOffHeap, colName + V1Constants.Indexes.BITMAP_INVERTED_INDEX_FILE_EXTENSION);
    File bitmapIndexFileHeap = new File(indexDirHeap, colName + V1Constants.Indexes.BITMAP_INVERTED_INDEX_FILE_EXTENSION);
    // GENERATE RANDOM MULTI VALUE DATA SET
    Random r = new Random();
    Map<Integer, Set<Integer>> postingListMap = new HashMap<>();
    for (int i = 0; i < cardinality; i++) {
        postingListMap.put(i, new LinkedHashSet<Integer>());
    }
    int totalNumberOfEntries = 0;
    for (int docId = 0; docId < numDocs; docId++) {
        int length = r.nextInt(maxLength);
        data[docId] = new int[length];
        totalNumberOfEntries += length;
        for (int j = 0; j < length; j++) {
            data[docId][j] = r.nextInt(cardinality);
            postingListMap.get(data[docId][j]).add(docId);
        }
        LOGGER.debug("docId:" + docId + "  dictId:" + data[docId]);
    }
    for (int i = 0; i < cardinality; i++) {
        LOGGER.debug("Posting list for " + i + " : " + postingListMap.get(i));
    }
    // GENERATE BITMAP USING OffHeapCreator and validate
    OffHeapBitmapInvertedIndexCreator offHeapCreator = new OffHeapBitmapInvertedIndexCreator(indexDirOffHeap, cardinality, numDocs, totalNumberOfEntries, spec);
    for (int i = 0; i < numDocs; i++) {
        offHeapCreator.add(i, data[i]);
    }
    offHeapCreator.seal();
    validate(colName, bitmapIndexFileOffHeap, cardinality, postingListMap);
    // GENERATE BITMAP USING HeapCreator and validate
    HeapBitmapInvertedIndexCreator heapCreator = new HeapBitmapInvertedIndexCreator(indexDirHeap, cardinality, numDocs, totalNumberOfEntries, spec);
    for (int i = 0; i < numDocs; i++) {
        heapCreator.add(i, data[i]);
    }
    heapCreator.seal();
    validate(colName, bitmapIndexFileHeap, cardinality, postingListMap);
    // assert that the file sizes and contents are the same
    Assert.assertEquals(bitmapIndexFileHeap.length(), bitmapIndexFileOffHeap.length());
    Assert.assertTrue(FileUtils.contentEquals(bitmapIndexFileHeap, bitmapIndexFileOffHeap));
    FileUtils.deleteQuietly(indexDirHeap);
    FileUtils.deleteQuietly(indexDirOffHeap);
}
Also used: LinkedHashSet(java.util.LinkedHashSet) Set(java.util.Set) HashMap(java.util.HashMap) Random(java.util.Random) Arrays(java.util.Arrays) File(java.io.File) OffHeapBitmapInvertedIndexCreator(com.linkedin.pinot.core.segment.creator.impl.inv.OffHeapBitmapInvertedIndexCreator) HeapBitmapInvertedIndexCreator(com.linkedin.pinot.core.segment.creator.impl.inv.HeapBitmapInvertedIndexCreator) FieldSpec(com.linkedin.pinot.common.data.FieldSpec) DimensionFieldSpec(com.linkedin.pinot.common.data.DimensionFieldSpec) Test(org.testng.annotations.Test)
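
The test delegates per-bitmap verification to a private validate(...) helper that the snippet does not include. As a rough sketch of the expected side of such a check (an illustration, not the helper's actual code), the generated posting lists can be turned into RoaringBitmap instances, the library that backs Pinot's bitmap inverted indexes:

// Illustrative helper, not part of the original test: builds the expected
// per-dictionary-id bitmaps from the posting lists generated above.
// Requires org.roaringbitmap.buffer.MutableRoaringBitmap.
private static MutableRoaringBitmap[] toExpectedBitmaps(Map<Integer, Set<Integer>> postingListMap, int cardinality) {
    MutableRoaringBitmap[] expected = new MutableRoaringBitmap[cardinality];
    for (int dictId = 0; dictId < cardinality; dictId++) {
        MutableRoaringBitmap bitmap = new MutableRoaringBitmap();
        for (int docId : postingListMap.get(dictId)) {
            // One set bit per document whose multi-value array contains this dictionary id.
            bitmap.add(docId);
        }
        expected[dictId] = bitmap;
    }
    return expected;
}

A validate(...) implementation would then compare each bitmap deserialized from the index file against the corresponding entry in this array.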

Example 47 with FieldSpec

Use of com.linkedin.pinot.common.data.FieldSpec in project pinot by linkedin.

From class StringDictionaryPerfTest, method buildSegment:

/**
   * Helper method to build a segment:
   * <ul>
   *   <li> Segment contains one string column </li>
   *   <li> Row values for the column are randomly generated strings of length 1 to 100 </li>
   * </ul>
   *
   * @param dictLength Length of the dictionary
   * @throws Exception
   */
public void buildSegment(int dictLength) throws Exception {
    Schema schema = new Schema();
    String segmentName = "perfTestSegment" + System.currentTimeMillis();
    _indexDir = new File(TMP_DIR + File.separator + segmentName);
    _indexDir.deleteOnExit();
    FieldSpec fieldSpec = new DimensionFieldSpec(COLUMN_NAME, FieldSpec.DataType.STRING, true);
    schema.addField(fieldSpec);
    _dictLength = dictLength;
    _inputStrings = new String[dictLength];
    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setOutDir(_indexDir.getParent());
    config.setFormat(FileFormat.AVRO);
    config.setSegmentName(segmentName);
    Random random = new Random(System.nanoTime());
    final List<GenericRow> data = new ArrayList<>();
    Set<String> uniqueStrings = new HashSet<>(dictLength);
    int i = 0;
    while (i < dictLength) {
        HashMap<String, Object> map = new HashMap<>();
        String randomString = RandomStringUtils.randomAlphanumeric(1 + random.nextInt(MAX_STRING_LENGTH));
        if (uniqueStrings.contains(randomString)) {
            continue;
        }
        _inputStrings[i] = randomString;
        uniqueStrings.add(randomString);
        map.put("test", _inputStrings[i++]);
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        data.add(genericRow);
    }
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    RecordReader reader = getGenericRowRecordReader(schema, data);
    driver.init(config, reader);
    driver.build();
}
Also used: HashMap(java.util.HashMap) Schema(com.linkedin.pinot.common.data.Schema) RecordReader(com.linkedin.pinot.core.data.readers.RecordReader) ArrayList(java.util.ArrayList) FieldSpec(com.linkedin.pinot.common.data.FieldSpec) DimensionFieldSpec(com.linkedin.pinot.common.data.DimensionFieldSpec) SegmentIndexCreationDriverImpl(com.linkedin.pinot.core.segment.creator.impl.SegmentIndexCreationDriverImpl) GenericRow(com.linkedin.pinot.core.data.GenericRow) Random(java.util.Random) SegmentGeneratorConfig(com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig) File(java.io.File) HashSet(java.util.HashSet)
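
The snippet covers only segment construction; the lookup-timing half of StringDictionaryPerfTest is not shown. As a self-contained approximation of that measurement (plain JDK calls, not Pinot's dictionary reader API), one can time binary-search lookups over a sorted copy of the generated strings, which mirrors what a sorted string dictionary does internally:

// Hypothetical micro-benchmark loop; 'inputStrings' stands in for _inputStrings.
// Requires java.util.Arrays and java.util.concurrent.TimeUnit.
static void timeLookups(String[] inputStrings) {
    String[] sorted = inputStrings.clone();
    // A sorted string dictionary resolves a value to its id via binary search.
    Arrays.sort(sorted);
    long start = System.nanoTime();
    for (String s : inputStrings) {
        int dictId = Arrays.binarySearch(sorted, s);
        if (dictId < 0) {
            throw new IllegalStateException("String not found: " + s);
        }
    }
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
    System.out.println("Looked up " + inputStrings.length + " strings in " + elapsedMs + " ms");
}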

Example 48 with FieldSpec

Use of com.linkedin.pinot.common.data.FieldSpec in project pinot by linkedin.

From class RawIndexBenchmark, method buildSegment:

/**
   * Helper method that builds a segment containing two columns, both populated with data from the input file.
   * The first column is stored with a raw index (no dictionary), whereas the second column is dictionary encoded.
   *
   * @throws Exception
   */
private File buildSegment() throws Exception {
    Schema schema = new Schema();
    for (int i = 0; i < NUM_COLUMNS; i++) {
        String column = "column_" + i;
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(column, FieldSpec.DataType.STRING, true);
        schema.addField(dimensionFieldSpec);
    }
    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setRawIndexCreationColumns(Collections.singletonList(_rawIndexColumn));
    config.setOutDir(SEGMENT_DIR_NAME);
    config.setSegmentName(SEGMENT_NAME);
    BufferedReader reader = new BufferedReader(new FileReader(_dataFile));
    String value;
    final List<GenericRow> rows = new ArrayList<>();
    System.out.println("Reading data...");
    while ((value = reader.readLine()) != null) {
        HashMap<String, Object> map = new HashMap<>();
        for (FieldSpec fieldSpec : schema.getAllFieldSpecs()) {
            map.put(fieldSpec.getName(), value);
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        rows.add(genericRow);
        _numRows++;
        if (_numRows % 1000000 == 0) {
            System.out.println("Read rows: " + _numRows);
        }
    }
    reader.close();
    System.out.println("Generating segment...");
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    RecordReader recordReader = new TestRecordReader(rows, schema);
    driver.init(config, recordReader);
    driver.build();
    return new File(SEGMENT_DIR_NAME, SEGMENT_NAME);
}
Also used: TestRecordReader(com.linkedin.pinot.core.data.readers.TestRecordReader) HashMap(java.util.HashMap) Schema(com.linkedin.pinot.common.data.Schema) RecordReader(com.linkedin.pinot.core.data.readers.RecordReader) ArrayList(java.util.ArrayList) FieldSpec(com.linkedin.pinot.common.data.FieldSpec) DimensionFieldSpec(com.linkedin.pinot.common.data.DimensionFieldSpec) SegmentIndexCreationDriverImpl(com.linkedin.pinot.core.segment.creator.impl.SegmentIndexCreationDriverImpl) GenericRow(com.linkedin.pinot.core.data.GenericRow) SegmentGeneratorConfig(com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) File(java.io.File)
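
The benchmark reads its input from _dataFile, a plain text file with one value per line. A minimal sketch for generating such a file (a hypothetical helper, not part of RawIndexBenchmark):

// Hypothetical input generator for the one-value-per-line file consumed above.
// Requires java.nio.file.Files, java.nio.file.Path, java.io.PrintWriter,
// java.io.IOException and java.util.concurrent.ThreadLocalRandom.
static Path writeRandomDataFile(int numRows) throws IOException {
    Path dataFile = Files.createTempFile("raw-index-benchmark", ".txt");
    try (PrintWriter writer = new PrintWriter(Files.newBufferedWriter(dataFile))) {
        for (int i = 0; i < numRows; i++) {
            // Low cardinality keeps the dictionary-encoded column compact, which is
            // exactly the case where comparing it against the raw column is interesting.
            writer.println("value_" + ThreadLocalRandom.current().nextInt(1000));
        }
    }
    return dataFile;
}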

Example 49 with FieldSpec

Use of com.linkedin.pinot.common.data.FieldSpec in project pinot by linkedin.

From class BaseDefaultColumnHandler, method computeDefaultColumnActionMap:

/**
   * Computes the action needed for each column by comparing column metadata
   * between the schema and the segment.
   *
   * @return Action map for each column.
   */
private Map<String, DefaultColumnAction> computeDefaultColumnActionMap() {
    Map<String, DefaultColumnAction> defaultColumnActionMap = new HashMap<>();
    // Compute ADD and UPDATE actions.
    Collection<String> columnsInSchema = _schema.getColumnNames();
    for (String column : columnsInSchema) {
        FieldSpec fieldSpecInSchema = _schema.getFieldSpecFor(column);
        Preconditions.checkNotNull(fieldSpecInSchema);
        FieldSpec.FieldType fieldTypeInSchema = fieldSpecInSchema.getFieldType();
        ColumnMetadata columnMetadata = _segmentMetadata.getColumnMetadataFor(column);
        if (columnMetadata != null) {
            // Only check auto-generated columns.
            if (!columnMetadata.isAutoGenerated()) {
                continue;
            }
            // Check the field type matches.
            FieldSpec.FieldType fieldTypeInMetadata = columnMetadata.getFieldType();
            if (fieldTypeInMetadata != fieldTypeInSchema) {
                String failureMessage = "Field type: " + fieldTypeInMetadata + " for auto-generated column: " + column + " does not match field type: " + fieldTypeInSchema + " in schema, throw exception to drop and re-download the segment.";
                throw new RuntimeException(failureMessage);
            }
            // Check the data type and default value matches.
            FieldSpec.DataType dataTypeInMetadata = columnMetadata.getDataType();
            FieldSpec.DataType dataTypeInSchema = fieldSpecInSchema.getDataType();
            boolean isSingleValueInMetadata = columnMetadata.isSingleValue();
            boolean isSingleValueInSchema = fieldSpecInSchema.isSingleValueField();
            String defaultValueInMetadata = columnMetadata.getDefaultNullValueString();
            String defaultValueInSchema = fieldSpecInSchema.getDefaultNullValue().toString();
            if (dataTypeInMetadata != dataTypeInSchema || isSingleValueInMetadata != isSingleValueInSchema || !defaultValueInSchema.equals(defaultValueInMetadata)) {
                if (fieldTypeInMetadata == FieldSpec.FieldType.DIMENSION) {
                    defaultColumnActionMap.put(column, DefaultColumnAction.UPDATE_DIMENSION);
                } else {
                    Preconditions.checkState(fieldTypeInMetadata == FieldSpec.FieldType.METRIC);
                    defaultColumnActionMap.put(column, DefaultColumnAction.UPDATE_METRIC);
                }
            }
        } else {
            switch(fieldTypeInSchema) {
                case DIMENSION:
                    defaultColumnActionMap.put(column, DefaultColumnAction.ADD_DIMENSION);
                    break;
                case METRIC:
                    defaultColumnActionMap.put(column, DefaultColumnAction.ADD_METRIC);
                    break;
                default:
                    LOGGER.warn("Skip adding default column for column: {} with field type: {}", column, fieldTypeInSchema);
                    break;
            }
        }
    }
    // Compute REMOVE actions.
    Set<String> columnsInMetadata = _segmentMetadata.getAllColumns();
    for (String column : columnsInMetadata) {
        if (!columnsInSchema.contains(column)) {
            ColumnMetadata columnMetadata = _segmentMetadata.getColumnMetadataFor(column);
            // Only remove auto-generated columns.
            if (columnMetadata.isAutoGenerated()) {
                FieldSpec.FieldType fieldTypeInMetadata = columnMetadata.getFieldType();
                if (fieldTypeInMetadata == FieldSpec.FieldType.DIMENSION) {
                    defaultColumnActionMap.put(column, DefaultColumnAction.REMOVE_DIMENSION);
                } else {
                    Preconditions.checkState(fieldTypeInMetadata == FieldSpec.FieldType.METRIC);
                    defaultColumnActionMap.put(column, DefaultColumnAction.REMOVE_METRIC);
                }
            }
        }
    }
    return defaultColumnActionMap;
}
Also used: ColumnMetadata(com.linkedin.pinot.core.segment.index.ColumnMetadata) HashMap(java.util.HashMap) FieldSpec(com.linkedin.pinot.common.data.FieldSpec)
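
The six action constants referenced above fully describe the method's decision space. A sketch of the enum they imply (the real DefaultColumnAction may define additional members or helpers):

// Sketch inferred from the usages above; illustrative only.
enum DefaultColumnAction {
    ADD_DIMENSION, // column present in schema, missing from segment
    ADD_METRIC,
    UPDATE_DIMENSION, // auto-generated column whose type or default value drifted from the schema
    UPDATE_METRIC,
    REMOVE_DIMENSION, // auto-generated column no longer present in schema
    REMOVE_METRIC
}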

Example 50 with FieldSpec

Use of com.linkedin.pinot.common.data.FieldSpec in project pinot by linkedin.

From class BaseDefaultColumnHandler, method createColumnV1Indices:

/**
   * Helper method to create the V1 indices (dictionary and forward index) for a column.
   *
   * @param column Column name.
   */
protected void createColumnV1Indices(String column) throws Exception {
    FieldSpec fieldSpec = _schema.getFieldSpecFor(column);
    Preconditions.checkNotNull(fieldSpec);
    // Generate column index creation information.
    int totalDocs = _segmentMetadata.getTotalDocs();
    int totalRawDocs = _segmentMetadata.getTotalRawDocs();
    int totalAggDocs = totalDocs - totalRawDocs;
    FieldSpec.DataType dataType = fieldSpec.getDataType();
    Object defaultValue = fieldSpec.getDefaultNullValue();
    boolean isSingleValue = fieldSpec.isSingleValueField();
    int maxNumberOfMultiValueElements = isSingleValue ? 0 : 1;
    int dictionaryElementSize = 0;
    Object sortedArray;
    switch(dataType) {
        case STRING:
            Preconditions.checkState(defaultValue instanceof String);
            String stringDefaultValue = (String) defaultValue;
            // Length of the UTF-8 encoded byte array.
            dictionaryElementSize = stringDefaultValue.getBytes("UTF-8").length;
            sortedArray = new String[] { stringDefaultValue };
            break;
        case INT:
            Preconditions.checkState(defaultValue instanceof Integer);
            sortedArray = new int[] { (Integer) defaultValue };
            break;
        case LONG:
            Preconditions.checkState(defaultValue instanceof Long);
            sortedArray = new long[] { (Long) defaultValue };
            break;
        case FLOAT:
            Preconditions.checkState(defaultValue instanceof Float);
            sortedArray = new float[] { (Float) defaultValue };
            break;
        case DOUBLE:
            Preconditions.checkState(defaultValue instanceof Double);
            sortedArray = new double[] { (Double) defaultValue };
            break;
        default:
            throw new UnsupportedOperationException("Unsupported data type: " + dataType + " for column: " + column);
    }
    ColumnIndexCreationInfo columnIndexCreationInfo = new ColumnIndexCreationInfo(
        true, /* createDictionary */
        defaultValue, /* min */
        defaultValue, /* max */
        sortedArray,
        ForwardIndexType.FIXED_BIT_COMPRESSED,
        InvertedIndexType.SORTED_INDEX,
        isSingleValue, /* isSortedColumn */
        false, /* hasNulls */
        totalDocs, /* totalNumberOfEntries */
        maxNumberOfMultiValueElements,
        -1, /* unused max length */
        true, /* isAutoGenerated */
        defaultValue);
    // Create dictionary.
    // We will have only one value in the dictionary.
    SegmentDictionaryCreator segmentDictionaryCreator = new SegmentDictionaryCreator(
        false, /* hasNulls */
        sortedArray, fieldSpec, _indexDir, V1Constants.Str.DEFAULT_STRING_PAD_CHAR);
    segmentDictionaryCreator.build(new boolean[] { true });
    segmentDictionaryCreator.close();
    // Create forward index.
    if (isSingleValue) {
        // Single-value column.
        SingleValueSortedForwardIndexCreator svFwdIndexCreator =
            new SingleValueSortedForwardIndexCreator(_indexDir, 1, /* cardinality */ fieldSpec);
        for (int docId = 0; docId < totalDocs; docId++) {
            svFwdIndexCreator.add(0, /* dictionaryId */ docId);
        }
        svFwdIndexCreator.close();
    } else {
        // Multi-value column.
        MultiValueUnsortedForwardIndexCreator mvFwdIndexCreator = new MultiValueUnsortedForwardIndexCreator(
            fieldSpec, _indexDir, 1, /* cardinality */
            totalDocs, /* numDocs */
            totalDocs, /* totalNumberOfValues */
            false);
        int[] dictionaryIds = { 0 };
        for (int docId = 0; docId < totalDocs; docId++) {
            mvFwdIndexCreator.index(docId, dictionaryIds);
        }
        mvFwdIndexCreator.close();
    }
    // Add the column metadata information to the metadata properties.
    SegmentColumnarIndexCreator.addColumnMetadataInfo(_segmentProperties, column, columnIndexCreationInfo,
        totalDocs, totalRawDocs, totalAggDocs, fieldSpec,
        true, /* hasDictionary */
        dictionaryElementSize,
        true, /* hasInvertedIndex */
        null);
}
Also used: SegmentDictionaryCreator(com.linkedin.pinot.core.segment.creator.impl.SegmentDictionaryCreator) SingleValueSortedForwardIndexCreator(com.linkedin.pinot.core.segment.creator.impl.fwd.SingleValueSortedForwardIndexCreator) FieldSpec(com.linkedin.pinot.common.data.FieldSpec) ColumnIndexCreationInfo(com.linkedin.pinot.core.segment.creator.ColumnIndexCreationInfo) MultiValueUnsortedForwardIndexCreator(com.linkedin.pinot.core.segment.creator.impl.fwd.MultiValueUnsortedForwardIndexCreator)
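
Taken together with Example 49, a plausible orchestration inside the handler would walk the action map and invoke this helper for every ADD action. The loop below is illustrative only, not the actual BaseDefaultColumnHandler code:

// Illustrative only: how the action map from computeDefaultColumnActionMap()
// could drive createColumnV1Indices(). Requires java.util.Map.
void applyDefaultColumnActions() throws Exception {
    for (Map.Entry<String, DefaultColumnAction> entry : computeDefaultColumnActionMap().entrySet()) {
        String column = entry.getKey();
        switch (entry.getValue()) {
            case ADD_DIMENSION:
            case ADD_METRIC:
                // Materialize a dictionary and forward index filled with the default value.
                createColumnV1Indices(column);
                break;
            default:
                // UPDATE_* and REMOVE_* actions would rewrite or delete existing index files.
                break;
        }
    }
}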

Aggregations

FieldSpec (com.linkedin.pinot.common.data.FieldSpec): 52 usages
DimensionFieldSpec (com.linkedin.pinot.common.data.DimensionFieldSpec): 28 usages
Test (org.testng.annotations.Test): 15 usages
TimeFieldSpec (com.linkedin.pinot.common.data.TimeFieldSpec): 14 usages
MetricFieldSpec (com.linkedin.pinot.common.data.MetricFieldSpec): 13 usages
File (java.io.File): 11 usages
Schema (com.linkedin.pinot.common.data.Schema): 10 usages
SegmentDictionaryCreator (com.linkedin.pinot.core.segment.creator.impl.SegmentDictionaryCreator): 7 usages
HashMap (java.util.HashMap): 7 usages
TimeGranularitySpec (com.linkedin.pinot.common.data.TimeGranularitySpec): 6 usages
AbstractColumnStatisticsCollector (com.linkedin.pinot.core.segment.creator.AbstractColumnStatisticsCollector): 6 usages
Random (java.util.Random): 5 usages
Block (com.linkedin.pinot.core.common.Block): 4 usages
BlockMetadata (com.linkedin.pinot.core.common.BlockMetadata): 4 usages
DataSource (com.linkedin.pinot.core.common.DataSource): 4 usages
GenericRow (com.linkedin.pinot.core.data.GenericRow): 4 usages
SegmentGeneratorConfig (com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig): 4 usages
SegmentIndexCreationDriverImpl (com.linkedin.pinot.core.segment.creator.impl.SegmentIndexCreationDriverImpl): 4 usages
ArrayList (java.util.ArrayList): 4 usages
DataType (com.linkedin.pinot.common.data.FieldSpec.DataType): 3 usages