Example 71 with PropertiesConfiguration

use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

the class SegmentColumnarIndexCreator method writeMetadata.

void writeMetadata() throws ConfigurationException {
    PropertiesConfiguration properties = new PropertiesConfiguration(new File(file, V1Constants.MetadataKeys.METADATA_FILE_NAME));
    properties.setProperty(SEGMENT_CREATOR_VERSION, config.getCreatorVersion());
    properties.setProperty(SEGMENT_PADDING_CHARACTER, StringEscapeUtils.escapeJava(Character.toString(config.getPaddingCharacter())));
    properties.setProperty(SEGMENT_NAME, segmentName);
    properties.setProperty(TABLE_NAME, config.getTableName());
    properties.setProperty(DIMENSIONS, config.getDimensions());
    properties.setProperty(METRICS, config.getMetrics());
    properties.setProperty(TIME_COLUMN_NAME, config.getTimeColumnName());
    properties.setProperty(TIME_INTERVAL, "not_there");
    properties.setProperty(SEGMENT_TOTAL_RAW_DOCS, String.valueOf(totalRawDocs));
    properties.setProperty(SEGMENT_TOTAL_AGGREGATE_DOCS, String.valueOf(totalAggDocs));
    properties.setProperty(SEGMENT_TOTAL_DOCS, String.valueOf(totalDocs));
    properties.setProperty(STAR_TREE_ENABLED, String.valueOf(config.isEnableStarTreeIndex()));
    properties.setProperty(SEGMENT_TOTAL_ERRORS, String.valueOf(totalErrors));
    properties.setProperty(SEGMENT_TOTAL_NULLS, String.valueOf(totalNulls));
    properties.setProperty(SEGMENT_TOTAL_CONVERSIONS, String.valueOf(totalConversions));
    properties.setProperty(SEGMENT_TOTAL_NULL_COLS, String.valueOf(totalNullCols));
    StarTreeIndexSpec starTreeIndexSpec = config.getStarTreeIndexSpec();
    if (starTreeIndexSpec != null) {
        properties.setProperty(STAR_TREE_SPLIT_ORDER, starTreeIndexSpec.getDimensionsSplitOrder());
        properties.setProperty(STAR_TREE_MAX_LEAF_RECORDS, starTreeIndexSpec.getMaxLeafRecords());
        properties.setProperty(STAR_TREE_SKIP_STAR_NODE_CREATION_FOR_DIMENSIONS, starTreeIndexSpec.getSkipStarNodeCreationForDimensions());
        properties.setProperty(STAR_TREE_SKIP_MATERIALIZATION_CARDINALITY, starTreeIndexSpec.getskipMaterializationCardinalityThreshold());
        properties.setProperty(STAR_TREE_SKIP_MATERIALIZATION_FOR_DIMENSIONS, starTreeIndexSpec.getskipMaterializationForDimensions());
    }
    HllConfig hllConfig = config.getHllConfig();
    Map<String, String> derivedHllFieldToOriginMap = null;
    if (hllConfig != null) {
        properties.setProperty(SEGMENT_HLL_LOG2M, hllConfig.getHllLog2m());
        derivedHllFieldToOriginMap = hllConfig.getDerivedHllFieldToOriginMap();
    }
    String timeColumn = config.getTimeColumnName();
    if (indexCreationInfoMap.get(timeColumn) != null) {
        properties.setProperty(SEGMENT_START_TIME, indexCreationInfoMap.get(timeColumn).getMin());
        properties.setProperty(SEGMENT_END_TIME, indexCreationInfoMap.get(timeColumn).getMax());
        properties.setProperty(TIME_UNIT, config.getSegmentTimeUnit());
    }
    if (config.containsCustomProperty(SEGMENT_START_TIME)) {
        properties.setProperty(SEGMENT_START_TIME, config.getStartTime());
    }
    if (config.containsCustomProperty(SEGMENT_END_TIME)) {
        properties.setProperty(SEGMENT_END_TIME, config.getEndTime());
    }
    if (config.containsCustomProperty(TIME_UNIT)) {
        properties.setProperty(TIME_UNIT, config.getSegmentTimeUnit());
    }
    for (Map.Entry<String, String> entry : config.getCustomProperties().entrySet()) {
        properties.setProperty(entry.getKey(), entry.getValue());
    }
    for (Map.Entry<String, ColumnIndexCreationInfo> entry : indexCreationInfoMap.entrySet()) {
        String column = entry.getKey();
        ColumnIndexCreationInfo columnIndexCreationInfo = entry.getValue();
        SegmentDictionaryCreator dictionaryCreator = dictionaryCreatorMap.get(column);
        int dictionaryElementSize = (dictionaryCreator != null) ? dictionaryCreator.getStringColumnMaxLength() : 0;
        // TODO: after fixing the server-side dependency on HAS_INVERTED_INDEX and deployed, set HAS_INVERTED_INDEX properly
        // The hasInvertedIndex flag in segment metadata is picked up in ColumnMetadata, and will be used during the query
        // plan phase. If it is set to false, then inverted indexes are not used in queries even if they are created via table
        // configs on segment load. So, we set it to true here for now, until we fix the server to update the value inside
        // ColumnMetadata and to tell the query planner that the available inverted index is current and can be used.
        //
        //    boolean hasInvertedIndex = invertedIndexCreatorMap.containsKey(column);
        boolean hasInvertedIndex = true;
        String hllOriginColumn = null;
        if (derivedHllFieldToOriginMap != null) {
            hllOriginColumn = derivedHllFieldToOriginMap.get(column);
        }
        addColumnMetadataInfo(properties, column, columnIndexCreationInfo, totalDocs, totalRawDocs, totalAggDocs, schema.getFieldSpecFor(column), dictionaryCreatorMap.containsKey(column), dictionaryElementSize, hasInvertedIndex, hllOriginColumn);
    }
    properties.save();
}
Also used : HllConfig(com.linkedin.pinot.core.startree.hll.HllConfig) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration) StarTreeIndexSpec(com.linkedin.pinot.common.data.StarTreeIndexSpec) ColumnIndexCreationInfo(com.linkedin.pinot.core.segment.creator.ColumnIndexCreationInfo) File(java.io.File) HashMap(java.util.HashMap) Map(java.util.Map)
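
For context, a metadata file produced by writeMetadata() can be read back through the same PropertiesConfiguration API. The snippet below is a minimal sketch, assuming a file name of "metadata.properties" and illustrative key names such as "segment.name"; the actual constants live in V1Constants.MetadataKeys and are not reproduced here.

import java.io.File;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;

public class ReadSegmentMetadata {
    public static void main(String[] args) throws ConfigurationException {
        // "metadata.properties" is an assumed file name; the real one is
        // V1Constants.MetadataKeys.METADATA_FILE_NAME.
        PropertiesConfiguration properties =
                new PropertiesConfiguration(new File(args[0], "metadata.properties"));
        // Keys mirror the ones written by writeMetadata(); "segment.name" and
        // "segment.total.docs" are illustrative, not the actual constants.
        System.out.println("segment.name = " + properties.getString("segment.name"));
        System.out.println("segment.total.docs = " + properties.getInt("segment.total.docs", 0));
    }
}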

Example 72 with PropertiesConfiguration

use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

the class RealtimeQueriesSentinelTest method setup.

@BeforeClass
public void setup() throws Exception {
    TableDataManagerProvider.setServerMetrics(new ServerMetrics(new MetricsRegistry()));
    PINOT_SCHEMA = getTestSchema();
    PINOT_SCHEMA.setSchemaName("realtimeSchema");
    AVRO_RECORD_TRANSFORMER = new AvroRecordToPinotRowGenerator(PINOT_SCHEMA);
    final IndexSegment indexSegment = getRealtimeSegment();
    setUpTestQueries("testTable");
    CONFIG_BUILDER = new TestingServerPropertiesBuilder("testTable");
    final PropertiesConfiguration serverConf = CONFIG_BUILDER.build();
    serverConf.setDelimiterParsingDisabled(false);
    final FileBasedInstanceDataManager instanceDataManager = FileBasedInstanceDataManager.getInstanceDataManager();
    instanceDataManager.init(new FileBasedInstanceDataManagerConfig(serverConf.subset("pinot.server.instance")));
    instanceDataManager.start();
    instanceDataManager.getTableDataManager("testTable");
    instanceDataManager.getTableDataManager("testTable").addSegment(indexSegment);
    QUERY_EXECUTOR = new ServerQueryExecutorV1Impl(false);
    QUERY_EXECUTOR.init(serverConf.subset("pinot.server.query.executor"), instanceDataManager, new ServerMetrics(new MetricsRegistry()));
}
Also used : MetricsRegistry(com.yammer.metrics.core.MetricsRegistry) AvroRecordToPinotRowGenerator(com.linkedin.pinot.core.realtime.impl.kafka.AvroRecordToPinotRowGenerator) IndexSegment(com.linkedin.pinot.core.indexsegment.IndexSegment) FileBasedInstanceDataManager(com.linkedin.pinot.core.data.manager.offline.FileBasedInstanceDataManager) ServerQueryExecutorV1Impl(com.linkedin.pinot.core.query.executor.ServerQueryExecutorV1Impl) ServerMetrics(com.linkedin.pinot.common.metrics.ServerMetrics) FileBasedInstanceDataManagerConfig(com.linkedin.pinot.core.data.manager.config.FileBasedInstanceDataManagerConfig) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration) BeforeClass(org.testng.annotations.BeforeClass)
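
The init calls above rely on Configuration.subset(), which returns a view of the configuration with the given prefix stripped from each key. A minimal, self-contained sketch of that behavior (the keys are illustrative):

import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;

public class SubsetDemo {
    public static void main(String[] args) {
        PropertiesConfiguration serverConf = new PropertiesConfiguration();
        serverConf.addProperty("pinot.server.instance.id", "0");
        serverConf.addProperty("pinot.server.query.executor.timeout", "150000");

        // subset() returns a view whose keys have the prefix stripped,
        // which is what instanceDataManager.init(...) and QUERY_EXECUTOR.init(...) receive.
        Configuration instanceConf = serverConf.subset("pinot.server.instance");
        System.out.println(instanceConf.getString("id"));       // prints 0
        Configuration executorConf = serverConf.subset("pinot.server.query.executor");
        System.out.println(executorConf.getInt("timeout"));     // prints 150000
    }
}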

Example 73 with PropertiesConfiguration

use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

the class TestingServerPropertiesBuilder method build.

public PropertiesConfiguration build() throws IOException {
    final File file = new File("/tmp/" + TestingServerPropertiesBuilder.class.toString());
    if (file.exists()) {
        FileUtils.deleteDirectory(file);
    }
    file.mkdir();
    final File bootsDir = new File(file, "bootstrap");
    final File dataDir = new File(file, "data");
    bootsDir.mkdir();
    dataDir.mkdir();
    final PropertiesConfiguration config = new PropertiesConfiguration();
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, "id"), "0");
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, "bootstrap.segment.dir"), bootsDir.getAbsolutePath());
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, "dataDir"), dataDir.getAbsolutePath());
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, "bootstrap.segment.dir"), "0");
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, "data.manager.class"), "com.linkedin.pinot.core.data.manager.InstanceDataManager");
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, "segment.metadata.loader.class"), "com.linkedin.pinot.core.indexsegment.columnar.ColumnarSegmentMetadataLoader");
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, "tableName"), StringUtils.join(tableNames, ","));
    for (final String table : tableNames) {
        config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, table, "dataManagerType"), "offline");
        config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, table, "readMode"), "heap");
        config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, INSTANCE_PREFIC, table, "numQueryExecutorThreads"), "50");
    }
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, EXECUTOR_PREFIX, "class"), "com.linkedin.pinot.core.query.executor.ServerQueryExecutor");
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, EXECUTOR_PREFIX, "timeout"), "150000");
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, "requestHandlerFactory.class"), "com.linkedin.pinot.server.request.SimpleRequestHandlerFactory");
    config.addProperty(StringUtil.join(".", PINOT_SERVER_PREFIX, "netty.port"), "8882");
    config.setDelimiterParsingDisabled(true);
    final Iterator<String> keys = config.getKeys();
    while (keys.hasNext()) {
        final String key = keys.next();
        System.out.println(key + "  : " + config.getProperty(key));
    }
    return config;
}
Also used : File(java.io.File) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration)
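
One detail worth noting: in Commons Configuration 1.x the list delimiter is applied when a value is added, so the effect of setDelimiterParsingDisabled() depends on when it is called. A minimal sketch of the difference, using a hypothetical tableName key:

import org.apache.commons.configuration.PropertiesConfiguration;

public class DelimiterDemo {
    public static void main(String[] args) {
        // Default behavior: the comma list delimiter splits the value when it is added.
        PropertiesConfiguration splitting = new PropertiesConfiguration();
        splitting.addProperty("tableName", "tableA,tableB");
        System.out.println(splitting.getProperty("tableName")); // a List: [tableA, tableB]

        // Disabling delimiter parsing before the add keeps the value as one string.
        PropertiesConfiguration nonSplitting = new PropertiesConfiguration();
        nonSplitting.setDelimiterParsingDisabled(true);
        nonSplitting.addProperty("tableName", "tableA,tableB");
        System.out.println(nonSplitting.getProperty("tableName")); // tableA,tableB
    }
}

Because the builder above disables delimiter parsing only after its addProperty calls, a comma-joined tableName value has already been split by that point, at least in Commons Configuration 1.x.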

Example 74 with PropertiesConfiguration

use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

the class FilterOperatorBenchmark method main.

public static void main(String[] args) throws Exception {
    String rootDir = args[0];
    File[] segmentDirs = new File(rootDir).listFiles();
    String query = args[1];
    AtomicInteger totalDocsMatched = new AtomicInteger(0);
    Pql2Compiler pql2Compiler = new Pql2Compiler();
    BrokerRequest brokerRequest = pql2Compiler.compileToBrokerRequest(query);
    List<Callable<Void>> segmentProcessors = new ArrayList<>();
    long[] timesSpent = new long[segmentDirs.length];
    for (int i = 0; i < segmentDirs.length; i++) {
        File indexSegmentDir = segmentDirs[i];
        System.out.println("Loading " + indexSegmentDir.getName());
        Configuration tableDataManagerConfig = new PropertiesConfiguration();
        List<String> invertedColumns = new ArrayList<>();
        FilenameFilter filter = new FilenameFilter() {

            @Override
            public boolean accept(File dir, String name) {
                return name.endsWith(".bitmap.inv");
            }
        };
        String[] indexFiles = indexSegmentDir.list(filter);
        for (String indexFileName : indexFiles) {
            invertedColumns.add(indexFileName.replace(".bitmap.inv", ""));
        }
        tableDataManagerConfig.setProperty(IndexLoadingConfigMetadata.KEY_OF_LOADING_INVERTED_INDEX, invertedColumns);
        IndexLoadingConfigMetadata indexLoadingConfigMetadata = new IndexLoadingConfigMetadata(tableDataManagerConfig);
        IndexSegmentImpl indexSegmentImpl = (IndexSegmentImpl) Loaders.IndexSegment.load(indexSegmentDir, ReadMode.heap, indexLoadingConfigMetadata);
        segmentProcessors.add(new SegmentProcessor(i, indexSegmentImpl, brokerRequest, totalDocsMatched, timesSpent));
    }
    ExecutorService executorService = Executors.newCachedThreadPool();
    for (int run = 0; run < 5; run++) {
        System.out.println("START RUN:" + run);
        totalDocsMatched.set(0);
        long start = System.currentTimeMillis();
        List<Future<Void>> futures = executorService.invokeAll(segmentProcessors);
        for (int i = 0; i < futures.size(); i++) {
            futures.get(i).get();
        }
        long end = System.currentTimeMillis();
        System.out.println("Total docs matched:" + totalDocsMatched + " took:" + (end - start));
        System.out.println("Times spent:" + Arrays.toString(timesSpent));
        System.out.println("END RUN:" + run);
    }
    System.exit(0);
}
Also used : Configuration(org.apache.commons.configuration.Configuration) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration) Pql2Compiler(com.linkedin.pinot.pql.parsers.Pql2Compiler) ArrayList(java.util.ArrayList) IndexLoadingConfigMetadata(com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata) Callable(java.util.concurrent.Callable) FilenameFilter(java.io.FilenameFilter) IndexSegmentImpl(com.linkedin.pinot.core.segment.index.IndexSegmentImpl) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) BrokerRequest(com.linkedin.pinot.common.request.BrokerRequest) File(java.io.File)
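
The benchmark stores the inverted-index column names as multiple values under a single key on an in-memory PropertiesConfiguration. A minimal sketch of that pattern, using a stand-in key name instead of the real IndexLoadingConfigMetadata.KEY_OF_LOADING_INVERTED_INDEX constant:

import java.util.Arrays;
import java.util.List;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;

public class ListPropertyDemo {
    public static void main(String[] args) {
        // An in-memory PropertiesConfiguration can hold several values under one key,
        // which is how the benchmark passes the inverted-index column names along.
        Configuration conf = new PropertiesConfiguration();
        // "loading.inverted.index.columns" is a stand-in key; the real key is
        // IndexLoadingConfigMetadata.KEY_OF_LOADING_INVERTED_INDEX.
        List<String> invertedColumns = Arrays.asList("userId", "country");
        conf.setProperty("loading.inverted.index.columns", invertedColumns);
        System.out.println(conf.getList("loading.inverted.index.columns")); // [userId, country]
    }
}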

Example 75 with PropertiesConfiguration

use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

the class HelixServerStarter method startDefault.

public static HelixServerStarter startDefault() throws Exception {
    final Configuration configuration = new PropertiesConfiguration();
    final int port = 8003;
    configuration.addProperty(CommonConstants.Helix.KEY_OF_SERVER_NETTY_PORT, port);
    configuration.addProperty("pinot.server.instance.dataDir", "/tmp/PinotServer/test" + port + "/index");
    configuration.addProperty("pinot.server.instance.segmentTarDir", "/tmp/PinotServer/test" + port + "/segmentTar");
    final HelixServerStarter pinotHelixStarter = new HelixServerStarter("quickstart", "localhost:2191", configuration);
    return pinotHelixStarter;
}
Also used : Configuration(org.apache.commons.configuration.Configuration) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration)
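
As in startDefault() above, an empty PropertiesConfiguration can serve as a purely in-memory Configuration; no backing file is needed unless save() is called. A minimal sketch with the same kind of properties:

import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;

public class InMemoryConfigDemo {
    public static void main(String[] args) {
        // No file is involved: the configuration lives in memory only.
        Configuration configuration = new PropertiesConfiguration();
        configuration.addProperty("pinot.server.netty.port", 8003);
        configuration.addProperty("pinot.server.instance.dataDir", "/tmp/PinotServer/test8003/index");

        System.out.println(configuration.getInt("pinot.server.netty.port"));          // 8003
        System.out.println(configuration.getString("pinot.server.instance.dataDir")); // /tmp/PinotServer/test8003/index
    }
}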

Aggregations

PropertiesConfiguration (org.apache.commons.configuration.PropertiesConfiguration) 118
File (java.io.File) 38
Configuration (org.apache.commons.configuration.Configuration) 33
ConfigurationException (org.apache.commons.configuration.ConfigurationException) 33
IOException (java.io.IOException) 12
Test (org.testng.annotations.Test) 11
BeforeClass (org.testng.annotations.BeforeClass) 10
MetricsRegistry (com.yammer.metrics.core.MetricsRegistry) 9
IndexLoadingConfigMetadata (com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata) 8
FileBasedInstanceDataManager (com.linkedin.pinot.core.data.manager.offline.FileBasedInstanceDataManager) 8
FileInputStream (java.io.FileInputStream) 7
CompositeConfiguration (org.apache.commons.configuration.CompositeConfiguration) 7
URL (java.net.URL) 6
ServerQueryExecutorV1Impl (com.linkedin.pinot.core.query.executor.ServerQueryExecutorV1Impl) 5
ServerConf (com.linkedin.pinot.server.conf.ServerConf) 5
FileNotFoundException (java.io.FileNotFoundException) 5
Path (java.nio.file.Path) 5
HashMap (java.util.HashMap) 5
Properties (java.util.Properties) 5
CloudRuntimeException (com.cloud.utils.exception.CloudRuntimeException) 4