
Example 66 with PropertiesConfiguration

Use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

From the class HelixBrokerStarter, method startDefault.

public static HelixBrokerStarter startDefault() throws Exception {
    Configuration configuration = new PropertiesConfiguration();
    int port = 5001;
    configuration.addProperty(CommonConstants.Helix.KEY_OF_BROKER_QUERY_PORT, port);
    configuration.addProperty("pinot.broker.timeoutMs", 500 * 1000L);
    final HelixBrokerStarter pinotHelixBrokerStarter = new HelixBrokerStarter("quickstart", "localhost:2122", configuration);
    return pinotHelixBrokerStarter;
}
Also used : Configuration(org.apache.commons.configuration.Configuration) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration)
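
The configuration built in startDefault stays entirely in memory; PropertiesConfiguration is used here as a plain key-value store rather than being loaded from a file. As a minimal sketch (not taken from the Pinot source; the key names are reused from the example above), the stored values can be read back through the typed getters that Commons Configuration provides:

Configuration configuration = new PropertiesConfiguration();
configuration.addProperty(CommonConstants.Helix.KEY_OF_BROKER_QUERY_PORT, 5001);
configuration.addProperty("pinot.broker.timeoutMs", 500 * 1000L);
// Typed getters convert the stored values on the way out.
int port = configuration.getInt(CommonConstants.Helix.KEY_OF_BROKER_QUERY_PORT);
long timeoutMs = configuration.getLong("pinot.broker.timeoutMs");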

Example 67 with PropertiesConfiguration

Use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

From the class BrokerServerBuilderTest, method main.

public static void main(String[] args) throws Exception {
    PropertiesConfiguration config = new PropertiesConfiguration(new File(BrokerServerBuilderTest.class.getClassLoader().getResource("broker.properties").toURI()));
    final BrokerServerBuilder bld = new BrokerServerBuilder(config, null, null, null);
    bld.buildNetwork();
    bld.buildHTTP();
    bld.start();
    Runtime.getRuntime().addShutdownHook(new Thread() {

        @Override
        public void run() {
            try {
                bld.stop();
            } catch (Exception e) {
                // Nothing more to do during shutdown; just report the failure.
                e.printStackTrace();
            }
        }
    });
    BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    while (true) {
        String command = br.readLine();
        // Stop the broker and leave the loop on EOF or an explicit "exit" command.
        if (command == null || command.equals("exit")) {
            bld.stop();
            break;
        }
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration) File(java.io.File)
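
Note that getResource returns null when the requested file is not on the classpath, which the test above does not guard against. A minimal sketch of the same load-from-classpath pattern with that check added (the resource name config.properties is hypothetical; imports as in the surrounding examples plus java.net.URL and java.io.FileNotFoundException):

URL resource = BrokerServerBuilderTest.class.getClassLoader().getResource("config.properties");
if (resource == null) {
    throw new FileNotFoundException("config.properties is not on the classpath");
}
// The PropertiesConfiguration(File) constructor loads and parses the file immediately.
PropertiesConfiguration config = new PropertiesConfiguration(new File(resource.toURI()));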

Example 68 with PropertiesConfiguration

Use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

From the class TableDataManagerConfig, method getDefaultHelixTableDataManagerConfig.

public static TableDataManagerConfig getDefaultHelixTableDataManagerConfig(InstanceDataManagerConfig instanceDataManagerConfig, String tableName) throws ConfigurationException {
    TableType tableType = TableNameBuilder.getTableTypeFromTableName(tableName);
    assert tableType != null;
    Configuration defaultConfig = new PropertiesConfiguration();
    defaultConfig.addProperty(TABLE_DATA_MANAGER_NAME, tableName);
    String dataDir = instanceDataManagerConfig.getInstanceDataDir() + "/" + tableName;
    defaultConfig.addProperty(TABLE_DATA_MANAGER_DATA_DIRECTORY, dataDir);
    defaultConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_COLUMN_MIN_MAX_VALUE_GENERATOR_MODE, ColumnMinMaxValueGeneratorMode.TIME.toString());
    if (instanceDataManagerConfig.getReadMode() != null) {
        defaultConfig.addProperty(READ_MODE, instanceDataManagerConfig.getReadMode().toString());
    } else {
        defaultConfig.addProperty(READ_MODE, ReadMode.heap);
    }
    int avgMultiValueCount = DEFAULT_REALTIME_AVG_MULTI_VALUE_COUNT;
    if (instanceDataManagerConfig.getAvgMultiValueCount() != null) {
        try {
            avgMultiValueCount = Integer.parseInt(instanceDataManagerConfig.getAvgMultiValueCount());
        } catch (NumberFormatException e) {
            // Ignore malformed values and keep the default.
        }
    }
    defaultConfig.addProperty(REALTIME_AVG_MULTI_VALUE_COUNT, avgMultiValueCount);
    if (instanceDataManagerConfig.getSegmentFormatVersion() != null) {
        defaultConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, instanceDataManagerConfig.getSegmentFormatVersion());
    }
    if (instanceDataManagerConfig.isEnableDefaultColumns()) {
        defaultConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_ENABLE_DEFAULT_COLUMNS, true);
    }
    TableDataManagerConfig tableDataManagerConfig = new TableDataManagerConfig(defaultConfig);
    switch(tableType) {
        case OFFLINE:
            defaultConfig.addProperty(TABLE_DATA_MANAGER_TYPE, "offline");
            break;
        case REALTIME:
            defaultConfig.addProperty(TABLE_DATA_MANAGER_TYPE, "realtime");
            break;
        default:
            throw new UnsupportedOperationException("Not supported table type for - " + tableName);
    }
    return tableDataManagerConfig;
}
Also used : TableType(com.linkedin.pinot.common.utils.CommonConstants.Helix.TableType) Configuration(org.apache.commons.configuration.Configuration) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration)
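
A detail worth knowing about the calls above: addProperty appends to a key that already exists, turning it into a multi-valued property, while setProperty replaces the value, so the defaults built here rely on each key being added exactly once. A short illustration of the difference, not taken from Pinot:

Configuration config = new PropertiesConfiguration();
config.addProperty("readMode", "heap");
config.addProperty("readMode", "mmap"); // "readMode" now holds the list [heap, mmap]
config.setProperty("readMode", "mmap"); // "readMode" is back to the single value "mmap"
String readMode = config.getString("readMode");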

Example 69 with PropertiesConfiguration

Use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

From the class SegmentFormatConverterV1ToV2, method convert.

@Override
public void convert(File indexSegmentDir) throws Exception {
    SegmentMetadataImpl segmentMetadataImpl = new SegmentMetadataImpl(indexSegmentDir);
    SegmentDirectory segmentDirectory = SegmentDirectory.createFromLocalFS(indexSegmentDir, segmentMetadataImpl, ReadMode.mmap);
    Set<String> columns = segmentMetadataImpl.getAllColumns();
    SegmentDirectory.Writer segmentWriter = segmentDirectory.createWriter();
    for (String column : columns) {
        ColumnMetadata columnMetadata = segmentMetadataImpl.getColumnMetadataFor(column);
        if (columnMetadata.isSorted()) {
            // no need to change sorted forward index
            continue;
        }
        PinotDataBuffer fwdIndexBuffer = segmentWriter.getIndexFor(column, ColumnIndexType.FORWARD_INDEX);
        if (columnMetadata.isSingleValue() && !columnMetadata.isSorted()) {
            // Since we use a dictionary to encode values, there won't be any negative values in the
            // forward index, so the reader treats the entries as unsigned.
            boolean signed = false;
            SingleColumnSingleValueReader v1Reader = new com.linkedin.pinot.core.io.reader.impl.v1.FixedBitSingleValueReader(fwdIndexBuffer, segmentMetadataImpl.getTotalDocs(), columnMetadata.getBitsPerElement(), signed);
            File convertedFwdIndexFile = new File(indexSegmentDir, column + V1Constants.Indexes.UN_SORTED_SV_FWD_IDX_FILE_EXTENTION + ".tmp");
            SingleColumnSingleValueWriter v2Writer = new com.linkedin.pinot.core.io.writer.impl.v2.FixedBitSingleValueWriter(convertedFwdIndexFile, segmentMetadataImpl.getTotalDocs(), columnMetadata.getBitsPerElement());
            for (int row = 0; row < segmentMetadataImpl.getTotalDocs(); row++) {
                int value = v1Reader.getInt(row);
                v2Writer.setInt(row, value);
            }
            v1Reader.close();
            v2Writer.close();
            File fwdIndexFileCopy = new File(indexSegmentDir, column + V1Constants.Indexes.UN_SORTED_SV_FWD_IDX_FILE_EXTENTION + ".orig");
            segmentWriter.removeIndex(column, ColumnIndexType.FORWARD_INDEX);
            // FIXME: fwdIndexFileCopy is declared but never written, so the original forward index is not backed up.
            PinotDataBuffer newIndexBuffer = segmentWriter.newIndexFor(column, ColumnIndexType.FORWARD_INDEX, (int) convertedFwdIndexFile.length());
            newIndexBuffer.readFrom(convertedFwdIndexFile);
            convertedFwdIndexFile.delete();
        }
        if (!columnMetadata.isSingleValue()) {
            // Since we use a dictionary to encode values, there won't be any negative values in the
            // forward index, so the reader treats the entries as unsigned.
            boolean signed = false;
            SingleColumnMultiValueReader v1Reader = new com.linkedin.pinot.core.io.reader.impl.v1.FixedBitMultiValueReader(fwdIndexBuffer, segmentMetadataImpl.getTotalDocs(), columnMetadata.getTotalNumberOfEntries(), columnMetadata.getBitsPerElement(), signed);
            File convertedFwdIndexFile = new File(indexSegmentDir, column + V1Constants.Indexes.UN_SORTED_MV_FWD_IDX_FILE_EXTENTION + ".tmp");
            SingleColumnMultiValueWriter v2Writer = new com.linkedin.pinot.core.io.writer.impl.v2.FixedBitMultiValueWriter(convertedFwdIndexFile, segmentMetadataImpl.getTotalDocs(), columnMetadata.getTotalNumberOfEntries(), columnMetadata.getBitsPerElement());
            int[] values = new int[columnMetadata.getMaxNumberOfMultiValues()];
            for (int row = 0; row < segmentMetadataImpl.getTotalDocs(); row++) {
                int length = v1Reader.getIntArray(row, values);
                int[] copy = new int[length];
                System.arraycopy(values, 0, copy, 0, length);
                v2Writer.setIntArray(row, copy);
            }
            v1Reader.close();
            v2Writer.close();
            segmentWriter.removeIndex(column, ColumnIndexType.FORWARD_INDEX);
            PinotDataBuffer newIndexBuffer = segmentWriter.newIndexFor(column, ColumnIndexType.FORWARD_INDEX, (int) convertedFwdIndexFile.length());
            newIndexBuffer.readFrom(convertedFwdIndexFile);
            convertedFwdIndexFile.delete();
        }
    }
    File metadataFile = new File(indexSegmentDir, V1Constants.MetadataKeys.METADATA_FILE_NAME);
    File metadataFileCopy = new File(indexSegmentDir, V1Constants.MetadataKeys.METADATA_FILE_NAME + ".orig");
    // Back up the original metadata file before rewriting it.
    BufferedInputStream bis = new BufferedInputStream(new FileInputStream(metadataFile));
    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(metadataFileCopy));
    IOUtils.copy(bis, bos);
    bis.close();
    bos.close();
    final PropertiesConfiguration properties = new PropertiesConfiguration(metadataFileCopy);
    // update the segment version
    properties.setProperty(V1Constants.MetadataKeys.Segment.SEGMENT_VERSION, SegmentVersion.v2.toString());
    metadataFile.delete();
    properties.save(metadataFile);
}
Also used : SingleColumnMultiValueWriter(com.linkedin.pinot.core.io.writer.SingleColumnMultiValueWriter) ColumnMetadata(com.linkedin.pinot.core.segment.index.ColumnMetadata) SegmentDirectory(com.linkedin.pinot.core.segment.store.SegmentDirectory) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration) SingleColumnSingleValueWriter(com.linkedin.pinot.core.io.writer.SingleColumnSingleValueWriter) BufferedInputStream(java.io.BufferedInputStream) SegmentMetadataImpl(com.linkedin.pinot.core.segment.index.SegmentMetadataImpl) SingleColumnMultiValueReader(com.linkedin.pinot.core.io.reader.SingleColumnMultiValueReader) BufferedOutputStream(java.io.BufferedOutputStream) SingleColumnSingleValueReader(com.linkedin.pinot.core.io.reader.SingleColumnSingleValueReader) FileInputStream(java.io.FileInputStream) PinotDataBuffer(com.linkedin.pinot.core.segment.memory.PinotDataBuffer) FileOutputStream(java.io.FileOutputStream) File(java.io.File)
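
The backup of the metadata file above is done with raw streams plus IOUtils.copy. Assuming Commons IO is on the classpath (it already supplies IOUtils here), FileUtils.copyFile is a shorter equivalent that opens and closes both streams itself:

// Hypothetical replacement for the BufferedInputStream/BufferedOutputStream block above.
org.apache.commons.io.FileUtils.copyFile(metadataFile, metadataFileCopy);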

Example 70 with PropertiesConfiguration

Use of org.apache.commons.configuration.PropertiesConfiguration in project pinot by linkedin.

From the class SegmentV1V2ToV3FormatConverter, method createMetadataFile.

private void createMetadataFile(File currentDir, File v3Dir) throws ConfigurationException {
    File v2MetadataFile = new File(currentDir, V1Constants.MetadataKeys.METADATA_FILE_NAME);
    File v3MetadataFile = new File(v3Dir, V1Constants.MetadataKeys.METADATA_FILE_NAME);
    final PropertiesConfiguration properties = new PropertiesConfiguration(v2MetadataFile);
    // update the segment version
    properties.setProperty(V1Constants.MetadataKeys.Segment.SEGMENT_VERSION, SegmentVersion.v3.toString());
    properties.save(v3MetadataFile);
}
Also used : File(java.io.File) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration)
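
The load-edit-save round trip in createMetadataFile is the core PropertiesConfiguration pattern running through all of these examples. Below is a self-contained sketch of the same idea with hypothetical names; it is not part of Pinot:

import java.io.File;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;

public final class MetadataCopier {

    // Copy a properties file to a new location, overriding a single key on the way.
    public static void copyWithOverride(File source, File target, String key, String value) throws ConfigurationException {
        // The constructor loads and parses the source file immediately.
        PropertiesConfiguration properties = new PropertiesConfiguration(source);
        properties.setProperty(key, value);
        // save(File) writes the updated properties to the target; the source file is left untouched.
        properties.save(target);
    }
}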

Aggregations

PropertiesConfiguration (org.apache.commons.configuration.PropertiesConfiguration): 118
File (java.io.File): 38
Configuration (org.apache.commons.configuration.Configuration): 33
ConfigurationException (org.apache.commons.configuration.ConfigurationException): 33
IOException (java.io.IOException): 12
Test (org.testng.annotations.Test): 11
BeforeClass (org.testng.annotations.BeforeClass): 10
MetricsRegistry (com.yammer.metrics.core.MetricsRegistry): 9
IndexLoadingConfigMetadata (com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata): 8
FileBasedInstanceDataManager (com.linkedin.pinot.core.data.manager.offline.FileBasedInstanceDataManager): 8
FileInputStream (java.io.FileInputStream): 7
CompositeConfiguration (org.apache.commons.configuration.CompositeConfiguration): 7
URL (java.net.URL): 6
ServerQueryExecutorV1Impl (com.linkedin.pinot.core.query.executor.ServerQueryExecutorV1Impl): 5
ServerConf (com.linkedin.pinot.server.conf.ServerConf): 5
FileNotFoundException (java.io.FileNotFoundException): 5
Path (java.nio.file.Path): 5
HashMap (java.util.HashMap): 5
Properties (java.util.Properties): 5
CloudRuntimeException (com.cloud.utils.exception.CloudRuntimeException): 4