Example usage of org.apache.commons.configuration.PropertiesConfiguration in the LinkedIn Pinot project: class HelixBrokerStarter, method startDefault.
/**
 * Builds and starts a {@link HelixBrokerStarter} with default quickstart settings:
 * cluster name "quickstart", ZooKeeper at localhost:2122, broker query port 5001,
 * and a 500-second query timeout.
 *
 * @return the started broker instance
 * @throws Exception if the broker fails to initialize or start
 */
public static HelixBrokerStarter startDefault() throws Exception {
  final Configuration brokerConf = new PropertiesConfiguration();
  final int queryPort = 5001;
  brokerConf.addProperty(CommonConstants.Helix.KEY_OF_BROKER_QUERY_PORT, queryPort);
  brokerConf.addProperty("pinot.broker.timeoutMs", 500 * 1000L);
  return new HelixBrokerStarter("quickstart", "localhost:2122", brokerConf);
}
Example usage of org.apache.commons.configuration.PropertiesConfiguration in the LinkedIn Pinot project: class BrokerServerBuilderTest, method main.
/**
 * Entry point: builds a broker from the "broker.properties" classpath resource,
 * starts its network and HTTP layers, then reads commands from stdin until
 * "exit" (or end-of-stream) is seen.
 *
 * @param args unused
 * @throws Exception if the broker cannot be built or started
 */
public static void main(String[] args) throws Exception {
  PropertiesConfiguration config = new PropertiesConfiguration(
      new File(BrokerServerBuilderTest.class.getClassLoader().getResource("broker.properties").toURI()));
  final BrokerServerBuilder bld = new BrokerServerBuilder(config, null, null, null);
  bld.buildNetwork();
  bld.buildHTTP();
  bld.start();
  Runtime.getRuntime().addShutdownHook(new Thread() {
    @Override
    public void run() {
      try {
        bld.stop();
      } catch (Exception e) {
        // Best-effort shutdown; nothing more we can do from a shutdown hook.
        e.printStackTrace();
      }
    }
  });
  // try-with-resources releases the stdin reader; the original leaked it.
  try (BufferedReader br = new BufferedReader(new InputStreamReader(System.in))) {
    String command;
    // readLine() returns null at end-of-stream; the original NPE'd on that,
    // and kept looping forever even after "exit" stopped the broker.
    while ((command = br.readLine()) != null) {
      if ("exit".equals(command)) {
        bld.stop();
        break;
      }
    }
  }
}
Example usage of org.apache.commons.configuration.PropertiesConfiguration in the LinkedIn Pinot project: class TableDataManagerConfig, method getDefaultHelixTableDataManagerConfig.
/**
 * Builds the default {@link TableDataManagerConfig} for the given table using
 * instance-level defaults (data directory, read mode, multi-value count, etc.).
 *
 * @param instanceDataManagerConfig instance-wide data manager settings
 * @param tableName table name; must carry a suffix resolvable to a {@link TableType}
 * @return the populated table data manager config
 * @throws ConfigurationException if the underlying configuration cannot be built
 * @throws UnsupportedOperationException if the table type is neither OFFLINE nor REALTIME
 */
public static TableDataManagerConfig getDefaultHelixTableDataManagerConfig(InstanceDataManagerConfig instanceDataManagerConfig, String tableName) throws ConfigurationException {
  TableType tableType = TableNameBuilder.getTableTypeFromTableName(tableName);
  assert tableType != null;
  Configuration defaultConfig = new PropertiesConfiguration();
  defaultConfig.addProperty(TABLE_DATA_MANAGER_NAME, tableName);
  String dataDir = instanceDataManagerConfig.getInstanceDataDir() + "/" + tableName;
  defaultConfig.addProperty(TABLE_DATA_MANAGER_DATA_DIRECTORY, dataDir);
  defaultConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_COLUMN_MIN_MAX_VALUE_GENERATOR_MODE, ColumnMinMaxValueGeneratorMode.TIME.toString());
  if (instanceDataManagerConfig.getReadMode() != null) {
    defaultConfig.addProperty(READ_MODE, instanceDataManagerConfig.getReadMode().toString());
  } else {
    defaultConfig.addProperty(READ_MODE, ReadMode.heap);
  }
  int avgMultiValueCount = DEFAULT_REALTIME_AVG_MULTI_VALUE_COUNT;
  if (instanceDataManagerConfig.getAvgMultiValueCount() != null) {
    try {
      // parseInt avoids the needless boxing of Integer.valueOf(String).
      avgMultiValueCount = Integer.parseInt(instanceDataManagerConfig.getAvgMultiValueCount());
    } catch (NumberFormatException e) {
      // Malformed value: silently fall back to the default (prior behavior).
    }
  }
  // Autoboxing replaces the deprecated new Integer(...) constructor.
  defaultConfig.addProperty(REALTIME_AVG_MULTI_VALUE_COUNT, avgMultiValueCount);
  if (instanceDataManagerConfig.getSegmentFormatVersion() != null) {
    defaultConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_SEGMENT_FORMAT_VERSION, instanceDataManagerConfig.getSegmentFormatVersion());
  }
  if (instanceDataManagerConfig.isEnableDefaultColumns()) {
    defaultConfig.addProperty(IndexLoadingConfigMetadata.KEY_OF_ENABLE_DEFAULT_COLUMNS, true);
  }
  // Set the manager type BEFORE wrapping the config: the original constructed
  // TableDataManagerConfig first and relied on the wrapper aliasing the mutable
  // Configuration to observe this later mutation — fragile ordering.
  switch (tableType) {
    case OFFLINE:
      defaultConfig.addProperty(TABLE_DATA_MANAGER_TYPE, "offline");
      break;
    case REALTIME:
      defaultConfig.addProperty(TABLE_DATA_MANAGER_TYPE, "realtime");
      break;
    default:
      throw new UnsupportedOperationException("Not supported table type for - " + tableName);
  }
  return new TableDataManagerConfig(defaultConfig);
}
Example usage of org.apache.commons.configuration.PropertiesConfiguration in the LinkedIn Pinot project: class SegmentFormatConverterV1ToV2, method convert.
/**
 * Converts a v1 segment in place to the v2 on-disk format: each unsorted
 * forward index is rewritten with the v2 fixed-bit writer, and the segment
 * version recorded in the metadata file is bumped to v2. A ".orig" backup of
 * the metadata file is written next to the original.
 *
 * @param indexSegmentDir directory holding the v1 segment
 * @throws Exception on any read/write/conversion failure
 */
@Override
public void convert(File indexSegmentDir) throws Exception {
  SegmentMetadataImpl segmentMetadataImpl = new SegmentMetadataImpl(indexSegmentDir);
  SegmentDirectory segmentDirectory = SegmentDirectory.createFromLocalFS(indexSegmentDir, segmentMetadataImpl, ReadMode.mmap);
  Set<String> columns = segmentMetadataImpl.getAllColumns();
  SegmentDirectory.Writer segmentWriter = segmentDirectory.createWriter();
  for (String column : columns) {
    ColumnMetadata columnMetadata = segmentMetadataImpl.getColumnMetadataFor(column);
    if (columnMetadata.isSorted()) {
      // Sorted forward indexes are identical in v1 and v2; nothing to rewrite.
      continue;
    }
    PinotDataBuffer fwdIndexBuffer = segmentWriter.getIndexFor(column, ColumnIndexType.FORWARD_INDEX);
    // Dictionary-encoded values are non-negative, so readers run unsigned.
    // (The original declared this flag but passed a literal false in the
    // single-value branch; now the flag is used consistently in both branches.)
    final boolean signed = false;
    if (columnMetadata.isSingleValue() && !columnMetadata.isSorted()) {
      SingleColumnSingleValueReader v1Reader = new com.linkedin.pinot.core.io.reader.impl.v1.FixedBitSingleValueReader(fwdIndexBuffer, segmentMetadataImpl.getTotalDocs(), columnMetadata.getBitsPerElement(), signed);
      File convertedFwdIndexFile = new File(indexSegmentDir, column + V1Constants.Indexes.UN_SORTED_SV_FWD_IDX_FILE_EXTENTION + ".tmp");
      SingleColumnSingleValueWriter v2Writer = new com.linkedin.pinot.core.io.writer.impl.v2.FixedBitSingleValueWriter(convertedFwdIndexFile, segmentMetadataImpl.getTotalDocs(), columnMetadata.getBitsPerElement());
      for (int row = 0; row < segmentMetadataImpl.getTotalDocs(); row++) {
        int value = v1Reader.getInt(row);
        v2Writer.setInt(row, value);
      }
      v1Reader.close();
      v2Writer.close();
      // NOTE(review): the original also built an unused ".orig" File path here
      // without ever copying the forward index to it; that dead local is removed.
      segmentWriter.removeIndex(column, ColumnIndexType.FORWARD_INDEX);
      // FIXME: the (int) cast truncates index files larger than Integer.MAX_VALUE bytes.
      PinotDataBuffer newIndexBuffer = segmentWriter.newIndexFor(column, ColumnIndexType.FORWARD_INDEX, (int) convertedFwdIndexFile.length());
      newIndexBuffer.readFrom(convertedFwdIndexFile);
      convertedFwdIndexFile.delete();
    }
    if (!columnMetadata.isSingleValue()) {
      SingleColumnMultiValueReader v1Reader = new com.linkedin.pinot.core.io.reader.impl.v1.FixedBitMultiValueReader(fwdIndexBuffer, segmentMetadataImpl.getTotalDocs(), columnMetadata.getTotalNumberOfEntries(), columnMetadata.getBitsPerElement(), signed);
      File convertedFwdIndexFile = new File(indexSegmentDir, column + V1Constants.Indexes.UN_SORTED_MV_FWD_IDX_FILE_EXTENTION + ".tmp");
      SingleColumnMultiValueWriter v2Writer = new com.linkedin.pinot.core.io.writer.impl.v2.FixedBitMultiValueWriter(convertedFwdIndexFile, segmentMetadataImpl.getTotalDocs(), columnMetadata.getTotalNumberOfEntries(), columnMetadata.getBitsPerElement());
      // Scratch array sized for the widest row; each row is copied out to its
      // exact length before writing.
      int[] values = new int[columnMetadata.getMaxNumberOfMultiValues()];
      for (int row = 0; row < segmentMetadataImpl.getTotalDocs(); row++) {
        int length = v1Reader.getIntArray(row, values);
        int[] copy = new int[length];
        System.arraycopy(values, 0, copy, 0, length);
        v2Writer.setIntArray(row, copy);
      }
      v1Reader.close();
      v2Writer.close();
      segmentWriter.removeIndex(column, ColumnIndexType.FORWARD_INDEX);
      PinotDataBuffer newIndexBuffer = segmentWriter.newIndexFor(column, ColumnIndexType.FORWARD_INDEX, (int) convertedFwdIndexFile.length());
      newIndexBuffer.readFrom(convertedFwdIndexFile);
      convertedFwdIndexFile.delete();
    }
  }
  // Back up the metadata file before rewriting it with the new segment version.
  File metadataFile = new File(indexSegmentDir, V1Constants.MetadataKeys.METADATA_FILE_NAME);
  File metadataFileCopy = new File(indexSegmentDir, V1Constants.MetadataKeys.METADATA_FILE_NAME + ".orig");
  // try-with-resources guarantees both streams close even if the copy throws;
  // the original assigned undeclared bis/bos and leaked them on exception.
  try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(metadataFile));
      BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(metadataFileCopy))) {
    IOUtils.copy(bis, bos);
  }
  final PropertiesConfiguration properties = new PropertiesConfiguration(metadataFileCopy);
  // Update the segment version, then replace the original metadata file.
  properties.setProperty(V1Constants.MetadataKeys.Segment.SEGMENT_VERSION, SegmentVersion.v2.toString());
  metadataFile.delete();
  properties.save(metadataFile);
}
Example usage of org.apache.commons.configuration.PropertiesConfiguration in the LinkedIn Pinot project: class SegmentV1V2ToV3FormatConverter, method createMetadataFile.
/**
 * Writes the v3 metadata file: loads the existing v2 metadata properties from
 * {@code currentDir}, stamps the segment version as v3, and saves the result
 * into {@code v3Dir}.
 *
 * @param currentDir directory containing the existing v2 metadata file
 * @param v3Dir target v3 segment directory
 * @throws ConfigurationException if the properties cannot be read or written
 */
private void createMetadataFile(File currentDir, File v3Dir) throws ConfigurationException {
  final File sourceMetadata = new File(currentDir, V1Constants.MetadataKeys.METADATA_FILE_NAME);
  final File targetMetadata = new File(v3Dir, V1Constants.MetadataKeys.METADATA_FILE_NAME);
  final PropertiesConfiguration metadataProperties = new PropertiesConfiguration(sourceMetadata);
  metadataProperties.setProperty(V1Constants.MetadataKeys.Segment.SEGMENT_VERSION, SegmentVersion.v3.toString());
  metadataProperties.save(targetMetadata);
}
Aggregations