Use of org.testng.annotations.BeforeClass in project pinot by linkedin.
The class QueryExecutorTest, method setup:
@BeforeClass
public void setup() throws Exception {
  serverMetrics = new ServerMetrics(new MetricsRegistry());
  TableDataManagerProvider.setServerMetrics(serverMetrics);
  File confDir = new File(QueryExecutorTest.class.getClassLoader().getResource("conf").toURI());
  setupSegmentList(2);
  // ServerBuilder serverBuilder = new ServerBuilder(confDir.getAbsolutePath());
  String configFilePath = confDir.getAbsolutePath();
  // Build the server configuration from the pinot.properties file under conf/.
  PropertiesConfiguration serverConf = new PropertiesConfiguration();
  serverConf.setDelimiterParsingDisabled(false);
  serverConf.load(new File(configFilePath, PINOT_PROPERTIES));
  // Start the instance data manager and register the two test segments with the "midas" table.
  FileBasedInstanceDataManager instanceDataManager = FileBasedInstanceDataManager.getInstanceDataManager();
  instanceDataManager.init(new FileBasedInstanceDataManagerConfig(serverConf.subset("pinot.server.instance")));
  instanceDataManager.start();
  for (int i = 0; i < 2; ++i) {
    instanceDataManager.getTableDataManager("midas").addSegment(_indexSegmentList.get(i));
  }
  // Build the query executor under test.
  _queryExecutor = new ServerQueryExecutorV1Impl();
  _queryExecutor.init(serverConf.subset("pinot.server.query.executor"), instanceDataManager, new ServerMetrics(new MetricsRegistry()));
}
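A matching teardown is not part of this snippet; a minimal sketch in the same TestNG style could release the segments registered above. The destroy() cleanup hook on the segment is an assumption about the Pinot segment API, not confirmed by this listing.
@AfterClass
public void tearDown() {
  // Release the two segments registered in setup(); destroy() is an assumed cleanup hook.
  for (IndexSegment segment : _indexSegmentList) {
    segment.destroy();
  }
  _indexSegmentList.clear();
}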
Use of org.testng.annotations.BeforeClass in project pinot by linkedin.
The class SelectionOperatorServiceTest, method setUp:
@BeforeClass
public void setUp() {
  // SELECT * FROM table ORDER BY int DESC LIMIT 1, 2.
  _selectionOrderBy.setSelectionColumns(Arrays.asList(_columnNames));
  SelectionSort selectionSort = new SelectionSort();
  selectionSort.setColumn("int");
  selectionSort.setIsAsc(false);
  _selectionOrderBy.setSelectionSortSequence(Collections.singletonList(selectionSort));
  _selectionOrderBy.setSize(2);
  _selectionOrderBy.setOffset(1);
}
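For reference, a small TestNG check of the state built above might read as follows; the getter names (getOffset(), getSize(), getSelectionSortSequence()) are assumptions about the Thrift-generated Selection class, mirroring the setters used in setUp().
@Test
public void testOrderBySetup() {
  // Mirrors "ORDER BY int DESC LIMIT 1, 2" from the comment in setUp(); getters are assumed.
  Assert.assertEquals(_selectionOrderBy.getOffset(), 1);
  Assert.assertEquals(_selectionOrderBy.getSize(), 2);
  Assert.assertEquals(_selectionOrderBy.getSelectionSortSequence().size(), 1);
}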
Use of org.testng.annotations.BeforeClass in project pinot by linkedin.
The class HybridClusterIntegrationTest, method setUp:
@BeforeClass
public void setUp() throws Exception {
  // Clean up
  ensureDirectoryExistsAndIsEmpty(_tmpDir);
  ensureDirectoryExistsAndIsEmpty(_segmentDir);
  ensureDirectoryExistsAndIsEmpty(_tarDir);
  tableName = TABLE_NAME;
  // Start Zk, Kafka and Pinot
  startHybridCluster(10);
  // Unpack the Avro files
  TarGzCompressionUtils.unTar(new File(TestUtils.getFileFromResourceUrl(OfflineClusterIntegrationTest.class.getClassLoader().getResource("On_Time_On_Time_Performance_2014_100k_subset_nonulls.tar.gz"))), _tmpDir);
  _tmpDir.mkdirs();
  final List<File> avroFiles = getAllAvroFiles();
  File schemaFile = getSchemaFile();
  schema = Schema.fromFile(schemaFile);
  addSchema(schemaFile, schema.getSchemaName());
  final List<String> invertedIndexColumns = makeInvertedIndexColumns();
  final String sortedColumn = makeSortedColumn();
  // Create Pinot table
  addHybridTable(tableName, "DaysSinceEpoch", "daysSinceEpoch", KafkaStarterUtils.DEFAULT_ZK_STR, KAFKA_TOPIC, schema.getSchemaName(), TENANT_NAME, TENANT_NAME, avroFiles.get(0), sortedColumn, invertedIndexColumns, null, false);
  LOGGER.info("Running with Sorted column=" + sortedColumn + " and inverted index columns = " + invertedIndexColumns);
  // Create a subset of the first 8 segments (for offline) and the last 6 segments (for realtime)
  final List<File> offlineAvroFiles = getOfflineAvroFiles(avroFiles);
  final List<File> realtimeAvroFiles = getRealtimeAvroFiles(avroFiles);
  // Load data into H2
  ExecutorService executor = Executors.newCachedThreadPool();
  setupH2AndInsertAvro(avroFiles, executor);
  // Create segments from Avro data
  LOGGER.info("Creating offline segments from avro files " + offlineAvroFiles);
  buildSegmentsFromAvro(offlineAvroFiles, executor, 0, _segmentDir, _tarDir, tableName, false, null);
  // Initialize query generator
  setupQueryGenerator(avroFiles, executor);
  executor.shutdown();
  executor.awaitTermination(10, TimeUnit.MINUTES);
  // Set up a Helix spectator to count the uploaded segments and unlock the latch once all offline segments are online
  final CountDownLatch latch = new CountDownLatch(1);
  HelixManager manager = HelixManagerFactory.getZKHelixManager(getHelixClusterName(), "test_instance", InstanceType.SPECTATOR, ZkStarter.DEFAULT_ZK_STR);
  manager.connect();
  manager.addExternalViewChangeListener(new ExternalViewChangeListener() {
    @Override
    public void onExternalViewChange(List<ExternalView> externalViewList, NotificationContext changeContext) {
      for (ExternalView externalView : externalViewList) {
        if (externalView.getId().contains(tableName)) {
          Set<String> partitionSet = externalView.getPartitionSet();
          if (partitionSet.size() == offlineSegmentCount) {
            int onlinePartitionCount = 0;
            for (String partitionId : partitionSet) {
              Map<String, String> partitionStateMap = externalView.getStateMap(partitionId);
              if (partitionStateMap.containsValue("ONLINE")) {
                onlinePartitionCount++;
              }
            }
            if (onlinePartitionCount == offlineSegmentCount) {
              // System.out.println("Got " + offlineSegmentCount + " online tables, unlatching the main thread");
              latch.countDown();
            }
          }
        }
      }
    }
  });
  // Upload the segments
  int i = 0;
  for (String segmentName : _tarDir.list()) {
    // System.out.println("Uploading segment " + (i++) + " : " + segmentName);
    File file = new File(_tarDir, segmentName);
    FileUploadUtils.sendSegmentFile("localhost", "8998", segmentName, file, file.length());
  }
  // Wait for all offline segments to be online
  latch.await();
  // Load realtime data into Kafka
  LOGGER.info("Pushing data from realtime avro files " + realtimeAvroFiles);
  pushAvroIntoKafka(realtimeAvroFiles, KafkaStarterUtils.DEFAULT_KAFKA_BROKER, KAFKA_TOPIC);
  // Wait until the Pinot event count matches the number of events in the Avro files (as counted in H2)
  int pinotRecordCount, h2RecordCount;
  long timeInFiveMinutes = System.currentTimeMillis() + 5 * 60 * 1000L;
  Statement statement = _connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
  statement.execute("select count(*) from " + tableName);
  ResultSet rs = statement.getResultSet();
  rs.first();
  h2RecordCount = rs.getInt(1);
  rs.close();
  waitForRecordCountToStabilizeToExpectedCount(h2RecordCount, timeInFiveMinutes);
}
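The helper waitForRecordCountToStabilizeToExpectedCount(...) is referenced but not shown in this snippet. A plausible polling sketch is given below; getCurrentCountStarResult() is a hypothetical accessor assumed to run "select count(*)" against the Pinot broker, not a confirmed method of the test class.
// Hypothetical sketch of the polling helper used above; getCurrentCountStarResult()
// is an assumed accessor that returns Pinot's current COUNT(*) for the table.
private void waitForRecordCountToStabilizeToExpectedCount(int expectedCount, long deadlineMillis)
    throws InterruptedException {
  while (System.currentTimeMillis() < deadlineMillis) {
    if (getCurrentCountStarResult() == expectedCount) {
      return;
    }
    Thread.sleep(1000L);
  }
  Assert.fail("Pinot record count did not reach " + expectedCount + " before the deadline");
}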
Use of org.testng.annotations.BeforeClass in project pinot by linkedin.
The class BitmapInvertedIndexTest, method setup:
@BeforeClass
public void setup() throws Exception {
  final String filePath = TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
  if (INDEX_DIR.exists()) {
    FileUtils.deleteQuietly(INDEX_DIR);
  }
  final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(new File(filePath), INDEX_DIR, "time_day", TimeUnit.DAYS, "test");
  final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
  driver.init(config);
  driver.build();
  List<String> iiColumns = config.getInvertedIndexCreationColumns();
  invertedIndexColumns = new String[iiColumns.size()];
  iiColumns.toArray(invertedIndexColumns);
  segmentDirectory = new File(INDEX_DIR, driver.getSegmentName());
  // System.out.println("built at : " + INDEX_DIR.getAbsolutePath());
}
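A corresponding teardown is not included in this snippet; a minimal sketch in the same TestNG style, using only the INDEX_DIR field from setup and Apache Commons IO, would remove the generated segment so repeated runs start from a clean directory:
@AfterClass
public void tearDown() {
  // Delete the segment built in setup().
  FileUtils.deleteQuietly(INDEX_DIR);
}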
Use of org.testng.annotations.BeforeClass in project pinot by linkedin.
The class BlocksTest, method before:
@BeforeClass
public static void before() throws Exception {
  final String filePath = TestUtils.getFileFromResourceUrl(BlocksTest.class.getClassLoader().getResource(AVRO_DATA));
  if (INDEX_DIR.exists()) {
    FileUtils.deleteQuietly(INDEX_DIR);
  }
  // System.out.println(INDEX_DIR.getAbsolutePath());
  final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
  final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.DAYS, "test");
  config.setTimeColumnName("daysSinceEpoch");
  driver.init(config);
  driver.build();
  // Collect the column names from the Avro schema for use by the tests.
  final DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(new File(filePath));
  final org.apache.avro.Schema avroSchema = avroReader.getSchema();
  final String[] columns = new String[avroSchema.getFields().size()];
  int i = 0;
  for (final Field f : avroSchema.getFields()) {
    columns[i] = f.name();
    i++;
  }
  avroReader.close();
}
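The manual index loop above can also be written more compactly; a minimal equivalent sketch using the same Avro Schema API:
// Equivalent, more compact extraction of the Avro field names.
final String[] columns = avroSchema.getFields().stream()
    .map(org.apache.avro.Schema.Field::name)
    .toArray(String[]::new);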