Example 16 with BeforeClass

use of org.testng.annotations.BeforeClass in project pinot by linkedin.

the class QueryExecutorTest method setup.

@BeforeClass
public void setup() throws Exception {
    serverMetrics = new ServerMetrics(new MetricsRegistry());
    TableDataManagerProvider.setServerMetrics(serverMetrics);
    File confDir = new File(QueryExecutorTest.class.getClassLoader().getResource("conf").toURI());
    setupSegmentList(2);
    // ServerBuilder serverBuilder = new ServerBuilder(confDir.getAbsolutePath());
    String configFilePath = confDir.getAbsolutePath();
    // build _serverConf
    PropertiesConfiguration serverConf = new PropertiesConfiguration();
    serverConf.setDelimiterParsingDisabled(false);
    serverConf.load(new File(configFilePath, PINOT_PROPERTIES));
    FileBasedInstanceDataManager instanceDataManager = FileBasedInstanceDataManager.getInstanceDataManager();
    instanceDataManager.init(new FileBasedInstanceDataManagerConfig(serverConf.subset("pinot.server.instance")));
    instanceDataManager.start();
    for (int i = 0; i < 2; ++i) {
        instanceDataManager.getTableDataManager("midas");
        instanceDataManager.getTableDataManager("midas").addSegment(_indexSegmentList.get(i));
    }
    _queryExecutor = new ServerQueryExecutorV1Impl();
    _queryExecutor.init(serverConf.subset("pinot.server.query.executor"), instanceDataManager, new ServerMetrics(new MetricsRegistry()));
}
Also used : MetricsRegistry(com.yammer.metrics.core.MetricsRegistry) FileBasedInstanceDataManager(com.linkedin.pinot.core.data.manager.offline.FileBasedInstanceDataManager) ServerQueryExecutorV1Impl(com.linkedin.pinot.core.query.executor.ServerQueryExecutorV1Impl) ServerMetrics(com.linkedin.pinot.common.metrics.ServerMetrics) FileBasedInstanceDataManagerConfig(com.linkedin.pinot.core.data.manager.config.FileBasedInstanceDataManagerConfig) File(java.io.File) PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration) BeforeClass(org.testng.annotations.BeforeClass)
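All of these setup methods rely on TestNG running a @BeforeClass method exactly once per test class, before any @Test method in it. A minimal, self-contained sketch of that lifecycle (class, field, and test names here are illustrative, not taken from Pinot):

import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

public class LifecycleSketchTest {
    // Expensive state built once and shared by every @Test in this class.
    private StringBuilder sharedState;

    @BeforeClass
    public void setUp() {
        // Runs exactly once, before the first @Test method of this class.
        sharedState = new StringBuilder("initialized");
    }

    @Test
    public void testSharedStateIsVisible() {
        Assert.assertEquals(sharedState.toString(), "initialized");
    }

    @AfterClass
    public void tearDown() {
        // Runs once after the last @Test method; release shared resources here.
        sharedState = null;
    }
}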

Example 17 with BeforeClass

use of org.testng.annotations.BeforeClass in project pinot by linkedin.

the class SelectionOperatorServiceTest method setUp.

@BeforeClass
public void setUp() {
    // SELECT * FROM table ORDER BY int DESC LIMIT 1, 2.
    _selectionOrderBy.setSelectionColumns(Arrays.asList(_columnNames));
    SelectionSort selectionSort = new SelectionSort();
    selectionSort.setColumn("int");
    selectionSort.setIsAsc(false);
    _selectionOrderBy.setSelectionSortSequence(Collections.singletonList(selectionSort));
    _selectionOrderBy.setSize(2);
    _selectionOrderBy.setOffset(1);
}
Also used : SelectionSort(com.linkedin.pinot.common.request.SelectionSort) BeforeClass(org.testng.annotations.BeforeClass)
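The offset/size pair set above encodes the LIMIT 1, 2 clause from the comment: skip one row of the descending sort, then keep two. A small sketch of that paging semantics on a plain list (illustrative only, not Pinot's selection operator):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class OffsetSizeSketch {
    public static void main(String[] args) {
        // Illustrative data; the real test feeds rows through Pinot's selection operator.
        List<Integer> values = Arrays.asList(5, 3, 9, 1, 7);
        int offset = 1; // _selectionOrderBy.setOffset(1)
        int size = 2;   // _selectionOrderBy.setSize(2)

        // ORDER BY int DESC, then LIMIT offset, size: skip 1 row, keep 2 rows.
        List<Integer> page = values.stream()
            .sorted(Comparator.reverseOrder())
            .skip(offset)
            .limit(size)
            .collect(Collectors.toList());

        System.out.println(page); // prints [7, 5]
    }
}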

Example 18 with BeforeClass

use of org.testng.annotations.BeforeClass in project pinot by linkedin.

the class HybridClusterIntegrationTest method setUp.

@BeforeClass
public void setUp() throws Exception {
    //Clean up
    ensureDirectoryExistsAndIsEmpty(_tmpDir);
    ensureDirectoryExistsAndIsEmpty(_segmentDir);
    ensureDirectoryExistsAndIsEmpty(_tarDir);
    tableName = TABLE_NAME;
    // Start Zk, Kafka and Pinot
    startHybridCluster(10);
    // Unpack the Avro files
    TarGzCompressionUtils.unTar(new File(TestUtils.getFileFromResourceUrl(OfflineClusterIntegrationTest.class.getClassLoader().getResource("On_Time_On_Time_Performance_2014_100k_subset_nonulls.tar.gz"))), _tmpDir);
    _tmpDir.mkdirs();
    final List<File> avroFiles = getAllAvroFiles();
    File schemaFile = getSchemaFile();
    schema = Schema.fromFile(schemaFile);
    addSchema(schemaFile, schema.getSchemaName());
    final List<String> invertedIndexColumns = makeInvertedIndexColumns();
    final String sortedColumn = makeSortedColumn();
    // Create Pinot table
    addHybridTable(tableName, "DaysSinceEpoch", "daysSinceEpoch", KafkaStarterUtils.DEFAULT_ZK_STR, KAFKA_TOPIC, schema.getSchemaName(), TENANT_NAME, TENANT_NAME, avroFiles.get(0), sortedColumn, invertedIndexColumns, null, false);
    LOGGER.info("Running with Sorted column=" + sortedColumn + " and inverted index columns = " + invertedIndexColumns);
    // Create a subset of the first 8 segments (for offline) and the last 6 segments (for realtime)
    final List<File> offlineAvroFiles = getOfflineAvroFiles(avroFiles);
    final List<File> realtimeAvroFiles = getRealtimeAvroFiles(avroFiles);
    // Load data into H2
    ExecutorService executor = Executors.newCachedThreadPool();
    setupH2AndInsertAvro(avroFiles, executor);
    // Create segments from Avro data
    LOGGER.info("Creating offline segments from avro files " + offlineAvroFiles);
    buildSegmentsFromAvro(offlineAvroFiles, executor, 0, _segmentDir, _tarDir, tableName, false, null);
    // Initialize query generator
    setupQueryGenerator(avroFiles, executor);
    executor.shutdown();
    executor.awaitTermination(10, TimeUnit.MINUTES);
    // Set up a Helix spectator to count the number of segments that are uploaded and unlock the latch once 12 segments are online
    final CountDownLatch latch = new CountDownLatch(1);
    HelixManager manager = HelixManagerFactory.getZKHelixManager(getHelixClusterName(), "test_instance", InstanceType.SPECTATOR, ZkStarter.DEFAULT_ZK_STR);
    manager.connect();
    manager.addExternalViewChangeListener(new ExternalViewChangeListener() {

        @Override
        public void onExternalViewChange(List<ExternalView> externalViewList, NotificationContext changeContext) {
            for (ExternalView externalView : externalViewList) {
                if (externalView.getId().contains(tableName)) {
                    Set<String> partitionSet = externalView.getPartitionSet();
                    if (partitionSet.size() == offlineSegmentCount) {
                        int onlinePartitionCount = 0;
                        for (String partitionId : partitionSet) {
                            Map<String, String> partitionStateMap = externalView.getStateMap(partitionId);
                            if (partitionStateMap.containsValue("ONLINE")) {
                                onlinePartitionCount++;
                            }
                        }
                        if (onlinePartitionCount == offlineSegmentCount) {
                            //                System.out.println("Got " + offlineSegmentCount + " online tables, unlatching the main thread");
                            latch.countDown();
                        }
                    }
                }
            }
        }
    });
    // Upload the segments
    int i = 0;
    for (String segmentName : _tarDir.list()) {
        //      System.out.println("Uploading segment " + (i++) + " : " + segmentName);
        File file = new File(_tarDir, segmentName);
        FileUploadUtils.sendSegmentFile("localhost", "8998", segmentName, file, file.length());
    }
    // Wait for all offline segments to be online
    latch.await();
    // Load realtime data into Kafka
    LOGGER.info("Pushing data from realtime avro files " + realtimeAvroFiles);
    pushAvroIntoKafka(realtimeAvroFiles, KafkaStarterUtils.DEFAULT_KAFKA_BROKER, KAFKA_TOPIC);
    // Wait until the Pinot event count matches with the number of events in the Avro files
    int pinotRecordCount, h2RecordCount;
    long timeInFiveMinutes = System.currentTimeMillis() + 5 * 60 * 1000L;
    Statement statement = _connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    statement.execute("select count(*) from " + tableName);
    ResultSet rs = statement.getResultSet();
    rs.first();
    h2RecordCount = rs.getInt(1);
    rs.close();
    waitForRecordCountToStabilizeToExpectedCount(h2RecordCount, timeInFiveMinutes);
}
Also used : ExternalView(org.apache.helix.model.ExternalView) HelixManager(org.apache.helix.HelixManager) ResultSet(java.sql.ResultSet) Set(java.util.Set) Statement(java.sql.Statement) CountDownLatch(java.util.concurrent.CountDownLatch) NotificationContext(org.apache.helix.NotificationContext) ExecutorService(java.util.concurrent.ExecutorService) ExternalViewChangeListener(org.apache.helix.ExternalViewChangeListener) File(java.io.File) Map(java.util.Map) BeforeClass(org.testng.annotations.BeforeClass)
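Note that latch.await() in this setup blocks with no timeout, so a segment that never reaches ONLINE hangs the build. A bounded variant of the same wait pattern (the 10-minute figure is an assumption, not taken from the test):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;

public class BoundedLatchWaitSketch {
    public static void waitForSegments(CountDownLatch latch) throws InterruptedException {
        // Bound the wait so a missing segment fails the test instead of hanging the suite.
        boolean allOnline = latch.await(10, TimeUnit.MINUTES);
        Assert.assertTrue(allOnline, "Offline segments did not come ONLINE within 10 minutes");
    }
}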

Example 19 with BeforeClass

use of org.testng.annotations.BeforeClass in project pinot by linkedin.

the class BitmapInvertedIndexTest method setup.

@BeforeClass
public void setup() throws Exception {
    final String filePath = TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    if (INDEX_DIR.exists()) {
        FileUtils.deleteQuietly(INDEX_DIR);
    }
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(new File(filePath), INDEX_DIR, "time_day", TimeUnit.DAYS, "test");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
    List<String> iiColumns = config.getInvertedIndexCreationColumns();
    invertedIndexColumns = new String[iiColumns.size()];
    iiColumns.toArray(invertedIndexColumns);
    segmentDirectory = new File(INDEX_DIR, driver.getSegmentName());
//    System.out.println("built at : " + INDEX_DIR.getAbsolutePath());
}
Also used : SegmentIndexCreationDriver(com.linkedin.pinot.core.segment.creator.SegmentIndexCreationDriver) SegmentGeneratorConfig(com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig) File(java.io.File) BeforeClass(org.testng.annotations.BeforeClass)
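The setup deletes INDEX_DIR before building the segment, but the snippet shows no teardown, so the generated segment survives the run. A companion @AfterClass along these lines is a common cleanup; the INDEX_DIR path here is a placeholder, not the constant from the test:

import java.io.File;
import org.apache.commons.io.FileUtils;
import org.testng.annotations.AfterClass;

public class SegmentCleanupSketch {
    // Assumed to point at the same directory the @BeforeClass build wrote into.
    private static final File INDEX_DIR =
        new File(System.getProperty("java.io.tmpdir"), "BitmapInvertedIndexTest");

    @AfterClass
    public void tearDown() {
        // Mirror of the pre-build cleanup: remove the generated segment after the last test.
        FileUtils.deleteQuietly(INDEX_DIR);
    }
}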

Example 20 with BeforeClass

use of org.testng.annotations.BeforeClass in project pinot by linkedin.

the class BlocksTest method before.

@BeforeClass
public static void before() throws Exception {
    final String filePath = TestUtils.getFileFromResourceUrl(BlocksTest.class.getClassLoader().getResource(AVRO_DATA));
    if (INDEX_DIR.exists()) {
        FileUtils.deleteQuietly(INDEX_DIR);
    }
    //    System.out.println(INDEX_DIR.getAbsolutePath());
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.DAYS, "test");
    config.setTimeColumnName("daysSinceEpoch");
    driver.init(config);
    driver.build();
    final DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(new File(filePath));
    final org.apache.avro.Schema avroSchema = avroReader.getSchema();
    final String[] columns = new String[avroSchema.getFields().size()];
    int i = 0;
    for (final Field f : avroSchema.getFields()) {
        columns[i] = f.name();
        i++;
    }
}
Also used : SegmentIndexCreationDriver(com.linkedin.pinot.core.segment.creator.SegmentIndexCreationDriver) Field(org.apache.avro.Schema.Field) SegmentGeneratorConfig(com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig) GenericRecord(org.apache.avro.generic.GenericRecord) File(java.io.File) BeforeClass(org.testng.annotations.BeforeClass)
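Unlike the earlier examples, BlocksTest.before() is static. TestNG accepts both static and instance @BeforeClass methods, but state written by a static configuration method should itself be static so @Test methods can see it. A minimal sketch (column names are illustrative, not tied to the Avro schema above):

import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

public class StaticBeforeClassSketch {
    // State initialized by a static configuration method is held in static fields.
    private static String[] columns;

    @BeforeClass
    public static void before() {
        columns = new String[] { "daysSinceEpoch", "origin", "dest" };
    }

    @Test
    public void testColumnsPopulated() {
        Assert.assertEquals(columns.length, 3);
    }
}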

Aggregations

BeforeClass (org.testng.annotations.BeforeClass): 1186 uses
Series (com.axibase.tsd.api.model.series.Series): 178 uses
File (java.io.File): 157 uses
ArrayList (java.util.ArrayList): 78 uses
HashMap (java.util.HashMap): 43 uses
ClusterControllerManager (org.apache.helix.integration.manager.ClusterControllerManager): 37 uses
Injector (com.google.inject.Injector): 36 uses
Properties (java.util.Properties): 35 uses
Path (org.apache.hadoop.fs.Path): 33 uses
ClusterSetup (org.apache.helix.tools.ClusterSetup): 33 uses
Configuration (org.apache.hadoop.conf.Configuration): 31 uses
BigDecimal (java.math.BigDecimal): 29 uses
URL (java.net.URL): 28 uses
MockParticipantManager (org.apache.helix.integration.manager.MockParticipantManager): 27 uses
Date (java.util.Date): 26 uses
Server (org.eclipse.jetty.server.Server): 25 uses
ServerConnector (org.eclipse.jetty.server.ServerConnector): 25 uses
ODatabaseDocumentTx (com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx): 23 uses
IOException (java.io.IOException): 23 uses
Path (java.nio.file.Path): 23 uses