Search in sources :

Example 6 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

The class RuleGeneratorTest, method testSequentialDataSequence.

@Test
public void testSequentialDataSequence() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    // Locate the first column in the data model configured with a
    // SEQUENTIAL data-sequence rule; the scenario is expected to define one.
    Column targetColumn = null;
    for (Column column : model.getDataMappingColumns()) {
        DataSequence sequence = column.getDataSequence();
        if (sequence == DataSequence.SEQUENTIAL) {
            targetColumn = column;
            break;
        }
    }
    // Fixed typo in failure message: "SEQENTIAL" -> "SEQUENTIAL".
    assertNotNull("Could not find a DataSequence.SEQUENTIAL rule.", targetColumn);
    assertMultiThreadedIncrementValue(targetColumn, rulesApplier);
}
Also used : RulesApplier(org.apache.phoenix.pherf.rules.RulesApplier) Column(org.apache.phoenix.pherf.configuration.Column) DataModel(org.apache.phoenix.pherf.configuration.DataModel) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) XMLConfigParser(org.apache.phoenix.pherf.configuration.XMLConfigParser) DataSequence(org.apache.phoenix.pherf.configuration.DataSequence) Test(org.junit.Test)

Example 7 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

The class RuleGeneratorTest, method testValueListRule.

@Test
public void testValueListRule() throws Exception {
    // Values the VALUELIST rule for PARENT_ID is expected to draw from.
    // Diamond operator added: the original used a raw ArrayList.
    List<String> expectedValues = new ArrayList<>();
    expectedValues.add("aAAyYhnNbBs9kWk");
    expectedValues.add("bBByYhnNbBs9kWu");
    expectedValues.add("cCCyYhnNbBs9kWr");
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    Scenario scenario = parser.getScenarios().get(0);
    Column simPhxCol = new Column();
    simPhxCol.setName("PARENT_ID");
    simPhxCol.setType(DataTypeMapping.CHAR);
    // Run this 10 times gives a reasonable chance that all the values will appear at least once
    for (int i = 0; i < 10; i++) {
        DataValue value = rulesApplier.getDataForRule(scenario, simPhxCol);
        assertTrue("Got a value not in the list for the rule. :" + value.getValue(), expectedValues.contains(value.getValue()));
    }
}
Also used : RulesApplier(org.apache.phoenix.pherf.rules.RulesApplier) Column(org.apache.phoenix.pherf.configuration.Column) DataValue(org.apache.phoenix.pherf.rules.DataValue) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) ArrayList(java.util.ArrayList) XMLConfigParser(org.apache.phoenix.pherf.configuration.XMLConfigParser) Scenario(org.apache.phoenix.pherf.configuration.Scenario) Test(org.junit.Test)

Example 8 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

The class RuleGeneratorTest, method testRuleOverrides.

@Test
public void testRuleOverrides() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    Scenario scenario = parser.getScenarios().get(0);
    // We should be able to find the correct rule based only on Type and Name combination.
    // NOTE: assertEquals arguments reordered to JUnit's (message, expected, actual)
    // contract so failure messages report expected/actual correctly.
    // Test CHAR
    Column simPhxCol = new Column();
    simPhxCol.setName("OTHER_ID");
    simPhxCol.setType(DataTypeMapping.CHAR);
    // Get the rule we expect to match
    Column rule = rulesApplier.getRule(simPhxCol);
    assertEquals("Did not find the correct rule.", simPhxCol.getName(), rule.getName());
    assertEquals("Did not find the matching rule type.", simPhxCol.getType(), rule.getType());
    assertEquals("Rule contains incorrect length.", 8, rule.getLength());
    assertEquals("Rule contains incorrect prefix.", "z0Oxx00", rule.getPrefix());
    DataValue value = rulesApplier.getDataForRule(scenario, simPhxCol);
    assertEquals("Value returned does not match rule.", 8, value.getValue().length());
    // Test VARCHAR with RANDOM and prefix
    simPhxCol.setName("OLDVAL_STRING");
    simPhxCol.setType(DataTypeMapping.VARCHAR);
    // Get the rule we expect to match
    rule = rulesApplier.getRule(simPhxCol);
    assertEquals("Did not find the correct rule.", simPhxCol.getName(), rule.getName());
    assertEquals("Did not find the matching rule type.", simPhxCol.getType(), rule.getType());
    assertEquals("Rule contains incorrect length.", 10, rule.getLength());
    assertEquals("Rule contains incorrect prefix.", "MYPRFX", rule.getPrefix());
    value = rulesApplier.getDataForRule(scenario, simPhxCol);
    assertEquals("Value returned does not match rule.", 10, value.getValue().length());
    // Message fixed: it previously read "Value returned start with prefix.",
    // which described success rather than the failure condition.
    assertTrue("Value returned does not start with prefix.", StringUtils.startsWith(value.getValue(), rule.getPrefix()));
}
Also used : RulesApplier(org.apache.phoenix.pherf.rules.RulesApplier) Column(org.apache.phoenix.pherf.configuration.Column) DataValue(org.apache.phoenix.pherf.rules.DataValue) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) XMLConfigParser(org.apache.phoenix.pherf.configuration.XMLConfigParser) Scenario(org.apache.phoenix.pherf.configuration.Scenario) Test(org.junit.Test)

Example 9 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

The class DataIngestIT, method testColumnRulesApplied.

@Test
public void testColumnRulesApplied() throws Exception {
    // Declaring "throws Exception" and letting failures propagate preserves
    // the full stack trace; the previous catch-all reported only
    // e.getMessage(), which hid the failure location.
    Scenario scenario = parser.getScenarioByName("testScenario");
    List<Column> columnListFromPhoenix = util.getColumnsFromPhoenix(scenario.getSchemaName(), scenario.getTableNameWithoutSchemaName(), util.getConnection());
    assertTrue("Could not get phoenix columns.", columnListFromPhoenix.size() > 0);
    // Load data for the scenario and wait for the write workload to finish.
    WriteWorkload loader = new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO);
    WorkloadExecutor executor = new WorkloadExecutor();
    executor.add(loader);
    executor.get();
    executor.shutdown();
    RulesApplier rulesApplier = loader.getRulesApplier();
    List<Map> modelList = rulesApplier.getModelList();
    assertTrue("Could not generate the modelList", modelList.size() > 0);
    for (Column column : columnListFromPhoenix) {
        DataValue data = rulesApplier.getDataForRule(scenario, column);
        // We are generating data values
        // so the value should have been specified by this point.
        assertTrue("Failed to retrieve data for column type: " + column.getType(), data != null);
        // so we should get the default rule.
        if ((column.getType() == DataTypeMapping.VARCHAR) && (column.getName().equals("NEWVAL_STRING"))) {
            assertTrue("Failed to retrieve data for column type: ", data.getDistribution() == Integer.MIN_VALUE);
        }
    }
    // Run some queries
    executor = new WorkloadExecutor();
    Workload query = new QueryExecutor(parser, util, executor);
    executor.add(query);
    executor.get();
    executor.shutdown();
    PhoenixUtil.create().deleteTables("ALL");
}
Also used : RulesApplier(org.apache.phoenix.pherf.rules.RulesApplier) Column(org.apache.phoenix.pherf.configuration.Column) DataValue(org.apache.phoenix.pherf.rules.DataValue) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) QueryExecutor(org.apache.phoenix.pherf.workload.QueryExecutor) WorkloadExecutor(org.apache.phoenix.pherf.workload.WorkloadExecutor) Map(java.util.Map) SQLException(java.sql.SQLException) Scenario(org.apache.phoenix.pherf.configuration.Scenario) Workload(org.apache.phoenix.pherf.workload.Workload) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) Test(org.junit.Test)

Example 10 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

The class Pherf, method run.

/**
 * Main entry point for a Pherf run. Depending on the parsed command-line
 * options this lists bundled schema/scenario resources, compares prior
 * results, or executes the schema-apply, data-load, and query phases,
 * shutting down the workload thread pool when finished.
 *
 * @throws Exception if any phase of the run fails
 */
public void run() throws Exception {
    MonitorManager monitorManager = null;
    List<Workload> workloads = new ArrayList<>();
    WorkloadExecutor workloadExecutor = new WorkloadExecutor(properties, workloads, !isFunctional);
    try {
        // List available schema and scenario resource files, then exit.
        if (listFiles) {
            ResourceList list = new ResourceList(PherfConstants.RESOURCE_DATAMODEL);
            Collection<Path> schemaFiles = list.getResourceList(PherfConstants.SCHEMA_ROOT_PATTERN + ".sql");
            System.out.println("Schema Files:");
            for (Path path : schemaFiles) {
                System.out.println(path);
            }
            list = new ResourceList(PherfConstants.RESOURCE_SCENARIO);
            Collection<Path> scenarioFiles = list.getResourceList(PherfConstants.SCENARIO_ROOT_PATTERN + ".xml");
            System.out.println("Scenario Files:");
            for (Path path : scenarioFiles) {
                System.out.println(path);
            }
            return;
        }
        // Compare results and exit
        if (null != compareResults) {
            logger.info("\nStarting to compare results and exiting for " + compareResults);
            new GoogleChartGenerator(compareResults, compareType).readAndRender();
            return;
        }
        XMLConfigParser parser = new XMLConfigParser(scenarioFile);
        // Drop tables with PHERF schema and regex comparison
        if (null != dropPherfTablesRegEx) {
            // Fixed typo in log message: "namename" -> "name".
            logger.info("\nDropping existing table with PHERF name and " + dropPherfTablesRegEx + " regex expression.");
            phoenixUtil.deleteTables(dropPherfTablesRegEx);
        }
        if (monitor) {
            monitorManager = new MonitorManager(Integer.parseInt(properties.getProperty("pherf.default.monitorFrequency")));
            workloadExecutor.add(monitorManager);
        }
        if (applySchema) {
            logger.info("\nStarting to apply schema...");
            SchemaReader reader = (schemaFile == null) ? new SchemaReader(".*.sql") : new SchemaReader(schemaFile);
            reader.applySchema();
        }
        // Schema and Data Load
        if (preLoadData) {
            logger.info("\nStarting Data Load...");
            Workload workload = new WriteWorkload(parser, generateStatistics);
            try {
                workloadExecutor.add(workload);
                // Wait for dataLoad to complete
                workloadExecutor.get(workload);
            } finally {
                if (null != workload) {
                    workload.complete();
                }
            }
        } else {
            logger.info("\nSKIPPED: Data Load and schema creation as -l argument not specified");
        }
        // Execute multi-threaded query sets
        if (executeQuerySets) {
            logger.info("\nStarting to apply Execute Queries...");
            workloadExecutor.add(new QueryExecutor(parser, phoenixUtil, workloadExecutor, parser.getDataModels(), queryHint, isFunctional, writeRuntimeResults));
        } else {
            logger.info("\nSKIPPED: Multithreaded query set execution as -q argument not specified");
        }
        // Clean up the monitor explicitly
        if (monitorManager != null) {
            logger.info("Run completed. Shutting down Monitor.");
            monitorManager.complete();
        }
        // Collect any final jobs
        workloadExecutor.get();
    } finally {
        if (workloadExecutor != null) {
            logger.info("Run completed. Shutting down thread pool.");
            workloadExecutor.shutdown();
        }
    }
}
Also used : Path(java.nio.file.Path) SchemaReader(org.apache.phoenix.pherf.schema.SchemaReader) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) ArrayList(java.util.ArrayList) XMLConfigParser(org.apache.phoenix.pherf.configuration.XMLConfigParser) WorkloadExecutor(org.apache.phoenix.pherf.workload.WorkloadExecutor) Workload(org.apache.phoenix.pherf.workload.Workload) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) MonitorManager(org.apache.phoenix.pherf.jmx.MonitorManager) ResourceList(org.apache.phoenix.pherf.util.ResourceList) GoogleChartGenerator(org.apache.phoenix.pherf.util.GoogleChartGenerator) QueryExecutor(org.apache.phoenix.pherf.workload.QueryExecutor)

Aggregations

WriteWorkload (org.apache.phoenix.pherf.workload.WriteWorkload)10 Test (org.junit.Test)9 Column (org.apache.phoenix.pherf.configuration.Column)7 XMLConfigParser (org.apache.phoenix.pherf.configuration.XMLConfigParser)7 RulesApplier (org.apache.phoenix.pherf.rules.RulesApplier)7 DataValue (org.apache.phoenix.pherf.rules.DataValue)6 Scenario (org.apache.phoenix.pherf.configuration.Scenario)5 DataModel (org.apache.phoenix.pherf.configuration.DataModel)4 WorkloadExecutor (org.apache.phoenix.pherf.workload.WorkloadExecutor)4 SQLException (java.sql.SQLException)3 ArrayList (java.util.ArrayList)3 QueryExecutor (org.apache.phoenix.pherf.workload.QueryExecutor)2 Workload (org.apache.phoenix.pherf.workload.Workload)2 Path (java.nio.file.Path)1 Map (java.util.Map)1 DataSequence (org.apache.phoenix.pherf.configuration.DataSequence)1 MonitorManager (org.apache.phoenix.pherf.jmx.MonitorManager)1 SchemaReader (org.apache.phoenix.pherf.schema.SchemaReader)1 GoogleChartGenerator (org.apache.phoenix.pherf.util.GoogleChartGenerator)1 ResourceList (org.apache.phoenix.pherf.util.ResourceList)1