
Example 1 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

the class RuleGeneratorTest method testDateGenerator.

@Test
public void testDateGenerator() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    for (Column dataMapping : model.getDataMappingColumns()) {
        if ((dataMapping.getType() == DataTypeMapping.DATE) && (dataMapping.getName().equals("CREATED_DATE"))) {
            // Test the generator method directly and verify that it converts to a Phoenix type
            assertRandomDateValue(dataMapping, rulesApplier);
            // Do this 20 times and we should hit each possibility at least once.
            for (int i = 0; i < 20; i++) {
                DataValue value = rulesApplier.getDataValue(dataMapping);
                assertNotNull("Could not retrieve DataValue for random DATE.", value);
                assertNotNull("Could not retrieve a value in DataValue for random DATE.", value.getValue());
                if (value.getMinValue() != null) {
                    // Check that dates are between min/max
                    assertDateBetween(value);
                }
            }
        }
    }
}
Also used : RulesApplier(org.apache.phoenix.pherf.rules.RulesApplier) Column(org.apache.phoenix.pherf.configuration.Column) DataValue(org.apache.phoenix.pherf.rules.DataValue) DataModel(org.apache.phoenix.pherf.configuration.DataModel) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) XMLConfigParser(org.apache.phoenix.pherf.configuration.XMLConfigParser) Test(org.junit.Test)
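
The assertions above rely on a helper, assertDateBetween, that is defined elsewhere in RuleGeneratorTest and not shown here. As a rough illustration only, a minimal sketch of such a check might look like the following; the yyyy-MM-dd HH:mm:ss.SSS pattern and the use of SimpleDateFormat are assumptions, and the real helper may differ.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.phoenix.pherf.rules.DataValue;

import static org.junit.Assert.assertTrue;

// Hypothetical sketch of the assertDateBetween(...) check used in the test above.
// The date pattern is an assumption; the real helper may parse dates differently.
private static void assertDateBetween(DataValue value) throws ParseException {
    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    Date actual = df.parse(value.getValue());
    Date min = df.parse(value.getMinValue());
    Date max = df.parse(value.getMaxValue());
    assertTrue("Generated date is before the configured minimum", !actual.before(min));
    assertTrue("Generated date is after the configured maximum", !actual.after(max));
}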

Example 2 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

the class RuleGeneratorTest method testNullChance.

@Test
public void testNullChance() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    int sampleSize = 100;
    List<String> values = new ArrayList<>(sampleSize);
    for (Column dataMapping : model.getDataMappingColumns()) {
        DataValue value = rulesApplier.getDataValue(dataMapping);
        if (dataMapping.getNullChance() == 0) {
            // 0 chance of getting null means we should never have an empty string returned
            assertFalse("", value.getValue().equals(""));
        } else if (dataMapping.getNullChance() == 100) {
            // 100 chance of getting null means we should always have an empty string returned
            assertTrue("", value.getValue().equals(""));
        } else if ((dataMapping.getNullChance() == 90)) {
            // You can't really test for this, but you can eyeball it on debugging.
            for (int i = 0; i < sampleSize; i++) {
                DataValue tVal = rulesApplier.getDataValue(dataMapping);
                values.add(tVal.getValue());
            }
            Collections.sort(values);
        }
    }
}
Also used : RulesApplier(org.apache.phoenix.pherf.rules.RulesApplier) Column(org.apache.phoenix.pherf.configuration.Column) DataValue(org.apache.phoenix.pherf.rules.DataValue) DataModel(org.apache.phoenix.pherf.configuration.DataModel) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) XMLConfigParser(org.apache.phoenix.pherf.configuration.XMLConfigParser) ArrayList(java.util.ArrayList) Test(org.junit.Test)
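
The 90% branch above only collects and sorts values for manual inspection while debugging. A hypothetical way to make it self-checking (not part of the original test) is to count empty values over the sample and allow a generous tolerance around the configured chance, since the generator is random:

// Hypothetical replacement for the "eyeball it" branch: count empty values over the
// sample and assert the observed rate is roughly the configured 90%, with a wide
// tolerance to keep the randomized test stable. Not part of the original test.
int nullCount = 0;
for (int i = 0; i < sampleSize; i++) {
    DataValue tVal = rulesApplier.getDataValue(dataMapping);
    if (tVal.getValue().isEmpty()) {
        nullCount++;
    }
}
double observedChance = (100.0 * nullCount) / sampleSize;
assertTrue("Observed null rate " + observedChance + "% is far from the configured 90%",
        Math.abs(observedChance - 90.0) < 15.0);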

Example 3 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

the class RuleGeneratorTest method testCurrentDateGenerator.

// Test that the current date is generated correctly between two timestamps, both at the column level and at the DataValue level
@Test
public void testCurrentDateGenerator() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    // Time before generating the date
    String timeStamp1 = rulesApplier.getCurrentDate();
    // Sleep for a few milliseconds
    sleep(2);
    for (Column dataMapping : model.getDataMappingColumns()) {
        if ((dataMapping.getType() == DataTypeMapping.DATE) && dataMapping.getUseCurrentDate()) {
            // Generate the date using rules
            DataValue value = rulesApplier.getDataValue(dataMapping);
            assertNotNull("Could not retrieve DataValue for random DATE.", value);
            assertNotNull("Could not retrieve a value in DataValue for random DATE.", value.getValue());
            sleep(2);
            // Time after generating the date
            String timeStamp2 = rulesApplier.getCurrentDate();
            // Check that dates are between timestamp1 & timestamp2
            value.setMinValue(timeStamp1);
            value.setMaxValue(timeStamp2);
            assertDateBetween(value);
        }
        // Check at list level
        if ((dataMapping.getType() == DataTypeMapping.DATE) && (dataMapping.getName().equals("PRESENT_DATE"))) {
            // Every generated date should fall between the two timestamps
            for (int i = 0; i < 1; i++) {
                DataValue value = rulesApplier.getDataValue(dataMapping);
                assertNotNull("Could not retrieve DataValue for random DATE.", value);
                assertNotNull("Could not retrieve a value in DataValue for random DATE.", value.getValue());
                sleep(2);
                // Time after generating the date
                String timeStamp2 = rulesApplier.getCurrentDate();
                // Check generated date is between timestamp1 & timestamp2
                value.setMinValue(timeStamp1);
                value.setMaxValue(timeStamp2);
                assertDateBetween(value);
            }
        }
    }
}
Also used : RulesApplier(org.apache.phoenix.pherf.rules.RulesApplier) Column(org.apache.phoenix.pherf.configuration.Column) DataValue(org.apache.phoenix.pherf.rules.DataValue) DataModel(org.apache.phoenix.pherf.configuration.DataModel) WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) XMLConfigParser(org.apache.phoenix.pherf.configuration.XMLConfigParser) Test(org.junit.Test)
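
The test brackets each generated value between two calls to rulesApplier.getCurrentDate(), separated by short sleeps so the window is non-empty. As a rough illustration only, such a method could simply format the current system time with the same pattern used for generated DATE values; the pattern below is an assumption and the real RulesApplier implementation may differ.

import java.text.SimpleDateFormat;
import java.util.Date;

// Hypothetical sketch of what a getCurrentDate()-style method could return: the current
// system time rendered with the same pattern as generated DATE values. The pattern is an
// assumption; the real RulesApplier may use a different format or clock source.
public String getCurrentDate() {
    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    return df.format(new Date());
}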

Example 4 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

the class DataIngestIT method testMultiTenantViewWriteWorkload.

/**
 * Validates that Pherf can write data to a Multi-Tenant View in addition to
 * standard Phoenix tables.
 */
@Test
public void testMultiTenantViewWriteWorkload() throws Exception {
    // Arrange
    Scenario scenario = parser.getScenarioByName("testMTWriteScenario");
    WorkloadExecutor executor = new WorkloadExecutor();
    executor.add(new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO));
    // Act
    try {
        // Wait for data to load up.
        executor.get();
        executor.shutdown();
    } catch (Exception e) {
        fail("Failed to load data. An exception was thrown: " + e.getMessage());
    }
    assertExpectedNumberOfRecordsWritten(scenario);
}
Also used : WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) WorkloadExecutor(org.apache.phoenix.pherf.workload.WorkloadExecutor) SQLException(java.sql.SQLException) Scenario(org.apache.phoenix.pherf.configuration.Scenario) Test(org.junit.Test)
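
The final assertion delegates to assertExpectedNumberOfRecordsWritten, defined elsewhere in DataIngestIT. A minimal sketch of such a check is shown below; it assumes the Scenario exposes the configured row count and target table name via getRowCount() and getTableName(), and that util hands out a JDBC connection via getConnection(). These are assumptions about the Pherf API rather than confirmed signatures.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.phoenix.pherf.configuration.Scenario;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

// Hypothetical sketch: count the rows actually written to the scenario's target table
// and compare against the configured row count. getTableName(), getRowCount() and
// util.getConnection() are assumed names, not confirmed Pherf APIs.
private void assertExpectedNumberOfRecordsWritten(Scenario scenario) throws Exception {
    String query = "SELECT COUNT(*) FROM " + scenario.getTableName();
    try (Connection conn = util.getConnection();
            Statement stmt = conn.createStatement();
            ResultSet rs = stmt.executeQuery(query)) {
        assertTrue("COUNT(*) returned no rows", rs.next());
        assertEquals("Unexpected number of records written",
                (long) scenario.getRowCount(), rs.getLong(1));
    }
}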

Example 5 with WriteWorkload

use of org.apache.phoenix.pherf.workload.WriteWorkload in project phoenix by apache.

the class DataIngestIT method testMultiTenantScenarioRunBeforeWriteWorkload.

@Test
public void testMultiTenantScenarioRunBeforeWriteWorkload() throws Exception {
    // Arrange
    Scenario scenario = parser.getScenarioByName("testMTDdlWriteScenario");
    WorkloadExecutor executor = new WorkloadExecutor();
    executor.add(new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO));
    // Act
    try {
        // Wait for data to load up.
        executor.get();
        executor.shutdown();
    } catch (Exception e) {
        fail("Failed to load data. An exception was thrown: " + e.getMessage());
    }
    assertExpectedNumberOfRecordsWritten(scenario);
}
Also used : WriteWorkload(org.apache.phoenix.pherf.workload.WriteWorkload) WorkloadExecutor(org.apache.phoenix.pherf.workload.WorkloadExecutor) SQLException(java.sql.SQLException) Scenario(org.apache.phoenix.pherf.configuration.Scenario) Test(org.junit.Test)
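
Both DataIngestIT tests follow the same pattern: look up a scenario by name, submit a WriteWorkload to a WorkloadExecutor, block until it completes, and shut the executor down. A condensed, hypothetical version of that pattern (the scenario name is a placeholder, not one bundled with Pherf) looks like this:

// Condensed form of the pattern used by the two tests above. "someWriteScenario" is a
// placeholder name, not an actual Pherf scenario.
Scenario scenario = parser.getScenarioByName("someWriteScenario");
WorkloadExecutor executor = new WorkloadExecutor();
executor.add(new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO));
try {
    // Block until the write workload has finished loading data.
    executor.get();
} finally {
    // Always release the executor's worker threads.
    executor.shutdown();
}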

Aggregations

WriteWorkload (org.apache.phoenix.pherf.workload.WriteWorkload): 10
Test (org.junit.Test): 9
Column (org.apache.phoenix.pherf.configuration.Column): 7
XMLConfigParser (org.apache.phoenix.pherf.configuration.XMLConfigParser): 7
RulesApplier (org.apache.phoenix.pherf.rules.RulesApplier): 7
DataValue (org.apache.phoenix.pherf.rules.DataValue): 6
Scenario (org.apache.phoenix.pherf.configuration.Scenario): 5
DataModel (org.apache.phoenix.pherf.configuration.DataModel): 4
WorkloadExecutor (org.apache.phoenix.pherf.workload.WorkloadExecutor): 4
SQLException (java.sql.SQLException): 3
ArrayList (java.util.ArrayList): 3
QueryExecutor (org.apache.phoenix.pherf.workload.QueryExecutor): 2
Workload (org.apache.phoenix.pherf.workload.Workload): 2
Path (java.nio.file.Path): 1
Map (java.util.Map): 1
DataSequence (org.apache.phoenix.pherf.configuration.DataSequence): 1
MonitorManager (org.apache.phoenix.pherf.jmx.MonitorManager): 1
SchemaReader (org.apache.phoenix.pherf.schema.SchemaReader): 1
GoogleChartGenerator (org.apache.phoenix.pherf.util.GoogleChartGenerator): 1
ResourceList (org.apache.phoenix.pherf.util.ResourceList): 1