Use of org.apache.phoenix.pherf.configuration.DataModel in project phoenix by apache.
From the class RuleGeneratorTest, method testDateGenerator.
@Test
public void testDateGenerator() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    for (Column dataMapping : model.getDataMappingColumns()) {
        if ((dataMapping.getType() == DataTypeMapping.DATE) && (dataMapping.getName().equals("CREATED_DATE"))) {
            // Test directly through generator method and that it converts to Phoenix type
            assertRandomDateValue(dataMapping, rulesApplier);
            // Do this 20 times and we should hit each possibility at least once.
            for (int i = 0; i < 20; i++) {
                DataValue value = rulesApplier.getDataValue(dataMapping);
                assertNotNull("Could not retrieve DataValue for random DATE.", value);
                assertNotNull("Could not retrieve a value in DataValue for random DATE.", value.getValue());
                if (value.getMinValue() != null) {
                    // Check that dates are between min/max
                    assertDateBetween(value);
                }
            }
        }
    }
}
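The loop above relies on an assertDateBetween(DataValue) helper that is defined elsewhere in RuleGeneratorTest and not shown in this snippet. A minimal sketch of what such a check could look like, assuming the DataValue getters used above and a "yyyy-MM-dd HH:mm:ss.SSS" date pattern (the pattern and the java.time parsing are assumptions, not taken from the Pherf source):
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

// Hypothetical re-creation for illustration; the real helper lives in RuleGeneratorTest
// and may use a different date API or pattern.
private void assertDateBetween(DataValue value) {
    DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
    LocalDateTime date = LocalDateTime.parse(value.getValue(), fmt);
    LocalDateTime min = LocalDateTime.parse(value.getMinValue(), fmt);
    LocalDateTime max = LocalDateTime.parse(value.getMaxValue(), fmt);
    assertTrue("Generated date " + date + " is before the minimum " + min, !date.isBefore(min));
    assertTrue("Generated date " + date + " is after the maximum " + max, !date.isAfter(max));
}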
Use of org.apache.phoenix.pherf.configuration.DataModel in project phoenix by apache.
From the class RuleGeneratorTest, method testNullChance.
@Test
public void testNullChance() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    int sampleSize = 100;
    List<String> values = new ArrayList<>(sampleSize);
    for (Column dataMapping : model.getDataMappingColumns()) {
        DataValue value = rulesApplier.getDataValue(dataMapping);
        if (dataMapping.getNullChance() == 0) {
            // A 0% chance of null means we should never get an empty string back
            assertFalse("Got an empty value for a column with a 0% null chance.", value.getValue().equals(""));
        } else if (dataMapping.getNullChance() == 100) {
            // A 100% chance of null means we should always get an empty string back
            assertTrue("Expected an empty value for a column with a 100% null chance.", value.getValue().equals(""));
        } else if (dataMapping.getNullChance() == 90) {
            // You can't really assert on this, but you can eyeball it while debugging.
            for (int i = 0; i < sampleSize; i++) {
                DataValue tVal = rulesApplier.getDataValue(dataMapping);
                values.add(tVal.getValue());
            }
            Collections.sort(values);
        }
    }
}
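The 90% branch only collects and sorts values for manual inspection. If a statistical check were wanted, one hypothetical way to tighten it, reusing sampleSize, rulesApplier, and dataMapping from the snippet above, is to count the empty values and assert that the observed ratio lands in a generous band around 0.9 (the band is an arbitrary choice, not part of the Pherf test):
// Hypothetical check: with a null chance of 90, roughly 90% of generated values
// should come back as empty strings. The wide tolerance keeps the assertion
// stable for a sample of only 100 draws.
int emptyCount = 0;
for (int i = 0; i < sampleSize; i++) {
    if (rulesApplier.getDataValue(dataMapping).getValue().isEmpty()) {
        emptyCount++;
    }
}
double observedRatio = (double) emptyCount / sampleSize;
assertTrue("Observed null ratio " + observedRatio + " is far from the configured 90%.",
        observedRatio > 0.75 && observedRatio <= 1.0);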
Use of org.apache.phoenix.pherf.configuration.DataModel in project phoenix by apache.
From the class RuleGeneratorTest, method testCurrentDateGenerator.
// Test that the current date is generated correctly, between the two timestamps, at both the column level and the DataValue level
@Test
public void testCurrentDateGenerator() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    // Time before generating the date
    String timeStamp1 = rulesApplier.getCurrentDate();
    // Sleep for a few milliseconds
    sleep(2);
    for (Column dataMapping : model.getDataMappingColumns()) {
        if ((dataMapping.getType() == DataTypeMapping.DATE) && dataMapping.getUseCurrentDate()) {
            // Generate the date using rules
            DataValue value = rulesApplier.getDataValue(dataMapping);
            assertNotNull("Could not retrieve DataValue for random DATE.", value);
            assertNotNull("Could not retrieve a value in DataValue for random DATE.", value.getValue());
            sleep(2);
            // Time after generating the date
            String timeStamp2 = rulesApplier.getCurrentDate();
            // Check that the generated date is between timeStamp1 and timeStamp2
            value.setMinValue(timeStamp1);
            value.setMaxValue(timeStamp2);
            assertDateBetween(value);
        }
        // Check at the list level, against the same pair of timestamps
        if ((dataMapping.getType() == DataTypeMapping.DATE) && (dataMapping.getName().equals("PRESENT_DATE"))) {
            for (int i = 0; i < 1; i++) {
                DataValue value = rulesApplier.getDataValue(dataMapping);
                assertNotNull("Could not retrieve DataValue for random DATE.", value);
                assertNotNull("Could not retrieve a value in DataValue for random DATE.", value.getValue());
                sleep(2);
                // Time after generating the date
                String timeStamp2 = rulesApplier.getCurrentDate();
                // Check that the generated date is between timeStamp1 and timeStamp2
                value.setMinValue(timeStamp1);
                value.setMaxValue(timeStamp2);
                assertDateBetween(value);
            }
        }
    }
}
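The sleep(2) calls refer to a small helper in the test class that is not shown here; a plausible sketch, assuming it simply wraps Thread.sleep and converts the checked InterruptedException:
// Hypothetical helper; the actual implementation in RuleGeneratorTest may differ.
private static void sleep(long millis) {
    try {
        Thread.sleep(millis);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}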
Use of org.apache.phoenix.pherf.configuration.DataModel in project phoenix by apache.
From the class DataIngestIT, method testRWWorkload.
@Test
public void testRWWorkload() throws Exception {
    Connection connection = util.getConnection();
    WorkloadExecutor executor = new WorkloadExecutor();
    DataModel dataModel = parser.getDataModelByName("test_scenario");
    List<DataModel> dataModels = new ArrayList<>();
    dataModels.add(dataModel);
    QueryExecutor qe = new QueryExecutor(parser, util, executor, dataModels, null, false);
    executor.add(qe);
    Scenario scenario = parser.getScenarioByName("testScenarioRW");
    String sql = "select count(*) from " + scenario.getTableName();
    try {
        // Wait for data to load up.
        executor.get();
        executor.shutdown();
        // Verify data has been loaded
        Integer count = new JdbcSession(connection).sql(sql).select(new Outcome<Integer>() {

            @Override
            public Integer handle(ResultSet resultSet, Statement statement) throws SQLException {
                while (resultSet.next()) {
                    return resultSet.getInt(1);
                }
                return null;
            }
        });
        assertNotNull("Could not retrieve count.", count);
        // It would be better to sum up all the row counts for the scenarios, but this is fine
        assertTrue("Could not query any rows in " + scenario.getTableName(), count > 0);
    } catch (Exception e) {
        fail("Failed to load data. An exception was thrown: " + e.getMessage());
    }
}
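The count query goes through jcabi-jdbc's JdbcSession and Outcome. For readers unfamiliar with that API, the same verification can be written with plain JDBC; this equivalent sketch reuses connection, sql, and scenario from the test above and is illustrative only:
// Plain-JDBC equivalent of the JdbcSession block above.
Integer count = null;
try (PreparedStatement stmt = connection.prepareStatement(sql);
        ResultSet rs = stmt.executeQuery()) {
    if (rs.next()) {
        count = rs.getInt(1);
    }
}
assertNotNull("Could not retrieve count.", count);
assertTrue("Could not query any rows in " + scenario.getTableName(), count > 0);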
Use of org.apache.phoenix.pherf.configuration.DataModel in project phoenix by apache.
From the class RuleGeneratorTest, method testSequentialDataSequence.
@Test
public void testSequentialDataSequence() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    DataModel model = parser.getDataModels().get(0);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    Column targetColumn = null;
    for (Column column : model.getDataMappingColumns()) {
        DataSequence sequence = column.getDataSequence();
        if (sequence == DataSequence.SEQUENTIAL) {
            targetColumn = column;
            break;
        }
    }
    assertNotNull("Could not find a DataSequence.SEQUENTIAL rule.", targetColumn);
    assertMultiThreadedIncrementValue(targetColumn, rulesApplier);
}
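assertMultiThreadedIncrementValue(...) is another helper defined elsewhere in RuleGeneratorTest. Its intent, verifying that SEQUENTIAL values remain distinct when drawn from several threads at once, could be sketched roughly as below using java.util.concurrent; the thread count, draw count, and uniqueness-only check are assumptions for illustration, not the actual implementation:
// Illustrative sketch only; the real helper may also verify that values increment.
private void assertMultiThreadedIncrementValue(final Column column, final RulesApplier rulesApplier) throws Exception {
    final int threads = 5;
    final int drawsPerThread = 50;
    final Set<String> seen = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
    ExecutorService pool = Executors.newFixedThreadPool(threads);
    List<Future<?>> futures = new ArrayList<>();
    for (int t = 0; t < threads; t++) {
        futures.add(pool.submit(new Callable<Void>() {

            @Override
            public Void call() throws Exception {
                for (int i = 0; i < drawsPerThread; i++) {
                    seen.add(rulesApplier.getDataValue(column).getValue());
                }
                return null;
            }
        }));
    }
    for (Future<?> f : futures) {
        // Propagate any failure from the worker threads.
        f.get();
    }
    pool.shutdown();
    // Every draw should have produced a distinct sequential value.
    assertEquals("Sequential values collided across threads.", threads * drawsPerThread, seen.size());
}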