Use of org.apache.phoenix.pherf.rules.DataValue in project phoenix by apache.
The class WriteWorkload, method buildStatement.
private PreparedStatement buildStatement(Scenario scenario, List<Column> columns, PreparedStatement statement, SimpleDateFormat simpleDateFormat) throws Exception {
    int count = 1;
    for (Column column : columns) {
        DataValue dataValue = getRulesApplier().getDataForRule(scenario, column);
        switch (column.getType()) {
            case VARCHAR:
                if (dataValue.getValue().equals("")) {
                    statement.setNull(count, Types.VARCHAR);
                } else {
                    statement.setString(count, dataValue.getValue());
                }
                break;
            case CHAR:
                if (dataValue.getValue().equals("")) {
                    statement.setNull(count, Types.CHAR);
                } else {
                    statement.setString(count, dataValue.getValue());
                }
                break;
            case DECIMAL:
                if (dataValue.getValue().equals("")) {
                    statement.setNull(count, Types.DECIMAL);
                } else {
                    statement.setBigDecimal(count, new BigDecimal(dataValue.getValue()));
                }
                break;
            case INTEGER:
                if (dataValue.getValue().equals("")) {
                    statement.setNull(count, Types.INTEGER);
                } else {
                    statement.setInt(count, Integer.parseInt(dataValue.getValue()));
                }
                break;
            case DATE:
                if (dataValue.getValue().equals("")) {
                    statement.setNull(count, Types.DATE);
                } else {
                    Date date = new java.sql.Date(simpleDateFormat.parse(dataValue.getValue()).getTime());
                    statement.setDate(count, date);
                }
                break;
            default:
                break;
        }
        count++;
    }
    return statement;
}
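For context, here is a minimal standalone sketch (not taken from the Phoenix source) of the same empty-string-means-NULL convention applied to a single VARCHAR parameter. The helper class and method names are illustrative assumptions; only PreparedStatement, Types, and DataValue.getValue() come from the snippet above.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

import org.apache.phoenix.pherf.rules.DataValue;

// Hypothetical helper, not part of Pherf: binds one generated VARCHAR value to a
// PreparedStatement parameter, mapping an empty generated value to SQL NULL,
// mirroring the convention used by buildStatement above.
public class DataValueBinder {

    public static void bindVarchar(PreparedStatement statement, int index, DataValue dataValue) throws SQLException {
        if (dataValue.getValue().isEmpty()) {
            statement.setNull(index, Types.VARCHAR);
        } else {
            statement.setString(index, dataValue.getValue());
        }
    }
}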
Use of org.apache.phoenix.pherf.rules.DataValue in project phoenix by apache.
The class RuleGeneratorTest, method testValueListRule.
@Test
public void testValueListRule() throws Exception {
    List<String> expectedValues = new ArrayList<>();
    expectedValues.add("aAAyYhnNbBs9kWk");
    expectedValues.add("bBByYhnNbBs9kWu");
    expectedValues.add("cCCyYhnNbBs9kWr");
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    Scenario scenario = parser.getScenarios().get(0);
    Column simPhxCol = new Column();
    simPhxCol.setName("PARENT_ID");
    simPhxCol.setType(DataTypeMapping.CHAR);
    // Running this 10 times gives a reasonable chance that all the values will appear at least once.
    for (int i = 0; i < 10; i++) {
        DataValue value = rulesApplier.getDataForRule(scenario, simPhxCol);
        assertTrue("Got a value not in the list for the rule: " + value.getValue(), expectedValues.contains(value.getValue()));
    }
}
Use of org.apache.phoenix.pherf.rules.DataValue in project phoenix by apache.
The class RuleGeneratorTest, method assertMultiThreadedIncrementValue.
/**
 * This method tests {@link org.apache.phoenix.pherf.configuration.DataSequence} SEQUENTIAL.
 * It ensures the values returned always increase uniquely. The RulesApplier will be accessed by
 * multiple writers, so we must ensure the increment is thread-safe.
 */
private void assertMultiThreadedIncrementValue(final Column column, final RulesApplier rulesApplier) throws Exception {
    final int threadCount = 30;
    final int increments = 100;
    final Set<String> testSet = new TreeSet<>();
    List<Thread> threadList = new ArrayList<>();
    for (int i = 0; i < threadCount; i++) {
        Thread t = new Thread() {

            @Override
            public void run() {
                for (int i = 0; i < increments; i++) {
                    try {
                        DataValue value = rulesApplier.getDataValue(column);
                        String strValue = value.getValue();
                        synchronized (testSet) {
                            assertFalse("Incrementer gave a duplicate value: " + strValue, testSet.contains(strValue));
                            assertTrue("Length did not equal expected.", strValue.length() == column.getLength());
                            testSet.add(strValue);
                        }
                    } catch (Exception e) {
                        fail("Caught an exception during test: " + e.getMessage());
                    }
                }
            }
        };
        t.start();
        threadList.add(t);
    }
    // Wait for threads to finish
    for (Thread t : threadList) {
        try {
            t.join();
        } catch (InterruptedException e) {
            fail("There was a problem joining a thread: " + e.getMessage());
        }
    }
    assertTrue("Total count of generated values did not match threadCount * increments.", testSet.size() == (threadCount * increments));
}
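As a usage note, the sketch below shows one way the helper above might be driven from a test. It is hedged: the column name, the length value, and the setLength/setDataSequence setters on Column are assumptions about the Pherf configuration API rather than code taken from the project.

@Test
public void testMultiThreadedSequentialIncrement() throws Exception {
    // Hypothetical driver for the helper above: mark a CHAR column as SEQUENTIAL
    // and verify that concurrent writers never observe a duplicate value.
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    WriteWorkload loader = new WriteWorkload(parser);
    Column sequentialCol = new Column();
    sequentialCol.setName("NEWVAL_STRING"); // illustrative column name
    sequentialCol.setType(DataTypeMapping.CHAR);
    sequentialCol.setLength(15); // length is asserted inside the helper
    sequentialCol.setDataSequence(DataSequence.SEQUENTIAL); // assumed setter
    assertMultiThreadedIncrementValue(sequentialCol, loader.getRulesApplier());
}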
Use of org.apache.phoenix.pherf.rules.DataValue in project phoenix by apache.
The class RuleGeneratorTest, method testRuleOverrides.
@Test
public void testRuleOverrides() throws Exception {
    XMLConfigParser parser = new XMLConfigParser(matcherScenario);
    WriteWorkload loader = new WriteWorkload(parser);
    RulesApplier rulesApplier = loader.getRulesApplier();
    Scenario scenario = parser.getScenarios().get(0);
    // We should be able to find the correct rule based only on the Type and Name combination.
    // Test CHAR
    Column simPhxCol = new Column();
    simPhxCol.setName("OTHER_ID");
    simPhxCol.setType(DataTypeMapping.CHAR);
    // Get the rule we expect to match
    Column rule = rulesApplier.getRule(simPhxCol);
    assertEquals("Did not find the correct rule.", rule.getName(), simPhxCol.getName());
    assertEquals("Did not find the matching rule type.", rule.getType(), simPhxCol.getType());
    assertEquals("Rule contains incorrect length.", rule.getLength(), 8);
    assertEquals("Rule contains incorrect prefix.", rule.getPrefix(), "z0Oxx00");
    DataValue value = rulesApplier.getDataForRule(scenario, simPhxCol);
    assertEquals("Value returned does not match rule.", value.getValue().length(), 8);
    // Test VARCHAR with RANDOM and prefix
    simPhxCol.setName("OLDVAL_STRING");
    simPhxCol.setType(DataTypeMapping.VARCHAR);
    // Get the rule we expect to match
    rule = rulesApplier.getRule(simPhxCol);
    assertEquals("Did not find the correct rule.", rule.getName(), simPhxCol.getName());
    assertEquals("Did not find the matching rule type.", rule.getType(), simPhxCol.getType());
    assertEquals("Rule contains incorrect length.", rule.getLength(), 10);
    assertEquals("Rule contains incorrect prefix.", rule.getPrefix(), "MYPRFX");
    value = rulesApplier.getDataForRule(scenario, simPhxCol);
    assertEquals("Value returned does not match rule.", value.getValue().length(), 10);
    assertTrue("Value returned does not start with prefix.", StringUtils.startsWith(value.getValue(), rule.getPrefix()));
}
Use of org.apache.phoenix.pherf.rules.DataValue in project phoenix by apache.
The class DataIngestIT, method testColumnRulesApplied.
@Test
public void testColumnRulesApplied() {
    Scenario scenario = null;
    try {
        scenario = parser.getScenarioByName("testScenario");
        List<Column> columnListFromPhoenix = util.getColumnsFromPhoenix(scenario.getSchemaName(), scenario.getTableNameWithoutSchemaName(), util.getConnection());
        assertTrue("Could not get phoenix columns.", columnListFromPhoenix.size() > 0);
        WriteWorkload loader = new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO);
        WorkloadExecutor executor = new WorkloadExecutor();
        executor.add(loader);
        executor.get();
        executor.shutdown();
        RulesApplier rulesApplier = loader.getRulesApplier();
        List<Map> modelList = rulesApplier.getModelList();
        assertTrue("Could not generate the modelList", modelList.size() > 0);
        for (Column column : columnListFromPhoenix) {
            DataValue data = rulesApplier.getDataForRule(scenario, column);
            // We are generating data values, so the value should have been specified by this point.
            assertTrue("Failed to retrieve data for column type: " + column.getType(), data != null);
            // NEWVAL_STRING does not define an override, so we should get the default rule.
            if ((column.getType() == DataTypeMapping.VARCHAR) && (column.getName().equals("NEWVAL_STRING"))) {
                assertTrue("Failed to retrieve data for column type: ", data.getDistribution() == Integer.MIN_VALUE);
            }
        }
        // Run some queries
        executor = new WorkloadExecutor();
        Workload query = new QueryExecutor(parser, util, executor);
        executor.add(query);
        executor.get();
        executor.shutdown();
        PhoenixUtil.create().deleteTables("ALL");
    } catch (Exception e) {
        fail("We had an exception: " + e.getMessage());
    }
}