Use of org.apache.phoenix.pherf.configuration.Column in project phoenix by apache.
From the class DataIngestIT, method testColumnRulesApplied:
@Test
public void testColumnRulesApplied() {
    Scenario scenario = null;
    try {
        scenario = parser.getScenarioByName("testScenario");
        List<Column> columnListFromPhoenix = util.getColumnsFromPhoenix(
                scenario.getSchemaName(),
                scenario.getTableNameWithoutSchemaName(), util.getConnection());
        assertTrue("Could not get phoenix columns.", columnListFromPhoenix.size() > 0);

        // Load the generated data and wait for the write workload to finish.
        WriteWorkload loader = new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO);
        WorkloadExecutor executor = new WorkloadExecutor();
        executor.add(loader);
        executor.get();
        executor.shutdown();

        RulesApplier rulesApplier = loader.getRulesApplier();
        List<Map> modelList = rulesApplier.getModelList();
        assertTrue("Could not generate the modelList", modelList.size() > 0);

        for (Column column : columnListFromPhoenix) {
            DataValue data = rulesApplier.getDataForRule(scenario, column);

            // We are generating data values,
            // so the value should have been specified by this point.
            assertNotNull("Failed to retrieve data for column type: " + column.getType(), data);

            // NEWVAL_STRING does not specify a rule override in the scenario,
            // so we should get the default rule.
            if ((column.getType() == DataTypeMapping.VARCHAR)
                    && (column.getName().equals("NEWVAL_STRING"))) {
                assertTrue("Failed to retrieve data for column type: ",
                        data.getDistribution() == Integer.MIN_VALUE);
            }
        }

        // Run some queries against the freshly loaded data.
        executor = new WorkloadExecutor();
        Workload query = new QueryExecutor(parser, util, executor);
        executor.add(query);
        executor.get();
        executor.shutdown();

        PhoenixUtil.create().deleteTables("ALL");
    } catch (Exception e) {
        fail("We had an exception: " + e.getMessage());
    }
}
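The test drives the standard Pherf workload lifecycle twice: submit a workload to a WorkloadExecutor, block on get(), then shutdown(). Below is a minimal sketch of that lifecycle on its own, reusing only the calls shown above; the import paths (including the location of GeneratePhoenixStats) are assumed from Pherf's package layout rather than confirmed by the source.

import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats; // assumed location
import org.apache.phoenix.pherf.configuration.Scenario;
import org.apache.phoenix.pherf.configuration.XMLConfigParser;
import org.apache.phoenix.pherf.util.PhoenixUtil;
import org.apache.phoenix.pherf.workload.QueryExecutor;
import org.apache.phoenix.pherf.workload.Workload;
import org.apache.phoenix.pherf.workload.WorkloadExecutor;
import org.apache.phoenix.pherf.workload.WriteWorkload;

public class WorkloadLifecycleSketch {
    // Runs the write workload for one scenario, then the query workload,
    // mirroring the submit -> get() -> shutdown() sequence in the test above.
    public static void run(PhoenixUtil util, XMLConfigParser parser) throws Exception {
        Scenario scenario = parser.getScenarioByName("testScenario");

        // Write phase: load generated rows and block until the workload completes.
        WorkloadExecutor writes = new WorkloadExecutor();
        writes.add(new WriteWorkload(util, parser, scenario, GeneratePhoenixStats.NO));
        writes.get();
        writes.shutdown();

        // Query phase: replay the scenario's queries against the loaded data.
        WorkloadExecutor queries = new WorkloadExecutor();
        Workload query = new QueryExecutor(parser, util, queries);
        queries.add(query);
        queries.get();
        queries.shutdown();
    }
}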
Use of org.apache.phoenix.pherf.configuration.Column in project phoenix by apache.
From the class SchemaReaderIT, method assertApplySchemaTest:
private void assertApplySchemaTest() {
    try {
        util.setZookeeper("localhost");

        // Collect the schema files matching the pattern and make sure they are readable.
        SchemaReader reader = new SchemaReader(util, ".*datamodel/.*test.*sql");
        List<Path> resources = new ArrayList<>(reader.getResourceList());
        assertTrue("Could not pull list of schema files.", resources.size() > 0);
        assertNotNull("Could not read schema file.", this.getClass().getResourceAsStream(
                PherfConstants.RESOURCE_DATAMODEL + "/" + resources.get(0).getFileName().toString()));
        assertNotNull("Could not read schema file.", reader.resourceToString(resources.get(0)));

        try {
            reader.applySchema();
        } catch (SQLException e) {
            fail("Failed to apply schema " + e.getMessage());
        }

        // Parse the scenario XML and verify Phoenix now reports the table's columns.
        URL resourceUrl = getClass().getResource("/scenario/test_scenario.xml");
        assertNotNull("Test data XML file is missing", resourceUrl);
        Connection connection = util.getConnection();
        Path resourcePath = Paths.get(resourceUrl.toURI());
        DataModel data = XMLConfigParser.readDataModel(resourcePath);
        List<Scenario> scenarioList = data.getScenarios();
        Scenario scenario = scenarioList.get(0);
        List<Column> columnList = util.getColumnsFromPhoenix(
                scenario.getSchemaName(),
                scenario.getTableNameWithoutSchemaName(), connection);
        assertTrue("Could not retrieve Metadata from Phoenix", columnList.size() > 0);
    } catch (Exception e) {
        // fail() throws, so report the stack trace before failing.
        e.printStackTrace();
        fail("Could not initialize SchemaReader: " + e.getMessage());
    }
}
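Condensed, the pattern in this test is: apply the matching schema files, parse the scenario XML, and verify that Phoenix reports Column metadata for the scenario's table. A minimal sketch under the same assumptions about Pherf's package layout, with the schema pattern and resource path taken from the test above:

import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.util.List;

import org.apache.phoenix.pherf.configuration.Column;
import org.apache.phoenix.pherf.configuration.DataModel;
import org.apache.phoenix.pherf.configuration.Scenario;
import org.apache.phoenix.pherf.configuration.XMLConfigParser;
import org.apache.phoenix.pherf.schema.SchemaReader; // assumed location
import org.apache.phoenix.pherf.util.PhoenixUtil;

public class SchemaVerifySketch {
    public static void verify(PhoenixUtil util) throws Exception {
        // Apply every datamodel .sql file matching the pattern.
        SchemaReader reader = new SchemaReader(util, ".*datamodel/.*test.*sql");
        reader.applySchema();

        // Parse the scenario definition shipped as a classpath resource.
        URL resourceUrl = SchemaVerifySketch.class.getResource("/scenario/test_scenario.xml");
        Path resourcePath = Paths.get(resourceUrl.toURI());
        DataModel data = XMLConfigParser.readDataModel(resourcePath);
        Scenario scenario = data.getScenarios().get(0);

        // Confirm Phoenix now reports columns for the scenario's table.
        try (Connection connection = util.getConnection()) {
            List<Column> columns = util.getColumnsFromPhoenix(
                    scenario.getSchemaName(),
                    scenario.getTableNameWithoutSchemaName(), connection);
            for (Column column : columns) {
                System.out.println(column.getName() + " : " + column.getType());
            }
        }
    }
}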