Use of org.apache.phoenix.pherf.schema.SchemaReader in project phoenix by apache.
The class ResultBaseTestIT, method setUp:
@BeforeClass
public static void setUp() throws Exception {
    PherfConstants constants = PherfConstants.create();
    properties = constants.getProperties(PherfConstants.PHERF_PROPERTIES, false);
    // Make sure the default results directory exists before any test writes to it
    String dir = properties.getProperty("pherf.default.results.dir");
    resultUtil.ensureBaseDirExists(dir);
    // Point PhoenixUtil at the local test cluster and wire up the schema and scenario readers
    util.setZookeeper("localhost");
    reader = new SchemaReader(util, matcherSchema);
    parser = new XMLConfigParser(matcherScenario);
}
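For orientation, a minimal sketch of how the fields initialized in this setUp are typically exercised afterwards. It reuses reader and parser from above, only calls methods shown elsewhere on this page, and assumes getDataModels() yields the parsed DataModel objects; it is an illustration, not the actual test body:

// Create the tables described by the .sql files that matcherSchema resolved to
reader.applySchema();
// Walk the scenarios that matcherScenario resolved to
for (DataModel model : parser.getDataModels()) {
    for (Scenario scenario : model.getScenarios()) {
        System.out.println(scenario.getSchemaName() + "." + scenario.getTableNameWithoutSchemaName());
    }
}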
Use of org.apache.phoenix.pherf.schema.SchemaReader in project phoenix by apache.
The class SchemaReaderIT, method assertApplySchemaTest:
private void assertApplySchemaTest() {
    try {
        util.setZookeeper("localhost");
        SchemaReader reader = new SchemaReader(util, ".*datamodel/.*test.*sql");
        List<Path> resources = new ArrayList<>(reader.getResourceList());
        assertTrue("Could not pull list of schema files.", resources.size() > 0);
        assertNotNull("Could not read schema file.", this.getClass().getResourceAsStream(
                PherfConstants.RESOURCE_DATAMODEL + "/" + resources.get(0).getFileName().toString()));
        assertNotNull("Could not read schema file.", reader.resourceToString(resources.get(0)));
        try {
            reader.applySchema();
        } catch (SQLException e) {
            fail("Failed to apply schema " + e.getMessage());
        }
        URL resourceUrl = getClass().getResource("/scenario/test_scenario.xml");
        assertNotNull("Test data XML file is missing", resourceUrl);
        Connection connection = util.getConnection();
        Path resourcePath = Paths.get(resourceUrl.toURI());
        DataModel data = XMLConfigParser.readDataModel(resourcePath);
        List<Scenario> scenarioList = data.getScenarios();
        Scenario scenario = scenarioList.get(0);
        // Verify the applied schema is visible through Phoenix metadata
        List<Column> columnList = util.getColumnsFromPhoenix(scenario.getSchemaName(),
                scenario.getTableNameWithoutSchemaName(), connection);
        assertTrue("Could not retrieve Metadata from Phoenix", columnList.size() > 0);
    } catch (Exception e) {
        // Log the root cause before failing; fail() throws, so it must come last
        e.printStackTrace();
        fail("Could not initialize SchemaReader");
    }
}
Use of org.apache.phoenix.pherf.schema.SchemaReader in project phoenix by apache.
The class Pherf, method run:
public void run() throws Exception {
    MonitorManager monitorManager = null;
    List<Workload> workloads = new ArrayList<>();
    WorkloadExecutor workloadExecutor = new WorkloadExecutor(properties, workloads, !isFunctional);
    try {
        // List bundled schema and scenario resources, then exit
        if (listFiles) {
            ResourceList list = new ResourceList(PherfConstants.RESOURCE_DATAMODEL);
            Collection<Path> schemaFiles = list.getResourceList(PherfConstants.SCHEMA_ROOT_PATTERN + ".sql");
            System.out.println("Schema Files:");
            for (Path path : schemaFiles) {
                System.out.println(path);
            }
            list = new ResourceList(PherfConstants.RESOURCE_SCENARIO);
            Collection<Path> scenarioFiles = list.getResourceList(PherfConstants.SCENARIO_ROOT_PATTERN + ".xml");
            System.out.println("Scenario Files:");
            for (Path path : scenarioFiles) {
                System.out.println(path);
            }
            return;
        }
        // Compare results and exit
        if (null != compareResults) {
            logger.info("\nStarting to compare results and exiting for " + compareResults);
            new GoogleChartGenerator(compareResults, compareType).readAndRender();
            return;
        }
        XMLConfigParser parser = new XMLConfigParser(scenarioFile);
        // Drop existing tables whose PHERF names match the supplied regex
        if (null != dropPherfTablesRegEx) {
            logger.info("\nDropping existing tables with PHERF name matching regex " + dropPherfTablesRegEx);
            phoenixUtil.deleteTables(dropPherfTablesRegEx);
        }
        if (monitor) {
            monitorManager = new MonitorManager(Integer.parseInt(properties.getProperty("pherf.default.monitorFrequency")));
            workloadExecutor.add(monitorManager);
        }
        if (applySchema) {
            logger.info("\nStarting to apply schema...");
            SchemaReader reader = (schemaFile == null) ? new SchemaReader(".*.sql") : new SchemaReader(schemaFile);
            reader.applySchema();
        }
        // Schema and data load
        if (preLoadData) {
            logger.info("\nStarting Data Load...");
            Workload workload = new WriteWorkload(parser, generateStatistics);
            try {
                workloadExecutor.add(workload);
                // Wait for the data load to complete
                workloadExecutor.get(workload);
            } finally {
                if (null != workload) {
                    workload.complete();
                }
            }
        } else {
            logger.info("\nSKIPPED: Data Load and schema creation as -l argument not specified");
        }
        // Execute multi-threaded query sets
        if (executeQuerySets) {
            logger.info("\nStarting to apply Execute Queries...");
            workloadExecutor.add(new QueryExecutor(parser, phoenixUtil, workloadExecutor,
                    parser.getDataModels(), queryHint, isFunctional, writeRuntimeResults));
        } else {
            logger.info("\nSKIPPED: Multithreaded query set execution as -q argument not specified");
        }
        // Clean up the monitor explicitly
        if (monitorManager != null) {
            logger.info("Run completed. Shutting down Monitor.");
            monitorManager.complete();
        }
        // Collect any final jobs
        workloadExecutor.get();
    } finally {
        if (workloadExecutor != null) {
            logger.info("Run completed. Shutting down thread pool.");
            workloadExecutor.shutdown();
        }
    }
}
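As a companion to the driver above, here is a condensed, hypothetical sketch of the schema-and-monitor portion of the pipeline in isolation. The method name is illustrative, defaults are assumed where the driver would read command-line options, and it only reuses constructors and calls that appear in the snippets on this page:

public static void applySchemaWithMonitoring() throws Exception {
    Properties properties = PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, false);
    List<Workload> workloads = new ArrayList<>();
    // Third argument mirrors !isFunctional in the driver above (non-functional mode)
    WorkloadExecutor workloadExecutor = new WorkloadExecutor(properties, workloads, true);
    MonitorManager monitorManager = null;
    try {
        // Start system monitoring at the configured frequency, as run() does when monitoring is enabled
        monitorManager = new MonitorManager(Integer.parseInt(properties.getProperty("pherf.default.monitorFrequency")));
        workloadExecutor.add(monitorManager);
        // Apply every bundled .sql schema file, as run() does when no explicit schema file is given
        new SchemaReader(".*.sql").applySchema();
        // Drain any queued jobs, then stop the monitor
        workloadExecutor.get();
        monitorManager.complete();
    } finally {
        workloadExecutor.shutdown();
    }
}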