Use of org.voltdb.compiler.VoltProjectBuilder.RoleInfo in project voltdb by VoltDB.
From the class TestCatalogDiffs, method testChangeSecurityProvider.
public void testChangeSecurityProvider() throws IOException {
RoleInfo[] gi = new RoleInfo[2];
gi[0] = new RoleInfo("group1", true, true, true, true, false, false);
gi[1] = new RoleInfo("group2", true, true, true, true, false, false);
UserInfo[] ui = new UserInfo[2];
ui[0] = new UserInfo("user1", "password", new String[] { "group1" });
ui[1] = new UserInfo("user2", "password", new String[] { "group2" });
String original = compileWithGroups(true, "hash", gi, ui, "base", BASEPROCS);
Catalog catOriginal = catalogForJar(original);
// keep security enabled but switch the provider from hash to kerberos
String updated = compileWithGroups(true, "kerberos", gi, ui, "base", BASEPROCS);
Catalog catUpdated = catalogForJar(updated);
verifyDiff(catOriginal, catUpdated, false);
}
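The compileWithGroups helper used above is not reproduced on this page. The following is a minimal sketch of what such a helper could look like, assuming it wraps VoltProjectBuilder; the provider-setting call and the jar path handling are assumptions for illustration, not the project's actual code.
// Hypothetical sketch of a compileWithGroups-style helper (not the actual TestCatalogDiffs code).
private String compileWithGroups(boolean securityEnabled, String securityProvider,
                                 RoleInfo[] roles, UserInfo[] users,
                                 String name, Class<?>... procedures) throws IOException {
    VoltProjectBuilder builder = new VoltProjectBuilder();
    // The real helper also adds the test schema that these procedure classes resolve against.
    builder.addProcedures(procedures);
    if (roles != null) {
        builder.addRoles(roles);
    }
    if (users != null) {
        builder.addUsers(users);
    }
    if (securityEnabled) {
        builder.setSecurityEnabled(true, true);
        // Assumed builder call for the provider ("hash" vs "kerberos"); the real helper
        // may configure this through the deployment file instead.
        builder.setSecurityProvider(securityProvider);
    }
    String jarPath = Configuration.getPathToCatalogForTest(name + ".jar"); // placeholder location
    assertTrue(builder.compile(jarPath));
    return jarPath;
}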
Use of org.voltdb.compiler.VoltProjectBuilder.RoleInfo in project voltdb by VoltDB.
From the class TestCatalogUpdateSuite, method suite.
/**
* Build a list of the tests that will be run when TestCatalogUpdateSuite gets run by JUnit.
* Use helper classes that are part of the RegressionSuite framework.
* This particular class runs all tests on the local JNI backend with both
* one and two partition configurations, as well as on the hsql backend.
*
* @return The TestSuite containing all the tests to be run.
* @throws Exception
*/
public static Test suite() throws Exception {
TheHashinator.initialize(TheHashinator.getConfiguredHashinatorClass(), TheHashinator.getConfigureBytes(2));
// the suite built here will run all the tests from this class
MultiConfigSuiteBuilder builder = new MultiConfigSuiteBuilder(TestCatalogUpdateSuite.class);
/////////////////////////////////////////////////////////////
// CONFIG #1: 1 Local Site/Partitions running on JNI backend
/////////////////////////////////////////////////////////////
// get a server config for the native backend with one site/partition
VoltServerConfig config = new LocalCluster("catalogupdate-cluster-base.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
// Catalog upgrade test(s) sporadically fail if there's a local server because
// a file pipe isn't available for grepping local server output.
((LocalCluster) config).setHasLocalServer(true);
// build up a project builder for the workload
TPCCProjectBuilder project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS);
// build the jarfile
boolean basecompile = config.compile(project);
assertTrue(basecompile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-base.xml"));
// add this config to the set of tests to run
builder.addServerConfig(config, false);
/////////////////////////////////////////////////////////////
// DELTA CATALOGS FOR TESTING
/////////////////////////////////////////////////////////////
// As catalogupdate-cluster-base but with security enabled. This requires users and groups.
// We piggy-back the heartbeat change here.
RoleInfo[] groups = new RoleInfo[] { new RoleInfo("group1", false, false, true, false, false, false) };
UserInfo[] users = new UserInfo[] { new UserInfo("user1", "userpass1", new String[] { "group1" }) };
ProcedureInfo procInfo = new ProcedureInfo(new String[] { "group1" }, InsertNewOrder.class);
config = new LocalCluster("catalogupdate-cluster-base-secure.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addUsers(users);
project.addRoles(groups);
project.addProcedures(procInfo);
project.setSecurityEnabled(true, true);
project.setDeadHostTimeout(6000);
boolean compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-base-secure.xml"));
//config = new LocalSingleProcessServer("catalogupdate-local-addtables.jar", 2, BackendTarget.NATIVE_EE_JNI);
config = new LocalCluster("catalogupdate-cluster-addtables.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addSchema(TestCatalogUpdateSuite.class.getResource("testorderby-ddl.sql").getPath());
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS_OPROCS);
project.setElasticDuration(100);
project.setElasticThroughput(50);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addtables.xml"));
// as above but also with a materialized view added to O1
try {
config = new LocalCluster("catalogupdate-cluster-addtableswithmatview.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addSchema(TestCatalogUpdateSuite.class.getResource("testorderby-ddl.sql").getPath());
project.addLiteralSchema("CREATE VIEW MATVIEW_O1(C1, C2, NUM) AS SELECT A_INT, PKEY, COUNT(*) FROM O1 GROUP BY A_INT, PKEY;");
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS_OPROCS);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addtableswithmatview.xml"));
} catch (IOException e) {
fail();
}
config = new LocalCluster("catalogupdate-cluster-addindex.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addLiteralSchema("CREATE INDEX NEWINDEX ON NEW_ORDER (NO_O_ID);");
// HISTORY is a good choice because this new index will be its only one (the table has no pkey)
project.addLiteralSchema("CREATE INDEX NEWINDEX2 ON HISTORY (H_C_ID);");
// unique index
project.addLiteralSchema("CREATE UNIQUE INDEX NEWINDEX3 ON STOCK (S_I_ID, S_W_ID, S_QUANTITY);");
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addindex.xml"));
config = new LocalCluster("catalogupdate-cluster-addexpressindex.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addLiteralSchema("CREATE INDEX NEWEXPRESSINDEX ON NEW_ORDER ((NO_O_ID+NO_O_ID)-NO_O_ID);");
// HISTORY is a good choice because this new index will be its only one (the table has no pkey)
project.addLiteralSchema("CREATE INDEX NEWEXPRESSINDEX2 ON HISTORY ((H_C_ID+H_C_ID)-H_C_ID);");
// unique index
// This needs to wait until the test for unique index coverage for indexed expressions can parse out any simple column expressions
// and discover a unique index on some subset.
//TODO: project.addLiteralSchema("CREATE UNIQUE INDEX NEWEXPRESSINDEX3 ON STOCK (S_I_ID, S_W_ID, S_QUANTITY+S_QUANTITY-S_QUANTITY);");
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addexpressindex.xml"));
//config = new LocalSingleProcessServer("catalogupdate-local-expanded.jar", 2, BackendTarget.NATIVE_EE_JNI);
config = new LocalCluster("catalogupdate-cluster-expanded.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(EXPANDEDPROCS);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-expanded.xml"));
config = new LocalCluster("catalogupdate-cluster-adhocproc.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addStmtProcedure("adhocproc1", "SELECT * from WAREHOUSE");
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-adhocproc.xml"));
config = new LocalCluster("catalogupdate-cluster-adhocschema.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addLiteralSchema("CREATE TABLE CATALOG_MODE_DDL_TEST (fld1 INTEGER NOT NULL);");
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-adhocschema.xml"));
//config = new LocalSingleProcessServer("catalogupdate-local-conflict.jar", 2, BackendTarget.NATIVE_EE_JNI);
config = new LocalCluster("catalogupdate-cluster-conflict.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(CONFLICTPROCS);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-conflict.xml"));
//config = new LocalSingleProcessServer("catalogupdate-local-many.jar", 2, BackendTarget.NATIVE_EE_JNI);
config = new LocalCluster("catalogupdate-cluster-many.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(SOMANYPROCS);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-many.xml"));
// A catalog change that enables snapshots
config = new LocalCluster("catalogupdate-cluster-enable_snapshot.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS);
project.setSnapshotSettings("1s", 3, "/tmp/snapshotdir1", "foo1");
// build the jarfile
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-enable_snapshot.xml"));
// Another catalog change to modify the snapshot schedule
config = new LocalCluster("catalogupdate-cluster-change_snapshot.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS);
project.setSnapshotSettings("1s", 3, "/tmp/snapshotdir2", "foo2");
// build the jarfile
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-change_snapshot.xml"));
// Another catalog change that points the snapshot schedule at a directory that does not exist
config = new LocalCluster("catalogupdate-cluster-change_snapshot_dir_not_exist.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS);
project.setSnapshotSettings("1s", 3, "/tmp/snapshotdirasda2", "foo2");
// build the jarfile
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-change_snapshot_dir_not_exist.xml"));
// A huge catalog update to test size limits
config = new LocalCluster("catalogupdate-cluster-huge.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
long t = System.currentTimeMillis();
String hugeSchemaURL = generateRandomDDL("catalogupdate-cluster-huge", HUGE_TABLES, HUGE_COLUMNS, HUGE_NAME_SIZE);
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addSchema(hugeSchemaURL);
project.addProcedures(BASEPROCS);
compile = config.compile(project);
assertTrue(compile);
hugeCompileElapsed = (System.currentTimeMillis() - t) / 1000.0;
hugeCatalogXMLPath = Configuration.getPathToCatalogForTest("catalogupdate-cluster-huge.xml");
hugeCatalogJarPath = Configuration.getPathToCatalogForTest("catalogupdate-cluster-huge.jar");
MiscUtils.copyFile(project.getPathToDeployment(), hugeCatalogXMLPath);
config = new LocalCluster("catalogupdate-cluster-change_snapshot_dir_not_exist.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.addDefaultPartitioning();
project.addProcedures(BASEPROCS);
project.setSnapshotSettings("1s", 3, "/tmp/snapshotdirasda2", "foo2");
// build the jarfile
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-change_snapshot_dir_not_exist.xml"));
// Catalogs with different system settings on query time out
config = new LocalCluster("catalogupdate-cluster-timeout-1000.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.setQueryTimeout(1000);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-timeout-1000.xml"));
config = new LocalCluster("catalogupdate-cluster-timeout-5000.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.setQueryTimeout(5000);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-timeout-5000.xml"));
config = new LocalCluster("catalogupdate-cluster-timeout-600.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
project.setQueryTimeout(600);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-timeout-600.xml"));
// elastic duration and throughput catalog update tests
config = new LocalCluster("catalogupdate-cluster-elastic-100-5.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
project = new TPCCProjectBuilder();
project.addDefaultSchema();
// build the jarfile
project.setElasticDuration(100);
project.setElasticThroughput(5);
compile = config.compile(project);
assertTrue(compile);
MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-elastic-100-5.xml"));
return builder;
}
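The suite only builds the alternate catalog jars and deployment files; the individual test cases (not shown on this page) apply them against a running cluster. As a rough illustration, assuming the Client#updateApplicationCatalog convenience call available in this era of VoltDB, applying one of the catalogs built above might look like the sketch below; the real tests in this class drive the update through their own helpers and check the results far more thoroughly.
// Hedged usage sketch (not part of suite()): swap in one of the catalogs built above.
Client client = ClientFactory.createClient();
client.createConnection("localhost");
File newCatalog = new File(Configuration.getPathToCatalogForTest("catalogupdate-cluster-addtables.jar"));
File newDeployment = new File(Configuration.getPathToCatalogForTest("catalogupdate-cluster-addtables.xml"));
// Client#updateApplicationCatalog is assumed here; it wraps the @UpdateApplicationCatalog system procedure.
ClientResponse response = client.updateApplicationCatalog(newCatalog, newDeployment);
assertEquals(ClientResponse.SUCCESS, response.getStatus());
client.close();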
Use of org.voltdb.compiler.VoltProjectBuilder.RoleInfo in project voltdb by VoltDB.
From the class TestCatalogDiffs, method testDeleteGroupAndUser.
public void testDeleteGroupAndUser() throws IOException {
RoleInfo[] gi = new RoleInfo[1];
gi[0] = new RoleInfo("group1", true, true, true, true, false, false);
UserInfo[] ui = new UserInfo[1];
ui[0] = new UserInfo("user1", "password", new String[] { "group1" });
String original = compileWithGroups(false, null, gi, ui, "base", BASEPROCS);
Catalog catOriginal = catalogForJar(original);
// no groups or users this time
String updated = compileWithGroups(false, null, null, null, "base", BASEPROCS);
Catalog catUpdated = catalogForJar(updated);
verifyDiff(catOriginal, catUpdated, false);
}
Use of org.voltdb.compiler.VoltProjectBuilder.RoleInfo in project voltdb by VoltDB.
From the class TestCatalogDiffs, method testModifyUser.
public void testModifyUser() throws IOException {
RoleInfo[] gi = new RoleInfo[1];
gi[0] = new RoleInfo("group1", true, true, true, true, false, false);
UserInfo[] ui = new UserInfo[1];
ui[0] = new UserInfo("user1", "password", new String[] { "group1" });
String original = compileWithGroups(false, null, gi, ui, "base", BASEPROCS);
Catalog catOriginal = catalogForJar(original);
RoleInfo[] gi2 = new RoleInfo[1];
gi2[0] = new RoleInfo("group2", true, true, true, true, true, true);
// change the user's password and role.
ui[0] = new UserInfo("user1", "drowssap", new String[] { "group2" });
String updated = compileWithGroups(false, null, gi2, ui, "base", BASEPROCS);
Catalog catUpdated = catalogForJar(updated);
verifyDiff(catOriginal, catUpdated, false);
}
Use of org.voltdb.compiler.VoltProjectBuilder.RoleInfo in project voltdb by VoltDB.
From the class TestCatalogDiffs, method testAddGroupAndUser.
public void testAddGroupAndUser() throws IOException {
String original = compile("base", BASEPROCS);
Catalog catOriginal = catalogForJar(original);
RoleInfo[] gi = new RoleInfo[1];
gi[0] = new RoleInfo("group1", true, true, true, true, true, false);
UserInfo[] ui = new UserInfo[1];
ui[0] = new UserInfo("user1", "password", new String[] { "group1" });
String updated = compileWithGroups(false, null, gi, ui, "base", BASEPROCS);
Catalog catUpdated = catalogForJar(updated);
verifyDiff(catOriginal, catUpdated, false);
}
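All four TestCatalogDiffs examples end in verifyDiff, a helper that is also not shown on this page. Below is a rough sketch of the kind of check it presumably performs with org.voltdb.catalog.CatalogDiffEngine; the helper's real assertions and its boolean flags cover more cases than this.
// Hypothetical sketch of a verifyDiff-style check (not the actual helper).
private void verifyDiff(Catalog catOriginal, Catalog catUpdated, boolean expectSnapshotIsolation) {
    CatalogDiffEngine diff = new CatalogDiffEngine(catOriginal, catUpdated);
    // The change must be applicable as a live catalog update.
    assertTrue(diff.supported());
    // Replaying the generated commands on a copy of the original should reproduce the updated catalog.
    Catalog copy = catOriginal.deepCopy();
    copy.execute(diff.commands());
    assertEquals(catUpdated.serialize(), copy.serialize());
    // The real helper's boolean flag gates further checks (e.g. snapshot isolation) omitted here.
}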