Search in sources :

Example 1 with ProcedureInfo

Use of org.voltdb.compiler.VoltProjectBuilder.ProcedureInfo in the voltdb project by VoltDB.

From the class TestSecuritySuite, method suite():

/**
     * Build a list of the tests that will be run when TestSecuritySuite gets run by JUnit.
     * Use helper classes that are part of the RegressionSuite framework.
     * This particular class runs all tests on the local JNI backend with both
     * one and two partition configurations, as well as on the hsql backend.
     *
     * @return The TestSuite containing all the tests to be run.
     */
public static Test suite() {
    VoltServerConfig config = null;
    // the suite made here will all be using the tests from this class
    MultiConfigSuiteBuilder builder = new MultiConfigSuiteBuilder(TestSecuritySuite.class);
    // build up a project builder for the workload
    TPCCProjectBuilder project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    // procedures with progressively wider role access: no roles, group1 only, group1+group2
    ArrayList<ProcedureInfo> procedures = new ArrayList<>();
    procedures.add(new ProcedureInfo(new String[0], PROCEDURES[0]));
    procedures.add(new ProcedureInfo(new String[] { "group1" }, PROCEDURES[1]));
    procedures.add(new ProcedureInfo(new String[] { "group1", "group2" }, PROCEDURES[2]));
    project.addProcedures(procedures);
    // Role names below deliberately use mixed case ("grouP1", "AdMINISTRATOR", ...) —
    // presumably to exercise case-insensitive role lookup; confirm before "fixing" the casing.
    UserInfo[] users = new UserInfo[] {
        new UserInfo("user1", "password", new String[] { "grouP1" }),
        new UserInfo("user2", "password", new String[] { "grouP2" }),
        new UserInfo("user3", "password", new String[] { "grouP3" }),
        new UserInfo("user4", "password", new String[] { "AdMINISTRATOR" }),
        new UserInfo("userWithDefaultUserPerm", "password", new String[] { "User" }),
        new UserInfo("userWithAllProc", "password", new String[] { "GroupWithAllProcPerm" }),
        new UserInfo("userWithDefaultProcPerm", "password", new String[] { "groupWithDefaultProcPerm" }),
        new UserInfo("userWithoutDefaultProcPerm", "password", new String[] { "groupWiThoutDefaultProcPerm" }),
        new UserInfo("userWithDefaultProcReadPerm", "password", new String[] { "groupWiThDefaultProcReadPerm" })
    };
    project.addUsers(users);
    RoleInfo[] groups = new RoleInfo[] {
        new RoleInfo("Group1", false, false, false, false, false, false),
        new RoleInfo("Group2", true, false, false, false, false, false),
        new RoleInfo("Group3", true, false, false, false, false, false),
        new RoleInfo("GroupWithDefaultUserPerm", true, false, false, false, false, true),
        new RoleInfo("GroupWithAllProcPerm", false, false, false, false, false, true),
        new RoleInfo("GroupWithDefaultProcPerm", false, false, false, true, false, false),
        new RoleInfo("GroupWithoutDefaultProcPerm", false, false, false, false, false, false),
        new RoleInfo("GroupWithDefaultProcReadPerm", false, false, false, false, true, false)
    };
    project.addRoles(groups);
    // suite defines its own ADMINISTRATOR user
    project.setSecurityEnabled(true, false);
    // export disabled in community
    if (MiscUtils.isPro()) {
        project.addExport(true);
    }
    /////////////////////////////////////////////////////////////
    // CONFIG #1: 1 Local Site/Partitions running on JNI backend
    /////////////////////////////////////////////////////////////
    // get a server config for the native backend with one sites/partitions
    config = new LocalCluster("security-onesite.jar", 1, 1, 0, BackendTarget.NATIVE_EE_JNI);
    // build the jarfile
    if (!config.compile(project)) {
        fail();
    }
    // add this config to the set of tests to run
    builder.addServerConfig(config, false);
    return builder;
}
Also used : RoleInfo(org.voltdb.compiler.VoltProjectBuilder.RoleInfo) ProcedureInfo(org.voltdb.compiler.VoltProjectBuilder.ProcedureInfo) ArrayList(java.util.ArrayList) UserInfo(org.voltdb.compiler.VoltProjectBuilder.UserInfo) TPCCProjectBuilder(org.voltdb.benchmark.tpcc.TPCCProjectBuilder)

Example 2 with ProcedureInfo

Use of org.voltdb.compiler.VoltProjectBuilder.ProcedureInfo in the voltdb project by VoltDB.

From the class TestCatalogUpdateSuite, method suite():

/**
     * Build a list of the tests that will be run when TestCatalogUpdateSuite gets run by JUnit.
     * Use helper classes that are part of the RegressionSuite framework.
     * This particular class runs all tests on the local JNI backend with both
     * one and two partition configurations, as well as on the hsql backend.
     *
     * Besides the base config, this pre-compiles a series of "delta" catalog jars
     * (security, added tables, indexes, snapshot settings, timeouts, elastic
     * settings, a deliberately huge schema, ...) that the catalog-update tests
     * swap in at runtime.
     *
     * @return The TestSuite containing all the tests to be run.
     * @throws Exception
     */
public static Test suite() throws Exception {
    TheHashinator.initialize(TheHashinator.getConfiguredHashinatorClass(), TheHashinator.getConfigureBytes(2));
    // the suite made here will all be using the tests from this class
    MultiConfigSuiteBuilder builder = new MultiConfigSuiteBuilder(TestCatalogUpdateSuite.class);
    /////////////////////////////////////////////////////////////
    // CONFIG #1: 1 Local Site/Partitions running on JNI backend
    /////////////////////////////////////////////////////////////
    // get a server config for the native backend with one sites/partitions
    VoltServerConfig config = new LocalCluster("catalogupdate-cluster-base.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    // Catalog upgrade test(s) sporadically fail if there's a local server because
    // a file pipe isn't available for grepping local server output.
    // NOTE(review): the comment above reads as a reason to AVOID a local server,
    // yet the call below enables one — confirm which is intended.
    ((LocalCluster) config).setHasLocalServer(true);
    // build up a project builder for the workload
    TPCCProjectBuilder project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS);
    // build the jarfile
    boolean basecompile = config.compile(project);
    assertTrue(basecompile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-base.xml"));
    // add this config to the set of tests to run
    builder.addServerConfig(config, false);
    /////////////////////////////////////////////////////////////
    // DELTA CATALOGS FOR TESTING
    /////////////////////////////////////////////////////////////
    // As catalogupdate-cluster-base but with security enabled. This requires users and groups..
    // We piggy-back the heartbeat change here.
    RoleInfo[] groups = new RoleInfo[] { new RoleInfo("group1", false, false, true, false, false, false) };
    UserInfo[] users = new UserInfo[] { new UserInfo("user1", "userpass1", new String[] { "group1" }) };
    ProcedureInfo procInfo = new ProcedureInfo(new String[] { "group1" }, InsertNewOrder.class);
    config = new LocalCluster("catalogupdate-cluster-base-secure.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addUsers(users);
    project.addRoles(groups);
    project.addProcedures(procInfo);
    project.setSecurityEnabled(true, true);
    project.setDeadHostTimeout(6000);
    boolean compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-base-secure.xml"));
    //config = new LocalSingleProcessServer("catalogupdate-local-addtables.jar", 2, BackendTarget.NATIVE_EE_JNI);
    config = new LocalCluster("catalogupdate-cluster-addtables.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addSchema(TestCatalogUpdateSuite.class.getResource("testorderby-ddl.sql").getPath());
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS_OPROCS);
    project.setElasticDuration(100);
    project.setElasticThroughput(50);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addtables.xml"));
    // as above but also with a materialized view added to O1
    try {
        config = new LocalCluster("catalogupdate-cluster-addtableswithmatview.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
        project = new TPCCProjectBuilder();
        project.addDefaultSchema();
        project.addSchema(TestCatalogUpdateSuite.class.getResource("testorderby-ddl.sql").getPath());
        project.addLiteralSchema("CREATE VIEW MATVIEW_O1(C1, C2, NUM) AS SELECT A_INT, PKEY, COUNT(*) FROM O1 GROUP BY A_INT, PKEY;");
        project.addDefaultPartitioning();
        project.addProcedures(BASEPROCS_OPROCS);
        compile = config.compile(project);
        assertTrue(compile);
        MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addtableswithmatview.xml"));
    } catch (IOException e) {
        fail();
    }
    config = new LocalCluster("catalogupdate-cluster-addindex.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addLiteralSchema("CREATE INDEX NEWINDEX ON NEW_ORDER (NO_O_ID);");
    // history is good because this new index is the only one (no pkey)
    project.addLiteralSchema("CREATE INDEX NEWINDEX2 ON HISTORY (H_C_ID);");
    // unique index
    project.addLiteralSchema("CREATE UNIQUE INDEX NEWINDEX3 ON STOCK (S_I_ID, S_W_ID, S_QUANTITY);");
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addindex.xml"));
    config = new LocalCluster("catalogupdate-cluster-addexpressindex.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addLiteralSchema("CREATE INDEX NEWEXPRESSINDEX ON NEW_ORDER ((NO_O_ID+NO_O_ID)-NO_O_ID);");
    // history is good because this new index is the only one (no pkey)
    project.addLiteralSchema("CREATE INDEX NEWEXPRESSINDEX2 ON HISTORY ((H_C_ID+H_C_ID)-H_C_ID);");
    // unique index
    // This needs to wait until the test for unique index coverage for indexed expressions can parse out any simple column expressions
    // and discover a unique index on some subset.
    //TODO: project.addLiteralSchema("CREATE UNIQUE INDEX NEWEXPRESSINDEX3 ON STOCK (S_I_ID, S_W_ID, S_QUANTITY+S_QUANTITY-S_QUANTITY);");
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-addexpressindex.xml"));
    //config = new LocalSingleProcessServer("catalogupdate-local-expanded.jar", 2, BackendTarget.NATIVE_EE_JNI);
    config = new LocalCluster("catalogupdate-cluster-expanded.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(EXPANDEDPROCS);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-expanded.xml"));
    config = new LocalCluster("catalogupdate-cluster-adhocproc.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addStmtProcedure("adhocproc1", "SELECT * from WAREHOUSE");
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-adhocproc.xml"));
    config = new LocalCluster("catalogupdate-cluster-adhocschema.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addLiteralSchema("CREATE TABLE CATALOG_MODE_DDL_TEST (fld1 INTEGER NOT NULL);");
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-adhocschema.xml"));
    //config = new LocalSingleProcessServer("catalogupdate-local-conflict.jar", 2, BackendTarget.NATIVE_EE_JNI);
    config = new LocalCluster("catalogupdate-cluster-conflict.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(CONFLICTPROCS);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-conflict.xml"));
    //config = new LocalSingleProcessServer("catalogupdate-local-many.jar", 2, BackendTarget.NATIVE_EE_JNI);
    config = new LocalCluster("catalogupdate-cluster-many.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(SOMANYPROCS);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-many.xml"));
    // A catalog change that enables snapshots
    config = new LocalCluster("catalogupdate-cluster-enable_snapshot.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS);
    project.setSnapshotSettings("1s", 3, "/tmp/snapshotdir1", "foo1");
    // build the jarfile
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-enable_snapshot.xml"));
    //Another catalog change to modify the schedule
    config = new LocalCluster("catalogupdate-cluster-change_snapshot.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS);
    project.setSnapshotSettings("1s", 3, "/tmp/snapshotdir2", "foo2");
    // build the jarfile
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-change_snapshot.xml"));
    //Another catalog change to modify the schedule
    config = new LocalCluster("catalogupdate-cluster-change_snapshot_dir_not_exist.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS);
    project.setSnapshotSettings("1s", 3, "/tmp/snapshotdirasda2", "foo2");
    // build the jarfile
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-change_snapshot_dir_not_exist.xml"));
    //A huge catalog update to test size limits
    config = new LocalCluster("catalogupdate-cluster-huge.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    long t = System.currentTimeMillis();
    String hugeSchemaURL = generateRandomDDL("catalogupdate-cluster-huge", HUGE_TABLES, HUGE_COLUMNS, HUGE_NAME_SIZE);
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addSchema(hugeSchemaURL);
    project.addProcedures(BASEPROCS);
    compile = config.compile(project);
    assertTrue(compile);
    hugeCompileElapsed = (System.currentTimeMillis() - t) / 1000.0;
    hugeCatalogXMLPath = Configuration.getPathToCatalogForTest("catalogupdate-cluster-huge.xml");
    hugeCatalogJarPath = Configuration.getPathToCatalogForTest("catalogupdate-cluster-huge.jar");
    MiscUtils.copyFile(project.getPathToDeployment(), hugeCatalogXMLPath);
    // NOTE(review): the block below rebuilds catalogupdate-cluster-change_snapshot_dir_not_exist
    // with identical settings to the earlier build of the same jar/xml — presumably redundant
    // duplication; confirm before removing.
    config = new LocalCluster("catalogupdate-cluster-change_snapshot_dir_not_exist.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.addDefaultPartitioning();
    project.addProcedures(BASEPROCS);
    project.setSnapshotSettings("1s", 3, "/tmp/snapshotdirasda2", "foo2");
    // build the jarfile
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-change_snapshot_dir_not_exist.xml"));
    // Catalogs with different system settings on query time out
    config = new LocalCluster("catalogupdate-cluster-timeout-1000.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.setQueryTimeout(1000);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-timeout-1000.xml"));
    config = new LocalCluster("catalogupdate-cluster-timeout-5000.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.setQueryTimeout(5000);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-timeout-5000.xml"));
    config = new LocalCluster("catalogupdate-cluster-timeout-600.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    project.setQueryTimeout(600);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-timeout-600.xml"));
    // elastic duration and throughput catalog update tests
    config = new LocalCluster("catalogupdate-cluster-elastic-100-5.jar", SITES_PER_HOST, HOSTS, K, BackendTarget.NATIVE_EE_JNI);
    project = new TPCCProjectBuilder();
    project.addDefaultSchema();
    // build the jarfile
    project.setElasticDuration(100);
    project.setElasticThroughput(5);
    compile = config.compile(project);
    assertTrue(compile);
    MiscUtils.copyFile(project.getPathToDeployment(), Configuration.getPathToCatalogForTest("catalogupdate-cluster-elastic-100-5.xml"));
    return builder;
}
Also used : ProcedureInfo(org.voltdb.compiler.VoltProjectBuilder.ProcedureInfo) UserInfo(org.voltdb.compiler.VoltProjectBuilder.UserInfo) IOException(java.io.IOException) RoleInfo(org.voltdb.compiler.VoltProjectBuilder.RoleInfo) TPCCProjectBuilder(org.voltdb.benchmark.tpcc.TPCCProjectBuilder)

Example 3 with ProcedureInfo

Use of org.voltdb.compiler.VoltProjectBuilder.ProcedureInfo in the voltdb project by VoltDB.

From the class TestJSONInterface, method testJSONAuth():

/**
 * Exercises authentication over the JSON/HTTP interface: successful logins for
 * every configured user, re-used credentials, bad passwords, malformed password
 * hashes (wrong length and gibberish), and — enterprise only — catalog updates
 * that change user passwords, verifying old/new credentials afterwards.
 * Starts an in-process server; the finally block guarantees shutdown.
 */
public void testJSONAuth() throws Exception {
    try {
        String simpleSchema = "CREATE TABLE HELLOWORLD (\n" + "    HELLO VARCHAR(15),\n" + "    WORLD VARCHAR(20),\n" + "    DIALECT VARCHAR(15) NOT NULL,\n" + "    PRIMARY KEY (DIALECT)\n" + ");";
        File schemaFile = VoltProjectBuilder.writeStringToTempFile(simpleSchema);
        String schemaPath = schemaFile.getPath();
        schemaPath = URLEncoder.encode(schemaPath, "UTF-8");
        VoltProjectBuilder builder = new VoltProjectBuilder();
        builder.addSchema(schemaPath);
        builder.addPartitionInfo("HELLOWORLD", "DIALECT");
        RoleInfo gi = new RoleInfo("foo", true, false, true, true, false, false);
        builder.addRoles(new RoleInfo[] { gi });
        // create 15 users, only the first one has an interesting user/pass
        UserInfo[] ui = new UserInfo[15];
        ui[0] = new UserInfo("ry@nlikesthe", "y@nkees", new String[] { "foo" });
        for (int i = 1; i < ui.length; i++) {
            ui[i] = new UserInfo("USER" + String.valueOf(i), "PASS" + String.valueOf(i), new String[] { "foo" });
        }
        builder.addUsers(ui);
        builder.setSecurityEnabled(true, true);
        ProcedureInfo[] pi = new ProcedureInfo[2];
        pi[0] = new ProcedureInfo(new String[] { "foo" }, "Insert", "insert into HELLOWORLD values (?,?,?);", null);
        pi[1] = new ProcedureInfo(new String[] { "foo" }, "Select", "select * from HELLOWORLD;", null);
        builder.addProcedures(pi);
        builder.setHTTPDPort(8095);
        boolean success = builder.compile(Configuration.getPathToCatalogForTest("json.jar"));
        assertTrue(success);
        VoltDB.Configuration config = new VoltDB.Configuration();
        config.m_pathToCatalog = config.setPathToCatalogForTest("json.jar");
        config.m_pathToDeployment = builder.getPathToDeployment();
        server = new ServerThread(config);
        server.start();
        server.waitForInitialization();
        ParameterSet pset;
        // test good auths
        for (UserInfo u : ui) {
            pset = ParameterSet.fromArrayNoCopy(u.name, u.password, u.name);
            String response = callProcOverJSON("Insert", pset, u.name, u.password, true);
            Response r = responseFromJSON(response);
            assertEquals(ClientResponse.SUCCESS, r.status);
        }
        // test re-using auths
        for (UserInfo u : ui) {
            pset = ParameterSet.fromArrayNoCopy(u.name + "-X", u.password + "-X", u.name + "-X");
            String response = callProcOverJSON("Insert", pset, u.name, u.password, false);
            Response r = responseFromJSON(response);
            assertEquals(ClientResponse.SUCCESS, r.status);
        }
        // test bad auth
        UserInfo u = ui[0];
        pset = ParameterSet.fromArrayNoCopy(u.name + "-X1", u.password + "-X1", u.name + "-X1");
        String response = callProcOverJSON("Insert", pset, u.name, "ick", true, false, 401, ClientAuthScheme.HASH_SHA256);
        Response r = responseFromJSON(response);
        assertEquals(ClientResponse.UNEXPECTED_FAILURE, r.status);
        response = callProcOverJSON("Insert", pset, u.name, "ick", false, false, 401, ClientAuthScheme.HASH_SHA256);
        r = responseFromJSON(response);
        assertEquals(ClientResponse.UNEXPECTED_FAILURE, r.status);
        // test malformed auth (too short hash)
        pset = ParameterSet.fromArrayNoCopy(u.name + "-X2", u.password + "-X2", u.name + "-X2");
        String paramsInJSON = pset.toJSONString();
        HashMap<String, String> params = new HashMap<>();
        params.put("Procedure", "Insert");
        params.put("Parameters", paramsInJSON);
        params.put("User", u.name);
        params.put("Password", Encoder.hexEncode(new byte[] { 1, 2, 3 }));
        response = callProcOverJSONRaw(params, 401);
        r = responseFromJSON(response);
        assertEquals(ClientResponse.UNEXPECTED_FAILURE, r.status);
        // test malformed auth (gibberish password, but good length)
        pset = ParameterSet.fromArrayNoCopy(u.name + "-X3", u.password + "-X3", u.name + "-X3");
        paramsInJSON = pset.toJSONString();
        params = new HashMap<>();
        params.put("Procedure", "Insert");
        params.put("Parameters", paramsInJSON);
        params.put("User", u.name);
        params.put("Password", "abcdefghiabcdefghiabcdefghiabcdefghi");
        response = callProcOverJSONRaw(params, 401);
        r = responseFromJSON(response);
        assertEquals(ClientResponse.UNEXPECTED_FAILURE, r.status);
        // the update catalog test below is for enterprise only
        if (VoltDB.instance().getConfig().m_isEnterprise == false) {
            return;
        }
        // ENG-963 below here
        // do enough to get a new deployment file
        VoltProjectBuilder builder2 = new VoltProjectBuilder();
        builder2.addSchema(schemaPath);
        builder2.addPartitionInfo("HELLOWORLD", "DIALECT");
        // Same groups
        builder2.addRoles(new RoleInfo[] { gi });
        // create same 15 users, hack the last 14 passwords
        ui = new UserInfo[15];
        ui[0] = new UserInfo("ry@nlikesthe", "y@nkees", new String[] { "foo" });
        for (int i = 1; i < ui.length; i++) {
            ui[i] = new UserInfo("USER" + String.valueOf(i), "welcomehackers" + String.valueOf(i), new String[] { "foo" });
        }
        builder2.addUsers(ui);
        builder2.setSecurityEnabled(true, true);
        builder2.addProcedures(pi);
        builder2.setHTTPDPort(8095);
        success = builder2.compile(Configuration.getPathToCatalogForTest("json-update.jar"));
        assertTrue(success);
        pset = ParameterSet.fromArrayNoCopy(Encoder.hexEncode(MiscUtils.fileToBytes(new File(config.m_pathToCatalog))), new String(MiscUtils.fileToBytes(new File(builder2.getPathToDeployment())), "UTF-8"));
        response = callProcOverJSON("@UpdateApplicationCatalog", pset, ui[0].name, ui[0].password, true);
        r = responseFromJSON(response);
        assertEquals(ClientResponse.SUCCESS, r.status);
        // retest the good auths above
        for (UserInfo user : ui) {
            ParameterSet ps = ParameterSet.fromArrayNoCopy(user.name + "-X3", user.password + "-X3", user.name + "-X3");
            String respstr = callProcOverJSON("Insert", ps, user.name, user.password, false);
            Response resp = responseFromJSON(respstr);
            assertEquals(ClientResponse.SUCCESS, resp.status);
        }
        VoltProjectBuilder builder3 = new VoltProjectBuilder();
        builder3.addSchema(schemaPath);
        builder3.addPartitionInfo("HELLOWORLD", "DIALECT");
        // Same groups
        builder3.addRoles(new RoleInfo[] { gi });
        ui = new UserInfo[1];
        // NOTE(review): long hex string looks like a pre-hashed password and the trailing
        // 'false' presumably marks it as already hashed (plaintext appears to be "admin",
        // used below) — confirm against the UserInfo constructor.
        ui[0] = new UserInfo("ry@nlikesthe", "D033E22AE348AEB5660FC2140AEC35850C4DA9978C6976E5B5410415BDE908BD4DEE15DFB167A9C873FC4BB8A81F6F2AB448A918", new String[] { "foo" }, false);
        builder3.addUsers(ui);
        builder3.setSecurityEnabled(true, true);
        builder3.addProcedures(pi);
        builder3.setHTTPDPort(8095);
        success = builder3.compile(Configuration.getPathToCatalogForTest("json-update.jar"));
        assertTrue(success);
        pset = ParameterSet.fromArrayNoCopy(Encoder.hexEncode(MiscUtils.fileToBytes(new File(config.m_pathToCatalog))), new String(MiscUtils.fileToBytes(new File(builder3.getPathToDeployment())), "UTF-8"));
        response = callProcOverJSON("@UpdateApplicationCatalog", pset, "ry@nlikesthe", "y@nkees", true);
        r = responseFromJSON(response);
        assertEquals(ClientResponse.SUCCESS, r.status);
        // retest the good auths above
        ParameterSet ps = ParameterSet.fromArrayNoCopy(ui[0].name + "-X4", "admin-X4", ui[0].name + "-X4");
        String respstr = callProcOverJSON("Insert", ps, ui[0].name, "admin", false);
        Response resp = responseFromJSON(respstr);
        assertEquals(ClientResponse.SUCCESS, resp.status);
    } finally {
        // always tear the in-process server down, even on assertion failure
        if (server != null) {
            server.shutdown();
            server.join();
        }
        server = null;
    }
}
Also used : Configuration(org.voltdb.VoltDB.Configuration) HashMap(java.util.HashMap) ProcedureInfo(org.voltdb.compiler.VoltProjectBuilder.ProcedureInfo) UserInfo(org.voltdb.compiler.VoltProjectBuilder.UserInfo) ClientResponse(org.voltdb.client.ClientResponse) HttpResponse(org.apache.http.HttpResponse) RoleInfo(org.voltdb.compiler.VoltProjectBuilder.RoleInfo) VoltProjectBuilder(org.voltdb.compiler.VoltProjectBuilder) Configuration(org.voltdb.VoltDB.Configuration) File(java.io.File)

Aggregations

ProcedureInfo (org.voltdb.compiler.VoltProjectBuilder.ProcedureInfo)3 RoleInfo (org.voltdb.compiler.VoltProjectBuilder.RoleInfo)3 UserInfo (org.voltdb.compiler.VoltProjectBuilder.UserInfo)3 TPCCProjectBuilder (org.voltdb.benchmark.tpcc.TPCCProjectBuilder)2 File (java.io.File)1 IOException (java.io.IOException)1 ArrayList (java.util.ArrayList)1 HashMap (java.util.HashMap)1 HttpResponse (org.apache.http.HttpResponse)1 Configuration (org.voltdb.VoltDB.Configuration)1 ClientResponse (org.voltdb.client.ClientResponse)1 VoltProjectBuilder (org.voltdb.compiler.VoltProjectBuilder)1