Use of org.voltdb.compiler.VoltCompiler in project voltdb by VoltDB.
From class TestAdhocCreateDropJavaProc, method testCreateUsingExistingImport.
// This test should trigger the same failure seen in ENG-6611
@Test
public void testCreateUsingExistingImport() throws Exception {
    System.out.println("\n\n-----\n testCreateUsingExistingImport \n-----\n\n");
    String pathToCatalog = Configuration.getPathToCatalogForTest("updateclasses.jar");
    String pathToDeployment = Configuration.getPathToCatalogForTest("updateclasses.xml");
    VoltProjectBuilder builder = new VoltProjectBuilder();
    // Start off with the dependency imported
    builder.addLiteralSchema("import class org.voltdb_testprocs.updateclasses.NoMeaningClass;");
    builder.setUseDDLSchema(true);
    boolean success = builder.compile(pathToCatalog, 2, 1, 0);
    assertTrue("Schema compilation failed", success);
    MiscUtils.copyFile(builder.getPathToDeployment(), pathToDeployment);
    try {
        LocalCluster cluster = new LocalCluster("updateclasses.jar", 2, 1, 0, BackendTarget.NATIVE_EE_JNI);
        cluster.compile(builder);
        cluster.setHasLocalServer(false);
        cluster.startUp();
        m_client = ClientFactory.createClient();
        m_client.createConnection(cluster.getListenerAddress(0));
        ClientResponse resp;
        resp = m_client.callProcedure("@SystemCatalog", "CLASSES");
        System.out.println(resp.getResults()[0]);
        // Now load the procedure requiring the already-resident dependency
        InMemoryJarfile jarfile = new InMemoryJarfile();
        VoltCompiler comp = new VoltCompiler(false);
        comp.addClassToJar(jarfile, org.voltdb_testprocs.updateclasses.testImportProc.class);
        try {
            resp = m_client.callProcedure("@UpdateClasses", jarfile.getFullJarBytes(), null);
        } catch (ProcCallException pce) {
            pce.printStackTrace();
            fail("Triggered ENG-6611!");
        }
        resp = m_client.callProcedure("@SystemCatalog", "CLASSES");
        assertEquals(2, resp.getResults()[0].getRowCount());
        // create the proc and make sure it runs
        try {
            resp = m_client.callProcedure("@AdHoc", "create procedure from class org.voltdb_testprocs.updateclasses.testImportProc");
        } catch (ProcCallException pce) {
            fail("Should be able to create testImportProc procedure");
        }
        assertTrue(findProcedureInSystemCatalog("testImportProc"));
        try {
            resp = m_client.callProcedure("testImportProc");
        } catch (ProcCallException pce) {
            pce.printStackTrace();
            fail("Should be able to call fully consistent procedure");
        }
        assertEquals(10L, resp.getResults()[0].asScalarLong());
        m_client.close();
        cluster.shutDown();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
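Distilled from the test above, here is a minimal sketch (not part of the VoltDB test itself; the class and method names UpdateClassesSketch and loadClass are illustrative) of the pattern it exercises: package a stored-procedure class into an InMemoryJarfile with VoltCompiler.addClassToJar() and push it to a running cluster through the @UpdateClasses system procedure. The boolean passed to the VoltCompiler constructor and the null second argument to @UpdateClasses simply mirror the tests above.

import org.voltdb.client.Client;
import org.voltdb.client.ClientResponse;
import org.voltdb.compiler.VoltCompiler;
import org.voltdb.utils.InMemoryJarfile;

public final class UpdateClassesSketch {
    // Illustrative helper: compile the given class into an in-memory jar and load it
    // into the connected cluster via the @UpdateClasses system procedure.
    static ClientResponse loadClass(Client client, Class<?> procClass) throws Exception {
        InMemoryJarfile jarfile = new InMemoryJarfile();
        VoltCompiler comp = new VoltCompiler(false); // constructor flag copied from the tests above
        comp.addClassToJar(jarfile, procClass);
        // Second argument mirrors the tests above (no class-deletion pattern supplied).
        return client.callProcedure("@UpdateClasses", jarfile.getFullJarBytes(), null);
    }
}

A caller would then register the loaded class as a procedure with @AdHoc, as the test does with "create procedure from class org.voltdb_testprocs.updateclasses.testImportProc".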
Use of org.voltdb.compiler.VoltCompiler in project voltdb by VoltDB.
From class TestLiveDDLSchemaSwitch, method testReplicaWithAdhocDDL.
@Test
public void testReplicaWithAdhocDDL() throws Exception {
    generateCatalogsAndDeployments(true);
    // Fire up a replica cluster using the 'other' catalog and the replica deployment
    VoltDB.Configuration config = new VoltDB.Configuration();
    config.m_pathToCatalog = m_pathToOtherCatalog;
    config.m_pathToDeployment = m_pathToReplicaDeployment;
    try {
        startSystem(config);
        // UAC with a schema should fail
        assertFalse(findTableInSystemCatalogResults("FOO"));
        boolean threw = false;
        try {
            m_client.updateApplicationCatalog(new File(m_pathToCatalog), null);
        } catch (ProcCallException pce) {
            threw = true;
            assertTrue(pce.getMessage().contains("Cluster is configured to use AdHoc DDL"));
        }
        assertTrue("@UAC should have failed", threw);
        assertFalse(findTableInSystemCatalogResults("FOO"));
        // deployment-only UAC should succeed
        threw = false;
        try {
            m_client.updateApplicationCatalog(null, new File(m_pathToOtherReplicaDeployment));
        } catch (ProcCallException pce) {
            threw = true;
        }
        assertFalse("@UAC should succeed with just a deployment file", threw);
        assertEquals(getHeartbeatTimeout(), 6);
        // Adhoc DDL should be allowed on the replica
        assertFalse(findTableInSystemCatalogResults("BAR"));
        try {
            m_client.callProcedure("@AdHoc", "create table BAR (ID integer, VAL varchar(50));");
        } catch (ProcCallException pce) {
            fail("@AdHoc should succeed on replica cluster");
        }
        assertTrue(findTableInSystemCatalogResults("BAR"));
        // Adhoc DML updates should be rejected in the replica
        threw = false;
        try {
            m_client.callProcedure("@AdHoc", "insert into BAR values (100, 'ABC');");
        } catch (ProcCallException pce) {
            threw = true;
            System.out.println(pce.getMessage());
            assertTrue(pce.getMessage().contains("Write procedure @AdHoc_RW_MP is not allowed in replica cluster"));
        }
        assertTrue("Adhoc DML should have failed", threw);
        // @UpdateClasses should be allowed
        assertFalse(findClassInSystemCatalog("org.voltdb_testprocs.fullddlfeatures.testImportProc"));
        threw = false;
        try {
            InMemoryJarfile jarfile = new InMemoryJarfile();
            VoltCompiler comp = new VoltCompiler(false);
            comp.addClassToJar(jarfile, org.voltdb_testprocs.fullddlfeatures.testImportProc.class);
            m_client.callProcedure("@UpdateClasses", jarfile.getFullJarBytes(), null);
        } catch (ProcCallException pce) {
            threw = true;
            assertTrue(pce.getMessage().contains("Write procedure @UpdateClasses is not allowed"));
        }
        assertFalse("@UpdateClasses should have worked", threw);
        assertTrue(findClassInSystemCatalog("org.voltdb_testprocs.fullddlfeatures.testImportProc"));
        // adhoc queries still work
        ClientResponse result = m_client.callProcedure("@AdHoc", "select * from baz;");
        assertEquals(ClientResponse.SUCCESS, result.getStatus());
        // Promote, should behave like the original master test
        m_client.callProcedure("@Promote");
        verifyMasterWithAdhocDDL();
    } finally {
        teardownSystem();
    }
}
Use of org.voltdb.compiler.VoltCompiler in project voltdb by VoltDB.
From class TestJdbcDatabaseMetaDataGenerator, method testGetColumns.
public void testGetColumns() throws Exception {
    HashMap<String, Object[]> refcolumns = new HashMap<String, Object[]>();
    refcolumns.put("Column1", new Object[] { java.sql.Types.VARCHAR, "VARCHAR", 200, null, null, java.sql.DatabaseMetaData.columnNoNulls, null, null, 200, 1, "NO" });
    refcolumns.put("Column2", new Object[] { java.sql.Types.TINYINT, "TINYINT", 7, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 2, "YES" });
    refcolumns.put("Column3", new Object[] { java.sql.Types.SMALLINT, "SMALLINT", 15, null, 2, java.sql.DatabaseMetaData.columnNoNulls, "PARTITION_COLUMN", null, null, 1, "NO" });
    refcolumns.put("Column4", new Object[] { java.sql.Types.INTEGER, "INTEGER", 31, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 2, "YES" });
    refcolumns.put("Column5", new Object[] { java.sql.Types.BIGINT, "BIGINT", 63, null, 2, java.sql.DatabaseMetaData.columnNoNulls, null, null, null, 3, "NO" });
    refcolumns.put("Column6", new Object[] { java.sql.Types.FLOAT, "FLOAT", 53, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 1, "YES" });
    refcolumns.put("Column7", new Object[] { java.sql.Types.TIMESTAMP, "TIMESTAMP", 63, null, 2, java.sql.DatabaseMetaData.columnNoNulls, null, null, null, 2, "NO" });
    refcolumns.put("Column8", new Object[] { java.sql.Types.DECIMAL, "DECIMAL", VoltDecimalHelper.kDefaultPrecision, VoltDecimalHelper.kDefaultScale, 10, java.sql.DatabaseMetaData.columnNullable, null, null, null, 3, "YES" });
    refcolumns.put("Column9", new Object[] { java.sql.Types.VARBINARY, "VARBINARY", 250, null, null, java.sql.DatabaseMetaData.columnNoNulls, null, null, 250, 1, "NO" });
    refcolumns.put("Column10", new Object[] { java.sql.Types.VARCHAR, "VARCHAR", 200, null, null, java.sql.DatabaseMetaData.columnNullable, null, null, 200, 1, "YES" });
    refcolumns.put("Column11", new Object[] { java.sql.Types.BIGINT, "BIGINT", 63, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 2, "YES" });
    refcolumns.put("Default1", new Object[] { java.sql.Types.TINYINT, "TINYINT", 7, null, 2, java.sql.DatabaseMetaData.columnNullable, null, "10", null, 1, "YES" });
    refcolumns.put("Default2", new Object[] { java.sql.Types.VARCHAR, "VARCHAR", 50, null, null, java.sql.DatabaseMetaData.columnNullable, null, "'DUDE'", 50, 2, "YES" });
    String schema =
            "create table Table1 (Column1 varchar(200) not null, Column2 tinyint);" +
            "create table Table2 (Column3 smallint not null, Column4 integer, Column5 bigint not null);" +
            "partition table Table2 on column Column3;" +
            "create table Table3 (Column6 float, Column7 timestamp not null, Column8 decimal);" +
            "create table Table4 (Column9 varbinary(250) not null);" +
            "create view View1 (Column10, Column11) as select Column1, count(*) from Table1 group by Column1;" +
            "create table Table5 (Default1 tinyint default 10, Default2 varchar(50) default 'DUDE');" +
            "create procedure sample as select * from Table1;";
    VoltCompiler c = compileForDDLTest2(schema);
    System.out.println(c.getCatalog().serialize());
    JdbcDatabaseMetaDataGenerator dut = new JdbcDatabaseMetaDataGenerator(c.getCatalog(), null, new InMemoryJarfile(testout_jar));
    VoltTable columns = dut.getMetaData("ColUmns");
    System.out.println(columns);
    assertEquals(23, columns.getColumnCount());
    assertEquals(13, columns.getRowCount());
    for (Map.Entry<String, Object[]> entry : refcolumns.entrySet()) {
        verifyColumnData(entry.getKey(), columns, entry.getValue());
    }
}
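The TestJdbcDatabaseMetaDataGenerator examples in this section all follow the same shape, condensed in the sketch below. It is illustrative only: it assumes a VoltCompiler that has already compiled a schema (the tests use their compileForDDLTest2() helper and a testout_jar path for this), assumes JdbcDatabaseMetaDataGenerator lives in the org.voltdb package, and uses only the constructor and getMetaData() selector calls that appear in the tests.

import org.voltdb.JdbcDatabaseMetaDataGenerator; // package location assumed
import org.voltdb.VoltTable;
import org.voltdb.compiler.VoltCompiler;
import org.voltdb.utils.InMemoryJarfile;

public final class MetaDataSketch {
    // Illustrative: build the generator from a compiled catalog and print two metadata selectors.
    static void dump(VoltCompiler compiler, String jarPath) throws Exception {
        JdbcDatabaseMetaDataGenerator dut =
                new JdbcDatabaseMetaDataGenerator(compiler.getCatalog(), null, new InMemoryJarfile(jarPath));
        VoltTable columns = dut.getMetaData("ColUmns"); // selector matching is case-insensitive in the test above
        VoltTable classes = dut.getMetaData("classes");
        System.out.println(columns);
        System.out.println(classes);
    }
}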
Use of org.voltdb.compiler.VoltCompiler in project voltdb by VoltDB.
From class TestJdbcDatabaseMetaDataGenerator, method testGetClasses.
public void testGetClasses() throws Exception {
    String schema =
            "create table Table1 (Column1 varchar(200) not null, Column2 integer);" +
            "partition table Table1 on column Column1;" +
            "create procedure proc1 as select * from Table1 where Column1=?;" +
            "partition procedure proc1 on table Table1 column Column1;" +
            "create procedure proc2 as select * from Table1 where Column2=?;" +
            "import class org.voltdb_testprocs.fullddlfeatures.*;" +
            "create procedure from class org.voltdb_testprocs.fullddlfeatures.testImportProc;";
    VoltCompiler c = compileForDDLTest2(schema);
    JdbcDatabaseMetaDataGenerator dut = new JdbcDatabaseMetaDataGenerator(c.getCatalog(), null, new InMemoryJarfile(testout_jar));
    VoltTable classes = dut.getMetaData("classes");
    System.out.println(classes);
    assertTrue(VoltTableTestHelpers.moveToMatchingRow(classes, "CLASS_NAME", "org.voltdb_testprocs.fullddlfeatures.testImportProc"));
    assertEquals(1, classes.get("VOLT_PROCEDURE", VoltType.INTEGER));
    assertEquals(1, classes.get("ACTIVE_PROC", VoltType.INTEGER));
    assertTrue(VoltTableTestHelpers.moveToMatchingRow(classes, "CLASS_NAME", "org.voltdb_testprocs.fullddlfeatures.testCreateProcFromClassProc"));
    assertEquals(1, classes.get("VOLT_PROCEDURE", VoltType.INTEGER));
    assertEquals(0, classes.get("ACTIVE_PROC", VoltType.INTEGER));
    assertTrue(VoltTableTestHelpers.moveToMatchingRow(classes, "CLASS_NAME", "org.voltdb_testprocs.fullddlfeatures.NoMeaningClass"));
    assertEquals(0, classes.get("VOLT_PROCEDURE", VoltType.INTEGER));
    assertEquals(0, classes.get("ACTIVE_PROC", VoltType.INTEGER));
}
Use of org.voltdb.compiler.VoltCompiler in project voltdb by VoltDB.
From class TestJdbcDatabaseMetaDataGenerator, method testGetIndexInfo.
public void testGetIndexInfo() throws Exception {
    String schema =
            "create table Table1 (Column1 smallint ASSUMEUNIQUE, Column2 integer, Column3 bigint not null, Column4 integer, Column5 integer, " +
            " constraint pk_tree primary key (Column1, Column3));" +
            "partition table Table1 on column Column3;" +
            "create index Index1_tree on Table1 (Column2, Column3);" +
            "create index Index2_hash on Table1 (Column4, Column5);" +
            "create procedure sample as select * from Table1;";
    VoltCompiler c = compileForDDLTest2(schema);
    System.out.println(c.getCatalog().serialize());
    JdbcDatabaseMetaDataGenerator dut = new JdbcDatabaseMetaDataGenerator(c.getCatalog(), null, new InMemoryJarfile(testout_jar));
    VoltTable indexes = dut.getMetaData("IndexInfo");
    System.out.println(indexes);
    assertEquals(13, indexes.getColumnCount());
    assertEquals(7, indexes.getRowCount());
    assertTrue(VoltTableTestHelpers.moveToMatchingTupleRow(indexes, "INDEX_NAME", "INDEX1_TREE", "COLUMN_NAME", "Column2"));
    assertEquals("TABLE1", indexes.get("TABLE_NAME", VoltType.STRING));
    assertEquals((byte) 1, indexes.get("NON_UNIQUE", VoltType.TINYINT));
    assertEquals(java.sql.DatabaseMetaData.tableIndexOther, indexes.get("TYPE", VoltType.SMALLINT));
    assertEquals((short) 1, indexes.get("ORDINAL_POSITION", VoltType.SMALLINT));
    assertEquals("A", indexes.get("ASC_OR_DESC", VoltType.STRING));
    assertTrue(VoltTableTestHelpers.moveToMatchingTupleRow(indexes, "INDEX_NAME", "INDEX1_TREE", "COLUMN_NAME", "Column3"));
    assertEquals("TABLE1", indexes.get("TABLE_NAME", VoltType.STRING));
    assertEquals((byte) 1, indexes.get("NON_UNIQUE", VoltType.TINYINT));
    assertEquals(java.sql.DatabaseMetaData.tableIndexOther, indexes.get("TYPE", VoltType.SMALLINT));
    assertEquals((short) 2, indexes.get("ORDINAL_POSITION", VoltType.SMALLINT));
    assertEquals("A", indexes.get("ASC_OR_DESC", VoltType.STRING));
    assertTrue(VoltTableTestHelpers.moveToMatchingTupleRow(indexes, "INDEX_NAME", "INDEX2_HASH", "COLUMN_NAME", "Column4"));
    assertEquals("TABLE1", indexes.get("TABLE_NAME", VoltType.STRING));
    assertEquals((byte) 1, indexes.get("NON_UNIQUE", VoltType.TINYINT));
    assertEquals(java.sql.DatabaseMetaData.tableIndexHashed, indexes.get("TYPE", VoltType.SMALLINT));
    assertEquals((short) 1, indexes.get("ORDINAL_POSITION", VoltType.SMALLINT));
    assertEquals(null, indexes.get("ASC_OR_DESC", VoltType.STRING));
    assertTrue(VoltTableTestHelpers.moveToMatchingTupleRow(indexes, "INDEX_NAME", "INDEX2_HASH", "COLUMN_NAME", "Column5"));
    assertEquals("TABLE1", indexes.get("TABLE_NAME", VoltType.STRING));
    assertEquals((byte) 1, indexes.get("NON_UNIQUE", VoltType.TINYINT));
    assertEquals(java.sql.DatabaseMetaData.tableIndexHashed, indexes.get("TYPE", VoltType.SMALLINT));
    assertEquals((short) 2, indexes.get("ORDINAL_POSITION", VoltType.SMALLINT));
    assertEquals(null, indexes.get("ASC_OR_DESC", VoltType.STRING));
    assertTrue(VoltTableTestHelpers.moveToMatchingTupleRow(indexes, "INDEX_NAME", HSQLInterface.AUTO_GEN_NAMED_CONSTRAINT_IDX + "PK_TREE", "COLUMN_NAME", "Column1"));
    assertEquals("TABLE1", indexes.get("TABLE_NAME", VoltType.STRING));
    assertEquals((byte) 0, indexes.get("NON_UNIQUE", VoltType.TINYINT));
    assertEquals(java.sql.DatabaseMetaData.tableIndexOther, indexes.get("TYPE", VoltType.SMALLINT));
    assertEquals((short) 1, indexes.get("ORDINAL_POSITION", VoltType.SMALLINT));
    assertEquals("A", indexes.get("ASC_OR_DESC", VoltType.STRING));
    assertTrue(VoltTableTestHelpers.moveToMatchingTupleRow(indexes, "INDEX_NAME", HSQLInterface.AUTO_GEN_NAMED_CONSTRAINT_IDX + "PK_TREE", "COLUMN_NAME", "Column3"));
    assertEquals("TABLE1", indexes.get("TABLE_NAME", VoltType.STRING));
    assertEquals((byte) 0, indexes.get("NON_UNIQUE", VoltType.TINYINT));
    assertEquals(java.sql.DatabaseMetaData.tableIndexOther, indexes.get("TYPE", VoltType.SMALLINT));
    assertEquals((short) 2, indexes.get("ORDINAL_POSITION", VoltType.SMALLINT));
    assertEquals("A", indexes.get("ASC_OR_DESC", VoltType.STRING));
    assertTrue(VoltTableTestHelpers.moveToMatchingTupleRow(indexes, "INDEX_NAME", HSQLInterface.AUTO_GEN_UNIQUE_IDX_PREFIX + "TABLE1_COLUMN1", "COLUMN_NAME", "Column1"));
    assertEquals("TABLE1", indexes.get("TABLE_NAME", VoltType.STRING));
    assertEquals((byte) 0, indexes.get("NON_UNIQUE", VoltType.TINYINT));
    assertEquals(java.sql.DatabaseMetaData.tableIndexOther, indexes.get("TYPE", VoltType.SMALLINT));
    assertEquals((short) 1, indexes.get("ORDINAL_POSITION", VoltType.SMALLINT));
    assertEquals("A", indexes.get("ASC_OR_DESC", VoltType.STRING));
    assertFalse(VoltTableTestHelpers.moveToMatchingRow(indexes, "COLUMN_NAME", "NotAColumn"));
}
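The assertions above locate rows with the VoltTableTestHelpers utilities. As a usage note, the same IndexInfo VoltTable can also be walked with the table's standard cursor methods; a minimal illustrative sketch (the class and method names are not from the test), using only column names that appear in the assertions above:

import org.voltdb.VoltTable;

public final class IndexInfoWalker {
    // Illustrative: print one line per (index, column) row in the IndexInfo metadata table.
    static void printIndexColumns(VoltTable indexes) {
        indexes.resetRowPosition();
        while (indexes.advanceRow()) {
            System.out.printf("%s.%s appears in %s at ordinal %d%n",
                    indexes.getString("TABLE_NAME"),
                    indexes.getString("COLUMN_NAME"),
                    indexes.getString("INDEX_NAME"),
                    indexes.getLong("ORDINAL_POSITION"));
        }
    }
}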