use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.
the class UpdateCore method executePlanFragment.
@Override
public DependencyPair executePlanFragment(Map<Integer, List<VoltTable>> dependencies, long fragmentId, ParameterSet params, SystemProcedureExecutionContext context) {
if (fragmentId == SysProcFragmentId.PF_updateCatalogPrecheckAndSync) {
String[] tablesThatMustBeEmpty = (String[]) params.getParam(0);
String[] reasonsForEmptyTables = (String[]) params.getParam(1);
checkForNonEmptyTables(tablesThatMustBeEmpty, reasonsForEmptyTables, context);
// Send out fragments to do the initial round-trip to synchronize
// all the cluster sites on the start of catalog update, we'll do
// the actual work on the *next* round-trip below
// Don't actually care about the returned table, just need to send something
// back to the MPI scoreboard
DependencyPair success = new DependencyPair.TableDependencyPair(DEP_updateCatalogSync, new VoltTable(new ColumnInfo[] { new ColumnInfo("UNUSED", VoltType.BIGINT) }));
if (!context.isLowestSiteId()) {
// Only the lowest site id on this host needs to run the catalog class
// verification below; the other sites can acknowledge and return now.
if (log.isInfoEnabled()) {
log.info("Site " + CoreUtils.hsIdToString(m_site.getCorrespondingSiteId()) + " completed data precheck.");
}
return success;
}
// We know the ZK bytes are okay because the run() method wrote them before sending
// out fragments
CatalogAndIds catalogStuff = null;
try {
catalogStuff = CatalogUtil.getCatalogFromZK(VoltDB.instance().getHostMessenger().getZK());
InMemoryJarfile testjar = new InMemoryJarfile(catalogStuff.catalogBytes);
JarLoader testjarloader = testjar.getLoader();
for (String classname : testjarloader.getClassNames()) {
try {
m_javaClass.forName(classname, true, testjarloader);
}
// These are the class-loading failures we care about here.
catch (UnsupportedClassVersionError e) {
String msg = "Cannot load classes compiled with a higher version of Java than currently" + " in use. Class " + classname + " was compiled with ";
Integer major = 0;
try {
major = Integer.parseInt(e.getMessage().split("version")[1].trim().split("\\.")[0]);
} catch (Exception ex) {
log.debug("Unable to parse compile version number from UnsupportedClassVersionError.", ex);
}
if (m_versionMap.containsKey(major)) {
msg = msg.concat(m_versionMap.get(major) + ", current runtime version is " + System.getProperty("java.version") + ".");
} else {
msg = msg.concat("an incompatable Java version.");
}
log.error(msg);
throw new VoltAbortException(msg);
} catch (LinkageError | ClassNotFoundException e) {
String cause = e.getMessage();
if (cause == null && e.getCause() != null) {
cause = e.getCause().getMessage();
}
String msg = "Error loading class: " + classname + " from catalog: " + e.getClass().getCanonicalName() + ", " + cause;
log.warn(msg);
throw new VoltAbortException(e);
}
}
} catch (Exception e) {
Throwables.propagate(e);
}
if (log.isInfoEnabled()) {
log.info("Site " + CoreUtils.hsIdToString(m_site.getCorrespondingSiteId()) + " completed data and catalog precheck.");
}
return success;
} else if (fragmentId == SysProcFragmentId.PF_updateCatalogPrecheckAndSyncAggregate) {
// Don't actually care about the returned table, just need to send something
// back to the MPI scoreboard
log.info("Site " + CoreUtils.hsIdToString(m_site.getCorrespondingSiteId()) + " acknowledged data and catalog prechecks.");
return new DependencyPair.TableDependencyPair(DEP_updateCatalogSyncAggregate, new VoltTable(new ColumnInfo[] { new ColumnInfo("UNUSED", VoltType.BIGINT) }));
} else if (fragmentId == SysProcFragmentId.PF_updateCatalog) {
String catalogDiffCommands = (String) params.toArray()[0];
String commands = Encoder.decodeBase64AndDecompress(catalogDiffCommands);
int expectedCatalogVersion = (Integer) params.toArray()[1];
boolean requiresSnapshotIsolation = ((Byte) params.toArray()[2]) != 0;
boolean requireCatalogDiffCmdsApplyToEE = ((Byte) params.toArray()[3]) != 0;
boolean hasSchemaChange = ((Byte) params.toArray()[4]) != 0;
boolean requiresNewExportGeneration = ((Byte) params.toArray()[5]) != 0;
CatalogAndIds catalogStuff = null;
try {
catalogStuff = CatalogUtil.getCatalogFromZK(VoltDB.instance().getHostMessenger().getZK());
} catch (Exception e) {
Throwables.propagate(e);
}
String replayInfo = m_runner.getTxnState().isForReplay() ? " (FOR REPLAY)" : "";
// if this is a new catalog, do the work to update
if (context.getCatalogVersion() == expectedCatalogVersion) {
// update the global catalog if we get there first
@SuppressWarnings("deprecation") Pair<CatalogContext, CatalogSpecificPlanner> p = VoltDB.instance().catalogUpdate(commands, catalogStuff.catalogBytes, catalogStuff.getCatalogHash(), expectedCatalogVersion, DeprecatedProcedureAPIAccess.getVoltPrivateRealTransactionId(this), getUniqueId(), catalogStuff.deploymentBytes, catalogStuff.getDeploymentHash(), requireCatalogDiffCmdsApplyToEE, hasSchemaChange, requiresNewExportGeneration);
// The producer would have been turned off by the code above already.
if (VoltDB.instance().getReplicationRole() == ReplicationRole.NONE && !VoltDB.instance().getReplicationActive()) {
context.resetDrAppliedTracker();
}
// update the local catalog. Safe to do this thanks to the check to get into here.
long uniqueId = m_runner.getUniqueId();
long spHandle = m_runner.getTxnState().getNotice().getSpHandle();
context.updateCatalog(commands, p.getFirst(), p.getSecond(), requiresSnapshotIsolation, uniqueId, spHandle, requireCatalogDiffCmdsApplyToEE, requiresNewExportGeneration);
if (log.isDebugEnabled()) {
log.debug(String.format("Site %s completed catalog update with catalog hash %s, deployment hash %s%s.", CoreUtils.hsIdToString(m_site.getCorrespondingSiteId()), Encoder.hexEncode(catalogStuff.getCatalogHash()).substring(0, 10), Encoder.hexEncode(catalogStuff.getDeploymentHash()).substring(0, 10), replayInfo));
}
}
// If this catalog version has been seen before by this code, check whether
// this is a restarted (and therefore identical) catalog update.
else if (context.getCatalogVersion() == (expectedCatalogVersion + 1) && Arrays.equals(context.getCatalogHash(), catalogStuff.getCatalogHash()) && Arrays.equals(context.getDeploymentHash(), catalogStuff.getDeploymentHash())) {
log.info(String.format("Site %s will NOT apply an assumed restarted and identical catalog update with catalog hash %s and deployment hash %s.", CoreUtils.hsIdToString(m_site.getCorrespondingSiteId()), Encoder.hexEncode(catalogStuff.getCatalogHash()), Encoder.hexEncode(catalogStuff.getDeploymentHash())));
} else {
VoltDB.crashLocalVoltDB("Invalid catalog update. Expected version: " + expectedCatalogVersion + ", current version: " + context.getCatalogVersion(), false, null);
}
VoltTable result = new VoltTable(VoltSystemProcedure.STATUS_SCHEMA);
result.addRow(VoltSystemProcedure.STATUS_OK);
return new DependencyPair.TableDependencyPair(DEP_updateCatalog, result);
} else if (fragmentId == SysProcFragmentId.PF_updateCatalogAggregate) {
VoltTable result = VoltTableUtil.unionTables(dependencies.get(DEP_updateCatalog));
return new DependencyPair.TableDependencyPair(DEP_updateCatalogAggregate, result);
} else {
VoltDB.crashLocalVoltDB("Received unrecognized plan fragment id " + fragmentId + " in UpdateApplicationCatalog", false, null);
}
throw new RuntimeException("Should not reach this code");
}
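For quick reference, the catalog-verification step above reduces to a small InMemoryJarfile pattern: build the jar from raw bytes, get its class loader, and force-load every class it names. A minimal sketch follows, assuming catalogBytes holds the bytes of a catalog jar and that JarLoader is the nested loader type returned by getLoader(), as in the fragment above; the class and method names are illustrative.
import org.voltdb.utils.InMemoryJarfile;
import org.voltdb.utils.InMemoryJarfile.JarLoader;

// Hypothetical helper class; names are illustrative.
public class CatalogJarPrecheck {
    // Confirm every class in a catalog jar can be loaded and initialized.
    public static void verifyCatalogClasses(byte[] catalogBytes) throws Exception {
        InMemoryJarfile jar = new InMemoryJarfile(catalogBytes); // parse the jar entirely in memory
        JarLoader loader = jar.getLoader();                      // class loader backed by the in-memory jar
        for (String classname : loader.getClassNames()) {
            // Initializing (second argument true) surfaces linkage and version problems immediately
            Class.forName(classname, true, loader);
        }
    }
}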
use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.
the class TestAdhocCreateDropJavaProc method testBasic.
@Test
public void testBasic() throws Exception {
System.out.println("\n\n-----\n testBasic \n-----\n\n");
String pathToCatalog = Configuration.getPathToCatalogForTest("updateclasses.jar");
String pathToDeployment = Configuration.getPathToCatalogForTest("updateclasses.xml");
VoltProjectBuilder builder = new VoltProjectBuilder();
builder.addLiteralSchema("-- Don't care");
builder.setUseDDLSchema(true);
boolean success = builder.compile(pathToCatalog, 2, 1, 0);
assertTrue("Schema compilation failed", success);
MiscUtils.copyFile(builder.getPathToDeployment(), pathToDeployment);
try {
LocalCluster cluster = new LocalCluster("updateclasses.jar", 2, 1, 0, BackendTarget.NATIVE_EE_JNI);
cluster.compile(builder);
cluster.setHasLocalServer(false);
cluster.startUp();
m_client = ClientFactory.createClient();
m_client.createConnection(cluster.getListenerAddress(0));
ClientResponse resp;
// Can't create a procedure without a class
resp = m_client.callProcedure("@SystemCatalog", "CLASSES");
System.out.println("CLASSES: " + resp.getResults()[0]);
try {
resp = m_client.callProcedure("@AdHoc", "create procedure from class org.voltdb_testprocs.updateclasses.testImportProc");
fail("Shouldn't be able to create a procedure backed by no class");
} catch (ProcCallException pce) {
}
assertFalse(findProcedureInSystemCatalog("testImportProc"));
InMemoryJarfile jarfile = new InMemoryJarfile();
VoltCompiler comp = new VoltCompiler(false);
comp.addClassToJar(jarfile, org.voltdb_testprocs.updateclasses.testImportProc.class);
resp = m_client.callProcedure("@UpdateClasses", jarfile.getFullJarBytes(), null);
// Creating the procedure succeeds even though its dependency is missing; the
// failure only shows up when we call the procedure. Maybe this gets better in the future.
try {
resp = m_client.callProcedure("@AdHoc", "create procedure from class org.voltdb_testprocs.updateclasses.testImportProc");
} catch (ProcCallException pce) {
fail("We allow procedures to be created with unsatisfied dependencies");
}
assertTrue(findProcedureInSystemCatalog("testImportProc"));
// Make sure we don't crash when we call it though
try {
resp = m_client.callProcedure("testImportProc");
fail("Should return an error and not crash calling procedure w/ bad dependencies");
} catch (ProcCallException pce) {
assertTrue(pce.getMessage().contains("ClassNotFoundException"));
}
// Okay, add the missing dependency
jarfile = new InMemoryJarfile();
comp = new VoltCompiler(false);
comp.addClassToJar(jarfile, org.voltdb_testprocs.updateclasses.NoMeaningClass.class);
resp = m_client.callProcedure("@UpdateClasses", jarfile.getFullJarBytes(), null);
// now we should be able to call it
try {
resp = m_client.callProcedure("testImportProc");
} catch (ProcCallException pce) {
fail("Should be able to call fully consistent procedure");
}
assertEquals(10L, resp.getResults()[0].asScalarLong());
// Now try to remove the procedure class
try {
resp = m_client.callProcedure("@UpdateClasses", null, "org.voltdb_testprocs.updateclasses.*");
fail("Shouldn't be able to rip a class out from under an active proc");
} catch (ProcCallException pce) {
assertTrue(pce.getMessage(), pce.getMessage().contains("Cannot load class for procedure: org.voltdb_testprocs.updateclasses.testImportProc"));
}
// Make sure we didn't purge anything (even the extra dependency)
resp = m_client.callProcedure("@SystemCatalog", "CLASSES");
assertEquals(2, resp.getResults()[0].getRowCount());
// Okay, drop the procedure first
try {
resp = m_client.callProcedure("@AdHoc", "drop procedure testImportProc");
} catch (ProcCallException pce) {
fail("Should be able to drop a stored procedure");
}
assertFalse(findProcedureInSystemCatalog("testImportProc"));
// Now try to remove the procedure class again
try {
resp = m_client.callProcedure("@UpdateClasses", null, "org.voltdb_testprocs.updateclasses.*");
} catch (ProcCallException pce) {
fail("Should be able to remove the classes for an inactive procedure");
}
resp = m_client.callProcedure("@SystemCatalog", "CLASSES");
// no classes in catalog
assertEquals(0, resp.getResults()[0].getRowCount());
m_client.close();
cluster.shutDown();
} catch (Exception e) {
e.printStackTrace();
}
}
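The @UpdateClasses calls above all follow one pattern: start from an empty InMemoryJarfile, let VoltCompiler.addClassToJar add the compiled classes, and pass getFullJarBytes() to the server. A minimal sketch of that pattern, assuming client is an already-connected org.voltdb.client.Client; the helper class and method names are illustrative.
import org.voltdb.client.Client;
import org.voltdb.compiler.VoltCompiler;
import org.voltdb.utils.InMemoryJarfile;

// Hypothetical helper class; names are illustrative.
public class ClassLoadingHelper {
    // Build an in-memory jar from compiled classes and hand it to @UpdateClasses.
    public static void loadClasses(Client client, Class<?>... classes) throws Exception {
        InMemoryJarfile jar = new InMemoryJarfile();    // start from an empty in-memory jar
        VoltCompiler compiler = new VoltCompiler(false);
        for (Class<?> clazz : classes) {
            compiler.addClassToJar(jar, clazz);         // add the class bytes to the jar
        }
        // First parameter: jar of classes to load; second parameter: comma-separated
        // patterns of classes to delete (null here, as in the tests above).
        client.callProcedure("@UpdateClasses", jar.getFullJarBytes(), null);
    }
}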
use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.
the class TestAdhocCreateDropJavaProc method testCreateUsingExistingImport.
// This test exercises the scenario that previously triggered ENG-6611.
@Test
public void testCreateUsingExistingImport() throws Exception {
System.out.println("\n\n-----\n testCreateUsingExistingImport \n-----\n\n");
String pathToCatalog = Configuration.getPathToCatalogForTest("updateclasses.jar");
String pathToDeployment = Configuration.getPathToCatalogForTest("updateclasses.xml");
VoltProjectBuilder builder = new VoltProjectBuilder();
// Start off with the dependency imported
builder.addLiteralSchema("import class org.voltdb_testprocs.updateclasses.NoMeaningClass;");
builder.setUseDDLSchema(true);
boolean success = builder.compile(pathToCatalog, 2, 1, 0);
assertTrue("Schema compilation failed", success);
MiscUtils.copyFile(builder.getPathToDeployment(), pathToDeployment);
try {
LocalCluster cluster = new LocalCluster("updateclasses.jar", 2, 1, 0, BackendTarget.NATIVE_EE_JNI);
cluster.compile(builder);
cluster.setHasLocalServer(false);
cluster.startUp();
m_client = ClientFactory.createClient();
m_client.createConnection(cluster.getListenerAddress(0));
ClientResponse resp;
resp = m_client.callProcedure("@SystemCatalog", "CLASSES");
System.out.println(resp.getResults()[0]);
// Now load the procedure requiring the already-resident dependency
InMemoryJarfile jarfile = new InMemoryJarfile();
VoltCompiler comp = new VoltCompiler(false);
comp.addClassToJar(jarfile, org.voltdb_testprocs.updateclasses.testImportProc.class);
try {
resp = m_client.callProcedure("@UpdateClasses", jarfile.getFullJarBytes(), null);
} catch (ProcCallException pce) {
pce.printStackTrace();
fail("Triggered ENG-6611!");
}
resp = m_client.callProcedure("@SystemCatalog", "CLASSES");
assertEquals(2, resp.getResults()[0].getRowCount());
// create the proc and make sure it runs
try {
resp = m_client.callProcedure("@AdHoc", "create procedure from class org.voltdb_testprocs.updateclasses.testImportProc");
} catch (ProcCallException pce) {
fail("Should be able to create testImportProc procedure");
}
assertTrue(findProcedureInSystemCatalog("testImportProc"));
try {
resp = m_client.callProcedure("testImportProc");
} catch (ProcCallException pce) {
pce.printStackTrace();
fail("Should be able to call fully consistent procedure");
}
assertEquals(10L, resp.getResults()[0].asScalarLong());
m_client.close();
cluster.shutDown();
} catch (Exception e) {
e.printStackTrace();
}
}
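The behavior this test relies on is that @UpdateClasses merges the submitted jar with classes already resident in the catalog: NoMeaningClass was imported with the original schema, so loading only testImportProc still leaves both classes available (the CLASSES query returning two rows reflects that merge), and the procedure can be created and called immediately.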
use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.
the class TestLiveDDLSchemaSwitch method testReplicaWithAdhocDDL.
@Test
public void testReplicaWithAdhocDDL() throws Exception {
generateCatalogsAndDeployments(true);
// Fire up a replica cluster with the alternate catalog and the replica deployment
VoltDB.Configuration config = new VoltDB.Configuration();
config.m_pathToCatalog = m_pathToOtherCatalog;
config.m_pathToDeployment = m_pathToReplicaDeployment;
try {
startSystem(config);
// UAC with schema should fail
assertFalse(findTableInSystemCatalogResults("FOO"));
boolean threw = false;
try {
m_client.updateApplicationCatalog(new File(m_pathToCatalog), null);
} catch (ProcCallException pce) {
threw = true;
assertTrue(pce.getMessage().contains("Cluster is configured to use AdHoc DDL"));
}
assertTrue("@UAC should have failed", threw);
assertFalse(findTableInSystemCatalogResults("FOO"));
// deployment-only UAC should succeed
threw = false;
try {
m_client.updateApplicationCatalog(null, new File(m_pathToOtherReplicaDeployment));
} catch (ProcCallException pce) {
threw = true;
}
assertFalse("@UAC should should succeed with just a deployment file", threw);
assertEquals(getHeartbeatTimeout(), 6);
// Adhoc DDL should be accepted on the replica
assertFalse(findTableInSystemCatalogResults("BAR"));
try {
m_client.callProcedure("@AdHoc", "create table BAR (ID integer, VAL varchar(50));");
} catch (ProcCallException pce) {
fail("@AdHoc should succeed on replica cluster");
}
assertTrue(findTableInSystemCatalogResults("BAR"));
// Adhoc DML updates should be rejected in the replica
threw = false;
try {
m_client.callProcedure("@AdHoc", "insert into BAR values (100, 'ABC');");
} catch (ProcCallException pce) {
threw = true;
System.out.println(pce.getMessage());
assertTrue(pce.getMessage().contains("Write procedure @AdHoc_RW_MP is not allowed in replica cluster"));
}
assertTrue("Adhoc DDL should have failed", threw);
// @UpdateClasses should be accepted on the replica
assertFalse(findClassInSystemCatalog("org.voltdb_testprocs.fullddlfeatures.testImportProc"));
threw = false;
try {
InMemoryJarfile jarfile = new InMemoryJarfile();
VoltCompiler comp = new VoltCompiler(false);
comp.addClassToJar(jarfile, org.voltdb_testprocs.fullddlfeatures.testImportProc.class);
m_client.callProcedure("@UpdateClasses", jarfile.getFullJarBytes(), null);
} catch (ProcCallException pce) {
threw = true;
assertTrue(pce.getMessage().contains("Write procedure @UpdateClasses is not allowed"));
}
assertFalse("@UpdateClasses should have worked", threw);
assertTrue(findClassInSystemCatalog("org.voltdb_testprocs.fullddlfeatures.testImportProc"));
// adhoc queries still work
ClientResponse result = m_client.callProcedure("@AdHoc", "select * from baz;");
assertEquals(ClientResponse.SUCCESS, result.getStatus());
// Promote, should behave like the original master test
m_client.callProcedure("@Promote");
verifyMasterWithAdhocDDL();
} finally {
teardownSystem();
}
}
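In this test the @AdHoc DDL and @UpdateClasses calls are treated as schema changes, which the replica accepts when it is configured for AdHoc DDL, while the @AdHoc DML insert is rejected as a write procedure; after @Promote the cluster is expected to behave like the master case checked in verifyMasterWithAdhocDDL.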
use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.
the class TestJdbcDatabaseMetaDataGenerator method testGetColumns.
public void testGetColumns() throws Exception {
HashMap<String, Object[]> refcolumns = new HashMap<String, Object[]>();
refcolumns.put("Column1", new Object[] { java.sql.Types.VARCHAR, "VARCHAR", 200, null, null, java.sql.DatabaseMetaData.columnNoNulls, null, null, 200, 1, "NO" });
refcolumns.put("Column2", new Object[] { java.sql.Types.TINYINT, "TINYINT", 7, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 2, "YES" });
refcolumns.put("Column3", new Object[] { java.sql.Types.SMALLINT, "SMALLINT", 15, null, 2, java.sql.DatabaseMetaData.columnNoNulls, "PARTITION_COLUMN", null, null, 1, "NO" });
refcolumns.put("Column4", new Object[] { java.sql.Types.INTEGER, "INTEGER", 31, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 2, "YES" });
refcolumns.put("Column5", new Object[] { java.sql.Types.BIGINT, "BIGINT", 63, null, 2, java.sql.DatabaseMetaData.columnNoNulls, null, null, null, 3, "NO" });
refcolumns.put("Column6", new Object[] { java.sql.Types.FLOAT, "FLOAT", 53, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 1, "YES" });
refcolumns.put("Column7", new Object[] { java.sql.Types.TIMESTAMP, "TIMESTAMP", 63, null, 2, java.sql.DatabaseMetaData.columnNoNulls, null, null, null, 2, "NO" });
refcolumns.put("Column8", new Object[] { java.sql.Types.DECIMAL, "DECIMAL", VoltDecimalHelper.kDefaultPrecision, VoltDecimalHelper.kDefaultScale, 10, java.sql.DatabaseMetaData.columnNullable, null, null, null, 3, "YES" });
refcolumns.put("Column9", new Object[] { java.sql.Types.VARBINARY, "VARBINARY", 250, null, null, java.sql.DatabaseMetaData.columnNoNulls, null, null, 250, 1, "NO" });
refcolumns.put("Column10", new Object[] { java.sql.Types.VARCHAR, "VARCHAR", 200, null, null, java.sql.DatabaseMetaData.columnNullable, null, null, 200, 1, "YES" });
refcolumns.put("Column11", new Object[] { java.sql.Types.BIGINT, "BIGINT", 63, null, 2, java.sql.DatabaseMetaData.columnNullable, null, null, null, 2, "YES" });
refcolumns.put("Default1", new Object[] { java.sql.Types.TINYINT, "TINYINT", 7, null, 2, java.sql.DatabaseMetaData.columnNullable, null, "10", null, 1, "YES" });
refcolumns.put("Default2", new Object[] { java.sql.Types.VARCHAR, "VARCHAR", 50, null, null, java.sql.DatabaseMetaData.columnNullable, null, "'DUDE'", 50, 2, "YES" });
String schema = "create table Table1 (Column1 varchar(200) not null, Column2 tinyint);" + "create table Table2 (Column3 smallint not null, Column4 integer, Column5 bigint not null);" + "partition table Table2 on column Column3;" + "create table Table3 (Column6 float, Column7 timestamp not null, Column8 decimal);" + "create table Table4 (Column9 varbinary(250) not null);" + "create view View1 (Column10, Column11) as select Column1, count(*) from Table1 group by Column1;" + "create table Table5 (Default1 tinyint default 10, Default2 varchar(50) default 'DUDE');" + "create procedure sample as select * from Table1;";
VoltCompiler c = compileForDDLTest2(schema);
System.out.println(c.getCatalog().serialize());
JdbcDatabaseMetaDataGenerator dut = new JdbcDatabaseMetaDataGenerator(c.getCatalog(), null, new InMemoryJarfile(testout_jar));
VoltTable columns = dut.getMetaData("ColUmns");
System.out.println(columns);
assertEquals(23, columns.getColumnCount());
assertEquals(13, columns.getRowCount());
for (Map.Entry<String, Object[]> entry : refcolumns.entrySet()) {
verifyColumnData(entry.getKey(), columns, entry.getValue());
}
}
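This last test constructs its InMemoryJarfile from a path to an on-disk jar rather than from raw bytes, and hands it to JdbcDatabaseMetaDataGenerator alongside the compiled catalog. A minimal sketch of that usage, assuming catalog and pathToJar come from a successful VoltCompiler run, that testout_jar is such a String path, and that the import locations below are correct; the helper class and method names are illustrative.
import org.voltdb.JdbcDatabaseMetaDataGenerator;
import org.voltdb.VoltTable;
import org.voltdb.catalog.Catalog;
import org.voltdb.utils.InMemoryJarfile;

// Hypothetical helper class; names are illustrative.
public class MetaDataHelper {
    // Produce the JDBC COLUMNS metadata table for a compiled catalog.
    public static VoltTable describeColumns(Catalog catalog, String pathToJar) throws Exception {
        JdbcDatabaseMetaDataGenerator generator =
                new JdbcDatabaseMetaDataGenerator(catalog, null, new InMemoryJarfile(pathToJar));
        // The selector is matched case-insensitively ("ColUmns" works in the test above)
        return generator.getMetaData("columns");
    }
}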