Use of org.datanucleus.exceptions.NucleusUserException in project datanucleus-core by datanucleus.
The class NucleusContextHelper, method createStoreManagerForProperties.
/**
* Method to create a StoreManager based on the specified properties passed in.
* @param props The overall persistence properties
* @param datastoreProps Persistence properties to apply to the datastore
* @param clr ClassLoader resolver
* @param nucCtx NucleusContext
* @return The StoreManager
* @throws NucleusUserException if impossible to create the StoreManager (not in CLASSPATH?, invalid definition?)
*/
public static StoreManager createStoreManagerForProperties(Map<String, Object> props, Map<String, Object> datastoreProps, ClassLoaderResolver clr, NucleusContext nucCtx) {
Extension[] exts = nucCtx.getPluginManager().getExtensionPoint("org.datanucleus.store_manager").getExtensions();
Class[] ctrArgTypes = new Class[] { ClassConstants.CLASS_LOADER_RESOLVER, ClassConstants.PERSISTENCE_NUCLEUS_CONTEXT, Map.class };
Object[] ctrArgs = new Object[] { clr, nucCtx, datastoreProps };
StoreManager storeMgr = null;
// Try using the URL of the data source
String url = (String) props.get(PropertyNames.PROPERTY_CONNECTION_URL.toLowerCase());
if (url != null) {
int idx = url.indexOf(':');
if (idx > -1) {
url = url.substring(0, idx);
}
for (int e = 0; storeMgr == null && e < exts.length; e++) {
ConfigurationElement[] confElm = exts[e].getConfigurationElements();
for (int c = 0; storeMgr == null && c < confElm.length; c++) {
String urlKey = confElm[c].getAttribute("url-key");
if (url == null || urlKey.equalsIgnoreCase(url)) {
// Either no URL, or url defined so take this StoreManager
try {
storeMgr = (StoreManager) nucCtx.getPluginManager().createExecutableExtension("org.datanucleus.store_manager", "url-key", url == null ? urlKey : url, "class-name", ctrArgTypes, ctrArgs);
} catch (InvocationTargetException ex) {
Throwable t = ex.getTargetException();
if (t instanceof RuntimeException) {
throw (RuntimeException) t;
} else if (t instanceof Error) {
throw (Error) t;
} else {
throw new NucleusException(t.getMessage(), t).setFatal();
}
} catch (Exception ex) {
throw new NucleusException(ex.getMessage(), ex).setFatal();
}
}
}
}
} else {
// Assumed to be using RDBMS since only that allows ConnectionFactory/ConnectionFactoryName
// TODO If any other stores start supporting ConnectionFactory then update this
try {
storeMgr = (StoreManager) nucCtx.getPluginManager().createExecutableExtension("org.datanucleus.store_manager", "key", "rdbms", "class-name", ctrArgTypes, ctrArgs);
} catch (InvocationTargetException ex) {
Throwable t = ex.getTargetException();
if (t instanceof RuntimeException) {
throw (RuntimeException) t;
} else if (t instanceof Error) {
throw (Error) t;
} else {
throw new NucleusException(t.getMessage(), t).setFatal();
}
} catch (Exception ex) {
throw new NucleusException(ex.getMessage(), ex).setFatal();
}
}
if (storeMgr == null) {
throw new NucleusUserException(Localiser.msg("008004", url)).setFatal();
}
return storeMgr;
}
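For orientation, a minimal usage sketch of this method; the connection URL and the reuse of the same map for both the overall and datastore properties are illustrative assumptions, and the snippet is not taken from datanucleus-core itself. It assumes the RDBMS store plugin is on the CLASSPATH.

// Hypothetical caller sketch (values are illustrative, not from the original source).
Map<String, Object> props = new HashMap<>();
// DataNucleus keys persistence properties in lower case internally.
props.put(PropertyNames.PROPERTY_CONNECTION_URL.toLowerCase(), "jdbc:h2:mem:test");

NucleusContext nucCtx = new PersistenceNucleusContextImpl("JDO", null);
ClassLoaderResolver clr = nucCtx.getClassLoaderResolver(null);

// The URL prefix before the first ':' ("jdbc") is matched against each store plugin's
// "url-key" attribute; if no plugin matches, a NucleusUserException (008004) is thrown.
StoreManager storeMgr = NucleusContextHelper.createStoreManagerForProperties(props, props, clr, nucCtx);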
Use of org.datanucleus.exceptions.NucleusUserException in project datanucleus-core by datanucleus.
The class TemporalDayMethod, method evaluate.
/* (non-Javadoc)
* @see org.datanucleus.query.evaluator.memory.InvocationEvaluator#evaluate(org.datanucleus.query.expression.InvokeExpression, org.datanucleus.query.evaluator.memory.InMemoryExpressionEvaluator)
*/
public Object evaluate(InvokeExpression expr, Object invokedValue, InMemoryExpressionEvaluator eval) {
if (invokedValue == null && expr.getArguments() != null) {
// Specified as static function, so use argument of InvokeExpression
List<Expression> argExprs = expr.getArguments();
if (argExprs.size() > 1) {
throw new NucleusUserException("Incorrect number of arguments to DAY");
}
Expression argExpr = argExprs.get(0);
invokedValue = eval.getValueForExpression(argExpr);
}
if (invokedValue == null) {
return Boolean.FALSE;
}
if (!(invokedValue instanceof Date) && !(invokedValue instanceof Calendar) && !(invokedValue instanceof LocalDate) && !(invokedValue instanceof LocalDateTime)) {
throw new NucleusException(Localiser.msg("021011", expr.getOperation(), invokedValue.getClass().getName()));
}
if (invokedValue instanceof Date) {
Calendar cal = Calendar.getInstance();
cal.setTime((Date) invokedValue);
return Integer.valueOf(cal.get(Calendar.DAY_OF_MONTH));
} else if (invokedValue instanceof Calendar) {
return Integer.valueOf(((Calendar) invokedValue).get(Calendar.DAY_OF_MONTH));
} else if (invokedValue instanceof LocalDate) {
return ((LocalDate) invokedValue).getDayOfMonth();
} else if (invokedValue instanceof LocalDateTime) {
return ((LocalDateTime) invokedValue).getDayOfMonth();
} else {
throw new NucleusUserException("We do not currently support DAY() with argument of type " + invokedValue.getClass().getName());
}
}
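As a standalone illustration of the type dispatch above, a small hypothetical snippet (not part of datanucleus-core) showing the day-of-month value each supported argument type yields for the same calendar date.

// Hypothetical illustration; all four supported types resolve to the same day of month.
Calendar cal = Calendar.getInstance();
cal.set(2021, Calendar.JUNE, 9);
Date asDate = cal.getTime();

Calendar viaCalendar = Calendar.getInstance();
viaCalendar.setTime(asDate);

System.out.println(viaCalendar.get(Calendar.DAY_OF_MONTH));               // 9 (java.util.Date path, via Calendar)
System.out.println(cal.get(Calendar.DAY_OF_MONTH));                       // 9 (java.util.Calendar path)
System.out.println(LocalDate.of(2021, 6, 9).getDayOfMonth());             // 9 (LocalDate path)
System.out.println(LocalDateTime.of(2021, 6, 9, 12, 0).getDayOfMonth());  // 9 (LocalDateTime path)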
Use of org.datanucleus.exceptions.NucleusUserException in project datanucleus-core by datanucleus.
The class SCOUtils, method validateObjectForWriting.
/**
* Method to check if an object to be stored in a SCO container is already persistent, or is managed by a
* different ExecutionContext. If not persistent, this call will persist it.
* If not yet flushed to the datastore this call will flush it.
* @param ec ExecutionContext
* @param object The object
* @param fieldValues Values for any fields when persisting (if the object needs persisting)
* @return Whether the object was persisted during this call
*/
public static boolean validateObjectForWriting(ExecutionContext ec, Object object, FieldValues fieldValues) {
boolean persisted = false;
ApiAdapter api = ec.getApiAdapter();
if (api.isPersistable(object)) {
ExecutionContext objectEC = api.getExecutionContext(object);
if (objectEC != null && ec != objectEC) {
throw new NucleusUserException(Localiser.msg("023009", StringUtils.toJVMIDString(object)), api.getIdForObject(object));
} else if (!api.isPersistent(object)) {
// Not persistent, so either is detached, or needs persisting for first time
boolean exists = false;
if (api.isDetached(object)) {
if (ec.getBooleanProperty(PropertyNames.PROPERTY_ATTACH_SAME_DATASTORE)) {
// Assume that it is detached from this datastore
exists = true;
} else {
// Check if the (attached) object exists in this datastore
try {
Object obj = ec.findObject(api.getIdForObject(object), true, false, object.getClass().getName());
if (obj != null) {
// PM.getObjectById creates a dummy object to represent this object and automatically enlists it in the txn.
// Evict it to avoid issues with reachability.
ObjectProvider objSM = ec.findObjectProvider(obj);
if (objSM != null) {
ec.evictFromTransaction(objSM);
}
}
exists = true;
} catch (NucleusObjectNotFoundException onfe) {
exists = false;
}
}
}
if (!exists) {
// Persist the object
ec.persistObjectInternal(object, fieldValues, ObjectProvider.PC);
persisted = true;
}
} else {
// Persistent state, but is it flushed to the datastore?
ObjectProvider objectSM = ec.findObjectProvider(object);
if (objectSM.isWaitingToBeFlushedToDatastore()) {
// Process any fieldValues
if (fieldValues != null) {
objectSM.loadFieldValues(fieldValues);
}
// Now flush it
objectSM.flush();
// Mark as being persisted since is now in the datastore
persisted = true;
}
}
}
return persisted;
}
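A hedged sketch of a typical call site, for example a backed SCO collection wrapper writing an element through to the datastore; the helper name and its parameters are hypothetical and only illustrate the contract of the method above.

// Hypothetical helper (names assumed) around the validation call above.
static void writeElementThrough(ExecutionContext ec, Object element) {
    // Persists "element" if it is transient, flushes it if it is waiting to be flushed,
    // and throws NucleusUserException if it is managed by a different ExecutionContext.
    boolean persistedNow = SCOUtils.validateObjectForWriting(ec, element, null);
    if (persistedNow) {
        NucleusLogger.PERSISTENCE.debug("Element persisted by reachability: " + StringUtils.toJVMIDString(element));
    }
}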
Use of org.datanucleus.exceptions.NucleusUserException in project datanucleus-core by datanucleus.
The class SchemaTool, method getNucleusContextForMode.
/**
* Method to create a NucleusContext for the specified mode of SchemaTool
* @param mode Mode of operation of SchemaTool
* @param api Persistence API
* @param userProps Map containing user provided properties (usually input via a file)
* @param persistenceUnitName Name of the persistence-unit (if any)
* @param ddlFile Name of a file to output DDL to
* @param verbose Verbose mode
* @param ignoreMetaDataForMissingClasses Whether to ignore metadata for missing classes
* @return The NucleusContext to use
* @throws NucleusException Thrown if an error occurs in creating the required NucleusContext
*/
public static StoreNucleusContext getNucleusContextForMode(Mode mode, String api, Map userProps, String persistenceUnitName, String ddlFile, boolean verbose, boolean ignoreMetaDataForMissingClasses) {
// Extract any properties that affect NucleusContext startup
Map startupProps = null;
if (userProps != null) {
// Possible properties to check for
for (String startupPropName : AbstractNucleusContext.STARTUP_PROPERTIES) {
if (userProps.containsKey(startupPropName)) {
if (startupProps == null) {
startupProps = new HashMap();
}
startupProps.put(startupPropName, userProps.get(startupPropName));
}
}
}
// Initialise the context for this API
PersistenceNucleusContext nucleusCtx = new PersistenceNucleusContextImpl(api, startupProps);
Configuration propConfig = nucleusCtx.getConfiguration();
// Generate list of properties for SchemaTool usage
Map props = new HashMap();
// Get properties from PersistenceUnit first...
PersistenceUnitMetaData pumd = null;
if (persistenceUnitName != null) {
props.put(PropertyNames.PROPERTY_PERSISTENCE_UNIT_NAME.toLowerCase(), persistenceUnitName);
// Extract the persistence-unit metadata
String filename = nucleusCtx.getConfiguration().getStringProperty(PropertyNames.PROPERTY_PERSISTENCE_XML_FILENAME);
boolean validateXML = nucleusCtx.getConfiguration().getBooleanProperty(PropertyNames.PROPERTY_METADATA_XML_VALIDATE);
boolean supportXMLNamespaces = nucleusCtx.getConfiguration().getBooleanProperty(PropertyNames.PROPERTY_METADATA_XML_NAMESPACE_AWARE);
ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
pumd = MetaDataUtils.getMetaDataForPersistenceUnit(nucleusCtx.getPluginManager(), filename, persistenceUnitName, validateXML, supportXMLNamespaces, clr);
if (pumd != null) {
// Add the properties for the unit
if (pumd.getProperties() != null) {
props.putAll(pumd.getProperties());
}
} else {
throw new NucleusUserException("SchemaTool has been specified to use persistence-unit with name " + persistenceUnitName + " but none was found with that name");
}
if (api.equalsIgnoreCase("JPA")) {
// Don't use JARs when in JavaSE for JPA
pumd.clearJarFiles();
}
}
// Add/override with user properties
if (userProps != null) {
// Properties specified by the user in a file
for (Object key : userProps.keySet()) {
String propName = (String) key;
props.put(propName.toLowerCase(Locale.ENGLISH), userProps.get(propName));
}
}
// Finally add/override with system properties (only support particular ones, and in correct case)
String[] propNames = { PropertyNames.PROPERTY_CONNECTION_URL, PropertyNames.PROPERTY_CONNECTION_DRIVER_NAME, PropertyNames.PROPERTY_CONNECTION_USER_NAME, PropertyNames.PROPERTY_CONNECTION_PASSWORD, PropertyNames.PROPERTY_MAPPING, "javax.jdo.option.ConnectionURL", "javax.jdo.option.ConnectionDriverName", "javax.jdo.option.ConnectionUserName", "javax.jdo.option.ConnectionPassword", "javax.jdo.option.Mapping", "javax.persistence.jdbc.url", "javax.persistence.jdbc.driver", "javax.persistence.jdbc.user", "javax.persistence.jdbc.password" };
for (int i = 0; i < propNames.length; i++) {
if (System.getProperty(propNames[i]) != null) {
props.put(propNames[i].toLowerCase(Locale.ENGLISH), System.getProperty(propNames[i]));
}
}
// Interferes with usage
props.put(PropertyNames.PROPERTY_AUTOSTART_MECHANISM.toLowerCase(), "None");
// Tag on the mandatory props that we must have for each mode
if (mode == Mode.CREATE) {
if (ddlFile != null) {
// the tables must not be created in the DB, so do not validate (DDL is being output to a file)
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_TABLES.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_COLUMNS.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_CONSTRAINTS.toLowerCase(), "false");
}
// use tables/columns/constraints settings
props.remove(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_ALL.toLowerCase());
if (!props.containsKey(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_TABLES.toLowerCase())) {
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_TABLES.toLowerCase(), "true");
}
if (!props.containsKey(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_COLUMNS.toLowerCase())) {
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_COLUMNS.toLowerCase(), "true");
}
if (!props.containsKey(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_CONSTRAINTS.toLowerCase())) {
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_CONSTRAINTS.toLowerCase(), "true");
}
props.put(PropertyNames.PROPERTY_DATASTORE_READONLY.toLowerCase(), "false");
props.put("datanucleus.rdbms.checkexisttablesorviews", "true");
} else if (mode == Mode.DELETE) {
props.put(PropertyNames.PROPERTY_DATASTORE_READONLY.toLowerCase(), "false");
} else if (mode == Mode.DELETE_CREATE) {
if (ddlFile != null) {
// the tables must not be created in the DB, so do not validate (DDL is being output to a file)
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_TABLES.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_COLUMNS.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_CONSTRAINTS.toLowerCase(), "false");
}
// use tables/columns/constraints settings
props.remove(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_ALL.toLowerCase());
if (!props.containsKey(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_TABLES.toLowerCase())) {
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_TABLES.toLowerCase(), "true");
}
if (!props.containsKey(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_COLUMNS.toLowerCase())) {
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_COLUMNS.toLowerCase(), "true");
}
if (!props.containsKey(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_CONSTRAINTS.toLowerCase())) {
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_CONSTRAINTS.toLowerCase(), "true");
}
props.put(PropertyNames.PROPERTY_DATASTORE_READONLY.toLowerCase(), "false");
props.put("datanucleus.rdbms.checkexisttablesorviews", "true");
} else if (mode == Mode.VALIDATE) {
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_ALL.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_TABLES.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_CONSTRAINTS.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_COLUMNS.toLowerCase(), "false");
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_TABLES.toLowerCase(), "true");
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_COLUMNS.toLowerCase(), "true");
props.put(PropertyNames.PROPERTY_SCHEMA_VALIDATE_CONSTRAINTS.toLowerCase(), "true");
}
if (ignoreMetaDataForMissingClasses) {
props.put(PropertyNames.PROPERTY_METADATA_IGNORE_METADATA_FOR_MISSING_CLASSES, "true");
}
// Apply remaining persistence properties
propConfig.setPersistenceProperties(props);
if (pumd != null) {
// Initialise the MetaDataManager with all files/classes for this persistence-unit
// This is done now that all persistence properties are set (including the persistence-unit props)
nucleusCtx.getMetaDataManager().loadPersistenceUnit(pumd, null);
}
// Initialise the NucleusContext for use
nucleusCtx.initialise();
if (verbose) {
String msg = Localiser.msg("014020");
LOGGER.info(msg);
System.out.println(msg);
// TODO Some persistence properties will be stored against the StoreManager
Map<String, Object> pmfProps = propConfig.getPersistenceProperties();
Set<String> keys = pmfProps.keySet();
List<String> keyNames = new ArrayList<String>(keys);
Collections.sort(keyNames);
Iterator keyNamesIter = keyNames.iterator();
while (keyNamesIter.hasNext()) {
String key = (String) keyNamesIter.next();
Object value = pmfProps.get(key);
boolean display = true;
if (!key.startsWith("datanucleus")) {
display = false;
} else if (key.equals(PropertyNames.PROPERTY_CONNECTION_PASSWORD.toLowerCase())) {
// Don't show passwords
display = false;
} else if (value == null) {
display = false;
} else if (value instanceof String && StringUtils.isWhitespace((String) value)) {
display = false;
}
if (display) {
// Print the property to sysout
msg = Localiser.msg("014022", key, value);
LOGGER.info(msg);
System.out.println(msg);
}
}
System.out.println();
}
return nucleusCtx;
}
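A minimal programmatic sketch of calling this method directly (the main() method below does the same from the command line); the persistence-unit name and connection URL are placeholders.

// Hypothetical programmatic invocation; values are placeholders.
Map userProps = new HashMap();
userProps.put("javax.jdo.option.ConnectionURL", "jdbc:h2:mem:schematool");

StoreNucleusContext ctx = SchemaTool.getNucleusContextForMode(
    Mode.CREATE,   // schema-creation properties (autoCreate tables/columns/constraints)
    "JDO",         // persistence API
    userProps,     // user-supplied properties, normally read from a file
    "MyUnit",      // persistence-unit name (placeholder)
    null,          // no DDL file, so the schema is applied directly to the datastore
    false,         // verbose
    false);        // ignoreMetaDataForMissingClasses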
Use of org.datanucleus.exceptions.NucleusUserException in project datanucleus-core by datanucleus.
The class SchemaTool, method main.
/**
* Entry method when invoked from the command line.
* @param args List of options for processing by the available methods in this class.
* @throws Exception If an error occurs in operation
*/
public static void main(String[] args) throws Exception {
SchemaTool tool = new SchemaTool();
CommandLine cmd = new CommandLine();
cmd.addOption(OPTION_CREATE_DATABASE, OPTION_CREATE_DATABASE, null, Localiser.msg("014024"));
cmd.addOption(OPTION_DELETE_DATABASE, OPTION_DELETE_DATABASE, null, Localiser.msg("014025"));
cmd.addOption(OPTION_CREATE_TABLES_FOR_CLASSES, OPTION_CREATE_TABLES_FOR_CLASSES, null, Localiser.msg("014026"));
cmd.addOption(OPTION_DELETE_TABLES_FOR_CLASSES, OPTION_DELETE_TABLES_FOR_CLASSES, null, Localiser.msg("014027"));
cmd.addOption(OPTION_DELETE_CREATE_TABLES_FOR_CLASSES, OPTION_DELETE_CREATE_TABLES_FOR_CLASSES, null, Localiser.msg("014044"));
cmd.addOption(OPTION_VALIDATE_TABLES_FOR_CLASSES, OPTION_VALIDATE_TABLES_FOR_CLASSES, null, Localiser.msg("014028"));
cmd.addOption(OPTION_DBINFO, OPTION_DBINFO, null, Localiser.msg("014029"));
cmd.addOption(OPTION_SCHEMAINFO, OPTION_SCHEMAINFO, null, Localiser.msg("014030"));
cmd.addOption("help", "help", null, Localiser.msg("014033"));
cmd.addOption(OPTION_DDL_FILE, OPTION_DDL_FILE, "ddlFile", Localiser.msg("014031"));
cmd.addOption(OPTION_COMPLETE_DDL, OPTION_COMPLETE_DDL, null, Localiser.msg("014032"));
cmd.addOption(OPTION_INCLUDE_AUTO_START, OPTION_INCLUDE_AUTO_START, null, "Include Auto-Start Mechanisms");
cmd.addOption(OPTION_API, OPTION_API, "api", "API Adapter (JDO, JPA, etc)");
cmd.addOption(OPTION_CATALOG_NAME, OPTION_CATALOG_NAME, "catalog", "CatalogName");
cmd.addOption(OPTION_SCHEMA_NAME, OPTION_SCHEMA_NAME, "schema", "SchemaName");
cmd.addOption("v", "verbose", null, "verbose output");
cmd.addOption("pu", "persistenceUnit", "<persistence-unit>", "name of the persistence unit to handle the schema for");
cmd.addOption("props", "properties", "props", "path to a properties file");
cmd.addOption("ignoreMetaDataForMissingClasses", "ignoreMetaDataForMissingClasses", null, "Ignore metadata for classes that are missing?");
cmd.parse(args);
// Remaining command line args are filenames (class files, metadata files)
String[] filenames = cmd.getDefaultArgs();
if (cmd.hasOption("api")) {
tool.setApi(cmd.getOptionArg("api"));
}
if (cmd.hasOption(OPTION_CATALOG_NAME)) {
tool.setCatalogName(cmd.getOptionArg(OPTION_CATALOG_NAME));
}
if (cmd.hasOption(OPTION_SCHEMA_NAME)) {
NucleusLogger.GENERAL.info(">> sch input = " + cmd.getOptionArg(OPTION_SCHEMA_NAME));
tool.setSchemaName(cmd.getOptionArg(OPTION_SCHEMA_NAME));
}
// Determine the mode of operation required
String msg = null;
Mode mode = Mode.CREATE;
if (cmd.hasOption(OPTION_CREATE_TABLES_FOR_CLASSES)) {
mode = Mode.CREATE;
msg = Localiser.msg("014000");
} else if (cmd.hasOption(OPTION_DELETE_TABLES_FOR_CLASSES)) {
mode = Mode.DELETE;
msg = Localiser.msg("014001");
} else if (cmd.hasOption(OPTION_DELETE_CREATE_TABLES_FOR_CLASSES)) {
mode = Mode.DELETE_CREATE;
msg = Localiser.msg("014045");
} else if (cmd.hasOption(OPTION_VALIDATE_TABLES_FOR_CLASSES)) {
mode = Mode.VALIDATE;
msg = Localiser.msg("014002");
} else if (cmd.hasOption(OPTION_CREATE_DATABASE)) {
mode = Mode.CREATE_DATABASE;
msg = Localiser.msg("014034", tool.getCatalogName(), tool.getSchemaName());
} else if (cmd.hasOption(OPTION_DELETE_DATABASE)) {
mode = Mode.DELETE_DATABASE;
msg = Localiser.msg("014035", tool.getCatalogName(), tool.getSchemaName());
} else if (cmd.hasOption(OPTION_DBINFO)) {
mode = Mode.DATABASE_INFO;
msg = Localiser.msg("014003");
} else if (cmd.hasOption(OPTION_SCHEMAINFO)) {
mode = Mode.SCHEMA_INFO;
msg = Localiser.msg("014004");
} else if (cmd.hasOption("help")) {
System.out.println(Localiser.msg("014023", cmd.toString()));
System.exit(0);
}
LOGGER.info(msg);
System.out.println(msg);
// Extract the selected options
String propsFileName = null;
String persistenceUnitName = null;
if (cmd.hasOption(OPTION_DDL_FILE)) {
tool.setDdlFile(cmd.getOptionArg(OPTION_DDL_FILE));
}
if (cmd.hasOption(OPTION_COMPLETE_DDL)) {
tool.setCompleteDdl(true);
}
if (cmd.hasOption(OPTION_INCLUDE_AUTO_START)) {
tool.setIncludeAutoStart(true);
}
if (cmd.hasOption("v")) {
tool.setVerbose(true);
}
boolean ignoreMetaDataForMissingClasses = false;
if (cmd.hasOption("ignoreMetaDataForMissingClasses")) {
ignoreMetaDataForMissingClasses = true;
}
if (cmd.hasOption("pu")) {
persistenceUnitName = cmd.getOptionArg("pu");
}
if (cmd.hasOption("props")) {
propsFileName = cmd.getOptionArg("props");
}
// Classpath
msg = Localiser.msg("014005");
LOGGER.info(msg);
if (tool.isVerbose()) {
System.out.println(msg);
}
StringTokenizer tokeniser = new StringTokenizer(System.getProperty("java.class.path"), File.pathSeparator);
while (tokeniser.hasMoreTokens()) {
msg = Localiser.msg("014006", tokeniser.nextToken());
LOGGER.info(msg);
if (tool.isVerbose()) {
System.out.println(msg);
}
}
if (tool.isVerbose()) {
System.out.println();
}
// DDL file
String ddlFilename = tool.getDdlFile();
if (ddlFilename != null) {
msg = Localiser.msg(tool.getCompleteDdl() ? "014018" : "014019", ddlFilename);
LOGGER.info(msg);
if (tool.isVerbose()) {
System.out.println(msg);
System.out.println();
}
}
// Create a NucleusContext for use with this mode
StoreNucleusContext nucleusCtx = null;
try {
Properties props = (propsFileName != null) ? PersistenceUtils.setPropertiesUsingFile(propsFileName) : null;
nucleusCtx = getNucleusContextForMode(mode, tool.getApi(), props, persistenceUnitName, ddlFilename, tool.isVerbose(), ignoreMetaDataForMissingClasses);
} catch (Exception e) {
// Unable to create a NucleusContext so likely input errors
LOGGER.error("Error creating NucleusContext", e);
System.out.println(Localiser.msg("014008", e.getMessage()));
System.exit(1);
return;
}
Set<String> classNames = null;
if (mode != Mode.SCHEMA_INFO && mode != Mode.DATABASE_INFO) {
// This will load up all MetaData for the specified input and throw exceptions where errors are found
try {
MetaDataManager metaDataMgr = nucleusCtx.getMetaDataManager();
ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
if (filenames == null && persistenceUnitName == null) {
msg = Localiser.msg("014007");
LOGGER.error(msg);
System.out.println(msg);
throw new NucleusUserException(msg);
}
FileMetaData[] filemds = null;
if (persistenceUnitName != null) {
// Schema management via "persistence-unit"
msg = Localiser.msg("014015", persistenceUnitName);
LOGGER.info(msg);
if (tool.isVerbose()) {
System.out.println(msg);
System.out.println();
}
// The NucleusContext will have initialised the MetaDataManager with the persistence-unit
filemds = metaDataMgr.getFileMetaData();
} else {
// Schema management via "Input Files" (metadata/class)
msg = Localiser.msg("014009");
LOGGER.info(msg);
if (tool.isVerbose()) {
System.out.println(msg);
}
for (int i = 0; i < filenames.length; i++) {
String entry = Localiser.msg("014010", filenames[i]);
LOGGER.info(entry);
if (tool.isVerbose()) {
System.out.println(entry);
}
}
if (tool.isVerbose()) {
System.out.println();
}
LOGGER.debug(Localiser.msg("014011", "" + filenames.length));
filemds = MetaDataUtils.getFileMetaDataForInputFiles(metaDataMgr, clr, filenames);
LOGGER.debug(Localiser.msg("014012", "" + filenames.length));
}
classNames = new TreeSet<String>();
if (filemds == null) {
msg = Localiser.msg("014021");
LOGGER.error(msg);
System.out.println(msg);
System.exit(2);
return;
}
for (int i = 0; i < filemds.length; i++) {
for (int j = 0; j < filemds[i].getNoOfPackages(); j++) {
for (int k = 0; k < filemds[i].getPackage(j).getNoOfClasses(); k++) {
String className = filemds[i].getPackage(j).getClass(k).getFullClassName();
if (!classNames.contains(className)) {
classNames.add(className);
}
}
}
}
} catch (Exception e) {
// Exception will have been logged and sent to System.out in "getFileMetaDataForInput()"
System.exit(2);
return;
}
}
// Run SchemaTool
StoreManager storeMgr = nucleusCtx.getStoreManager();
if (!(storeMgr instanceof SchemaAwareStoreManager)) {
LOGGER.error("StoreManager of type " + storeMgr.getClass().getName() + " is not schema-aware so cannot be used with SchemaTool");
System.exit(2);
return;
}
SchemaAwareStoreManager schemaStoreMgr = (SchemaAwareStoreManager) storeMgr;
try {
if (mode == Mode.CREATE_DATABASE) {
tool.createDatabase(schemaStoreMgr, tool.getCatalogName(), tool.getSchemaName());
} else if (mode == Mode.DELETE_DATABASE) {
tool.deleteDatabase(schemaStoreMgr, tool.getCatalogName(), tool.getSchemaName());
} else if (mode == Mode.CREATE) {
tool.createSchemaForClasses(schemaStoreMgr, classNames);
} else if (mode == Mode.DELETE) {
tool.deleteSchemaForClasses(schemaStoreMgr, classNames);
} else if (mode == Mode.DELETE_CREATE) {
tool.deleteSchemaForClasses(schemaStoreMgr, classNames);
tool.createSchemaForClasses(schemaStoreMgr, classNames);
} else if (mode == Mode.VALIDATE) {
tool.validateSchemaForClasses(schemaStoreMgr, classNames);
} else if (mode == Mode.DATABASE_INFO) {
storeMgr.printInformation("DATASTORE", System.out);
} else if (mode == Mode.SCHEMA_INFO) {
storeMgr.printInformation("SCHEMA", System.out);
}
msg = Localiser.msg("014043");
LOGGER.info(msg);
System.out.println(msg);
} catch (Exception e) {
msg = Localiser.msg("014037", e.getMessage());
System.out.println(msg);
LOGGER.error(msg, e);
System.exit(2);
return;
} finally {
storeMgr.close();
}
}
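For completeness, a hedged example of launching this entry point programmatically; the rendered command line, option strings, and file names are assumptions based on the options registered above, not verified values.

// Hypothetical launch, roughly equivalent to a command-line run such as
//   java ... SchemaTool -create -api JDO -props datanucleus.properties package.jdo
SchemaTool.main(new String[] {
    "-" + OPTION_CREATE_TABLES_FOR_CLASSES,  // e.g. "-create" (assumed option form)
    "-api", "JDO",
    "-props", "datanucleus.properties",      // user properties file (placeholder)
    "package.jdo"                            // metadata/class input file (placeholder)
});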