Usage of org.apache.torque.engine.database.model.AppData in the dbflute-core project (by dbflute): class DfFreeGenInitializer, method prepareDatabase.
/**
 * Read the SchemaXML and set up the database model for generation.
 * The initialization sequence mirrors ControlGenerateJava.vm, so keep them in sync.
 * @param reader The reader for the SchemaXML file. (NotNull)
 * @return The initialized database model extracted from the schema data. (NotNull)
 */
protected Database prepareDatabase(DfSchemaXmlReader reader) {
    final AppData schemaData = reader.read();
    final Database db = schemaData.getDatabase();
    // same initialization order as ControlGenerateJava.vm
    db.initializeVersion(90);
    db.initializeAdditionalPrimaryKey();
    db.initializeAdditionalUniqueKey();
    db.initializeAdditionalForeignKey();
    db.initializeClassificationDeployment();
    db.initializeIncludeQuery();
    db.checkProperties();
    return db;
}
Usage of org.apache.torque.engine.database.model.AppData in the dbflute-core project (by dbflute): class DfReplaceSchemaTask, method createVelocityContext.
/**
 * Create the Velocity context for the ReplaceSchema task, using an empty
 * schema model (the templates here do not need table meta data).
 * @return The created Velocity context. (NotNull)
 */
protected VelocityContext createVelocityContext() {
    return createVelocityContextFactory().createAsCore(AppData.createAsEmpty(), _selector);
}
Usage of org.apache.torque.engine.database.model.AppData in the dbflute-core project (by dbflute): class DfSql2EntityTask, method initControlContext.
// ===================================================================================
// Prepare Generation
// ==================
/**
 * Prepare the Velocity context for Sql2Entity generation: builds one Table model
 * per customize entity from the collected Sql2Entity meta data, registering plain
 * entities on the database, then wraps the database in an AppData for the templates.
 * @return The prepared Velocity context. (NotNull)
 * @throws Exception When preparation fails, e.g. a declared primary key column is
 * not found in the entity's column meta data.
 */
@Override
public Context initControlContext() throws Exception {
_log.info("");
_log.info("...Preparing generation of customize-entities and parameter-beans");
_log.info("* * * * * * * * * *");
_log.info("* CustomizeEntity *");
_log.info("* * * * * * * * * *");
// accumulates a human-readable summary of all entities, shown at the end
final StringBuilder logSb = new StringBuilder();
final Database database = _database;
database.setSql2EntitySchemaData(_schemaData);
database.setPmbMetaDataMap(_sql2entityMeta.getPmbMetaDataMap());
database.setSkipDeleteOldClass(isSkipDeleteOldClass());
final Map<String, DfCustomizeEntityInfo> entityInfoMap = _sql2entityMeta.getEntityInfoMap();
final Set<String> entityNameSet = entityInfoMap.keySet();
// one Table model per customize entity
for (String entityName : entityNameSet) {
final DfCustomizeEntityInfo entityInfo = entityInfoMap.get(entityName);
final Map<String, DfColumnMeta> metaMap = entityInfo.getColumnMap();
final DfOutsideSqlFile outsideSqlFile = entityInfo.getOutsideSqlFile();
final Table tbl = new Table();
tbl.setSql2EntityCustomize(true);
if (outsideSqlFile != null) {
// basically true but checked just in case
tbl.setSql2EntitySqlFile(outsideSqlFile);
}
tbl.setName(entityInfo.getTableDbName());
if (!entityInfo.needsJavaNameConvert()) {
// basically here (except STRUCT type)
tbl.suppressJavaNameConvert();
}
if (entityInfo.hasNestedCustomizeEntity()) {
// basically when STRUCT type
tbl.setSql2EntityCustomizeHasNested(true);
}
if (entityInfo.isAdditionalSchema()) {
// basically when STRUCT type
tbl.setUnifiedSchema(entityInfo.getAdditionalSchema());
}
tbl.setSql2EntityTypeSafeCursor(entityInfo.isCursorHandling());
buildCustomizeEntityTitle(logSb, entityName, entityInfo);
// pkMap: declared primary-key columns; entries are removed as they are matched below
final StringKeyMap<String> pkMap = getPrimaryKeyMap(entityInfo);
final boolean allCommonColumn = hasAllCommonColumn(metaMap);
final Set<String> columnNameSet = metaMap.keySet();
// one Column model per column meta entry
for (String columnName : columnNameSet) {
final Column column = new Column();
setupColumnName(columnName, column);
// an element removed from pkMap if true
// and a table name related to primary key is returned
final String pkRelatedTableName = setupPrimaryKey(pkMap, entityName, columnName, column);
setupTorqueType(metaMap, columnName, column, allCommonColumn);
setupDbType(metaMap, columnName, column);
setupNotNull(metaMap, columnName, column);
setupColumnSizeContainsDigit(metaMap, columnName, column);
setupColumnComment(metaMap, columnName, column);
setupSql2EntityElement(entityName, entityInfo, metaMap, columnName, column, pkRelatedTableName, logSb);
tbl.addColumn(column);
}
// any leftover entries mean a declared PK column had no matching column meta
if (!pkMap.isEmpty()) {
// if not-removed columns exist
throwPrimaryKeyNotFoundException(entityName, pkMap, columnNameSet);
}
if (entityInfo.isScalarHandling()) {
// it does not generate an only-one-column entity
// one-way love for utility (just in case)
tbl.setDatabase(database);
processScalarHandling(entityInfo, tbl);
} else if (entityInfo.isDomainHandling()) {
// it does not generate an customize-entity
// one-way love for utility (just in case)
tbl.setDatabase(database);
processDomainHandling(entityInfo, tbl);
} else {
// initialize a class name of the entity for typed parameter-bean
// should be before getting names
database.addTable(tbl);
entityInfo.setEntityClassName(tbl.getExtendedEntityClassName());
entityInfo.setImmutableClassName(tbl.getImmutableExtendedEntityClassName());
}
logSb.append(ln());
}
final String databaseType = getDatabaseTypeFacadeProp().getTargetDatabase();
final AppData appData = new AppData(databaseType);
appData.addDatabase(database);
showCustomizeEntity(logSb);
showParameterBean();
final VelocityContext context = createVelocityContext(appData);
return context;
}
Usage of org.apache.torque.engine.database.model.AppData in the dbflute-core project (by dbflute): class DfSchemaXmlReader, method read.
// ===================================================================================
// Read
// ====
/**
 * Parse the configured SchemaXML file into an application data model,
 * naming the result after the XML file.
 * @return The parsed schema data. (NotNull)
 * @throws IllegalStateException When 'schemaXml' is not set, or the file cannot be read.
 */
public AppData read() {
    if (_schemaXml == null) {
        throw new IllegalStateException("The property 'schemaXml' should not be null.");
    }
    AppData schemaData;
    try {
        schemaData = createXmlToAppData().parseFile(_schemaXml);
    } catch (IOException e) {
        // wrap as unchecked with the file path for easier diagnosis
        throw new IllegalStateException("Failed to read the SchemaXML: " + _schemaXml, e);
    }
    schemaData.setName(grokName(_schemaXml));
    return schemaData;
}
Usage of org.apache.torque.engine.database.model.AppData in the dbflute-core project (by dbflute): class DfLReverseProcess, method prepareDatabase.
// ===================================================================================
// Prepare
// =======
/**
 * Serialize the current schema to the LoadDataReverse SchemaXML,
 * then read it back as a database model.
 * @return The database model read from the serialized SchemaXML. (NotNull)
 */
protected Database prepareDatabase() {
    final String schemaXml = getLoadDataReverseSchemaXml();
    // write out first so the subsequent read sees a fresh snapshot
    createSchemaXmlSerializer(schemaXml).serialize();
    final DfSchemaXmlReader reader = createSchemaXmlReader(schemaXml);
    return reader.read().getDatabase();
}
Aggregations