
Example 1 with DfOutsideSqlFile

Use of org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile in project dbflute-core by dbflute.

From the class DfSql2EntityTask, method getTargetSqlFileList:

// ===================================================================================
// Executing Element
// =================
protected DfOutsideSqlPack getTargetSqlFileList() {
    final DfOutsideSqlPack sqlFileList = collectOutsideSqlChecked();
    final String specifiedSqlFile = DfSpecifiedSqlFile.getInstance().getSpecifiedSqlFile();
    if (specifiedSqlFile != null) {
        final DfOutsideSqlPack filteredList = new DfOutsideSqlPack();
        for (DfOutsideSqlFile outsideSqlFile : sqlFileList.getOutsideSqlFileList()) {
            final String fileName = outsideSqlFile.getPhysicalFile().getName();
            if (specifiedSqlFile.equals(fileName)) {
                filteredList.add(outsideSqlFile);
            }
        }
        return filteredList;
    } else {
        return sqlFileList;
    }
}
Also used: DfOutsideSqlFile (org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile), DfOutsideSqlPack (org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlPack)
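
The idiom above is: collect every outside-SQL file, then keep only the one whose physical file name matches the file specified for this run. The same pattern can be sketched with plain java.io.File objects, independent of DBFlute; the SqlFileSelector class and selectTargetFiles method below are illustrative names, not part of the DBFlute API.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class SqlFileSelector {

    // Keeps only the files whose simple name equals specifiedSqlFile,
    // or returns the whole list when no file is specified (mirrors the null check above).
    public List<File> selectTargetFiles(List<File> allSqlFiles, String specifiedSqlFile) {
        if (specifiedSqlFile == null) {
            return allSqlFiles;
        }
        final List<File> filteredList = new ArrayList<File>();
        for (File sqlFile : allSqlFiles) {
            if (specifiedSqlFile.equals(sqlFile.getName())) {
                filteredList.add(sqlFile);
            }
        }
        return filteredList;
    }
}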

Example 2 with DfOutsideSqlFile

Use of org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile in project dbflute-core by dbflute.

From the class DfBehaviorQueryPathSetupper, method gatherBehaviorQueryPathInfo:

/**
 * @param behaviorQueryPathMap The empty map of behavior query path. (NotNull)
 * @param outsideSqlPack The pack object for outside-SQL file. (NotNull)
 */
protected void gatherBehaviorQueryPathInfo(Map<String, Map<String, String>> behaviorQueryPathMap, DfOutsideSqlPack outsideSqlPack) {
    for (DfOutsideSqlFile outsideSqlFile : outsideSqlPack.getOutsideSqlFileList()) {
        final DfBqpOutsideSqlFile bqpOutsideSqlFile = new DfBqpOutsideSqlFile(outsideSqlFile);
        if (!bqpOutsideSqlFile.isBqp()) {
            continue;
        }
        final Map<String, String> behaviorQueryElement = new LinkedHashMap<String, String>();
        final String path = bqpOutsideSqlFile.getFilePath();
        behaviorQueryElement.put(KEY_PATH, path);
        behaviorQueryElement.put(KEY_SUB_DIRECTORY_PATH, bqpOutsideSqlFile.getSubDirectoryPath());
        behaviorQueryElement.put(KEY_ENTITY_NAME, bqpOutsideSqlFile.getEntityName());
        behaviorQueryElement.put(KEY_BEHAVIOR_NAME, bqpOutsideSqlFile.getBehaviorName());
        behaviorQueryElement.put(KEY_BEHAVIOR_QUERY_PATH, bqpOutsideSqlFile.getBehaviorQueryPath());
        if (outsideSqlFile.isSqlAp()) {
            behaviorQueryElement.put(KEY_SQLAP, "true");
            behaviorQueryElement.put(KEY_SQLAP_PROJECT_NAME, outsideSqlFile.getProjectName());
        }
        behaviorQueryPathMap.put(path, behaviorQueryElement);
        // set up information in the SQL file
        setupInfoInSqlFile(outsideSqlFile, behaviorQueryElement);
    }
}
Also used: DfOutsideSqlFile (org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile), LinkedHashMap (java.util.LinkedHashMap)
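
The method walks every outside-SQL file, skips files that are not behavior query path (BQP) files, and records one LinkedHashMap of attributes per file, keyed by its path, so insertion order is preserved. A stripped-down sketch of that gathering pattern follows; the BqpFileInfo type and its fields stand in for the real DfBqpOutsideSqlFile and are not DBFlute classes.

import java.util.LinkedHashMap;
import java.util.Map;

public class BehaviorQueryPathGatherer {

    // Placeholder for the analyzed SQL file; not a DBFlute class.
    public static class BqpFileInfo {
        final String filePath;
        final String behaviorName;
        final String behaviorQueryPath;

        public BqpFileInfo(String filePath, String behaviorName, String behaviorQueryPath) {
            this.filePath = filePath;
            this.behaviorName = behaviorName;
            this.behaviorQueryPath = behaviorQueryPath;
        }
    }

    // Gathers one attribute map per file, keyed by path, preserving encounter order.
    public Map<String, Map<String, String>> gather(Iterable<BqpFileInfo> files) {
        final Map<String, Map<String, String>> pathMap = new LinkedHashMap<String, Map<String, String>>();
        for (BqpFileInfo file : files) {
            final Map<String, String> element = new LinkedHashMap<String, String>();
            element.put("path", file.filePath);
            element.put("behaviorName", file.behaviorName);
            element.put("behaviorQueryPath", file.behaviorQueryPath);
            pathMap.put(file.filePath, element);
        }
        return pathMap;
    }
}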

Example 3 with DfOutsideSqlFile

Use of org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile in project dbflute-core by dbflute.

From the class DfSql2EntityTask, method showTargetSqlFileInformation:

// ===================================================================================
// SQL File Information
// ====================
protected void showTargetSqlFileInformation(DfOutsideSqlPack outsideSqlPack) {
    final StringBuilder sb = new StringBuilder();
    sb.append(ln()).append("/- - - - - - - - - - - - - - - - - - - - - - - -");
    sb.append(ln()).append("Target SQL files: ").append(outsideSqlPack.size());
    sb.append(ln());
    for (DfOutsideSqlFile sqlFile : outsideSqlPack.getOutsideSqlFileList()) {
        sb.append(ln()).append("  ").append(sqlFile.getPhysicalFile().getName());
    }
    sb.append(ln()).append("- - - - - - - - - -/");
    _log.info(sb.toString());
}
Also used: DfOutsideSqlFile (org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile)
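
This helper only frames the target file names between decorative border lines before handing the block to the logger. A self-contained version of the same formatting, using System.lineSeparator() and a plain list of names in place of the DfOutsideSqlPack, might look like this (the class and method names are illustrative):

import java.util.Arrays;
import java.util.List;

public class SqlFileInfoPrinter {

    // Builds the framed summary block in the same shape as showTargetSqlFileInformation.
    public String buildTargetFileSummary(List<String> sqlFileNames) {
        final String ln = System.lineSeparator();
        final StringBuilder sb = new StringBuilder();
        sb.append(ln).append("/- - - - - - - - - - - - - - - - - - - - - - - -");
        sb.append(ln).append("Target SQL files: ").append(sqlFileNames.size());
        sb.append(ln);
        for (String name : sqlFileNames) {
            sb.append(ln).append("  ").append(name);
        }
        sb.append(ln).append("- - - - - - - - - -/");
        return sb.toString();
    }

    public static void main(String[] args) {
        // Example: prints a two-file summary block (file names are made up for the demo).
        System.out.println(new SqlFileInfoPrinter()
                .buildTargetFileSummary(Arrays.asList("selectMemberList.sql", "selectPurchaseSummary.sql")));
    }
}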

Example 4 with DfOutsideSqlFile

Use of org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile in project dbflute-core by dbflute.

From the class DfSql2EntityTask, method initControlContext:

// ===================================================================================
// Prepare Generation
// ==================
@Override
public Context initControlContext() throws Exception {
    _log.info("");
    _log.info("...Preparing generation of customize-entities and parameter-beans");
    _log.info("* * * * * * * * * *");
    _log.info("* CustomizeEntity *");
    _log.info("* * * * * * * * * *");
    final StringBuilder logSb = new StringBuilder();
    final Database database = _database;
    database.setSql2EntitySchemaData(_schemaData);
    database.setPmbMetaDataMap(_sql2entityMeta.getPmbMetaDataMap());
    database.setSkipDeleteOldClass(isSkipDeleteOldClass());
    final Map<String, DfCustomizeEntityInfo> entityInfoMap = _sql2entityMeta.getEntityInfoMap();
    final Set<String> entityNameSet = entityInfoMap.keySet();
    for (String entityName : entityNameSet) {
        final DfCustomizeEntityInfo entityInfo = entityInfoMap.get(entityName);
        final Map<String, DfColumnMeta> metaMap = entityInfo.getColumnMap();
        final DfOutsideSqlFile outsideSqlFile = entityInfo.getOutsideSqlFile();
        final Table tbl = new Table();
        tbl.setSql2EntityCustomize(true);
        if (outsideSqlFile != null) {
            // basically true but checked just in case
            tbl.setSql2EntitySqlFile(outsideSqlFile);
        }
        tbl.setName(entityInfo.getTableDbName());
        if (!entityInfo.needsJavaNameConvert()) {
            // basically here (except STRUCT type)
            tbl.suppressJavaNameConvert();
        }
        if (entityInfo.hasNestedCustomizeEntity()) {
            // basically when STRUCT type
            tbl.setSql2EntityCustomizeHasNested(true);
        }
        if (entityInfo.isAdditionalSchema()) {
            // basically when STRUCT type
            tbl.setUnifiedSchema(entityInfo.getAdditionalSchema());
        }
        tbl.setSql2EntityTypeSafeCursor(entityInfo.isCursorHandling());
        buildCustomizeEntityTitle(logSb, entityName, entityInfo);
        final StringKeyMap<String> pkMap = getPrimaryKeyMap(entityInfo);
        final boolean allCommonColumn = hasAllCommonColumn(metaMap);
        final Set<String> columnNameSet = metaMap.keySet();
        for (String columnName : columnNameSet) {
            final Column column = new Column();
            setupColumnName(columnName, column);
            // if this column is a primary key, the matching element is removed from pkMap
            // and the table name related to the primary key is returned
            final String pkRelatedTableName = setupPrimaryKey(pkMap, entityName, columnName, column);
            setupTorqueType(metaMap, columnName, column, allCommonColumn);
            setupDbType(metaMap, columnName, column);
            setupNotNull(metaMap, columnName, column);
            setupColumnSizeContainsDigit(metaMap, columnName, column);
            setupColumnComment(metaMap, columnName, column);
            setupSql2EntityElement(entityName, entityInfo, metaMap, columnName, column, pkRelatedTableName, logSb);
            tbl.addColumn(column);
        }
        if (!pkMap.isEmpty()) {
            // if not-removed columns exist
            throwPrimaryKeyNotFoundException(entityName, pkMap, columnNameSet);
        }
        if (entityInfo.isScalarHandling()) {
            // it does not generate an only-one-column entity
            // one-way love for utility (just in case)
            tbl.setDatabase(database);
            processScalarHandling(entityInfo, tbl);
        } else if (entityInfo.isDomainHandling()) {
            // it does not generate a customize-entity
            // one-way love for utility (just in case)
            tbl.setDatabase(database);
            processDomainHandling(entityInfo, tbl);
        } else {
            // initialize the entity class name for the typed parameter-bean
            // (must happen before getting the names below)
            database.addTable(tbl);
            entityInfo.setEntityClassName(tbl.getExtendedEntityClassName());
            entityInfo.setImmutableClassName(tbl.getImmutableExtendedEntityClassName());
        }
        logSb.append(ln());
    }
    final String databaseType = getDatabaseTypeFacadeProp().getTargetDatabase();
    final AppData appData = new AppData(databaseType);
    appData.addDatabase(database);
    showCustomizeEntity(logSb);
    showParameterBean();
    final VelocityContext context = createVelocityContext(appData);
    return context;
}
Also used: DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), Table (org.apache.torque.engine.database.model.Table), VelocityContext (org.apache.velocity.VelocityContext), DfCustomizeEntityInfo (org.dbflute.logic.sql2entity.cmentity.DfCustomizeEntityInfo), DfOutsideSqlFile (org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile), Column (org.apache.torque.engine.database.model.Column), AppData (org.apache.torque.engine.database.model.AppData), Database (org.apache.torque.engine.database.model.Database)
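
One detail worth isolating from initControlContext is the primary-key bookkeeping: setupPrimaryKey removes each matched column from pkMap, so whatever is left afterwards represents declared primary keys that never showed up among the result-set columns, and throwPrimaryKeyNotFoundException is called. The sketch below reproduces only that remove-and-check pattern with ordinary Java collections; the PrimaryKeyChecker class and its method are invented for illustration and are not part of DBFlute.

import java.util.LinkedHashSet;
import java.util.Set;

public class PrimaryKeyChecker {

    // pkColumns: primary-key column names declared for the customize-entity.
    // actualColumns: column names that actually came back from the SQL metadata.
    public void checkPrimaryKeys(Set<String> pkColumns, Set<String> actualColumns) {
        final Set<String> remaining = new LinkedHashSet<String>(pkColumns);
        for (String columnName : actualColumns) {
            // mirrors setupPrimaryKey(): a matched column is removed from the working set
            remaining.remove(columnName);
        }
        if (!remaining.isEmpty()) {
            // mirrors throwPrimaryKeyNotFoundException(): leftovers mean the keys were not found
            throw new IllegalStateException("Primary keys not found in the select result: " + remaining);
        }
    }
}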

Aggregations

DfOutsideSqlFile (org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlFile): 4 usages
LinkedHashMap (java.util.LinkedHashMap): 1 usage
AppData (org.apache.torque.engine.database.model.AppData): 1 usage
Column (org.apache.torque.engine.database.model.Column): 1 usage
Database (org.apache.torque.engine.database.model.Database): 1 usage
Table (org.apache.torque.engine.database.model.Table): 1 usage
VelocityContext (org.apache.velocity.VelocityContext): 1 usage
DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta): 1 usage
DfOutsideSqlPack (org.dbflute.logic.sql2entity.analyzer.DfOutsideSqlPack): 1 usage
DfCustomizeEntityInfo (org.dbflute.logic.sql2entity.cmentity.DfCustomizeEntityInfo): 1 usage