Example 16 with ScopeInfo

Use of org.dbflute.util.Srl.ScopeInfo in the project dbflute-core by dbflute.

From the class DfFixedConditionDynamicAnalyzer, method resolveIfCommentDynamic.

// ===================================================================================
// IF Comment
// ==========
protected void resolveIfCommentDynamic(String piece, Map<String, String> replacementMap) {
    // e.g. /*IF $$parameterBase$$.serviceRank.code() == $cls(ServiceRank.Bronze)*/
    // then piece is "IF $$parameterBase$$.serviceRank.code() == $cls(ServiceRank.Bronze)"
    // (code() is required for now, because cannot compare "CDef == string" in DBFlute Runtime)
    final String operandPrefix = "== ";
    final String clsMark = "$cls";
    if (piece.contains(operandPrefix + clsMark) && piece.endsWith(")")) {
        // cannot use "&&", "||" for now
        final ScopeInfo scopeInfo = Srl.extractScopeFirst(piece, clsMark + "(", ")");
        if (scopeInfo != null) {
            // basically here, just in case
            // e.g. "== $cls(ServiceRank.Bronze)"
            final String clsReplacementKey = operandPrefix + scopeInfo.getScope();
            if (!replacementMap.containsKey(clsReplacementKey)) {
                // ServiceRank.Bronze
                final String clsType = scopeInfo.getContent();
                final String clsCode = extractEmbeddedCommentClassification(piece, clsType);
                replacementMap.put(clsReplacementKey, operandPrefix + clsCode);
            }
        }
    }
}
Also used : ScopeInfo(org.dbflute.util.Srl.ScopeInfo)
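
For reference, a minimal standalone sketch of the scope extraction used above, assuming dbflute-core (which provides org.dbflute.util.Srl) is on the classpath; the class name and the sample piece string are hypothetical, and the expected outputs follow the comments in Example 16:

import org.dbflute.util.Srl;
import org.dbflute.util.Srl.ScopeInfo;

// hypothetical class, for illustration only
public class ClsScopeSketch {

    public static void main(String[] args) {
        // same marks as the IF comment resolution above
        final String piece = "IF $$parameterBase$$.serviceRank.code() == $cls(ServiceRank.Bronze)";
        final ScopeInfo scopeInfo = Srl.extractScopeFirst(piece, "$cls(", ")");
        if (scopeInfo != null) { // null when no "$cls(...)" scope exists
            System.out.println(scopeInfo.getScope());   // $cls(ServiceRank.Bronze), marks included
            System.out.println(scopeInfo.getContent()); // ServiceRank.Bronze, marks excluded
        }
    }
}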

Example 17 with ScopeInfo

Use of org.dbflute.util.Srl.ScopeInfo in the project dbflute-core by dbflute.

From the class DfLanguagePropertyPackageResolver, method processMapType.

protected String processMapType(String typeName, boolean exceptUtil, String mapPkg, String mapName) {
    final String mapBegin = mapName + "<";
    final String mapEnd = ">";
    if (typeName.startsWith(mapBegin) && typeName.endsWith(mapEnd)) {
        final ScopeInfo scope = Srl.extractScopeWide(typeName, mapBegin, mapEnd);
        final String content = scope.getContent();
        final String keyType = Srl.substringFirstFront(content, ",").trim();
        final String valueType = Srl.substringFirstRear(content, ",").trim();
        final String resolvedValueType = doResolvePackageName(valueType, exceptUtil);
        return mapPkg + "." + mapBegin + keyType + ", " + resolvedValueType + mapEnd;
    } else {
        return null;
    }
}
Also used : ScopeInfo(org.dbflute.util.Srl.ScopeInfo)
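
The snippet above relies on the "wide" variant spanning from the first begin mark to the last end mark, which keeps nested generics intact; that reading is an assumption based on its use here. A minimal sketch with a hypothetical input type:

import org.dbflute.util.Srl;
import org.dbflute.util.Srl.ScopeInfo;

// hypothetical class, for illustration only
public class MapTypeSketch {

    public static void main(String[] args) {
        final String typeName = "Map<String, Map<String, Object>>"; // sample input, assumed
        final ScopeInfo scope = Srl.extractScopeWide(typeName, "Map<", ">");
        final String content = scope.getContent(); // String, Map<String, Object>
        final String keyType = Srl.substringFirstFront(content, ",").trim();  // String
        final String valueType = Srl.substringFirstRear(content, ",").trim(); // Map<String, Object>
        System.out.println(keyType + " / " + valueType);
    }
}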

Example 18 with ScopeInfo

Use of org.dbflute.util.Srl.ScopeInfo in the project dbflute-core by dbflute.

From the class DfOutsideSqlTestTask, method getSqlFileRunner.

protected DfSqlFileRunnerExecute getSqlFileRunner(final DfRunnerInformation runInfo) {
    final String nonTargetMark = "df:x";
    final DBDef currentDBDef = getDatabaseTypeFacadeProp().getCurrentDBDef();
    return new DfSqlFileRunnerExecute(runInfo, getDataSource()) {

        protected DfOutsideSqlChecker _outsideSqlChecker;

        @Override
        protected String filterSql(String sql) {
            // /- - - - - - - - - - - - - - - - - - - - - - - - - -
            // check parameter comments in the SQL before filtering
            // - - - - - - - - - -/
            checkParameterComment(_sqlFile, sql);
            // filter comments if needed.
            if (!currentDBDef.dbway().isBlockCommentSupported()) {
                sql = removeBlockComment(sql);
            }
            if (!currentDBDef.dbway().isLineCommentSupported()) {
                sql = removeLineComment(sql);
            }
            return super.filterSql(sql);
        }

        protected String removeBlockComment(final String sql) {
            return Srl.removeBlockComment(sql);
        }

        protected String removeLineComment(final String sql) {
            return Srl.removeLineComment(sql);
        }

        @Override
        protected boolean isTargetSql(String sql) {
            final String entityName = getEntityName(sql);
            if (entityName != null && nonTargetMark.equalsIgnoreCase(entityName)) {
                // non-target SQL
                _nonTargetSqlFileSet.add(_sqlFile);
                _log.info("...Skipping the SQL by non-target mark '" + nonTargetMark + "'");
                return false;
            }
            return super.isTargetSql(sql);
        }

        @Override
        protected void traceSql(String sql) {
            _log.info("SQL:" + ln() + sql);
        }

        @Override
        protected void traceResult(int goodSqlCount, int totalSqlCount) {
            _log.info(" -> success=" + goodSqlCount + " failure=" + (totalSqlCount - goodSqlCount) + ln());
        }

        protected String getEntityName(final String sql) {
            return getTargetString(sql, "#");
        }

        protected String getTargetString(final String sql, final String mark) {
            final List<String> targetList = getTargetList(sql, mark);
            return !targetList.isEmpty() ? targetList.get(0) : null;
        }

        protected List<String> getTargetList(final String sql, final String mark) {
            if (sql == null || sql.trim().length() == 0) {
                String msg = "The sql is invalid: " + sql;
                throw new IllegalArgumentException(msg);
            }
            final List<String> betweenBeginEndMarkList = getListBetweenBeginEndMark(sql, "--" + mark, mark);
            if (!betweenBeginEndMarkList.isEmpty()) {
                return betweenBeginEndMarkList;
            } else {
                // basically for MySQL
                return getListBetweenBeginEndMark(sql, "-- " + mark, mark);
            }
        }

        protected List<String> getListBetweenBeginEndMark(String targetStr, String beginMark, String endMark) {
            final List<ScopeInfo> scopeList = Srl.extractScopeList(targetStr, beginMark, endMark);
            final List<String> resultList = DfCollectionUtil.newArrayList();
            for (ScopeInfo scope : scopeList) {
                resultList.add(scope.getContent());
            }
            return resultList;
        }

        protected void checkParameterComment(File sqlFile, String sql) {
            final DfOutsideSqlProperties outsideSqlProp = getOutsideSqlProperties();
            if (outsideSqlProp.isSuppressParameterCommentCheck()) {
                return;
            }
            if (_outsideSqlChecker == null) {
                _outsideSqlChecker = createOutsideSqlChecker(outsideSqlProp);
            }
            _outsideSqlChecker.check(sqlFile.getName(), sql);
        }
    };
}
Also used : DfOutsideSqlProperties(org.dbflute.properties.DfOutsideSqlProperties) DfSqlFileRunnerExecute(org.dbflute.helper.jdbc.sqlfile.DfSqlFileRunnerExecute) DfOutsideSqlChecker(org.dbflute.logic.outsidesqltest.DfOutsideSqlChecker) DBDef(org.dbflute.dbway.DBDef) ScopeInfo(org.dbflute.util.Srl.ScopeInfo) File(java.io.File) DfSpecifiedSqlFile(org.dbflute.task.bs.assistant.DfSpecifiedSqlFile)
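
The entity-name lookup above scans the SQL for "--#...#" (or "-- #...#") comments via Srl.extractScopeList. A minimal sketch with a hypothetical outside-SQL string, again assuming dbflute-core is on the classpath:

import java.util.List;

import org.dbflute.util.Srl;
import org.dbflute.util.Srl.ScopeInfo;

// hypothetical class, for illustration only
public class SqlMarkSketch {

    public static void main(String[] args) {
        final String sql = "-- #df:x#\nselect * from MEMBER"; // sample SQL, assumed
        // begin mark "-- #" and end mark "#" enclose the entity name comment
        final List<ScopeInfo> scopeList = Srl.extractScopeList(sql, "-- #", "#");
        for (ScopeInfo scope : scopeList) {
            System.out.println(scope.getContent()); // df:x (treated as the non-target mark above)
        }
    }
}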

Example 19 with ScopeInfo

Use of org.dbflute.util.Srl.ScopeInfo in the project dbflute-core by dbflute.

From the class DfPmFileTableLoader, method verifyFormat.

// ===================================================================================
// Verify Format
// =============
protected void verifyFormat(String templatePath, String evaluated, String delimiter) {
    final String meta = Srl.substringFirstFront(evaluated, delimiter);
    if (!meta.endsWith(LF)) {
        // also CRLF checked
        throwBodyMetaNoIndependentDelimiterException(templatePath, evaluated);
    }
    final int rearIndex = evaluated.indexOf(delimiter) + delimiter.length();
    if (evaluated.length() > rearIndex) {
        // just in case (empty template possible?)
        final String rearFirstStr = evaluated.substring(rearIndex, rearIndex + 1);
        if (!Srl.equalsPlain(rearFirstStr, LF, CR)) {
            // e.g. >>> Hello, ...
            throwBodyMetaNoIndependentDelimiterException(templatePath, evaluated);
        }
    }
    if (!meta.startsWith(COMMENT_BEGIN)) {
        // also leading spaces not allowed
        throwTemplateMetaNotStartWithHeaderCommentException(templatePath, evaluated, meta);
    }
    if (!meta.contains(COMMENT_END)) {
        throwBodyMetaHeaderCommentEndMarkNotFoundException(templatePath, evaluated, meta);
    }
    final String headerComment = Srl.extractScopeFirst(evaluated, COMMENT_BEGIN, COMMENT_END).getContent();
    final ScopeInfo titleScope = Srl.extractScopeFirst(headerComment, TITLE_BEGIN, TITLE_END);
    if (titleScope == null) {
        throwBodyMetaTitleCommentNotFoundException(templatePath, evaluated);
    }
    final String desc = Srl.substringFirstRear(headerComment, TITLE_END);
    if (desc.isEmpty()) {
        throwBodyMetaDescriptionCommentNotFoundException(templatePath, evaluated);
    }
    final String rearMeta = Srl.substringFirstRear(meta, COMMENT_END);
    // no way because of already checked
    // if (!rearMeta.contains(LF)) {
    // }
    final List<String> splitList = Srl.splitList(rearMeta, LF);
    if (!splitList.get(0).trim().isEmpty()) {
        // after '*/'
        throwBodyMetaHeaderCommentEndMarkNoIndependentException(templatePath, evaluated);
    }
    final int nextIndex = 1;
    if (splitList.size() > nextIndex) {
        // after header comment
        final List<String> nextList = splitList.subList(nextIndex, splitList.size());
        final int nextSize = nextList.size();
        int index = 0;
        for (String line : nextList) {
            if (index == nextSize - 1) {
                // last loop
                if (line.isEmpty()) {
                    // empty line only allowed in last loop
                    break;
                }
            }
            if (!allowedPrefixList.stream().anyMatch(prefix -> line.startsWith(prefix))) {
                throwBodyMetaUnknownLineException(templatePath, evaluated, line);
            }
            // option check is not here because it can be added in MailFlute
            // if (line.startsWith(OPTION_LABEL)) {
            // final String options = Srl.substringFirstRear(line, OPTION_LABEL);
            // final List<String> optionList = Srl.splitListTrimmed(options, ".");
            // for (String option : optionList) {
            // if (!optionSet.contains(option)) {
            // throwBodyMetaUnknownOptionException(templatePath, evaluated, option);
            // }
            // }
            // }
            ++index;
        }
    }
}
Also used : Arrays(java.util.Arrays) DfFreeGenMapProp(org.dbflute.logic.manage.freegen.DfFreeGenMapProp) DfFreeGenTableLoader(org.dbflute.logic.manage.freegen.DfFreeGenTableLoader) ScopeInfo(org.dbflute.util.Srl.ScopeInfo) DfBasicProperties(org.dbflute.properties.DfBasicProperties) DfParameterAutoDetectAssist(org.dbflute.logic.sql2entity.analyzer.DfParameterAutoDetectAssist) ArrayList(java.util.ArrayList) DfLanguageDependency(org.dbflute.logic.generate.language.DfLanguageDependency) LinkedHashMap(java.util.LinkedHashMap) ExceptionMessageBuilder(org.dbflute.helper.message.ExceptionMessageBuilder) Map(java.util.Map) FileHierarchyTracer(org.dbflute.helper.filesystem.FileHierarchyTracer) Srl(org.dbflute.util.Srl) LinkedHashSet(java.util.LinkedHashSet) DfCollectionUtil(org.dbflute.util.DfCollectionUtil) Set(java.util.Set) DfParameterAutoDetectProcess(org.dbflute.logic.sql2entity.analyzer.DfParameterAutoDetectProcess) DfNameHintUtil(org.dbflute.util.DfNameHintUtil) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream) DfBuildProperties(org.dbflute.DfBuildProperties) File(java.io.File) FileNotFoundException(java.io.FileNotFoundException) DfFreeGenMetaData(org.dbflute.logic.manage.freegen.DfFreeGenMetaData) List(java.util.List) DfSql2EntityMarkAnalyzer(org.dbflute.logic.sql2entity.analyzer.DfSql2EntityMarkAnalyzer) DfParameterAutoDetectBindNode(org.dbflute.logic.sql2entity.analyzer.DfParameterAutoDetectBindNode) IfNode(org.dbflute.twowaysql.node.IfNode) FileHierarchyTracingHandler(org.dbflute.helper.filesystem.FileHierarchyTracingHandler) DfLanguagePropertyPackageResolver(org.dbflute.logic.generate.language.pkgstyle.DfLanguagePropertyPackageResolver) DfFreeGenResource(org.dbflute.logic.manage.freegen.DfFreeGenResource) FileTextIO(org.dbflute.helper.filesystem.FileTextIO) DfSql2EntityMark(org.dbflute.logic.sql2entity.analyzer.DfSql2EntityMark)
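
A minimal sketch of the header-comment parsing that verifyFormat() builds on; the literal marks used here for COMMENT_BEGIN/END and TITLE_BEGIN/END ("/*", "*/", "[", "]") and the sample template text are assumptions for illustration, not the loader's actual constants:

import org.dbflute.util.Srl;
import org.dbflute.util.Srl.ScopeInfo;

// hypothetical class, for illustration only
public class PmMetaSketch {

    public static void main(String[] args) {
        // assumed marks; the real constants live in DfPmFileTableLoader
        final String commentBegin = "/*";
        final String commentEnd = "*/";
        final String titleBegin = "[";
        final String titleEnd = "]";
        final String evaluated = "/*\n [Member Registration]\n Mail for new members\n*/\n>>>\nHello, ...\n";
        final String headerComment = Srl.extractScopeFirst(evaluated, commentBegin, commentEnd).getContent();
        final ScopeInfo titleScope = Srl.extractScopeFirst(headerComment, titleBegin, titleEnd);
        if (titleScope != null) { // null means no [title] in the header comment
            System.out.println(titleScope.getContent()); // Member Registration
            System.out.println(Srl.substringFirstRear(headerComment, titleEnd).trim()); // Mail for new members
        }
    }
}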

Example 20 with ScopeInfo

Use of org.dbflute.util.Srl.ScopeInfo in the project dbflute-core by dbflute.

From the class DfPmFileTableLoader, method doLoad.

protected Map<String, Map<String, Object>> doLoad(String targetDir, String targetExt, String targetKeyword, List<String> exceptPathList, Map<String, Object> tableMap) {
    final List<File> fileList = DfCollectionUtil.newArrayList();
    final File baseDir = new File(targetDir);
    collectFile(fileList, targetExt, targetKeyword, exceptPathList, baseDir);
    final Map<String, Map<String, Object>> schemaMap = DfCollectionUtil.newLinkedHashMap();
    final FileTextIO textIO = new FileTextIO().encodeAsUTF8().removeUTF8Bom().replaceCrLfToLf();
    for (File pmFile : fileList) {
        final Map<String, Object> table = DfCollectionUtil.newHashMap();
        final String fileName = pmFile.getName();
        table.put("fileName", fileName);
        final String fileText;
        try {
            fileText = textIO.read(new FileInputStream(pmFile));
        } catch (FileNotFoundException e) {
            // no way, collected file
            throw new IllegalStateException("Not found the pmc file: " + pmFile, e);
        }
        final String delimiter = META_DELIMITER;
        if (((String) tableMap.getOrDefault(deriveTableMapKey("isLastaTemplate"), "false")).equalsIgnoreCase("true")) {
            final String templatePath = toPath(pmFile);
            if (!fileText.contains(delimiter)) {
                throwTemplateMetaNotFoundException(templatePath, fileText);
            }
            verifyFormat(templatePath, fileText, delimiter);
            final String headerComment = Srl.extractScopeFirst(fileText, COMMENT_BEGIN, COMMENT_END).getContent();
            final ScopeInfo titleScope = Srl.extractScopeFirst(headerComment, TITLE_BEGIN, TITLE_END);
            final String desc = Srl.substringFirstRear(headerComment, TITLE_END);
            table.put("headerComment", headerComment);
            table.put("title", titleScope.getContent());
            table.put("description", desc);
        }
        String option = null;
        if (fileText.contains(delimiter)) {
            final String bodyMeta = Srl.substringFirstFront(fileText, ">>>");
            if (bodyMeta.contains(OPTION_LABEL)) {
                option = Srl.substringFirstFront(Srl.substringFirstRear(bodyMeta, OPTION_LABEL), LF);
            }
        }
        final boolean convention = !isGenAsIs(option);
        final StringBuilder classNameSb = new StringBuilder();
        classNameSb.append(Srl.camelize(Srl.substringLastFront(fileName, targetExt)));
        final String classSuffix = convention ? deriveClassSuffix(tableMap, baseDir, pmFile) : "";
        classNameSb.append(classSuffix);
        final String className = classNameSb.toString();
        // used as output file name
        table.put("className", className);
        table.put("camelizedName", className);
        final String domainPath = buildDomainPath(pmFile, targetDir);
        // e.g. /member/member_registration.dfpm
        table.put("domainPath", domainPath);
        final String resourcePath = Srl.ltrim(domainPath, "/");
        // e.g. member/member_registration.dfpm
        table.put("resourcePath", resourcePath);
        final String additionalPkg;
        final String basePkgConnector;
        if (Srl.is_NotNull_and_NotEmpty(resourcePath)) {
            if (resourcePath.contains("/")) {
                additionalPkg = Srl.replace(Srl.substringLastFront(resourcePath, "/"), "/", ".");
                basePkgConnector = ".";
            } else {
                additionalPkg = "";
                basePkgConnector = "";
            }
        } else {
            additionalPkg = "";
            basePkgConnector = "";
        }
        table.put("additionalPackage", convention ? "template" + basePkgConnector + additionalPkg : additionalPkg);
        table.put("defName", buildUpperSnakeName(domainPath));
        {
            final String dirPath = Srl.substringLastFront(domainPath, "/");
            final String snakeCase = buildPlainSnakeName(dirPath);
            final String camelizedName = Srl.camelize(snakeCase);
            table.put("camelizedDir", camelizedName);
            table.put("capCamelDir", Srl.initCap(camelizedName));
            table.put("uncapCamelDir", Srl.initUncap(camelizedName));
        }
        {
            final String snakeCase = buildPlainSnakeName(fileName);
            final String camelizedName = Srl.camelize(snakeCase);
            table.put("camelizedFile", camelizedName);
            table.put("capCamelFile", Srl.initCap(camelizedName));
            table.put("uncapCamelFile", Srl.initUncap(camelizedName));
        }
        final Map<String, String> propertyNameTypeMap = new LinkedHashMap<String, String>();
        final Map<String, String> propertyNameOptionMap = new LinkedHashMap<String, String>();
        final Set<String> propertyNameSet = new LinkedHashSet<String>();
        processAutoDetect(fileText, propertyNameTypeMap, propertyNameOptionMap, propertyNameSet);
        processSpecifiedDetect(fileText, propertyNameTypeMap, propertyNameOptionMap, propertyNameSet);
        final List<Map<String, String>> propertyList = new ArrayList<Map<String, String>>();
        final StringBuilder commaSb = new StringBuilder();
        for (String propertyName : propertyNameSet) {
            final Map<String, String> property = new LinkedHashMap<String, String>();
            property.put("propertyName", propertyName);
            property.put("capCalemName", Srl.initCap(propertyName));
            property.put("uncapCalemName", Srl.initUncap(propertyName));
            // exists
            property.put("propertyType", propertyNameTypeMap.get(propertyName));
            propertyList.add(property);
            if (commaSb.length() > 0) {
                commaSb.append(", ");
            }
            commaSb.append("\"").append(propertyName).append("\"");
        }
        table.put("propertyList", propertyList);
        table.put("propertyNameCommaString", commaSb.toString());
        schemaMap.put(fileName, table);
    }
    return schemaMap;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) FileNotFoundException(java.io.FileNotFoundException) ArrayList(java.util.ArrayList) ScopeInfo(org.dbflute.util.Srl.ScopeInfo) FileInputStream(java.io.FileInputStream) LinkedHashMap(java.util.LinkedHashMap) FileTextIO(org.dbflute.helper.filesystem.FileTextIO) File(java.io.File) Map(java.util.Map)
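
A minimal sketch of the path-to-package and class-name derivation in doLoad() above; the sample domainPath comes from the inline comment ("/member/member_registration.dfpm"), everything else is hypothetical:

import org.dbflute.util.Srl;

// hypothetical class, for illustration only
public class PmPathSketch {

    public static void main(String[] args) {
        final String domainPath = "/member/member_registration.dfpm"; // sample from the comment above
        final String resourcePath = Srl.ltrim(domainPath, "/"); // member/member_registration.dfpm
        final String additionalPkg;
        if (Srl.is_NotNull_and_NotEmpty(resourcePath) && resourcePath.contains("/")) {
            // directory part becomes the additional package, slashes turned into dots
            additionalPkg = Srl.replace(Srl.substringLastFront(resourcePath, "/"), "/", ".");
        } else {
            additionalPkg = "";
        }
        System.out.println(additionalPkg); // member
        // the class name is the camelized file name, e.g. member_registration -> MemberRegistration
        System.out.println(Srl.camelize(Srl.substringLastFront("member_registration.dfpm", ".dfpm")));
    }
}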

Aggregations

ScopeInfo (org.dbflute.util.Srl.ScopeInfo): 42
File (java.io.File): 7
ArrayList (java.util.ArrayList): 7
Map (java.util.Map): 7
LinkedHashMap (java.util.LinkedHashMap): 6
List (java.util.List): 6
FileTextIO (org.dbflute.helper.filesystem.FileTextIO): 6
FileInputStream (java.io.FileInputStream): 4
FileNotFoundException (java.io.FileNotFoundException): 4
LinkedHashSet (java.util.LinkedHashSet): 4
IOException (java.io.IOException): 3
Arrays (java.util.Arrays): 3
Set (java.util.Set): 3
ExceptionMessageBuilder (org.dbflute.helper.message.ExceptionMessageBuilder): 3
DateTimeFormatter (java.time.format.DateTimeFormatter): 2
TemporalAccessor (java.time.temporal.TemporalAccessor): 2
DfBuildProperties (org.dbflute.DfBuildProperties): 2
DfIllegalPropertySettingException (org.dbflute.exception.DfIllegalPropertySettingException): 2
StringKeyMap (org.dbflute.helper.StringKeyMap): 2
FileHierarchyTracer (org.dbflute.helper.filesystem.FileHierarchyTracer): 2