Use of org.dbflute.helper.filesystem.FileTextIO in project dbflute-core by dbflute.
Class DfGapileClassReflector, method copyText.
// ===================================================================================
// Assist Logic
// ============
protected void copyText(File mainClassFile, File gapileClassFile) {
    final FileTextIO textIO = new FileTextIO().encodeAsUTF8();
    String text;
    try {
        text = textIO.read(new FileInputStream(mainClassFile));
    } catch (FileNotFoundException e) {
        throw new IllegalStateException("Failed to read the main class text: " + mainClassFile, e);
    }
    try {
        textIO.write(new FileOutputStream(gapileClassFile), text);
    } catch (FileNotFoundException e) {
        throw new IllegalStateException("Failed to write the gapile class text: " + gapileClassFile, e);
    }
}
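As a standalone illustration of the same read-then-write pattern, the sketch below copies one UTF-8 text file to another with FileTextIO. It is a minimal example rather than dbflute code; the file paths are hypothetical, and only the FileTextIO calls seen above (encodeAsUTF8, read, write) are used.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;

import org.dbflute.helper.filesystem.FileTextIO;

public class FileTextCopySketch {

    public static void main(String[] args) throws FileNotFoundException {
        final FileTextIO textIO = new FileTextIO().encodeAsUTF8();
        final File source = new File("input/Example.java"); // hypothetical source file
        final File destination = new File("output/Example.java"); // hypothetical destination file
        // read the whole text as UTF-8, then write it back out unchanged
        final String text = textIO.read(new FileInputStream(source));
        textIO.write(new FileOutputStream(destination), text);
    }
}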
Use of org.dbflute.helper.filesystem.FileTextIO in project dbflute-core by dbflute.
Class DfPmFileTableLoader, method doLoad.
protected Map<String, Map<String, Object>> doLoad(String targetDir, String targetExt, String targetKeyword, List<String> exceptPathList, Map<String, Object> tableMap) {
    final List<File> fileList = DfCollectionUtil.newArrayList();
    final File baseDir = new File(targetDir);
    collectFile(fileList, targetExt, targetKeyword, exceptPathList, baseDir);
    final Map<String, Map<String, Object>> schemaMap = DfCollectionUtil.newLinkedHashMap();
    final FileTextIO textIO = new FileTextIO().encodeAsUTF8().removeUTF8Bom().replaceCrLfToLf();
    for (File pmFile : fileList) {
        final Map<String, Object> table = DfCollectionUtil.newHashMap();
        final String fileName = pmFile.getName();
        table.put("fileName", fileName);
        final String fileText;
        try {
            fileText = textIO.read(new FileInputStream(pmFile));
        } catch (FileNotFoundException e) {
            // no way, collected file
            throw new IllegalStateException("Not found the pmc file: " + pmFile, e);
        }
        final String delimiter = META_DELIMITER;
        if (((String) tableMap.getOrDefault(deriveTableMapKey("isLastaTemplate"), "false")).equalsIgnoreCase("true")) {
            final String templatePath = toPath(pmFile);
            if (!fileText.contains(delimiter)) {
                throwTemplateMetaNotFoundException(templatePath, fileText);
            }
            verifyFormat(templatePath, fileText, delimiter);
            final String headerComment = Srl.extractScopeFirst(fileText, COMMENT_BEGIN, COMMENT_END).getContent();
            final ScopeInfo titleScope = Srl.extractScopeFirst(headerComment, TITLE_BEGIN, TITLE_END);
            final String desc = Srl.substringFirstRear(headerComment, TITLE_END);
            table.put("headerComment", headerComment);
            table.put("title", titleScope.getContent());
            table.put("description", desc);
        }
        String option = null;
        if (fileText.contains(delimiter)) {
            final String bodyMeta = Srl.substringFirstFront(fileText, ">>>");
            if (bodyMeta.contains(OPTION_LABEL)) {
                option = Srl.substringFirstFront(Srl.substringFirstRear(bodyMeta, OPTION_LABEL), LF);
            }
        }
        final boolean convention = !isGenAsIs(option);
        final StringBuilder classNameSb = new StringBuilder();
        classNameSb.append(Srl.camelize(Srl.substringLastFront(fileName, targetExt)));
        final String classSuffix = convention ? deriveClassSuffix(tableMap, baseDir, pmFile) : "";
        classNameSb.append(classSuffix);
        final String className = classNameSb.toString();
        // used as output file name
        table.put("className", className);
        table.put("camelizedName", className);
        final String domainPath = buildDomainPath(pmFile, targetDir);
        // e.g. /member/member_registration.dfpm
        table.put("domainPath", domainPath);
        final String resourcePath = Srl.ltrim(domainPath, "/");
        // e.g. member/member_registration.dfpm
        table.put("resourcePath", resourcePath);
        final String additionalPkg;
        final String basePkgConnector;
        if (Srl.is_NotNull_and_NotEmpty(resourcePath)) {
            if (resourcePath.contains("/")) {
                additionalPkg = Srl.replace(Srl.substringLastFront(resourcePath, "/"), "/", ".");
                basePkgConnector = ".";
            } else {
                additionalPkg = "";
                basePkgConnector = "";
            }
        } else {
            additionalPkg = "";
            basePkgConnector = "";
        }
        table.put("additionalPackage", convention ? "template" + basePkgConnector + additionalPkg : additionalPkg);
        table.put("defName", buildUpperSnakeName(domainPath));
        {
            final String dirPath = Srl.substringLastFront(domainPath, "/");
            final String snakeCase = buildPlainSnakeName(dirPath);
            final String camelizedName = Srl.camelize(snakeCase);
            table.put("camelizedDir", camelizedName);
            table.put("capCamelDir", Srl.initCap(camelizedName));
            table.put("uncapCamelDir", Srl.initUncap(camelizedName));
        }
        {
            final String snakeCase = buildPlainSnakeName(fileName);
            final String camelizedName = Srl.camelize(snakeCase);
            table.put("camelizedFile", camelizedName);
            table.put("capCamelFile", Srl.initCap(camelizedName));
            table.put("uncapCamelFile", Srl.initUncap(camelizedName));
        }
        final Map<String, String> propertyNameTypeMap = new LinkedHashMap<String, String>();
        final Map<String, String> propertyNameOptionMap = new LinkedHashMap<String, String>();
        final Set<String> propertyNameSet = new LinkedHashSet<String>();
        processAutoDetect(fileText, propertyNameTypeMap, propertyNameOptionMap, propertyNameSet);
        processSpecifiedDetect(fileText, propertyNameTypeMap, propertyNameOptionMap, propertyNameSet);
        final List<Map<String, String>> propertyList = new ArrayList<Map<String, String>>();
        final StringBuilder commaSb = new StringBuilder();
        for (String propertyName : propertyNameSet) {
            final Map<String, String> property = new LinkedHashMap<String, String>();
            property.put("propertyName", propertyName);
            property.put("capCalemName", Srl.initCap(propertyName));
            property.put("uncapCalemName", Srl.initUncap(propertyName));
            // exists
            property.put("propertyType", propertyNameTypeMap.get(propertyName));
            propertyList.add(property);
            if (commaSb.length() > 0) {
                commaSb.append(", ");
            }
            commaSb.append("\"").append(propertyName).append("\"");
        }
        table.put("propertyList", propertyList);
        table.put("propertyNameCommaString", commaSb.toString());
        schemaMap.put(fileName, table);
    }
    return schemaMap;
}
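The class name above comes from camelizing the snake_case file name (minus its extension) and appending a convention suffix. The sketch below shows just that derivation; it assumes org.dbflute.util.Srl as the string utility and uses an illustrative suffix value, since the real one is computed by deriveClassSuffix().

import org.dbflute.util.Srl;

public class PmClassNameSketch {

    public static void main(String[] args) {
        final String fileName = "member_registration.dfpm"; // hypothetical template file name
        final String targetExt = ".dfpm";
        final String classSuffix = "Html"; // assumed suffix, for illustration only
        // strip the extension, camelize the snake_case base name, append the suffix
        final String className = Srl.camelize(Srl.substringLastFront(fileName, targetExt)) + classSuffix;
        System.out.println(className); // => MemberRegistrationHtml
    }
}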
Use of org.dbflute.helper.filesystem.FileTextIO in project dbflute-core by dbflute.
Class DfMailFluteTableLoader, method doLoad.
protected Map<String, Map<String, Object>> doLoad(String targetDir, String targetExt, String targetKeyword, List<String> exceptPathList, Map<String, Object> tableMap) {
    final List<File> fileList = DfCollectionUtil.newArrayList();
    final File baseDir = new File(targetDir);
    collectFile(fileList, targetExt, targetKeyword, exceptPathList, baseDir);
    final Map<String, Map<String, Object>> schemaMap = DfCollectionUtil.newLinkedHashMap();
    final FileTextIO textIO = new FileTextIO().encodeAsUTF8().removeUTF8Bom().replaceCrLfToLf();
    for (File bodyFile : fileList) {
        final Map<String, Object> table = DfCollectionUtil.newHashMap();
        final String fileName = bodyFile.getName();
        table.put("fileName", fileName);
        final String className = Srl.camelize(Srl.substringLastFront(fileName, targetExt)) + "Postcard";
        // used as output file name
        table.put("className", className);
        table.put("camelizedName", className);
        final String addedPkg = deriveAdditionalPackage(tableMap, baseDir, bodyFile);
        if (Srl.is_NotNull_and_NotEmpty(addedPkg)) {
            table.put("additionalPackage", addedPkg);
        }
        final String domainPath = buildDomainPath(bodyFile, targetDir);
        // e.g. /member/member_registration.dfmail
        table.put("domainPath", domainPath);
        // e.g. member/member_registration.dfmail
        table.put("resourcePath", Srl.ltrim(domainPath, "/"));
        table.put("defName", buildUpperSnakeName(domainPath));
        {
            final String dirPath = Srl.substringLastFront(domainPath, "/");
            final String snakeCase = buildPlainSnakeName(dirPath);
            final String camelizedName = Srl.camelize(snakeCase);
            table.put("camelizedDir", camelizedName);
            table.put("capCamelDir", Srl.initCap(camelizedName));
            table.put("uncapCamelDir", Srl.initUncap(camelizedName));
        }
        {
            final String snakeCase = buildPlainSnakeName(fileName);
            final String camelizedName = Srl.camelize(snakeCase);
            table.put("camelizedFile", camelizedName);
            table.put("capCamelFile", Srl.initCap(camelizedName));
            table.put("uncapCamelFile", Srl.initUncap(camelizedName));
        }
        final String plainText = readText(textIO, toPath(bodyFile));
        final String delimiter = META_DELIMITER;
        if (!plainText.contains(delimiter)) {
            throwBodyMetaNotFoundException(toPath(bodyFile), plainText);
        }
        verifyFormat(toPath(bodyFile), plainText, delimiter);
        final String headerComment = Srl.extractScopeFirst(plainText, COMMENT_BEGIN, COMMENT_END).getContent();
        final ScopeInfo titleScope = Srl.extractScopeFirst(headerComment, TITLE_BEGIN, TITLE_END);
        final String desc = Srl.substringFirstRear(headerComment, TITLE_END);
        table.put("headerComment", headerComment);
        table.put("title", titleScope.getContent());
        table.put("description", desc);
        final String bodyMeta = Srl.substringFirstFront(plainText, delimiter);
        final boolean hasOptionPlusHtml = hasOptionPlusHtml(bodyMeta, delimiter);
        table.put("hasOptionPlusHtml", hasOptionPlusHtml);
        final String htmlFilePath = deriveHtmlFilePath(toPath(bodyFile));
        if (new File(htmlFilePath).exists()) {
            if (!hasOptionPlusHtml) {
                throwNoPlusHtmlButHtmlTemplateExistsException(toPath(bodyFile), htmlFilePath, bodyMeta);
            }
            verifyMailHtmlTemplateTextFormat(htmlFilePath, readText(textIO, htmlFilePath));
        } else {
            if (hasOptionPlusHtml) {
                throwNoHtmlTemplateButPlusHtmlExistsException(toPath(bodyFile), htmlFilePath, bodyMeta);
            }
        }
        final Map<String, String> propertyNameTypeMap = new LinkedHashMap<String, String>();
        final Map<String, String> propertyNameOptionMap = new LinkedHashMap<String, String>();
        final Set<String> propertyNameSet = new LinkedHashSet<String>();
        processAutoDetect(plainText, propertyNameTypeMap, propertyNameOptionMap, propertyNameSet);
        processSpecifiedDetect(plainText, propertyNameTypeMap, propertyNameOptionMap, propertyNameSet);
        final List<Map<String, String>> propertyList = new ArrayList<Map<String, String>>();
        final StringBuilder commaSb = new StringBuilder();
        for (String propertyName : propertyNameSet) {
            final Map<String, String> property = new LinkedHashMap<String, String>();
            property.put("propertyName", propertyName);
            property.put("capCalemName", Srl.initCap(propertyName));
            property.put("uncapCalemName", Srl.initUncap(propertyName));
            // exists
            property.put("propertyType", propertyNameTypeMap.get(propertyName));
            propertyList.add(property);
            if (commaSb.length() > 0) {
                commaSb.append(", ");
            }
            commaSb.append("\"").append(propertyName).append("\"");
        }
        table.put("propertyList", propertyList);
        table.put("propertyNameCommaString", commaSb.toString());
        schemaMap.put(fileName, table);
    }
    return schemaMap;
}
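Both loaders configure FileTextIO the same way before parsing: UTF-8 decoding, BOM removal, and CRLF-to-LF normalization, so the meta section can be split on plain LF. A minimal sketch of that setup follows; the template path is hypothetical, and the ">>>" delimiter value is assumed from the literal used in the pm loader above for META_DELIMITER.

import org.dbflute.helper.filesystem.FileTextIO;

public class TemplateTextReadSketch {

    public static void main(String[] args) {
        final FileTextIO textIO = new FileTextIO().encodeAsUTF8().removeUTF8Bom().replaceCrLfToLf();
        final String plainText = textIO.read("playsql/mail/member_registration.dfmail"); // hypothetical path
        final String delimiter = ">>>"; // assumed META_DELIMITER value, for illustration
        if (plainText.contains(delimiter)) {
            // everything before the delimiter is the meta section (title, options, ...)
            System.out.println(plainText.substring(0, plainText.indexOf(delimiter)));
        }
    }
}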
Use of org.dbflute.helper.filesystem.FileTextIO in project dbflute-core by dbflute.
Class DfAbsractDataWriter, method processLargeTextFile.
// -----------------------------------------------------
// Large Text File
// ---------------
// contributed by awaawa, thanks!
protected boolean processLargeTextFile(String dataDirectory, File dataFile, String tableName, String columnName, String value, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (value == null || value.trim().length() == 0) {
        // cannot be binary
        return false;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo == null) {
        // unsupported when meta data is not found
        return false;
    }
    final Class<?> columnType = getBindType(tableName, columnInfo);
    if (columnType == null) {
        // unsupported too
        return false;
    }
    if (!isLargeTextFile(dataDirectory, tableName, columnName)) {
        // not target as large text file
        return false;
    }
    // the value should be a path to a text file
    // from data file's current directory
    final String path;
    final String trimmedValue = value.trim();
    if (trimmedValue.startsWith("/")) {
        // means absolute path
        path = trimmedValue;
    } else {
        final String dataFilePath = Srl.replace(dataFile.getAbsolutePath(), "\\", "/");
        final String baseDirPath = Srl.substringLastFront(dataFilePath, "/");
        path = baseDirPath + "/" + trimmedValue;
    }
    final File textFile = new File(path);
    if (!textFile.exists()) {
        throwLoadDataTextFileReadFailureException(tableName, columnName, path, rowNumber);
    }
    try {
        final String read = new FileTextIO().encodeAsUTF8().removeUTF8Bom().read(path);
        ps.setString(bindCount, read);
    } catch (RuntimeException e) {
        throwLoadDataTextFileReadFailureException(tableName, columnName, path, rowNumber, e);
    }
    return true;
}
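The interesting part of processLargeTextFile is how the cell value is resolved to a file path: a value starting with "/" is taken as an absolute path, anything else is resolved relative to the data file's directory. The sketch below isolates that resolution plus the FileTextIO read; both paths are hypothetical.

import java.io.File;

import org.dbflute.helper.filesystem.FileTextIO;
import org.dbflute.util.Srl;

public class LargeTextPathSketch {

    public static void main(String[] args) {
        final File dataFile = new File("playsql/data/common/tsv/member.tsv"); // hypothetical data file
        final String value = "large-text/member_description.txt"; // hypothetical cell value
        final String trimmedValue = value.trim();
        final String path;
        if (trimmedValue.startsWith("/")) {
            path = trimmedValue; // absolute path, used as-is
        } else {
            // relative path: resolve from the data file's directory
            final String dataFilePath = Srl.replace(dataFile.getAbsolutePath(), "\\", "/");
            path = Srl.substringLastFront(dataFilePath, "/") + "/" + trimmedValue;
        }
        final String read = new FileTextIO().encodeAsUTF8().removeUTF8Bom().read(path);
        System.out.println(read.length());
    }
}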
Use of org.dbflute.helper.filesystem.FileTextIO in project dbflute-core by dbflute.
Class DfArrangeQueryDocSetupper, method prepareSourceFileTextIO.
protected FileTextIO prepareSourceFileTextIO() {
    final FileTextIO textIO = new FileTextIO();
    textIO.setEncoding(getBasicProperties().getSourceFileEncoding());
    return textIO;
}
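Unlike the UTF-8 shortcut used elsewhere, this helper binds the FileTextIO to whatever source-file encoding the project configures. A sketch of the same idea with a hard-coded encoding follows, since getBasicProperties() lives in dbflute's properties layer; the encoding and path are illustrative only.

import org.dbflute.helper.filesystem.FileTextIO;

public class SourceFileTextIOSketch {

    public static void main(String[] args) {
        final FileTextIO textIO = new FileTextIO();
        textIO.setEncoding("UTF-8"); // assumed source file encoding, for illustration
        final String javaSource = textIO.read("src/main/java/org/example/ExampleBhv.java"); // hypothetical path
        System.out.println(javaSource.length());
    }
}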