
Example 1 with FileToken

Use of org.dbflute.helper.token.file.FileToken in project dbflute-core by dbflute.

From the class DfCraftDiff, method loadMeta:

// ===================================================================================
// Loading Process
// ===============
protected void loadMeta(String craftMetaDir) {
    final File metaDir = new File(craftMetaDir);
    if (!metaDir.exists() || !metaDir.isDirectory()) {
        return;
    }
    final List<File> metaFileList = getCraftMetaFileList(craftMetaDir);
    if (metaFileList.isEmpty()) {
        // empty directory or no check
        return;
    }
    _log.info("...Loading craft meta: " + craftMetaDir);
    for (final File metaFile : metaFileList) {
        final String craftTitle = extractCraftTitle(metaFile);
        _craftTitleSet.add(craftTitle);
        final boolean next = isCraftDirectionNext(metaFile);
        final FileToken fileToken = new FileToken();
        final List<DfCraftValue> craftValueList = DfCollectionUtil.newArrayList();
        try {
            tokenize(metaFile, fileToken, craftValueList);
        } catch (IOException e) {
            String msg = "Failed to read the file: " + metaFile;
            throw new IllegalStateException(msg, e);
        }
        _log.info("  " + metaFile.getName() + ": rowCount=" + craftValueList.size());
        if (next) {
            registerNextMeta(craftTitle, craftValueList);
        } else {
            registerPreviousMeta(craftTitle, craftValueList);
        }
    }
}
Also used: IOException (java.io.IOException), File (java.io.File), FileToken (org.dbflute.helper.token.file.FileToken)
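
The tokenize(metaFile, fileToken, craftValueList) helper called above is defined elsewhere in DfCraftDiff and is not part of this snippet. Below is a minimal sketch of what such a helper could look like, assuming FileToken exposes a row-callback reading API (a tokenize(...) method taking a FileTokenizingCallback with a FileTokenizingRowResource) that mirrors the make(...) writing API in the later examples. The callback and resource type names, the getValueList() accessor, the DfCraftValue constructor shape, and the delimiter/encoding options are assumptions, not the actual dbflute code; the corresponding java.io and FileTokenizing* imports would also be needed alongside those listed above.

protected void tokenize(File metaFile, FileToken fileToken, List<DfCraftValue> craftValueList) throws IOException {
    // assumption: FileToken provides a tokenize(...) reading API symmetric with make(...)
    try (InputStream ins = new FileInputStream(metaFile)) {
        fileToken.tokenize(ins, new FileTokenizingCallback() {

            public void handleRow(FileTokenizingRowResource resource) {
                // assumed accessor: the parsed values of one row, in column order
                final List<String> valueList = resource.getValueList();
                // illustrative only: first column as the craft key, the rest joined as its value
                final String craftKey = valueList.get(0);
                final String craftValue = String.join("|", valueList.subList(1, valueList.size()));
                craftValueList.add(new DfCraftValue(craftKey, craftValue)); // constructor shape is an assumption
            }
        }, op -> op.encodeAsUTF8().delimitateByTab()); // option names assumed to mirror the writing side
    }
}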

Example 2 with FileToken

Use of org.dbflute.helper.token.file.FileToken in project dbflute-core by dbflute.

From the class DfLReverseOutputHandler, method outputDelimiterData:

// ===================================================================================
// Delimiter Data
// ==============
protected void outputDelimiterData(final Table table, DfLReverseDataResult templateDataResult, final int limit, DfLReverseOutputResource resource, int sheetNumber, List<String> sectionInfoList) {
    if (_delimiterDataDir == null) {
        return;
    }
    final File delimiterDir = new File(_delimiterDataDir);
    // fixed
    final String ext = "tsv";
    if (!delimiterDir.exists()) {
        delimiterDir.mkdirs();
    }
    final FileToken fileToken = new FileToken();
    // file name uses DB name (no display name) just in case
    final String delimiterFilePath = buildDelimiterFilePath(table, resource, sheetNumber, delimiterDir, ext);
    final List<String> columnNameList = new ArrayList<String>();
    for (Column column : table.getColumnList()) {
        if (!_containsCommonColumn && column.isCommonColumn()) {
            continue;
        }
        columnNameList.add(column.getName());
    }
    final DfJFadCursorCallback cursorCallback = templateDataResult.getCursorCallback();
    cursorCallback.select(new DfJFadCursorHandler() {

        int count = 0;

        public void handle(final DfJFadResultSetWrapper wrapper) {
            try {
                fileToken.make(delimiterFilePath, new FileMakingCallback() {

                    public void write(FileMakingRowWriter writer) throws IOException, SQLException {
                        while (wrapper.next()) {
                            if (limit >= 0 && limit < count) {
                                break;
                            }
                            final List<String> valueList = new ArrayList<String>();
                            for (String columnName : columnNameList) {
                                valueList.add(wrapper.getString(columnName));
                            }
                            writer.writeRow(valueList);
                            ++count;
                        }
                    }
                }, op -> op.encodeAsUTF8().separateByLf().delimitateByTab().headerInfo(columnNameList));
            } catch (IOException e) {
                handleDelimiterDataFailureException(table, delimiterFilePath, e);
            }
            final String delimiterInfo = "  " + delimiterFilePath + " (" + count + ")";
            _log.info(delimiterInfo);
            sectionInfoList.add(delimiterInfo);
        }
    });
}
Also used: DfJFadCursorHandler (org.dbflute.helper.jdbc.facade.DfJFadCursorHandler), Column (org.apache.torque.engine.database.model.Column), DfTableXlsWriter (org.dbflute.helper.io.xls.DfTableXlsWriter), LoggerFactory (org.slf4j.LoggerFactory), DfJFadResultSetWrapper (org.dbflute.helper.jdbc.facade.DfJFadResultSetWrapper), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), SQLException (java.sql.SQLException), Table (org.apache.torque.engine.database.model.Table), Map (java.util.Map), DfLittleAdjustmentProperties (org.dbflute.properties.DfLittleAdjustmentProperties), DataSource (javax.sql.DataSource), DfDataTable (org.dbflute.helper.dataset.DfDataTable), DfDataSet (org.dbflute.helper.dataset.DfDataSet), FileMakingRowWriter (org.dbflute.helper.token.file.FileMakingRowWriter), Logger (org.slf4j.Logger), DfJFadCursorCallback (org.dbflute.helper.jdbc.facade.DfJFadCursorCallback), FileToken (org.dbflute.helper.token.file.FileToken), FileMakingCallback (org.dbflute.helper.token.file.FileMakingCallback), Set (java.util.Set), DfLReverseOutputResource (org.dbflute.logic.doc.lreverse.DfLReverseOutputResource), IOException (java.io.IOException), DfBuildProperties (org.dbflute.DfBuildProperties), File (java.io.File), DfDtsColumnTypes (org.dbflute.helper.dataset.types.DfDtsColumnTypes), List (java.util.List), Entry (java.util.Map.Entry), DfDataRow (org.dbflute.helper.dataset.DfDataRow), DfAdditionalTableProperties (org.dbflute.properties.DfAdditionalTableProperties)
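
Distilled from the example above, here is a minimal stand-alone sketch of the FileToken.make(...) writing pattern. The calls themselves (make with a FileMakingCallback, writer.writeRow, and the encodeAsUTF8().separateByLf().delimitateByTab().headerInfo(...) option chain) are exactly the ones used by outputDelimiterData; the output path, column names, and row values are made up purely for illustration.

protected void writeSampleTsv(FileToken fileToken) {
    // hypothetical output path and columns, for illustration only
    final String filePath = "./output/data/sample-table.tsv";
    final List<String> columnNameList = java.util.Arrays.asList("MEMBER_ID", "MEMBER_NAME");
    try {
        fileToken.make(filePath, new FileMakingCallback() {

            public void write(FileMakingRowWriter writer) throws IOException, SQLException {
                // one writeRow() call per data row; values line up with the header columns
                writer.writeRow(java.util.Arrays.asList("1", "Pixy"));
                writer.writeRow(java.util.Arrays.asList("2", "Land"));
            }
        }, op -> op.encodeAsUTF8().separateByLf().delimitateByTab().headerInfo(columnNameList));
    } catch (IOException e) {
        throw new IllegalStateException("Failed to write the delimiter file: " + filePath, e);
    }
}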

Example 3 with FileToken

Use of org.dbflute.helper.token.file.FileToken in project dbflute-core by dbflute.

From the class DfLReverseOutputHandler, method outputDelimiterData (variant that logs the over-xls-limit table):

// ===================================================================================
// Delimiter Data
// ==============
protected void outputDelimiterData(final Table table, DfLReverseDataResult templateDataResult, final int limit, DfLReverseOutputResource resource, int sheetNumber, List<String> sectionInfoList) {
    if (_delimiterDataDir == null) {
        return;
    }
    final File delimiterDir = new File(_delimiterDataDir);
    // fixed
    final String ext = "tsv";
    final String outputInfo = "...Outputting the over-xls-limit table: " + table.getTableDispName();
    _log.info(outputInfo);
    sectionInfoList.add(outputInfo);
    if (!delimiterDir.exists()) {
        delimiterDir.mkdirs();
    }
    final FileToken fileToken = new FileToken();
    // file name uses DB name (no display name) just in case
    final String delimiterFilePath = buildDelimiterFilePath(table, resource, sheetNumber, delimiterDir, ext);
    final List<String> columnNameList = new ArrayList<String>();
    for (Column column : table.getColumnList()) {
        if (!_containsCommonColumn && column.isCommonColumn()) {
            continue;
        }
        columnNameList.add(column.getName());
    }
    final DfJFadCursorCallback cursorCallback = templateDataResult.getCursorCallback();
    cursorCallback.select(new DfJFadCursorHandler() {

        int count = 0;

        public void handle(final DfJFadResultSetWrapper wrapper) {
            try {
                fileToken.make(delimiterFilePath, new FileMakingCallback() {

                    public void write(FileMakingRowWriter writer) throws IOException, SQLException {
                        while (wrapper.next()) {
                            if (limit >= 0 && limit < count) {
                                break;
                            }
                            final List<String> valueList = new ArrayList<String>();
                            for (String columnName : columnNameList) {
                                valueList.add(wrapper.getString(columnName));
                            }
                            writer.writeRow(valueList);
                            ++count;
                        }
                    }
                }, op -> op.encodeAsUTF8().separateByLf().delimitateByTab().headerInfo(columnNameList));
            } catch (IOException e) {
                handleDelimiterDataFailureException(table, delimiterFilePath, e);
            }
            final String delimiterInfo = " -> " + delimiterFilePath + " (" + count + ")";
            _log.info(delimiterInfo);
            sectionInfoList.add(delimiterInfo);
        }
    });
}
Also used: DfJFadCursorHandler (org.dbflute.helper.jdbc.facade.DfJFadCursorHandler), Column (org.apache.torque.engine.database.model.Column), DfTableXlsWriter (org.dbflute.helper.io.xls.DfTableXlsWriter), LoggerFactory (org.slf4j.LoggerFactory), DfJFadResultSetWrapper (org.dbflute.helper.jdbc.facade.DfJFadResultSetWrapper), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), SQLException (java.sql.SQLException), Table (org.apache.torque.engine.database.model.Table), Map (java.util.Map), DfLittleAdjustmentProperties (org.dbflute.properties.DfLittleAdjustmentProperties), DataSource (javax.sql.DataSource), DfDataTable (org.dbflute.helper.dataset.DfDataTable), DfDataSet (org.dbflute.helper.dataset.DfDataSet), FileMakingRowWriter (org.dbflute.helper.token.file.FileMakingRowWriter), Logger (org.slf4j.Logger), DfJFadCursorCallback (org.dbflute.helper.jdbc.facade.DfJFadCursorCallback), FileToken (org.dbflute.helper.token.file.FileToken), FileMakingCallback (org.dbflute.helper.token.file.FileMakingCallback), Set (java.util.Set), IOException (java.io.IOException), DfBuildProperties (org.dbflute.DfBuildProperties), File (java.io.File), DfDtsColumnTypes (org.dbflute.helper.dataset.types.DfDtsColumnTypes), List (java.util.List), Entry (java.util.Map.Entry), DfDataRow (org.dbflute.helper.dataset.DfDataRow), DfAdditionalTableProperties (org.dbflute.properties.DfAdditionalTableProperties)

Aggregations

File (java.io.File): 3 usages
IOException (java.io.IOException): 3 usages
FileToken (org.dbflute.helper.token.file.FileToken): 3 usages
SQLException (java.sql.SQLException): 2 usages
ArrayList (java.util.ArrayList): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
List (java.util.List): 2 usages
Map (java.util.Map): 2 usages
Entry (java.util.Map.Entry): 2 usages
Set (java.util.Set): 2 usages
DataSource (javax.sql.DataSource): 2 usages
Column (org.apache.torque.engine.database.model.Column): 2 usages
Table (org.apache.torque.engine.database.model.Table): 2 usages
DfBuildProperties (org.dbflute.DfBuildProperties): 2 usages
DfDataRow (org.dbflute.helper.dataset.DfDataRow): 2 usages
DfDataSet (org.dbflute.helper.dataset.DfDataSet): 2 usages
DfDataTable (org.dbflute.helper.dataset.DfDataTable): 2 usages
DfDtsColumnTypes (org.dbflute.helper.dataset.types.DfDtsColumnTypes): 2 usages
DfTableXlsWriter (org.dbflute.helper.io.xls.DfTableXlsWriter): 2 usages
DfJFadCursorCallback (org.dbflute.helper.jdbc.facade.DfJFadCursorCallback): 2 usages