Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.
The class DfColumnExtractor, method getColumnMap:
public Map<String, DfColumnMeta> getColumnMap(DatabaseMetaData metaData, DfTableMeta tableInfo) throws SQLException {
    final List<DfColumnMeta> columnList = getColumnList(metaData, tableInfo);
    final Map<String, DfColumnMeta> map = new LinkedHashMap<String, DfColumnMeta>();
    for (DfColumnMeta columnInfo : columnList) {
        map.put(columnInfo.getColumnName(), columnInfo);
    }
    return map;
}
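The pattern here is simple: fetch the column list, then key it by column name in a LinkedHashMap so the definition order survives. A minimal, dbflute-free sketch of the same idea using plain JDBC, assuming any live Connection (the use of JDBC type codes in place of DfColumnMeta is an illustrative choice):

import java.sql.*;
import java.util.LinkedHashMap;
import java.util.Map;

public class ColumnMapSketch {
    // builds an ordered map of column name to JDBC type code for one table
    public static Map<String, Integer> getColumnTypeMap(Connection conn, String tableName) throws SQLException {
        final Map<String, Integer> map = new LinkedHashMap<String, Integer>();
        final DatabaseMetaData metaData = conn.getMetaData();
        try (ResultSet rs = metaData.getColumns(null, null, tableName, "%")) {
            while (rs.next()) {
                // COLUMN_NAME and DATA_TYPE are standard JDBC metadata columns
                map.put(rs.getString("COLUMN_NAME"), rs.getInt("DATA_TYPE"));
            }
        }
        return map;
    }
}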
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.
The class DfXlsDataHandlingWriter, method setupDefaultValue:
protected void setupDefaultValue(String dataDirectory, final DfDataSet dataSet) {
    final Map<String, String> defaultValueMap = getDefaultValueMap(dataDirectory);
    for (int i = 0; i < dataSet.getTableSize(); i++) {
        final DfDataTable table = dataSet.getTable(i);
        final Set<String> defaultValueMapKeySet = defaultValueMap.keySet();
        final String tableName = table.getTableDbName();
        final Map<String, DfColumnMeta> metaMetaMap = getColumnMetaMap(tableName);
        for (String defaultTargetColumnName : defaultValueMapKeySet) {
            final String defaultValue = defaultValueMap.get(defaultTargetColumnName);
            if (metaMetaMap.containsKey(defaultTargetColumnName) && !table.hasColumn(defaultTargetColumnName)) {
                // values are resolved later so resolve type only here
                final DfDtsColumnType columnType;
                if (defaultValue.equalsIgnoreCase("sysdate")) {
                    columnType = DfDtsColumnTypes.TIMESTAMP;
                } else {
                    columnType = DfDtsColumnTypes.STRING;
                }
                table.addColumn(defaultTargetColumnName, columnType);
                for (int j = 0; j < table.getRowSize(); j++) {
                    final DfDataRow row = table.getRow(j);
                    // value is set later
                    row.addValue(defaultTargetColumnName, null);
                }
            }
        }
    }
}
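The type resolution above follows a single rule: a default value of "sysdate" (case-insensitive) makes the added column a timestamp, anything else makes it a string. A hedged standalone sketch of that rule (the enum is a stand-in for DfDtsColumnType, introduced only for illustration):

enum SketchColumnType { TIMESTAMP, STRING }

// mirrors the branch above: "sysdate" means timestamp, anything else string
static SketchColumnType resolveDefaultValueType(String defaultValue) {
    return "sysdate".equalsIgnoreCase(defaultValue)
            ? SketchColumnType.TIMESTAMP
            : SketchColumnType.STRING;
}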
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.
The class DfXlsDataHandlingWriter, method doWriteDataTable:
// -----------------------------------------------------
//                                             DataTable
//                                             ---------
protected int doWriteDataTable(DfXlsDataResource resource, File file, DfDataTable dataTable) {
    final String tableDbName = dataTable.getTableDbName();
    if (dataTable.getRowSize() == 0) {
        _log.info("*Not found row at the table: " + tableDbName);
        return 0;
    }
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(file, tableDbName);
    }
    beforeHandlingTable(tableDbName, columnMetaMap);
    checkHeaderColumnIfNeeds(resource, file, dataTable, columnMetaMap);
    final List<String> columnNameList = extractColumnNameList(dataTable);
    final String dataDirectory = resource.getDataDirectory();
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final boolean suppressBatchUpdate = isMergedSuppressBatchUpdate(resource.getDataDirectory());
    Connection conn = null;
    PreparedStatement ps = null;
    String preparedSql = null;
    SQLException retryEx = null;
    DfDataRow retryDataRow = null;
    try {
        conn = _dataSource.getConnection();
        int loadedRowCount = 0;
        final int rowSize = dataTable.getRowSize();
        boolean existsEmptyRow = false;
        for (int i = 0; i < rowSize; i++) {
            final DfDataRow dataRow = dataTable.getRow(i);
            if (ps == null) {
                final MyCreatedState myCreatedState = new MyCreatedState();
                preparedSql = myCreatedState.buildPreparedSql(dataRow);
                ps = conn.prepareStatement(preparedSql);
            }
            if (doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                    , columnMetaMap // meta data
                    , conn, ps // JDBC resources
                    , loggingInsertType, suppressBatchUpdate)) { // option
                ++loadedRowCount;
                if (existsEmptyRow) {
                    final int emptyRowNumber = dataRow.getRowNumber() - 1;
                    throwXlsDataEmptyRowDataException(dataDirectory, file, dataTable, emptyRowNumber);
                }
            } else {
                existsEmptyRow = true;
            }
        }
        if (existsEmptyRow) {
            _log.info("...Skipping the terminal garbage row");
        }
        if (!suppressBatchUpdate) {
            boolean beginTransaction = false;
            boolean transactionClosed = false;
            try {
                // transaction to retry after
                conn.setAutoCommit(false);
                beginTransaction = true;
                ps.executeBatch();
                conn.commit();
                transactionClosed = true;
            } catch (SQLException e) {
                conn.rollback();
                transactionClosed = true;
                if (!(e instanceof BatchUpdateException)) {
                    throw e;
                }
                _log.info("...Retrying by suppressing batch update: " + tableDbName);
                final PreparedStatement retryPs = conn.prepareStatement(preparedSql);
                for (int i = 0; i < rowSize; i++) {
                    final DfDataRow dataRow = dataTable.getRow(i);
                    try {
                        doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                                , columnMetaMap // meta data
                                , conn, retryPs // JDBC resources
                                , LoggingInsertType.NONE, true); // option (no logging and suppress batch)
                    } catch (SQLException rowEx) {
                        retryEx = rowEx;
                        retryDataRow = dataRow;
                        break;
                    }
                }
                try {
                    retryPs.close();
                } catch (SQLException ignored) {}
                throw e;
            } finally {
                if (!transactionClosed) {
                    // for other exceptions
                    conn.rollback();
                }
                if (beginTransaction) {
                    conn.setAutoCommit(true);
                }
            }
        }
        noticeLoadedRowSize(tableDbName, loadedRowCount);
        checkImplicitClassification(file, tableDbName, columnNameList);
        return loadedRowCount;
    } catch (RuntimeException e) {
        handleWriteTableFailureException(dataDirectory, file, tableDbName, e);
        return -1; // unreachable
    } catch (SQLException e) {
        handleWriteTableSQLException(dataDirectory, file, dataTable, e, retryEx, retryDataRow, columnNameList);
        return -1; // unreachable
    } finally {
        closeResource(conn, ps);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
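The interesting part of this method is the recovery path: the whole batch runs in one transaction, and when executeBatch() fails with a BatchUpdateException, the rows are replayed one by one to locate the first broken row before the original exception is rethrown. A simplified, dbflute-free sketch of that pattern, assuming a hypothetical MEMBER table (the SQL and names are illustrative):

import java.sql.*;
import java.util.List;

public class BatchRetrySketch {
    public static void insertAll(Connection conn, List<String> names) throws SQLException {
        final String sql = "insert into MEMBER (MEMBER_NAME) values (?)"; // hypothetical table
        conn.setAutoCommit(false);
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (String name : names) {
                ps.setString(1, name);
                ps.addBatch();
            }
            ps.executeBatch();
            conn.commit();
        } catch (BatchUpdateException e) {
            conn.rollback();
            // replay one row at a time to pinpoint the broken row
            try (PreparedStatement retryPs = conn.prepareStatement(sql)) {
                for (int i = 0; i < names.size(); i++) {
                    retryPs.setString(1, names.get(i));
                    try {
                        retryPs.executeUpdate();
                    } catch (SQLException rowEx) {
                        System.err.println("row " + (i + 1) + " failed: " + rowEx.getMessage());
                        break;
                    }
                }
            }
            conn.rollback(); // the replay is diagnostic only; keep the table unchanged
            throw e;
        } finally {
            conn.setAutoCommit(true);
        }
    }
}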
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.
The class DfAbsractDataWriter, method processArray:
// -----------------------------------------------------
//                                                 ARRAY
//                                                 -----
protected boolean processArray(String tableName, String columnName, String value, PreparedStatement ps, int bindCount,
        Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (value == null || value.trim().length() == 0) {
        // cannot be array
        return false;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo != null) {
        if (getBasicProperties().isDatabasePostgreSQL()) {
            // rsMeta#getColumnTypeName() returns a value starting with "_"
            // when rsMeta#getColumnType() returns Types.ARRAY in PostgreSQL,
            // e.g. UUID[] -> _uuid
            final int jdbcDefValue = columnInfo.getJdbcDefValue();
            final String dbTypeName = columnInfo.getDbTypeName();
            if (jdbcDefValue != Types.ARRAY || !dbTypeName.startsWith("_")) {
                return false;
            }
            value = filterArrayValue(value);
            ps.setObject(bindCount, value, Types.OTHER);
            return true;
        }
    }
    // unsupported when meta data is not found
    return false;
}
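The binding trick here is that Types.OTHER makes the driver send the literal text unchanged, so the PostgreSQL server casts it to the target array type itself. A minimal sketch of that technique, assuming the PostgreSQL JDBC driver and a hypothetical EXAMPLE table with a uuid[] column named IDS:

import java.sql.*;

// binds a PostgreSQL array literal such as "{...}" as an untyped parameter;
// the server casts the text to uuid[] on insert
static void insertUuidArray(Connection conn, String arrayLiteral) throws SQLException {
    try (PreparedStatement ps = conn.prepareStatement("insert into EXAMPLE (IDS) values (?)")) {
        ps.setObject(1, arrayLiteral, Types.OTHER);
        ps.executeUpdate();
    }
}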
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.
The class DfLoadingControlProp, method resolveRelativeDate:
// ===================================================================================
//                                                                     Date Adjustment
//                                                                     ===============
public void resolveRelativeDate(String dataDirectory, String tableName, Map<String, Object> columnValueMap,
        Map<String, DfColumnMeta> columnMetaMap, Set<String> sysdateColumnSet,
        DfColumnBindTypeProvider bindTypeProvider, int rowNumber) {
    // was born at LUXA
    if (!hasDateAdjustment(dataDirectory, tableName)) {
        return;
    }
    final Map<String, Object> resolvedMap = new HashMap<String, Object>();
    for (Entry<String, Object> entry : columnValueMap.entrySet()) {
        final String columnName = entry.getKey();
        if (isSysdateColumn(sysdateColumnSet, columnName)) {
            // keep sysdate as default value
            continue;
        }
        final Object value = entry.getValue();
        if (value == null) {
            continue;
        }
        if (!isDateAdjustmentAllowedValueType(value)) { // out of target type
            continue;
        }
        if (!hasDateAdjustmentExp(dataDirectory, tableName, columnName)) { // no-adjustment column
            continue;
        }
        final DfColumnMeta columnMeta = columnMetaMap.get(columnName);
        final Class<?> bindType = bindTypeProvider.provide(tableName, columnMeta);
        if (bindType == null) { // unknown column type
            continue;
        }
        if (!isDateAdjustmentAllowedBindType(dataDirectory, tableName, columnName, bindType)) { // cannot be date
            continue;
        }
        final String dateExp = toAdjustedResourceDateExp(tableName, columnName, bindType, value);
        if (dateExp == null) { // e.g. wrong value
            continue;
        }
        final String adjusted = adjustDateIfNeeds(dataDirectory, tableName, columnName, dateExp, rowNumber);
        resolvedMap.put(columnName, convertAdjustedValueToDateType(tableName, columnName, bindType, adjusted));
    }
    for (Entry<String, Object> entry : resolvedMap.entrySet()) { // to keep original map instance
        columnValueMap.put(entry.getKey(), entry.getValue());
    }
}
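Once a column passes all the guards above, its value is normalized to a date expression, shifted by the configured relative amount, and written back into the original map. A dbflute-free sketch of just that adjustment step using java.time (the "yyyy-MM-dd HH:mm:ss" pattern and the day-based shift are assumptions for illustration, not the actual dataprop syntax):

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

// parses a resolved date expression, applies a relative day shift,
// and re-formats it with the same pattern
static String adjustDateExp(String dateExp, int addedDays) {
    final DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    final LocalDateTime adjusted = LocalDateTime.parse(dateExp, fmt).plusDays(addedDays);
    return adjusted.format(fmt);
}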