Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute: the class DfAbsractDataWriter, method processDate.
// -----------------------------------------------------
// Date
// ----
protected boolean processDate(String dataDirectory, String tableName, String columnName, String value, Connection conn, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (value == null || value.trim().length() == 0) {
        // cannot be date
        return false;
    }
    final DfColumnMeta columnMeta = columnInfoMap.get(columnName);
    if (columnMeta != null) {
        final Class<?> columnType = getBindType(tableName, columnMeta);
        if (columnType != null) {
            if (!java.util.Date.class.isAssignableFrom(columnType)) {
                return false;
            }
            // only when column type specified
            final String resolved = resolveRelativeSysdate(dataDirectory, tableName, columnName, value);
            bindNotNullValueByColumnType(tableName, columnName, conn, ps, bindCount, resolved, columnType, rowNumber);
            return true;
        }
    }
    // if meta data is not found (basically no way)
    try {
        final Timestamp timestamp = DfTypeUtil.toTimestamp(value);
        ps.setTimestamp(bindCount, timestamp);
        return true;
    } catch (ParseTimestampException ignored) {
        // retry as time
        try {
            final Time time = DfTypeUtil.toTime(value);
            ps.setTime(bindCount, time);
            return true;
        } catch (ParseTimeException ignored2) {}
        // couldn't parse as timestamp and time
        return false;
    }
}
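When column meta data is missing, the method parses the raw value first as a timestamp and then retries as a time. The standalone sketch below mimics that fallback with plain JDK parsing only (java.sql.Timestamp.valueOf and java.sql.Time.valueOf, which accept just the strict JDBC escape formats); it illustrates the pattern and is not DfTypeUtil's more lenient parsing.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;

public class DateFallbackSketch {

    // returns true when the value could be bound as a timestamp or a time
    public static boolean bindDateLike(PreparedStatement ps, int bindCount, String value) throws SQLException {
        try {
            // strict "yyyy-[m]m-[d]d hh:mm:ss[.f...]" format only
            ps.setTimestamp(bindCount, Timestamp.valueOf(value));
            return true;
        } catch (IllegalArgumentException notTimestamp) {
            try {
                // retry as "hh:mm:ss"
                ps.setTime(bindCount, Time.valueOf(value));
                return true;
            } catch (IllegalArgumentException notTime) {
                return false; // neither timestamp nor time
            }
        }
    }
}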
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute: the class DfAbsractDataWriter, method processNull.
// ===================================================================================
// Process Binding
// ===============
// -----------------------------------------------------
// Null Value
// ----------
protected boolean processNull(String dataDirectory, String tableName, String columnName, Object value, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (!isNullValue(value)) {
        return false;
    }
    Map<String, Integer> cacheMap = _nullTypeCacheMap.get(tableName);
    if (cacheMap == null) {
        cacheMap = StringKeyMap.createAsFlexibleOrdered();
        _nullTypeCacheMap.put(tableName, cacheMap);
    }
    final Integer cachedType = cacheMap.get(columnName);
    if (cachedType != null) { // cache hit
        ps.setNull(bindCount, cachedType); // basically no exception
        return true;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo != null) {
        // use mapped type at first
        final String mappedJdbcType = _columnHandler.getColumnJdbcType(columnInfo);
        final Integer mappedJdbcDefValue = TypeMap.getJdbcDefValueByJdbcType(mappedJdbcType);
        try {
            ps.setNull(bindCount, mappedJdbcDefValue);
            cacheMap.put(columnName, mappedJdbcDefValue);
        } catch (SQLException e) {
            // retry by plain type
            final int plainJdbcDefValue = columnInfo.getJdbcDefValue();
            try {
                ps.setNull(bindCount, plainJdbcDefValue);
                cacheMap.put(columnName, plainJdbcDefValue);
            } catch (SQLException ignored) {
                final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
                br.addNotice("Failed to execute setNull(bindCount, jdbcDefValue).");
                br.addItem("Column");
                br.addElement(tableName + "." + columnName);
                br.addElement(columnInfo.toString());
                br.addItem("Mapped JDBC Type");
                br.addElement(mappedJdbcType);
                br.addItem("First JDBC Def-Value");
                br.addElement(mappedJdbcDefValue);
                br.addItem("Retry JDBC Def-Value");
                br.addElement(plainJdbcDefValue);
                br.addItem("Retry Message");
                br.addElement(ignored.getMessage());
                String msg = br.buildExceptionMessage();
                throw new DfJDBCException(msg, e);
            }
        }
    } else { // basically no way
        Integer tryType = Types.VARCHAR; // as default
        try {
            ps.setNull(bindCount, tryType);
            cacheMap.put(columnName, tryType);
        } catch (SQLException e) {
            tryType = Types.NUMERIC;
            try {
                ps.setNull(bindCount, tryType);
                cacheMap.put(columnName, tryType);
            } catch (SQLException ignored) {
                tryType = Types.TIMESTAMP;
                try {
                    ps.setNull(bindCount, tryType);
                    cacheMap.put(columnName, tryType);
                } catch (SQLException iignored) {
                    tryType = Types.OTHER;
                    try {
                        ps.setNull(bindCount, tryType); // last try
                        cacheMap.put(columnName, tryType);
                    } catch (SQLException iiignored) {
                        throw e;
                    }
                }
            }
        }
    }
    return true;
}
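The core idea here is "remember which JDBC type setNull() accepted for each column, and when no meta data exists, walk a short list of candidate types." Below is a minimal sketch of that same cache-then-retry idea using only JDK classes (a plain HashMap instead of DBFlute's flexible StringKeyMap, so column-name matching is case-sensitive here); it is an illustration, not the DBFlute implementation.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.HashMap;
import java.util.Map;

public class NullBindSketch {

    // per-column cache of the JDBC type that setNull() accepted last time
    private final Map<String, Integer> nullTypeCache = new HashMap<>();

    public void bindNull(PreparedStatement ps, int bindCount, String columnName) throws SQLException {
        final Integer cached = nullTypeCache.get(columnName);
        if (cached != null) {
            ps.setNull(bindCount, cached); // cache hit: reuse the type that worked before
            return;
        }
        // no meta data available: try a few common JDBC types in order
        final int[] candidates = {Types.VARCHAR, Types.NUMERIC, Types.TIMESTAMP, Types.OTHER};
        SQLException first = null;
        for (int candidate : candidates) {
            try {
                ps.setNull(bindCount, candidate);
                nullTypeCache.put(columnName, candidate);
                return;
            } catch (SQLException e) {
                if (first == null) {
                    first = e; // keep the first failure for the rethrow
                }
            }
        }
        throw first; // every candidate type was rejected by the driver
    }
}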
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute: the class DfAbsractDataWriter, method processBinary.
// -----------------------------------------------------
// Binary
// ------
protected boolean processBinary(String dataDirectory, File dataFile, String tableName, String columnName, String value, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (value == null || value.trim().length() == 0) {
        // cannot be binary
        return false;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo == null) {
        // unsupported when meta data is not found
        return false;
    }
    final Class<?> columnType = getBindType(tableName, columnInfo);
    if (columnType == null) {
        // unsupported too
        return false;
    }
    if (!byte[].class.isAssignableFrom(columnType)) {
        // not binary
        return false;
    }
    // the value should be a path to a binary file
    // from data file's current directory
    final String path;
    final String trimmedValue = value.trim();
    if (trimmedValue.startsWith("/")) {
        // means absolute path
        path = trimmedValue;
    } else {
        final String dataFilePath = Srl.replace(dataFile.getAbsolutePath(), "\\", "/");
        final String baseDirPath = Srl.substringLastFront(dataFilePath, "/");
        path = baseDirPath + "/" + trimmedValue;
    }
    final File binaryFile = new File(path);
    if (!binaryFile.exists()) {
        throwLoadDataBinaryFileNotFoundException(tableName, columnName, path, rowNumber);
    }
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(binaryFile);
        final int fileSize = (int) binaryFile.length();
        final byte[] bytes = new byte[fileSize];
        fis.read(bytes);
        ps.setBytes(bindCount, bytes);
    } catch (IOException e) {
        throwLoadDataBinaryFileReadFailureException(tableName, columnName, path, rowNumber, e);
    } finally {
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException ignored) {}
        }
    }
    return true;
}
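The path resolution and file read can be expressed more compactly with java.nio. The sketch below is an assumption-level equivalent, not the DBFlute code: it resolves the cell value against the data file's directory and uses Files.readAllBytes, which avoids relying on a single InputStream.read() call that may return fewer bytes than the file length.

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class BinaryBindSketch {

    // resolves the cell value as a path (absolute, or relative to the data file's directory)
    // and binds the file content as bytes; returns false when the file does not exist
    public static boolean bindBinaryFile(File dataFile, String value, PreparedStatement ps, int bindCount)
            throws SQLException, IOException {
        final String trimmed = value.trim();
        final File binaryFile = trimmed.startsWith("/")
                ? new File(trimmed)
                : new File(dataFile.getParentFile(), trimmed);
        if (!binaryFile.exists()) {
            return false; // the real code throws a detailed load-data exception here
        }
        final byte[] bytes = Files.readAllBytes(binaryFile.toPath()); // reads the whole file
        ps.setBytes(bindCount, bytes);
        return true;
    }
}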
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute: the class DfAbsractDataWriter, method processNumber.
// -----------------------------------------------------
// Number
// ------
protected boolean processNumber(String tableName, String columnName, String value, Connection conn, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (value == null || value.trim().length() == 0) {
        // cannot be number
        return false;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo != null) {
        final Class<?> columnType = getBindType(tableName, columnInfo);
        if (columnType != null) {
            if (!Number.class.isAssignableFrom(columnType)) {
                return false;
            }
            bindNotNullValueByColumnType(tableName, columnName, conn, ps, bindCount, value, columnType, rowNumber);
            return true;
        }
    }
    // if meta data is not found (basically no way)
    value = filterBigDecimalValue(value);
    if (!isBigDecimalValue(value)) {
        return false;
    }
    final BigDecimal bigDecimalValue = getBigDecimalValue(columnName, value);
    try {
        final long longValue = bigDecimalValue.longValueExact();
        ps.setLong(bindCount, longValue);
        return true;
    } catch (ArithmeticException ignored) {
        ps.setBigDecimal(bindCount, bigDecimalValue);
        return true;
    }
}
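The meta-data-less branch prefers setLong() and falls back to setBigDecimal() only when longValueExact() throws, i.e. when the value has a non-zero fractional part or does not fit in a long. A self-contained sketch of just that decision:

import java.math.BigDecimal;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class NumberBindSketch {

    // binds as long when the decimal fits exactly, otherwise as BigDecimal
    public static void bindNumber(PreparedStatement ps, int bindCount, BigDecimal value) throws SQLException {
        try {
            ps.setLong(bindCount, value.longValueExact());
        } catch (ArithmeticException fractionalOrTooLarge) {
            ps.setBigDecimal(bindCount, value);
        }
    }
}

For example, new BigDecimal("123") is bound via setLong(), while new BigDecimal("123.45") or a 20-digit integer falls through to setBigDecimal().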
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute: the class DfAbsractDataWriter, method processLargeTextFile.
// -----------------------------------------------------
// Large Text File
// ---------------
// contributed by awaawa, thanks!
protected boolean processLargeTextFile(String dataDirectory, File dataFile, String tableName, String columnName, String value, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (value == null || value.trim().length() == 0) {
        // cannot be a large text file
        return false;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo == null) {
        // unsupported when meta data is not found
        return false;
    }
    final Class<?> columnType = getBindType(tableName, columnInfo);
    if (columnType == null) {
        // unsupported too
        return false;
    }
    if (!isLargeTextFile(dataDirectory, tableName, columnName)) {
        // not target as large text file
        return false;
    }
    // the value should be a path to a text file
    // from data file's current directory
    final String path;
    final String trimmedValue = value.trim();
    if (trimmedValue.startsWith("/")) {
        // means absolute path
        path = trimmedValue;
    } else {
        final String dataFilePath = Srl.replace(dataFile.getAbsolutePath(), "\\", "/");
        final String baseDirPath = Srl.substringLastFront(dataFilePath, "/");
        path = baseDirPath + "/" + trimmedValue;
    }
    final File textFile = new File(path);
    if (!textFile.exists()) {
        throwLoadDataTextFileReadFailureException(tableName, columnName, path, rowNumber);
    }
    try {
        final String read = new FileTextIO().encodeAsUTF8().removeUTF8Bom().read(path);
        ps.setString(bindCount, read);
    } catch (RuntimeException e) {
        throwLoadDataTextFileReadFailureException(tableName, columnName, path, rowNumber, e);
    }
    return true;
}
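FileTextIO is DBFlute's own helper. If you only have the JDK at hand, the same "read as UTF-8 and drop a leading BOM" step looks roughly like the sketch below; this is an assumption-level stand-in, not the library code.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class LargeTextReadSketch {

    // reads the whole file as UTF-8 and removes a leading BOM character if present
    public static String readUtf8WithoutBom(String path) throws IOException {
        String text = new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8);
        if (!text.isEmpty() && text.charAt(0) == '\uFEFF') {
            text = text.substring(1); // a UTF-8 BOM decodes to U+FEFF
        }
        return text;
    }
}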