Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in the project dbflute-core by dbflute.
The class DfDataTable, method extractColumnMetaMap.
// ===================================================================================
// Assist Helper
// =============
/**
 * Extract the column meta data of this table as a map keyed by column name.
 * @param metaData The JDBC database meta data to read columns from. (NotNull)
 * @param unifiedSchema The unified schema that owns the table. (NotNull)
 * @return The map of column meta, column name to meta. (NotNull, EmptyAllowed)
 * @throws SQLException When the JDBC meta data access fails.
 */
protected Map<String, DfColumnMeta> extractColumnMetaMap(DatabaseMetaData metaData, UnifiedSchema unifiedSchema) throws SQLException {
    final Map<String, DfColumnMeta> columnMetaMap = new HashMap<String, DfColumnMeta>();
    for (DfColumnMeta columnMeta : _columnExtractor.getColumnList(metaData, unifiedSchema, _tableDbName)) {
        columnMetaMap.put(columnMeta.getColumnName(), columnMeta);
    }
    return columnMetaMap;
}
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in the project dbflute-core by dbflute.
The class DfProcedureExecutionMetaExtractor, method doExtractExecutionMetaData.
/**
 * Call the procedure with generated test values to extract execution meta data,
 * e.g. result-set column meta for not-param results and for cursor out-parameters.
 * The call runs inside a transaction (auto-commit off) that is always rolled back
 * in the finally clause, so the extraction leaves no persistent change behind.
 * An execution failure is continued (the message is saved for a later warning)
 * unless the procedure is explicitly specified by the outside-SQL property,
 * in which case it is rethrown as a dedicated exception.
 * @param dataSource The data source to get a connection from. (NotNull)
 * @param procedure The procedure meta to call and to accept extracted results. (NotNull)
 * @throws SQLException When closing the callable statement fails in the finally clause.
 */
protected void doExtractExecutionMetaData(DataSource dataSource, DfProcedureMeta procedure) throws SQLException {
final List<DfProcedureColumnMeta> columnList = procedure.getProcedureColumnList();
if (!needsToCall(columnList)) {
// e.g. parameter types that cannot be bound with test values; skip with a log
final String name = procedure.buildProcedureLoggingName();
_log.info("...Skipping unneeded call: " + name + " params=" + buildParameterTypeView(columnList));
return;
}
// test values are positional, parallel to the (callable) columns
final List<Object> testValueList = DfCollectionUtil.newArrayList();
setupTestValueList(columnList, testValueList);
final boolean existsReturn = existsReturnValue(columnList);
final String sql = createSql(procedure, existsReturn, true); // true: with escape "{call ...}"
Connection conn = null;
CallableStatement cs = null;
boolean beginTransaction = false; // only restore auto-commit if we actually turned it off
try {
_log.info("...Calling: " + sql);
conn = dataSource.getConnection();
conn.setAutoCommit(false); // transaction so the call can be rolled back below
beginTransaction = true;
cs = conn.prepareCall(sql);
// columns actually bound (some may be skipped), used for out-parameter reading later
final List<DfProcedureColumnMeta> boundColumnList = DfCollectionUtil.newArrayList();
setupBindParameter(conn, cs, columnList, testValueList, boundColumnList);
boolean executed;
try {
executed = cs.execute(); // true means the first result is a ResultSet
} catch (SQLException e) {
// retry without escape because Oracle sometimes hates escape
final String retrySql = createSql(procedure, existsReturn, false);
try {
try {
cs.close(); // best-effort close of the failed statement before retry
} catch (SQLException ignored) {
}
cs = conn.prepareCall(retrySql);
setupBindParameter(conn, cs, columnList, testValueList, boundColumnList);
executed = cs.execute();
_log.info(" (o) retry: " + retrySql);
} catch (SQLException ignored) {
// retry also failed: log and rethrow the ORIGINAL exception (more representative)
_log.info(" (x) retry: " + retrySql);
throw e;
}
}
if (executed) {
// walk all result sets the procedure returned (not-param results)
int closetIndex = 0;
do {
ResultSet rs = null;
try {
rs = cs.getResultSet();
if (rs == null) {
break; // current result is an update count or no more results
}
final Map<String, DfColumnMeta> columnMetaInfoMap = extractColumnMetaInfoMap(rs, sql);
final DfProcedureNotParamResultMeta notParamResult = new DfProcedureNotParamResultMeta();
final String propertyName;
if (procedure.isCalledBySelect() && closetIndex == 0) {
// for example, table valued function
// if the procedure of this type does not have
// second or more result set basically
// but checks closetIndex just in case
propertyName = "returnResult";
} else {
// basically here
propertyName = "notParamResult" + (closetIndex + 1); // 1-origin suffix
}
notParamResult.setPropertyName(propertyName);
notParamResult.setResultSetColumnInfoMap(columnMetaInfoMap);
procedure.addNotParamResult(notParamResult);
++closetIndex;
} finally {
closeResult(rs); // close each result set even if meta extraction fails
}
} while (cs.getMoreResults());
}
// read out-parameters (skipping pure IN parameters) looking for cursors
int index = 0;
for (DfProcedureColumnMeta column : boundColumnList) {
final DfProcedureColumnType columnType = column.getProcedureColumnType();
if (DfProcedureColumnType.procedureColumnIn.equals(columnType)) {
++index;
continue; // IN parameters have no out value to read
}
final int paramIndex = (index + 1); // JDBC parameter indexes are 1-origin
final Object obj;
if (column.isPostgreSQLCursor()) {
obj = _postgreSqlResultSetType.getValue(cs, paramIndex);
} else if (column.isOracleCursor()) {
obj = _oracleResultSetType.getValue(cs, paramIndex);
} else {
// as default
obj = cs.getObject(paramIndex);
}
if (obj instanceof ResultSet) {
// cursor out-parameter: extract its result-set column meta
ResultSet rs = null;
try {
rs = (ResultSet) obj;
final Map<String, DfColumnMeta> columnMetaInfoMap = extractColumnMetaInfoMap(rs, sql);
column.setResultSetColumnInfoMap(columnMetaInfoMap);
} finally {
closeResult(rs);
}
}
++index;
}
} catch (SQLException e) {
// build a rich failure message with SQL, parameters and test values
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("Failed to execute the procedure for getting meta data.");
br.addItem("SQL");
br.addElement(sql);
br.addItem("Parameter");
for (DfProcedureColumnMeta column : columnList) {
br.addElement(column.getColumnDisplayName());
}
br.addItem("Test Value");
br.addElement(buildTestValueDisp(testValueList));
br.addItem("Exception Message");
br.addElement(DfJDBCException.extractMessage(e));
final SQLException nextEx = e.getNextException();
if (nextEx != null) {
br.addElement(DfJDBCException.extractMessage(nextEx));
}
final String msg = br.buildExceptionMessage();
final DfOutsideSqlProperties prop = getProperties().getOutsideSqlProperties();
if (prop.hasSpecifiedExecutionMetaProcedure()) {
// the user explicitly asked for this procedure, so failing loudly is correct
throw new DfProcedureExecutionMetaGettingFailureException(msg, e);
} else {
// if no specified, it continues
_continuedFailureMessageMap.put(procedure.getProcedureFullQualifiedName(), msg);
_log.info("*Failed to call so read the warning message displayed later");
}
} finally {
if (cs != null) {
cs.close();
}
if (conn != null) {
try {
// always roll back: the call was only for meta extraction
conn.rollback();
} catch (SQLException continued) {
// one day Oracle suddenly threw it (by socket trouble?)
final String exp = DfJDBCException.extractMessage(continued);
_log.info("*Failed to roll-back the procedure call but continued: " + exp);
}
if (beginTransaction) {
try {
// restore the connection state before returning it to the pool
conn.setAutoCommit(true);
} catch (SQLException continued) {
final String exp = DfJDBCException.extractMessage(continued);
_log.info("*Failed to set auto-commit true: " + exp);
}
}
}
}
}
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in the project dbflute-core by dbflute.
The class DfProcedurePmbSetupper, method setupStructAttribute.
/**
 * Set up the attributes of a STRUCT type, resolving nested ARRAY and STRUCT
 * attributes recursively into Sql2Entity forced Java-native types.
 * An array attribute becomes a generic List of its resolved element type;
 * a nested struct attribute becomes its (possibly newly registered) entity type.
 * @param structInfo The struct whose attributes are set up. (NotNull)
 * @param propertyInfo The property info of the procedure parameter that owns the struct. (NotNull)
 */
protected void setupStructAttribute(DfTypeStructInfo structInfo, ProcedurePropertyInfo propertyInfo) {
final StringKeyMap<DfColumnMeta> attrMap = structInfo.getAttributeInfoMap();
for (DfColumnMeta attrInfo : attrMap.values()) {
// nested array or struct handling
if (attrInfo.hasTypeArrayInfo()) {
// array in struct
final DfTypeArrayInfo typeArrayInfo = attrInfo.getTypeArrayInfo();
if (typeArrayInfo.hasElementStructInfo()) {
// struct in array in struct
// register the element struct as an entity before resolving types
registerEntityInfoIfNeeds(typeArrayInfo.getElementStructInfo(), propertyInfo);
}
if (typeArrayInfo.hasElementJavaNative()) {
// element type already resolved: just wrap it in a List
final String elementJavaNative = typeArrayInfo.getElementJavaNative();
attrInfo.setSql2EntityForcedJavaNative(getGenericListClassName(elementJavaNative));
} else {
// element type not resolved yet: derive it by the element kind
final String elementType;
if (typeArrayInfo.hasNestedArray()) {
// array in array in struct
final DfTypeArrayInfo nestedArrayInfo = typeArrayInfo.getNestedArrayInfo();
elementType = getGenericListClassName(doProcessArrayProperty(nestedArrayInfo, propertyInfo));
} else if (typeArrayInfo.hasElementStructInfo()) {
// struct in array in struct
final DfTypeStructInfo elementStructInfo = typeArrayInfo.getElementStructInfo();
elementType = buildStructEntityType(elementStructInfo);
} else {
// scalar in array in struct
elementType = findArrayScalarElementPropertyType(attrInfo.getTypeArrayInfo());
}
// cache the resolved element type on the array info, then force List<element>
typeArrayInfo.setElementJavaNative(elementType);
attrInfo.setSql2EntityForcedJavaNative(getGenericListClassName(elementType));
}
} else if (attrInfo.hasTypeStructInfo()) {
// struct in struct: register it as an entity and use its entity type
final DfTypeStructInfo nestedStructInfo = attrInfo.getTypeStructInfo();
registerEntityInfoIfNeeds(nestedStructInfo, propertyInfo);
if (nestedStructInfo.hasEntityType()) {
attrInfo.setSql2EntityForcedJavaNative(nestedStructInfo.getEntityType());
} else {
// NOTE(review): presumably registerEntityInfoIfNeeds may skip setting
// the entity type, so the type is built directly here — confirm
attrInfo.setSql2EntityForcedJavaNative(buildStructEntityType(nestedStructInfo));
}
}
}
}
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in the project dbflute-core by dbflute.
The class DfCustomizeEntityInfo, method acceptSelectColumnComment.
// ===================================================================================
// Select Column Comment
// =====================
/**
 * Accept select-column comments (e.g. from outside-SQL) into this entity's column meta.
 * A comment beginning with "*" additionally marks the column as required (not-null).
 * @param commentMap The map of comments keyed by column name. (NullAllowed: if null or empty, do nothing)
 */
public void acceptSelectColumnComment(Map<String, String> commentMap) {
    if (commentMap == null || commentMap.isEmpty()) {
        return; // no comments to accept
    }
    for (Entry<String, DfColumnMeta> entry : _columnMap.entrySet()) {
        // commentMap should be flexible
        final String comment = commentMap.get(entry.getKey());
        if (!Srl.is_NotNull_and_NotTrimmedEmpty(comment)) {
            continue; // no comment provided for this column
        }
        final DfColumnMeta columnMeta = entry.getValue();
        columnMeta.setColumnComment(comment); // basically new-set (get no meta comment)
        if (comment.startsWith("*")) {
            // means not-null e.g. -- // *Member Name
            columnMeta.setRequired(true);
        }
    }
}
Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in the project dbflute-core by dbflute.
The class DfColumnExtractor, method doGetColumnList.
/**
 * Get the list of column meta for the table via JDBC database meta data.
 * Excepted columns and columns of differently-named meta tables are filtered,
 * and duplicate column names are ignored with a warning log.
 * @param metaData The JDBC database meta data. (NotNull)
 * @param unifiedSchema The unified schema that may own the table. (NotNull)
 * @param tableName The name of the table to get columns of. (NotNull)
 * @param retry Does it retry (used when extracting the meta result set)?
 * @return The list of column meta. (NotNull, EmptyAllowed: when the result set is null or all filtered)
 * @throws SQLException When the JDBC meta data access fails.
 */
protected List<DfColumnMeta> doGetColumnList(DatabaseMetaData metaData, UnifiedSchema unifiedSchema, String tableName, boolean retry) throws SQLException {
final List<DfColumnMeta> columnList = DfCollectionUtil.newArrayList();
// Column names for duplicate check
final StringSet columnNameSet = StringSet.createAsFlexible();
// Duplicate objects for warning log
final StringSet duplicateTableNameSet = StringSet.createAsFlexible();
final StringSet duplicateColumnNameSet = StringSet.createAsFlexible();
ResultSet rs = null;
try {
rs = extractColumnMetaData(metaData, unifiedSchema, tableName, retry);
if (rs == null) {
return DfCollectionUtil.newArrayList(); // no meta result: empty (fresh instance, NotNull)
}
while (rs.next()) {
// /- - - - - - - - - - - - - - - - - - - - - - - - - - -
// same policy of table process (see DfTableHandler.java)
// - - - - - - - - - -/
// ordinal indexes below follow DatabaseMetaData#getColumns():
// 3=TABLE_NAME, 4=COLUMN_NAME, 5=DATA_TYPE, 6=TYPE_NAME,
// 7=COLUMN_SIZE, 9=DECIMAL_DIGITS, 11=NULLABLE, 12=REMARKS, 13=COLUMN_DEF
final String columnName = rs.getString(4);
if (isColumnExcept(unifiedSchema, tableName, columnName)) {
continue; // excepted by user setting
}
final String metaTableName = rs.getString(3);
if (checkMetaTableDiffIfNeeds(tableName, metaTableName)) {
continue; // meta row belongs to a differently-named table
}
// filter duplicate objects
if (columnNameSet.contains(columnName)) {
duplicateTableNameSet.add(metaTableName);
duplicateColumnNameSet.add(columnName);
// ignored with warning
continue;
}
columnNameSet.add(columnName);
// DATA_TYPE read as string then parsed (NOTE(review): assumes the driver
// returns a numeric string here — confirm; rs.getInt(5) would be typical)
final Integer jdbcTypeCode = Integer.valueOf(rs.getString(5));
final String dbTypeName = rs.getString(6);
final Integer columnSize = Integer.valueOf(rs.getInt(7));
final Integer decimalDigits = rs.getInt(9);
final Integer nullType = Integer.valueOf(rs.getInt(11));
final String columnComment = rs.getString(12);
final String defaultValue = rs.getString(13);
final DfColumnMeta columnMeta = new DfColumnMeta();
columnMeta.setTableName(metaTableName);
columnMeta.setColumnName(columnName);
columnMeta.setJdbcDefValue(jdbcTypeCode);
columnMeta.setDbTypeName(dbTypeName);
columnMeta.setColumnSize(columnSize);
columnMeta.setDecimalDigits(decimalDigits);
columnMeta.setRequired(nullType == 0); // 0 = columnNoNulls means required
columnMeta.setColumnComment(columnComment);
columnMeta.setDefaultValue(filterDefaultValue(defaultValue));
columnList.add(columnMeta);
}
} finally {
if (rs != null) {
rs.close();
}
}
// Show duplicate objects if exists
if (!duplicateColumnNameSet.isEmpty()) {
String msg = "*Duplicate meta data was found:";
msg = msg + "\n[" + tableName + "]";
msg = msg + "\n duplicate tables = " + duplicateTableNameSet;
msg = msg + "\n duplicate columns = " + duplicateColumnNameSet;
_log.info(msg);
}
return columnList;
}
Aggregations