Use of com.cubrid.common.core.common.model.PartitionInfo in project cubrid-manager by CUBRID.
The class DataCompareEditorPart, method doRefresh:
private void doRefresh(boolean collectRecordCount) {
// FIXME logic code move to core module
Map<String, DataCompare> dataCompareMap = new HashMap<String, DataCompare>();
if (compareList != null) {
for (DataCompare dataCompare : compareList) {
dataCompareMap.put(dataCompare.getTableName(), dataCompare);
}
}
DatabaseInfo sourceDB = ((DataCompareEditorInput) getEditorInput()).getSourceDB();
DatabaseInfo targetDB = ((DataCompareEditorInput) getEditorInput()).getTargetDB();
if (logFileBaseName != null) {
FileUtil.delete(logFileBasePath + File.separatorChar + logFileBaseName);
logFileBaseName = null;
}
logFileBaseName = sourceDB.getDbName() + "_" + System.currentTimeMillis();
GetAllSchemaTask sourceSchemaTask = new GetAllSchemaTask(sourceDB);
GetAllSchemaTask targetSchemaTask = new GetAllSchemaTask(targetDB);
TaskExecutor taskExecutor = new CommonTaskExec(Messages.loadEntireSchemaComparison);
taskExecutor.addTask(sourceSchemaTask);
taskExecutor.addTask(targetSchemaTask);
new ExecTaskWithProgress(taskExecutor).exec();
if (taskExecutor.isSuccess()) {
synchronized (compareList) {
compareList.clear();
Set<String> partitions = new HashSet<String>();
List<SchemaInfo> sourceList = sourceSchemaTask.getSchemaList();
List<SchemaInfo> targetList = targetSchemaTask.getSchemaList();
for (SchemaInfo schemaInfo : sourceList) {
if (schemaInfo.getPartitionList() != null) {
for (PartitionInfo partition : schemaInfo.getPartitionList()) {
String partClassName = partition.getPartitionClassName();
partitions.add(partClassName);
}
}
}
for (SchemaInfo schemaInfo : sourceList) {
DataCompare dataCompare = dataCompareMap.get(schemaInfo.getClassname());
if (dataCompare == null) {
dataCompare = new DataCompare();
dataCompare.setTableName(schemaInfo.getClassname());
dataCompare.setSchemaInfo(schemaInfo);
dataCompare.setRefreshed(false);
} else {
dataCompare.setMatches(0);
dataCompare.setNotExists(0);
dataCompare.setNotMatches(0);
dataCompare.setProgressPosition(0);
}
if (schemaInfo.hasPK() && !partitions.contains(schemaInfo.getClassname())) {
SchemaInfo targetSchemeInfo = getSchemeInfoByName(schemaInfo.getClassname(), targetList);
boolean isSameSchema = canCompareData(schemaInfo, targetSchemeInfo);
dataCompare.setSameSchema(isSameSchema);
compareList.add(dataCompare);
}
}
Collections.sort(compareList, new Comparator<DataCompare>() {
public int compare(DataCompare o1, DataCompare o2) {
if (o1 == null || o1.getTableName() == null) {
return -1;
} else if (o2 == null || o2.getTableName() == null) {
return 1;
}
return o1.getTableName().compareToIgnoreCase(o2.getTableName());
}
});
}
compareTableViewer.refresh();
}
if (!collectRecordCount) {
return;
}
totalRecords = 0;
completedRecords = 0;
refreshRecordCount();
refreshedRecordCounts = true;
}
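The PartitionInfo usage here is the first loop over the source schema list: each table's PartitionInfo.getPartitionClassName() values are collected into a set, so that only real tables with a primary key are added to the comparison list and partition sub-tables are skipped. Below is a minimal stand-alone sketch of that filtering step; it relies only on the accessors visible in the snippet, and the SchemaInfo import is assumed to come from the same model package as PartitionInfo.

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.cubrid.common.core.common.model.PartitionInfo;
import com.cubrid.common.core.common.model.SchemaInfo;

// Sketch of the partition-aware table filter used by doRefresh().
public final class PartitionTableFilter {
    private PartitionTableFilter() {
    }

    // Collect the names of all partition sub-tables found in the schema list.
    public static Set<String> collectPartitionClassNames(List<SchemaInfo> schemaList) {
        Set<String> partitions = new HashSet<String>();
        for (SchemaInfo schemaInfo : schemaList) {
            List<PartitionInfo> partitionList = schemaInfo.getPartitionList();
            if (partitionList == null) {
                continue;
            }
            for (PartitionInfo partition : partitionList) {
                partitions.add(partition.getPartitionClassName());
            }
        }
        return partitions;
    }

    // A table is compared only when it has a PK and is not itself a partition sub-table.
    public static boolean isComparable(SchemaInfo schemaInfo, Set<String> partitionClassNames) {
        return schemaInfo.hasPK() && !partitionClassNames.contains(schemaInfo.getClassname());
    }
}

Skipping the sub-tables presumably avoids comparing the same rows twice, since the parent partitioned table already covers them.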
Use of com.cubrid.common.core.common.model.PartitionInfo in project cubrid-manager by CUBRID.
The class SchemaDDLTest, method testdata:
/**
* Test SchemaDDL generation, including resolution, superclass, and partition DDL.
*
*/
@SuppressWarnings("unused")
public void testdata() throws Exception {
boolean success = createTestTable();
assertTrue(success);
SchemaInfo sup1 = databaseInfo.getSchemaInfo("sup1");
SchemaInfo sup2 = databaseInfo.getSchemaInfo("sup2");
SchemaInfo test = databaseInfo.getSchemaInfo("test");
SchemaInfo testSuperTableName = databaseInfo.getSchemaInfo("testSuperTableName");
SchemaInfo testTableName = databaseInfo.getSchemaInfo("testTableName");
SchemaChangeManager changeList = new SchemaChangeManager(databaseInfo, true);
List<SchemaChangeLog> changeListNoAuto = new ArrayList<SchemaChangeLog>();
//SchemeChangeLog schemeChangeLog= new SchemeChangeLog();
changeList.addSchemeChangeLog(new SchemaChangeLog("a", null, SchemeInnerType.TYPE_CLASSATTRIBUTE));
changeList.addSchemeChangeLog(new SchemaChangeLog("a", null, SchemeInnerType.TYPE_ATTRIBUTE));
changeList.addSchemeChangeLog(new SchemaChangeLog(null, "fk", SchemeInnerType.TYPE_FK));
changeList.addSchemeChangeLog(new SchemaChangeLog("fk", null, SchemeInnerType.TYPE_FK));
Constraint index = testTableName.getConstraintByName("index", "UNIQUE");
changeList.addSchemeChangeLog(new SchemaChangeLog(index.getDefaultName(testTableName.getClassname()) + "$" + //$NON-NLS-1$
index.getName(), //$NON-NLS-1$
null, SchemeInnerType.TYPE_INDEX));
changeList.addSchemeChangeLog(new SchemaChangeLog("a", "a", SchemeInnerType.TYPE_ATTRIBUTE));
changeList.addSchemeChangeLog(new SchemaChangeLog("a", "a", SchemeInnerType.TYPE_ATTRIBUTE));
//changeListNoAuto.add(o)
changeList.setChangeList(changeListNoAuto);
SchemaDDL ddl = new SchemaDDL(changeList, databaseInfo);
SchemaChangeManager changeList2 = new SchemaChangeManager(databaseInfo, false);
SchemaDDL ddl2 = new SchemaDDL(changeList2, databaseInfo);
List<String[]> columnConflicts = SuperClassUtil.getColumnConflicts(databaseInfo, testTableName, testTableName.getSuperClasses(), true);
String[][] classConflicts = columnConflicts.toArray(new String[columnConflicts.size()][]);
columnConflicts = SuperClassUtil.getColumnConflicts(databaseInfo, testTableName, testTableName.getSuperClasses(), false);
String[][] conflicts = columnConflicts.toArray(new String[columnConflicts.size()][]);
ddl.getSchemaDDL(sup1);
ddl.getSchemaDDL(sup2);
ddl.getSchemaDDL(sup1, sup2);
ddl.getAlterDDL(sup1, sup2);
ddl.getSchemaDDL(testTableName);
ddl.getSchemaDDL(testTableName, testTableName);
ddl.getAlterDDL(testTableName, testTableName);
ddl.getSchemaDDL(testTableName, sup1);
ddl.getSchemaDDL(sup1, testTableName);
ddl2.getSchemaDDL(sup1);
ddl2.getSchemaDDL(sup2);
ddl2.getSchemaDDL(sup1, sup2);
ddl2.getAlterDDL(sup1, sup2);
ddl2.getSchemaDDL(testTableName);
ddl2.getSchemaDDL(testTableName, testTableName);
ddl2.getAlterDDL(testTableName, testTableName);
ddl2.getSchemaDDL(testTableName, sup1);
ddl2.getSchemaDDL(sup1, testTableName);
String name = "name";
String className = "className";
String alias = "alias";
boolean isClassResolution = true;
DBResolution oldResolutions = new DBResolution(name, className, alias);
oldResolutions.setName(name);
oldResolutions.setClassName(className);
oldResolutions.setAlias(alias);
oldResolutions.setClassResolution(isClassResolution);
DBResolution newResolutions = new DBResolution(name, className, alias);
newResolutions.setName(name);
newResolutions.setClassName(className);
newResolutions.setAlias(alias);
newResolutions.setClassResolution(isClassResolution);
List<DBResolution> oldResolution = new ArrayList<DBResolution>();
oldResolution.add(oldResolutions);
List<DBResolution> newResolution = new ArrayList<DBResolution>();
newResolution.add(newResolutions);
ddl.getResolutionChanges(oldResolution, newResolution);
List<String> oldSupers = new ArrayList<String>();
oldSupers.add("oldstring");
List<String> newSupers = new ArrayList<String>();
newSupers.add("newstring");
ddl.getSuperclassChanges(oldSupers, newSupers);
ddl.getAddSuperClassDDL("tableName", newSupers, oldResolution, newResolution);
ddl.getDropSuperClassesDDL("tableName", newSupers);
ddl.getChangeOwnerDDL("tableName", "newOwner");
String aname = "name";
String type = "type";
// the class from which this attribute is inherited
String inherit = "inherit";
boolean indexed = true;
boolean notNull = true;
boolean shared = true;
boolean unique = true;
String defaultValue = "defaultValue";
SerialInfo autoIncrement = null;
String domainClassName = "domainClassName";
boolean isClassAttribute = true;
DBAttribute dbAttribute = new DBAttribute(aname, type, inherit, indexed, notNull, shared, unique, defaultValue, "iso88591_bin");
ddl.getAddColumnDDL("tableName", dbAttribute, newSupers, sup1);
ddl.setEndLineChar("endLineChar");
String aclassName = "className";
String partitionName = "partitionName";
String partitionClassName = "partitionClassName";
PartitionType partitionType = PartitionType.HASH;
String partitionExpr = "partitionExpr";
final List<String> partitionValues = new ArrayList<String>();
partitionValues.add("str");
partitionValues.add("str1");
final List<String> partitionValues2 = new ArrayList<String>();
partitionValues.add("str");
partitionValues.add(null);
int rows = -1;
PartitionInfo partitionInfo4 = new PartitionInfo(aclassName, partitionName, partitionClassName, PartitionType.LIST, partitionExpr, partitionValues, rows);
PartitionInfo partitionInfo6 = new PartitionInfo(aclassName, partitionName, partitionClassName, partitionType, partitionExpr, partitionValues2, rows);
PartitionInfo partitionInfo7 = new PartitionInfo(aclassName, partitionName, partitionClassName, PartitionType.RANGE, partitionExpr, partitionValues, rows);
List<PartitionInfo> partInfoList = new ArrayList<PartitionInfo>();
partInfoList.add(partitionInfo4);
ddl.getTransformToPartitionDDL(partInfoList);
List<PartitionInfo> partInfoListRange = new ArrayList<PartitionInfo>();
partInfoListRange.add(partitionInfo7);
ddl.getTransformToPartitionDDL(partInfoListRange);
ddl.getTransformToGenericDDL("tableName");
ddl.getAddPartitionDDL(partitionInfo4);
ddl.getDelPartitionDDL("tableName", "partName");
PartitionInfo partitionInfo5 = new PartitionInfo(aclassName, partitionName, partitionClassName, partitionType, "partitionExpr1", partitionValues, -1);
List<PartitionInfo> newPartInfoList = new ArrayList<PartitionInfo>();
newPartInfoList.add(partitionInfo5);
newPartInfoList.add(partitionInfo7);
ddl.getCoalescePartitionDDL(partInfoList, newPartInfoList);
ddl.getCoalescePartitionDDL(newPartInfoList, partInfoList);
ddl.getCoalescePartitionDDL(newPartInfoList, partInfoListRange);
ddl.getSplitPartitionDDL(partInfoList, newPartInfoList);
partInfoList.clear();
partInfoList.add(partitionInfo6);
ddl.getSplitPartitionDDL(partInfoList, newPartInfoList);
ddl.getAlterAutoIncrementDDL("tableName", "columnName");
partInfoList.clear();
partitionInfo4 = new PartitionInfo(aclassName, partitionName, partitionClassName, PartitionType.RANGE, partitionExpr, partitionValues, rows);
partInfoList.add(partitionInfo4);
ddl.getSplitPartitionDDL(partInfoList, newPartInfoList);
partInfoList.clear();
partitionInfo4 = new PartitionInfo(aclassName, partitionName, partitionClassName, PartitionType.LIST, partitionExpr, partitionValues, rows);
partInfoList.add(partitionInfo4);
ddl.getSplitPartitionDDL(partInfoList, newPartInfoList);
}
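All of the partition DDL calls above consume PartitionInfo objects built with the seven-argument constructor. The sketch below shows the three flavors (HASH, LIST, RANGE) in isolation; the table, partition, and expression names are illustrative only, the "<table>__p__<partition>" sub-table naming is an assumption about CUBRID's convention, and PartitionType is assumed to live in the same model package as PartitionInfo.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.cubrid.common.core.common.model.PartitionInfo;
import com.cubrid.common.core.common.model.PartitionType;

// Illustrative PartitionInfo instances of the three partition types used in the test.
public final class PartitionInfoSamples {
    public static List<PartitionInfo> build() {
        List<PartitionInfo> result = new ArrayList<PartitionInfo>();
        // HASH partitions carry no value list; only the partitioning expression matters.
        result.add(new PartitionInfo("athlete", "p0", "athlete__p__p0",
                PartitionType.HASH, "code", new ArrayList<String>(), -1));
        // LIST partitions enumerate the values that map to the partition.
        result.add(new PartitionInfo("athlete", "p_kor", "athlete__p__p_kor",
                PartitionType.LIST, "nation_code", Arrays.asList("KOR", "PRK"), -1));
        // RANGE partitions keep the range bound in the value list; the test above
        // also passes null entries.
        result.add(new PartitionInfo("athlete", "p_max", "athlete__p__p_max",
                PartitionType.RANGE, "code", Arrays.asList((String) null), -1));
        return result;
    }
}

Feeding a list like this into getTransformToPartitionDDL() or getAddPartitionDDL(), as the test does, exercises the DDL generator for each partition type.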
Use of com.cubrid.common.core.common.model.PartitionInfo in project cubrid-manager by CUBRID.
The class PartitionInfoTest, method testPartitionInfo:
/**
* Test PartitionInfo
*/
public final void testPartitionInfo() {
String className = "className";
String partitionName = "partitionName";
String partitionClassName = "partitionClassName";
PartitionType partitionType = PartitionType.HASH;
String partitionExpr = "partitionExpr";
final List<String> partitionValues = new ArrayList<String>();
int rows = -1;
// test getters and setters
PartitionInfo partitionInfo1 = new PartitionInfo();
PartitionInfo partitionInfo2 = new PartitionInfo(className, partitionType);
PartitionInfo partitionInfo3 = new PartitionInfo(className, partitionName, partitionType, partitionExpr, partitionValues, rows);
PartitionInfo partitionInfo4 = new PartitionInfo(className, partitionName, partitionClassName, partitionType, partitionExpr, partitionValues, rows);
partitionInfo4.setPartitionValues(partitionValues);
partitionInfo4.setClassName(className);
partitionInfo4.setPartitionName(partitionName);
partitionInfo4.setPartitionType(partitionType);
partitionInfo4.setPartitionExpr(partitionExpr);
partitionInfo4.setRows(rows);
assertEquals(partitionInfo4.getClassName(), className);
assertEquals(partitionInfo4.getPartitionName(), partitionName);
assertNotSame(partitionInfo4.getPartitionClassName(), partitionClassName);
assertEquals(partitionInfo4.getPartitionType(), partitionType);
assertEquals(partitionInfo4.getPartitionExpr(), partitionExpr);
assertEquals(partitionInfo4.getRows(), -1);
// test public PartitionInfo clone()
PartitionInfo partitionInfoClone1 = partitionInfo1.clone();
assertNotNull(partitionInfoClone1);
PartitionInfo partitionInfoClone2 = partitionInfo2.clone();
assertNotNull(partitionInfoClone2);
PartitionInfo partitionInfoClone3 = partitionInfo3.clone();
assertNotNull(partitionInfoClone3);
partitionInfo4.toString();
partitionInfo4.addPartitionValue("value");
partitionInfo4.setPartitionValues(partitionValues);
partitionInfo4.removePartitionValue("value");
partitionInfo4.addPartitionValue("value");
partitionInfo4.getPartitionValuesString();
partitionInfo4.getPartitionValuesString(true);
partitionInfo4.addPartitionValue("value");
partitionInfo4.getPartitionValuesString();
partitionInfo4.getPartitionValuesString(false);
partitionInfo4.addPartitionValue("value1");
partitionInfo4.addPartitionValue("value2");
partitionInfo4.addPartitionValue("value3");
partitionInfo4.removePartitionValue("value");
partitionInfo4.equals(partitionInfo4);
partitionInfo4.equals("aaa");
PartitionInfo partitionInfo5 = new PartitionInfo(className, partitionName, partitionClassName, partitionType, partitionExpr, partitionValues, rows);
partitionInfo5.setPartitionValues(partitionValues);
partitionInfo5.setClassName(className);
partitionInfo5.setPartitionName(partitionName);
partitionInfo5.setPartitionType(partitionType);
partitionInfo5.setPartitionExpr(partitionExpr);
partitionInfo5.setRows(rows);
partitionInfo4.equals(partitionInfo5);
partitionInfo5.addPartitionValue("value5");
partitionInfo5.setPartitionValues(partitionValues);
partitionInfo5.setClassName("className5");
partitionInfo5.setPartitionName("partitionName5");
partitionInfo5.setPartitionType(PartitionType.LIST);
partitionInfo5.setPartitionExpr("partitionExpr5");
partitionInfo5.setRows(5);
partitionInfo4.equals(partitionInfo5);
partitionInfo5.setPartitionType(partitionType);
partitionInfo5.setClassName(className);
partitionInfo4.equals(partitionInfo5);
partitionInfo5.setPartitionType(partitionType);
partitionInfo5.setClassName(className);
partitionInfo5.setPartitionName(partitionName);
partitionInfo4.equals(partitionInfo5);
partitionInfo5.setPartitionType(partitionType);
partitionInfo5.setClassName(className);
partitionInfo5.setPartitionName(partitionName);
partitionInfo4.setPartitionType(PartitionType.LIST);
partitionInfo5.setPartitionType(PartitionType.RANGE);
partitionInfo4.equals(partitionInfo5);
partitionInfo4.setPartitionType(PartitionType.LIST);
partitionInfo5.setPartitionType(PartitionType.LIST);
partitionInfo5.setClassName(className);
partitionInfo5.setPartitionName(partitionName);
partitionInfo5.setPartitionExpr(partitionExpr);
partitionInfo4.equals(partitionInfo5);
partitionInfo1.setPartitionType(PartitionType.HASH);
partitionInfo2.setPartitionType(PartitionType.HASH);
partitionInfo1.setClassName(null);
partitionInfo2.setClassName(null);
partitionInfo1.equals(partitionInfo2);
partitionInfo1.setClassName("a");
partitionInfo2.setClassName(null);
partitionInfo1.equals(partitionInfo2);
partitionInfo1.setClassName(null);
partitionInfo2.setClassName("b");
partitionInfo1.equals(partitionInfo2);
}
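Most of these calls only check that the value-list helpers and equals()/clone() run without throwing. For reference, here is a compact usage sketch of the same accessors, assuming only the constructors and methods exercised above (PartitionType again assumed to sit next to PartitionInfo):

import java.util.ArrayList;
import com.cubrid.common.core.common.model.PartitionInfo;
import com.cubrid.common.core.common.model.PartitionType;

// Round trip through the PartitionInfo value-list API exercised by the test.
public class PartitionValueDemo {
    public static void main(String[] args) {
        PartitionInfo info = new PartitionInfo("code_table", PartitionType.LIST);
        info.setPartitionName("p_kor");
        info.setPartitionExpr("nation_code");
        info.setPartitionValues(new ArrayList<String>());
        info.addPartitionValue("KOR");
        info.addPartitionValue("PRK");
        // Renders the value list as a string; the boolean selects the formatting variant.
        System.out.println(info.getPartitionValuesString(true));
        info.removePartitionValue("PRK");
        // clone() returns a copy; equals() compares the partition metadata fields
        // (class name, partition name, type, expression) that the test varies above.
        PartitionInfo copy = info.clone();
        System.out.println(info.equals(copy));
    }
}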
Use of com.cubrid.common.core.common.model.PartitionInfo in project cubrid-manager by CUBRID.
The class GetAllSchemaTask, method getPartitionInfo:
/**
* Get the partition information of each table and attach it to the
* corresponding SchemaInfo.
*
* @throws SQLException when the catalog query fails
*/
private void getPartitionInfo() throws SQLException {
final String sql = "SELECT class_name, partition_name, partition_class_name," + " partition_type, partition_expr, partition_values" + " FROM db_partition" + " ORDER BY class_name";
stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
String exprDataType = null;
while (rs.next()) {
String className = rs.getString("class_name");
String partitionName = rs.getString("partition_name");
String partitionClassName = rs.getString("partition_class_name");
String partitionExpr = rs.getString("partition_expr");
SchemaInfo schemaInfo = schemas.get(className);
if (schemaInfo == null) {
LOGGER.error("Table " + className + " not found on the schema info.");
continue;
}
PartitionType partitionType = null;
String partitionTypeStr = rs.getString("partition_type");
if (partitionTypeStr.equalsIgnoreCase("HASH")) {
partitionType = PartitionType.HASH;
} else if (partitionTypeStr.equalsIgnoreCase("LIST")) {
partitionType = PartitionType.LIST;
} else if (partitionTypeStr.equalsIgnoreCase("RANGE")) {
partitionType = PartitionType.RANGE;
}
List<String> partitionValues = new ArrayList<String>();
if (partitionType != PartitionType.HASH) {
Object obj = rs.getObject("partition_values");
if (obj == null) {
continue;
}
Object[] arr = (Object[]) obj;
for (int i = 0, len = arr.length; i < len; i++) {
if (arr[i] == null) {
partitionValues.add(null);
} else {
partitionValues.add(arr[i].toString());
}
}
}
PartitionInfo partitionItem = new PartitionInfo(className, partitionName, partitionClassName, partitionType, partitionExpr, partitionValues, -1);
List<PartitionInfo> result = schemaInfo.getPartitionList();
if (result == null) {
result = new ArrayList<PartitionInfo>();
schemaInfo.setPartitionList(result);
}
if (exprDataType == null && partitionExpr != null && partitionExpr.trim().length() > 0) {
exprDataType = getExprDataType(className, partitionExpr);
}
partitionItem.setPartitionExprType(exprDataType);
result.add(partitionItem);
}
}
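Stripped of the task plumbing (the schemas map, the expression-type lookup, the logger) and with explicit resource clean-up added, the catalog read boils down to mapping rows of db_partition onto PartitionInfo objects. A self-contained sketch of that mapping follows; it assumes a plain JDBC Connection to a CUBRID database, that the driver returns the partition_values column as an Object[] (as the code above relies on), and that PartitionType lives in the same model package as PartitionInfo.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import com.cubrid.common.core.common.model.PartitionInfo;
import com.cubrid.common.core.common.model.PartitionType;

// Reads db_partition and builds one PartitionInfo per catalog row.
public final class DbPartitionReader {
    public static List<PartitionInfo> read(Connection connection) throws SQLException {
        String sql = "SELECT class_name, partition_name, partition_class_name,"
                + " partition_type, partition_expr, partition_values"
                + " FROM db_partition ORDER BY class_name";
        List<PartitionInfo> result = new ArrayList<PartitionInfo>();
        Statement stmt = connection.createStatement();
        try {
            ResultSet rs = stmt.executeQuery(sql);
            try {
                while (rs.next()) {
                    PartitionType type = toPartitionType(rs.getString("partition_type"));
                    List<String> values = new ArrayList<String>();
                    if (type != PartitionType.HASH) {
                        // LIST/RANGE rows carry their values as a collection column.
                        Object[] arr = (Object[]) rs.getObject("partition_values");
                        if (arr != null) {
                            for (Object v : arr) {
                                values.add(v == null ? null : v.toString());
                            }
                        }
                    }
                    result.add(new PartitionInfo(rs.getString("class_name"),
                            rs.getString("partition_name"),
                            rs.getString("partition_class_name"),
                            type, rs.getString("partition_expr"), values, -1));
                }
            } finally {
                rs.close();
            }
        } finally {
            stmt.close();
        }
        return result;
    }

    private static PartitionType toPartitionType(String typeName) {
        if ("HASH".equalsIgnoreCase(typeName)) {
            return PartitionType.HASH;
        } else if ("LIST".equalsIgnoreCase(typeName)) {
            return PartitionType.LIST;
        } else if ("RANGE".equalsIgnoreCase(typeName)) {
            return PartitionType.RANGE;
        }
        return null;
    }
}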
Use of com.cubrid.common.core.common.model.PartitionInfo in project cubrid-manager by CUBRID.
The class ConstraintComparator, method getAlterDDL:
/**
* Return the ALTER DDL for a schema. Some changes are taken from the change
* logs (SchemaChangeManager); the others are found by comparing the old and
* new schema objects.
*
* @param oldSchemaInfo SchemaInfo the old schema definition
* @param newSchemaInfo SchemaInfo the new schema definition
* @return String the generated DDL
*/
public String getAlterDDL(SchemaInfo oldSchemaInfo, SchemaInfo newSchemaInfo) {
DDLGenerator generator = new DDLGenerator();
if (oldSchemaInfo == null) {
return null;
}
List<SchemaInfo> oldSupers = SuperClassUtil.getSuperClasses(databaseInfo, oldSchemaInfo);
if (oldSupers == null) {
return null;
}
List<SchemaInfo> newSupers = SuperClassUtil.getSuperClasses(databaseInfo, newSchemaInfo);
if (newSupers == null) {
return null;
}
// old --> new
Map<String, String> attrMap = new HashMap<String, String>();
//Generate the DDL for rename table
String oldTableName = oldSchemaInfo.getClassname().toLowerCase();
String newTableName = newSchemaInfo.getClassname().toLowerCase();
String tableName = oldTableName;
if (!oldTableName.equals(newTableName)) {
String renameDDL = getRenameTableDDL(oldTableName, newTableName);
generator.addSchemaDDLMode(DDLGenerator.TYPE_REBANE_TABLE, newSchemaInfo, renameDDL);
tableName = newTableName;
}
String oldCollation = oldSchemaInfo.getCollation();
String newCollation = newSchemaInfo.getCollation();
if (!StringUtil.isEmpty(newCollation) && !StringUtil.isEqualNotIgnoreNull(oldCollation, newCollation)) {
String alterCollationDDL = getAlterTableCollationDDL(oldSchemaInfo, newSchemaInfo);
generator.addSchemaDDLMode(DDLGenerator.TYPE_CHANGE_TABLE_COLLATE, newSchemaInfo, alterCollationDDL);
}
//Generate the DDL for column attribute change
List<SchemaChangeLog> allAttrChanges = changeLogMgr.getClassAttrChangeLogs();
allAttrChanges.addAll(changeLogMgr.getAttrChangeLogs());
// only newly added attributes, and only versions after 8.4.0, support column reordering
boolean isSupportReorderColumn = CompatibleUtil.isSupportReorderColumn(databaseInfo);
if (isSupportReorderColumn) {
/*For bug TOOLS-1258: after adding a column and then renaming it, an error popped up.*/
/*Sort the change log first*/
Collections.sort(allAttrChanges, new ChangeLogCompartor(newSchemaInfo));
for (SchemaChangeLog log : allAttrChanges) {
boolean isClassAttr = false;
if (log.getType() == SchemeInnerType.TYPE_CLASSATTRIBUTE) {
isClassAttr = true;
} else {
isClassAttr = false;
}
appendChangeAttributeDDL(oldSchemaInfo, newSchemaInfo, oldSupers, newSupers, attrMap, tableName, log, isClassAttr, generator);
}
List<SchemaChangeLog> allPosChangeLogs = changeLogMgr.getPositionChangeLogs();
for (SchemaChangeLog log : allPosChangeLogs) {
if (!generator.hasProcessedAttr(log.getOldValue())) {
for (DBAttribute attr : newSchemaInfo.getAttributes()) {
if (attr.getName().equals(log.getOldValue())) {
appendChangeAttributeDDL(oldSchemaInfo, newSchemaInfo, oldSupers, newSupers, attrMap, tableName, log, attr.isClassAttribute(), generator);
break;
}
}
}
}
} else {
for (SchemaChangeLog log : allAttrChanges) {
appendAlterAttributeDDL(oldSchemaInfo, newSchemaInfo, oldSupers, newSupers, attrMap, tableName, log, generator);
}
if (isSupportReorderColumn) {
generator.addSchemaDDLMode(DDLGenerator.TYPE_CHANGE_POS, newSchemaInfo, getAddReorderColumnDDL(oldSchemaInfo, newSchemaInfo, newSupers, tableName));
}
}
//Generate the DDL for super class change
List<String> oldSuperClasses = oldSchemaInfo.getSuperClasses();
List<String> newSuperClasses = newSchemaInfo.getSuperClasses();
List<List<String>> superChanges = getSuperclassChanges(oldSuperClasses, newSuperClasses);
generator.addSchemaDDLMode(DDLGenerator.TYPE_CHANGE_SUPER, newSchemaInfo, appendChangedSuperDDL(oldSchemaInfo, newSchemaInfo, tableName, oldSuperClasses, newSuperClasses, superChanges));
//Generate the DDL for PK change
List<SchemaInfo> allSupers = SuperClassUtil.getSuperClasses(databaseInfo, newSchemaInfo);
allSupers.addAll(newSupers);
allSupers.addAll(oldSupers);
Constraint newPK = newSchemaInfo.getPK(allSupers);
Constraint oldPK = oldSchemaInfo.getPK(oldSupers);
if (oldPK == null && newPK != null) {
// add pk
List<String> pkAttributes = newPK.getAttributes();
if (pkAttributes != null && pkAttributes.size() > 0) {
String addPKDDL = getAddPKDDL(tableName, pkAttributes, newPK.getName()) + endLineChar + StringUtil.NEWLINE;
generator.addSchemaDDLMode(DDLGenerator.TYPE_ADD_INDEX, newPK, addPKDDL);
}
} else if (oldPK != null && newPK == null) {
// del pk
String dropPKDDL = dropPK(tableName, oldPK.getName()) + endLineChar + StringUtil.NEWLINE;
generator.addPreDDLMode(DDLGenerator.TYPE_DROP_INDEX, oldPK, dropPKDDL);
} else if (oldPK != null && newPK != null) {
appendChangedPkDDL(attrMap, tableName, newPK, oldPK, generator);
}
//Generate the DDL for FK change
List<SchemaChangeLog> fkChanges = changeLogMgr.getFKChangeLogs();
for (SchemaChangeLog log : fkChanges) {
appendChangedFkDDL(oldSchemaInfo, newSchemaInfo, attrMap, tableName, log, generator);
}
List<SchemaChangeLog> indexChanges = changeLogMgr.getIndexChangeLogs();
for (SchemaChangeLog log : indexChanges) {
appendChanedIndexDDL(oldSchemaInfo, newSchemaInfo, tableName, log, generator);
}
// Partitioning
boolean isPartitionChanged = isPartitonChange(oldSchemaInfo.getPartitionList(), newSchemaInfo.getPartitionList());
if ("YES".equals(oldSchemaInfo.isPartitionGroup()) && isPartitionChanged) {
String sql = getTransformToGenericDDL(tableName) + endLineChar + StringUtil.NEWLINE;
generator.addSchemaDDLMode(DDLGenerator.TYPE_DROP_PARTITON, oldSchemaInfo.getPartitionList(), sql);
}
if (isPartitionChanged) {
List<PartitionInfo> partitionInfoList = newSchemaInfo.getPartitionList();
String sql = getTransformToPartitionDDL(partitionInfoList);
if (sql != null) {
sql = sql + endLineChar + StringUtil.NEWLINE;
generator.addSchemaDDLMode(DDLGenerator.TYPE_ADD_PARTITON, oldSchemaInfo.getPartitionList(), sql);
}
}
return generator.generatorDDL();
}
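The partitioning block at the end hinges on isPartitonChange(): when the layout changed, the old partitioning is dropped via getTransformToGenericDDL() (only if the table was already partitioned) and the new layout is re-applied via getTransformToPartitionDDL(). The check itself is not shown on this page, so the sketch below is an illustrative re-implementation of such a comparison using PartitionInfo.equals(), not the project's actual logic.

import java.util.List;
import com.cubrid.common.core.common.model.PartitionInfo;

// Illustrative partition-layout comparison (not the project's isPartitonChange()).
public final class PartitionChangeCheck {
    public static boolean isChanged(List<PartitionInfo> oldList, List<PartitionInfo> newList) {
        boolean oldEmpty = oldList == null || oldList.isEmpty();
        boolean newEmpty = newList == null || newList.isEmpty();
        if (oldEmpty || newEmpty) {
            // Adding or removing partitioning altogether counts as a change.
            return oldEmpty != newEmpty;
        }
        if (oldList.size() != newList.size()) {
            return true;
        }
        for (int i = 0; i < oldList.size(); i++) {
            // PartitionInfo.equals() is exercised by the unit test earlier on this page.
            if (!oldList.get(i).equals(newList.get(i))) {
                return true;
            }
        }
        return false;
    }
}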