Use of com.servoy.j2db.util.IntHashMap in project servoy-client by Servoy.
The class EditRecordList, method stopEditingImpl.
/**
* This method should only be called through stopEditing(boolean, List<Record>) so that that method can call onAutoSaveFailed.
*/
private int stopEditingImpl(final boolean javascriptStop, List<IRecord> recordsToSave, int recursionDepth) {
if (recursionDepth > 50) {
fsm.getApplication().reportJSError("stopEditing max recursion exceeded, look if on (or after) record update or inserts are constantly changing records", new RuntimeException());
return ISaveConstants.SAVE_FAILED;
}
if (ignoreSave) {
return ISaveConstants.AUTO_SAVE_BLOCKED;
}
if (isSavingAll) {
// we are saving all, no need to save anything more
return ISaveConstants.STOPPED;
}
if (recordsToSave == null && savingRecords.size() > 0) {
// we are already saving some records; calling save-all now is not supported
return ISaveConstants.STOPPED;
}
if (recordsToSave != null && savingRecords.size() > 0) {
// make a copy to be sure that removeAll is supported
recordsToSave = new ArrayList<IRecord>(recordsToSave);
recordsToSave.removeAll(savingRecords);
}
if (recordsToSave != null) {
boolean hasEditedRecords = false;
editRecordsLock.lock();
try {
for (IRecord record : recordsToSave) {
if (editedRecords.contains(record)) {
hasEditedRecords = true;
break;
}
}
} finally {
editRecordsLock.unlock();
}
if (!hasEditedRecords)
return ISaveConstants.STOPPED;
}
// we can't simply test here whether editedRecords is empty (and return stopped),
// because for changes to just globals or find states (or deleted records)
// we still need to go through prepareForSave.
final List<IRecord> recordsToSaveFinal = recordsToSave;
if (!fsm.getApplication().isEventDispatchThread()) {
// only the event dispatch thread can stop a current edit.
// this works around internals where background aggregate queries seem to also trigger saves
// $NON-NLS-1$
Debug.trace("Stop edit postponend because it is not in the event dispatch thread: " + Thread.currentThread().getName());
// calculations running from lazy table view loading threads may trigger stopEditing
fsm.getApplication().invokeLater(new Runnable() {
public void run() {
// do not stop if the user is editing something else.
boolean stop;
editRecordsLock.lock();
try {
stop = editedRecords.size() == 1 && recordsToSaveFinal != null && recordsToSaveFinal.size() == 1 && editedRecords.get(0) == recordsToSaveFinal.get(0);
} finally {
editRecordsLock.unlock();
}
if (stop) {
stopEditing(javascriptStop, recordsToSaveFinal);
} else {
// $NON-NLS-1$
Debug.trace("Stop edit skipped because other records are being edited");
}
}
});
return ISaveConstants.AUTO_SAVE_BLOCKED;
}
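// From here on the actual save runs: prepare for save, validate and collect row updates, perform the database updates and post-process the results.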
int editedRecordsSize;
try {
int p = prepareForSave(true);
if (p != ISaveConstants.STOPPED) {
return p;
}
if (recordsToSave == null) {
isSavingAll = true;
} else {
savingRecords.addAll(recordsToSave);
}
// remove any non-referenced failed records
boolean fireChange = false;
editRecordsLock.lock();
try {
if (failedRecords.size() != 0) {
Iterator<IRecordInternal> it = failedRecords.iterator();
while (it.hasNext()) {
IRecordInternal rec = it.next();
if (rec != null) {
if (rec.getParentFoundSet() == null) {
it.remove();
} else if (rec.getParentFoundSet().getRecordIndex(rec) == -1) {
it.remove();
}
}
}
if (failedRecords.size() == 0) {
fireChange = true;
}
}
} finally {
editRecordsLock.unlock();
}
if (fireChange)
fireEditChange();
// remove the unchanged records; only really calculate when it is a real stop (autoSave is true or it is a javascript stop)
removeUnChangedRecords(autoSave || javascriptStop, true);
// check if anything is left
int editRecordListSize;
editRecordsLock.lock();
try {
editRecordListSize = editedRecords.size();
if (editRecordListSize == 0)
return ISaveConstants.STOPPED;
} finally {
editRecordsLock.unlock();
}
// cannot stop, it's blocked
if (!autoSave && !javascriptStop) {
return ISaveConstants.AUTO_SAVE_BLOCKED;
}
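// Validation phase: every remaining edited record is validated and turned into a RowUpdateInfo; validation failures and trigger exceptions end up in failedRecords.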
int failedCount = 0;
boolean justValidationErrors = false;
lastStopEditingException = null;
List<RowUpdateInfo> rowUpdates = new ArrayList<RowUpdateInfo>(editRecordListSize);
editRecordsLock.lock();
try {
if (recordsToSave == null) {
// if it is a save all, then first filter out all the duplicate rows.
for (int i = 0; i < editedRecords.size(); i++) {
Row toTest = editedRecords.get(i).getRawData();
for (int j = editedRecords.size(); --j > i; ) {
if (editedRecords.get(j).getRawData() == toTest) {
removeEditedRecord(editedRecords.get(j));
}
}
}
}
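// processed counts how often each record has been handled, so that triggers which keep re-editing records cannot loop forever.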
Map<IRecordInternal, Integer> processed = new HashMap<IRecordInternal, Integer>();
for (IRecordInternal tmp = getFirstElement(editedRecords, recordsToSave); tmp != null; tmp = getFirstElement(editedRecords, recordsToSave)) {
// check if we do not have an infinite recursive loop
Integer count = processed.get(tmp);
if (count != null && count.intValue() > 50) {
fsm.getApplication().reportJSError("stopEditing max loop counter exceeded on " + tmp.getParentFoundSet().getDataSource() + "/" + tmp.getPKHashKey(), new RuntimeException());
return ISaveConstants.SAVE_FAILED;
}
processed.put(tmp, Integer.valueOf(count == null ? 1 : (count.intValue() + 1)));
if (tmp instanceof Record) {
Record record = (Record) tmp;
// prevent multiple updates for the same row (coming from multiple records)
for (int j = 0; j < rowUpdates.size(); j++) {
if (rowUpdates.get(j).getRow() == record.getRawData()) {
// create a new rowUpdate that contains both updates
RowUpdateInfo removed = rowUpdates.remove(j);
recordTested.remove(record);
// use the first record; that one must always be leading (for firing events)
record = removed.getRecord();
break;
}
}
try {
// test for table events; this may execute table events if the user attached JS methods to them;
// the user might add/delete/edit records in the JS - invalidating a normal iterator
// (the edited record list changes) - which is why an AllowListModificationIterator is used.
// Note that the behaviour differs between a trigger that returns false and one that throws an exception:
// when the trigger returns false, the record must stay in editedRecords;
// this is needed because the trigger may be used as validation to keep the user in the record when autosave is true.
// when the trigger throws an exception, the record must move from editedRecords to failedRecords so that
// scripting can examine the failed records (the thrown value is retrieved via record.exception.getValue())
editRecordsLock.unlock();
boolean validationErrors = false;
try {
JSRecordMarkers validateObject = fsm.validateRecord(record, null);
// throws ServoyException when the trigger method throws an exception
if (validateObject != null && validateObject.isHasErrors()) {
Object[] genericExceptions = validateObject.getGenericExceptions();
if (genericExceptions.length > 0) {
// compatible with old code: those exceptions are caught below.
throw (Exception) genericExceptions[0];
}
// we always want to process all records, but mark this as a validation error so that the failed records are updated below.
validationErrors = true;
// set the justValidationErrors flag to true; when it is true and there is no real exception, the application's handleException is not called.
justValidationErrors = true;
failedCount++;
if (!failedRecords.contains(record)) {
failedRecords.add(record);
}
recordTested.remove(record);
}
} finally {
editRecordsLock.lock();
}
if (!validationErrors) {
RowUpdateInfo rowUpdateInfo = getRecordUpdateInfo(record);
if (rowUpdateInfo != null) {
rowUpdateInfo.setRecord(record);
rowUpdates.add(rowUpdateInfo);
} else {
recordTested.remove(record);
}
}
} catch (ServoyException e) {
log.debug("stopEditing(" + javascriptStop + ") encountered an exception - could be expected and treated by solution code or not", e); //$NON-NLS-1$//$NON-NLS-2$
// trigger method threw exception
lastStopEditingException = e;
failedCount++;
// set the latest exception on the row
record.getRawData().setLastException(e);
if (!failedRecords.contains(record)) {
failedRecords.add(record);
}
recordTested.remove(record);
} catch (Exception e) {
// $NON-NLS-1$ //$NON-NLS-2$
Debug.error("Not a normal Servoy/Db Exception generated in saving record: " + record + " removing the record", e);
recordTested.remove(record);
}
} else {
// find state
recordTested.remove(tmp);
}
editedRecords.remove(tmp);
}
} finally {
editRecordsLock.unlock();
}
if (failedCount > 0) {
placeBackAlreadyProcessedRecords(rowUpdates);
if (lastStopEditingException == null && justValidationErrors) {
return ISaveConstants.VALIDATION_FAILED;
} else {
if (!(lastStopEditingException instanceof ServoyException)) {
lastStopEditingException = new ApplicationException(ServoyException.SAVE_FAILED, lastStopEditingException);
}
if (!javascriptStop)
fsm.getApplication().handleException(fsm.getApplication().getI18NMessage("servoy.formPanel.error.saveFormData"), lastStopEditingException); //$NON-NLS-1$
return ISaveConstants.SAVE_FAILED;
}
}
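// if no row updates were collected (for example only find states or unchanged records), fire the edit change and report a clean stop.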
if (rowUpdates.size() == 0) {
fireEditChange();
if (Debug.tracing()) {
// $NON-NLS-1$
Debug.trace("no records to update anymore, failed: " + failedRecords.size());
}
return ISaveConstants.STOPPED;
}
if (Debug.tracing()) {
// $NON-NLS-1$ //$NON-NLS-2$
Debug.trace("Updating/Inserting " + rowUpdates.size() + " records: " + rowUpdates.toString());
}
RowUpdateInfo[] infos = rowUpdates.toArray(new RowUpdateInfo[rowUpdates.size()]);
if (infos.length > 1 && !fsm.config.disableInsertsReorder()) {
// check whether new row pks are also used in records that come before this record,
// and sort the updates based on that.
boolean changed = false;
List<RowUpdateInfo> al = new ArrayList<RowUpdateInfo>(Arrays.asList(infos));
int prevI = -1;
outer: for (int i = al.size(); --i > 0; ) {
Row row = al.get(i).getRow();
// only test new rows and their pks.
if (row.existInDB())
continue;
String[] pkColumns = row.getRowManager().getSQLSheet().getPKColumnDataProvidersAsArray();
Object[] pk = row.getPK();
for (int j = 0; j < pk.length; j++) {
Object pkObject = pk[j];
// special case if pk was db ident and that value was copied from another row.
if (pkObject instanceof DbIdentValue && ((DbIdentValue) pkObject).getRow() != row)
continue;
for (int k = 0; k < i; k++) {
RowUpdateInfo updateInfo = al.get(k);
Object[] values = updateInfo.getRow().getRawColumnData();
int[] pkIndexes = updateInfo.getFoundSet().getSQLSheet().getPKIndexes();
IntHashMap<String> pks = new IntHashMap<String>(pkIndexes.length, 1);
for (int pkIndex : pkIndexes) {
// $NON-NLS-1$
pks.put(pkIndex, "");
}
for (int l = 0; l < values.length; l++) {
// skip all pk column indexes, which shouldn't trigger a resort (except db ident values copied from other rows, which may need one)
if (!(values[l] instanceof DbIdentValue && ((DbIdentValue) values[l]).getRow() != updateInfo.getRow()) && pks.containsKey(l))
continue;
boolean same = values[l] == pkObject;
if (!same && values[l] != null) {
Column pkColumn = row.getRowManager().getSQLSheet().getTable().getColumn(pkColumns[j]);
if (pkColumn.hasFlag(IBaseColumn.UUID_COLUMN)) {
// same uuids are the same even if not the same object
same = equalObjects(pkObject, values[l], 0, true);
}
}
if (same) {
al.add(k, al.remove(i));
// watch out for endless loops when 2 records both with pk's point to each other...
if (prevI != i) {
prevI = i;
i++;
}
changed = true;
continue outer;
}
}
}
}
}
if (changed) {
infos = al.toArray(infos);
}
}
ISQLStatement[] statements;
if (fsm.config.statementBatching() && infos.length > 1) {
// Merge insert statements from all infos: multiple infos can share the same statement if the records are batched together at the statement level
List<ISQLStatement> mergedStatements = new ArrayList<ISQLStatement>(infos.length);
ISQLStatement prevStatement = null;
for (RowUpdateInfo rowUpdateInfo : infos) {
ISQLStatement statement = rowUpdateInfo.getISQLStatement();
if (statement.getAction() == ISQLActionTypes.INSERT_ACTION && prevStatement != null && prevStatement.getAction() == ISQLActionTypes.INSERT_ACTION && insertStatementsCanBeMerged(prevStatement, statement)) {
mergeInsertStatements(prevStatement, statement);
} else {
prevStatement = statement;
mergedStatements.add(statement);
}
}
statements = mergedStatements.toArray(new ISQLStatement[mergedStatements.size()]);
} else {
statements = stream(infos).map(RowUpdateInfo::getISQLStatement).toArray(ISQLStatement[]::new);
}
// TODO if one statement fails in a transaction how do we know which one? and should we rollback all rows in these statements?
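// perform all statements in a single dataserver call; each result in idents is inspected below (requeried row data, a db ident value, Boolean.TRUE or an exception).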
Object[] idents = null;
try {
idents = fsm.getDataServer().performUpdates(fsm.getApplication().getClientID(), statements);
} catch (Exception e) {
// $NON-NLS-1$//$NON-NLS-2$
log.debug("stopEditing(" + javascriptStop + ") encountered an exception - could be expected and treated by solution code or not", e);
lastStopEditingException = e;
if (!javascriptStop)
fsm.getApplication().handleException(fsm.getApplication().getI18NMessage("servoy.formPanel.error.saveFormData"), new ApplicationException(ServoyException.SAVE_FAILED, lastStopEditingException)); //$NON-NLS-1$
return ISaveConstants.SAVE_FAILED;
}
if (idents.length != infos.length) {
// $NON-NLS-1$
Debug.error("Should be of same size!!");
}
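// post-processing: push the results back into the rows, collect per-foundset records and aggregates to flush, and queue the row manager fires.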
List<RowUpdateInfo> infosToBePostProcessed = new ArrayList<RowUpdateInfo>();
Map<FoundSet, List<Record>> foundsetToRecords = new HashMap<FoundSet, List<Record>>();
Map<FoundSet, List<String>> foundsetToAggregateDeletes = new HashMap<FoundSet, List<String>>();
List<Runnable> fires = new ArrayList<Runnable>(infos.length);
// Walk over it in reverse, so that related rows are updated in their row manager before they are required by their parents.
for (int i = infos.length; --i >= 0; ) {
RowUpdateInfo rowUpdateInfo = infos[i];
FoundSet foundSet = rowUpdateInfo.getFoundSet();
Row row = rowUpdateInfo.getRow();
String oldKey = row.getPKHashKey();
Record record = rowUpdateInfo.getRecord();
if (idents != null && idents.length != 0 && idents[i] != null) {
Object retValue = idents[i];
if (retValue instanceof Exception) {
log.debug("stopEditing(" + javascriptStop + ") encountered an exception - could be expected and treated by solution code or not", (Exception) retValue); //$NON-NLS-1$//$NON-NLS-2$
lastStopEditingException = (Exception) retValue;
failedCount++;
if (retValue instanceof ServoyException) {
((ServoyException) retValue).fillScriptStack();
}
row.setLastException((Exception) retValue);
markRecordAsFailed(record);
JSRecordMarkers vo = record.getRecordMarkers() != null ? record.getRecordMarkers() : new JSRecordMarkers(record, fsm.getApplication());
vo.addGenericException((Exception) retValue);
record.setRecordMarkers(vo);
continue;
} else if (retValue instanceof Object[]) {
Object[] rowData = (Object[]) retValue;
Object[] oldRowData = row.getRawColumnData();
if (oldRowData != null) {
if (oldRowData.length == rowData.length) {
for (int j = 0; j < rowData.length; j++) {
if (rowData[j] instanceof BlobMarkerValue) {
rowData[j] = oldRowData[j];
}
if (oldRowData[j] instanceof DbIdentValue) {
row.setDbIdentValue(rowData[j]);
}
}
} else {
Debug.error("Requery data has different length from row data.");
}
}
row.setRollbackData(rowData, Row.ROLLBACK_MODE.UPDATE_CHANGES);
} else if (!Boolean.TRUE.equals(retValue)) {
// is db ident, can only be one column
row.setDbIdentValue(retValue);
}
}
editRecordsLock.lock();
try {
recordTested.remove(record);
} finally {
editRecordsLock.unlock();
}
if (!row.existInDB()) {
// when the row was not saved yet, the row pkhash already has the new value while pksAndRecordsHolder still has the initial value
foundSet.updatePk(record);
}
try {
ISQLStatement statement = rowUpdateInfo.getISQLStatement();
row.getRowManager().rowUpdated(row, oldKey, foundSet, fires, statement instanceof ITrackingSQLStatement ? ((ITrackingSQLStatement) statement).getChangedColumns() : null);
} catch (Exception e) {
// $NON-NLS-1$//$NON-NLS-2$
log.debug("stopEditing(" + javascriptStop + ") encountered an exception - could be expected and treated by solution code or not", e);
lastStopEditingException = e;
failedCount++;
row.setLastException(e);
JSRecordMarkers vo = record.getRecordMarkers() != null ? record.getRecordMarkers() : new JSRecordMarkers(record, fsm.getApplication());
vo.addGenericException(e);
record.setRecordMarkers(vo);
editRecordsLock.lock();
try {
if (!failedRecords.contains(record)) {
failedRecords.add(record);
}
} finally {
editRecordsLock.unlock();
}
}
infosToBePostProcessed.add(infos[i]);
List<Record> lst = foundsetToRecords.get(foundSet);
if (lst == null) {
lst = new ArrayList<Record>(3);
foundsetToRecords.put(foundSet, lst);
}
lst.add(record);
List<String> aggregates = foundsetToAggregateDeletes.get(foundSet);
if (aggregates == null) {
foundsetToAggregateDeletes.put(foundSet, rowUpdateInfo.getAggregatesToRemove());
} else {
List<String> toMerge = rowUpdateInfo.getAggregatesToRemove();
for (int j = 0; j < toMerge.size(); j++) {
String aggregate = toMerge.get(j);
if (!aggregates.contains(aggregate)) {
aggregates.add(aggregate);
}
}
}
}
// run the row manager fires in reverse order (the original order, since the infos were processed in reverse) -> the first inserted record is fired first
for (int i = fires.size(); --i >= 0; ) {
fires.get(i).run();
}
// get the size of the edited records before the table events, so that we can check whether those events changed records again.
editedRecordsSize = editedRecords.size();
Record rowUpdateInfoRecord = null;
for (RowUpdateInfo rowUpdateInfo : infosToBePostProcessed) {
try {
rowUpdateInfoRecord = rowUpdateInfo.getRecord();
((FoundSet) rowUpdateInfoRecord.getParentFoundSet()).executeFoundsetTrigger(new Object[] { rowUpdateInfoRecord }, rowUpdateInfo.getISQLStatement().getAction() == ISQLActionTypes.INSERT_ACTION ? StaticContentSpecLoader.PROPERTY_ONAFTERINSERTMETHODID : StaticContentSpecLoader.PROPERTY_ONAFTERUPDATEMETHODID, true);
} catch (ServoyException e) {
if (e instanceof DataException && e.getCause() instanceof JavaScriptException) {
// trigger method threw exception
// $NON-NLS-1$//$NON-NLS-2$
log.debug("stopEditing(" + javascriptStop + ") encountered an exception - could be expected and treated by solution code or not", e);
lastStopEditingException = e;
failedCount++;
rowUpdateInfoRecord.getRawData().setLastException(e);
JSRecordMarkers vo = rowUpdateInfoRecord.getRecordMarkers() != null ? rowUpdateInfoRecord.getRecordMarkers() : new JSRecordMarkers(rowUpdateInfoRecord, fsm.getApplication());
vo.addGenericException(e);
rowUpdateInfoRecord.setRecordMarkers(vo);
editRecordsLock.lock();
try {
if (!failedRecords.contains(rowUpdateInfoRecord)) {
failedRecords.add(rowUpdateInfoRecord);
}
} finally {
editRecordsLock.unlock();
}
} else {
// $NON-NLS-1$
fsm.getApplication().handleException("Failed to execute after update/insert trigger.", e);
}
}
}
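// notify every foundset of its updated records and of the aggregates that have to be flushed.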
for (Map.Entry<FoundSet, List<Record>> entry : foundsetToRecords.entrySet()) {
FoundSet fs = entry.getKey();
fs.recordsUpdated(entry.getValue(), foundsetToAggregateDeletes.get(fs));
}
boolean shouldFireEditChange;
editRecordsLock.lock();
try {
shouldFireEditChange = editedRecords.size() == 0;
} finally {
editRecordsLock.unlock();
}
if (shouldFireEditChange) {
fireEditChange();
}
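// some rows failed during or after the database update: report it (unless stopped from scripting) and return SAVE_FAILED.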
if (failedCount > 0) {
if (!javascriptStop) {
lastStopEditingException = new ApplicationException(ServoyException.SAVE_FAILED, lastStopEditingException);
fsm.getApplication().handleException(fsm.getApplication().getI18NMessage("servoy.formPanel.error.saveFormData"), lastStopEditingException); //$NON-NLS-1$
}
return ISaveConstants.SAVE_FAILED;
}
} catch (RuntimeException e) {
if (e instanceof IllegalArgumentException) {
fsm.getApplication().handleException(null, new ApplicationException(ServoyException.INVALID_INPUT, e));
return ISaveConstants.SAVE_FAILED;
} else if (e instanceof IllegalStateException) {
// $NON-NLS-1$
fsm.getApplication().handleException(fsm.getApplication().getI18NMessage("servoy.formPanel.error.saveFormData"), e);
return ISaveConstants.SAVE_FAILED;
} else {
Debug.error(e);
throw e;
}
} finally {
if (recordsToSave == null) {
isSavingAll = false;
} else {
savingRecords.removeAll(recordsToSave);
}
fireEvents();
}
if (editedRecords.size() != editedRecordsSize && recordsToSave == null) {
// records were changed by the after insert/update table events; call stop edit again if this was not a specific-record save.
return stopEditingImpl(javascriptStop, null, recursionDepth + 1);
}
return ISaveConstants.STOPPED;
}
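The IntHashMap named in the title appears in the insert-reorder step above, where it serves as a lookup of pk column indexes keyed by primitive int. Below is a minimal, self-contained sketch of that pattern (the class name and sample indexes are made up for illustration; only the IntHashMap constructor, put and containsKey calls mirror the method above), assuming the servoy-client com.servoy.j2db.util.IntHashMap is on the classpath.

import com.servoy.j2db.util.IntHashMap;

public class IntHashMapUsageSketch {
    public static void main(String[] args) {
        int[] pkIndexes = { 0, 3 }; // hypothetical pk column indexes of a table
        // capacity and load factor chosen as in stopEditingImpl above
        IntHashMap<String> pks = new IntHashMap<String>(pkIndexes.length, 1);
        for (int pkIndex : pkIndexes) {
            pks.put(pkIndex, ""); // the value is irrelevant, only the int key set matters
        }
        // containsKey(int) is then used to recognize pk columns while scanning raw column data
        for (int column = 0; column < 5; column++) {
            System.out.println("column " + column + " is a pk column: " + pks.containsKey(column));
        }
    }
}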