Use of java.sql.BatchUpdateException in project entando-core by entando.
The class ActionLogDAO, method addLogRecordRelations.
private void addLogRecordRelations(int recordId, ActivityStreamInfo asi, Connection conn) {
    if (asi == null) {
        return;
    }
    List<String> groups = asi.getGroups();
    if (null == groups || groups.isEmpty()) {
        return;
    }
    Set<String> codes = new HashSet<>(groups);
    Iterator<String> iterator = codes.iterator();
    PreparedStatement stat = null;
    try {
        stat = conn.prepareStatement(ADD_LOG_RECORD_RELATION);
        while (iterator.hasNext()) {
            String groupCode = iterator.next();
            stat.setInt(1, recordId);
            stat.setString(2, groupCode);
            stat.addBatch();
            stat.clearParameters();
        }
        stat.executeBatch();
    } catch (BatchUpdateException e) {
        logger.error("Error adding relation for record {}", recordId, e);
        throw new RuntimeException("Error adding relation for record - " + recordId, e.getNextException());
    } catch (Throwable t) {
        logger.error("Error adding relations for record {}", recordId, t);
        throw new RuntimeException("Error adding relations for record - " + recordId, t);
    } finally {
        closeDaoResources(null, stat);
    }
}
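The catch block above relies on the fact that when a batch fails, many JDBC drivers chain the underlying cause onto the BatchUpdateException via getNextException(). A minimal, self-contained sketch of that unwrapping pattern; the table name, column, and connection are illustrative assumptions, not part of the original:

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class BatchErrorDemo {

    // Walks the exception chain of a failed batch; depending on the driver,
    // each link may describe a different failed statement.
    static void logBatchFailure(BatchUpdateException e) {
        SQLException cause = e.getNextException();
        while (cause != null) {
            System.err.println("Chained cause: " + cause.getMessage());
            cause = cause.getNextException();
        }
    }

    // Hypothetical usage: 'conn' and the 'demo' table are assumptions.
    static void insertAll(Connection conn, int[] ids) throws SQLException {
        try (PreparedStatement stat = conn.prepareStatement("INSERT INTO demo (id) VALUES (?)")) {
            for (int id : ids) {
                stat.setInt(1, id);
                stat.addBatch();
            }
            stat.executeBatch();
        } catch (BatchUpdateException e) {
            logBatchFailure(e);
            throw e;
        }
    }
}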
Use of java.sql.BatchUpdateException in project entando-core by entando.
The class DataObjectDAO, method addDataObjectRelationsRecord.
/**
 * Add a record in the table 'dataobjectrelations' for every resource, page,
 * other dataobject, role and category associated with the given dataobject.
 *
 * @param dataobject The current dataobject.
 * @param conn The connection to the database.
 * @throws ApsSystemException when a connection error is detected.
 */
protected void addDataObjectRelationsRecord(DataObject dataobject, Connection conn) throws ApsSystemException {
    PreparedStatement stat = null;
    try {
        stat = conn.prepareStatement(ADD_DATAOBJECT_REL_RECORD);
        this.addCategoryRelationsRecord(dataobject, true, stat);
        this.addGroupRelationsRecord(dataobject, stat);
        EntityAttributeIterator attributeIter = new EntityAttributeIterator(dataobject);
        while (attributeIter.hasNext()) {
            // Each attribute is visited here; in this excerpt the loop body is
            // otherwise empty, so currAttribute is read but not used further.
            AttributeInterface currAttribute = (AttributeInterface) attributeIter.next();
        }
        stat.executeBatch();
    } catch (BatchUpdateException e) {
        _logger.error("Error saving record into dataobjectrelations {}", dataobject.getId(), e.getNextException());
        throw new RuntimeException("Error saving record into dataobjectrelations " + dataobject.getId(), e.getNextException());
    } catch (Throwable t) {
        _logger.error("Error saving record into dataobjectrelations {}", dataobject.getId(), t);
        throw new RuntimeException("Error saving record into dataobjectrelations " + dataobject.getId(), t);
    } finally {
        closeDaoResources(null, stat);
    }
}
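The helpers called above (addCategoryRelationsRecord, addGroupRelationsRecord) are not shown in this excerpt; each queues rows on the shared PreparedStatement, and executeBatch() flushes them all at once. A hedged sketch of what such a helper might look like; the (dataobjectid, refgroup) column layout and parameter order are assumptions for illustration, not the project's actual schema:

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collection;

// Hypothetical helper in the style of addGroupRelationsRecord: queues one
// batch row per group related to the dataobject, on a statement assumed to be
// "INSERT INTO dataobjectrelations (dataobjectid, refgroup) VALUES (?, ?)".
final class RelationBatchSketch {
    static void addGroupRows(String dataobjectId, Collection<String> groups,
                             PreparedStatement stat) throws SQLException {
        for (String group : groups) {
            stat.setString(1, dataobjectId);
            stat.setString(2, group);
            stat.addBatch();
            stat.clearParameters();
        }
    }
}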
Use of java.sql.BatchUpdateException in project entando-core by entando.
The class I18nDAO, method addLabelGroup.
private void addLabelGroup(String key, ApsProperties labels, Connection conn) throws ApsSystemException {
    PreparedStatement stat = null;
    try {
        stat = conn.prepareStatement(ADD_LABEL);
        Iterator<Object> labelKeysIter = labels.keySet().iterator();
        while (labelKeysIter.hasNext()) {
            String labelLangCode = (String) labelKeysIter.next();
            String label = labels.getProperty(labelLangCode);
            stat.setString(1, key);
            stat.setString(2, labelLangCode);
            stat.setString(3, label);
            stat.addBatch();
            stat.clearParameters();
        }
        stat.executeBatch();
    } catch (BatchUpdateException e) {
        _logger.error("Error adding a new label record", e.getNextException());
        throw new RuntimeException("Error adding a new label record", e.getNextException());
        // processDaoException(e.getNextException(), "Error adding a new label record", "addLabel");
    } catch (Throwable t) {
        _logger.error("Error while adding a new label", t);
        throw new RuntimeException("Error while adding a new label", t);
        // processDaoException(t, "Error while adding a new label", "addLabel");
    } finally {
        closeDaoResources(null, stat);
    }
}
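The method batches one row per language code for a single label key. A hedged usage sketch of the same pattern, with plain java.util.Properties standing in for entando's ApsProperties; the H2 in-memory URL, table layout, and label values are all assumptions for illustration:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;

// Usage sketch: inserts one row per language for the label key "PAGE_TITLE".
// Assumes the H2 driver is on the classpath.
public class LabelInsertDemo {
    private static final String ADD_LABEL =
            "INSERT INTO labels (keycode, langcode, stringvalue) VALUES (?, ?, ?)";

    public static void main(String[] args) throws SQLException {
        Properties labels = new Properties();
        labels.setProperty("en", "Welcome");
        labels.setProperty("it", "Benvenuto");
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            try (Statement st = conn.createStatement()) {
                st.execute("CREATE TABLE labels (keycode VARCHAR(64), langcode VARCHAR(8), stringvalue VARCHAR(255))");
            }
            try (PreparedStatement stat = conn.prepareStatement(ADD_LABEL)) {
                for (String lang : labels.stringPropertyNames()) {
                    stat.setString(1, "PAGE_TITLE");
                    stat.setString(2, lang);
                    stat.setString(3, labels.getProperty(lang));
                    stat.addBatch();
                }
                stat.executeBatch();
            }
        }
    }
}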
Use of java.sql.BatchUpdateException in project jackrabbit-oak by apache.
The class RDBDocumentStoreJDBC, method insert.
public <T extends Document> Set<String> insert(Connection connection, RDBTableMetaData tmd, List<T> documents) throws SQLException {
    int actualSchema = tmd.hasSplitDocs() ? 2 : 1;
    PreparedStatement stmt = connection.prepareStatement("insert into " + tmd.getName()
            + "(ID, MODIFIED, HASBINARY, DELETEDONCE, MODCOUNT, CMODCOUNT, DSIZE, "
            + (tmd.hasVersion() ? "VERSION, " : "")
            + (tmd.hasSplitDocs() ? "SDTYPE, SDMAXREVTIME, " : "")
            + "DATA, BDATA) "
            + "values (?, ?, ?, ?, ?, ?, ?, "
            + (tmd.hasVersion() ? (" " + actualSchema + ", ") : "")
            + (tmd.hasSplitDocs() ? "?, ?, " : "")
            + "?, ?)");
    List<T> sortedDocs = sortDocuments(documents);
    int[] results;
    try {
        for (T document : sortedDocs) {
            String data = this.ser.asString(document, tmd.getColumnOnlyProperties());
            String id = document.getId();
            Number hasBinary = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
            Boolean deletedOnce = (Boolean) document.get(NodeDocument.DELETED_ONCE);
            Long cmodcount = (Long) document.get(COLLISIONSMODCOUNT);
            int si = 1;
            setIdInStatement(tmd, stmt, si++, id);
            stmt.setObject(si++, document.get(MODIFIED), Types.BIGINT);
            stmt.setObject(si++, hasBinaryAsNullOrInteger(hasBinary), Types.SMALLINT);
            stmt.setObject(si++, deletedOnceAsNullOrInteger(deletedOnce), Types.SMALLINT);
            stmt.setObject(si++, document.get(MODCOUNT), Types.BIGINT);
            stmt.setObject(si++, cmodcount == null ? Long.valueOf(0) : cmodcount, Types.BIGINT);
            stmt.setObject(si++, data.length(), Types.BIGINT);
            if (tmd.hasSplitDocs()) {
                stmt.setObject(si++, document.get(NodeDocument.SD_TYPE));
                stmt.setObject(si++, document.get(NodeDocument.SD_MAX_REV_TIME_IN_SECS));
            }
            if (data.length() < tmd.getDataLimitInOctets() / CHAR2OCTETRATIO) {
                stmt.setString(si++, data);
                stmt.setBinaryStream(si++, null, 0);
            } else {
                stmt.setString(si++, "\"blob\"");
                byte[] bytes = asBytes(data);
                stmt.setBytes(si++, bytes);
            }
            stmt.addBatch();
        }
        results = stmt.executeBatch();
    } catch (BatchUpdateException ex) {
        LOG.debug("Some of the batch updates failed", ex);
        results = ex.getUpdateCounts();
    } finally {
        stmt.close();
    }
    Set<String> successfullyInserted = new HashSet<String>();
    for (int i = 0; i < results.length; i++) {
        int result = results[i];
        if (result != 1 && result != Statement.SUCCESS_NO_INFO) {
            LOG.debug("DB insert failed for {}: {}", tmd.getName(), sortedDocs.get(i).getId());
        } else {
            successfullyInserted.add(sortedDocs.get(i).getId());
        }
    }
    return successfullyInserted;
}
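Unlike the entando examples, this method treats BatchUpdateException as a partial result rather than a fatal error: getUpdateCounts() reports the outcome of each statement that ran, and the caller keeps whatever succeeded. A minimal, self-contained sketch of that recovery pattern; the H2 in-memory database, table, and data are assumptions:

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

// Sketch: treat a failed batch as a partial success by inspecting update counts.
public class PartialBatchDemo {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            try (Statement st = conn.createStatement()) {
                st.execute("CREATE TABLE t (id INT PRIMARY KEY)");
            }
            int[] ids = {1, 2, 2, 3};   // duplicate key makes one insert fail
            int[] counts;
            try (PreparedStatement ps = conn.prepareStatement("INSERT INTO t (id) VALUES (?)")) {
                for (int id : ids) {
                    ps.setInt(1, id);
                    ps.addBatch();
                }
                try {
                    counts = ps.executeBatch();
                } catch (BatchUpdateException e) {
                    // Outcome per executed statement; drivers may stop at the
                    // first failure, so this array can be shorter than the batch.
                    counts = e.getUpdateCounts();
                }
            }
            List<Integer> inserted = new ArrayList<>();
            for (int i = 0; i < counts.length; i++) {
                if (counts[i] == 1 || counts[i] == Statement.SUCCESS_NO_INFO) {
                    inserted.add(ids[i]);
                }
            }
            System.out.println("Inserted: " + inserted);
        }
    }
}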
Use of java.sql.BatchUpdateException in project mssql-jdbc by Microsoft.
The class SQLServerPreparedStatement, method executeLargeBatch.
public long[] executeLargeBatch() throws SQLServerException, BatchUpdateException, SQLTimeoutException {
    DriverJDBCVersion.checkSupportsJDBC42();
    loggerExternal.entering(getClassNameLogging(), "executeLargeBatch");
    if (loggerExternal.isLoggable(Level.FINER) && Util.IsActivityTraceOn()) {
        loggerExternal.finer(toString() + " ActivityId: " + ActivityCorrelator.getNext().toString());
    }
    checkClosed();
    discardLastExecutionResults();
    long[] updateCounts;
    if (batchParamValues == null)
        updateCounts = new long[0];
    else
        try {
            // If any OUT or INOUT parameters are present, the entire batch fails.
            for (Parameter[] paramValues : batchParamValues) {
                for (Parameter paramValue : paramValues) {
                    if (paramValue.isOutput()) {
                        throw new BatchUpdateException(SQLServerException.getErrString("R_outParamsNotPermittedinBatch"), null, 0, null);
                    }
                }
            }
            PrepStmtBatchExecCmd batchCommand = new PrepStmtBatchExecCmd(this);
            executeStatement(batchCommand);
            updateCounts = new long[batchCommand.updateCounts.length];
            System.arraycopy(batchCommand.updateCounts, 0, updateCounts, 0, batchCommand.updateCounts.length);
            // Transform the SQLException into a BatchUpdateException with the update counts.
            if (null != batchCommand.batchException) {
                DriverJDBCVersion.throwBatchUpdateException(batchCommand.batchException, updateCounts);
            }
        } finally {
            batchParamValues = null;
        }
    loggerExternal.exiting(getClassNameLogging(), "executeLargeBatch", updateCounts);
    return updateCounts;
}
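On the caller's side, the BatchUpdateException thrown here carries the accumulated counts; since JDBC 4.2 the long-valued counts are available through getLargeUpdateCounts(). A hedged caller-side sketch, where the SQL statement, parameter shape, and connection are assumptions:

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Caller-side sketch for executeLargeBatch: on failure, recover the
// per-statement counts. getLargeUpdateCounts() is JDBC 4.2 API.
final class LargeBatchCaller {
    static long[] runBatch(Connection conn, String sql, long[] values) throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (long v : values) {
                ps.setLong(1, v);
                ps.addBatch();
            }
            try {
                return ps.executeLargeBatch();
            } catch (BatchUpdateException e) {
                long[] counts = e.getLargeUpdateCounts(); // may be null if the
                return counts != null ? counts : new long[0]; // driver gave no counts
            }
        }
    }
}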