Use of org.jkiss.dbeaver.model.data.DBDValueHandler in project dbeaver by dbeaver.
The class PostgreArrayValueHandler, method convertArrayToString:
private String convertArrayToString(@NotNull DBSTypedObject column, Object value, @NotNull DBDDisplayFormat format, boolean nested) {
    if (!DBUtils.isNullValue(value) && value instanceof DBDCollection) {
        DBDCollection collection = (DBDCollection) value;
        boolean isNativeFormat = format == DBDDisplayFormat.NATIVE;
        boolean isStringArray = collection.getComponentType().getDataKind() == DBPDataKind.STRING;
        DBDValueHandler valueHandler = collection.getComponentValueHandler();
        StringBuilder str = new StringBuilder();
        if (isNativeFormat && !nested) {
            str.append("'");
        }
        str.append("{");
        for (int i = 0; i < collection.getItemCount(); i++) {
            if (i > 0) {
                str.append(','); //$NON-NLS-1$
            }
            final Object item = collection.getItem(i);
            String itemString;
            if (item instanceof JDBCCollection) {
                // Multi-dimensional arrays case
                itemString = convertArrayToString(column, item, format, true);
            } else {
                itemString = valueHandler.getValueDisplayString(collection.getComponentType(), item, format);
            }
            if (isNativeFormat) {
                if (item instanceof String)
                    str.append('"');
                str.append(SQLUtils.escapeString(collection.getComponentType().getDataSource(), itemString));
                if (item instanceof String)
                    str.append('"');
            } else {
                str.append(itemString);
            }
        }
        str.append("}");
        if (isNativeFormat && !nested) {
            str.append("'");
        }
        return str.toString();
    }
    return super.getValueDisplayString(column, value, format);
}
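As a side note, the output shape this method produces for a one-dimensional text array can be sketched without any DBeaver types. The helper below is illustrative only: the double-quoting of items and the simple single-quote doubling are assumptions standing in for the NATIVE-format wrapping and SQLUtils.escapeString above, whose actual behavior depends on the data source dialect.

import java.util.List;

public class ArrayFormatSketch {
    // Rough stand-in for the NATIVE branch above: wrap string items in double quotes,
    // join them with commas inside braces, and wrap the whole literal in single quotes.
    static String toNativeArrayLiteral(List<String> items) {
        StringBuilder str = new StringBuilder("'{");
        for (int i = 0; i < items.size(); i++) {
            if (i > 0) {
                str.append(',');
            }
            // Assumed escape rule: double any embedded single quote
            // (the real escapeString for PostgreSQL is more involved).
            str.append('"').append(items.get(i).replace("'", "''")).append('"');
        }
        return str.append("}'").toString();
    }

    public static void main(String[] args) {
        // Prints: '{"red","it''s blue"}'
        System.out.println(toNativeArrayLiteral(List.of("red", "it's blue")));
    }
}

The nested flag in the real method exists because only the outermost array literal gets the surrounding single quotes; inner dimensions are emitted bare and concatenated by the recursive call.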
Use of org.jkiss.dbeaver.model.data.DBDValueHandler in project dbeaver by dbeaver.
The class ExecuteBatchImpl, method processBatch:
/**
 * Execute batch OR generate batch script.
 * @param session session
 * @param actions script actions. If not null, no actual execution is performed; generated statements are added here instead
 * @param options execution options (e.g. {@link DBSDataManipulator#OPTION_DISABLE_BATCHES})
 * @return execution statistics
 * @throws DBCException on execution error
 */
@NotNull
private DBCStatistics processBatch(@NotNull DBCSession session, @Nullable List<DBEPersistAction> actions, Map<String, Object> options) throws DBCException {
    //session.getProgressMonitor().subTask("Save batch (" + values.size() + ")");
    DBDValueHandler[] handlers = new DBDValueHandler[attributes.length];
    for (int i = 0; i < attributes.length; i++) {
        if (attributes[i] instanceof DBDAttributeBinding) {
            handlers[i] = ((DBDAttributeBinding) attributes[i]).getValueHandler();
        } else {
            handlers[i] = DBUtils.findValueHandler(session, attributes[i]);
        }
    }
    boolean useBatch = session.getDataSource().getInfo().supportsBatchUpdates() && reuseStatement && Boolean.FALSE.equals(options.get(DBSDataManipulator.OPTION_DISABLE_BATCHES));
    if (values.size() <= 1) {
        useBatch = false;
    }
    DBCStatistics statistics = new DBCStatistics();
    DBCStatement statement = null;
    try {
        // Here we'll try to reuse prepared statement.
        // It makes a great sense in case of data transfer where we need millions of inserts.
        // We must be aware of nulls because actual insert statements may differ depending on null values.
        // So if row nulls aren't the same as in previous row we need to prepare new statement and restart batch.
        // Quite complicated but works.
        boolean[] prevNulls = new boolean[attributes.length];
        boolean[] nulls = new boolean[attributes.length];
        int statementsInBatch = 0;
        for (int rowIndex = 0; rowIndex < values.size(); rowIndex++) {
            Object[] rowValues = values.get(rowIndex);
            if (session.getProgressMonitor().isCanceled()) {
                break;
            }
            boolean reuse = reuseStatement;
            if (reuse) {
                for (int i = 0; i < rowValues.length; i++) {
                    nulls[i] = DBUtils.isNullValue(rowValues[i]);
                }
                if (!Arrays.equals(prevNulls, nulls) && statementsInBatch > 0) {
                    reuse = false;
                }
                System.arraycopy(nulls, 0, prevNulls, 0, nulls.length);
                if (!reuse && statementsInBatch > 0) {
                    // Flush batch
                    if (actions == null) {
                        flushBatch(statistics, statement);
                    }
                    statement.close();
                    statement = null;
                    statementsInBatch = 0;
                    reuse = true;
                }
            }
            if (statement == null || !reuse) {
                statement = prepareStatement(session, handlers, rowValues, options);
                statistics.setQueryText(statement.getQueryString());
                statistics.addStatementsCount();
            }
            try {
                bindStatement(handlers, statement, rowValues);
                if (actions == null) {
                    if (useBatch) {
                        statement.addToBatch();
                        statementsInBatch++;
                    } else {
                        // Execute each row separately
                        long startTime = System.currentTimeMillis();
                        executeStatement(statistics, statement);
                        statistics.addExecuteTime(System.currentTimeMillis() - startTime);
                        long rowCount = statement.getUpdateRowCount();
                        if (rowCount > 0) {
                            statistics.addRowsUpdated(rowCount);
                        }
                        // Read keys
                        if (keysReceiver != null) {
                            try {
                                readKeys(statement.getSession(), statement, keysReceiver);
                            } catch (Exception e) {
                                log.warn("Error reading auto-generated keys", e);
                            }
                        }
                    }
                } else {
                    String queryString = formatQueryParameters(session, statement.getQueryString(), handlers, rowValues);
                    actions.add(new SQLDatabasePersistAction("Execute statement", queryString));
                }
            } finally {
                if (!reuse) {
                    statement.close();
                }
                if (rowIndex > 0 && rowIndex % 100 == 0) {
                    session.getProgressMonitor().subTask("Save batch (" + rowIndex + " of " + values.size() + ")");
                }
            }
        }
        values.clear();
        if (statementsInBatch > 0) {
            if (actions == null) {
                flushBatch(statistics, statement);
            }
            statement.close();
            statement = null;
        }
    } finally {
        if (reuseStatement && statement != null) {
            statement.close();
        }
        if (!useBatch && !values.isEmpty()) {
            values.clear();
        }
    }
    return statistics;
}
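The statement-reuse logic above hinges on comparing each row's null mask with the previous row's, because a row with a different set of NULL columns generally requires a different prepared statement. The standalone sketch below isolates just that decision; the sample rows and the printed trace are illustrative and use no DBeaver API.

import java.util.Arrays;
import java.util.List;

public class NullMaskSketch {
    public static void main(String[] args) {
        // Three rows over two columns; the third row introduces a NULL, so the pending
        // batch must be flushed and a new statement prepared, mirroring the
        // prevNulls/nulls comparison in processBatch above.
        List<Object[]> rows = List.of(
            new Object[]{1, "a"},
            new Object[]{2, "b"},
            new Object[]{3, null});
        boolean[] prevNulls = new boolean[2];
        int statementsInBatch = 0;
        for (Object[] row : rows) {
            boolean[] nulls = new boolean[row.length];
            for (int i = 0; i < row.length; i++) {
                nulls[i] = row[i] == null;
            }
            if (!Arrays.equals(prevNulls, nulls) && statementsInBatch > 0) {
                System.out.println("null mask changed -> flush batch of " + statementsInBatch + ", re-prepare statement");
                statementsInBatch = 0;
            }
            prevNulls = nulls;
            statementsInBatch++;
            System.out.println("row " + Arrays.toString(row) + " added to batch (" + statementsInBatch + " pending)");
        }
        System.out.println("flush final batch of " + statementsInBatch);
    }
}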