Use of org.jumpmind.symmetric.model.Data in project symmetric-ds by JumpMind.
The class BshColumnTransform, method transform.
public NewAndOldValue transform(IDatabasePlatform platform, DataContext context, TransformColumn column,
        TransformedData data, Map<String, String> sourceValues, String newValue, String oldValue)
        throws IgnoreColumnException, IgnoreRowException {
    try {
        Interpreter interpreter = getInterpreter(context);
        // Bind the per-row values that the BeanShell expression can reference.
        interpreter.set("currentValue", newValue);
        interpreter.set("oldValue", oldValue);
        interpreter.set("channelId", context.getBatch().getChannelId());
        interpreter.set("includeOn", column.getIncludeOn());
        interpreter.set("sourceDmlType", data.getSourceDmlType());
        interpreter.set("sourceDmlTypeString", data.getSourceDmlType().toString());
        interpreter.set("transformedData", data);
        interpreter.set("transformColumn", column);
        Data csvData = (Data) context.get(Constants.DATA_CONTEXT_CURRENT_CSV_DATA);
        if (csvData != null && csvData.getTriggerHistory() != null) {
            interpreter.set("sourceSchemaName", csvData.getTriggerHistory().getSourceSchemaName());
            interpreter.set("sourceCatalogName", csvData.getTriggerHistory().getSourceCatalogName());
            interpreter.set("sourceTableName", csvData.getTriggerHistory().getSourceTableName());
        }
        // Expose every source column value, both as-is and upper-cased.
        for (String columnName : sourceValues.keySet()) {
            interpreter.set(columnName.toUpperCase(), sourceValues.get(columnName));
            interpreter.set(columnName, sourceValues.get(columnName));
        }
        String transformExpression = column.getTransformExpression();
        String globalScript = parameterService.getString(ParameterConstants.BSH_TRANSFORM_GLOBAL_SCRIPT);
        String methodName = String.format("transform_%d()",
                Math.abs(transformExpression.hashCode() + (globalScript == null ? 0 : globalScript.hashCode())));
        // Compile the expression into a named BeanShell method once per context and remember that it was compiled.
        if (context.get(methodName) == null) {
            interpreter.set("log", log);
            interpreter.set("sqlTemplate", platform.getSqlTemplate());
            interpreter.set("context", context);
            interpreter.set("bshContext", bshContext);
            interpreter.set(DATA_CONTEXT_ENGINE, context.get(DATA_CONTEXT_ENGINE));
            interpreter.set(DATA_CONTEXT_TARGET_NODE, context.get(DATA_CONTEXT_TARGET_NODE));
            interpreter.set(DATA_CONTEXT_TARGET_NODE_ID, context.get(DATA_CONTEXT_TARGET_NODE_ID));
            interpreter.set(DATA_CONTEXT_TARGET_NODE_GROUP_ID, context.get(DATA_CONTEXT_TARGET_NODE_GROUP_ID));
            interpreter.set(DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, context.get(DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID));
            interpreter.set(DATA_CONTEXT_SOURCE_NODE, context.get(DATA_CONTEXT_SOURCE_NODE));
            interpreter.set(DATA_CONTEXT_SOURCE_NODE_ID, context.get(DATA_CONTEXT_SOURCE_NODE_ID));
            interpreter.set(DATA_CONTEXT_SOURCE_NODE_GROUP_ID, context.get(DATA_CONTEXT_SOURCE_NODE_GROUP_ID));
            interpreter.set(DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, context.get(DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID));
            if (StringUtils.isNotBlank(globalScript)) {
                interpreter.eval(globalScript);
            }
            interpreter.eval(String.format("%s {\n%s\n}", methodName, transformExpression));
            context.put(methodName, Boolean.TRUE);
        }
        // Invoke the generated method, then clean up the per-row bindings.
        Object result = interpreter.eval(methodName);
        if (csvData != null && csvData.getTriggerHistory() != null) {
            interpreter.unset("sourceSchemaName");
            interpreter.unset("sourceCatalogName");
            interpreter.unset("sourceTableName");
        }
        for (String columnName : sourceValues.keySet()) {
            interpreter.unset(columnName.toUpperCase());
            interpreter.unset(columnName);
        }
        if (result instanceof String) {
            return new NewAndOldValue((String) result, null);
        } else if (result instanceof NewAndOldValue) {
            return (NewAndOldValue) result;
        } else if (result != null) {
            return new NewAndOldValue(result.toString(), null);
        } else {
            return null;
        }
    } catch (TargetError evalEx) {
        // Unwrap the exception thrown by the script so ignore requests propagate as such.
        Throwable ex = evalEx.getTarget();
        if (ex instanceof IgnoreColumnException) {
            throw (IgnoreColumnException) ex;
        } else if (ex instanceof IgnoreRowException) {
            throw (IgnoreRowException) ex;
        } else {
            throw new TransformColumnException(String.format(
                    "Beanshell script error on line %d for target column %s on transform %s",
                    evalEx.getErrorLineNumber(), column.getTargetColumnName(), column.getTransformId()), ex);
        }
    } catch (Exception ex) {
        if (ex instanceof IgnoreColumnException) {
            throw (IgnoreColumnException) ex;
        } else if (ex instanceof IgnoreRowException) {
            throw (IgnoreRowException) ex;
        } else {
            log.error(String.format("Beanshell script error for target column %s on transform %s",
                    column.getTargetColumnName(), column.getTransformId()), ex);
            throw new TransformColumnException(ex);
        }
    }
}
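The transform expression this method compiles is a plain BeanShell snippet whose return value becomes the new column value; currentValue, oldValue, and every source column (both as-is and upper-cased) are pre-bound, as shown above. A minimal sketch of such an expression, assuming a hypothetical source column named LAST_NAME, could look like this:

// BeanShell transform expression (sketch only; LAST_NAME is a hypothetical source column
// bound by the sourceValues loop above, and the returned String becomes the new value).
if (currentValue == null || currentValue.trim().length() == 0) {
    return LAST_NAME;
}
return currentValue + " " + LAST_NAME;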
Use of org.jumpmind.symmetric.model.Data in project symmetric-ds by JumpMind.
The class DataGapRouteReader, method execute.
protected void execute() {
    long maxPeekAheadSizeInBytes = (long) (Runtime.getRuntime().maxMemory() * percentOfHeapToUse);
    ISymmetricDialect symmetricDialect = engine.getSymmetricDialect();
    ISqlReadCursor<Data> cursor = null;
    processInfo = engine.getStatisticManager().newProcessInfo(
            new ProcessInfoKey(engine.getNodeService().findIdentityNodeId(), null, ProcessType.ROUTER_READER));
    processInfo.setCurrentChannelId(context.getChannel().getChannelId());
    try {
        int lastPeekAheadIndex = 0;
        int dataCount = 0;
        long maxDataToRoute = context.getChannel().getMaxDataToRoute();
        List<Data> peekAheadQueue = new ArrayList<Data>(peekAheadCount);
        boolean transactional = !context.getChannel().getBatchAlgorithm().equals(NonTransactionalBatchAlgorithm.NAME)
                || !symmetricDialect.supportsTransactionId();
        processInfo.setStatus(Status.QUERYING);
        cursor = prepareCursor();
        processInfo.setStatus(Status.EXTRACTING);
        boolean moreData = true;
        while (dataCount < maxDataToRoute || (lastTransactionId != null && transactional)) {
            // Refill the peek-ahead queue while a transaction is still open or the queue is empty.
            if (moreData && (lastTransactionId != null || peekAheadQueue.size() == 0)) {
                moreData = fillPeekAheadQueue(peekAheadQueue, peekAheadCount, cursor);
            }
            int dataWithSameTransactionIdCount = 0;
            // Drain rows until a transaction id is picked up or the routing limit is reached.
            while (peekAheadQueue.size() > 0 && lastTransactionId == null && dataCount < maxDataToRoute) {
                Data data = peekAheadQueue.remove(0);
                copyToQueue(data);
                dataCount++;
                processInfo.incrementCurrentDataCount();
                processInfo.setCurrentTableName(data.getTableName());
                lastTransactionId = data.getTransactionId();
                context.addTransaction(lastTransactionId);
                dataWithSameTransactionIdCount++;
            }
            if (lastTransactionId != null && peekAheadQueue.size() > 0) {
                // Pull any queued rows that belong to the same transaction so it is routed as a unit.
                Iterator<Data> datas = peekAheadQueue.iterator();
                int index = 0;
                while (datas.hasNext() && (dataCount < maxDataToRoute || transactional)) {
                    Data data = datas.next();
                    if (lastTransactionId.equals(data.getTransactionId())) {
                        dataWithSameTransactionIdCount++;
                        datas.remove();
                        copyToQueue(data);
                        dataCount++;
                        processInfo.incrementCurrentDataCount();
                        processInfo.setCurrentTableName(data.getTableName());
                        lastPeekAheadIndex = index;
                    } else {
                        context.addTransaction(data.getTransactionId());
                        index++;
                    }
                }
                if (dataWithSameTransactionIdCount == 0 || peekAheadQueue.size() - lastPeekAheadIndex > peekAheadCount) {
                    lastTransactionId = null;
                    lastPeekAheadIndex = 0;
                }
            }
            if (!moreData && peekAheadQueue.size() == 0) {
                // We've reached the end of the result set.
                break;
            } else if (peekAheadSizeInBytes >= maxPeekAheadSizeInBytes) {
                log.info("The peek ahead queue has reached its max size of {} bytes. Finishing reading the current transaction", peekAheadSizeInBytes);
                finishTransactionMode = true;
                peekAheadQueue.clear();
            }
        }
        processInfo.setStatus(Status.OK);
    } catch (Throwable ex) {
        processInfo.setStatus(Status.ERROR);
        String msg = "";
        if (engine.getDatabasePlatform().getName().startsWith(DatabaseNamesConstants.FIREBIRD)
                && isNotBlank(ex.getMessage())
                && ex.getMessage().contains("arithmetic exception, numeric overflow, or string truncation")) {
            msg = "There is a good chance that the truncation error you are receiving is because contains_big_lobs on the '"
                    + context.getChannel().getChannelId()
                    + "' channel needs to be turned on. Firebird casts to varchar when this setting is not turned on and the data length has most likely exceeded the 10k row size";
        }
        log.error(msg, ex);
    } finally {
        if (cursor != null) {
            cursor.close();
        }
        copyToQueue(new EOD());
        reading = false;
    }
}
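The Firebird hint in the catch block refers to the contains_big_lobs flag on the channel. A minimal sketch of turning it on through the configuration service follows; the getChannel/setContainsBigLob/saveChannel calls and the channel id are assumptions based on the Channel model, so verify them against your SymmetricDS version:

// Sketch only (assumptions: IConfigurationService.getChannel(String) and
// saveChannel(Channel, boolean) exist in this form, Channel exposes setContainsBigLob(boolean),
// and "mychannel" is the channel named in the error message).
Channel channel = engine.getConfigurationService().getChannel("mychannel");
channel.setContainsBigLob(true);
engine.getConfigurationService().saveChannel(channel, true);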
Use of org.jumpmind.symmetric.model.Data in project symmetric-ds by JumpMind.
The class DataGapRouteReader, method prepareCursor.
protected ISqlReadCursor<Data> prepareCursor() {
    IParameterService parameterService = engine.getParameterService();
    int numberOfGapsToQualify = parameterService.getInt(ParameterConstants.ROUTING_MAX_GAPS_TO_QUALIFY_IN_SQL, 100);
    int maxGapsBeforeGreaterThanQuery = parameterService.getInt(
            ParameterConstants.ROUTING_DATA_READER_THRESHOLD_GAPS_TO_USE_GREATER_QUERY, 100);
    boolean useGreaterThanDataId = false;
    if (maxGapsBeforeGreaterThanQuery > 0 && this.dataGaps.size() > maxGapsBeforeGreaterThanQuery) {
        useGreaterThanDataId = true;
    }
    String channelId = context.getChannel().getChannelId();
    String sql = null;
    Boolean lastSelectUsedGreaterThanQuery = lastSelectUsedGreaterThanQueryByEngineName.get(parameterService.getEngineName());
    if (lastSelectUsedGreaterThanQuery == null) {
        lastSelectUsedGreaterThanQuery = Boolean.FALSE;
    }
    if (useGreaterThanDataId) {
        // Too many gaps to enumerate: select everything at or beyond the start of the first gap.
        sql = getSql("selectDataUsingStartDataId", context.getChannel().getChannel());
        if (!lastSelectUsedGreaterThanQuery) {
            log.info("Switching to select from the data table where data_id >= start gap because there were {} gaps found "
                    + "which was more than the configured threshold of {}", dataGaps.size(), maxGapsBeforeGreaterThanQuery);
            lastSelectUsedGreaterThanQueryByEngineName.put(parameterService.getEngineName(), Boolean.TRUE);
        }
    } else {
        // Qualify the query with an explicit data_id range for each gap.
        sql = qualifyUsingDataGaps(dataGaps, numberOfGapsToQualify, getSql("selectDataUsingGapsSql", context.getChannel().getChannel()));
        if (lastSelectUsedGreaterThanQuery) {
            log.info("Switching to select from the data table where data_id between gaps");
            lastSelectUsedGreaterThanQueryByEngineName.put(parameterService.getEngineName(), Boolean.FALSE);
        }
    }
    if (parameterService.is(ParameterConstants.ROUTING_DATA_READER_ORDER_BY_DATA_ID_ENABLED, true)) {
        sql = String.format("%s %s", sql, engine.getRouterService().getSql("orderByDataId"));
    }
    ISqlTemplate sqlTemplate = engine.getSymmetricDialect().getPlatform().getSqlTemplate();
    Object[] args = null;
    int[] types = null;
    int dataIdSqlType = engine.getSymmetricDialect().getSqlTypeForIds();
    if (useGreaterThanDataId) {
        args = new Object[] { channelId, dataGaps.get(0).getStartId() };
        types = new int[] { Types.VARCHAR, dataIdSqlType };
    } else {
        // One argument for the channel id plus a start/end pair for each qualified gap.
        int numberOfArgs = 1 + 2 * (numberOfGapsToQualify < dataGaps.size() ? numberOfGapsToQualify : dataGaps.size());
        args = new Object[numberOfArgs];
        types = new int[numberOfArgs];
        args[0] = channelId;
        types[0] = Types.VARCHAR;
        for (int i = 0; i < numberOfGapsToQualify && i < dataGaps.size(); i++) {
            DataGap gap = dataGaps.get(i);
            args[i * 2 + 1] = gap.getStartId();
            types[i * 2 + 1] = dataIdSqlType;
            if ((i + 1) == numberOfGapsToQualify && (i + 1) < dataGaps.size()) {
                /*
                 * There were more gaps than we are going to use in the SQL.
                 * Use the last gap as the end data id for the last range.
                 */
                args[i * 2 + 2] = dataGaps.get(dataGaps.size() - 1).getEndId();
            } else {
                args[i * 2 + 2] = gap.getEndId();
            }
            types[i * 2 + 2] = dataIdSqlType;
        }
    }
    this.currentGap = dataGaps.remove(0);
    return sqlTemplate.queryForCursor(sql, new ISqlRowMapper<Data>() {

        public Data mapRow(Row row) {
            return engine.getDataService().mapData(row);
        }
    }, args, types);
}
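For reference, the gap-qualified statement that this method binds its arguments to has roughly the following shape when two gaps are qualified; the real text comes from the selectDataUsingGapsSql entry plus qualifyUsingDataGaps, so this is an approximation, not the exact SQL:

// Approximate shape only (assumption): placeholders line up with the args/types arrays above,
// i.e. channel_id first, then a start/end data_id pair per qualified gap.
String approximateSql = "select ... from sym_data d where d.channel_id = ?"
        + " and ((d.data_id between ? and ?) or (d.data_id between ? and ?))"
        + " order by d.data_id";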
Use of org.jumpmind.symmetric.model.Data in project symmetric-ds by JumpMind.
The class DataService, method insertSqlEvent.
public void insertSqlEvent(Node targetNode, String sql, boolean isLoad, long loadId, String createBy) {
    TriggerHistory history = engine.getTriggerRouterService().findTriggerHistoryForGenericSync();
    Trigger trigger = engine.getTriggerRouterService().getTriggerById(history.getTriggerId(), false);
    String reloadChannelId = getReloadChannelIdForTrigger(trigger, engine.getConfigurationService().getChannels(false));
    Data data = new Data(history.getSourceTableName(), DataEventType.SQL, CsvUtils.escapeCsvData(sql), null, history,
            isLoad ? reloadChannelId : Constants.CHANNEL_CONFIG, null, null);
    if (isLoad) {
        insertDataAndDataEventAndOutgoingBatch(data, targetNode.getNodeId(), Constants.UNKNOWN_ROUTER_ID, isLoad, loadId, createBy);
    } else {
        data.setNodeList(targetNode.getNodeId());
        insertData(data);
    }
}
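A hedged usage sketch for pushing an ad-hoc SQL statement to a single node outside of an initial load; the node id, statement, and createBy value are hypothetical, and the DataService is obtained through the engine the same way the other snippets on this page do:

// Sketch: send a one-off SQL statement to node "001" (node id and statement are examples only).
Node targetNode = engine.getNodeService().findNode("001");
engine.getDataService().insertSqlEvent(targetNode, "update item set price = price * 1.1", false, -1, "admin");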
Use of org.jumpmind.symmetric.model.Data in project symmetric-ds by JumpMind.
The class DataService, method sendScript.
public void sendScript(String nodeId, String script, boolean isLoad) {
    Node targetNode = engine.getNodeService().findNode(nodeId);
    TriggerHistory history = engine.getTriggerRouterService().findTriggerHistoryForGenericSync();
    Data data = new Data(history.getSourceTableName(), DataEventType.BSH, CsvUtils.escapeCsvData(script), null, history,
            Constants.CHANNEL_CONFIG, null, null);
    data.setNodeList(nodeId);
    if (!isLoad) {
        insertData(data);
    } else {
        insertDataAndDataEventAndOutgoingBatch(data, targetNode.getNodeId(), Constants.UNKNOWN_ROUTER_ID, isLoad, -1, null);
    }
}
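And a matching hedged sketch for sendScript, which ships a BeanShell script to the named node on the config channel; the node id and script body are hypothetical examples:

// Sketch: run a BeanShell script on node "001" without wrapping it in a load.
engine.getDataService().sendScript("001", "System.out.println(\"hello from the target node\");", false);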