Use of org.jumpmind.symmetric.model.OutgoingBatch in project symmetric-ds by JumpMind.
Class MultiBatchStagingWriter, method startNewBatch.
protected void startNewBatch() {
    this.nextBatch();
    long memoryThresholdInBytes = this.dataExtractorService.parameterService
            .getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
    this.currentDataWriter = buildWriter(memoryThresholdInBytes);
    this.batch = new Batch(BatchType.EXTRACT, outgoingBatch.getBatchId(), outgoingBatch.getChannelId(),
            this.dataExtractorService.symmetricDialect.getBinaryEncoding(), sourceNodeId,
            outgoingBatch.getNodeId(), false);
    this.currentDataWriter.open(context);
    this.currentDataWriter.start(batch);
    processInfo.incrementBatchCount();
    if (table == null) {
        throw new SymmetricException("'table' cannot be null while starting a new batch. Batch: "
                + outgoingBatch + ". Check trigger/router configs.");
    }
    this.currentDataWriter.start(table);
}
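startNewBatch() establishes the writer call order: open the writer with the context, start the batch, then start the table before any rows are written, failing fast if the table is missing. The sketch below restates that lifecycle in a self-contained form; BatchWriter, copyRows, and every type in it are hypothetical stand-ins for illustration, not SymmetricDS APIs.

// Hypothetical sketch of the writer lifecycle implied by startNewBatch().
// BatchWriter and its methods are illustrative only, not SymmetricDS classes.
interface BatchWriter {
    void open(Object context);      // acquire staging resources once per writer
    void startBatch(long batchId);  // begin a new extract batch
    void startTable(String table);  // must happen before any row is written
    void writeRow(String[] row);
    void endTable();
    void endBatch();
    void close();
}

final class BatchLifecycleSketch {
    static void copyRows(BatchWriter writer, long batchId, String table, Iterable<String[]> rows) {
        writer.open(new Object());
        writer.startBatch(batchId);
        if (table == null) {
            // mirrors the guard in startNewBatch(): a null table is a configuration error
            throw new IllegalStateException("'table' cannot be null while starting a new batch");
        }
        writer.startTable(table);
        for (String[] row : rows) {
            writer.writeRow(row);
        }
        writer.endTable();
        writer.endBatch();
        writer.close();
    }
}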
Use of org.jumpmind.symmetric.model.OutgoingBatch in project symmetric-ds by JumpMind.
Class AbstractRouterServiceTest, method filterForChannels.
protected void filterForChannels(OutgoingBatches batches, NodeChannel... channels) {
    for (Iterator<OutgoingBatch> iterator = batches.getBatches().iterator(); iterator.hasNext();) {
        OutgoingBatch outgoingBatch = iterator.next();
        boolean foundChannel = false;
        for (NodeChannel nodeChannel : channels) {
            if (outgoingBatch.getChannelId().equals(nodeChannel.getChannelId())) {
                foundChannel = true;
            }
        }
        if (!foundChannel) {
            iterator.remove();
        }
    }
}
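filterForChannels prunes the batch list in place with Iterator.remove(), keeping only batches whose channel id matches one of the given NodeChannels. The same pattern reduced to plain collections is sketched below; BatchStub and the channel ids are made up for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class ChannelFilterSketch {
    // Minimal stand-in for a batch that only carries a channel id.
    record BatchStub(long batchId, String channelId) {}

    static void filterForChannels(List<BatchStub> batches, String... allowedChannels) {
        for (Iterator<BatchStub> it = batches.iterator(); it.hasNext();) {
            BatchStub batch = it.next();
            boolean foundChannel = false;
            for (String channel : allowedChannels) {
                if (batch.channelId().equals(channel)) {
                    foundChannel = true;
                }
            }
            if (!foundChannel) {
                it.remove(); // safe in-place removal while iterating
            }
        }
    }

    public static void main(String[] args) {
        List<BatchStub> batches = new ArrayList<>(Arrays.asList(
                new BatchStub(1, "sale_transaction"),
                new BatchStub(2, "config"),
                new BatchStub(3, "sale_transaction")));
        filterForChannels(batches, "sale_transaction");
        System.out.println(batches); // only the two sale_transaction batches remain
    }
}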
Use of org.jumpmind.symmetric.model.OutgoingBatch in project symmetric-ds by JumpMind.
Class RouterService, method completeBatchesAndCommit.
protected void completeBatchesAndCommit(ChannelRouterContext context) {
    Set<IDataRouter> usedRouters = new HashSet<IDataRouter>(context.getUsedDataRouters());
    List<OutgoingBatch> batches = new ArrayList<OutgoingBatch>(context.getBatchesByNodes().values());
    gapDetector.setFullGapAnalysis(context.getSqlTransaction(), true);
    if (engine.getParameterService().is(ParameterConstants.ROUTING_LOG_STATS_ON_BATCH_ERROR)) {
        engine.getStatisticManager().addRouterStats(context.getStartDataId(), context.getEndDataId(),
                context.getDataReadCount(), context.getPeekAheadFillCount(),
                context.getDataGaps(), context.getTransactions(), batches);
    }
    for (OutgoingBatch batch : batches) {
        batch.setRouterMillis(System.currentTimeMillis() - batch.getCreateTime().getTime());
        for (IDataRouter dataRouter : usedRouters) {
            dataRouter.completeBatch(context, batch);
        }
        if (Constants.UNROUTED_NODE_ID.equals(batch.getNodeId())) {
            batch.setStatus(Status.OK);
        } else {
            batch.setStatus(Status.NE);
        }
        engine.getOutgoingBatchService().updateOutgoingBatch(context.getSqlTransaction(), batch);
        context.getBatchesByNodes().remove(batch.getNodeId());
    }
    context.commit();
    for (IDataRouter dataRouter : usedRouters) {
        dataRouter.contextCommitted(context);
    }
    context.setNeedsCommitted(false);
}
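The loop above moves each routed batch out of the routing (RT) state: batches addressed to the unrouted pseudo-node are marked OK because there is nothing to send, while all others become NE (new) and wait for extraction. A minimal restatement of that decision is sketched below; the enum and the "-1" node id are assumptions made for illustration, not SymmetricDS code.

// Illustrative-only status decision mirrored from completeBatchesAndCommit();
// the enum values echo OutgoingBatch.Status but this class is not SymmetricDS code.
public class BatchStatusSketch {
    enum Status { RT, NE, OK }

    static final String UNROUTED_NODE_ID = "-1"; // assumed placeholder for the unrouted pseudo-node

    static Status statusAfterRouting(String targetNodeId) {
        // Batches routed to the "unrouted" node carry nothing to deliver, so they are
        // marked OK immediately; real batches move from RT (routing) to NE (new).
        return UNROUTED_NODE_ID.equals(targetNodeId) ? Status.OK : Status.NE;
    }
}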
Use of org.jumpmind.symmetric.model.OutgoingBatch in project symmetric-ds by JumpMind.
Class PullUriHandler, method logDataReceivedFromPush.
private void logDataReceivedFromPush(Node targetNode, List<OutgoingBatch> batchList) {
    int batchesCount = 0;
    int dataCount = 0;
    for (OutgoingBatch outgoingBatch : batchList) {
        if (outgoingBatch.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.OK) {
            batchesCount++;
            dataCount += outgoingBatch.getDataEventCount();
        }
    }
    if (batchesCount > 0) {
        log.info("{} data and {} batches sent during pull request from {}",
                new Object[] { dataCount, batchesCount, targetNode.toString() });
    }
}
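The handler only counts batches that finished with status OK and stays silent when nothing was sent. A simplified version of that tally is sketched below; BatchSummary and summarize are made-up names used only to illustrate the counting logic.

import java.util.List;

// Simplified tally of completed work, mirroring logDataReceivedFromPush();
// BatchSummary is a made-up record, not a SymmetricDS type.
public class PullLogSketch {
    record BatchSummary(boolean ok, long dataEventCount) {}

    static String summarize(List<BatchSummary> batches, String targetNode) {
        int batchesCount = 0;
        long dataCount = 0;
        for (BatchSummary batch : batches) {
            if (batch.ok()) { // only count batches that completed with status OK
                batchesCount++;
                dataCount += batch.dataEventCount();
            }
        }
        // like the real method, report nothing when no batch finished
        return batchesCount > 0
                ? dataCount + " data and " + batchesCount + " batches sent during pull request from " + targetNode
                : null;
    }
}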
Use of org.jumpmind.symmetric.model.OutgoingBatch in project symmetric-ds by JumpMind.
Class RouterService, method insertDataEvents.
protected int insertDataEvents(ProcessInfo processInfo, ChannelRouterContext context,
        DataMetaData dataMetaData, Collection<String> nodeIds) {
    int numberOfDataEventsInserted = 0;
    if (nodeIds == null || nodeIds.size() == 0) {
        nodeIds = new HashSet<String>(1);
        nodeIds.add(Constants.UNROUTED_NODE_ID);
    }
    long ts = System.currentTimeMillis();
    long batchIdToReuse = -1;
    boolean dataEventAdded = false;
    for (String nodeId : nodeIds) {
        if (nodeId != null) {
            Map<String, OutgoingBatch> batches = context.getBatchesByNodes();
            OutgoingBatch batch = batches.get(nodeId);
            if (batch == null) {
                batch = new OutgoingBatch(nodeId, dataMetaData.getNodeChannel().getChannelId(), Status.RT);
                batch.setBatchId(batchIdToReuse);
                batch.setCommonFlag(context.isProduceCommonBatches());
                log.debug("About to insert a new batch for node {} on the '{}' channel. Batches in progress are: {}.",
                        new Object[] { nodeId, batch.getChannelId(), context.getBatchesByNodes().values() });
                engine.getOutgoingBatchService().insertOutgoingBatch(batch);
                processInfo.incrementBatchCount();
                context.getBatchesByNodes().put(nodeId, batch);
                // if in reuse mode, then share the batch id
                if (context.isProduceCommonBatches()) {
                    batchIdToReuse = batch.getBatchId();
                }
            }
            if (dataMetaData.getData().getDataEventType() == DataEventType.RELOAD) {
                long loadId = context.getLastLoadId();
                if (loadId < 0) {
                    loadId = engine.getSequenceService().nextVal(context.getSqlTransaction(),
                            Constants.SEQUENCE_OUTGOING_BATCH_LOAD_ID);
                    context.setLastLoadId(loadId);
                }
                batch.setLoadId(loadId);
            } else {
                context.setLastLoadId(-1);
            }
            batch.incrementEventCount(dataMetaData.getData().getDataEventType());
            batch.incrementDataEventCount();
            batch.incrementTableCount(dataMetaData.getTable().getNameLowerCase());
            if (!context.isProduceCommonBatches() || (context.isProduceCommonBatches() && !dataEventAdded)) {
                Router router = dataMetaData.getRouter();
                context.addDataEvent(dataMetaData.getData().getDataId(), batch.getBatchId(),
                        router != null ? router.getRouterId() : Constants.UNKNOWN_ROUTER_ID);
                numberOfDataEventsInserted++;
                dataEventAdded = true;
            }
            Map<String, IBatchAlgorithm> batchAlgorithms = extensionService.getExtensionPointMap(IBatchAlgorithm.class);
            if (batchAlgorithms.get(context.getChannel().getBatchAlgorithm()).isBatchComplete(batch, dataMetaData, context)) {
                context.setNeedsCommitted(true);
            }
        }
    }
    context.incrementStat(System.currentTimeMillis() - ts, ChannelRouterContext.STAT_INSERT_DATA_EVENTS_MS);
    return numberOfDataEventsInserted;
}
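insertDataEvents creates at most one OutgoingBatch per target node and channel, lazily and in status RT, shares a single batch id across nodes when common batches are enabled, and asks the channel's batch algorithm whether the batch is complete and should be committed. The sketch below illustrates the lazy per-node batching with a simple size-based completion rule; MaxRowsBatchSketch and its threshold are assumptions for illustration, not the batch algorithms shipped with SymmetricDS.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the per-node batching strategy in insertDataEvents():
// a batch is created lazily per target node and flagged complete once it holds
// enough events. This class and its size check are assumptions, not SymmetricDS code.
public class MaxRowsBatchSketch {
    static class BatchStub {
        long eventCount;
    }

    private final Map<String, BatchStub> batchesByNode = new HashMap<>();
    private final long maxEventsPerBatch;

    MaxRowsBatchSketch(long maxEventsPerBatch) {
        this.maxEventsPerBatch = maxEventsPerBatch;
    }

    /** Returns true when the node's current batch should be committed and a new one started. */
    boolean addEvent(String nodeId) {
        BatchStub batch = batchesByNode.computeIfAbsent(nodeId, id -> new BatchStub());
        batch.eventCount++;
        boolean complete = batch.eventCount >= maxEventsPerBatch;
        if (complete) {
            batchesByNode.remove(nodeId); // the next event for this node opens a fresh batch
        }
        return complete;
    }
}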