use of org.jumpmind.symmetric.model.OutgoingBatches in project symmetric-ds by JumpMind.
In class DataExtractorService, the method extractToPayload:
public List<OutgoingBatchWithPayload> extractToPayload(ProcessInfo processInfo, Node targetNode,
        PayloadType payloadType, boolean useJdbcTimestampFormat, boolean useUpsertStatements,
        boolean useDelimiterIdentifiers) {
    OutgoingBatches batches = outgoingBatchService.getOutgoingBatches(targetNode.getNodeId(), false);
    if (batches.containsBatches()) {
        ChannelMap channelMap = configurationService.getSuspendIgnoreChannelLists(targetNode.getNodeId());
        List<OutgoingBatch> activeBatches = filterBatchesForExtraction(batches, channelMap);
        if (activeBatches.size() > 0) {
            IDdlBuilder builder = DdlBuilderFactory.createDdlBuilder(targetNode.getDatabaseType());
            if (builder == null) {
                throw new IllegalStateException("Could not find a ddl builder registered for the database type of "
                        + targetNode.getDatabaseType() + ". Please check the database type setting for node '"
                        + targetNode.getNodeId() + "'");
            }
            StructureDataWriter writer = new StructureDataWriter(symmetricDialect.getPlatform(),
                    targetNode.getDatabaseType(), payloadType, useDelimiterIdentifiers,
                    symmetricDialect.getBinaryEncoding(), useJdbcTimestampFormat, useUpsertStatements);
            List<OutgoingBatch> extractedBatches = extract(processInfo, targetNode, activeBatches, writer,
                    ExtractMode.FOR_PAYLOAD_CLIENT);
            List<OutgoingBatchWithPayload> batchesWithPayload = new ArrayList<OutgoingBatchWithPayload>();
            for (OutgoingBatch batch : extractedBatches) {
                OutgoingBatchWithPayload batchWithPayload = new OutgoingBatchWithPayload(batch, payloadType);
                batchWithPayload.setPayload(writer.getPayloadMap().get(batch.getBatchId()));
                batchWithPayload.setPayloadType(payloadType);
                batchesWithPayload.add(batchWithPayload);
            }
            return batchesWithPayload;
        }
    }
    return Collections.emptyList();
}
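A minimal caller-side sketch (not part of the project source) showing how the flags map onto the signature above. It assumes the method is reachable through the IOutgoingBatchService-style service interface IDataExtractorService, and that the ProcessInfo, Node, and PayloadType values are supplied by the caller; the helper name extractPending and the flag choices are illustrative.

public List<OutgoingBatchWithPayload> extractPending(IDataExtractorService extractor,
        ProcessInfo processInfo, Node targetNode, PayloadType payloadType) {
    // Illustrative flag values; choose what the target database and consumer need.
    return extractor.extractToPayload(processInfo, targetNode, payloadType,
            true,   // useJdbcTimestampFormat
            false,  // useUpsertStatements
            true);  // useDelimiterIdentifiers
}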
use of org.jumpmind.symmetric.model.OutgoingBatches in project symmetric-ds by JumpMind.
In class OutgoingBatchService, the method getOutgoingBatchRange (node, date range, and channels overload):
public OutgoingBatches getOutgoingBatchRange(String nodeId, Date startDate, Date endDate, String... channels) {
    OutgoingBatches batches = new OutgoingBatches();
    List<OutgoingBatch> batchList = new ArrayList<OutgoingBatch>();
    for (String channel : channels) {
        batchList.addAll(sqlTemplate.query(getSql("selectOutgoingBatchPrefixSql", "selectOutgoingBatchTimeRangeSql"),
                new OutgoingBatchMapper(true), nodeId, channel, startDate, endDate));
    }
    batches.setBatches(batchList);
    return batches;
}
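A hedged usage sketch: pulling the last 24 hours of batches for a node on selected channels. The service reference, helper name, and channel ids ("default", "reload") are assumptions for illustration; the varargs parameter accepts whatever channel ids the installation defines, and one query is issued per channel.

public List<OutgoingBatch> batchesForLastDay(IOutgoingBatchService service, String nodeId) {
    Date end = new Date();
    Date start = new Date(end.getTime() - 24L * 60 * 60 * 1000);
    OutgoingBatches range = service.getOutgoingBatchRange(nodeId, start, end, "default", "reload");
    return range.getBatches();
}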
use of org.jumpmind.symmetric.model.OutgoingBatches in project symmetric-ds by JumpMind.
In class OutgoingBatchService, the method markAllAsSentForNode:
public void markAllAsSentForNode(String nodeId, boolean includeConfigChannel) {
    OutgoingBatches batches = null;
    int configCount;
    do {
        configCount = 0;
        batches = getOutgoingBatches(nodeId, true);
        List<OutgoingBatch> list = batches.getBatches();
        /*
         * Sort in reverse order so we don't get fk errors for batches that
         * are currently processing. We don't make the update transactional
         * to prevent contention in highly loaded systems
         */
        Collections.sort(list, new Comparator<OutgoingBatch>() {
            public int compare(OutgoingBatch o1, OutgoingBatch o2) {
                return -new Long(o1.getBatchId()).compareTo(o2.getBatchId());
            }
        });
        for (OutgoingBatch outgoingBatch : list) {
            if (includeConfigChannel || !outgoingBatch.getChannelId().equals(Constants.CHANNEL_CONFIG)) {
                outgoingBatch.setStatus(Status.OK);
                outgoingBatch.setErrorFlag(false);
                updateOutgoingBatch(outgoingBatch);
            } else {
                configCount++;
            }
        }
    } while (batches.getBatches().size() > configCount);
}
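Because the loop above repeats until only the (optionally excluded) config-channel batches remain, a caller can use the method to clear a node's outgoing backlog without dropping pending configuration. A minimal sketch, assuming the service reference is supplied by the caller; the helper name is illustrative.

public void clearBacklogKeepConfig(IOutgoingBatchService service, String nodeId) {
    // false: leave CHANNEL_CONFIG batches queued so configuration still reaches the node
    service.markAllAsSentForNode(nodeId, false);
}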
use of org.jumpmind.symmetric.model.OutgoingBatches in project symmetric-ds by JumpMind.
In class OutgoingBatchService, the method getOutgoingBatchRange (batch id range overload):
public OutgoingBatches getOutgoingBatchRange(long startBatchId, long endBatchId) {
    OutgoingBatches batches = new OutgoingBatches();
    batches.setBatches(sqlTemplate.query(getSql("selectOutgoingBatchPrefixSql", "selectOutgoingBatchRangeSql"),
            new OutgoingBatchMapper(true), startBatchId, endBatchId));
    return batches;
}
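A short sketch of iterating the returned range, again assuming a service reference obtained elsewhere; the helper name is illustrative. It collects the ids of batches in the range that are currently flagged in error (status ER, as used in getOutgoingBatches below).

public List<Long> errorBatchIdsInRange(IOutgoingBatchService service, long startBatchId, long endBatchId) {
    List<Long> ids = new ArrayList<Long>();
    for (OutgoingBatch batch : service.getOutgoingBatchRange(startBatchId, endBatchId).getBatches()) {
        if (batch.getStatus() == OutgoingBatch.Status.ER) {
            ids.add(batch.getBatchId());
        }
    }
    return ids;
}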
use of org.jumpmind.symmetric.model.OutgoingBatches in project symmetric-ds by JumpMind.
In class OutgoingBatchService, the method getOutgoingBatches:
/**
 * Select batches to process. Batches that are NOT in error will be returned
 * first. They will be ordered by batch id as the batches will have already
 * been created by {@link #buildOutgoingBatches(String)} in channel priority
 * order.
 */
public OutgoingBatches getOutgoingBatches(String nodeId, boolean includeDisabledChannels) {
    long ts = System.currentTimeMillis();
    final int maxNumberOfBatchesToSelect = parameterService.getInt(
            ParameterConstants.OUTGOING_BATCH_MAX_BATCHES_TO_SELECT, 1000);
    List<OutgoingBatch> list = (List<OutgoingBatch>) sqlTemplate.query(
            getSql("selectOutgoingBatchPrefixSql", "selectOutgoingBatchSql"), maxNumberOfBatchesToSelect,
            new OutgoingBatchMapper(includeDisabledChannels),
            new Object[] { nodeId, OutgoingBatch.Status.RQ.name(), OutgoingBatch.Status.NE.name(),
                    OutgoingBatch.Status.QY.name(), OutgoingBatch.Status.SE.name(), OutgoingBatch.Status.LD.name(),
                    OutgoingBatch.Status.ER.name(), OutgoingBatch.Status.IG.name() }, null);
    OutgoingBatches batches = new OutgoingBatches(list);
    List<NodeChannel> channels = new ArrayList<NodeChannel>(configurationService.getNodeChannels(nodeId, true));
    batches.sortChannels(channels);
    List<IOutgoingBatchFilter> filters = extensionService.getExtensionPointList(IOutgoingBatchFilter.class);
    List<OutgoingBatch> keepers = new ArrayList<OutgoingBatch>();
    for (NodeChannel channel : channels) {
        List<OutgoingBatch> batchesForChannel = getBatchesForChannelWindows(batches.getBatches(), nodeId, channel,
                configurationService.getNodeGroupChannelWindows(parameterService.getNodeGroupId(),
                        channel.getChannelId()));
        if (filters != null) {
            for (IOutgoingBatchFilter filter : filters) {
                batchesForChannel = filter.filter(channel, batchesForChannel);
            }
        }
        if (parameterService.is(ParameterConstants.DATA_EXTRACTOR_ENABLED)
                || channel.getChannelId().equals(Constants.CHANNEL_CONFIG)) {
            keepers.addAll(batchesForChannel);
        }
    }
    batches.setBatches(keepers);
    long executeTimeInMs = System.currentTimeMillis() - ts;
    if (executeTimeInMs > Constants.LONG_OPERATION_THRESHOLD) {
        log.info("Selecting {} outgoing batch rows for node {} took {} ms", list.size(), nodeId, executeTimeInMs);
    }
    return batches;
}
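Since the returned OutgoingBatches has already been narrowed by channel windows, registered IOutgoingBatchFilter extensions, and the data-extractor-enabled parameter, a caller can treat it as the effective work queue for the node. A hedged sketch (helper name and service wiring are assumptions) that checks whether anything is pending on a particular channel:

public boolean hasPendingWorkOnChannel(IOutgoingBatchService service, String nodeId, String channelId) {
    OutgoingBatches batches = service.getOutgoingBatches(nodeId, false);
    for (OutgoingBatch batch : batches.getBatches()) {
        if (channelId.equals(batch.getChannelId())) {
            return true;
        }
    }
    return false;
}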