Usage of org.jumpmind.symmetric.model.ProcessInfo in the symmetric-ds project by JumpMind: class DataGapFastDetector, method afterRouting.
/**
 * Always make sure sym_data_gap is up to date to make sure that we don't
 * dual route data.
 * <p>
 * Runs after routing finishes: reconciles the in-memory gap list against the
 * data IDs that routing just read. Gaps that now contain data are deleted and
 * replaced by smaller sub-gaps around the IDs found; empty gaps that have
 * expired are discarded. The run is tracked as a GAP_DETECT process so it is
 * visible in process monitoring.
 */
public void afterRouting() {
    // Register this run with the statistic manager so its progress/status shows up.
    ProcessInfo processInfo = this.statisticManager.newProcessInfo(new ProcessInfoKey(nodeService.findIdentityNodeId(), null, ProcessType.GAP_DETECT));
    processInfo.setStatus(Status.PROCESSING);
    // Timestamp used to throttle the periodic progress log below.
    long printStats = System.currentTimeMillis();
    // NOTE(review): "gapTimout" is a typo for "gapTimeout"; kept as-is (local name only).
    long gapTimoutInMs = parameterService.getLong(ParameterConstants.ROUTING_STALE_DATA_ID_GAP_TIME);
    final int dataIdIncrementBy = parameterService.getInt(ParameterConstants.DATA_ID_INCREMENT_BY);
    Date currentDate = new Date(routingStartTime);
    // "Busy expire" mode: when routing did NOT read all pending data (isAllDataRead
    // is false, i.e. the system is busy), gap expiration is only performed once per
    // ROUTING_STALE_GAP_BUSY_EXPIRE_TIME interval instead of on every run.
    boolean isBusyExpire = false;
    long lastBusyExpireRunTime = getLastBusyExpireRunTime();
    if (!isAllDataRead) {
        if (lastBusyExpireRunTime == 0) {
            // First busy run: start the busy-expire timer; do not expire anything yet.
            setLastBusyExpireRunTime(System.currentTimeMillis());
        } else {
            long busyExpireMillis = parameterService.getLong(ParameterConstants.ROUTING_STALE_GAP_BUSY_EXPIRE_TIME);
            isBusyExpire = System.currentTimeMillis() - lastBusyExpireRunTime >= busyExpireMillis;
        }
    } else if (lastBusyExpireRunTime != 0) {
        // All data was read this run, so reset the busy-expire timer.
        setLastBusyExpireRunTime(0);
    }
    try {
        long ts = System.currentTimeMillis();
        long lastDataId = -1;
        int dataIdCount = 0;
        int rangeChecked = 0;
        int expireChecked = 0;
        gapsAll.addAll(gaps);
        // Bucket every data ID read during routing into the gap that contains it.
        Map<DataGap, List<Long>> dataIdMap = getDataIdMap();
        if (System.currentTimeMillis() - ts > 30000) {
            log.info("It took {}ms to map {} data IDs into {} gaps", new Object[] { System.currentTimeMillis() - ts, dataIds.size(), gaps.size() });
        }
        for (final DataGap dataGap : gaps) {
            // The last gap in the list is the open-ended "tail" gap; it is never
            // deleted/expired here (it is replaced at the bottom of this method).
            final boolean lastGap = dataGap.equals(gaps.get(gaps.size() - 1));
            lastDataId = -1;
            List<Long> ids = dataIdMap.get(dataGap);
            dataIdCount += ids.size();
            rangeChecked += dataGap.getEndId() - dataGap.getStartId();
            // if we found data in the gap
            if (ids.size() > 0) {
                // The gap is (partially) filled: delete it; any remaining holes are
                // re-added as new sub-gaps by the loop over ids below.
                gapsDeleted.add(dataGap);
                gapsAll.remove(dataGap);
                // if we did not find data in the gap and it was not the last gap
            } else if (!lastGap && (isAllDataRead || isBusyExpire)) {
                Date createTime = dataGap.getCreateTime();
                boolean isExpired = false;
                if (supportsTransactionViews) {
                    // With transaction views: a gap is stale once it predates the
                    // earliest in-flight transaction (or there are none at all).
                    isExpired = createTime != null && (createTime.getTime() < earliestTransactionTime || earliestTransactionTime == 0);
                } else {
                    // Otherwise fall back to a simple age-based timeout.
                    isExpired = createTime != null && routingStartTime - createTime.getTime() > gapTimoutInMs;
                }
                if (isExpired) {
                    boolean isGapEmpty = false;
                    if (!isAllDataRead) {
                        // Busy-expire path: routing did not see everything, so double
                        // check against the database (range widened by one on each
                        // side) before discarding the gap.
                        isGapEmpty = dataService.countDataInRange(dataGap.getStartId() - 1, dataGap.getEndId() + 1) == 0;
                        expireChecked++;
                    }
                    if (isAllDataRead || isGapEmpty) {
                        if (log.isDebugEnabled()) {
                            if (dataGap.getStartId() == dataGap.getEndId()) {
                                log.debug("Found a gap in data_id at {}. Skipping it because " + (supportsTransactionViews ? "there are no pending transactions" : "the gap expired"), dataGap.getStartId());
                            } else {
                                log.debug("Found a gap in data_id from {} to {}. Skipping it because " + (supportsTransactionViews ? "there are no pending transactions" : "the gap expired"), dataGap.getStartId(), dataGap.getEndId());
                            }
                        }
                        gapsDeleted.add(dataGap);
                        gapsAll.remove(dataGap);
                    }
                }
            }
            // Walk the IDs found inside this gap and create sub-gaps for any holes
            // between them (IDs are expected dataIdIncrementBy apart).
            for (Number number : ids) {
                long dataId = number.longValue();
                processInfo.incrementCurrentDataCount();
                if (lastDataId == -1 && dataGap.getStartId() + dataIdIncrementBy <= dataId) {
                    // there was a new gap at the start
                    addDataGap(new DataGap(dataGap.getStartId(), dataId - 1, currentDate));
                } else if (lastDataId != -1 && lastDataId + dataIdIncrementBy != dataId && lastDataId != dataId) {
                    // found a gap somewhere in the existing gap
                    addDataGap(new DataGap(lastDataId + 1, dataId - 1, currentDate));
                }
                lastDataId = dataId;
            }
            // if we found data in the gap
            // (trailing hole: IDs stopped short of the gap's end)
            if (lastDataId != -1 && !lastGap && lastDataId + dataIdIncrementBy <= dataGap.getEndId()) {
                addDataGap(new DataGap(lastDataId + dataIdIncrementBy, dataGap.getEndId(), currentDate));
            }
            // Periodic progress log (at most once every 30s).
            if (System.currentTimeMillis() - printStats > 30000) {
                log.info("The data gap detection has been running for {}ms, detected {} rows over a gap range of {}, " + "found {} new gaps, found old {} gaps, and checked data in {} gaps", new Object[] { System.currentTimeMillis() - ts, dataIdCount, rangeChecked, gapsAdded.size(), gapsDeleted.size(), expireChecked });
                printStats = System.currentTimeMillis();
            }
        }
        // Re-create the open-ended tail gap after the highest data ID seen in the
        // (former) last gap, sized by maxDataToSelect.
        if (lastDataId != -1) {
            DataGap newGap = new DataGap(lastDataId + 1, lastDataId + maxDataToSelect, currentDate);
            if (addDataGap(newGap)) {
                log.debug("Inserting new last data gap: {}", newGap);
            }
        }
        // Persist the accumulated gap additions/deletions to sym_data_gap.
        printStats = saveDataGaps(ts, printStats);
        setFullGapAnalysis(false);
        if (isBusyExpire) {
            // An expire pass ran in busy mode; restart the interval timer.
            setLastBusyExpireRunTime(System.currentTimeMillis());
        }
        long updateTimeInMs = System.currentTimeMillis() - ts;
        if (updateTimeInMs > 10000) {
            log.info("Detecting gaps took {} ms", updateTimeInMs);
        }
        processInfo.setStatus(Status.OK);
    } catch (RuntimeException ex) {
        // Mark the process failed for monitoring, then let the caller handle it.
        processInfo.setStatus(Status.ERROR);
        throw ex;
    }
}
Usage of org.jumpmind.symmetric.model.ProcessInfo in the symmetric-ds project by JumpMind: class StatisticManager, method getNodesWithProcessesInError.
/**
 * Collects the IDs of nodes that currently have a process reporting an error.
 *
 * @return the set of node IDs in error; empty when this node has no identity
 *         or no process is in error
 */
public Set<String> getNodesWithProcessesInError() {
    Set<String> nodesInError = new HashSet<String>();
    String identityNodeId = nodeService.findIdentityNodeId();
    if (identityNodeId != null) {
        for (ProcessInfo info : getProcessInfos()) {
            String errorNodeId = info.showInError(identityNodeId);
            if (errorNodeId != null) {
                nodesInError.add(errorNodeId);
            }
        }
    }
    return nodesInError;
}
Usage of org.jumpmind.symmetric.model.ProcessInfo in the symmetric-ds project by JumpMind: class StatisticManager, method newProcessInfo.
/**
 * Registers a fresh {@link ProcessInfo} for the given key, replacing any
 * previous entry. If the previous process had not finished, a warning is
 * logged; if it had done any work, it is retained in the
 * "done work" map before being replaced.
 *
 * @param key identifies the process (source/target node and process type)
 * @return the newly registered process info
 */
public ProcessInfo newProcessInfo(ProcessInfoKey key) {
    ProcessInfo replacement = new ProcessInfo(key);
    ProcessInfo previous = processInfos.get(key);
    if (previous != null) {
        Status previousStatus = previous.getStatus();
        if (previousStatus != Status.OK && previousStatus != Status.ERROR) {
            // The prior run is still marked in-flight; surface that before replacing it.
            log.warn("Starting a new process even though the previous '{}' process had not finished", previous.getProcessType().toString());
            log.info("Details from the previous process: {}", previous.toString());
        }
        if (previous.getCurrentDataCount() > 0 || previous.getDataCount() > 0) {
            // Keep a record of processes that actually moved data.
            processInfosThatHaveDoneWork.put(key, previous);
        }
    }
    processInfos.put(key, replacement);
    return replacement;
}
Usage of org.jumpmind.symmetric.model.ProcessInfo in the symmetric-ds project by JumpMind: class FileSyncPullUriHandler, method handle.
/**
 * Handles a file-sync pull request: streams pending file-sync batches for the
 * requesting node as a zip attachment. Responds 400 when no node ID is given
 * and 204 when there is nothing to pull (for nodes >= 3.8.0).
 *
 * @param req incoming servlet request; must carry the {@code NODE_ID} parameter
 * @param res servlet response the zip payload (or error status) is written to
 * @throws IOException      on transport/stream failures
 * @throws ServletException per the handler contract
 */
public void handle(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
    String nodeId = ServletUtils.getParameter(req, WebConstants.NODE_ID);
    if (StringUtils.isBlank(nodeId)) {
        ServletUtils.sendError(res, HttpServletResponse.SC_BAD_REQUEST, "Node must be specified");
        return;
    } else {
        log.debug("File sync pull request received from {}", nodeId);
    }
    IOutgoingTransport outgoingTransport = createOutgoingTransport(res.getOutputStream(), req.getHeader(WebConstants.HEADER_ACCEPT_CHARSET), engine.getConfigurationService().getSuspendIgnoreChannelLists(nodeId));
    ProcessInfo processInfo = engine.getStatisticManager().newProcessInfo(new ProcessInfoKey(engine.getNodeService().findIdentityNodeId(), nodeId, ProcessType.FILE_SYNC_PULL_HANDLER));
    try {
        // Resolve the target node once and reuse it (previously looked up twice).
        Node targetNode = engine.getNodeService().findNode(nodeId, true);
        engine.getFileSyncService().sendFiles(processInfo, targetNode, outgoingTransport);
        if (processInfo.getBatchCount() == 0 && targetNode.isVersionGreaterThanOrEqualTo(3, 8, 0)) {
            // Nothing to pull; 3.8.0+ nodes understand an empty 204 response.
            ServletUtils.sendError(res, HttpServletResponse.SC_NO_CONTENT, "No files to pull.");
        } else {
            res.setContentType("application/zip");
            res.addHeader("Content-Disposition", "attachment; filename=\"file-sync.zip\"");
        }
        processInfo.setStatus(Status.OK);
    } catch (RuntimeException ex) {
        // Flag the process as failed for monitoring, then propagate.
        processInfo.setStatus(Status.ERROR);
        throw ex;
    } finally {
        if (outgoingTransport != null) {
            outgoingTransport.close();
        }
    }
}
Usage of org.jumpmind.symmetric.model.ProcessInfo in the symmetric-ds project by JumpMind: class DataExtractorService, method extractBatchRange.
/**
 * Re-extracts all outgoing batches for a node within a time range and writes
 * the protocol data to the given writer (typically for manual recovery).
 *
 * @param writer         destination for the extracted protocol data
 * @param nodeId         target node, or {@code Constants.UNROUTED_NODE_ID} for
 *                       unrouted data (extracted against a placeholder node "-1")
 * @param startBatchTime inclusive lower bound on batch time
 * @param endBatchTime   inclusive upper bound on batch time
 * @param channelIds     optional channel filter
 * @return true when at least one batch was extracted
 */
public boolean extractBatchRange(Writer writer, String nodeId, Date startBatchTime, Date endBatchTime, String... channelIds) {
    boolean foundBatch = false;
    Node sourceNode = nodeService.findIdentity();
    // Resolve the target node once; nodeId does not change across batches
    // (previously re-queried inside the loop for every batch).
    Node targetNode = nodeService.findNode(nodeId);
    if (targetNode == null && Constants.UNROUTED_NODE_ID.equals(nodeId)) {
        // Placeholder node so unrouted data can still be extracted.
        targetNode = new Node();
        targetNode.setNodeId("-1");
    }
    if (targetNode == null) {
        // Unknown node: nothing can be extracted.
        return false;
    }
    OutgoingBatches batches = outgoingBatchService.getOutgoingBatchRange(nodeId, startBatchTime, endBatchTime, channelIds);
    for (OutgoingBatch outgoingBatch : batches.getBatches()) {
        IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(), new SelectFromSymDataSource(outgoingBatch, sourceNode, targetNode, new ProcessInfo()));
        DataContext ctx = new DataContext();
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
        new DataProcessor(dataReader, createTransformDataWriter(sourceNode, targetNode, new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity())), "extract range").process(ctx);
        foundBatch = true;
    }
    return foundBatch;
}
Aggregations