Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in project symmetric-ds by JumpMind.
Class DataExtractorService, method wrapWithTransformWriter.
protected IDataWriter wrapWithTransformWriter(Node sourceNode, Node targetNode, ProcessInfo processInfo,
        IDataWriter dataWriter, boolean useStagingDataWriter) {
    TransformWriter transformExtractWriter = null;
    if (useStagingDataWriter) {
        long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
        transformExtractWriter = createTransformDataWriter(sourceNode, targetNode,
                new ProcessInfoDataWriter(new StagingDataWriter(memoryThresholdInBytes, true,
                        nodeService.findIdentityNodeId(), Constants.STAGING_CATEGORY_OUTGOING, stagingManager),
                        processInfo));
    } else {
        transformExtractWriter = createTransformDataWriter(sourceNode, targetNode,
                new ProcessInfoDataWriter(dataWriter, processInfo));
    }
    return transformExtractWriter;
}
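In both branches, ProcessInfoDataWriter is layered over another IDataWriter so batch progress is recorded as data flows through to the wrapped writer. A minimal, self-contained sketch of that decorator idea follows; the Writer interface and row counter are simplified stand-ins for illustration, not the real SymmetricDS types.

// Minimal sketch of the decorator pattern ProcessInfoDataWriter relies on.
// Writer and the rowsWritten counter are illustrative stand-ins only.
interface Writer {
    void write(String row);
}

class ProgressRecordingWriter implements Writer {
    private final Writer delegate; // the wrapped writer that does the real work
    private long rowsWritten;      // progress counter, analogous to ProcessInfo

    ProgressRecordingWriter(Writer delegate) {
        this.delegate = delegate;
    }

    @Override
    public void write(String row) {
        delegate.write(row); // forward to the wrapped writer
        rowsWritten++;       // record progress as a side effect
    }

    long getRowsWritten() {
        return rowsWritten;
    }
}

public class DecoratorSketch {
    public static void main(String[] args) {
        ProgressRecordingWriter w = new ProgressRecordingWriter(System.out::println);
        w.write("row1");
        w.write("row2");
        System.out.println("rows written: " + w.getRowsWritten());
    }
}

Wrapping keeps progress tracking orthogonal to the writer doing the work, which is why the same ProcessInfoDataWriter appears around both StagingDataWriter and arbitrary dataWriter instances in these snippets.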
Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in project symmetric-ds by JumpMind.
Class DataExtractorService, method sendOutgoingBatch (overload taking a BufferedWriter and an isRetry flag).
protected OutgoingBatch sendOutgoingBatch(ProcessInfo processInfo, Node targetNode, OutgoingBatch currentBatch,
        boolean isRetry, IDataWriter dataWriter, BufferedWriter writer, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        currentBatch.setSentCount(currentBatch.getSentCount() + 1);
        long ts = System.currentTimeMillis();
        IStagedResource extractedBatch = getStagedResource(currentBatch);
        if (extractedBatch != null) {
            if (mode == ExtractMode.FOR_SYM_CLIENT && writer != null) {
                if (!isRetry && parameterService.is(ParameterConstants.OUTGOING_BATCH_COPY_TO_INCOMING_STAGING)
                        && !parameterService.is(ParameterConstants.NODE_OFFLINE, false)) {
                    ISymmetricEngine targetEngine = AbstractSymmetricEngine.findEngineByUrl(targetNode.getSyncUrl());
                    if (targetEngine != null && extractedBatch.isFileResource()) {
                        try {
                            // target engine lives in this JVM: copy the staged file straight
                            // into its incoming staging area and treat the send as a retry
                            Node sourceNode = nodeService.findIdentity();
                            IStagedResource targetResource = targetEngine.getStagingManager().create(
                                    Constants.STAGING_CATEGORY_INCOMING,
                                    Batch.getStagedLocation(false, sourceNode.getNodeId()),
                                    currentBatch.getBatchId());
                            SymmetricUtils.copyFile(extractedBatch.getFile(), targetResource.getFile());
                            targetResource.setState(State.DONE);
                            isRetry = true;
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
                Channel channel = configurationService.getChannel(currentBatch.getChannelId());
                DataContext ctx = new DataContext();
                transferFromStaging(mode, BatchType.EXTRACT, currentBatch, isRetry, extractedBatch, writer, ctx,
                        channel.getMaxKBytesPerSecond());
            } else {
                IDataReader dataReader = new ProtocolDataReader(BatchType.EXTRACT, currentBatch.getNodeId(),
                        extractedBatch);
                DataContext ctx = new DataContext();
                ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
                new DataProcessor(dataReader, new ProcessInfoDataWriter(dataWriter, processInfo), "send from stage")
                        .process(ctx);
                if (dataReader.getStatistics().size() > 0) {
                    Statistics stats = dataReader.getStatistics().values().iterator().next();
                    statisticManager.incrementDataSent(currentBatch.getChannelId(),
                            stats.get(DataReaderStatistics.READ_RECORD_COUNT));
                    long byteCount = stats.get(DataReaderStatistics.READ_BYTE_COUNT);
                    statisticManager.incrementDataBytesSent(currentBatch.getChannelId(), byteCount);
                } else {
                    log.warn("Could not find recorded statistics for batch {}", currentBatch.getNodeBatchId());
                }
            }
        } else {
            throw new IllegalStateException(String.format("Could not find the staged resource for batch %s",
                    currentBatch.getNodeBatchId()));
        }
        currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
    }
    return currentBatch;
}
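When the target engine runs in the same JVM, the method above copies the staged batch file directly into the target's incoming staging area instead of streaming it over the transport. A runnable sketch of that shortcut using plain java.nio; the directory layout and file names here are hypothetical, not SymmetricDS's actual staging structure.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

// Hypothetical illustration of the same-JVM staging shortcut: copy the
// extracted batch file straight into the target's incoming staging area.
public class StagingCopySketch {
    public static void main(String[] args) throws IOException {
        Path extracted = Paths.get("staging", "outgoing", "42.csv"); // hypothetical layout
        Path incoming = Paths.get("staging", "incoming", "42.csv");  // hypothetical layout
        Files.createDirectories(extracted.getParent());
        Files.write(extracted, "batch payload".getBytes()); // stand-in for extracted data
        Files.createDirectories(incoming.getParent());
        // REPLACE_EXISTING mirrors a resend overwriting a stale copy
        Files.copy(extracted, incoming, StandardCopyOption.REPLACE_EXISTING);
        // the send is then treated as a retry, so the payload is not streamed again
        System.out.println("copied " + Files.size(incoming) + " bytes to incoming staging");
    }
}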
Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in project symmetric-ds by JumpMind.
Class DataExtractorService, method extractOutgoingBatch.
protected OutgoingBatch extractOutgoingBatch(ProcessInfo processInfo, Node targetNode, IDataWriter dataWriter,
        OutgoingBatch currentBatch, boolean useStagingDataWriter, boolean updateBatchStatistics, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        Node sourceNode = nodeService.findIdentity();
        TransformWriter transformExtractWriter = null;
        if (useStagingDataWriter) {
            long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode,
                    new ProcessInfoDataWriter(new StagingDataWriter(memoryThresholdInBytes,
                            nodeService.findIdentityNodeId(), Constants.STAGING_CATEGORY_OUTGOING, stagingManager),
                            processInfo));
        } else {
            transformExtractWriter = createTransformDataWriter(sourceNode, targetNode,
                    new ProcessInfoDataWriter(dataWriter, processInfo));
        }
        long ts = System.currentTimeMillis();
        long extractTimeInMs = 0L;
        long byteCount = 0L;
        long transformTimeInMs = 0L;
        if (currentBatch.getStatus() == Status.IG) {
            // the batch was ignored: write an empty, ignored batch and discard any staged data
            Batch batch = new Batch(BatchType.EXTRACT, currentBatch.getBatchId(), currentBatch.getChannelId(),
                    symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), currentBatch.getNodeId(),
                    currentBatch.isCommonFlag());
            batch.setIgnored(true);
            try {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.delete();
                }
                DataContext ctx = new DataContext(batch);
                ctx.put("targetNode", targetNode);
                ctx.put("sourceNode", sourceNode);
                transformExtractWriter.open(ctx);
                transformExtractWriter.start(batch);
                transformExtractWriter.end(batch, false);
            } finally {
                transformExtractWriter.close();
            }
        } else if (!isPreviouslyExtracted(currentBatch)) {
            int maxPermits = parameterService.getInt(ParameterConstants.CONCURRENT_WORKERS);
            String semaphoreKey = useStagingDataWriter ? Long.toString(currentBatch.getBatchId())
                    : currentBatch.getNodeBatchId();
            Semaphore lock = null;
            try {
                synchronized (locks) {
                    lock = locks.get(semaphoreKey);
                    if (lock == null) {
                        lock = new Semaphore(maxPermits);
                        locks.put(semaphoreKey, lock);
                    }
                    try {
                        lock.acquire();
                    } catch (InterruptedException e) {
                        throw new org.jumpmind.exception.InterruptedException(e);
                    }
                }
                synchronized (lock) {
                    if (!isPreviouslyExtracted(currentBatch)) {
                        currentBatch.setExtractCount(currentBatch.getExtractCount() + 1);
                        if (updateBatchStatistics) {
                            changeBatchStatus(Status.QY, currentBatch, mode);
                        }
                        currentBatch.resetStats();
                        IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(),
                                new SelectFromSymDataSource(currentBatch, sourceNode, targetNode, processInfo));
                        DataContext ctx = new DataContext();
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
                        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
                        new DataProcessor(dataReader, transformExtractWriter, "extract").process(ctx);
                        extractTimeInMs = System.currentTimeMillis() - ts;
                        Statistics stats = transformExtractWriter.getNestedWriter().getStatistics().values()
                                .iterator().next();
                        transformTimeInMs = stats.get(DataWriterStatisticConstants.TRANSFORMMILLIS);
                        extractTimeInMs = extractTimeInMs - transformTimeInMs;
                        byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                    }
                }
            } catch (RuntimeException ex) {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.close();
                    resource.delete();
                }
                throw ex;
            } finally {
                lock.release();
                synchronized (locks) {
                    if (lock.availablePermits() == maxPermits) {
                        locks.remove(semaphoreKey);
                    }
                }
            }
        }
        if (updateBatchStatistics) {
            long dataEventCount = currentBatch.getDataEventCount();
            long insertEventCount = currentBatch.getInsertEventCount();
            currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
            // preserve in the case of a reload event
            if (dataEventCount > currentBatch.getDataEventCount()) {
                currentBatch.setDataEventCount(dataEventCount);
            }
            // preserve in the case of a reload event
            if (insertEventCount > currentBatch.getInsertEventCount()) {
                currentBatch.setInsertEventCount(insertEventCount);
            }
            // only record timing when the batch was actually extracted, not just re-queried
            if (extractTimeInMs > 0) {
                currentBatch.setExtractMillis(extractTimeInMs);
            }
            if (byteCount > 0) {
                currentBatch.setByteCount(byteCount);
                statisticManager.incrementDataBytesExtracted(currentBatch.getChannelId(), byteCount);
                statisticManager.incrementDataExtracted(currentBatch.getChannelId(), currentBatch.getExtractCount());
            }
        }
    }
    return currentBatch;
}
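The concurrency control above keeps a map of per-batch semaphores, creating one on demand and discarding it once all permits are free again. A condensed, self-contained sketch of that keyed-semaphore pattern; the class and field names are illustrative, not the real service members.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Semaphore;

// Condensed sketch of the per-key semaphore pattern in extractOutgoingBatch:
// bound concurrent work per key, and drop the semaphore when it goes idle.
public class KeyedSemaphoreSketch {
    private final Map<String, Semaphore> locks = new HashMap<>();
    private final int maxPermits;

    public KeyedSemaphoreSketch(int maxPermits) {
        this.maxPermits = maxPermits;
    }

    public void runWithPermit(String key, Runnable work) throws InterruptedException {
        Semaphore lock;
        synchronized (locks) {
            lock = locks.computeIfAbsent(key, k -> new Semaphore(maxPermits));
            // acquiring under the map monitor, as the original does, prevents a
            // racing thread from removing the semaphore between lookup and acquire
            lock.acquire();
        }
        try {
            work.run();
        } finally {
            lock.release();
            synchronized (locks) {
                // drop the entry once no thread holds or is waiting on a permit
                if (lock.availablePermits() == maxPermits) {
                    locks.remove(key);
                }
            }
        }
    }
}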
Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in project symmetric-ds by JumpMind.
Class DataLoaderService, method buildDataWriter.
protected IDataWriter buildDataWriter(ProcessInfo processInfo, String sourceNodeId, String channelId, long batchId,
        boolean isRetry) {
    TransformTable[] transforms = null;
    NodeGroupLink link = null;
    List<ResolvedData> resolvedDatas = new ArrayList<ResolvedData>();
    List<IDatabaseWriterFilter> filters = extensionService.getExtensionPointList(IDatabaseWriterFilter.class);
    List<IDatabaseWriterFilter> dynamicFilters = filters;
    List<IDatabaseWriterErrorHandler> errorHandlers = extensionService
            .getExtensionPointList(IDatabaseWriterErrorHandler.class);
    List<IDatabaseWriterErrorHandler> dynamicErrorHandlers = errorHandlers;
    if (sourceNodeId != null) {
        Node sourceNode = nodeService.findNode(sourceNodeId, true);
        if (sourceNode != null) {
            link = new NodeGroupLink(sourceNode.getNodeGroupId(), parameterService.getNodeGroupId());
        }
        Map<LoadFilterType, Map<String, List<LoadFilter>>> loadFilters = loadFilterService.findLoadFiltersFor(link,
                true);
        List<DynamicDatabaseWriterFilter> databaseWriterFilters = DynamicDatabaseWriterFilter
                .getDatabaseWriterFilters(engine, loadFilters);
        if (loadFilters != null && loadFilters.size() > 0) {
            dynamicFilters = new ArrayList<IDatabaseWriterFilter>(filters.size() + 1);
            dynamicFilters.addAll(filters);
            dynamicFilters.addAll(databaseWriterFilters);
            dynamicErrorHandlers = new ArrayList<IDatabaseWriterErrorHandler>(errorHandlers.size() + 1);
            dynamicErrorHandlers.addAll(errorHandlers);
            dynamicErrorHandlers.addAll(databaseWriterFilters);
        }
        List<TransformTableNodeGroupLink> transformsList = transformService.findTransformsFor(link,
                TransformPoint.LOAD);
        transforms = transformsList != null ? transformsList.toArray(new TransformTable[transformsList.size()])
                : null;
        if (isRetry) {
            List<IncomingError> incomingErrors = getIncomingErrors(batchId, sourceNodeId);
            for (IncomingError incomingError : incomingErrors) {
                if (incomingError.isResolveIgnore() || StringUtils.isNotBlank(incomingError.getResolveData())) {
                    resolvedDatas.add(new ResolvedData(incomingError.getFailedRowNumber(),
                            incomingError.getResolveData(), incomingError.isResolveIgnore()));
                }
            }
        }
    }
    TransformWriter transformWriter = new TransformWriter(platform, TransformPoint.LOAD, null,
            transformService.getColumnTransforms(), transforms);
    IDataWriter targetWriter = getFactory(channelId).getDataWriter(sourceNodeId, symmetricDialect, transformWriter,
            dynamicFilters, dynamicErrorHandlers, getConflictSettingsNodeGroupLinks(link, false), resolvedDatas);
    transformWriter.setNestedWriter(new ProcessInfoDataWriter(targetWriter, processInfo));
    return transformWriter;
}
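On a retry, the method collects previously recorded resolutions so that rows which failed on an earlier attempt can be skipped or replaced during the reload. A small, runnable sketch of that resolution idea; the Resolution class and row format below are stand-ins for the real ResolvedData and CSV protocol.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Stand-in sketch of retry resolution: rows that failed before are either
// skipped ("ignore") or rewritten with operator-supplied data on the retry.
public class RetryResolutionSketch {
    static final class Resolution {
        final long failedRowNumber;
        final String replacementRow; // null when the row is simply ignored
        final boolean ignore;

        Resolution(long failedRowNumber, String replacementRow, boolean ignore) {
            this.failedRowNumber = failedRowNumber;
            this.replacementRow = replacementRow;
            this.ignore = ignore;
        }
    }

    public static void main(String[] args) {
        List<String> batch = Arrays.asList("row1", "bad-row", "row3");
        Map<Long, Resolution> resolutions = new HashMap<>();
        resolutions.put(2L, new Resolution(2L, "fixed-row", false)); // operator-supplied fix

        long rowNumber = 0;
        for (String row : batch) {
            rowNumber++;
            Resolution r = resolutions.get(rowNumber);
            if (r != null && r.ignore) {
                continue; // resolved as "ignore": skip the row entirely
            }
            String toWrite = (r != null && r.replacementRow != null) ? r.replacementRow : row;
            System.out.println("loading " + toWrite);
        }
    }
}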
Use of org.jumpmind.symmetric.model.ProcessInfoDataWriter in project symmetric-ds by JumpMind.
Class DataExtractorService, method sendOutgoingBatch (simpler overload without the staging transfer path).
protected OutgoingBatch sendOutgoingBatch(ProcessInfo processInfo, Node targetNode, OutgoingBatch currentBatch,
        IDataWriter dataWriter, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        currentBatch.setSentCount(currentBatch.getSentCount() + 1);
        changeBatchStatus(Status.SE, currentBatch, mode);
        long ts = System.currentTimeMillis();
        IStagedResource extractedBatch = getStagedResource(currentBatch);
        if (extractedBatch != null) {
            IDataReader dataReader = new ProtocolDataReader(BatchType.EXTRACT, currentBatch.getNodeId(),
                    extractedBatch);
            DataContext ctx = new DataContext();
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
            new DataProcessor(dataReader, new ProcessInfoDataWriter(dataWriter, processInfo), "send from stage")
                    .process(ctx);
            if (dataWriter.getStatistics().size() > 0) {
                Statistics stats = dataWriter.getStatistics().values().iterator().next();
                statisticManager.incrementDataSent(currentBatch.getChannelId(),
                        stats.get(DataWriterStatisticConstants.STATEMENTCOUNT));
                long byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                statisticManager.incrementDataBytesSent(currentBatch.getChannelId(), byteCount);
            } else {
                log.warn("Could not find recorded statistics for batch {}", currentBatch.getNodeBatchId());
            }
        } else {
            throw new IllegalStateException(String.format("Could not find the staged resource for batch %s",
                    currentBatch.getNodeBatchId()));
        }
        currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
    }
    return currentBatch;
}
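Both sendOutgoingBatch variants read the first recorded Statistics entry defensively, logging a warning when none exists rather than failing the send. A runnable sketch of that pattern, with plain maps standing in for the Statistics API.

import java.util.LinkedHashMap;
import java.util.Map;

// Sketch of the defensive statistics read: take the first recorded entry
// if present, otherwise warn and move on. Plain maps stand in for Statistics.
public class StatsReadSketch {
    public static void main(String[] args) {
        Map<String, Map<String, Long>> statistics = new LinkedHashMap<>();
        Map<String, Long> batchStats = new LinkedHashMap<>();
        batchStats.put("STATEMENTCOUNT", 10L);
        batchStats.put("BYTECOUNT", 1024L);
        statistics.put("batch-42", batchStats);

        if (statistics.size() > 0) {
            Map<String, Long> stats = statistics.values().iterator().next();
            System.out.println("rows sent:  " + stats.get("STATEMENTCOUNT"));
            System.out.println("bytes sent: " + stats.get("BYTECOUNT"));
        } else {
            System.out.println("WARN: could not find recorded statistics for batch");
        }
    }
}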