Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
Class DataExtractorService, method extractConfigurationStandalone:
/**
* Extract the SymmetricDS configuration for the passed in {@link Node}.
*/
public void extractConfigurationStandalone(Node targetNode, Writer writer, String... tablesToExclude) {
    Node sourceNode = nodeService.findIdentity();
    if (targetNode != null && sourceNode != null) {
        Batch batch = new Batch(BatchType.EXTRACT, Constants.VIRTUAL_BATCH_FOR_REGISTRATION, Constants.CHANNEL_CONFIG,
                symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), targetNode.getNodeId(), false);
        NodeGroupLink nodeGroupLink = new NodeGroupLink(parameterService.getNodeGroupId(), targetNode.getNodeGroupId());
        List<TriggerRouter> triggerRouters = triggerRouterService.buildTriggerRoutersForSymmetricTables(
                StringUtils.isBlank(targetNode.getSymmetricVersion()) ? Version.version() : targetNode.getSymmetricVersion(),
                nodeGroupLink, tablesToExclude);
        List<SelectFromTableEvent> initialLoadEvents = new ArrayList<SelectFromTableEvent>(triggerRouters.size() * 2);
        // First pass (in reverse order): queue purge SQL events that clear the configuration
        // and heartbeat tables on the target before they are reloaded.
        for (int i = triggerRouters.size() - 1; i >= 0; i--) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (filter(targetNode, triggerRouter.getTrigger().getSourceTableName())) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(
                            triggerRouter.getTrigger().getTriggerId(), null, null, triggerRouter.getTrigger().getSourceTableName());
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        Table table = symmetricDialect.getPlatform().getTableFromCache(trigger.getSourceCatalogName(),
                                trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
                        if (table == null) {
                            throw new IllegalStateException("Could not find a required table: " + triggerRouter.getTrigger().getSourceTableName());
                        }
                        triggerHistory = new TriggerHistory(table, triggerRouter.getTrigger(), symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    StringBuilder sql = new StringBuilder(symmetricDialect.createPurgeSqlFor(targetNode, triggerRouter, triggerHistory));
                    addPurgeCriteriaToConfigurationTables(triggerRouter.getTrigger().getSourceTableName(), sql);
                    String sourceTable = triggerHistory.getSourceTableName();
                    Data data = new Data(1, null, sql.toString(), DataEventType.SQL, sourceTable, null, triggerHistory,
                            triggerRouter.getTrigger().getChannelId(), null, null);
                    data.putAttribute(Data.ATTRIBUTE_ROUTER_ID, triggerRouter.getRouter().getRouterId());
                    initialLoadEvents.add(new SelectFromTableEvent(data));
                }
            }
        }
        // Second pass: queue reload events that select the current rows of each configuration
        // table, ordered by primary key; sym_node_identity is sent as a single INSERT instead.
        for (int i = 0; i < triggerRouters.size(); i++) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            String channelId = triggerRouter.getTrigger().getChannelId();
            if (Constants.CHANNEL_CONFIG.equals(channelId) || Constants.CHANNEL_HEARTBEAT.equals(channelId)) {
                if (filter(targetNode, triggerRouter.getTrigger().getSourceTableName())) {
                    TriggerHistory triggerHistory = triggerRouterService.getNewestTriggerHistoryForTrigger(
                            triggerRouter.getTrigger().getTriggerId(), null, null, null);
                    if (triggerHistory == null) {
                        Trigger trigger = triggerRouter.getTrigger();
                        triggerHistory = new TriggerHistory(symmetricDialect.getPlatform().getTableFromCache(
                                trigger.getSourceCatalogName(), trigger.getSourceSchemaName(), trigger.getSourceTableName(), false),
                                trigger, symmetricDialect.getTriggerTemplate());
                        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
                    }
                    Table table = symmetricDialect.getPlatform().getTableFromCache(triggerHistory.getSourceCatalogName(),
                            triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
                    String initialLoadSql = "1=1 order by ";
                    String quote = symmetricDialect.getPlatform().getDdlBuilder().getDatabaseInfo().getDelimiterToken();
                    Column[] pkColumns = table.getPrimaryKeyColumns();
                    for (int j = 0; j < pkColumns.length; j++) {
                        if (j > 0) {
                            initialLoadSql += ", ";
                        }
                        initialLoadSql += quote + pkColumns[j].getName() + quote;
                    }
                    if (!triggerRouter.getTrigger().getSourceTableName().endsWith(TableConstants.SYM_NODE_IDENTITY)) {
                        initialLoadEvents.add(new SelectFromTableEvent(targetNode, triggerRouter, triggerHistory, initialLoadSql));
                    } else {
                        Data data = new Data(1, null, targetNode.getNodeId(), DataEventType.INSERT, triggerHistory.getSourceTableName(),
                                null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null, null);
                        initialLoadEvents.add(new SelectFromTableEvent(data));
                    }
                }
            }
        }
        // Stream the queued events to the writer, carrying the source and target nodes in the DataContext.
        SelectFromTableSource source = new SelectFromTableSource(batch, initialLoadEvents);
        ExtractDataReader dataReader = new ExtractDataReader(this.symmetricDialect.getPlatform(), source);
        ProtocolDataWriter dataWriter = new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity());
        DataProcessor processor = new DataProcessor(dataReader, dataWriter, "configuration extract");
        DataContext ctx = new DataContext();
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
        processor.process(ctx);
        if (triggerRouters.size() == 0) {
            log.error("{} attempted registration, but was sent an empty configuration", targetNode);
        }
    }
}
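The put calls above make the participating nodes visible to every component that later receives the same DataContext during processing. A minimal consumer-side sketch, not taken from the source; the get(String) accessor inherited from the context superclass, the casts, and the in-scope log are assumptions:

// Hypothetical consumer of the context populated above; get(String), the casts,
// and the logger are assumptions, not confirmed by the snippet.
void logExtractParticipants(DataContext ctx) {
    Node source = (Node) ctx.get(Constants.DATA_CONTEXT_SOURCE_NODE);
    Node target = (Node) ctx.get(Constants.DATA_CONTEXT_TARGET_NODE);
    log.info("Extracting configuration from {} to {}", source.getNodeId(), target.getNodeId());
}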
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
Class DataExtractorService, method sendOutgoingBatch:
protected OutgoingBatch sendOutgoingBatch(ProcessInfo processInfo, Node targetNode, OutgoingBatch currentBatch, boolean isRetry,
        IDataWriter dataWriter, BufferedWriter writer, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode) {
        currentBatch.setSentCount(currentBatch.getSentCount() + 1);
        long ts = System.currentTimeMillis();
        IStagedResource extractedBatch = getStagedResource(currentBatch);
        if (extractedBatch != null) {
            if (mode == ExtractMode.FOR_SYM_CLIENT && writer != null) {
                // Optionally short-circuit the transport by copying the staged file straight into the
                // target engine's incoming staging area when both engines run in the same JVM.
                if (!isRetry && parameterService.is(ParameterConstants.OUTGOING_BATCH_COPY_TO_INCOMING_STAGING)
                        && !parameterService.is(ParameterConstants.NODE_OFFLINE, false)) {
                    ISymmetricEngine targetEngine = AbstractSymmetricEngine.findEngineByUrl(targetNode.getSyncUrl());
                    if (targetEngine != null && extractedBatch.isFileResource()) {
                        try {
                            Node sourceNode = nodeService.findIdentity();
                            IStagedResource targetResource = targetEngine.getStagingManager().create(
                                    Constants.STAGING_CATEGORY_INCOMING, Batch.getStagedLocation(false, sourceNode.getNodeId()),
                                    currentBatch.getBatchId());
                            SymmetricUtils.copyFile(extractedBatch.getFile(), targetResource.getFile());
                            targetResource.setState(State.DONE);
                            isRetry = true;
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
                Channel channel = configurationService.getChannel(currentBatch.getChannelId());
                DataContext ctx = new DataContext();
                transferFromStaging(mode, BatchType.EXTRACT, currentBatch, isRetry, extractedBatch, writer, ctx,
                        channel.getMaxKBytesPerSecond());
            } else {
                // Otherwise re-read the staged batch, push it through the supplied writer, and record send statistics.
                IDataReader dataReader = new ProtocolDataReader(BatchType.EXTRACT, currentBatch.getNodeId(), extractedBatch);
                DataContext ctx = new DataContext();
                ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
                new DataProcessor(dataReader, new ProcessInfoDataWriter(dataWriter, processInfo), "send from stage").process(ctx);
                if (dataReader.getStatistics().size() > 0) {
                    Statistics stats = dataReader.getStatistics().values().iterator().next();
                    statisticManager.incrementDataSent(currentBatch.getChannelId(), stats.get(DataReaderStatistics.READ_RECORD_COUNT));
                    long byteCount = stats.get(DataReaderStatistics.READ_BYTE_COUNT);
                    statisticManager.incrementDataBytesSent(currentBatch.getChannelId(), byteCount);
                } else {
                    log.warn("Could not find recorded statistics for batch {}", currentBatch.getNodeBatchId());
                }
            }
        } else {
            throw new IllegalStateException(String.format("Could not find the staged resource for batch %s", currentBatch.getNodeBatchId()));
        }
        currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
    }
    return currentBatch;
}
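For reference, the "send from stage" branch above reduces to the same reader/context/processor pattern used throughout this page. A condensed sketch, assuming currentBatch, targetNode, extractedBatch, dataWriter, and processInfo are in scope as in the method:

// Condensed sketch of the non-client send path; names are assumed to be in scope as above.
IDataReader stagedReader = new ProtocolDataReader(BatchType.EXTRACT, currentBatch.getNodeId(), extractedBatch);
DataContext ctx = new DataContext();
ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
new DataProcessor(stagedReader, new ProcessInfoDataWriter(dataWriter, processInfo), "send from stage").process(ctx);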
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
Class DataLoaderService, method loadDataFromTransport:
/**
* Load database from input stream and return a list of batch statuses. This
* is used for a pull request that responds with data, and the
* acknowledgment is sent later.
*/
protected List<IncomingBatch> loadDataFromTransport(final ProcessInfo processInfo, final Node sourceNode,
        IIncomingTransport transport, OutputStream out) throws IOException {
    final ManageIncomingBatchListener listener = new ManageIncomingBatchListener();
    final DataContext ctx = new DataContext();
    Throwable error = null;
    try {
        // Seed the context with the engine plus source and target node metadata so that
        // data loader filters and lifecycle listeners can see who is exchanging data.
        Node targetNode = nodeService.findIdentity();
        ctx.put(Constants.DATA_CONTEXT_ENGINE, engine);
        if (targetNode != null) {
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
        }
        if (sourceNode != null) {
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
        }
        for (ILoadSyncLifecycleListener l : extensionService.getExtensionPointList(ILoadSyncLifecycleListener.class)) {
            l.syncStarted(ctx);
        }
        long memoryThresholdInBytes = parameterService.getLong(ParameterConstants.STREAM_TO_FILE_THRESHOLD);
        String targetNodeId = nodeService.findIdentityNodeId();
        if (parameterService.is(ParameterConstants.STREAM_TO_FILE_ENABLED)) {
            // Stream the payload to staging and load it asynchronously on a single worker thread,
            // writing keep-alive tokens to the response while the load is still running.
            processInfo.setStatus(ProcessInfo.Status.TRANSFERRING);
            ExecutorService executor = Executors.newFixedThreadPool(1, new CustomizableThreadFactory(
                    String.format("dataloader-%s-%s", sourceNode.getNodeGroupId(), sourceNode.getNodeId())));
            LoadIntoDatabaseOnArrivalListener loadListener = new LoadIntoDatabaseOnArrivalListener(processInfo,
                    sourceNode.getNodeId(), listener, executor);
            new SimpleStagingDataWriter(transport.openReader(), stagingManager, Constants.STAGING_CATEGORY_INCOMING,
                    memoryThresholdInBytes, BatchType.LOAD, targetNodeId, ctx, loadListener).process();
            /* Previously submitted tasks will still be executed */
            executor.shutdown();
            OutputStreamWriter outWriter = null;
            if (out != null) {
                outWriter = new OutputStreamWriter(out, IoConstants.ENCODING);
                long keepAliveMillis = parameterService.getLong(ParameterConstants.DATA_LOADER_SEND_ACK_KEEPALIVE);
                while (!executor.awaitTermination(keepAliveMillis, TimeUnit.MILLISECONDS)) {
                    outWriter.write("1=1&");
                    outWriter.flush();
                }
            } else {
                executor.awaitTermination(12, TimeUnit.HOURS);
            }
            loadListener.isDone();
        } else {
            // Load directly from the transport without staging.
            DataProcessor processor = new DataProcessor(new ProtocolDataReader(BatchType.LOAD, targetNodeId,
                    transport.openReader()), null, listener, "data load") {

                @Override
                protected IDataWriter chooseDataWriter(Batch batch) {
                    return buildDataWriter(processInfo, sourceNode.getNodeId(), batch.getChannelId(), batch.getBatchId(),
                            ((ManageIncomingBatchListener) listener).getCurrentBatch().isRetry());
                }
            };
            processor.process(ctx);
        }
    } catch (Throwable ex) {
        error = ex;
        logAndRethrow(ex);
    } finally {
        transport.close();
        for (ILoadSyncLifecycleListener l : extensionService.getExtensionPointList(ILoadSyncLifecycleListener.class)) {
            l.syncEnded(ctx, listener.getBatchesProcessed(), error);
        }
    }
    return listener.getBatchesProcessed();
}
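Because the same source/target keys are populated at several of these call sites, a small helper can centralize the pattern. A hypothetical sketch, not part of SymmetricDS, that simply mirrors the put calls in the method above:

// Hypothetical helper, not part of SymmetricDS: builds a DataContext pre-populated with the
// engine and the standard source/target node keys used by the data loader above.
static DataContext buildLoadContext(ISymmetricEngine engine, Node sourceNode, Node targetNode) {
    DataContext ctx = new DataContext();
    ctx.put(Constants.DATA_CONTEXT_ENGINE, engine);
    if (targetNode != null) {
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
        ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
    }
    if (sourceNode != null) {
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
        ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
    }
    return ctx;
}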
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
Class AbstractWriterTest, method writeData:
protected long writeData(IDataWriter writer, TableCsvData... datas) {
    this.lastDataWriterUsed = writer;
    DataContext context = new DataContext();
    writer.open(context);
    try {
        for (TableCsvData tableCsvData : datas) {
            Batch batch = new Batch(BatchType.LOAD, getNextBatchId(), "default", BinaryEncoding.BASE64, "00000", "00001", false);
            try {
                writer.start(batch);
                if (writer.start(tableCsvData.table)) {
                    for (CsvData d : tableCsvData.data) {
                        writer.write(d);
                    }
                    writer.end(tableCsvData.table);
                }
                writer.end(batch, false);
            } catch (IgnoreBatchException ex) {
                writer.end(batch, false);
            } catch (Exception ex) {
                writer.end(batch, true);
                if (!isErrorExpected()) {
                    if (ex instanceof RuntimeException) {
                        throw (RuntimeException) ex;
                    } else {
                        throw new RuntimeException(ex);
                    }
                }
            }
        }
    } finally {
        writer.close();
    }
    long statementCount = 0;
    Collection<Statistics> stats = writer.getStatistics().values();
    for (Statistics statistics : stats) {
        statementCount += statistics.get(DataWriterStatisticConstants.STATEMENTCOUNT);
    }
    return statementCount;
}
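Stripped of the test scaffolding, the helper above follows the standard IDataWriter lifecycle: the writer is opened with a DataContext, and each batch and table is bracketed with start/end calls around the individual writes. A distilled sketch; writer, batch, table, and row are assumed to be in scope:

// Distilled writer lifecycle from the test helper above; writer, batch, table, and row are
// assumed to be in scope. end(batch, true) would signal that the batch ended in error.
DataContext context = new DataContext();
writer.open(context);
try {
    writer.start(batch);
    if (writer.start(table)) {
        writer.write(row);
        writer.end(table);
    }
    writer.end(batch, false);
} finally {
    writer.close();
}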
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
Class DataExtractorService, method extractBatchRange:
public boolean extractBatchRange(Writer writer, String nodeId, long startBatchId, long endBatchId) {
    boolean foundBatch = false;
    Node sourceNode = nodeService.findIdentity();
    for (long batchId = startBatchId; batchId <= endBatchId; batchId++) {
        OutgoingBatch batch = outgoingBatchService.findOutgoingBatch(batchId, nodeId);
        if (batch != null) {
            Node targetNode = nodeService.findNode(nodeId);
            if (targetNode == null && Constants.UNROUTED_NODE_ID.equals(nodeId)) {
                targetNode = new Node();
                targetNode.setNodeId("-1");
            }
            if (targetNode != null) {
                IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(),
                        new SelectFromSymDataSource(batch, sourceNode, targetNode, new ProcessInfo()));
                DataContext ctx = new DataContext();
                ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
                new DataProcessor(dataReader, createTransformDataWriter(nodeService.findIdentity(), targetNode,
                        new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity())),
                        "extract range").process(ctx);
                foundBatch = true;
            }
        }
    }
    return foundBatch;
}
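A hypothetical caller of extractBatchRange, writing batches 100 through 110 for node "00001" to a file. The batch range, node id, file name, and logger are illustrative only, and exposure of the method on the IDataExtractorService interface is an assumption:

// Hypothetical caller; the batch range, node id, file name, and logger are illustrative only.
void exportBatchRange(IDataExtractorService dataExtractorService) throws IOException {
    try (Writer out = new FileWriter("batch-range.csv")) {
        boolean found = dataExtractorService.extractBatchRange(out, "00001", 100, 110);
        if (!found) {
            log.warn("No outgoing batches were found in the requested range");
        }
    }
}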