Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
In the class ProtocolDataReaderTest, the method testSimpleRead.
@Test
public void testSimpleRead() {
    String nodeId = "055";
    long batchId = 123;
    String channelId = "nbc";
    StringBuilder builder = beginCsv(nodeId);
    beginBatch(builder, batchId, channelId);
    putTableN(builder, 1, true);
    putInsert(builder, 4);
    endCsv(builder);
    ProtocolDataReader reader = new ProtocolDataReader(BatchType.LOAD, "test", builder);
    DataContext ctx = new DataContext(reader);
    reader.open(ctx);
    Batch batch = reader.nextBatch();
    assertNotNull(batch);
    assertEquals(batchId, batch.getBatchId());
    Table table = reader.nextTable();
    assertNotNull(table);
    assertEquals("test1", table.getName());
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumns().length);
    assertEquals("id", table.getColumn(0).getName());
    assertEquals("text", table.getColumn(1).getName());
    CsvData data = reader.nextData();
    assertNotNull(data);
    assertEquals(DataEventType.INSERT, data.getDataEventType());
    assertEquals("0", data.getParsedData(CsvData.ROW_DATA)[0]);
    assertEquals("test", data.getParsedData(CsvData.ROW_DATA)[1]);
    data = reader.nextData();
    assertNotNull(data);
    assertEquals(DataEventType.INSERT, data.getDataEventType());
    assertEquals("1", data.getParsedData(CsvData.ROW_DATA)[0]);
    assertEquals("test", data.getParsedData(CsvData.ROW_DATA)[1]);
    data = reader.nextData();
    assertNotNull(data);
    assertEquals(DataEventType.INSERT, data.getDataEventType());
    assertEquals("2", data.getParsedData(CsvData.ROW_DATA)[0]);
    assertEquals("test", data.getParsedData(CsvData.ROW_DATA)[1]);
    data = reader.nextData();
    assertNotNull(data);
    assertEquals(DataEventType.INSERT, data.getDataEventType());
    assertEquals("3", data.getParsedData(CsvData.ROW_DATA)[0]);
    assertEquals("test", data.getParsedData(CsvData.ROW_DATA)[1]);
    data = reader.nextData();
    assertNull(data);
    table = reader.nextTable();
    assertNull(table);
    batch = reader.nextBatch();
    assertNull(batch);
    reader.close();
}
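The test drives the reader with small CSV-building helpers (beginCsv, beginBatch, putTableN, putInsert, endCsv) that are defined elsewhere in ProtocolDataReaderTest. Below is a minimal sketch of what such helpers might emit, assuming the standard SymmetricDS CSV protocol tokens (nodeid, channel, batch, table, keys, columns, insert, commit); the real helper implementations may differ in detail.

// Hypothetical reimplementation of the test helpers; illustrative only.
protected StringBuilder beginCsv(String nodeId) {
    StringBuilder builder = new StringBuilder();
    builder.append("nodeid,").append(nodeId).append("\n");
    return builder;
}

protected void beginBatch(StringBuilder builder, long batchId, String channelId) {
    builder.append("channel,").append(channelId).append("\n");
    builder.append("batch,").append(batchId).append("\n");
}

protected void putTableN(StringBuilder builder, int n, boolean includeMetadata) {
    builder.append("table,test").append(n).append("\n");
    if (includeMetadata) {
        // the first occurrence of a table carries its key and column metadata
        builder.append("keys,id\n");
        builder.append("columns,id,text\n");
    }
}

protected void putInsert(StringBuilder builder, int count) {
    // rows "0".."count-1", each with the text value "test", matching the assertions above
    for (int i = 0; i < count; i++) {
        builder.append("insert,\"").append(i).append("\",\"test\"\n");
    }
}

protected void endCsv(StringBuilder builder) {
    builder.append("commit\n");
}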
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
In the class ProtocolDataReaderTest, the method testTableContextSwitch.
@Test
public void testTableContextSwitch() {
    String nodeId = "1";
    long batchId = 1;
    String channelId = "test";
    StringBuilder builder = beginCsv(nodeId);
    beginBatch(builder, batchId, channelId);
    putTableN(builder, 1, true);
    putInsert(builder, 4);
    putTableN(builder, 2, true);
    putInsert(builder, 4);
    putTableN(builder, 1, false);
    putInsert(builder, 2);
    putTableN(builder, 2, false);
    putInsert(builder, 2);
    endCsv(builder);
    ProtocolDataReader reader = new ProtocolDataReader(BatchType.LOAD, "test", builder);
    DataContext ctx = new DataContext(reader);
    reader.open(ctx);
    Batch batch = reader.nextBatch();
    assertNotNull(batch);
    Table table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test1", table.getName());
    int dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(4, dataCount);
    table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test2", table.getName());
    dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(4, dataCount);
    table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test1", table.getName());
    dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(2, dataCount);
    table = reader.nextTable();
    assertNotNull(table);
    assertEquals(2, table.getColumnCount());
    assertEquals(1, table.getPrimaryKeyColumnCount());
    assertEquals("test2", table.getName());
    dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    assertEquals(2, dataCount);
}
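The four identical while-loops above could be factored into a small helper. The sketch below is a suggested refactoring, not part of the original test:

// Drains the remaining rows of the current table from an open IDataReader
// and returns how many rows were read.
private static int countRows(IDataReader reader) {
    int dataCount = 0;
    while (reader.nextData() != null) {
        dataCount++;
    }
    return dataCount;
}

With it, each block in the test reduces to assertEquals(4, countRows(reader)) or assertEquals(2, countRows(reader)).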
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
In the class AbstractXmlPublisherExtensionPoint, the method addFormattedExtraGroupAttributes.
/**
 * Gives the user of this publisher an opportunity to add additional
 * attributes. The default implementation adds the node id and batch id
 * from the {@link DataContext}, plus a timestamp when a time string
 * generator is configured.
 *
 * @param context
 * @param xml
 *            the element to append XML attributes to
 */
protected void addFormattedExtraGroupAttributes(Context context, Element xml) {
    if (context instanceof DataContext) {
        DataContext dataContext = (DataContext) context;
        xml.setAttribute("nodeid", dataContext.getBatch().getSourceNodeId());
        xml.setAttribute("batchid", Long.toString(dataContext.getBatch().getBatchId()));
    }
    if (timeStringGenerator != null) {
        xml.setAttribute("time", timeStringGenerator.getTime());
    }
}
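As a usage illustration, a subclass can override this hook to publish extra attributes. The sketch below is hypothetical: AuditXmlPublisher and the "channelid" attribute are invented names, and XmlPublisherDataRouter is assumed as the concrete publisher being extended; any other publisher built on AbstractXmlPublisherExtensionPoint would work the same way.

// Illustrative subclass; AuditXmlPublisher and "channelid" are invented for this example.
public class AuditXmlPublisher extends XmlPublisherDataRouter {
    @Override
    protected void addFormattedExtraGroupAttributes(Context context, Element xml) {
        super.addFormattedExtraGroupAttributes(context, xml);
        if (context instanceof DataContext) {
            DataContext dataContext = (DataContext) context;
            xml.setAttribute("channelid", dataContext.getBatch().getChannelId());
        }
    }
}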
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
In the class DataExtractorService, the method extractBatchRange.
public boolean extractBatchRange(Writer writer, String nodeId, Date startBatchTime, Date endBatchTime, String... channelIds) {
    boolean foundBatch = false;
    Node sourceNode = nodeService.findIdentity();
    OutgoingBatches batches = outgoingBatchService.getOutgoingBatchRange(nodeId, startBatchTime, endBatchTime, channelIds);
    List<OutgoingBatch> list = batches.getBatches();
    for (OutgoingBatch outgoingBatch : list) {
        Node targetNode = nodeService.findNode(nodeId);
        if (targetNode == null && Constants.UNROUTED_NODE_ID.equals(nodeId)) {
            targetNode = new Node();
            targetNode.setNodeId("-1");
        }
        if (targetNode != null) {
            IDataReader dataReader = new ExtractDataReader(symmetricDialect.getPlatform(),
                    new SelectFromSymDataSource(outgoingBatch, sourceNode, targetNode, new ProcessInfo()));
            DataContext ctx = new DataContext();
            ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
            ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, nodeService.findIdentity());
            new DataProcessor(dataReader,
                    createTransformDataWriter(nodeService.findIdentity(), targetNode,
                            new ProtocolDataWriter(nodeService.findIdentityNodeId(), writer, targetNode.requires13Compatiblity())),
                    "extract range").process(ctx);
            foundBatch = true;
        }
    }
    return foundBatch;
}
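A hedged usage sketch, assuming an ISymmetricEngine instance named engine is available, that this overload is exposed on IDataExtractorService, and that the node id "001" and channel "default" below are placeholders:

// Extract the last 24 hours of batches for node "001" on the "default" channel into a string.
StringWriter writer = new StringWriter();
Date end = new Date();
Date start = new Date(end.getTime() - TimeUnit.DAYS.toMillis(1));
boolean found = engine.getDataExtractorService().extractBatchRange(writer, "001", start, end, "default");
if (found) {
    System.out.println(writer);
}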
Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.
In the class DataExtractorService, the method extractOutgoingBatch.
protected OutgoingBatch extractOutgoingBatch(ProcessInfo processInfo, Node targetNode, IDataWriter dataWriter,
        OutgoingBatch currentBatch, boolean useStagingDataWriter, boolean updateBatchStatistics, ExtractMode mode) {
    if (currentBatch.getStatus() != Status.OK || ExtractMode.EXTRACT_ONLY == mode || ExtractMode.FOR_SYM_CLIENT == mode) {
        Node sourceNode = nodeService.findIdentity();
        IDataWriter writer = wrapWithTransformWriter(sourceNode, targetNode, processInfo, dataWriter, useStagingDataWriter);
        long ts = System.currentTimeMillis();
        long extractTimeInMs = 0L;
        long byteCount = 0L;
        long transformTimeInMs = 0L;
        if (currentBatch.getStatus() == Status.IG) {
            cleanupIgnoredBatch(sourceNode, targetNode, currentBatch, writer);
        } else if (!isPreviouslyExtracted(currentBatch, true)) {
            String semaphoreKey = useStagingDataWriter ? Long.toString(currentBatch.getBatchId()) : currentBatch.getNodeBatchId();
            Semaphore lock = null;
            try {
                synchronized (locks) {
                    lock = locks.get(semaphoreKey);
                    if (lock == null) {
                        lock = new Semaphore(1);
                        locks.put(semaphoreKey, lock);
                    }
                    try {
                        lock.acquire();
                    } catch (InterruptedException e) {
                        throw new org.jumpmind.exception.InterruptedException(e);
                    }
                }
                if (!isPreviouslyExtracted(currentBatch, true)) {
                    currentBatch.setExtractCount(currentBatch.getExtractCount() + 1);
                    if (updateBatchStatistics) {
                        changeBatchStatus(Status.QY, currentBatch, mode);
                    }
                    currentBatch.resetStats();
                    DataContext ctx = new DataContext();
                    ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_ID, targetNode.getNodeId());
                    ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_EXTERNAL_ID, targetNode.getExternalId());
                    ctx.put(Constants.DATA_CONTEXT_TARGET_NODE_GROUP_ID, targetNode.getNodeGroupId());
                    ctx.put(Constants.DATA_CONTEXT_TARGET_NODE, targetNode);
                    ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE, sourceNode);
                    ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_ID, sourceNode.getNodeId());
                    ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_EXTERNAL_ID, sourceNode.getExternalId());
                    ctx.put(Constants.DATA_CONTEXT_SOURCE_NODE_GROUP_ID, sourceNode.getNodeGroupId());
                    IDataReader dataReader = buildExtractDataReader(sourceNode, targetNode, currentBatch, processInfo);
                    new DataProcessor(dataReader, writer, "extract").process(ctx);
                    extractTimeInMs = System.currentTimeMillis() - ts;
                    Statistics stats = getExtractStats(writer);
                    if (stats != null) {
                        transformTimeInMs = stats.get(DataWriterStatisticConstants.TRANSFORMMILLIS);
                        extractTimeInMs = extractTimeInMs - transformTimeInMs;
                        byteCount = stats.get(DataWriterStatisticConstants.BYTECOUNT);
                        statisticManager.incrementDataBytesExtracted(currentBatch.getChannelId(), byteCount);
                        statisticManager.incrementDataExtracted(currentBatch.getChannelId(),
                                stats.get(DataWriterStatisticConstants.STATEMENTCOUNT));
                    }
                }
            } catch (RuntimeException ex) {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.close();
                    resource.delete();
                }
                throw ex;
            } finally {
                IStagedResource resource = getStagedResource(currentBatch);
                if (resource != null) {
                    resource.setState(State.DONE);
                }
                lock.release();
                synchronized (locks) {
                    locks.remove(semaphoreKey);
                }
            }
        }
        if (updateBatchStatistics) {
            long dataEventCount = currentBatch.getDataEventCount();
            long insertEventCount = currentBatch.getInsertEventCount();
            currentBatch = requeryIfEnoughTimeHasPassed(ts, currentBatch);
            // preserve in the case of a reload event
            if (dataEventCount > currentBatch.getDataEventCount()) {
                currentBatch.setDataEventCount(dataEventCount);
            }
            // preserve in the case of a reload event
            if (insertEventCount > currentBatch.getInsertEventCount()) {
                currentBatch.setInsertEventCount(insertEventCount);
            }
            // only record timing and size if this call actually extracted, since the batch may have been "re-queried"
            if (extractTimeInMs > 0) {
                currentBatch.setExtractMillis(extractTimeInMs);
            }
            if (byteCount > 0) {
                currentBatch.setByteCount(byteCount);
            }
        }
    }
    processInfo.incrementCurrentBatchCount();
    return currentBatch;
}
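The core of the method above is its per-batch locking: one Semaphore per batch key, with a second isPreviouslyExtracted check after the lock is won, so that only one thread performs the extract while late arrivals reuse the staged result. The following self-contained sketch isolates just that pattern; the class and method names are invented for illustration, and the real code additionally handles staging cleanup and statistics.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Semaphore;
import java.util.function.BooleanSupplier;

// Invented helper class; mirrors the double-checked, per-key semaphore pattern above.
public class PerKeyExtractLock {

    private final Map<String, Semaphore> locks = new HashMap<>();

    /** Runs extract at most once per key among concurrent callers. */
    public void extractOnce(String key, BooleanSupplier alreadyExtracted, Runnable extract) {
        Semaphore lock;
        synchronized (locks) {
            lock = locks.get(key);
            if (lock == null) {
                lock = new Semaphore(1);
                locks.put(key, lock);
            }
            // the real code calls acquire() and wraps InterruptedException in its own exception type
            lock.acquireUninterruptibly();
        }
        try {
            // second check after winning the lock: another thread may have already done the work
            if (!alreadyExtracted.getAsBoolean()) {
                extract.run();
            }
        } finally {
            lock.release();
            synchronized (locks) {
                locks.remove(key);
            }
        }
    }
}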