Use of org.jumpmind.symmetric.io.data.DataContext in project symmetric-ds by JumpMind.

From class DataExtractorService, the method cleanupIgnoredBatch:
protected void cleanupIgnoredBatch(Node sourceNode, Node targetNode, OutgoingBatch currentBatch, IDataWriter writer) {
    Batch batch = new Batch(BatchType.EXTRACT, currentBatch.getBatchId(), currentBatch.getChannelId(),
            symmetricDialect.getBinaryEncoding(), sourceNode.getNodeId(), currentBatch.getNodeId(),
            currentBatch.isCommonFlag());
    batch.setIgnored(true);
    try {
        // Remove any payload that was already staged for this batch.
        IStagedResource resource = getStagedResource(currentBatch);
        if (resource != null) {
            resource.delete();
        }
        // Attach the source and target nodes to the context so downstream writers and filters can read them.
        DataContext ctx = new DataContext(batch);
        ctx.put("targetNode", targetNode);
        ctx.put("sourceNode", sourceNode);
        // Send the batch through the writer as an empty, ignored batch.
        writer.open(ctx);
        writer.start(batch);
        writer.end(batch, false);
    } finally {
        writer.close();
    }
}
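
The method above shows the basic handshake between a DataContext and an IDataWriter: the context is built around the Batch, enriched with put() attributes, and handed to open() before the start/end calls for the batch itself. Below is a minimal sketch of that lifecycle using only the calls visible above; the class name and the writeIgnoredBatch helper are hypothetical, and the import paths are assumptions.

import org.jumpmind.symmetric.io.data.Batch;
import org.jumpmind.symmetric.io.data.DataContext;
import org.jumpmind.symmetric.io.data.IDataWriter;

public class IgnoredBatchSketch {

    // Hypothetical helper: pushes an empty, ignored batch through any IDataWriter.
    static void writeIgnoredBatch(IDataWriter writer, Batch batch, Object sourceNode, Object targetNode) {
        batch.setIgnored(true);
        DataContext ctx = new DataContext(batch);
        // Context attributes are plain key/value pairs that filters and writers can look up later.
        ctx.put("sourceNode", sourceNode);
        ctx.put("targetNode", targetNode);
        try {
            writer.open(ctx);
            writer.start(batch);
            writer.end(batch, false); // the boolean flags whether the batch ended in error
        } finally {
            writer.close();
        }
    }
}
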
From class StagingDataWriterTest, the method readThenWrite:
public void readThenWrite(long threshold) throws Exception {
    InputStreamReader is = new InputStreamReader(getClass().getResourceAsStream("FileCsvDataWriterTest.1.csv"));
    String origCsv = IOUtils.toString(is);
    is.close();
    StagingManager stagingManager = new StagingManager(DIR.getAbsolutePath());
    ProtocolDataReader reader = new ProtocolDataReader(BatchType.LOAD, "test", origCsv);
    StagingDataWriter writer = new StagingDataWriter(threshold, false, "aaa", "test", stagingManager, new BatchListener());
    DataProcessor processor = new DataProcessor(reader, writer, "test");
    processor.process(new DataContext());
    assertEquals(1, batchesWritten.size());
    assertEquals(convertEol(origCsv), convertEol(batchesWritten.get(0)));
    IStagedResource resource = (IStagedResource) stagingManager.find("test", "aaa", 1);
    assertNotNull(resource);
    if (threshold > origCsv.length()) {
        // Content below the threshold is held in memory, so no staging file is created.
        assertFalse(resource.getFile().exists());
    } else {
        assertTrue(resource.getFile().exists());
    }
    resource.delete();
    assertFalse(resource.getFile().exists());
}
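
In the test, DataProcessor owns the read/write loop and passes the same DataContext to both the reader and the writer for the entire run. A minimal sketch of that wiring, reusing only the constructors shown above (the import paths, class name, and replay helper are assumptions), might look like this:

import org.jumpmind.symmetric.io.data.Batch.BatchType;
import org.jumpmind.symmetric.io.data.DataContext;
import org.jumpmind.symmetric.io.data.DataProcessor;
import org.jumpmind.symmetric.io.data.IDataWriter;
import org.jumpmind.symmetric.io.data.reader.ProtocolDataReader;

public class ProtocolReplaySketch {

    // Hypothetical helper: replays SymmetricDS protocol CSV text into any writer.
    static void replay(String protocolCsv, IDataWriter writer) {
        // The string constructor arguments mirror the test above.
        ProtocolDataReader reader = new ProtocolDataReader(BatchType.LOAD, "test", protocolCsv);
        DataProcessor processor = new DataProcessor(reader, writer, "test");
        // One DataContext is created for the whole run and shared by the reader and the writer.
        processor.process(new DataContext());
    }
}
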
From class AbstractXmlPublisherExtensionPoint, the method resend:
@ManagedOperation(description = "Looks up rows in the database and resends them to the publisher")
@ManagedOperationParameters({ @ManagedOperationParameter(name = "args", description = "A pipe delimited list of key values to use to look up the tables to resend") })
public boolean resend(String args) {
    try {
        String[] argArray = args != null ? args.split("\\|") : new String[0];
        DataContext context = new DataContext();
        IDatabasePlatform platform = engine.getDatabasePlatform();
        for (String tableName : tableNamesToPublishAsGroup) {
            Table table = platform.getTableFromCache(tableName, false);
            List<String[]> dataRowsForTable = readData(table, argArray);
            for (String[] values : dataRowsForTable) {
                // Each row is republished under its own batch, but the same context is reused.
                Batch batch = new Batch();
                batch.setBinaryEncoding(engine.getSymmetricDialect().getBinaryEncoding());
                batch.setSourceNodeId("republish");
                context.setBatch(batch);
                CsvData data = new CsvData(DataEventType.INSERT);
                data.putParsedData(CsvData.ROW_DATA, values);
                Element xml = getXmlFromCache(context, context.getBatch().getBinaryEncoding(), table.getColumnNames(),
                        data.getParsedData(CsvData.ROW_DATA), table.getPrimaryKeyColumnNames(),
                        data.getParsedData(CsvData.PK_DATA));
                if (xml != null) {
                    toXmlElement(data.getDataEventType(), xml, table.getCatalog(), table.getSchema(), table.getName(),
                            table.getColumnNames(), data.getParsedData(CsvData.ROW_DATA),
                            table.getPrimaryKeyColumnNames(), data.getParsedData(CsvData.PK_DATA));
                }
            }
        }
        if (doesXmlExistToPublish(context)) {
            finalizeXmlAndPublish(context);
            return true;
        } else {
            log.warn(String.format("Failed to resend message for tables %s, columns %s, and args %s",
                    tableNamesToPublishAsGroup, groupByColumnNames, args));
        }
    } catch (RuntimeException ex) {
        log.error(String.format("Failed to resend message for tables %s, columns %s, and args %s",
                tableNamesToPublishAsGroup, groupByColumnNames, args), ex);
    }
    return false;
}
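
The resend operation above reuses a single DataContext for every republished row and swaps the current Batch in with setBatch(), so helpers that receive the context always see the batch of the row being processed. A minimal sketch of that reuse pattern, with a hypothetical RowPublisher callback standing in for the XML helpers (class names and import paths assumed), could be:

import java.util.List;

import org.jumpmind.symmetric.io.data.Batch;
import org.jumpmind.symmetric.io.data.CsvData;
import org.jumpmind.symmetric.io.data.DataContext;
import org.jumpmind.symmetric.io.data.DataEventType;

public class RepublishSketch {

    // Hypothetical callback standing in for getXmlFromCache/toXmlElement above.
    interface RowPublisher {
        void publishRow(DataContext context, CsvData data);
    }

    static void republish(List<String[]> rows, String sourceNodeId, RowPublisher publisher) {
        DataContext context = new DataContext();
        for (String[] values : rows) {
            // Each row gets its own Batch, but the same context is reused, so anything
            // stored with put() earlier stays visible across iterations.
            Batch batch = new Batch();
            batch.setSourceNodeId(sourceNodeId);
            context.setBatch(batch);
            CsvData data = new CsvData(DataEventType.INSERT);
            data.putParsedData(CsvData.ROW_DATA, values);
            publisher.publishRow(context, data);
        }
    }
}
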
From class XmlPublisherFilterTest, the method setUp:
@Before
public void setUp() {
    context = new DataContext(new Batch(BatchType.LOAD, 1111, "default", BinaryEncoding.BASE64, "54321", "00000", false));
    table = Table.buildTable(TABLE_TEST, new String[] { "ID1", "ID2" },
            new String[] { "ID1", "ID2", "DATA1", "DATA2", "DATA3" });
}
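
The fixture wraps a fully populated LOAD batch in the DataContext; the seven Batch constructor arguments appear in the same order as in the extractor example above (batch type, batch id, channel, binary encoding, source node, target node, common flag). A small sketch of reading those values back from the context, assuming getters that mirror the constructor (only getBinaryEncoding() appears verbatim in the publisher example; the class name, helper, and import paths here are illustrative assumptions), might be:

import static org.junit.Assert.assertEquals;

import org.jumpmind.symmetric.io.data.Batch;
import org.jumpmind.symmetric.io.data.DataContext;
import org.jumpmind.util.BinaryEncoding;

public class ContextFixtureSketch {

    // Hypothetical assertion helper; getChannelId() and getBatchId() are assumed
    // to mirror the constructor arguments above.
    static void assertLoadBatchFixture(DataContext context) {
        Batch batch = context.getBatch();
        assertEquals(BinaryEncoding.BASE64, batch.getBinaryEncoding());
        assertEquals("default", batch.getChannelId());
        assertEquals(1111, batch.getBatchId());
    }
}
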