Use of org.jumpmind.symmetric.io.data.CsvData in the symmetric-ds project by JumpMind:
the resend method of the class AbstractXmlPublisherExtensionPoint.
/**
 * Re-reads the rows identified by the given key values from the database and
 * republishes them as XML through the configured publisher.
 *
 * @param args a pipe-delimited list of key values used to look up the rows to
 *            resend in each table of the publish group; may be null (treated as
 *            no key values)
 * @return true if XML was produced and published; false if nothing matched or a
 *         RuntimeException occurred (the error is logged, not rethrown)
 */
@ManagedOperation(description = "Looks up rows in the database and resends them to the publisher")
@ManagedOperationParameters({ @ManagedOperationParameter(name = "args", description = "A pipe delimited list of key values to use to look up the tables to resend") })
public boolean resend(String args) {
    try {
        // Split the pipe-delimited argument string into individual key values.
        String[] argArray = args != null ? args.split("\\|") : new String[0];
        DataContext context = new DataContext();
        IDatabasePlatform platform = engine.getDatabasePlatform();
        for (String tableName : tableNamesToPublishAsGroup) {
            Table table = platform.getTableFromCache(tableName, false);
            List<String[]> dataRowsForTable = readData(table, argArray);
            for (String[] values : dataRowsForTable) {
                Batch batch = new Batch();
                batch.setBinaryEncoding(engine.getSymmetricDialect().getBinaryEncoding());
                // Tag the batch so downstream consumers can tell this is a republish.
                batch.setSourceNodeId("republish");
                context.setBatch(batch);
                CsvData data = new CsvData(DataEventType.INSERT);
                data.putParsedData(CsvData.ROW_DATA, values);
                Element xml = getXmlFromCache(context, context.getBatch().getBinaryEncoding(),
                        table.getColumnNames(), data.getParsedData(CsvData.ROW_DATA),
                        table.getPrimaryKeyColumnNames(), data.getParsedData(CsvData.PK_DATA));
                if (xml != null) {
                    toXmlElement(data.getDataEventType(), xml, table.getCatalog(), table.getSchema(),
                            table.getName(), table.getColumnNames(), data.getParsedData(CsvData.ROW_DATA),
                            table.getPrimaryKeyColumnNames(), data.getParsedData(CsvData.PK_DATA));
                }
            }
        }
        if (doesXmlExistToPublish(context)) {
            finalizeXmlAndPublish(context);
            return true;
        } else {
            // Parameterized logging avoids the eager String.format when WARN is disabled.
            log.warn("Failed to resend message for tables {}, columns {}, and args {}",
                    tableNamesToPublishAsGroup, groupByColumnNames, args);
        }
    } catch (RuntimeException ex) {
        log.error(String.format("Failed to resend message for tables %s, columns %s, and args %s",
                tableNamesToPublishAsGroup, groupByColumnNames, args), ex);
    }
    return false;
}
Use of org.jumpmind.symmetric.io.data.CsvData in the symmetric-ds project by JumpMind:
the testInsertCollision method of the class OracleBulkDatabaseWriterTest.
/**
 * Verifies that a bulk-load batch containing a primary-key collision is rolled
 * back as a unit: after the failed batch, only the original row remains.
 */
@Test
public void testInsertCollision() {
    // instanceof already returns false for null, so no separate null check is needed.
    if (platform instanceof OracleDatabasePlatform) {
        platform.getSqlTemplate().update("truncate table test_bulkload_table_1");
        String[] values = buildCollisionRow();
        CsvData data = new CsvData(DataEventType.INSERT, values);
        writeData(data, values);
        Assert.assertEquals(1, countRows("test_bulkload_table_1"));
        try {
            setErrorExpected(true);
            List<CsvData> datas = new ArrayList<CsvData>();
            // Re-add the already-inserted row so the batch is guaranteed to collide.
            datas.add(data);
            for (int i = 0; i < 10; i++) {
                datas.add(new CsvData(DataEventType.INSERT, buildCollisionRow()));
            }
            // we should collide and rollback
            writeData(new TableCsvData(platform.getTableFromCache("test_bulkload_table_1", false), datas));
            Assert.assertEquals(1, countRows("test_bulkload_table_1"));
        } finally {
            setErrorExpected(false);
        }
    }
}

/** Builds one test row; getNextId() keeps the primary key unique per call. */
private String[] buildCollisionRow() {
    return new String[] { getNextId(), "string2", "string not null2", "char2", "char not null2",
            "2007-01-02 03:20:10.000", "2007-02-03 04:05:06.000", "0", "47", "67.89", "-0.0747663" };
}
Use of org.jumpmind.symmetric.io.data.CsvData in the symmetric-ds project by JumpMind:
the insertAndVerify method of the class AbstractBulkDatabaseWriterTest.
/**
 * Inserts a single row through the bulk writer and asserts the row read back
 * from the test table matches the given values.
 *
 * @param values the column values to insert; values[0] is the row's id
 */
protected void insertAndVerify(String[] values) {
    List<CsvData> data = new ArrayList<CsvData>();
    // values.clone() replaces the cast-requiring ArrayUtils.clone; the copy
    // keeps the writer from mutating the caller's array before verification.
    data.add(new CsvData(DataEventType.INSERT, values.clone()));
    writeData(data);
    assertTestTableEquals(values[0], values);
}
Use of org.jumpmind.symmetric.io.data.CsvData in the symmetric-ds project by JumpMind:
the performFallbackToInsert method of the class DefaultTransformWriterConflictResolver.
/**
 * Handles an update conflict by falling back to an insert. If the data carries a
 * TransformedData attribute and retransform is requested, the source row is
 * re-run through the INSERT transforms and the matching result is inserted;
 * otherwise the insert falls through to the superclass behavior unchanged.
 */
@Override
protected void performFallbackToInsert(AbstractDatabaseWriter writer, CsvData data, Conflict conflict, boolean retransform) {
    TransformedData transformedData = data.getAttribute(TransformedData.class.getName());
    if (transformedData != null && retransform) {
        List<TransformedData> newlyTransformedDatas = transformWriter.transform(DataEventType.INSERT, writer.getContext(), transformedData.getTransformation(), transformedData.getSourceKeyValues(), transformedData.getOldSourceValues(), transformedData.getSourceValues());
        if (newlyTransformedDatas.size() > 0) {
            boolean matchedTransform = false;
            for (TransformedData newlyTransformedData : newlyTransformedDatas) {
                /*
                 * If there is only one transform, then process it.
                 * Otherwise, we need to attempt to match the key values to
                 * choose the correct transform.
                 */
                if (newlyTransformedDatas.size() == 1 || newlyTransformedData.hasSameKeyValues(transformedData.getKeyValues()) || newlyTransformedData.isGeneratedIdentityNeeded()) {
                    matchedTransform = true;
                    Table table = newlyTransformedData.buildTargetTable();
                    CsvData newData = newlyTransformedData.buildTargetCsvData();
                    if (newlyTransformedData.isGeneratedIdentityNeeded()) {
                        if (log.isDebugEnabled()) {
                            log.debug("Enabling generation of identity for {}", newlyTransformedData.getTableName());
                        }
                        writer.allowInsertIntoAutoIncrementColumns(false, table);
                    } else if (table.hasAutoIncrementColumn()) {
                        writer.allowInsertIntoAutoIncrementColumns(true, table);
                    }
                    writer.start(table);
                    super.performFallbackToInsert(writer, newData, conflict, retransform);
                    writer.end(table);
                }
            }
            if (!matchedTransform) {
                // BUG FIX: the key values were previously concatenated onto the format
                // string, leaving the {} placeholder unfilled; pass them as an argument.
                log.warn("The attempt to retransform resulted in more than one transform. We tried to choose one "
                        + "by matching on the ordered key values, but could not find a match. Please check that the "
                        + "transformation is configured so that it will return keys in the same order regardless of DML type. "
                        + "The original key values that we tried to match on were: {}",
                        ArrayUtils.toString(transformedData.getKeyValues()));
            }
        }
    } else {
        super.performFallbackToInsert(writer, data, conflict, retransform);
    }
}
Use of org.jumpmind.symmetric.io.data.CsvData in the symmetric-ds project by JumpMind:
the nextData method of the class ProtocolDataReader.
/**
 * Returns the next CsvData token from the protocol stream, or null when a
 * batch/table boundary (or end of input) is reached before any data row.
 */
public CsvData nextData() {
    // A data token may already have been read ahead by a previous call.
    if (next instanceof CsvData) {
        CsvData csv = (CsvData) next;
        next = null;
        return csv;
    }
    // Keep reading until we hit a data row, a structural token, or end of input.
    do {
        next = readNext();
        if (next instanceof CsvData) {
            CsvData csv = (CsvData) next;
            next = null;
            return csv;
        }
    } while (next != null && !(next instanceof Batch) && !(next instanceof Table));
    // A Batch or Table token stays parked in 'next' for the corresponding reader method.
    return null;
}
Aggregations