Use of org.jumpmind.symmetric.service.IDataExtractorService in project symmetric-ds by JumpMind.

The class NodeManagementService, method extractBatcheRange:

@ManagedOperation(description = "Extract multiple batches to a file for a time range")
@ManagedOperationParameters({
        @ManagedOperationParameter(name = "fileName", description = "The file to write the batch output to"),
        @ManagedOperationParameter(name = "nodeId", description = "The target node id whose batches need extracted"),
        @ManagedOperationParameter(name = "startTime", description = "The start time range to extract. The format is yyyy-MM-dd hh:mm"),
        @ManagedOperationParameter(name = "endTime", description = "The end time range to extract. The format is yyyy-MM-dd hh:mm"),
        @ManagedOperationParameter(name = "channelIdList", description = "A comma separated list of channels to extract") })
public boolean extractBatcheRange(String fileName, String nodeId, String startTime, String endTime, String channelIdList) {
    File file = new File(fileName);
    file.getParentFile().mkdirs();
    Date startBatchTime = FormatUtils.parseDate(startTime, FormatUtils.TIMESTAMP_PATTERNS);
    Date endBatchTime = FormatUtils.parseDate(endTime, FormatUtils.TIMESTAMP_PATTERNS);
    String[] channelIds = channelIdList.split(",");
    IDataExtractorService dataExtractorService = engine.getDataExtractorService();
    BufferedWriter writer = null;
    try {
        writer = new BufferedWriter(new FileWriter(file));
        dataExtractorService.extractBatchRange(writer, nodeId, startBatchTime, endBatchTime, channelIds);
        return true;
    } catch (Exception ex) {
        log.error("Failed to write batch range to file", ex);
        return false;
    } finally {
        IOUtils.closeQuietly(writer);
    }
}
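The same time-range extraction can be driven directly against an embedded engine rather than through the JMX operation above. A minimal sketch, with imports omitted as in the snippets on this page; it assumes a started ISymmetricEngine named engine and an SLF4J logger named log, and the node id, time window, channel names, and output path are hypothetical example values.

// Minimal sketch: "engine" and "log" are assumed; node id, dates, channels are examples.
File file = new File("/tmp/batches-001.csv");
file.getParentFile().mkdirs();
Date start = FormatUtils.parseDate("2023-01-01 00:00", FormatUtils.TIMESTAMP_PATTERNS);
Date end = FormatUtils.parseDate("2023-01-02 00:00", FormatUtils.TIMESTAMP_PATTERNS);
IDataExtractorService extractor = engine.getDataExtractorService();
try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
    // Write every batch captured for node "001" in the window, limited to the listed channels.
    extractor.extractBatchRange(writer, "001", start, end, new String[] { "sale_transaction", "item" });
} catch (Exception ex) {
    log.error("Failed to write batch range to file", ex);
}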
Use of org.jumpmind.symmetric.service.IDataExtractorService in project symmetric-ds by JumpMind.

The class AbstractDataExtractorServiceTest, method testExtractConfigurationStandalone:

@Test
public void testExtractConfigurationStandalone() throws Exception {
    IDataExtractorService dataExtractorService = getDataExtractorService();
    StringWriter writer = new StringWriter();
    dataExtractorService.extractConfigurationStandalone(TestConstants.TEST_CLIENT_NODE, writer);
    String content = writer.getBuffer().toString();
    assertNumberOfLinesThatStartWith(25, "table,", content, false, true);
    assertNumberOfLinesThatStartWith(23, "columns,", content);
    assertNumberOfLinesThatStartWith(23, "keys,", content);
    assertNumberOfLinesThatStartWith(23, "sql,", content);
    assertNumberOfLinesThatStartWith(0, "update,", content);
    assertNumberOfLinesThatStartWith(65, "insert,", content, false, true);
    assertNumberOfLinesThatStartWith(1, "commit,-9999", content);
    assertNumberOfLinesThatStartWith(1, "batch,-9999", content);
    assertNumberOfLinesThatStartWith(1, "nodeid,", content);
    assertNumberOfLinesThatStartWith(1, "binary,", content);
}
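extractConfigurationStandalone writes the configuration batch in SymmetricDS CSV protocol to any Writer, so the same call can be used outside a test to dump what a client node would receive. A minimal sketch, imports omitted; engine is assumed to be a started ISymmetricEngine, the node id and output path are hypothetical, and the enclosing method is assumed to declare throws Exception.

// Minimal sketch: "engine" is assumed to be a started ISymmetricEngine; "001" is an example node id.
org.jumpmind.symmetric.model.Node targetNode = engine.getNodeService().findNode("001");
try (Writer writer = new FileWriter("/tmp/config-for-001.csv")) {
    // Streams the same table/columns/keys/sql/insert/commit lines the test above asserts on.
    engine.getDataExtractorService().extractConfigurationStandalone(targetNode, writer);
}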
Use of org.jumpmind.symmetric.service.IDataExtractorService in project symmetric-ds by JumpMind.

The class AbstractDataExtractorServiceTest, method extract:

protected ExtractResults extract() {
    IDataExtractorService service = getDataExtractorService();
    StringWriter writer = new StringWriter();
    InternalOutgoingTransport transport = new InternalOutgoingTransport(new BufferedWriter(writer));
    List<OutgoingBatch> batches = service.extract(new ProcessInfo(), TestConstants.TEST_CLIENT_NODE, transport);
    return new ExtractResults(batches, writer.getBuffer().toString());
}
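The InternalOutgoingTransport used here lets the caller capture the extracted CSV protocol text in memory instead of streaming it over HTTP, which is also handy for ad hoc debugging against a running engine. A minimal sketch, imports omitted; engine is assumed to be a started ISymmetricEngine and the node id is a hypothetical example.

// Minimal sketch: "engine" is assumed; "001" is an example node id.
IDataExtractorService service = engine.getDataExtractorService();
org.jumpmind.symmetric.model.Node targetNode = engine.getNodeService().findNode("001");
StringWriter writer = new StringWriter();
InternalOutgoingTransport transport = new InternalOutgoingTransport(new BufferedWriter(writer));
// The returned list describes which outgoing batches were extracted; the CSV text ends up in the writer.
List<OutgoingBatch> batches = service.extract(new ProcessInfo(), targetNode, transport);
System.out.println("Extracted " + batches.size() + " batches, " + writer.getBuffer().length() + " characters of CSV");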
Use of org.jumpmind.symmetric.service.IDataExtractorService in project symmetric-ds by JumpMind.

The class SymmetricAdmin, method exportBatch:

private void exportBatch(CommandLine line, List<String> args) throws Exception {
    IDataExtractorService dataExtractorService = getSymmetricEngine().getDataExtractorService();
    String nodeId = popArg(args, "Node ID");
    String batchId = popArg(args, "Batch ID");
    OutputStreamWriter writer = getWriter(args);
    dataExtractorService.extractBatchRange(writer, nodeId, Long.valueOf(batchId), Long.valueOf(batchId));
    writer.close();
}
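The batch-id overload of extractBatchRange is used above to export a single batch by passing the same id as both ends of the range; the same call covers a contiguous range. A minimal sketch, imports omitted; engine is assumed to be a started ISymmetricEngine, the enclosing method is assumed to declare throws Exception, and the node id, batch ids, and output path are hypothetical.

// Minimal sketch: "engine" is assumed; node id, batch ids, and path are examples.
IDataExtractorService extractor = engine.getDataExtractorService();
try (Writer writer = new FileWriter("/tmp/batches-45-to-52.csv")) {
    // Export batches 45 through 52 that were captured for target node "001".
    extractor.extractBatchRange(writer, "001", 45L, 52L);
}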
Use of org.jumpmind.symmetric.service.IDataExtractorService in project symmetric-ds by JumpMind.

The class RestService, method getPullData:

@ApiOperation(value = "Pull pending batches for the specified node for the specified engine")
@RequestMapping(value = "/engine/{engine}/pulldata", method = RequestMethod.GET)
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public final PullDataResults getPullData(@PathVariable("engine") String engineName,
        @RequestParam(value = WebConstants.NODE_ID) String nodeId,
        @ApiParam(value = "This is the password for the nodeId being passed in. The password is stored in the node_security table.") @RequestParam(value = WebConstants.SECURITY_TOKEN) String securityToken,
        @RequestParam(value = "useJdbcTimestampFormat", required = false, defaultValue = "true") boolean useJdbcTimestampFormat,
        @RequestParam(value = "useUpsertStatements", required = false, defaultValue = "false") boolean useUpsertStatements,
        @RequestParam(value = "useDelimitedIdentifiers", required = false, defaultValue = "true") boolean useDelimitedIdentifiers,
        @RequestParam(value = "hostName", required = false) String hostName) {
    ISymmetricEngine engine = getSymmetricEngine(engineName);
    IDataExtractorService dataExtractorService = engine.getDataExtractorService();
    IStatisticManager statisticManager = engine.getStatisticManager();
    INodeService nodeService = engine.getNodeService();
    org.jumpmind.symmetric.model.Node targetNode = nodeService.findNode(nodeId);
    if (securityVerified(nodeId, engine, securityToken)) {
        ProcessInfo processInfo = statisticManager.newProcessInfo(new ProcessInfoKey(nodeService.findIdentityNodeId(), nodeId, ProcessType.REST_PULL_HANLDER));
        try {
            PullDataResults results = new PullDataResults();
            List<OutgoingBatchWithPayload> extractedBatches = dataExtractorService.extractToPayload(processInfo, targetNode, PayloadType.SQL, useJdbcTimestampFormat, useUpsertStatements, useDelimitedIdentifiers);
            List<Batch> batches = new ArrayList<Batch>();
            for (OutgoingBatchWithPayload outgoingBatchWithPayload : extractedBatches) {
                if (outgoingBatchWithPayload.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.LD
                        || outgoingBatchWithPayload.getStatus() == org.jumpmind.symmetric.model.OutgoingBatch.Status.IG) {
                    Batch batch = new Batch();
                    batch.setBatchId(outgoingBatchWithPayload.getBatchId());
                    batch.setChannelId(outgoingBatchWithPayload.getChannelId());
                    batch.setSqlStatements(outgoingBatchWithPayload.getPayload());
                    batches.add(batch);
                }
            }
            results.setBatches(batches);
            results.setNbrBatches(batches.size());
            processInfo.setStatus(org.jumpmind.symmetric.model.ProcessInfo.Status.OK);
            if (engine.getParameterService().is(ParameterConstants.REST_HEARTBEAT_ON_PULL) && hostName != null) {
                Heartbeat heartbeat = new Heartbeat();
                heartbeat.setNodeId(nodeId);
                heartbeat.setHeartbeatTime(new Date());
                heartbeat.setHostName(hostName);
                this.heartbeatImpl(engine, heartbeat);
            }
            return results;
        } finally {
            if (processInfo.getStatus() != org.jumpmind.symmetric.model.ProcessInfo.Status.OK) {
                processInfo.setStatus(org.jumpmind.symmetric.model.ProcessInfo.Status.ERROR);
            }
        }
    } else {
        throw new NotAllowedException();
    }
}
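Because getPullData is a plain GET endpoint, the pending batches can be pulled with any HTTP client. A minimal sketch using java.net.HttpURLConnection, imports omitted; the host, port, base path (/api), engine name, node id, and security token are hypothetical example values, and the query parameter names assume WebConstants.NODE_ID and WebConstants.SECURITY_TOKEN resolve to nodeId and securityToken. The response body is the serialized PullDataResults.

// Minimal sketch: URL, engine name, node id, and token are example values.
URL url = new URL("http://localhost:31415/api/engine/corp-000/pulldata"
        + "?nodeId=001&securityToken=changeme&useJdbcTimestampFormat=true");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestProperty("Accept", "application/json");
try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
    // Each batch in the response carries its batchId, channelId, and the extracted SQL statements.
    String line;
    while ((line = reader.readLine()) != null) {
        System.out.println(line);
    }
}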