Use of org.apache.camel.component.salesforce.api.dto.bulk.JobInfo in the Apache Camel project: class BulkApiIntegrationTest, method testRetry.
@Test
public void testRetry() throws Exception {
    final SalesforceComponent sf = context().getComponent("salesforce", SalesforceComponent.class);
    final String accessToken = sf.getSession().getAccessToken();

    final SslContextFactory sslContextFactory = new SslContextFactory();
    sslContextFactory.setSslContext(new SSLContextParameters().createSSLContext(context));
    final HttpClient httpClient = new HttpClient(sslContextFactory);
    httpClient.setConnectTimeout(60000);
    httpClient.start();
    try {
        // Revoke the current access token out-of-band so the following bulk
        // call is forced to re-authenticate, exercising the retry/re-login path.
        final String uri = sf.getLoginConfig().getLoginUrl() + "/services/oauth2/revoke?token=" + accessToken;
        final Request logoutGet = httpClient.newRequest(uri).method(HttpMethod.GET).timeout(1, TimeUnit.MINUTES);
        final ContentResponse response = logoutGet.send();
        assertEquals(HttpStatus.OK_200, response.getStatus());
    } finally {
        // Fix: the client was started but never stopped, leaking its
        // selector threads for the remainder of the test run.
        httpClient.stop();
    }

    // With the stale token revoked, creating a job must transparently re-login.
    final JobInfo jobInfo = new JobInfo();
    jobInfo.setOperation(OperationEnum.INSERT);
    jobInfo.setContentType(ContentType.CSV);
    jobInfo.setObject(Merchandise__c.class.getSimpleName());
    createJob(jobInfo);
}
Use of org.apache.camel.component.salesforce.api.dto.bulk.JobInfo in the Apache Camel project: class BulkApiJobIntegrationTest, method getJobs.
// test jobs for testJobLifecycle
@Parameters(name = "operation = {1}")
public static Iterable<Object[]> getJobs() {
    // Each entry pairs a JobInfo with its operation name (used as the test label).
    final List<JobInfo> result = new ArrayList<>();

    // insert XML
    result.add(newMerchandiseJob(ContentType.XML, OperationEnum.INSERT));
    // insert CSV (original code instantiated this JobInfo twice; redundant
    // `new JobInfo()` removed)
    result.add(newMerchandiseJob(ContentType.CSV, OperationEnum.INSERT));
    // update CSV
    result.add(newMerchandiseJob(ContentType.CSV, OperationEnum.UPDATE));

    // upsert CSV — upsert additionally needs the external id field used to
    // match existing records
    final JobInfo upsertCsv = newMerchandiseJob(ContentType.CSV, OperationEnum.UPSERT);
    upsertCsv.setExternalIdFieldName("Name");
    result.add(upsertCsv);

    // delete CSV
    result.add(newMerchandiseJob(ContentType.CSV, OperationEnum.DELETE));
    // hard delete CSV
    result.add(newMerchandiseJob(ContentType.CSV, OperationEnum.HARD_DELETE));
    // query CSV
    result.add(newMerchandiseJob(ContentType.CSV, OperationEnum.QUERY));

    return result.stream().map(j -> new Object[] { j, j.getOperation().name() }).collect(Collectors.toList());
}

/**
 * Builds a bulk job against the Merchandise__c object with the given
 * content type and operation.
 */
private static JobInfo newMerchandiseJob(ContentType contentType, OperationEnum operation) {
    final JobInfo job = new JobInfo();
    job.setObject(Merchandise__c.class.getSimpleName());
    job.setContentType(contentType);
    job.setOperation(operation);
    return job;
}
Use of org.apache.camel.component.salesforce.api.dto.bulk.JobInfo in the Apache Camel project: class DefaultBulkApiClient, method closeJob.
@Override
public void closeJob(String jobId, final JobInfoResponseCallback callback) {
    // Minimal JobInfo whose only purpose is to flip the job state to CLOSED.
    final JobInfo closeRequest = new JobInfo();
    closeRequest.setState(JobStateEnum.CLOSED);

    final Request post = getRequest(HttpMethod.POST, jobUrl(jobId));
    try {
        marshalRequest(objectFactory.createJobInfo(closeRequest), post, APPLICATION_XML_UTF8);
    } catch (SalesforceException e) {
        // Marshalling failed before any HTTP traffic; report the error and bail out.
        callback.onResponse(null, e);
        return;
    }

    // Issue the POST and hand the unmarshalled JobInfo (or the failure) back to the caller.
    doHttpRequest(post, new ClientResponseCallback() {
        @Override
        public void onResponse(InputStream response, SalesforceException ex) {
            JobInfo result = null;
            try {
                result = unmarshalResponse(response, post, JobInfo.class);
            } catch (SalesforceException e) {
                // Surface the unmarshalling error in place of any transport error.
                ex = e;
            }
            callback.onResponse(result, ex);
        }
    });
}
Use of org.apache.camel.component.salesforce.api.dto.bulk.JobInfo in the Apache Camel project: class BulkApiProcessor, method processCloseJob.
private void processCloseJob(final Exchange exchange, final AsyncCallback callback) throws SalesforceException {
    // Prefer the job id carried by a JobInfo message body; only when the body
    // is not a JobInfo fall back to the (mandatory) JOB_ID parameter.
    final JobInfo bodyJob = exchange.getIn().getBody(JobInfo.class);
    final String jobId = bodyJob != null
            ? bodyJob.getId()
            : getParameter(JOB_ID, exchange, USE_BODY, NOT_OPTIONAL);

    // Close asynchronously; the callback funnels the result into the exchange.
    bulkClient.closeJob(jobId, new BulkApiClient.JobInfoResponseCallback() {
        @Override
        public void onResponse(JobInfo jobInfo, SalesforceException ex) {
            processResponse(exchange, jobInfo, ex, callback);
        }
    });
}
Use of org.apache.camel.component.salesforce.api.dto.bulk.JobInfo in the Apache Camel project: class BulkApiBatchIntegrationTest, method testBatchLifecycle.
@Theory
public void testBatchLifecycle(BatchTest request) throws Exception {
    log.info("Testing Batch lifecycle with {} content", request.contentType);

    // create an UPSERT test Job for this batch request
    JobInfo jobInfo = new JobInfo();
    jobInfo.setOperation(OperationEnum.UPSERT);
    jobInfo.setContentType(request.contentType);
    jobInfo.setObject(Merchandise__c.class.getSimpleName());
    jobInfo.setExternalIdFieldName("Name");
    jobInfo = createJob(jobInfo);

    // test createBatch
    Map<String, Object> headers = new HashMap<>();
    headers.put(SalesforceEndpointConfig.JOB_ID, jobInfo.getId());
    headers.put(SalesforceEndpointConfig.CONTENT_TYPE, jobInfo.getContentType());
    BatchInfo batchInfo = template().requestBodyAndHeaders("direct:createBatch", request.stream, headers, BatchInfo.class);
    assertNotNull("Null batch", batchInfo);
    assertNotNull("Null batch id", batchInfo.getId());

    // test getAllBatches
    @SuppressWarnings("unchecked") List<BatchInfo> batches = template().requestBody("direct:getAllBatches", jobInfo, List.class);
    assertNotNull("Null batches", batches);
    assertFalse("Empty batch list", batches.isEmpty());

    // test getBatch
    batchInfo = batches.get(0);
    batchInfo = getBatchInfo(batchInfo);

    // test getRequest
    InputStream requestStream = template().requestBody("direct:getRequest", batchInfo, InputStream.class);
    assertNotNull("Null batch request", requestStream);

    // wait for batch to finish
    // NOTE(review): this poll has no upper bound, so a batch stuck in
    // processing hangs the test until the harness timeout — consider a deadline.
    log.info("Waiting for batch to finish...");
    while (!batchProcessed(batchInfo)) {
        // sleep 5 seconds between polls
        Thread.sleep(5000);
        // check again
        batchInfo = getBatchInfo(batchInfo);
    }
    // parameterized logging instead of string concatenation
    log.info("Batch finished with state {}", batchInfo.getState());
    assertEquals("Batch did not succeed", BatchStateEnum.COMPLETED, batchInfo.getState());

    // test getResults
    InputStream results = template().requestBody("direct:getResults", batchInfo, InputStream.class);
    assertNotNull("Null batch results", results);

    // close the test job
    template().requestBody("direct:closeJob", jobInfo, JobInfo.class);
}
Aggregations