Use of com.sforce.async.JobInfo in project tdi-studio-se by Talend: class SalesforceBulkAPI, method createJob.
/**
 * Creates a Salesforce Bulk API job configured from this instance's settings
 * (object type, operation, content type, optional concurrency mode and, for
 * upserts, the external-id field name).
 *
 * @return the {@code JobInfo} returned by the server, including the assigned job id
 * @throws AsyncApiException if the Bulk API rejects the job creation request
 * @throws ConnectionException if the connection to Salesforce fails
 */
private JobInfo createJob() throws AsyncApiException, ConnectionException {
    JobInfo job = new JobInfo();
    if (concurrencyMode != null) {
        job.setConcurrencyMode(concurrencyMode);
    }
    job.setObject(sObjectType);
    job.setOperation(operation);
    // Upsert is the only operation that matches rows via an external id field.
    if (OperationEnum.upsert.equals(operation)) {
        job.setExternalIdFieldName(externalIdFieldName);
    }
    job.setContentType(contentType);
    // The server call assigns the job id and returns the authoritative job state.
    job = connection.createJob(job);
    return job;
}
Use of com.sforce.async.JobInfo in project teiid by teiid: class TestBulkInsertExecution, method testFlowAndErrorReturn.
@Test
public void testFlowAndErrorReturn() throws Exception {
    // Target table with one integer column and one string column.
    NamedTable targetTable = new NamedTable("temp", null, Mockito.mock(Table.class));
    ArrayList<ColumnReference> columns = new ArrayList<ColumnReference>();
    columns.add(new ColumnReference(targetTable, "one", Mockito.mock(Column.class), Integer.class));
    columns.add(new ColumnReference(targetTable, "two", Mockito.mock(Column.class), String.class));

    // Two positional parameters bound to the insert's value source.
    List<Expression> bindings = new ArrayList<Expression>();
    Parameter p = new Parameter();
    p.setType(DataTypeManager.DefaultDataClasses.INTEGER);
    p.setValueIndex(0);
    bindings.add(p);
    p = new Parameter();
    p.setType(DataTypeManager.DefaultDataClasses.STRING);
    p.setValueIndex(1);
    bindings.add(p);

    Insert insert = new Insert(targetTable, columns, new ExpressionValueSource(bindings));
    insert.setParameterValues(Arrays.asList(Arrays.asList(2, '2'), Arrays.asList(2, '2'), Arrays.asList(3, '3')).iterator());

    // Three per-row results: two successes followed by one failure.
    Result ok1 = Mockito.mock(Result.class);
    Result ok2 = Mockito.mock(Result.class);
    Result failed = Mockito.mock(Result.class);
    Mockito.when(ok1.isSuccess()).thenReturn(true);
    Mockito.when(ok1.isCreated()).thenReturn(true);
    Mockito.when(ok2.isSuccess()).thenReturn(true);
    Mockito.when(ok2.isCreated()).thenReturn(true);
    Mockito.when(failed.isSuccess()).thenReturn(false);
    Mockito.when(failed.isCreated()).thenReturn(false);
    com.sforce.async.Error error = new com.sforce.async.Error();
    error.setMessage("failed, check your data");
    error.setStatusCode(StatusCode.CANNOT_DISABLE_LAST_ADMIN);
    Mockito.when(failed.getErrors()).thenReturn(new Error[] { error });

    // One BatchResult mock handed back three times, yielding one row each call.
    BatchResult batchResult = Mockito.mock(BatchResult.class);
    Mockito.when(batchResult.getResult()).thenReturn(new Result[] { ok1 }).thenReturn((new Result[] { ok2 })).thenReturn(new Result[] { failed });

    SalesforceConnection connection = Mockito.mock(SalesforceConnection.class);
    JobInfo jobInfo = Mockito.mock(JobInfo.class);
    Mockito.when(connection.createBulkJob(Mockito.anyString(), Mockito.eq(OperationEnum.insert), Mockito.eq(false))).thenReturn(jobInfo);
    Mockito.when(connection.getBulkResults(Mockito.any(JobInfo.class), Mockito.anyList())).thenReturn(new BatchResult[] { batchResult, batchResult, batchResult });

    // Batch size of 1 forces one batch per row.
    SalesForceExecutionFactory config = new SalesForceExecutionFactory();
    config.setMaxBulkInsertBatchSize(1);
    InsertExecutionImpl updateExecution = new InsertExecutionImpl(config, insert, connection, Mockito.mock(RuntimeMetadata.class), Mockito.mock(ExecutionContext.class));

    // Retry until results are available, then check counts: two successful
    // inserts (1, 1) and one failure reported as -3.
    boolean done = false;
    while (!done) {
        try {
            updateExecution.execute();
            org.junit.Assert.assertArrayEquals(new int[] { 1, 1, -3 }, updateExecution.getUpdateCounts());
            done = true;
        } catch (DataNotAvailableException e) {
            // results not ready yet; poll again
        }
    }

    // The whole insert must map to exactly one bulk job and one results fetch.
    Mockito.verify(connection, Mockito.times(1)).createBulkJob(Mockito.anyString(), Mockito.eq(OperationEnum.insert), Mockito.eq(false));
    Mockito.verify(connection, Mockito.times(1)).getBulkResults(Mockito.any(JobInfo.class), Mockito.anyList());
}
Use of com.sforce.async.JobInfo in project teiid by teiid: class TestBulkInsertExecution, method testFlowAndInvocationStack.
@Test
public void testFlowAndInvocationStack() throws Exception {
    // Target table with one integer column and one string column.
    NamedTable targetTable = new NamedTable("temp", null, Mockito.mock(Table.class));
    ArrayList<ColumnReference> columns = new ArrayList<ColumnReference>();
    columns.add(new ColumnReference(targetTable, "one", Mockito.mock(Column.class), Integer.class));
    columns.add(new ColumnReference(targetTable, "two", Mockito.mock(Column.class), String.class));

    // Two positional parameters bound to the insert's value source.
    List<Expression> bindings = new ArrayList<Expression>();
    Parameter p = new Parameter();
    p.setType(DataTypeManager.DefaultDataClasses.INTEGER);
    p.setValueIndex(0);
    bindings.add(p);
    p = new Parameter();
    p.setType(DataTypeManager.DefaultDataClasses.STRING);
    p.setValueIndex(1);
    bindings.add(p);

    Insert insert = new Insert(targetTable, columns, new ExpressionValueSource(bindings));
    insert.setParameterValues(Arrays.asList(Arrays.asList(2, '2'), Arrays.asList(2, '2'), Arrays.asList(3, '3')).iterator());

    // Three per-row results, all successful.
    Result ok1 = Mockito.mock(Result.class);
    Result ok2 = Mockito.mock(Result.class);
    Result ok3 = Mockito.mock(Result.class);
    Mockito.when(ok1.isSuccess()).thenReturn(true);
    Mockito.when(ok1.isCreated()).thenReturn(true);
    Mockito.when(ok2.isSuccess()).thenReturn(true);
    Mockito.when(ok2.isCreated()).thenReturn(true);
    Mockito.when(ok3.isSuccess()).thenReturn(true);
    Mockito.when(ok3.isCreated()).thenReturn(true);

    // One BatchResult mock handed back three times, yielding one row each call.
    BatchResult batchResult = Mockito.mock(BatchResult.class);
    Mockito.when(batchResult.getResult()).thenReturn(new Result[] { ok1 }).thenReturn((new Result[] { ok2 })).thenReturn(new Result[] { ok3 });

    SalesforceConnection connection = Mockito.mock(SalesforceConnection.class);
    JobInfo jobInfo = Mockito.mock(JobInfo.class);
    Mockito.when(connection.createBulkJob(Mockito.anyString(), Mockito.eq(OperationEnum.insert), Mockito.eq(false))).thenReturn(jobInfo);
    Mockito.when(connection.getBulkResults(Mockito.any(JobInfo.class), Mockito.anyList())).thenReturn(new BatchResult[] { batchResult, batchResult, batchResult });

    // Batch size of 1 forces one batch per row.
    SalesForceExecutionFactory config = new SalesForceExecutionFactory();
    config.setMaxBulkInsertBatchSize(1);
    InsertExecutionImpl updateExecution = new InsertExecutionImpl(config, insert, connection, Mockito.mock(RuntimeMetadata.class), Mockito.mock(ExecutionContext.class));

    // Retry until results are available, then check every row inserted one record.
    boolean done = false;
    while (!done) {
        try {
            updateExecution.execute();
            org.junit.Assert.assertArrayEquals(new int[] { 1, 1, 1 }, updateExecution.getUpdateCounts());
            done = true;
        } catch (DataNotAvailableException e) {
            // results not ready yet; poll again
        }
    }

    // The whole insert must map to exactly one bulk job and one results fetch.
    Mockito.verify(connection, Mockito.times(1)).createBulkJob(Mockito.anyString(), Mockito.eq(OperationEnum.insert), Mockito.eq(false));
    Mockito.verify(connection, Mockito.times(1)).getBulkResults(Mockito.any(JobInfo.class), Mockito.anyList());
}
Use of com.sforce.async.JobInfo in project components by Talend: class SalesforceBulkRuntime, method doBulkQuery.
/**
 * Creates and executes a job for a bulk query. The job must finish within 2 minutes on the Salesforce side.<br/>
 * From the Salesforce documentation two scenarios are possible here:
 * <ul>
 * <li>simple bulk query. It should have status - {@link BatchStateEnum#Completed}.</li>
 * <li>primary key chunking bulk query. It should return the first batch info with status - {@link BatchStateEnum#NotProcessed}.<br/>
 * Other batch infos should have status - {@link BatchStateEnum#Completed}</li>
 * </ul>
 *
 * @param moduleName - input module name.
 * @param queryStatement - SOQL query to be executed.
 * @throws AsyncApiException if the Bulk API rejects the job or batch
 * @throws InterruptedException if the polling sleep is interrupted
 * @throws ConnectionException if the connection to Salesforce fails
 */
public void doBulkQuery(String moduleName, String queryStatement) throws AsyncApiException, InterruptedException, ConnectionException {
    job = new JobInfo();
    job.setObject(moduleName);
    job.setOperation(OperationEnum.query);
    if (concurrencyMode != null) {
        job.setConcurrencyMode(concurrencyMode);
    }
    job.setContentType(ContentType.CSV);
    job = createJob(job);
    if (job.getId() == null) {
        // job creation failed
        throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "failedBatch"), ExceptionContext.build().put("failedBatch", job));
    }
    // Encode the query explicitly as UTF-8 instead of relying on the platform
    // default charset, which varies between JVMs/OSes and can corrupt non-ASCII queries.
    ByteArrayInputStream queryInput = new ByteArrayInputStream(queryStatement.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    BatchInfo info = createBatchFromStream(job, queryInput);
    int secToWait = 1;
    int tryCount = 0;
    while (true) {
        LOGGER.debug("Awaiting " + secToWait + " seconds for results ...\n" + info);
        // Long literal avoids any int-multiplication overflow habits in sleep math.
        Thread.sleep(secToWait * 1000L);
        info = getBatchInfo(job.getId(), info.getId());
        // NotProcessed is the expected terminal state of the FIRST batch when
        // primary-key chunking is on (chunkSize > 0); Completed otherwise.
        if (info.getState() == BatchStateEnum.Completed || (BatchStateEnum.NotProcessed == info.getState() && 0 < chunkSize)) {
            break;
        } else if (info.getState() == BatchStateEnum.Failed) {
            throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_BAD_REQUEST, "failedBatch"), ExceptionContext.build().put("failedBatch", info));
        }
        tryCount++;
        // Exponential backoff: double the wait every 3 attempts, so a single
        // sleep never grows beyond roughly 2 minutes.
        if (tryCount % 3 == 0 && secToWait < 120) {
            secToWait = secToWait * 2;
        }
        // https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/asynch_api_concepts_limits.htm
        if (jobTimeOut > 0) {
            // if 0, timeout is disabled
            long processingTime = System.currentTimeMillis() - job.getCreatedDate().getTimeInMillis();
            if (processingTime > jobTimeOut) {
                throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_REQUEST_TIMEOUT, "failedBatch"), ExceptionContext.build().put("failedBatch", info));
            }
        }
    }
    retrieveResultsOfQuery(info);
}
Aggregations