Use of com.sforce.async.BatchInfo in project components by Talend.
The class SalesforceBulkRuntime, method doBulkQuery.
/**
 * Creates and executes a job for a bulk query. The job must finish within 2 minutes on the Salesforce side.<br/>
 * According to the Salesforce documentation, two scenarios are possible here:
 * <ul>
 * <li>a simple bulk query. It should have the status {@link BatchStateEnum#Completed}.</li>
 * <li>a primary key chunking bulk query. It should return the first batch info with the status
 * {@link BatchStateEnum#NotProcessed}.<br/>
 * The other batch infos should have the status {@link BatchStateEnum#Completed}.</li>
 * </ul>
 *
 * @param moduleName - input module name.
 * @param queryStatement - the query to be executed.
 * @throws AsyncApiException
 * @throws InterruptedException
 * @throws ConnectionException
 */
public void doBulkQuery(String moduleName, String queryStatement)
        throws AsyncApiException, InterruptedException, ConnectionException {
    job = new JobInfo();
    job.setObject(moduleName);
    job.setOperation(OperationEnum.query);
    if (concurrencyMode != null) {
        job.setConcurrencyMode(concurrencyMode);
    }
    job.setContentType(ContentType.CSV);
    job = createJob(job);
    if (job.getId() == null) {
        // job creation failed
        throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "failedBatch"),
                ExceptionContext.build().put("failedBatch", job));
    }
    ByteArrayInputStream queryInput = new ByteArrayInputStream(queryStatement.getBytes(StandardCharsets.UTF_8));
    BatchInfo info = createBatchFromStream(job, queryInput);
    int secToWait = 1;
    int tryCount = 0;
    while (true) {
        LOGGER.debug("Awaiting " + secToWait + " seconds for results ...\n" + info);
        Thread.sleep(secToWait * 1000);
        info = getBatchInfo(job.getId(), info.getId());
        if (info.getState() == BatchStateEnum.Completed
                || (info.getState() == BatchStateEnum.NotProcessed && chunkSize > 0)) {
            break;
        } else if (info.getState() == BatchStateEnum.Failed) {
            throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_BAD_REQUEST, "failedBatch"),
                    ExceptionContext.build().put("failedBatch", info));
        }
        tryCount++;
        // after 3 attempts to get the result, double the time to wait;
        // the secToWait < 120 guard caps the growth, so no single sleep is longer than 128 seconds
        if (tryCount % 3 == 0 && secToWait < 120) {
            secToWait = secToWait * 2;
        }
        // https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/asynch_api_concepts_limits.htm
        if (jobTimeOut > 0) { // if 0, the timeout is disabled
            long processingTime = System.currentTimeMillis() - job.getCreatedDate().getTimeInMillis();
            if (processingTime > jobTimeOut) {
                throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_REQUEST_TIMEOUT, "failedBatch"),
                        ExceptionContext.build().put("failedBatch", info));
            }
        }
    }
    retrieveResultsOfQuery(info);
}
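For reference, the polling loop above sleeps 1 second between each of the first three polls, then doubles the delay after every third attempt; because doubling stops once secToWait reaches 120, the longest single sleep is 128 seconds (64 * 2). A minimal standalone sketch of that schedule, for illustration only (not part of the Talend code):

public class BackoffDemo {

    public static void main(String[] args) {
        int secToWait = 1;
        int tryCount = 0;
        for (int poll = 1; poll <= 24; poll++) {
            System.out.println("poll " + poll + ": sleep " + secToWait + "s");
            tryCount++;
            if (tryCount % 3 == 0 && secToWait < 120) {
                secToWait = secToWait * 2; // 1, 2, 4, ..., 64, 128, then capped
            }
        }
    }
}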
Use of com.sforce.async.BatchInfo in project components by Talend.
The class SalesforceBulkRuntime, method createBatch.
/**
 * Create a batch by uploading the contents of the file. This closes the output stream.
 *
 * @param tmpOut The output stream used to write the CSV data for a single batch.
 * @param tmpFile The file associated with the above stream.
 * @param batchInfos The batch info for the newly created batch is added to this list.
 *
 * @throws IOException
 * @throws AsyncApiException
 * @throws ConnectionException
 */
private void createBatch(FileOutputStream tmpOut, File tmpFile, List<BatchInfo> batchInfos)
        throws IOException, AsyncApiException, ConnectionException {
    tmpOut.flush();
    tmpOut.close();
    FileInputStream tmpInputStream = new FileInputStream(tmpFile);
    try {
        BatchInfo batchInfo = createBatchFromStream(job, tmpInputStream);
        batchInfos.add(batchInfo);
    } finally {
        tmpInputStream.close();
    }
}
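The createBatchFromStream method is not included in this listing. Assuming it is a thin wrapper over the standard com.sforce.async.BulkConnection client (the bulkConnection field name below is an assumption), it would look roughly like this:

private BatchInfo createBatchFromStream(JobInfo job, InputStream input) throws AsyncApiException {
    // BulkConnection.createBatchFromStream uploads the stream contents as one
    // batch of the given job and returns the BatchInfo describing that batch.
    return bulkConnection.createBatchFromStream(job, input);
}

The actual Talend runtime may add session renewal or retry handling around this call.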
Use of com.sforce.async.BatchInfo in project components by Talend.
The class SalesforceBulkRuntime, method createBatchesFromCSVFile.
/**
 * Create and upload batches using a CSV file. The file is split into batch files of the appropriate size.
 *
 * @return the batch info for every batch that was created.
 * @throws IOException
 * @throws AsyncApiException
 * @throws ConnectionException
 */
private List<BatchInfo> createBatchesFromCSVFile() throws IOException, AsyncApiException, ConnectionException {
    List<BatchInfo> batchInfos = new ArrayList<>();
    BufferedReader rdr = new BufferedReader(new InputStreamReader(new FileInputStream(bulkFileName), FILE_ENCODING));
    // read the CSV header row; it is repeated at the top of every batch file
    byte[] headerBytes = (rdr.readLine() + "\n").getBytes("UTF-8");
    int headerBytesLength = headerBytes.length;
    File tmpFile = File.createTempFile("sforceBulkAPI", ".csv");
    // Split the CSV file into multiple batches
    try {
        FileOutputStream tmpOut = null; // opened lazily, once per batch file
        int currentBytes = 0;
        int currentLines = 0;
        String nextLine;
        boolean needStart = true;
        boolean needEnds = true;
        while ((nextLine = rdr.readLine()) != null) {
            int num = countQuotes(nextLine);
            if (num % 2 == 1) {
                // an odd number of quotes: nextLine opens or closes a quoted record
                // that spans several physical lines
                if (!needStart) {
                    needEnds = false;
                } else {
                    needStart = false;
                }
            } else {
                // an even number of quotes: nextLine is a whole record, or the middle of a multi-line record
                if (needEnds && needStart) {
                    needEnds = false;
                    needStart = false;
                }
            }
            byte[] bytes = (nextLine + "\n").getBytes("UTF-8");
            // Create a new batch when our batch size limit is reached
            if (currentBytes + bytes.length > maxBytesPerBatch || currentLines > maxRowsPerBatch) {
                if (tmpOut != null) {
                    createBatch(tmpOut, tmpFile, batchInfos);
                }
                currentBytes = 0;
                currentLines = 0;
            }
            if (currentBytes == 0) {
                tmpOut = new FileOutputStream(tmpFile);
                tmpOut.write(headerBytes);
                currentBytes = headerBytesLength;
                currentLines = 1;
            }
            tmpOut.write(bytes);
            currentBytes += bytes.length;
            if (!needStart && !needEnds) {
                // a complete record has been written out
                currentLines++;
                needStart = true;
                needEnds = true;
            }
        }
        // Finished processing all rows: create a final batch for any remaining data
        if (currentLines > 1) {
            createBatch(tmpOut, tmpFile, batchInfos);
        }
    } finally {
        rdr.close();
        tmpFile.delete();
    }
    return batchInfos;
}
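The countQuotes helper is also not part of this listing; since only the parity of its result is used, it presumably counts the double-quote characters in a line, along the lines of this hypothetical sketch:

private int countQuotes(String line) {
    // An odd count means the line opens or closes a quoted CSV field
    // that spans multiple physical lines.
    int count = 0;
    for (int i = 0; i < line.length(); i++) {
        if (line.charAt(i) == '"') {
            count++;
        }
    }
    return count;
}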