Example usage of org.talend.components.api.exception.ComponentException in the Talend "components" project: class SalesforceOAuthConnection, method getSOAPEndpoint.
// it's not necessary for bulk, there is another easy way, looking at genBulkEndpoint
/**
 * Resolves the SOAP "partner" endpoint for the requested API version by querying
 * the Salesforce identity URL obtained during the OAuth flow.
 * <p>
 * Not needed for bulk connections — see {@code genBulkEndpoint} for that path.
 *
 * @param id identity URL returned by the OAuth token response
 * @param type token type (e.g. "Bearer"), used to build the Authorization header
 * @param accessToken OAuth access token
 * @param version API version substituted into the "{version}" placeholder
 * @return the partner SOAP endpoint URL, without surrounding quotes
 * @throws ComponentException if the identity URL cannot be read or parsed
 */
private String getSOAPEndpoint(String id, String type, String accessToken, String version) {
    String endpointURL = null;
    try {
        URLConnection idConn = new URL(id).openConnection();
        idConn.setRequestProperty("Authorization", type + " " + accessToken);
        // try-with-resources guarantees the reader is closed even if parsing fails;
        // the explicit UTF-8 charset avoids depending on the platform default
        // encoding when decoding the JSON identity response.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(idConn.getInputStream(), java.nio.charset.StandardCharsets.UTF_8))) {
            ObjectMapper objectMapper = new ObjectMapper();
            JsonNode jsonNode = objectMapper.readTree(reader);
            JsonNode urls = jsonNode.get("urls");
            // toString() on a textual node keeps the JSON quotes; substitute the
            // version placeholder first, then strip the surrounding quotes.
            endpointURL = urls.get("partner").toString().replace("{version}", version);
            endpointURL = StringUtils.strip(endpointURL, "\"");
        }
    } catch (IOException e) {
        throw new ComponentException(e);
    }
    return endpointURL;
}
Example usage of org.talend.components.api.exception.ComponentException in the Talend "components" project: class SalesforceBulkExecReader, method getCurrent.
@Override
public IndexedRecord getCurrent() {
    // Convert the bulk result at the current cursor position into an Avro record.
    BulkResult result = currentBatchResult.get(resultIndex);
    IndexedRecord converted;
    try {
        converted = ((BulkResultAdapterFactory) getFactory()).convertToAvro(result);
    } catch (IOException e) {
        throw new ComponentException(e);
    }
    // Salesforce reports per-row success as the string "true"/"false"; anything
    // other than "true" is routed to the reject flow with its error message.
    if (!"true".equalsIgnoreCase((String) result.getValue("Success"))) {
        Map<String, Object> rejectInfo = new HashMap<String, Object>();
        rejectInfo.put("error", (String) result.getValue("Error"));
        rejectInfo.put("talend_record", converted);
        throw new DataRejectException(rejectInfo);
    }
    return converted;
}
Example usage of org.talend.components.api.exception.ComponentException in the Talend "components" project: class SalesforceBulkRuntime, method doBulkQuery.
/**
 * Creates and executes a job for a bulk query. The job must finish within the
 * Salesforce-side limit (2 minutes per batch).<br/>
 * From the Salesforce documentation two scenarios are possible here:
 * <ul>
 * <li>simple bulk query. It should have status - {@link BatchStateEnum#Completed}.</li>
 * <li>primary key chunking bulk query. It should return the first batch info with status - {@link BatchStateEnum#NotProcessed}.<br/>
 * The other batch infos should have status - {@link BatchStateEnum#Completed}</li>
 * </ul>
 *
 * @param moduleName - input module name.
 * @param queryStatement - SOQL query to be executed.
 * @throws AsyncApiException if the Bulk API reports an error
 * @throws InterruptedException if the polling sleep is interrupted
 * @throws ConnectionException if the Salesforce connection fails
 */
public void doBulkQuery(String moduleName, String queryStatement) throws AsyncApiException, InterruptedException, ConnectionException {
    job = new JobInfo();
    job.setObject(moduleName);
    job.setOperation(OperationEnum.query);
    if (concurrencyMode != null) {
        job.setConcurrencyMode(concurrencyMode);
    }
    job.setContentType(ContentType.CSV);
    job = createJob(job);
    if (job.getId() == null) {
        // job creation failed
        throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "failedBatch"), ExceptionContext.build().put("failedBatch", job));
    }
    // Encode the SOQL explicitly as UTF-8: the no-arg getBytes() uses the
    // platform default charset and can corrupt non-ASCII characters in queries.
    ByteArrayInputStream bout = new ByteArrayInputStream(queryStatement.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    BatchInfo info = createBatchFromStream(job, bout);
    int secToWait = 1;
    int tryCount = 0;
    // Poll the batch until it reaches a terminal state, backing off over time.
    while (true) {
        LOGGER.debug("Awaiting " + secToWait + " seconds for results ...\n" + info);
        Thread.sleep(secToWait * 1000);
        info = getBatchInfo(job.getId(), info.getId());
        // NotProcessed is the expected terminal state for the first batch when
        // primary-key chunking is enabled (chunkSize > 0).
        if (info.getState() == BatchStateEnum.Completed || (BatchStateEnum.NotProcessed == info.getState() && 0 < chunkSize)) {
            break;
        } else if (info.getState() == BatchStateEnum.Failed) {
            throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_BAD_REQUEST, "failedBatch"), ExceptionContext.build().put("failedBatch", info));
        }
        tryCount++;
        if (tryCount % 3 == 0 && secToWait < 120) {
            // after 3 attempts to get the result we double the wait time;
            // the secToWait < 120 cap keeps each sleep under ~128 seconds
            secToWait = secToWait * 2;
        }
        // https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/asynch_api_concepts_limits.htm
        if (jobTimeOut > 0) {
            // if 0, the timeout is disabled
            long processingTime = System.currentTimeMillis() - job.getCreatedDate().getTimeInMillis();
            if (processingTime > jobTimeOut) {
                throw new ComponentException(new DefaultErrorCode(HttpServletResponse.SC_REQUEST_TIMEOUT, "failedBatch"), ExceptionContext.build().put("failedBatch", info));
            }
        }
    }
    retrieveResultsOfQuery(info);
}
Example usage of org.talend.components.api.exception.ComponentException in the Talend "components" project: class SalesforceDatasetRuntime, method getSchema.
@Override
public Schema getSchema() {
    // Build input properties around the configured dataset, then initialize
    // and validate a dataprep source against the runtime container.
    SalesforceInputProperties inputProperties = new SalesforceInputProperties("model");
    inputProperties.setDatasetProperties(dataset);
    SalesforceDataprepSource source = new SalesforceDataprepSource();
    source.initialize(container, inputProperties);
    source.validate(container);
    try {
        // TODO the UI will become a radio button; adjust this call accordingly
        return SalesforceSchemaUtils.getSchema(dataset, source, container);
    } catch (IOException e) {
        throw new ComponentException(e);
    }
}
Example usage of org.talend.components.api.exception.ComponentException in the Talend "components" project: class SnowflakeRowStandalone, method runAtDriver.
@Override
public void runAtDriver(RuntimeContainer container) {
    Connection conn = null;
    try {
        conn = createConnection(container);
        // Run the configured query either through a prepared statement (with
        // bound parameters) or a plain statement; either way the statement is
        // closed by try-with-resources.
        if (rowProperties.usePreparedStatement()) {
            try (PreparedStatement prepared = conn.prepareStatement(rowProperties.getQuery())) {
                SnowflakePreparedStatementUtils.fillPreparedStatement(prepared, rowProperties.preparedStatementTable);
                prepared.execute();
            }
        } else {
            try (Statement plain = conn.createStatement()) {
                plain.executeQuery(rowProperties.getQuery());
            }
        }
    } catch (SQLException sqlError) {
        throwComponentException(sqlError, "error.queryExecution");
    } catch (IOException ioError) {
        throwComponentException(ioError, "error.acquiringConnection");
    } finally {
        // Always attempt to release the connection; a failure here surfaces
        // as a ComponentException.
        if (conn != null) {
            try {
                closeConnection(container, conn);
            } catch (SQLException closeError) {
                throw new ComponentException(closeError);
            }
        }
    }
}
Aggregations