Usage example of org.codelibs.core.timer.TimeoutTask from the fess project by codelibs: class PythonJob, method execute().
@Override
public String execute() {
    // Assign a random alphabetic session id when the caller did not supply one;
    // everything below (logging line, process cleanup) keys off this id.
    if (sessionId == null) {
        // create session id
        sessionId = RandomStringUtils.randomAlphabetic(15);
    }

    final StringBuilder buf = new StringBuilder();
    buf.append("Session Id: ").append(sessionId).append("\n");

    // Ensure the spawned python process is destroyed when the job executor shuts down.
    if (jobExecutor != null) {
        jobExecutor.addShutdownListener(() -> ComponentUtil.getProcessHelper().destroyProcess(sessionId));
    }

    final TimeoutTask watchdog = createTimeoutTask();
    try {
        executePython();
    } catch (final Exception e) {
        // Best-effort job: report the failure in the result text instead of propagating.
        logger.warn("Failed to run python command.", e);
        buf.append(e.getMessage()).append("\n");
    } finally {
        // Always cancel the timeout watchdog so it cannot fire after completion.
        if (watchdog != null && !watchdog.isCanceled()) {
            watchdog.cancel();
        }
    }
    return buf.toString();
}
Usage example of org.codelibs.core.timer.TimeoutTask from the fess-crawler project by codelibs: class FileSystemClient, method processRequest().
/**
 * Fetches the resource at {@code uri}, guarded by an optional access timeout.
 * Lazily initializes the client on first use (double-checked on the
 * {@code isInit} AtomicBoolean, which doubles as the lock object).
 *
 * @param uri the target URI to read
 * @param includeContent whether the response body should be retrieved
 * @return the response data produced by {@link #getResponseData}
 */
protected ResponseData processRequest(final String uri, final boolean includeContent) {
    if (!isInit.get()) {
        synchronized (isInit) {
            if (!isInit.get()) {
                init();
                isInit.set(true);
            }
        }
    }

    // Register a watchdog that interrupts this thread if the access takes too long.
    AccessTimeoutTarget accessTimeoutTarget = null;
    TimeoutTask accessTimeoutTask = null;
    if (accessTimeout != null) {
        accessTimeoutTarget = new AccessTimeoutTarget(Thread.currentThread());
        accessTimeoutTask = TimeoutManager.getInstance().addTimeoutTarget(accessTimeoutTarget, accessTimeout.intValue(), false);
    }
    try {
        return getResponseData(uri, includeContent);
    } finally {
        // Key cleanup off the locals rather than re-reading the accessTimeout
        // field: a concurrent change to accessTimeout could otherwise cause an
        // NPE here. Also guard accessTimeoutTask explicitly for the same reason.
        if (accessTimeoutTarget != null) {
            accessTimeoutTarget.stop();
            if (accessTimeoutTask != null && !accessTimeoutTask.isCanceled()) {
                accessTimeoutTask.cancel();
            }
        }
    }
}
Usage example of org.codelibs.core.timer.TimeoutTask from the fess-crawler project by codelibs: class HcHttpClient, method doHttpMethod().
/**
 * Executes the given HTTP request against {@code url}, guarded by an optional
 * access timeout. Lazily initializes the underlying HttpClient on first use.
 *
 * @param url the URL being accessed (used for logging)
 * @param httpRequest the prepared HTTP request to execute
 * @return the response data produced by {@link #processHttpMethod}
 */
public ResponseData doHttpMethod(final String url, final HttpUriRequest httpRequest) {
    if (httpClient == null) {
        init();
    }

    if (logger.isDebugEnabled()) {
        logger.debug("Accessing " + url);
    }

    // Register a watchdog that interrupts this thread if the access takes too long.
    AccessTimeoutTarget accessTimeoutTarget = null;
    TimeoutTask accessTimeoutTask = null;
    if (accessTimeout != null) {
        accessTimeoutTarget = new AccessTimeoutTarget(Thread.currentThread());
        accessTimeoutTask = TimeoutManager.getInstance().addTimeoutTarget(accessTimeoutTarget, accessTimeout.intValue(), false);
    }
    try {
        return processHttpMethod(url, httpRequest);
    } finally {
        // Key cleanup off the locals rather than re-reading the accessTimeout
        // field: a concurrent change to accessTimeout could otherwise cause an
        // NPE here. Also guard accessTimeoutTask explicitly for the same reason.
        if (accessTimeoutTarget != null) {
            accessTimeoutTarget.stop();
            if (accessTimeoutTask != null && !accessTimeoutTask.isCanceled()) {
                accessTimeoutTask.cancel();
            }
        }
    }
}
Usage example of org.codelibs.core.timer.TimeoutTask from the fess-crawler project by codelibs: class SmbClient, method processRequest().
/**
 * Fetches the SMB resource at {@code uri}, guarded by an optional access
 * timeout. Lazily initializes the client when no authentication holder is set.
 *
 * @param uri the target SMB URI to read
 * @param includeContent whether the response body should be retrieved
 * @return the response data produced by {@link #getResponseData}
 */
protected ResponseData processRequest(final String uri, final boolean includeContent) {
    if (smbAuthenticationHolder == null) {
        init();
    }

    // Register a watchdog that interrupts this thread if the access takes too long.
    AccessTimeoutTarget accessTimeoutTarget = null;
    TimeoutTask accessTimeoutTask = null;
    if (accessTimeout != null) {
        accessTimeoutTarget = new AccessTimeoutTarget(Thread.currentThread());
        accessTimeoutTask = TimeoutManager.getInstance().addTimeoutTarget(accessTimeoutTarget, accessTimeout.intValue(), false);
    }
    try {
        return getResponseData(uri, includeContent);
    } finally {
        // Key cleanup off the locals rather than re-reading the accessTimeout
        // field: a concurrent change to accessTimeout could otherwise cause an
        // NPE here. Also guard accessTimeoutTask explicitly for the same reason.
        if (accessTimeoutTarget != null) {
            accessTimeoutTarget.stop();
            if (accessTimeoutTask != null && !accessTimeoutTask.isCanceled()) {
                accessTimeoutTask.cancel();
            }
        }
    }
}
Usage example of org.codelibs.core.timer.TimeoutTask from the fess project by codelibs: class CrawlJob, method execute().
/**
 * Runs a crawl for the configured web/file/data config ids, returning a
 * human-readable summary of what was crawled. Refuses to start when the
 * configured maximum number of crawler processes is already exceeded.
 *
 * @return a multi-line summary (session id plus the config ids per category)
 * @throws JobProcessingException if too many crawlers are running or the crawl fails
 */
@Override
public String execute() {
    // check # of crawler processes
    final int maxCrawlerProcesses = ComponentUtil.getFessConfig().getJobMaxCrawlerProcessesAsInteger();
    if (maxCrawlerProcesses > 0) {
        final int runningJobCount = getRunningJobCount();
        if (runningJobCount > maxCrawlerProcesses) {
            throw new JobProcessingException(
                    runningJobCount + " crawler processes are running. Max processes are " + maxCrawlerProcesses + ".");
        }
    }

    final StringBuilder resultBuf = new StringBuilder(100);
    // When no config-id list is given at all, the crawler runs every configuration.
    final boolean runAll = webConfigIds == null && fileConfigIds == null && dataConfigIds == null;
    if (sessionId == null) {
        // create session id from the current timestamp (SimpleDateFormat is
        // method-local, so its lack of thread-safety is not an issue here)
        sessionId = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());
    }
    resultBuf.append("Session Id: ").append(sessionId).append("\n");
    appendConfigIds(resultBuf, "Web Config Id:", webConfigIds, runAll);
    appendConfigIds(resultBuf, "File Config Id:", fileConfigIds, runAll);
    appendConfigIds(resultBuf, "Data Config Id:", dataConfigIds, runAll);

    // Ensure the crawler process is destroyed when the job executor shuts down.
    if (jobExecutor != null) {
        jobExecutor.addShutdownListener(() -> ComponentUtil.getProcessHelper().destroyProcess(sessionId));
    }

    final TimeoutTask timeoutTask = createTimeoutTask();
    try {
        executeCrawler();
        ComponentUtil.getKeyMatchHelper().update();
    } catch (final JobProcessingException e) {
        throw e;
    } catch (final Exception e) {
        throw new JobProcessingException("Failed to execute a crawl job.", e);
    } finally {
        // Always cancel the timeout watchdog so it cannot fire after completion.
        if (timeoutTask != null && !timeoutTask.isCanceled()) {
            timeoutTask.cancel();
        }
    }
    return resultBuf.toString();
}

/**
 * Appends one summary line for a config-id category: "Label: ALL" when no ids
 * are given and everything runs, "Label: NONE" when no ids are given but other
 * categories are selected, otherwise "Label: id1 id2 ...".
 * NOTE(review): assumes the config-id fields are String[] — confirm against the field declarations.
 */
private static void appendConfigIds(final StringBuilder buf, final String label, final String[] ids, final boolean runAll) {
    buf.append(label);
    if (ids == null) {
        buf.append(runAll ? " ALL\n" : " NONE\n");
    } else {
        for (final String id : ids) {
            buf.append(' ').append(id);
        }
        buf.append('\n');
    }
}
Aggregations