Usage of org.codelibs.fess.es.config.exentity.CrawlingInfo in the fess project by codelibs: class CrawlingInfoService, method importCsv.
/**
 * Imports crawling information from CSV data.
 * <p>
 * The expected row layout is: session ID, crawling-info created time,
 * parameter key, parameter value, parameter created time. The first row is
 * treated as a header and skipped. A {@link CrawlingInfo} entity is created
 * for a session ID that does not exist yet; each row then inserts one
 * {@link CrawlingInfoParam}. Rows that fail to parse are logged and skipped.
 *
 * @param reader the CSV source; ownership stays with the caller
 */
public void importCsv(final Reader reader) {
    // Not closed here on purpose: closing CsvReader would close the caller's Reader.
    @SuppressWarnings("resource") final CsvReader csvReader = new CsvReader(reader, new CsvConfig());
    // SimpleDateFormat is not thread-safe, but this instance is method-local.
    final DateFormat formatter = new SimpleDateFormat(CoreLibConstants.DATE_FORMAT_ISO_8601_EXTEND);
    try {
        List<String> list;
        // skip the header row
        csvReader.readValues();
        while ((list = csvReader.readValues()) != null) {
            try {
                final String sessionId = list.get(0);
                CrawlingInfo crawlingInfo = crawlingInfoBhv.selectEntity(cb -> {
                    cb.query().setSessionId_Equal(sessionId);
                    cb.specify().columnSessionId();
                }).orElse(// TODO
                        null);
                if (crawlingInfo == null) {
                    crawlingInfo = new CrawlingInfo();
                    crawlingInfo.setSessionId(sessionId);
                    crawlingInfo.setCreatedTime(formatter.parse(list.get(1)).getTime());
                    crawlingInfoBhv.insert(crawlingInfo, op -> op.setRefreshPolicy(Constants.TRUE));
                }
                final CrawlingInfoParam entity = new CrawlingInfoParam();
                entity.setCrawlingInfoId(crawlingInfo.getId());
                entity.setKey(list.get(2));
                entity.setValue(list.get(3));
                entity.setCreatedTime(formatter.parse(list.get(4)).getTime());
                crawlingInfoParamBhv.insert(entity, op -> op.setRefreshPolicy(Constants.TRUE));
            } catch (final Exception e) {
                // fixed copy-paste message: this importer reads crawling info, not a click log
                logger.warn("Failed to read crawling info: {}", list, e);
            }
        }
    } catch (final IOException e) {
        logger.warn("Failed to read crawling info.", e);
    }
}
Usage of org.codelibs.fess.es.config.exentity.CrawlingInfo in the fess project by codelibs: class CrawlingInfoService, method getLastCrawlingInfoParamList.
/**
 * Returns the parameters recorded for the most recent crawling info of the
 * given session, ordered by creation time (ascending).
 *
 * @param sessionId the crawling session ID to look up
 * @return the parameter list, or an empty list when no crawling info exists
 */
public List<CrawlingInfoParam> getLastCrawlingInfoParamList(final String sessionId) {
    final CrawlingInfo latest = getLast(sessionId);
    if (latest == null) {
        return Collections.emptyList();
    }
    final String crawlingInfoId = latest.getId();
    final FessConfig fessConfig = ComponentUtil.getFessConfig();
    // fetch size is capped by configuration; paging starts at page 1
    final Integer maxFetchSize = fessConfig.getPageCrawlingInfoParamMaxFetchSizeAsInteger();
    return crawlingInfoParamBhv.selectList(cb -> {
        cb.query().setCrawlingInfoId_Equal(crawlingInfoId);
        cb.query().addOrderBy_CreatedTime_Asc();
        cb.paging(maxFetchSize, 1);
    });
}
Usage of org.codelibs.fess.es.config.exentity.CrawlingInfo in the fess project by codelibs: class CrawlingInfoHelper, method store.
/**
 * Persists the crawling info for the given session and flushes any collected
 * info-map entries as {@link CrawlingInfoParam} records.
 * <p>
 * When {@code create} is false, the last crawling info for the session is
 * reused if present; otherwise a fresh one is created and stored. The info
 * map is cleared afterwards in either case.
 *
 * @param sessionId the crawling session ID
 * @param create    when true, always create a new crawling info entity
 * @throws FessSystemException if storing the crawling info fails
 */
public synchronized void store(final String sessionId, final boolean create) {
    CrawlingInfo crawlingInfo = null;
    if (!create) {
        crawlingInfo = getCrawlingInfoService().getLast(sessionId);
    }
    if (crawlingInfo == null) {
        crawlingInfo = new CrawlingInfo(sessionId);
        try {
            getCrawlingInfoService().store(crawlingInfo);
        } catch (final Exception e) {
            throw new FessSystemException("No crawling session.", e);
        }
    }
    if (infoMap != null) {
        // copy the id into a local so the lambda below can capture it
        final String crawlingInfoId = crawlingInfo.getId();
        final List<CrawlingInfoParam> paramList = new ArrayList<>();
        infoMap.forEach((key, value) -> {
            final CrawlingInfoParam param = new CrawlingInfoParam();
            param.setCrawlingInfoId(crawlingInfoId);
            param.setKey(key);
            param.setValue(value);
            paramList.add(param);
        });
        getCrawlingInfoService().storeInfo(paramList);
    }
    infoMap = null;
}
Usage of org.codelibs.fess.es.config.exentity.CrawlingInfo in the fess project by codelibs: class CrawlingInfoHelper, method updateParams.
/**
 * Updates the name and expiration time of the last crawling info for the
 * given session, then stores it.
 *
 * @param sessionId     the crawling session ID
 * @param name          the crawler name; blank falls back to the system name
 * @param dayForCleanup days until expiration; negative leaves it unchanged
 * @throws FessSystemException if storing the crawling info fails
 */
public void updateParams(final String sessionId, final String name, final int dayForCleanup) {
    final CrawlingInfo crawlingInfo = getCrawlingInfoService().getLast(sessionId);
    if (crawlingInfo == null) {
        logger.warn("No crawling session: {}", sessionId);
        return;
    }
    // blank name falls back to the system name
    crawlingInfo.setName(StringUtil.isNotBlank(name) ? name : Constants.CRAWLING_INFO_SYSTEM_NAME);
    if (dayForCleanup >= 0) {
        final long expires = getExpiredTime(dayForCleanup);
        crawlingInfo.setExpiredTime(expires);
        // also remember the expiration for documents of this crawl
        documentExpires = expires;
    }
    try {
        getCrawlingInfoService().store(crawlingInfo);
    } catch (final Exception e) {
        throw new FessSystemException("No crawling session.", e);
    }
}
Usage of org.codelibs.fess.es.config.exentity.CrawlingInfo in the fess project by codelibs: class CrawlingInfoService, method deleteSessionIdsBefore.
/**
 * Deletes crawling info entries (and their parameters) whose expiration time
 * is at or before the given date, optionally filtered by name and excluding
 * the currently active session.
 *
 * @param activeSessionId session ID to keep, or null to exclude none
 * @param name            crawler name filter; blank matches any name
 * @param date            expiration cutoff (inclusive)
 */
public void deleteSessionIdsBefore(final String activeSessionId, final String name, final long date) {
    final List<CrawlingInfo> targets = crawlingInfoBhv.selectList(cb -> {
        cb.query().filtered((cq, cf) -> {
            cq.setExpiredTime_LessEqual(date);
            if (StringUtil.isNotBlank(name)) {
                cf.setName_Equal(name);
            }
            if (activeSessionId != null) {
                cf.setSessionId_NotEqual(activeSessionId);
            }
        });
        cb.fetchFirst(fessConfig.getPageCrawlingInfoMaxFetchSizeAsInteger());
        // only the document ID is needed for deletion
        cb.specify().columnId();
    });
    if (targets.isEmpty()) {
        return;
    }
    final List<String> idList = new ArrayList<>(targets.size());
    for (final CrawlingInfo info : targets) {
        idList.add(info.getId());
    }
    // delete the dependent parameters first, then the crawling info entries
    crawlingInfoParamBhv.queryDelete(cb2 -> cb2.query().setCrawlingInfoId_InScope(idList));
    crawlingInfoBhv.batchDelete(targets, op -> op.setRefreshPolicy(Constants.TRUE));
}
Aggregations