Use of org.codelibs.fess.es.config.exentity.CrawlingInfoParam in the fess project by codelibs.
The importCsv method of the CrawlingInfoService class.
/**
 * Imports crawling session info records from CSV. Each data row is
 * (SessionId, SessionCreatedTime, Key, Value, CreatedTime) — the layout
 * written by {@code exportCsv}. A parent {@code CrawlingInfo} is created on
 * demand when the session id is not found. Rows that fail to parse are
 * logged and skipped; the import continues with the next row.
 *
 * @param reader source of CSV data; not closed here (caller owns it)
 */
public void importCsv(final Reader reader) {
    @SuppressWarnings("resource") final CsvReader csvReader = new CsvReader(reader, new CsvConfig());
    // SimpleDateFormat is not thread-safe, but this instance is method-local.
    final DateFormat formatter = new SimpleDateFormat(CoreLibConstants.DATE_FORMAT_ISO_8601_EXTEND);
    try {
        List<String> list;
        // skip the header row written by exportCsv
        csvReader.readValues();
        while ((list = csvReader.readValues()) != null) {
            try {
                final String sessionId = list.get(0);
                CrawlingInfo crawlingInfo = crawlingInfoBhv.selectEntity(cb -> {
                    cb.query().setSessionId_Equal(sessionId);
                    cb.specify().columnSessionId();
                }).orElse(null);
                if (crawlingInfo == null) {
                    // unknown session: create the parent record first
                    crawlingInfo = new CrawlingInfo();
                    crawlingInfo.setSessionId(sessionId);
                    crawlingInfo.setCreatedTime(formatter.parse(list.get(1)).getTime());
                    crawlingInfoBhv.insert(crawlingInfo, op -> op.setRefreshPolicy(Constants.TRUE));
                }
                final CrawlingInfoParam entity = new CrawlingInfoParam();
                entity.setCrawlingInfoId(crawlingInfo.getId());
                entity.setKey(list.get(2));
                entity.setValue(list.get(3));
                entity.setCreatedTime(formatter.parse(list.get(4)).getTime());
                crawlingInfoParamBhv.insert(entity, op -> op.setRefreshPolicy(Constants.TRUE));
            } catch (final Exception e) {
                // BUG FIX: the message said "click log" — a copy-paste from a
                // different importer; this method reads crawling session info.
                logger.warn("Failed to read a crawling session info: {}", list, e);
            }
        }
    } catch (final IOException e) {
        logger.warn("Failed to read a crawling session info.", e);
    }
}
Use of org.codelibs.fess.es.config.exentity.CrawlingInfoParam in the fess project by codelibs.
The store method of the CrawlingInfoHelper class.
/**
 * Persists the current session's info entries. Unless {@code create} is
 * true, the most recent session with the given id is reused; otherwise a
 * fresh {@code CrawlingInfo} is stored. Any accumulated {@code infoMap}
 * entries are converted to {@code CrawlingInfoParam} rows and stored in one
 * batch, then the map is cleared.
 *
 * @param sessionId crawling session identifier
 * @param create force creation of a new session record
 * @throws FessSystemException when the session record cannot be stored
 */
public synchronized void store(final String sessionId, final boolean create) {
    CrawlingInfo info = create ? null : getCrawlingInfoService().getLast(sessionId);
    if (info == null) {
        info = new CrawlingInfo(sessionId);
        try {
            getCrawlingInfoService().store(info);
        } catch (final Exception e) {
            throw new FessSystemException("No crawling session.", e);
        }
    }
    if (infoMap != null) {
        final CrawlingInfo target = info; // effectively-final copy for the lambda
        final List<CrawlingInfoParam> params = new ArrayList<>(infoMap.size());
        infoMap.forEach((key, value) -> {
            final CrawlingInfoParam param = new CrawlingInfoParam();
            param.setCrawlingInfoId(target.getId());
            param.setKey(key);
            param.setValue(value);
            params.add(param);
        });
        getCrawlingInfoService().storeInfo(params);
    }
    infoMap = null;
}
Use of org.codelibs.fess.es.config.exentity.CrawlingInfoParam in the fess project by codelibs.
The getInfoMap method of the CrawlingInfoHelper class.
/**
 * Builds a key/value map from the latest crawling info params of the given
 * session. Later entries with a duplicate key overwrite earlier ones.
 *
 * @param sessionId crawling session identifier
 * @return mutable map of param key to param value (empty when none)
 */
public Map<String, String> getInfoMap(final String sessionId) {
    final Map<String, String> result = new HashMap<>();
    getCrawlingInfoService().getLastCrawlingInfoParamList(sessionId)
            .forEach(param -> result.put(param.getKey(), param.getValue()));
    return result;
}
Use of org.codelibs.fess.es.config.exentity.CrawlingInfoParam in the fess project by codelibs.
The storeInfo method of the CrawlingInfoService class.
/**
 * Batch-inserts crawling session info params. Entries without a creation
 * timestamp are stamped with the current system time first.
 *
 * @param crawlingInfoParamList params to persist; must not be null
 * @throws FessSystemException when the list is null
 */
public void storeInfo(final List<CrawlingInfoParam> crawlingInfoParamList) {
    if (crawlingInfoParamList == null) {
        throw new FessSystemException("Crawling Session Info is null.");
    }
    final long now = ComponentUtil.getSystemHelper().getCurrentTimeAsLong();
    // backfill missing timestamps so every stored row is dated
    crawlingInfoParamList.stream()
            .filter(param -> param.getCreatedTime() == null)
            .forEach(param -> param.setCreatedTime(now));
    crawlingInfoParamBhv.batchInsert(crawlingInfoParamList, op -> op.setRefreshPolicy(Constants.TRUE));
}
Use of org.codelibs.fess.es.config.exentity.CrawlingInfoParam in the fess project by codelibs.
The exportCsv method of the CrawlingInfoService class.
/**
 * Exports all crawling session info params as CSV with the header
 * (SessionId, SessionCreatedTime, Key, Value, CreatedTime) — the row layout
 * that {@code importCsv} reads back. Rows that fail to write are logged and
 * skipped.
 *
 * @param writer destination for CSV data; flushed but not closed here
 */
public void exportCsv(final Writer writer) {
    final CsvConfig cfg = new CsvConfig(',', '"', '"');
    cfg.setEscapeDisabled(false);
    cfg.setQuoteDisabled(false);
    @SuppressWarnings("resource") final CsvWriter csvWriter = new CsvWriter(writer, cfg);
    try {
        final List<String> list = new ArrayList<>();
        list.add("SessionId");
        list.add("SessionCreatedTime");
        list.add("Key");
        list.add("Value");
        list.add("CreatedTime");
        csvWriter.writeValues(list);
        final DateTimeFormatter formatter = DateTimeFormatter.ofPattern(CoreLibConstants.DATE_FORMAT_ISO_8601_EXTEND);
        crawlingInfoParamBhv.selectCursor(cb -> cb.query().matchAll(), new EntityRowHandler<CrawlingInfoParam>() {
            @Override
            public void handle(final CrawlingInfoParam entity) {
                final List<String> list = new ArrayList<>();
                // FIX: resolve the parent session once; the original called
                // getCrawlingInfo() twice (ifPresent, then isPresent) with a TODO.
                final CrawlingInfo crawlingInfo = entity.getCrawlingInfo().orElse(null);
                if (crawlingInfo == null) {
                    // orphan param: emit empty session columns to keep the
                    // column count stable
                    addToList(list, StringUtil.EMPTY);
                    addToList(list, StringUtil.EMPTY);
                } else {
                    addToList(list, crawlingInfo.getSessionId());
                    addToList(list, crawlingInfo.getCreatedTime());
                }
                addToList(list, entity.getKey());
                addToList(list, entity.getValue());
                addToList(list, entity.getCreatedTime());
                try {
                    csvWriter.writeValues(list);
                } catch (final IOException e) {
                    logger.warn("Failed to write a crawling session info: {}", entity, e);
                }
            }

            // Renders a cell value: null -> empty, timestamps -> ISO-8601,
            // anything else -> toString().
            private void addToList(final List<String> list, final Object value) {
                if (value == null) {
                    list.add(StringUtil.EMPTY);
                } else if (value instanceof LocalDateTime) {
                    list.add(((LocalDateTime) value).format(formatter));
                } else if (value instanceof Long) {
                    // BUG FIX: createdTime is stored as epoch millis (see
                    // importCsv/storeInfo), so the original fell through to
                    // toString() and emitted raw millis — which importCsv's
                    // ISO-8601 DateFormat cannot parse back. Format it with
                    // the same pattern instead (system default zone; fully
                    // qualified to avoid touching the import block).
                    list.add(LocalDateTime
                            .ofInstant(java.time.Instant.ofEpochMilli((Long) value), java.time.ZoneId.systemDefault())
                            .format(formatter));
                } else {
                    list.add(value.toString());
                }
            }
        });
        csvWriter.flush();
    } catch (final IOException e) {
        logger.warn("Failed to write a crawling session info.", e);
    }
}
Aggregations