Usage example of org.apache.lucene.index.SegmentCommitInfo in the Apache lucene-solr project: the ReplicationHandler class, getFileList method.
/**
 * Handles the FILE_LIST command: for the index commit identified by the required
 * GENERATION request parameter, adds the name, size and (when readable) codec
 * checksum of every index file in that commit, plus the segments_N file itself.
 * Also adds tlog files when CDCR is active, and replicable config files when
 * running in standalone (non-ZooKeeper) mode.
 *
 * On any failure a "status" entry describing the problem is added to the
 * response instead of (or in addition to) the file list.
 *
 * @param solrParams request parameters; GENERATION is required
 * @param rsp        response the file list or error status is added to
 */
@SuppressWarnings("unchecked")
private void getFileList(SolrParams solrParams, SolrQueryResponse rsp) {
  String v = solrParams.get(GENERATION);
  if (v == null) {
    rsp.add("status", "no index generation specified");
    return;
  }
  final long gen;
  try {
    gen = Long.parseLong(v);
  } catch (NumberFormatException e) {
    // A malformed generation parameter should produce an error status, not an
    // uncaught NumberFormatException bubbling up to the request handler.
    rsp.add("status", "invalid index generation");
    return;
  }
  IndexCommit commit = core.getDeletionPolicy().getCommitPoint(gen);
  if (commit == null) {
    rsp.add("status", "invalid index generation");
    return;
  }
  // Reserve the commit point for a while so its files are not deleted out from
  // under a replica that is still fetching them.
  core.getDeletionPolicy().setReserveDuration(gen, reserveCommitDuration);
  List<Map<String, Object>> result = new ArrayList<>();
  Directory dir = null;
  try {
    dir = core.getDirectoryFactory().get(core.getNewIndexDir(), DirContext.DEFAULT, core.getSolrConfig().indexConfig.lockType);
    SegmentInfos infos = SegmentInfos.readCommit(dir, commit.getSegmentsFileName());
    for (SegmentCommitInfo commitInfo : infos) {
      for (String file : commitInfo.files()) {
        Map<String, Object> fileMeta = new HashMap<>();
        fileMeta.put(NAME, file);
        fileMeta.put(SIZE, dir.fileLength(file));
        addChecksumToFileMeta(dir, file, fileMeta);
        result.add(fileMeta);
      }
    }
    // Add the segments_N file itself. Its checksum is only retrievable when the
    // commit records an id (infos.getId() != null).
    Map<String, Object> fileMeta = new HashMap<>();
    fileMeta.put(NAME, infos.getSegmentsFileName());
    fileMeta.put(SIZE, dir.fileLength(infos.getSegmentsFileName()));
    if (infos.getId() != null) {
      addChecksumToFileMeta(dir, infos.getSegmentsFileName(), fileMeta);
    }
    result.add(fileMeta);
  } catch (IOException e) {
    rsp.add("status", "unable to get file names for given index generation");
    rsp.add(EXCEPTION, e);
    LOG.error("Unable to get file names for indexCommit generation: " + gen, e);
  } finally {
    if (dir != null) {
      try {
        core.getDirectoryFactory().release(dir);
      } catch (IOException e) {
        SolrException.log(LOG, "Could not release directory after fetching file list", e);
      }
    }
  }
  rsp.add(CMD_GET_FILE_LIST, result);
  // Fetch the list of tlog files only if CDCR is activated.
  if (solrParams.getBool(TLOG_FILES, true) && core.getUpdateHandler().getUpdateLog() != null && core.getUpdateHandler().getUpdateLog() instanceof CdcrUpdateLog) {
    try {
      List<Map<String, Object>> tlogfiles = getTlogFileList(commit);
      LOG.info("Adding tlog files to list: " + tlogfiles);
      rsp.add(TLOG_FILES, tlogfiles);
    } catch (IOException e) {
      rsp.add("status", "unable to get tlog file names for given index generation");
      rsp.add(EXCEPTION, e);
      LOG.error("Unable to get tlog file names for indexCommit generation: " + gen, e);
    }
  }
  // Config files are only served directly in standalone mode; in SolrCloud they
  // live in ZooKeeper.
  if (confFileNameAlias.size() < 1 || core.getCoreContainer().isZooKeeperAware())
    return;
  LOG.debug("Adding config files to list: " + includeConfFiles);
  // If configuration files need to be included, get their details.
  rsp.add(CONF_FILES, getConfFileInfoFromCache(confFileNameAlias, confFileInfoCache));
}

/**
 * Opens {@code file} in {@code dir} and stores its codec checksum under the
 * CHECKSUM key of {@code fileMeta}. A failure to read the checksum is logged
 * and tolerated (the entry simply has no checksum), matching best-effort
 * behavior; a failure to open the file propagates as IOException so the caller
 * can report it.
 */
private void addChecksumToFileMeta(Directory dir, String file, Map<String, Object> fileMeta) throws IOException {
  try (final IndexInput in = dir.openInput(file, IOContext.READONCE)) {
    try {
      fileMeta.put(CHECKSUM, CodecUtil.retrieveChecksum(in));
    } catch (Exception e) {
      LOG.warn("Could not read checksum from index file: " + file, e);
    }
  }
}
Usage example of org.apache.lucene.index.SegmentCommitInfo in the Apache lucene-solr project: the SolrIndexWriter class, merge method.
// Overridden to record merge metrics (counts, gauges, timers) around the
// actual merge performed by the superclass.
@Override
public void merge(MergePolicy.OneMerge merge) throws IOException {
  // Metrics collection disabled: delegate straight through.
  if (!mergeTotals) {
    super.merge(merge);
    return;
  }

  // Live docs participating in the merge (total minus deletions), and the
  // number of deleted docs that will be merged away.
  long liveDocs = merge.totalNumDocs();
  long removedDocs = 0;
  for (SegmentCommitInfo segInfo : merge.segments) {
    int del = segInfo.getDelCount();
    liveDocs -= del;
    removedDocs += del;
  }

  final int segCount = merge.segments.size();
  final boolean isMajor = liveDocs > majorMergeDocs;

  final Timer.Context timing;
  if (isMajor) {
    runningMajorMerges.incrementAndGet();
    runningMajorMergesDocs.addAndGet(liveDocs);
    runningMajorMergesSegments.addAndGet(segCount);
    if (mergeDetails) {
      majorMergedDocs.mark(liveDocs);
      majorDeletedDocs.mark(removedDocs);
    }
    timing = majorMerge.time();
  } else {
    runningMinorMerges.incrementAndGet();
    runningMinorMergesDocs.addAndGet(liveDocs);
    runningMinorMergesSegments.addAndGet(segCount);
    timing = minorMerge.time();
  }

  try {
    super.merge(merge);
  } catch (Throwable t) {
    mergeErrors.inc();
    throw t;
  } finally {
    // Stop the timer and roll back the "running" gauges regardless of outcome.
    timing.stop();
    if (isMajor) {
      runningMajorMerges.decrementAndGet();
      runningMajorMergesDocs.addAndGet(-liveDocs);
      runningMajorMergesSegments.addAndGet(-segCount);
    } else {
      runningMinorMerges.decrementAndGet();
      runningMinorMergesDocs.addAndGet(-liveDocs);
      runningMinorMergesSegments.addAndGet(-segCount);
    }
  }
}
Aggregations