Search in sources :

Example 11 with SegmentCommitInfo

Use of org.apache.lucene.index.SegmentCommitInfo in the lucene-solr project by Apache.

In the class ReplicationHandler, the method getFileList:

/**
 * Responds with the list of index files (and optionally tlog and config files)
 * that make up the commit point identified by the {@code generation} request
 * parameter, so a replica can fetch them during replication.
 *
 * <p>Each file entry is a map with NAME, SIZE and, when readable, CHECKSUM.
 * On failure to enumerate the index files a "status" message and the exception
 * are added to the response instead.
 *
 * @param solrParams request parameters; must contain {@code GENERATION}
 * @param rsp        response to which the file list (and any error) is added
 */
@SuppressWarnings("unchecked")
private void getFileList(SolrParams solrParams, SolrQueryResponse rsp) {
    String v = solrParams.get(GENERATION);
    if (v == null) {
        rsp.add("status", "no index generation specified");
        return;
    }
    long gen = Long.parseLong(v);
    IndexCommit commit = core.getDeletionPolicy().getCommitPoint(gen);
    if (commit == null) {
        rsp.add("status", "invalid index generation");
        return;
    }
    // Reserve the commit point for a while so its files are not deleted
    // out from under the replica that is about to fetch them.
    core.getDeletionPolicy().setReserveDuration(gen, reserveCommitDuration);
    List<Map<String, Object>> result = new ArrayList<>();
    Directory dir = null;
    try {
        dir = core.getDirectoryFactory().get(core.getNewIndexDir(), DirContext.DEFAULT, core.getSolrConfig().indexConfig.lockType);
        SegmentInfos infos = SegmentInfos.readCommit(dir, commit.getSegmentsFileName());
        // One metadata entry per file of every segment in the commit.
        for (SegmentCommitInfo commitInfo : infos) {
            for (String file : commitInfo.files()) {
                result.add(fileMetaData(dir, file));
            }
        }
        // Add the segments_N file itself; only attempt the checksum when the
        // commit carries an id (older index formats do not).
        Map<String, Object> fileMeta = new HashMap<>();
        fileMeta.put(NAME, infos.getSegmentsFileName());
        fileMeta.put(SIZE, dir.fileLength(infos.getSegmentsFileName()));
        if (infos.getId() != null) {
            Long checksum = readChecksum(dir, infos.getSegmentsFileName());
            if (checksum != null) {
                fileMeta.put(CHECKSUM, checksum);
            }
        }
        result.add(fileMeta);
    } catch (IOException e) {
        rsp.add("status", "unable to get file names for given index generation");
        rsp.add(EXCEPTION, e);
        LOG.error("Unable to get file names for indexCommit generation: {}", gen, e);
    } finally {
        if (dir != null) {
            try {
                core.getDirectoryFactory().release(dir);
            } catch (IOException e) {
                SolrException.log(LOG, "Could not release directory after fetching file list", e);
            }
        }
    }
    // NOTE(review): the (possibly partial) list is added even when the try
    // block above failed; preserved from the original behavior.
    rsp.add(CMD_GET_FILE_LIST, result);
    // Fetch the list of tlog files only if CDCR is activated.
    if (solrParams.getBool(TLOG_FILES, true) && core.getUpdateHandler().getUpdateLog() != null && core.getUpdateHandler().getUpdateLog() instanceof CdcrUpdateLog) {
        try {
            List<Map<String, Object>> tlogfiles = getTlogFileList(commit);
            LOG.info("Adding tlog files to list: {}", tlogfiles);
            rsp.add(TLOG_FILES, tlogfiles);
        } catch (IOException e) {
            rsp.add("status", "unable to get tlog file names for given index generation");
            rsp.add(EXCEPTION, e);
            LOG.error("Unable to get tlog file names for indexCommit generation: {}", gen, e);
        }
    }
    // Config files are only served when not running in SolrCloud mode.
    if (confFileNameAlias.size() < 1 || core.getCoreContainer().isZooKeeperAware())
        return;
    LOG.debug("Adding config files to list: {}", includeConfFiles);
    // If configuration files need to be included, get their details.
    rsp.add(CONF_FILES, getConfFileInfoFromCache(confFileNameAlias, confFileInfoCache));
}

/**
 * Builds the NAME/SIZE metadata map for a single index file, adding CHECKSUM
 * when it can be read.
 *
 * @throws IOException if the file length cannot be determined or the file
 *                     cannot be opened/closed (propagates to the caller's
 *                     error handling, matching the original inline logic)
 */
private Map<String, Object> fileMetaData(Directory dir, String file) throws IOException {
    Map<String, Object> fileMeta = new HashMap<>();
    fileMeta.put(NAME, file);
    fileMeta.put(SIZE, dir.fileLength(file));
    Long checksum = readChecksum(dir, file);
    if (checksum != null) {
        fileMeta.put(CHECKSUM, checksum);
    }
    return fileMeta;
}

/**
 * Reads the codec footer checksum of {@code fileName}, or returns null (after
 * logging a warning) when the checksum cannot be retrieved. Failures opening
 * or closing the file still propagate as IOException, exactly as the previous
 * inline code behaved.
 */
private Long readChecksum(Directory dir, String fileName) throws IOException {
    try (final IndexInput in = dir.openInput(fileName, IOContext.READONCE)) {
        try {
            return CodecUtil.retrieveChecksum(in);
        } catch (Exception e) {
            LOG.warn("Could not read checksum from index file: {}", fileName, e);
            return null;
        }
    }
}
Also used : SegmentInfos(org.apache.lucene.index.SegmentInfos) SegmentCommitInfo(org.apache.lucene.index.SegmentCommitInfo) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IOException(java.io.IOException) IndexCommit(org.apache.lucene.index.IndexCommit) NoSuchFileException(java.nio.file.NoSuchFileException) SolrException(org.apache.solr.common.SolrException) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) CdcrUpdateLog(org.apache.solr.update.CdcrUpdateLog) IndexInput(org.apache.lucene.store.IndexInput) SimpleOrderedMap(org.apache.solr.common.util.SimpleOrderedMap) Map(java.util.Map) HashMap(java.util.HashMap) MetricsMap(org.apache.solr.metrics.MetricsMap) Directory(org.apache.lucene.store.Directory)

Example 12 with SegmentCommitInfo

Use of org.apache.lucene.index.SegmentCommitInfo in the lucene-solr project by Apache.

In the class SolrIndexWriter, the method merge:

/**
 * Overridden to record merge metrics (running counts, doc/segment gauges and
 * timers, split into major vs. minor merges) around the actual merge work.
 * When metrics collection is disabled this is a pure pass-through.
 */
@Override
public void merge(MergePolicy.OneMerge merge) throws IOException {
    if (!mergeTotals) {
        // Metrics disabled: just do the merge.
        super.merge(merge);
        return;
    }
    // Tally live and deleted docs across all segments taking part.
    long delDocs = 0;
    long liveDocs = merge.totalNumDocs();
    for (SegmentCommitInfo segInfo : merge.segments) {
        final int segDeletes = segInfo.getDelCount();
        liveDocs -= segDeletes;
        delDocs += segDeletes;
    }
    final int segCount = merge.segments.size();
    // A merge counts as "major" when it touches more live docs than the
    // configured threshold.
    final boolean isMajor = liveDocs > majorMergeDocs;
    final Timer.Context timerCtx;
    if (isMajor) {
        runningMajorMerges.incrementAndGet();
        runningMajorMergesDocs.addAndGet(liveDocs);
        runningMajorMergesSegments.addAndGet(segCount);
        if (mergeDetails) {
            majorMergedDocs.mark(liveDocs);
            majorDeletedDocs.mark(delDocs);
        }
        timerCtx = majorMerge.time();
    } else {
        runningMinorMerges.incrementAndGet();
        runningMinorMergesDocs.addAndGet(liveDocs);
        runningMinorMergesSegments.addAndGet(segCount);
        timerCtx = minorMerge.time();
    }
    try {
        super.merge(merge);
    } catch (Throwable t) {
        mergeErrors.inc();
        throw t;
    } finally {
        // Always stop the timer and roll back the "running" gauges, whether
        // the merge succeeded or failed.
        timerCtx.stop();
        if (isMajor) {
            runningMajorMerges.decrementAndGet();
            runningMajorMergesDocs.addAndGet(-liveDocs);
            runningMajorMergesSegments.addAndGet(-segCount);
        } else {
            runningMinorMerges.decrementAndGet();
            runningMinorMergesDocs.addAndGet(-liveDocs);
            runningMinorMergesSegments.addAndGet(-segCount);
        }
    }
}
Also used : SegmentCommitInfo(org.apache.lucene.index.SegmentCommitInfo) Timer(com.codahale.metrics.Timer)

Aggregations

SegmentCommitInfo (org.apache.lucene.index.SegmentCommitInfo)12 SegmentInfos (org.apache.lucene.index.SegmentInfos)5 IOException (java.io.IOException)4 HashMap (java.util.HashMap)4 ArrayList (java.util.ArrayList)3 Directory (org.apache.lucene.store.Directory)3 FileNotFoundException (java.io.FileNotFoundException)2 NoSuchFileException (java.nio.file.NoSuchFileException)2 ParameterizedMessage (org.apache.logging.log4j.message.ParameterizedMessage)2 Supplier (org.apache.logging.log4j.util.Supplier)2 IndexWriter (org.apache.lucene.index.IndexWriter)2 SegmentReader (org.apache.lucene.index.SegmentReader)2 IndexSearcher (org.apache.lucene.search.IndexSearcher)2 SimpleOrderedMap (org.apache.solr.common.util.SimpleOrderedMap)2 Timer (com.codahale.metrics.Timer)1 Files (java.nio.file.Files)1 Path (java.nio.file.Path)1 BasicFileAttributes (java.nio.file.attribute.BasicFileAttributes)1 AccessControlException (java.security.AccessControlException)1 Arrays (java.util.Arrays)1