Example 6 with SegmentCommitInfo

Use of org.apache.lucene.index.SegmentCommitInfo in project elasticsearch by elastic.

From the class ElasticsearchMergePolicy, method findForcedMerges.

@Override
public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo, Boolean> segmentsToMerge, IndexWriter writer) throws IOException {
    if (upgradeInProgress) {
        MergeSpecification spec = new MergeSpecification();
        for (SegmentCommitInfo info : segmentInfos) {
            if (shouldUpgrade(info)) {
                // TODO: Use IndexUpgradeMergePolicy instead.  We should be comparing codecs,
                // for now we just assume every minor upgrade has a new format.
                logger.debug("Adding segment {} to be upgraded", info.info.name);
                spec.add(new OneMerge(Collections.singletonList(info)));
            }
            if (spec.merges.size() == MAX_CONCURRENT_UPGRADE_MERGES) {
                // Hit our max upgrades, so return the spec; we will get a cascaded call to continue.
                logger.debug("Returning {} merges for upgrade", spec.merges.size());
                return spec;
            }
        }
        // We must have fewer than our max upgrade merges, so the next return will be our last in upgrading mode.
        if (spec.merges.isEmpty() == false) {
            logger.debug("Returning {} merges for end of upgrade", spec.merges.size());
            return spec;
        }
        // Only set this once there are 0 segments needing upgrading, because when we return a
        // spec, IndexWriter may (silently!) reject that merge if some of the segments we asked
        // to be merged were already being (naturally) merged:
        upgradeInProgress = false;
        // fall through, so when we don't have any segments to upgrade, the delegate policy
        // has a chance to decide what to do (e.g. collapse the segments to satisfy maxSegmentCount)
    }
    return delegate.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, writer);
}
Also used: SegmentCommitInfo (org.apache.lucene.index.SegmentCommitInfo)
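
The shouldUpgrade(info) call above is not shown on this page. Here is a minimal sketch of what such a check could look like, assuming the decision is based only on the segment's Lucene version (the TODO in the snippet notes that codecs should really be compared; the needsUpgrade name below is hypothetical, not the actual Elasticsearch implementation):

import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.util.Version;

// Hypothetical helper: treat any segment not written by the current
// Lucene version as an upgrade candidate.
static boolean needsUpgrade(SegmentCommitInfo info) {
    Version segmentVersion = info.info.getVersion();
    return Version.LATEST.equals(segmentVersion) == false;
}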

Example 7 with SegmentCommitInfo

Use of org.apache.lucene.index.SegmentCommitInfo in project elasticsearch by elastic.

From the class InternalEngine, method segments.

@Override
public List<Segment> segments(boolean verbose) {
    try (ReleasableLock lock = readLock.acquire()) {
        Segment[] segmentsArr = getSegmentInfo(lastCommittedSegmentInfos, verbose);
        // fill in the merges flag
        Set<OnGoingMerge> onGoingMerges = mergeScheduler.onGoingMerges();
        for (OnGoingMerge onGoingMerge : onGoingMerges) {
            for (SegmentCommitInfo segmentInfoPerCommit : onGoingMerge.getMergedSegments()) {
                for (Segment segment : segmentsArr) {
                    if (segment.getName().equals(segmentInfoPerCommit.info.name)) {
                        segment.mergeId = onGoingMerge.getId();
                        break;
                    }
                }
            }
        }
        return Arrays.asList(segmentsArr);
    }
}
Also used: SegmentCommitInfo (org.apache.lucene.index.SegmentCommitInfo), OnGoingMerge (org.elasticsearch.index.merge.OnGoingMerge), ReleasableLock (org.elasticsearch.common.util.concurrent.ReleasableLock)
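
The merge-flag bookkeeping above depends on Engine internals, but the underlying iteration over SegmentCommitInfo is plain Lucene. A standalone sketch (method name illustrative) that lists the segments of the latest commit in a Directory:

import java.io.IOException;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;

// Read the most recent commit point and print basic per-segment stats.
static void printSegments(Directory dir) throws IOException {
    SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
    for (SegmentCommitInfo info : infos) {
        System.out.println(info.info.name
                + " maxDoc=" + info.info.maxDoc()
                + " delCount=" + info.getDelCount());
    }
}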

Example 8 with SegmentCommitInfo

Use of org.apache.lucene.index.SegmentCommitInfo in project elasticsearch by elastic.

From the class Engine, method getSegmentFileSizes.

private ImmutableOpenMap<String, Long> getSegmentFileSizes(SegmentReader segmentReader) {
    Directory directory = null;
    SegmentCommitInfo segmentCommitInfo = segmentReader.getSegmentInfo();
    boolean useCompoundFile = segmentCommitInfo.info.getUseCompoundFile();
    if (useCompoundFile) {
        try {
            directory = engineConfig.getCodec().compoundFormat().getCompoundReader(segmentReader.directory(), segmentCommitInfo.info, IOContext.READ);
        } catch (IOException e) {
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("Error when opening compound reader for Directory [{}] and SegmentCommitInfo [{}]", segmentReader.directory(), segmentCommitInfo), e);
            return ImmutableOpenMap.of();
        }
    } else {
        directory = segmentReader.directory();
    }
    assert directory != null;
    String[] files;
    if (useCompoundFile) {
        try {
            files = directory.listAll();
        } catch (IOException e) {
            final Directory finalDirectory = directory;
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("Couldn't list Compound Reader Directory [{}]", finalDirectory), e);
            return ImmutableOpenMap.of();
        }
    } else {
        try {
            files = segmentReader.getSegmentInfo().files().toArray(new String[] {});
        } catch (IOException e) {
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("Couldn't list Directory from SegmentReader [{}] and SegmentInfo [{}]", segmentReader, segmentReader.getSegmentInfo()), e);
            return ImmutableOpenMap.of();
        }
    }
    ImmutableOpenMap.Builder<String, Long> map = ImmutableOpenMap.builder();
    for (String file : files) {
        String extension = IndexFileNames.getExtension(file);
        long length = 0L;
        try {
            length = directory.fileLength(file);
        } catch (NoSuchFileException | FileNotFoundException e) {
            final Directory finalDirectory = directory;
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("Tried to query fileLength but file is gone [{}] [{}]", finalDirectory, file), e);
        } catch (IOException e) {
            final Directory finalDirectory = directory;
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("Error when trying to query fileLength [{}] [{}]", finalDirectory, file), e);
        }
        if (length == 0L) {
            continue;
        }
        map.put(extension, length);
    }
    if (useCompoundFile && directory != null) {
        try {
            directory.close();
        } catch (IOException e) {
            final Directory finalDirectory = directory;
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("Error when closing compound reader on Directory [{}]", finalDirectory), e);
        }
    }
    return map.build();
}
Also used: SegmentCommitInfo (org.apache.lucene.index.SegmentCommitInfo), NoSuchFileException (java.nio.file.NoSuchFileException), FileNotFoundException (java.io.FileNotFoundException), IOException (java.io.IOException), ImmutableOpenMap (org.elasticsearch.common.collect.ImmutableOpenMap), Supplier (org.apache.logging.log4j.util.Supplier), ParameterizedMessage (org.apache.logging.log4j.message.ParameterizedMessage), Directory (org.apache.lucene.store.Directory)
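
A caller needs a SegmentReader before it can invoke a method like this. One way to obtain them, sketched against plain Lucene (Elasticsearch has its own unwrapping utilities, not shown here), is to walk the leaves of an open DirectoryReader:

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SegmentReader;

// Each leaf of a standard on-disk index reader is backed by a SegmentReader.
static void visitSegmentReaders(DirectoryReader reader) {
    for (LeafReaderContext ctx : reader.leaves()) {
        if (ctx.reader() instanceof SegmentReader) {
            SegmentReader segmentReader = (SegmentReader) ctx.reader();
            System.out.println(segmentReader.getSegmentInfo().info.name);
        }
    }
}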

Example 9 with SegmentCommitInfo

Use of org.apache.lucene.index.SegmentCommitInfo in project elasticsearch by elastic.

From the class StoreRecoveryTests, method testAddIndices.

public void testAddIndices() throws IOException {
    Directory[] dirs = new Directory[randomIntBetween(1, 10)];
    final int numDocs = randomIntBetween(50, 100);
    int id = 0;
    for (int i = 0; i < dirs.length; i++) {
        dirs[i] = newFSDirectory(createTempDir());
        IndexWriter writer = new IndexWriter(dirs[i], newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE).setOpenMode(IndexWriterConfig.OpenMode.CREATE));
        for (int j = 0; j < numDocs; j++) {
            writer.addDocument(Arrays.asList(new StringField("id", Integer.toString(id++), Field.Store.YES)));
        }
        writer.commit();
        writer.close();
    }
    StoreRecovery storeRecovery = new StoreRecovery(new ShardId("foo", "bar", 1), logger);
    RecoveryState.Index indexStats = new RecoveryState.Index();
    Directory target = newFSDirectory(createTempDir());
    storeRecovery.addIndices(indexStats, target, dirs);
    int numFiles = 0;
    Predicate<String> filesFilter = (f) -> f.startsWith("segments") == false && f.equals("write.lock") == false && f.startsWith("extra") == false;
    for (Directory d : dirs) {
        numFiles += Arrays.asList(d.listAll()).stream().filter(filesFilter).count();
    }
    final long targetNumFiles = Arrays.asList(target.listAll()).stream().filter(filesFilter).count();
    assertEquals(numFiles, targetNumFiles);
    assertEquals(indexStats.totalFileCount(), targetNumFiles);
    if (hardLinksSupported(createTempDir())) {
        assertEquals(targetNumFiles, indexStats.reusedFileCount());
    } else {
        assertEquals(0, indexStats.reusedFileCount());
    }
    DirectoryReader reader = DirectoryReader.open(target);
    SegmentInfos segmentCommitInfos = SegmentInfos.readLatestCommit(target);
    for (SegmentCommitInfo info : segmentCommitInfos) {
        // check that we didn't merge
        assertEquals("all sources must be flush", info.info.getDiagnostics().get("source"), "flush");
    }
    assertEquals(0, reader.numDeletedDocs());
    assertEquals(id, reader.numDocs());
    reader.close();
    target.close();
    IOUtils.close(dirs);
}
Also used: NoMergePolicy (org.apache.lucene.index.NoMergePolicy), Arrays (java.util.Arrays), Files (java.nio.file.Files), StringField (org.apache.lucene.document.StringField), Predicate (java.util.function.Predicate), DirectoryReader (org.apache.lucene.index.DirectoryReader), IOUtils (org.apache.lucene.util.IOUtils), IOException (java.io.IOException), BasicFileAttributes (java.nio.file.attribute.BasicFileAttributes), SegmentCommitInfo (org.apache.lucene.index.SegmentCommitInfo), SegmentInfos (org.apache.lucene.index.SegmentInfos), IndexWriter (org.apache.lucene.index.IndexWriter), Field (org.apache.lucene.document.Field), RecoveryState (org.elasticsearch.indices.recovery.RecoveryState), CodecUtil (org.apache.lucene.codecs.CodecUtil), Directory (org.apache.lucene.store.Directory), AccessControlException (java.security.AccessControlException), ESTestCase (org.elasticsearch.test.ESTestCase), IndexWriterConfig (org.apache.lucene.index.IndexWriterConfig), IOContext (org.apache.lucene.store.IOContext), Path (java.nio.file.Path), IndexOutput (org.apache.lucene.store.IndexOutput)
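
The hardLinksSupported helper used by the test is not part of this snippet. A minimal sketch of such a probe, assuming it simply attempts to create a hard link in a scratch directory (the supportsHardLinks name and behavior below are illustrative, not the project's actual helper):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Probe hard-link support by linking a freshly created file.
static boolean supportsHardLinks(Path dir) throws IOException {
    Path source = Files.createFile(dir.resolve("probe_source"));
    try {
        Files.createLink(dir.resolve("probe_link"), source);
        return true;
    } catch (IOException | UnsupportedOperationException | SecurityException e) {
        // Hard links unavailable (filesystem, OS, or security policy).
        return false;
    }
}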

Example 10 with SegmentCommitInfo

Use of org.apache.lucene.index.SegmentCommitInfo in project lucene-solr by apache.

From the class ReplicationHandler, method getFileList.

@SuppressWarnings("unchecked")
private void getFileList(SolrParams solrParams, SolrQueryResponse rsp) {
    String v = solrParams.get(GENERATION);
    if (v == null) {
        rsp.add("status", "no index generation specified");
        return;
    }
    long gen = Long.parseLong(v);
    IndexCommit commit = core.getDeletionPolicy().getCommitPoint(gen);
    //System.out.println("ask for files for gen:" + commit.getGeneration() + core.getCoreDescriptor().getCoreContainer().getZkController().getNodeName());
    if (commit == null) {
        rsp.add("status", "invalid index generation");
        return;
    }
    // reserve the index commit for some time
    core.getDeletionPolicy().setReserveDuration(gen, reserveCommitDuration);
    List<Map<String, Object>> result = new ArrayList<>();
    Directory dir = null;
    try {
        dir = core.getDirectoryFactory().get(core.getNewIndexDir(), DirContext.DEFAULT, core.getSolrConfig().indexConfig.lockType);
        SegmentInfos infos = SegmentInfos.readCommit(dir, commit.getSegmentsFileName());
        for (SegmentCommitInfo commitInfo : infos) {
            for (String file : commitInfo.files()) {
                Map<String, Object> fileMeta = new HashMap<>();
                fileMeta.put(NAME, file);
                fileMeta.put(SIZE, dir.fileLength(file));
                try (final IndexInput in = dir.openInput(file, IOContext.READONCE)) {
                    try {
                        long checksum = CodecUtil.retrieveChecksum(in);
                        fileMeta.put(CHECKSUM, checksum);
                    } catch (Exception e) {
                        LOG.warn("Could not read checksum from index file: " + file, e);
                    }
                }
                result.add(fileMeta);
            }
        }
        // add the segments_N file
        Map<String, Object> fileMeta = new HashMap<>();
        fileMeta.put(NAME, infos.getSegmentsFileName());
        fileMeta.put(SIZE, dir.fileLength(infos.getSegmentsFileName()));
        if (infos.getId() != null) {
            try (final IndexInput in = dir.openInput(infos.getSegmentsFileName(), IOContext.READONCE)) {
                try {
                    fileMeta.put(CHECKSUM, CodecUtil.retrieveChecksum(in));
                } catch (Exception e) {
                    LOG.warn("Could not read checksum from index file: " + infos.getSegmentsFileName(), e);
                }
            }
        }
        result.add(fileMeta);
    } catch (IOException e) {
        rsp.add("status", "unable to get file names for given index generation");
        rsp.add(EXCEPTION, e);
        LOG.error("Unable to get file names for indexCommit generation: " + gen, e);
    } finally {
        if (dir != null) {
            try {
                core.getDirectoryFactory().release(dir);
            } catch (IOException e) {
                SolrException.log(LOG, "Could not release directory after fetching file list", e);
            }
        }
    }
    rsp.add(CMD_GET_FILE_LIST, result);
    // fetch list of tlog files only if cdcr is activated
    if (solrParams.getBool(TLOG_FILES, true) && core.getUpdateHandler().getUpdateLog() != null && core.getUpdateHandler().getUpdateLog() instanceof CdcrUpdateLog) {
        try {
            List<Map<String, Object>> tlogfiles = getTlogFileList(commit);
            LOG.info("Adding tlog files to list: " + tlogfiles);
            rsp.add(TLOG_FILES, tlogfiles);
        } catch (IOException e) {
            rsp.add("status", "unable to get tlog file names for given index generation");
            rsp.add(EXCEPTION, e);
            LOG.error("Unable to get tlog file names for indexCommit generation: " + gen, e);
        }
    }
    if (confFileNameAlias.size() < 1 || core.getCoreContainer().isZooKeeperAware())
        return;
    LOG.debug("Adding config files to list: " + includeConfFiles);
    // if configuration files need to be included, get their details
    rsp.add(CONF_FILES, getConfFileInfoFromCache(confFileNameAlias, confFileInfoCache));
}
Also used: SegmentInfos (org.apache.lucene.index.SegmentInfos), SegmentCommitInfo (org.apache.lucene.index.SegmentCommitInfo), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), IOException (java.io.IOException), IndexCommit (org.apache.lucene.index.IndexCommit), NoSuchFileException (java.nio.file.NoSuchFileException), SolrException (org.apache.solr.common.SolrException), FileNotFoundException (java.io.FileNotFoundException), CdcrUpdateLog (org.apache.solr.update.CdcrUpdateLog), IndexInput (org.apache.lucene.store.IndexInput), SimpleOrderedMap (org.apache.solr.common.util.SimpleOrderedMap), Map (java.util.Map), MetricsMap (org.apache.solr.metrics.MetricsMap), Directory (org.apache.lucene.store.Directory)
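
On the receiving side of replication, each fetched file can be verified end to end before it is installed. A sketch of the basic verification pattern using Lucene's CodecUtil (this is not Solr's actual fetch logic; the method name is illustrative):

import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

// Recompute a file's checksum (validating its codec footer on the way)
// and compare it against an expected value, e.g. the CHECKSUM entry
// returned by the file list above.
static boolean checksumMatches(Directory dir, String file, long expected) throws IOException {
    try (IndexInput in = dir.openInput(file, IOContext.READONCE)) {
        return CodecUtil.checksumEntireFile(in) == expected;
    }
}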

Aggregations

SegmentCommitInfo (org.apache.lucene.index.SegmentCommitInfo): 12 uses
SegmentInfos (org.apache.lucene.index.SegmentInfos): 5 uses
IOException (java.io.IOException): 4 uses
HashMap (java.util.HashMap): 4 uses
ArrayList (java.util.ArrayList): 3 uses
Directory (org.apache.lucene.store.Directory): 3 uses
FileNotFoundException (java.io.FileNotFoundException): 2 uses
NoSuchFileException (java.nio.file.NoSuchFileException): 2 uses
ParameterizedMessage (org.apache.logging.log4j.message.ParameterizedMessage): 2 uses
Supplier (org.apache.logging.log4j.util.Supplier): 2 uses
IndexWriter (org.apache.lucene.index.IndexWriter): 2 uses
SegmentReader (org.apache.lucene.index.SegmentReader): 2 uses
IndexSearcher (org.apache.lucene.search.IndexSearcher): 2 uses
SimpleOrderedMap (org.apache.solr.common.util.SimpleOrderedMap): 2 uses
Timer (com.codahale.metrics.Timer): 1 use
Files (java.nio.file.Files): 1 use
Path (java.nio.file.Path): 1 use
BasicFileAttributes (java.nio.file.attribute.BasicFileAttributes): 1 use
AccessControlException (java.security.AccessControlException): 1 use
Arrays (java.util.Arrays): 1 use