
Example 1 with IndexAlreadyExistsException

use of org.elasticsearch.indices.IndexAlreadyExistsException in project opencast by opencast.

the class AbstractElasticsearchIndex method createIndex.

/**
 * Prepares the Elasticsearch index to store data for the types (or mappings) as returned by {@link #getDocumenTypes()}.
 *
 * @param idx
 *          the index name
 *
 * @throws SearchIndexException
 *           if index and type creation fails
 * @throws IOException
 *           if loading of the type definitions fails
 */
private void createIndex(String idx) throws SearchIndexException, IOException {
    // Make sure the site index exists
    try {
        logger.debug("Trying to create index for '{}'", idx);
        CreateIndexRequest indexCreateRequest = new CreateIndexRequest(idx);
        String settings = getIndexSettings(idx);
        if (settings != null)
            indexCreateRequest.settings(settings);
        CreateIndexResponse siteidxResponse = nodeClient.admin().indices().create(indexCreateRequest).actionGet();
        if (!siteidxResponse.isAcknowledged()) {
            throw new SearchIndexException("Unable to create index for '" + idx + "'");
        }
    } catch (IndexAlreadyExistsException e) {
        logger.info("Detected existing index '{}'", idx);
    }
    // Store the correct mapping
    for (String type : getDocumenTypes()) {
        PutMappingRequest siteMappingRequest = new PutMappingRequest(idx);
        siteMappingRequest.source(getIndexTypeDefinition(idx, type));
        siteMappingRequest.type(type);
        PutMappingResponse siteMappingResponse = nodeClient.admin().indices().putMapping(siteMappingRequest).actionGet();
        if (!siteMappingResponse.isAcknowledged()) {
            throw new SearchIndexException("Unable to install '" + type + "' mapping for index '" + idx + "'");
        }
    }
    // See if the index version exists and check if it matches. The request will
    // fail if there is no version index
    boolean versionIndexExists = false;
    GetRequestBuilder getRequestBuilder = nodeClient.prepareGet(idx, VERSION_TYPE, ROOT_ID);
    try {
        GetResponse response = getRequestBuilder.execute().actionGet();
        if (response.isExists() && response.getField(VERSION) != null) {
            int actualIndexVersion = Integer.parseInt((String) response.getField(VERSION).getValue());
            if (indexVersion != actualIndexVersion)
                throw new SearchIndexException("Search index is at version " + actualIndexVersion + ", but codebase expects " + indexVersion);
            versionIndexExists = true;
            logger.debug("Search index version is {}", indexVersion);
        }
    } catch (ElasticsearchException e) {
        logger.debug("Version index has not been created");
    }
    // The index does not exist, let's create it
    if (!versionIndexExists) {
        logger.debug("Creating version index for site '{}'", idx);
        IndexRequestBuilder requestBuilder = nodeClient.prepareIndex(idx, VERSION_TYPE, ROOT_ID);
        logger.debug("Index version of site '{}' is {}", idx, indexVersion);
        requestBuilder = requestBuilder.setSource(VERSION, Integer.toString(indexVersion));
        requestBuilder.execute().actionGet();
    }
    preparedIndices.add(idx);
}
Also used : SearchIndexException(org.opencastproject.matterhorn.search.SearchIndexException) PutMappingRequest(org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest) ElasticsearchException(org.elasticsearch.ElasticsearchException) GetResponse(org.elasticsearch.action.get.GetResponse) IndexRequestBuilder(org.elasticsearch.action.index.IndexRequestBuilder) IndexAlreadyExistsException(org.elasticsearch.indices.IndexAlreadyExistsException) PutMappingResponse(org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse) CreateIndexRequest(org.elasticsearch.action.admin.indices.create.CreateIndexRequest) CreateIndexResponse(org.elasticsearch.action.admin.indices.create.CreateIndexResponse) GetRequestBuilder(org.elasticsearch.action.get.GetRequestBuilder)
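
The pattern this method relies on is create-then-ignore-conflict: the index is created unconditionally and IndexAlreadyExistsException is treated as success, so bootstrapping stays idempotent even when several nodes start at once. A minimal sketch of just that step against the legacy Java client API used in these examples (the IndexBootstrap class, the ensureIndex name and the client parameter are illustrative, not taken from the Opencast code):

import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.indices.IndexAlreadyExistsException;

public final class IndexBootstrap {

    private IndexBootstrap() {
    }

    /**
     * Creates the index if it is missing and returns true when the index exists
     * afterwards, either because this call created it or because it was already there.
     */
    public static boolean ensureIndex(Client client, String index) {
        try {
            CreateIndexResponse response = client.admin().indices().prepareCreate(index).execute().actionGet();
            return response.isAcknowledged();
        } catch (IndexAlreadyExistsException e) {
            // Another node or thread created the index first; that is fine.
            return true;
        }
    }
}

Catching the conflict instead of checking existence first avoids the race between two callers bootstrapping the same index. Note that with a transport client the exception may arrive wrapped, which is why Example 5 below unwraps the cause before checking its type.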

Example 2 with IndexAlreadyExistsException

use of org.elasticsearch.indices.IndexAlreadyExistsException in project elasticsearch-river-couchdb by elastic.

the class CouchdbRiverIntegrationTest method testCreateCouchdbDatabaseWhileRunning_17.

/**
 * Test case for #17: https://github.com/elasticsearch/elasticsearch-river-couchdb/issues/17
 */
@Test
public void testCreateCouchdbDatabaseWhileRunning_17() throws IOException, InterruptedException {
    final int nbDocs = between(50, 300);
    logger.info("  -> Checking couchdb running");
    CouchDBClient.checkCouchDbRunning();
    logger.info("  -> Create index");
    try {
        createIndex(getDbName());
    } catch (IndexAlreadyExistsException e) {
        // No worries. We already created the index before
    }
    logger.info("  -> Create river");
    index("_river", getDbName(), "_meta", jsonBuilder().startObject().field("type", "couchdb").endObject());
    // Check that the river is started
    assertThat(awaitBusy(new Predicate<Object>() {

        public boolean apply(Object obj) {
            try {
                refresh();
                GetResponse response = get("_river", getDbName(), "_status");
                return response.isExists();
            } catch (IndexMissingException e) {
                return false;
            }
        }
    }, 5, TimeUnit.SECONDS), equalTo(true));
    logger.info("  -> Creating test database [{}]", getDbName());
    CouchDBClient.dropAndCreateTestDatabase(getDbName());
    logger.info("  -> Inserting [{}] docs in couchdb", nbDocs);
    for (int i = 0; i < nbDocs; i++) {
        CouchDBClient.putDocument(getDbName(), "" + i, "foo", "bar", "content", "" + i);
    }
    // Check that docs are still processed by the river
    assertThat(awaitBusy(new Predicate<Object>() {

        public boolean apply(Object obj) {
            try {
                refresh();
                SearchResponse response = client().prepareSearch(getDbName()).get();
                logger.info("  -> got {} docs in {} index", response.getHits().totalHits(), getDbName());
                return response.getHits().totalHits() == nbDocs;
            } catch (IndexMissingException e) {
                return false;
            }
        }
    }, 1, TimeUnit.MINUTES), equalTo(true));
}
Also used : IndexAlreadyExistsException(org.elasticsearch.indices.IndexAlreadyExistsException) IndexMissingException(org.elasticsearch.indices.IndexMissingException) GetResponse(org.elasticsearch.action.get.GetResponse) Predicate(org.elasticsearch.common.base.Predicate) SearchResponse(org.elasticsearch.action.search.SearchResponse) Test(org.junit.Test) ElasticsearchIntegrationTest(org.elasticsearch.test.ElasticsearchIntegrationTest)
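
Both this test and the next one lean on the same polling idiom: the assertion is wrapped in a Predicate, retried until it holds or a timeout expires, and IndexMissingException is treated as "not indexed yet". awaitBusy from the Elasticsearch test framework performs the retry loop; the helper below is a hypothetical stand-alone equivalent for readers outside the test framework (the Poll class, the Condition interface and the backoff values are illustrative, not part of Elasticsearch):

import java.util.concurrent.TimeUnit;

public final class Poll {

    private Poll() {
    }

    /** Polls the condition until it returns true or the timeout elapses. */
    public static boolean await(Condition condition, long timeout, TimeUnit unit) throws InterruptedException {
        long deadline = System.nanoTime() + unit.toNanos(timeout);
        long sleepMillis = 10;
        while (System.nanoTime() < deadline) {
            if (condition.holds()) {
                return true;
            }
            Thread.sleep(sleepMillis);
            // Back off gradually so long waits do not hammer the cluster.
            sleepMillis = Math.min(sleepMillis * 2, 500);
        }
        return condition.holds();
    }

    /** Single-method interface in the Java 7 style used by these tests, which predate lambdas. */
    public interface Condition {
        boolean holds();
    }
}

A test would then call Poll.await(condition, 1, TimeUnit.MINUTES) and assert on the returned boolean, exactly as the examples do with awaitBusy.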

Example 3 with IndexAlreadyExistsException

use of org.elasticsearch.indices.IndexAlreadyExistsException in project elasticsearch-river-couchdb by elastic.

the class CouchdbRiverIntegrationTest method testClosingWhileIndexing_66.

/**
 * Test case for #66: https://github.com/elasticsearch/elasticsearch-river-couchdb/issues/66
 */
@Test
public void testClosingWhileIndexing_66() throws IOException, InterruptedException {
    final int nbDocs = 10;
    logger.info("  -> Checking couchdb running");
    CouchDBClient.checkCouchDbRunning();
    logger.info("  -> Creating test database [{}]", getDbName());
    CouchDBClient.dropAndCreateTestDatabase(getDbName());
    logger.info("  -> Inserting [{}] docs in couchdb", nbDocs);
    for (int i = 0; i < nbDocs; i++) {
        CouchDBClient.putDocument(getDbName(), "" + i, "foo", "bar", "content", "" + i);
    }
    logger.info("  -> Create index");
    try {
        createIndex(getDbName());
    } catch (IndexAlreadyExistsException e) {
        // No worries. We already created the index before
    }
    logger.info("  -> Create river");
    index("_river", getDbName(), "_meta", jsonBuilder().startObject().field("type", "couchdb").startObject("couchdb").field("script", "for (int x = 0; x < 10000000; x++) { x*x*x } ;").endObject().startObject("index").field("flush_interval", "100ms").endObject().endObject());
    // Check that docs are indexed by the river
    assertThat(awaitBusy(new Predicate<Object>() {

        public boolean apply(Object obj) {
            try {
                refresh();
                SearchResponse response = client().prepareSearch(getDbName()).get();
                logger.info("  -> got {} docs in {} index", response.getHits().totalHits(), getDbName());
                return response.getHits().totalHits() == nbDocs;
            } catch (IndexMissingException e) {
                return false;
            }
        }
    }, 1, TimeUnit.MINUTES), equalTo(true));
    logger.info("  -> Inserting [{}] docs in couchdb", nbDocs);
    for (int i = nbDocs; i < 2 * nbDocs; i++) {
        CouchDBClient.putDocument(getDbName(), "" + i, "foo", "bar", "content", "" + i);
    }
    // Check that docs are still processed by the river
    assertThat(awaitBusy(new Predicate<Object>() {

        public boolean apply(Object obj) {
            try {
                refresh();
                SearchResponse response = client().prepareSearch(getDbName()).get();
                logger.info("  -> got {} docs in {} index", response.getHits().totalHits(), getDbName());
                return response.getHits().totalHits() > nbDocs;
            } catch (IndexMissingException e) {
                return false;
            }
        }
    }, 10, TimeUnit.SECONDS), equalTo(true));
    logger.info("  -> Remove river while injecting");
    client().prepareDelete("_river", getDbName(), "_meta").get();
    logger.info("  -> Inserting [{}] docs in couchdb", nbDocs);
    for (int i = 2 * nbDocs; i < 3 * nbDocs; i++) {
        CouchDBClient.putDocument(getDbName(), "" + i, "foo", "bar", "content", "" + i);
    }
    // Check that docs are indexed by the river
    boolean foundAllDocs = awaitBusy(new Predicate<Object>() {

        public boolean apply(Object obj) {
            try {
                refresh();
                SearchResponse response = client().prepareSearch(getDbName()).get();
                logger.info("  -> got {} docs in {} index", response.getHits().totalHits(), getDbName());
                return response.getHits().totalHits() == 3 * nbDocs;
            } catch (IndexMissingException e) {
                return false;
            }
        }
    }, 10, TimeUnit.SECONDS);
    // We should not have 30 documents at the end as we removed the river immediately after having
    // injected 10 more docs in couchdb
    assertThat("We should not have 30 documents as the river is supposed to have been stopped!", foundAllDocs, is(false));
    // We expect to see a line in the logs like:
    // [WARN ][org.elasticsearch.river.couchdb] [node_0] [couchdb][elasticsearch_couch_test_test_closing_while_indexing_66] river was closing while trying to index document [elasticsearch_couch_test_test_closing_while_indexing_66/elasticsearch_couch_test_test_closing_while_indexing_66/11]. Operation skipped.
}
Also used : IndexAlreadyExistsException(org.elasticsearch.indices.IndexAlreadyExistsException) IndexMissingException(org.elasticsearch.indices.IndexMissingException) Predicate(org.elasticsearch.common.base.Predicate) SearchResponse(org.elasticsearch.action.search.SearchResponse) Test(org.junit.Test) ElasticsearchIntegrationTest(org.elasticsearch.test.ElasticsearchIntegrationTest)

Example 4 with IndexAlreadyExistsException

use of org.elasticsearch.indices.IndexAlreadyExistsException in project elasticsearch-jdbc by jprante.

the class StandardSink method beforeFetch.

@Override
public synchronized void beforeFetch() throws IOException {
    Settings settings = context.getSettings();
    String index = settings.get("index", "jdbc");
    String type = settings.get("type", "jdbc");
    if (clientAPI == null) {
        clientAPI = createClient(settings);
        if (clientAPI.client() != null) {
            int pos = index.indexOf('\'');
            if (pos >= 0) {
                SimpleDateFormat formatter = new SimpleDateFormat();
                formatter.applyPattern(index);
                index = formatter.format(new Date());
            }
            try {
                index = resolveAlias(index);
            } catch (Exception e) {
                logger.warn("can not resolve index {}", index);
            }
            setIndex(index);
            setType(type);
            try {
                createIndex(settings, index, type);
            } catch (IndexAlreadyExistsException e) {
                logger.warn(e.getMessage());
            }
        }
        clientAPI.waitForCluster("YELLOW", TimeValue.timeValueSeconds(30));
    }
}
Also used : IndexAlreadyExistsException(org.elasticsearch.indices.IndexAlreadyExistsException) SimpleDateFormat(java.text.SimpleDateFormat) Settings(org.elasticsearch.common.settings.Settings) Date(java.util.Date) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException)
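
Besides the createIndex call guarded by IndexAlreadyExistsException, beforeFetch first resolves the index name: if the configured value contains a quoted literal (the indexOf('\'') check), it is interpreted as a SimpleDateFormat pattern, which yields time-based names such as jdbc-2015.06.01. A small self-contained sketch of that resolution step, assuming the same convention (the IndexNameResolver class and the sample pattern are illustrative, not part of elasticsearch-jdbc):

import java.text.SimpleDateFormat;
import java.util.Date;

public final class IndexNameResolver {

    private IndexNameResolver() {
    }

    /**
     * Treats the configured index name as a SimpleDateFormat pattern when it
     * contains a quoted literal, mirroring the indexOf('\'') check above.
     */
    public static String resolve(String configuredIndex) {
        if (configuredIndex.indexOf('\'') >= 0) {
            SimpleDateFormat formatter = new SimpleDateFormat();
            formatter.applyPattern(configuredIndex);
            return formatter.format(new Date());
        }
        return configuredIndex;
    }

    public static void main(String[] args) {
        // A pattern such as "'jdbc-'yyyy.MM.dd" resolves to something like "jdbc-2015.06.01".
        System.out.println(resolve("'jdbc-'yyyy.MM.dd"));
    }
}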

Example 5 with IndexAlreadyExistsException

use of org.elasticsearch.indices.IndexAlreadyExistsException in project rssriver by dadoonet.

the class RssRiver method start.

@Override
public void start() {
    if (logger.isInfoEnabled())
        logger.info("Starting rss stream");
    try {
        client.admin().indices().prepareCreate(indexName).execute().actionGet();
    } catch (Exception e) {
        if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
            // that's fine
        } else if (ExceptionsHelper.unwrapCause(e) instanceof ClusterBlockException) {
            // ok, not recovered yet..., let's start indexing and hope we
            // recover by the first bulk
            // TODO: smarter logic would be to register a cluster event
            // listener here, and only start sampling when the block is
            // removed...
        } else {
            logger.warn("failed to create index [{}], disabling river...", e, indexName);
            return;
        }
    }
    try {
        pushMapping(indexName, typeName, RssToJson.buildRssMapping(typeName, raw));
    } catch (Exception e) {
        logger.warn("failed to create mapping for [{}/{}], disabling river...", e, indexName, typeName);
        return;
    }
    // Creating bulk processor
    this.bulkProcessor = BulkProcessor.builder(client, new BulkProcessor.Listener() {

        @Override
        public void beforeBulk(long executionId, BulkRequest request) {
            logger.debug("Going to execute new bulk composed of {} actions", request.numberOfActions());
        }

        @Override
        public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
            logger.debug("Executed bulk composed of {} actions", request.numberOfActions());
            if (response.hasFailures()) {
                logger.warn("There was failures while executing bulk", response.buildFailureMessage());
                if (logger.isDebugEnabled()) {
                    for (BulkItemResponse item : response.getItems()) {
                        if (item.isFailed()) {
                            logger.debug("Error for {}/{}/{} for {} operation: {}", item.getIndex(), item.getType(), item.getId(), item.getOpType(), item.getFailureMessage());
                        }
                    }
                }
            }
        }

        @Override
        public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
            logger.warn("Error executing bulk", failure);
        }
    }).setBulkActions(bulkSize).setConcurrentRequests(maxConcurrentBulk).setFlushInterval(bulkFlushInterval).build();
    // We create as many Threads as there are feeds
    threads = new ArrayList<Thread>(feedsDefinition.size());
    int threadNumber = 0;
    for (RssRiverFeedDefinition feedDefinition : feedsDefinition) {
        Thread thread = EsExecutors.daemonThreadFactory(settings.globalSettings(), "rss_slurper_" + threadNumber).newThread(new RSSParser(feedDefinition));
        thread.start();
        threads.add(thread);
        threadNumber++;
    }
}
Also used : BulkItemResponse(org.elasticsearch.action.bulk.BulkItemResponse) BulkResponse(org.elasticsearch.action.bulk.BulkResponse) ClusterBlockException(org.elasticsearch.cluster.block.ClusterBlockException) IndexAlreadyExistsException(org.elasticsearch.indices.IndexAlreadyExistsException) MalformedURLException(java.net.MalformedURLException) IOException(java.io.IOException) FeedException(com.rometools.rome.io.FeedException) BulkRequest(org.elasticsearch.action.bulk.BulkRequest)
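
Unlike Example 1, this river cannot simply catch IndexAlreadyExistsException: with a transport client the server-side exception may arrive wrapped (for example in a RemoteTransportException), so the code catches broadly and classifies ExceptionsHelper.unwrapCause(e) instead, treating an existing index and a cluster block as benign. A minimal sketch of that classification against the same legacy client API (the RiverIndexSetup class and method name are illustrative, not taken from rssriver):

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.indices.IndexAlreadyExistsException;

public final class RiverIndexSetup {

    private RiverIndexSetup() {
    }

    /**
     * Returns true when indexing can proceed: the index was created, it already
     * existed, or the cluster is still recovering and the first bulk will retry.
     */
    public static boolean createIndexIgnoringBenignFailures(Client client, String index) {
        try {
            client.admin().indices().prepareCreate(index).execute().actionGet();
            return true;
        } catch (Exception e) {
            Throwable cause = ExceptionsHelper.unwrapCause(e);
            if (cause instanceof IndexAlreadyExistsException) {
                // Already created; nothing to do.
                return true;
            }
            if (cause instanceof ClusterBlockException) {
                // Cluster not recovered yet; indexing will catch up once the block is lifted.
                return true;
            }
            // Anything else is a real failure; the caller should disable the river.
            return false;
        }
    }
}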

Aggregations

IndexAlreadyExistsException (org.elasticsearch.indices.IndexAlreadyExistsException): 6
SearchResponse (org.elasticsearch.action.search.SearchResponse): 3
Predicate (org.elasticsearch.common.base.Predicate): 3
IndexMissingException (org.elasticsearch.indices.IndexMissingException): 3
IOException (java.io.IOException): 2
GetResponse (org.elasticsearch.action.get.GetResponse): 2
ElasticsearchIntegrationTest (org.elasticsearch.test.ElasticsearchIntegrationTest): 2
Test (org.junit.Test): 2
FeedException (com.rometools.rome.io.FeedException): 1
MalformedURLException (java.net.MalformedURLException): 1
SimpleDateFormat (java.text.SimpleDateFormat): 1
Date (java.util.Date): 1
ExecutionException (java.util.concurrent.ExecutionException): 1
ElasticsearchException (org.elasticsearch.ElasticsearchException): 1
CreateIndexRequest (org.elasticsearch.action.admin.indices.create.CreateIndexRequest): 1
CreateIndexResponse (org.elasticsearch.action.admin.indices.create.CreateIndexResponse): 1
PutMappingRequest (org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest): 1
PutMappingResponse (org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse): 1
BulkItemResponse (org.elasticsearch.action.bulk.BulkItemResponse): 1
BulkRequest (org.elasticsearch.action.bulk.BulkRequest): 1