use of org.elasticsearch.indices.IndexMissingException in project elasticsearch-skywalker by jprante.
the class AbstractNodeTest method deleteIndices.
@AfterMethod
public void deleteIndices() {
    try {
        // clear test index
        client("1").admin().indices().delete(new DeleteIndexRequest().indices(INDEX)).actionGet();
    } catch (IndexMissingException e) {
        // ignore
    }
    closeNode("1");
    closeAllNodes();
}
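The try/catch around the delete call is the common pre-2.0 idiom for "delete the index if it exists". A minimal sketch of the same idiom as a reusable helper (the helper name is ours, not the project's):

// Hypothetical helper; assumes the same pre-2.0 Elasticsearch client API
// used above, where deleting a missing index throws IndexMissingException.
static void deleteIndexIfPresent(Client client, String index) {
    try {
        client.admin().indices().delete(new DeleteIndexRequest().indices(index)).actionGet();
    } catch (IndexMissingException e) {
        // The index does not exist, which is the state we wanted anyway.
    }
}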
use of org.elasticsearch.indices.IndexMissingException in project elasticsearch-river-rabbitmq by elastic.
the class RabbitMQIntegrationTest method launchTest.
private void launchTest(XContentBuilder river, final int numMessages, final int numDocsPerMessage, InjectorHook injectorHook, boolean delete, boolean update) throws Exception {
    final String dbName = getDbName();
    logger.info(" --> create index [{}]", dbName);
    try {
        client().admin().indices().prepareDelete(dbName).get();
    } catch (IndexMissingException e) {
        // No worries.
    }
    try {
        createIndex(dbName);
    } catch (IndexAlreadyExistsException e) {
        // No worries. The index was already created.
    }
    ensureGreen(dbName);
    logger.info(" -> Checking rabbitmq running");
    // We try to connect to RabbitMQ.
    // If it's not launched, we fail fast with an explicit error message.
    Channel channel = null;
    Connection connection = null;
    try {
        logger.info(" --> connecting to rabbitmq");
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost");
        factory.setPort(AMQP.PROTOCOL.PORT);
        connection = factory.newConnection();
    } catch (ConnectException ce) {
        throw new Exception("RabbitMQ service is not launched on localhost:" + AMQP.PROTOCOL.PORT + ". Cannot start integration test. Launch `rabbitmq-server`.", ce);
    }
    try {
        logger.info(" -> Creating [{}] channel", dbName);
        channel = connection.createChannel();
        logger.info(" -> Creating queue [{}]", dbName);
        channel.queueDeclare(getDbName(), true, false, false, null);
        // We purge the queue in case something is left over from a previous run
        logger.info(" -> Purging [{}] queue", dbName);
        channel.queuePurge(getDbName());
        logger.info(" -> Put [{}] messages with [{}] documents each = [{}] docs", numMessages, numDocsPerMessage, numMessages * numDocsPerMessage);
        final Set<String> removed = new HashSet<String>();
        int nbUpdated = 0;
        for (int i = 0; i < numMessages; i++) {
            StringBuffer message = new StringBuffer();
            for (int j = 0; j < numDocsPerMessage; j++) {
                if (logger.isTraceEnabled()) {
                    logger.trace(" -> Indexing document [{}] - [{}][{}]", i + "_" + j, i, j);
                }
                message.append("{ \"index\" : { \"_index\" : \"" + dbName + "\", \"_type\" : \"typex\", \"_id\" : \"" + i + "_" + j + "\" } }\n");
                message.append("{ \"field\" : \"" + i + "_" + j + "\",\"numeric\" : " + i * j + " }\n");
                // Sometimes we update a document
                if (update && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    // We can only update it if it has not been removed :)
                    if (!removed.contains(id)) {
                        logger.debug(" -> Updating document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"update\" : { \"_index\" : \"" + dbName + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        message.append("{ \"doc\": { \"foo\" : \"bar\", \"field2\" : \"" + i + "_" + j + "\" }}\n");
                        nbUpdated++;
                    }
                }
                // Sometimes we delete a document
                if (delete && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    if (!removed.contains(id)) {
                        logger.debug(" -> Removing document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"delete\" : { \"_index\" : \"" + dbName + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        removed.add(id);
                    }
                }
            }
            channel.basicPublish("", dbName, null, message.toString().getBytes(StandardCharsets.UTF_8));
        }
        logger.info(" -> We removed [{}] docs and updated [{}] docs", removed.size(), nbUpdated);
        if (injectorHook != null) {
            logger.info(" -> Injecting extra data");
            injectorHook.inject();
        }
logger.info(" --> create river");
IndexResponse indexResponse = index("_river", dbName, "_meta", river);
assertTrue(indexResponse.isCreated());
logger.info("--> checking that river [{}] was created", dbName);
assertThat(awaitBusy(new Predicate<Object>() {
public boolean apply(Object obj) {
GetResponse response = client().prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
return response.isExists();
}
}, 5, TimeUnit.SECONDS), equalTo(true));
// Check that docs are still processed by the river
logger.info(" --> waiting for expected number of docs: [{}]", numDocsPerMessage * numMessages - removed.size());
assertThat(awaitBusy(new Predicate<Object>() {
public boolean apply(Object obj) {
try {
refresh();
int expected = numDocsPerMessage * numMessages - removed.size();
CountResponse response = client().prepareCount(dbName).get();
logger.debug(" -> got {} docs, expected {}", response.getCount(), expected);
return response.getCount() == expected;
} catch (IndexMissingException e) {
return false;
}
}
}, 20, TimeUnit.SECONDS), equalTo(true));
    } finally {
        if (channel != null && channel.isOpen()) {
            channel.close();
        }
        if (connection != null && connection.isOpen()) {
            connection.close();
        }
        // Deletes the river
        GetResponse response = client().prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
        if (response.isExists()) {
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_meta").get();
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
        }
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                GetResponse response = client().prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
                return response.isExists();
            }
        }, 5, TimeUnit.SECONDS), equalTo(false));
    }
}
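Each message published to the queue is a standard Elasticsearch bulk body: newline-separated pairs of action metadata and document source, which the river forwards to the bulk API. A sketch of what a single two-document message looks like (the index name "test_db" is illustrative, matching i == 0 and numDocsPerMessage == 2 in the loop above):

// Sketch only: reproduces the bulk payload shape the loop above builds.
StringBuilder body = new StringBuilder();
body.append("{ \"index\" : { \"_index\" : \"test_db\", \"_type\" : \"typex\", \"_id\" : \"0_0\" } }\n");
body.append("{ \"field\" : \"0_0\",\"numeric\" : 0 }\n");
body.append("{ \"index\" : { \"_index\" : \"test_db\", \"_type\" : \"typex\", \"_id\" : \"0_1\" } }\n");
body.append("{ \"field\" : \"0_1\",\"numeric\" : 0 }\n");
// The river consumes this body from RabbitMQ and submits it as one bulk request.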
use of org.elasticsearch.indices.IndexMissingException in project elasticsearch-river-couchdb by elastic.
the class CouchdbRiverIntegrationTest method launchTest.
private void launchTest(XContentBuilder river, final Integer numDocs, InjectorHook injectorHook) throws IOException, InterruptedException {
    logger.info(" -> Checking couchdb running");
    CouchDBClient.checkCouchDbRunning();
    logger.info(" -> Creating test database [{}]", getDbName());
    CouchDBClient.dropAndCreateTestDatabase(getDbName());
    logger.info(" -> Put [{}] documents", numDocs);
    for (int i = 0; i < numDocs; i++) {
        CouchDBClient.putDocument(getDbName(), "" + i, "foo", "bar", "content", "" + i);
    }
    logger.info(" -> Put [{}] documents done", numDocs);
    if (injectorHook != null) {
        logger.info(" -> Injecting extra data");
        injectorHook.inject();
    }
    logger.info(" -> Create river");
    try {
        createIndex(getDbName());
    } catch (IndexAlreadyExistsException e) {
        // No worries. We already created the index before.
    }
    index("_river", getDbName(), "_meta", river);
    logger.info(" -> Wait for some docs");
    assertThat(awaitBusy(new Predicate<Object>() {
        public boolean apply(Object obj) {
            try {
                refresh();
                SearchResponse response = client().prepareSearch(getDbName()).get();
                logger.info(" -> got {} docs in {} index", response.getHits().totalHits(), getDbName());
                return response.getHits().totalHits() == numDocs;
            } catch (IndexMissingException e) {
                return false;
            }
        }
    }, 1, TimeUnit.MINUTES), equalTo(true));
}
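All three tests lean on awaitBusy to poll until the cluster reaches the expected state instead of sleeping for a fixed interval. A rough sketch of that polling pattern, as a hypothetical stand-in for the test framework's helper (not the actual implementation), assuming the same Predicate type the tests use:

// Retry the predicate with an increasing back-off until it returns true
// or the timeout elapses; return the predicate's final result.
static boolean pollUntil(Predicate<Object> predicate, long timeout, TimeUnit unit) throws InterruptedException {
    long deadline = System.nanoTime() + unit.toNanos(timeout);
    long sleepMillis = 10;
    while (System.nanoTime() < deadline) {
        if (predicate.apply(null)) {
            return true;
        }
        Thread.sleep(sleepMillis);
        sleepMillis = Math.min(sleepMillis * 2, 500);
    }
    return predicate.apply(null);
}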
use of org.elasticsearch.indices.IndexMissingException in project elasticsearch-jetty by sonian.
the class ESLoginService method loadUser.
@Override
public UserIdentity loadUser(String user) {
    Log.debug("attempting to load user [{}]", user);
    try {
        GetResponse response = client.prepareGet(authIndex, authType, user).setFields(passwordField, rolesField).execute().actionGet();
        if (response.isExists()) {
            Log.debug("user [{}] exists; looking for credentials...", user);
            Credential credential = null;
            GetField passwordGetField = response.getField(passwordField);
            if (passwordGetField != null) {
                Log.debug("user [{}] using password auth", user);
                credential = Credential.getCredential((String) passwordGetField.getValue());
            }
            String[] roles = getStringValues(response.getField(rolesField));
            return putUser(user, credential, roles);
        }
    } catch (IndexMissingException e) {
        Log.warn("no auth index [{}]", authIndex);
    } catch (Exception e) {
        Log.warn("error finding user [" + user + "] in [" + authIndex + "]", e);
    }
    return null;
}
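getStringValues is a helper of the class that is not shown in this snippet. A plausible sketch of it, assuming the pre-2.0 GetField API where getValues() returns a List<Object>:

// Hypothetical reconstruction, not the project's actual code: tolerate a
// missing field and flatten a (possibly multi-valued) GetField into strings.
private static String[] getStringValues(GetField field) {
    if (field == null) {
        return new String[0];
    }
    List<Object> values = field.getValues();
    String[] result = new String[values.size()];
    for (int i = 0; i < values.size(); i++) {
        result[i] = String.valueOf(values.get(i));
    }
    return result;
}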