Use of org.alfresco.repo.search.impl.lucene.LuceneQueryParserException in project alfresco-repository by Alfresco.
Class SOLRAdminClient, method executeAction.
/* (non-Javadoc)
* @see org.alfresco.repo.search.impl.solr.SolrAdminClient#executeAction(org.alfresco.repo.search.impl.solr.SolrAdminClient.ACTION, java.util.Map)
*/
@Override
public JSONAPIResult executeAction(String core, JSONAPIResultFactory.ACTION action, Map<String, String> parameters) {
    // Resolve the SOLR mapping for the workspace store and reuse its HTTP client.
    StoreRef store = StoreRef.STORE_REF_WORKSPACE_SPACESSTORE;
    SolrStoreMappingWrapper mapping = SolrClientUtil.extractMapping(store, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
    HttpClient httpClient = mapping.getHttpClientAndBaseUrl().getFirst();
    // Build the admin/cores URL: action, caller-supplied parameters, shard information and optional core.
    StringBuilder url = new StringBuilder();
    url.append(baseUrl);
    if (!url.toString().endsWith("/")) {
        url.append("/");
    }
    url.append("admin/cores");
    URLCodec encoder = new URLCodec();
    url.append("?action=" + action);
    parameters.forEach((key, value) -> {
        try {
            url.append("&" + key + "=" + encoder.encode(value));
        } catch (EncoderException e) {
            throw new RuntimeException(e);
        }
    });
    url.append("&alfresco.shards=");
    if (mapping.isSharded()) {
        url.append(mapping.getShards());
    } else {
        String solrurl = httpClient.getHostConfiguration().getHostURL() + mapping.getHttpClientAndBaseUrl().getSecond();
        url.append(solrurl);
    }
    if (core != null) {
        url.append("&core=" + core);
    }
    try {
        return JSONAPIResultFactory.buildActionResult(action, getOperation(httpClient, url.toString()));
    } catch (IOException e) {
        throw new LuceneQueryParserException("action", e);
    }
}
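For orientation, a minimal caller could look like the sketch below. The client field, the "alfresco" core name and the STATUS action constant are assumptions chosen for illustration, not taken from the snippet above.

// Illustrative sketch only: core name and ACTION constant are assumed.
Map<String, String> params = new HashMap<>();
params.put("wt", "json");
JSONAPIResult status = solrAdminClient.executeAction(
        "alfresco",                         // target core, or null for all cores
        JSONAPIResultFactory.ACTION.STATUS, // hypothetical enum constant
        params);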
Use of org.alfresco.repo.search.impl.lucene.LuceneQueryParserException in project alfresco-repository by Alfresco.
Class SolrSQLHttpClientTest, method testExecuteQuery_connectException.
/**
* Check that an exception is thrown if the Insight Engine can't be reached.
*/
@Test
public void testExecuteQuery_connectException() throws Exception {
    // Replace the mock HTTP call method so it throws a ConnectException.
    doThrow(new ConnectException()).when(solrSQLHttpClient).postSolrQuery(any(HttpClient.class), anyString(), any(JSONObject.class), any(SolrJsonProcessor.class));
    // Call the method under test.
    try {
        solrSQLHttpClient.executeQuery(mockSearchParameters, LANGUAGE);
        fail("Expected exception to be thrown due to failed connection.");
    } catch (LuceneQueryParserException e) {
        assertTrue("Expected message to mention InsightEngine.", e.getMessage().contains("InsightEngine"));
    }
}
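The fixture that makes the stubbing above possible is not shown here; a plausible setup would be the sketch below, with the annotations and field names assumed rather than copied from the test class.

// Hypothetical fixture sketch: a Mockito spy of the client under test and a mock of the search parameters.
@Spy
@InjectMocks
private SolrSQLHttpClient solrSQLHttpClient;

@Mock
private SearchParameters mockSearchParameters;

@Before
public void setUp() {
    MockitoAnnotations.initMocks(this);
}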
Use of org.alfresco.repo.search.impl.lucene.LuceneQueryParserException in project alfresco-repository by Alfresco.
Class SolrQueryHTTPClient, method execute.
/**
 * Executes a GET request against a SOLR handler and returns the parsed JSON response.
 *
 * @param storeRef the store whose SOLR mapping supplies the HTTP client and base URL
 * @param handler the SOLR request handler path to invoke
 * @param params the query string parameters to URL-encode and append
 * @return the JSON response body returned by SOLR
 */
public JSONObject execute(StoreRef storeRef, String handler, HashMap<String, String> params) {
    try {
        SolrStoreMappingWrapper mapping = SolrClientUtil.extractMapping(storeRef, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
        URLCodec encoder = new URLCodec();
        StringBuilder url = new StringBuilder();
        Pair<HttpClient, String> httpClientAndBaseUrl = mapping.getHttpClientAndBaseUrl();
        HttpClient httpClient = httpClientAndBaseUrl.getFirst();
        for (String key : params.keySet()) {
            String value = params.get(key);
            if (url.length() == 0) {
                url.append(httpClientAndBaseUrl.getSecond());
                if (!handler.startsWith("/")) {
                    url.append("/");
                }
                url.append(handler);
                url.append("?");
                url.append(encoder.encode(key, "UTF-8"));
                url.append("=");
                url.append(encoder.encode(value, "UTF-8"));
            } else {
                url.append("&");
                url.append(encoder.encode(key, "UTF-8"));
                url.append("=");
                url.append(encoder.encode(value, "UTF-8"));
            }
        }
        if (mapping.isSharded()) {
            url.append("&shards=");
            url.append(mapping.getShards());
        }
        // PostMethod post = new PostMethod(url.toString());
        GetMethod get = new GetMethod(url.toString());
        try {
            httpClient.executeMethod(get);
            // Follow a single redirect if SOLR responds with 301/302.
            if (get.getStatusCode() == HttpStatus.SC_MOVED_PERMANENTLY || get.getStatusCode() == HttpStatus.SC_MOVED_TEMPORARILY) {
                Header locationHeader = get.getResponseHeader("location");
                if (locationHeader != null) {
                    String redirectLocation = locationHeader.getValue();
                    get.setURI(new URI(redirectLocation, true));
                    httpClient.executeMethod(get);
                }
            }
            if (get.getStatusCode() != HttpServletResponse.SC_OK) {
                throw new LuceneQueryParserException("Request failed " + get.getStatusCode() + " " + url.toString());
            }
            Reader reader = new BufferedReader(new InputStreamReader(get.getResponseBodyAsStream()));
            // TODO - replace with streaming-based solution e.g. SimpleJSON ContentHandler
            JSONObject json = new JSONObject(new JSONTokener(reader));
            return json;
        } finally {
            get.releaseConnection();
        }
    } catch (UnsupportedEncodingException e) {
        throw new LuceneQueryParserException("", e);
    } catch (HttpException e) {
        throw new LuceneQueryParserException("", e);
    } catch (IOException e) {
        throw new LuceneQueryParserException("", e);
    } catch (JSONException e) {
        throw new LuceneQueryParserException("", e);
    }
}
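As a rough usage sketch, the call shape is shown below; the client field, the "/afts" handler path and the parameter names are assumptions chosen for illustration, not taken from the method above.

// Illustrative only: handler path and parameters are assumed.
HashMap<String, String> params = new HashMap<>();
params.put("q", "cm:name:report*");
params.put("wt", "json");
JSONObject response = solrQueryHTTPClient.execute(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE, "/afts", params);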
Use of org.alfresco.repo.search.impl.lucene.LuceneQueryParserException in project alfresco-repository by Alfresco.
Class SolrQueryHTTPClient, method executeStatsQuery.
/**
* Executes a solr query for statistics
*
* @param searchParameters StatsParameters
* @return SolrStatsResult
*/
public SolrStatsResult executeStatsQuery(final StatsParameters searchParameters) {
    if (repositoryState.isBootstrapping()) {
        throw new AlfrescoRuntimeException("SOLR stats queries can not be executed while the repository is bootstrapping");
    }
    try {
        StoreRef store = SolrClientUtil.extractStoreRef(searchParameters);
        SolrStoreMappingWrapper mapping = SolrClientUtil.extractMapping(store, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
        Locale locale = SolrClientUtil.extractLocale(searchParameters);
        Pair<HttpClient, String> httpClientAndBaseUrl = mapping.getHttpClientAndBaseUrl();
        HttpClient httpClient = httpClientAndBaseUrl.getFirst();
        String url = buildStatsUrl(searchParameters, httpClientAndBaseUrl.getSecond(), locale, mapping);
        JSONObject body = buildStatsBody(searchParameters, tenantService.getCurrentUserDomain(), locale);
        if (httpClient == null) {
            throw new AlfrescoRuntimeException("No http client for store " + store.toString());
        }
        return (SolrStatsResult) postSolrQuery(httpClient, url, body, json -> {
            return new SolrStatsResult(json, searchParameters.isDateSearch());
        });
    } catch (UnsupportedEncodingException e) {
        throw new LuceneQueryParserException("stats", e);
    } catch (HttpException e) {
        throw new LuceneQueryParserException("stats", e);
    } catch (IOException e) {
        throw new LuceneQueryParserException("stats", e);
    } catch (JSONException e) {
        throw new LuceneQueryParserException("stats", e);
    }
}
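A caller sketch follows; the StatsParameters constructor signature and the language constant are assumptions made only to show how the method might be invoked.

// Illustrative only: the StatsParameters constructor shown and the language constant are assumed.
StatsParameters statsParams = new StatsParameters(SearchService.LANGUAGE_SOLR_FTS_ALFRESCO, "TYPE:\"cm:content\"");
SolrStatsResult stats = solrQueryHTTPClient.executeStatsQuery(statsParams);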
Use of org.alfresco.repo.search.impl.lucene.LuceneQueryParserException in project alfresco-repository by Alfresco.
Class SolrQueryHTTPClient, method executeQuery.
public ResultSet executeQuery(final SearchParameters searchParameters, String language) {
    if (repositoryState.isBootstrapping()) {
        throw new AlfrescoRuntimeException("SOLR queries can not be executed while the repository is bootstrapping");
    }
    try {
        StoreRef store = SolrClientUtil.extractStoreRef(searchParameters);
        SolrStoreMappingWrapper mapping = SolrClientUtil.extractMapping(store, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
        Pair<HttpClient, String> httpClientAndBaseUrl = mapping.getHttpClientAndBaseUrl();
        HttpClient httpClient = httpClientAndBaseUrl.getFirst();
        URLCodec encoder = new URLCodec();
        StringBuilder url = new StringBuilder();
        url.append(httpClientAndBaseUrl.getSecond());
        String languageUrlFragment = SolrClientUtil.extractLanguageFragment(languageMappings, language);
        if (!url.toString().endsWith("/")) {
            url.append("/");
        }
        url.append(languageUrlFragment);
        // Send the query in JSON only
        // url.append("?q=");
        // url.append(encoder.encode(searchParameters.getQuery(), "UTF-8"));
        url.append("?wt=").append(encoder.encode("json", "UTF-8"));
        url.append("&fl=").append(encoder.encode("DBID,score", "UTF-8"));
        // Emulate old limiting behaviour and metadata
        final LimitBy limitBy;
        int maxResults = -1;
        if (searchParameters.getMaxItems() >= 0) {
            maxResults = searchParameters.getMaxItems();
            limitBy = LimitBy.FINAL_SIZE;
        } else if (searchParameters.getLimitBy() == LimitBy.FINAL_SIZE && searchParameters.getLimit() >= 0) {
            maxResults = searchParameters.getLimit();
            limitBy = LimitBy.FINAL_SIZE;
        } else {
            maxResults = searchParameters.getMaxPermissionChecks();
            if (maxResults < 0) {
                maxResults = maximumResultsFromUnlimitedQuery;
            }
            limitBy = LimitBy.NUMBER_OF_PERMISSION_EVALUATIONS;
        }
        url.append("&rows=").append(String.valueOf(maxResults));
        if ((searchParameters.getStores().size() > 1) || (mapping.isSharded())) {
            boolean requiresSeparator = false;
            url.append("&shards=");
            for (StoreRef storeRef : searchParameters.getStores()) {
                SolrStoreMappingWrapper storeMapping = SolrClientUtil.extractMapping(storeRef, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
                if (requiresSeparator) {
                    url.append(',');
                } else {
                    requiresSeparator = true;
                }
                url.append(storeMapping.getShards());
            }
        }
        buildUrlParameters(searchParameters, mapping.isSharded(), encoder, url);
        final String searchTerm = searchParameters.getSearchTerm();
        String spellCheckQueryStr = null;
        if (searchTerm != null && searchParameters.isSpellCheck()) {
            StringBuilder builder = new StringBuilder();
            builder.append("&spellcheck.q=").append(encoder.encode(searchTerm, "UTF-8"));
            builder.append("&spellcheck=").append(encoder.encode("true", "UTF-8"));
            spellCheckQueryStr = builder.toString();
            url.append(spellCheckQueryStr);
        }
        JSONObject body = new JSONObject();
        body.put("query", searchParameters.getQuery());
        // Authorities go over as is - and tenant mangling and query building takes place on the SOLR side
        Set<String> allAuthorisations = permissionService.getAuthorisations();
        boolean includeGroups = includeGroupsForRoleAdmin ? true : !allAuthorisations.contains(PermissionService.ADMINISTRATOR_AUTHORITY);
        JSONArray authorities = new JSONArray();
        for (String authority : allAuthorisations) {
            if (includeGroups) {
                authorities.put(authority);
            } else {
                if (AuthorityType.getAuthorityType(authority) != AuthorityType.GROUP) {
                    authorities.put(authority);
                }
            }
        }
        body.put("authorities", authorities);
        body.put("anyDenyDenies", anyDenyDenies);
        JSONArray tenants = new JSONArray();
        tenants.put(tenantService.getCurrentUserDomain());
        body.put("tenants", tenants);
        JSONArray locales = new JSONArray();
        for (Locale currentLocale : searchParameters.getLocales()) {
            locales.put(DefaultTypeConverter.INSTANCE.convert(String.class, currentLocale));
        }
        if (locales.length() == 0) {
            locales.put(I18NUtil.getLocale());
        }
        body.put("locales", locales);
        JSONArray templates = new JSONArray();
        for (String templateName : searchParameters.getQueryTemplates().keySet()) {
            JSONObject template = new JSONObject();
            template.put("name", templateName);
            template.put("template", searchParameters.getQueryTemplates().get(templateName));
            templates.put(template);
        }
        body.put("templates", templates);
        JSONArray allAttributes = new JSONArray();
        for (String attribute : searchParameters.getAllAttributes()) {
            allAttributes.put(attribute);
        }
        body.put("allAttributes", allAttributes);
        body.put("defaultFTSOperator", searchParameters.getDefaultFTSOperator());
        body.put("defaultFTSFieldOperator", searchParameters.getDefaultFTSFieldOperator());
        body.put("queryConsistency", searchParameters.getQueryConsistency());
        if (searchParameters.getMlAnalaysisMode() != null) {
            body.put("mlAnalaysisMode", searchParameters.getMlAnalaysisMode().toString());
        }
        body.put("defaultNamespace", searchParameters.getNamespace());
        JSONArray textAttributes = new JSONArray();
        for (String attribute : searchParameters.getTextAttributes()) {
            textAttributes.put(attribute);
        }
        body.put("textAttributes", textAttributes);
        // just needed for the final parameter
        final int maximumResults = maxResults;
        return (ResultSet) postSolrQuery(httpClient, url.toString(), body, json -> {
            return new SolrJSONResultSet(json, searchParameters, nodeService, nodeDAO, limitBy, maximumResults);
        }, spellCheckQueryStr);
    } catch (UnsupportedEncodingException e) {
        throw new LuceneQueryParserException("", e);
    } catch (HttpException e) {
        throw new LuceneQueryParserException("", e);
    } catch (IOException e) {
        throw new LuceneQueryParserException("", e);
    } catch (JSONException e) {
        throw new LuceneQueryParserException("", e);
    }
}
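To put the method in context, a minimal caller could look like the sketch below. The client field and the "afts" language key are assumptions; the real key is whatever the languageMappings configuration registers.

// Illustrative sketch: the language key and the client instance are assumed.
SearchParameters sp = new SearchParameters();
sp.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
sp.setQuery("cm:name:report*");
sp.setMaxItems(50);
ResultSet results = solrQueryHTTPClient.executeQuery(sp, "afts");
try {
    System.out.println("Hits: " + results.length());
} finally {
    results.close();
}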