Use of annis.service.objects.MatchGroup in project ANNIS by korpling.
In the class AnnisRunner, the method doFind:
public void doFind(String annisQuery) {
  // run the query in "find" mode and print the resulting match group
  List<Match> matches = queryDao.find(analyzeQuery(annisQuery, "find"));
  MatchGroup group = new MatchGroup(matches);
  out.println(group.toString());
}
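The text written by out.println(group.toString()) uses the same line-per-match format that the exporters below read back with Match.parseFromString. A minimal round-trip sketch using only the MatchGroup and Match calls that already appear on this page; the helper class and method names are hypothetical, and the import path for Match is assumed to match MatchGroup's package:

import annis.service.objects.Match;
import annis.service.objects.MatchGroup;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.LinkedList;
import java.util.List;

public class MatchGroupRoundTrip {
  // Hypothetical helper: parse the line-per-match text produced by the find
  // service back into a MatchGroup.
  public static MatchGroup parse(String findOutput) throws IOException {
    List<Match> matches = new LinkedList<>();
    try (BufferedReader reader = new BufferedReader(new StringReader(findOutput))) {
      String line;
      while ((line = reader.readLine()) != null) {
        if (!line.isEmpty()) {
          matches.add(Match.parseFromString(line));
        }
      }
    }
    return new MatchGroup(matches);
  }
}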
Use of annis.service.objects.MatchGroup in project ANNIS by korpling.
In the class GeneralTextExporter, the method convertText:
@Override
public Exception convertText(String queryAnnisQL, int contextLeft, int contextRight, Set<String> corpora, List<String> keys, String argsAsString, boolean alignmc, WebResource annisResource, Writer out, EventBus eventBus, Map<String, CorpusConfig> corpusConfigs) {
  try {
    if (keys == null || keys.isEmpty()) {
      // no keys given: automatically derive them from the corpus annotations
      keys = new LinkedList<>();
      keys.add("tok");
      List<AnnisAttribute> attributes = new LinkedList<>();
      for (String corpus : corpora) {
        attributes.addAll(annisResource.path("corpora").path(urlPathEscape.escape(corpus)).path("annotations").queryParam("fetchvalues", "false").queryParam("onlymostfrequentvalues", "false").get(new AnnisAttributeListType()));
      }
      for (AnnisAttribute a : attributes) {
        if (a.getName() != null) {
          String[] namespaceAndName = a.getName().split(":", 2);
          if (namespaceAndName.length > 1) {
            keys.add(namespaceAndName[1]);
          } else {
            keys.add(namespaceAndName[0]);
          }
        }
      }
    }
    // parse the exporter arguments ("key=value" pairs separated by '&' or ';')
    Map<String, String> args = new HashMap<>();
    for (String s : argsAsString.split("&|;")) {
      String[] splitted = s.split("=", 2);
      String key = splitted[0];
      String val = "";
      if (splitted.length > 1) {
        val = splitted[1];
      }
      args.put(key, val);
    }
    int stepSize = 10;
    // 1. Get all the matches as Salt IDs
    InputStream matchStream = annisResource.path("search/find/").queryParam("q", Helper.encodeJersey(queryAnnisQL)).queryParam("corpora", StringUtils.join(corpora, ",")).accept(MediaType.TEXT_PLAIN_TYPE).get(InputStream.class);
    try (BufferedReader inReader = new BufferedReader(new InputStreamReader(matchStream, "UTF-8"))) {
      WebResource subgraphRes = annisResource.path("search/subgraph");
      MatchGroup currentMatches = new MatchGroup();
      String currentLine;
      int offset = 0;
      // 2. iterate over all matches and get the sub-graph for a group of matches
      while (!Thread.currentThread().isInterrupted() && (currentLine = inReader.readLine()) != null) {
        Match match = Match.parseFromString(currentLine);
        currentMatches.getMatches().add(match);
        if (currentMatches.getMatches().size() >= stepSize) {
          WebResource res = subgraphRes.queryParam("left", "" + contextLeft).queryParam("right", "" + contextRight);
          if (args.containsKey("segmentation")) {
            res = res.queryParam("segmentation", args.get("segmentation"));
          }
          SubgraphFilter filter = getSubgraphFilter();
          if (filter != null) {
            res = res.queryParam("filter", filter.name());
          }
          Stopwatch stopwatch = Stopwatch.createUnstarted();
          stopwatch.start();
          SaltProject p = res.post(SaltProject.class, currentMatches);
          stopwatch.stop();
          // the export was fast enough, so allow a larger batch next time
          if (stopwatch.elapsed(TimeUnit.MILLISECONDS) < 500 && stepSize < 50) {
            stepSize += 10;
          }
          convertText(LegacyGraphConverter.convertToResultSet(p), keys, args, out, offset - currentMatches.getMatches().size());
          currentMatches.getMatches().clear();
          if (eventBus != null) {
            eventBus.post(offset + 1);
          }
        }
        offset++;
      }
      if (Thread.interrupted()) {
        return new InterruptedException("Exporter job was interrupted");
      }
      // query the left-over matches
      if (!currentMatches.getMatches().isEmpty()) {
        WebResource res = subgraphRes.queryParam("left", "" + contextLeft).queryParam("right", "" + contextRight);
        if (args.containsKey("segmentation")) {
          res = res.queryParam("segmentation", args.get("segmentation"));
        }
        SubgraphFilter filter = getSubgraphFilter();
        if (filter != null) {
          res = res.queryParam("filter", filter.name());
        }
        SaltProject p = res.post(SaltProject.class, currentMatches);
        convertText(LegacyGraphConverter.convertToResultSet(p), keys, args, out, offset - currentMatches.getMatches().size() - 1);
      }
      offset = 0;
    }
    out.append("\n");
    out.append("\n");
    out.append("finished");
    return null;
  } catch (AnnisQLSemanticsException | AnnisQLSyntaxException | AnnisCorpusAccessException | UniformInterfaceException | IOException ex) {
    return ex;
  }
}
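Both exporters on this page repeat the same argument parsing before running the query. A self-contained sketch of just that step, mirroring the loop above and assuming nothing beyond the JDK; the class and method names are hypothetical:

import java.util.HashMap;
import java.util.Map;

public class ExporterArgs {
  // Hypothetical helper: entries are separated by '&' or ';', keys and values
  // by the first '='; a missing value becomes the empty string.
  public static Map<String, String> parse(String argsAsString) {
    Map<String, String> args = new HashMap<>();
    if (argsAsString == null || argsAsString.isEmpty()) {
      return args;
    }
    for (String s : argsAsString.split("&|;")) {
      String[] splitted = s.split("=", 2);
      args.put(splitted[0], splitted.length > 1 ? splitted[1] : "");
    }
    return args;
  }
}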
Use of annis.service.objects.MatchGroup in project ANNIS by korpling.
In the class SaltBasedExporter, the method convertText:
@Override
public Exception convertText(String queryAnnisQL, int contextLeft, int contextRight, Set<String> corpora, List<String> keys, String argsAsString, boolean alignmc, WebResource annisResource, Writer out, EventBus eventBus, Map<String, CorpusConfig> corpusConfigs) {
  CacheManager cacheManager = CacheManager.create();
  try {
    Cache cache = cacheManager.getCache("saltProjectsCache");
    if (keys == null || keys.isEmpty()) {
      // no keys given: automatically derive them from the corpus annotations
      keys = new LinkedList<>();
      keys.add("tok");
      List<AnnisAttribute> attributes = new LinkedList<>();
      for (String corpus : corpora) {
        attributes.addAll(annisResource.path("corpora").path(urlPathEscape.escape(corpus)).path("annotations").queryParam("fetchvalues", "false").queryParam("onlymostfrequentvalues", "false").get(new AnnisAttributeListType()));
      }
      for (AnnisAttribute a : attributes) {
        if (a.getName() != null) {
          String[] namespaceAndName = a.getName().split(":", 2);
          if (namespaceAndName.length > 1) {
            keys.add(namespaceAndName[1]);
          } else {
            keys.add(namespaceAndName[0]);
          }
        }
      }
    }
    // parse the exporter arguments ("key=value" pairs separated by '&' or ';')
    Map<String, String> args = new HashMap<>();
    for (String s : argsAsString.split("&|;")) {
      String[] splitted = s.split("=", 2);
      String key = splitted[0];
      String val = "";
      if (splitted.length > 1) {
        val = splitted[1];
      }
      args.put(key, val);
    }
    int stepSize = 10;
    int pCounter = 1;
    Map<Integer, Integer> offsets = new HashMap<Integer, Integer>();
    // 1. Get all the matches as Salt IDs
    InputStream matchStream = annisResource.path("search/find/").queryParam("q", Helper.encodeJersey(queryAnnisQL)).queryParam("corpora", StringUtils.join(corpora, ",")).accept(MediaType.TEXT_PLAIN_TYPE).get(InputStream.class);
    // get the node count for the query
    WebResource resource = Helper.getAnnisWebResource();
    List<QueryNode> nodes = resource.path("query/parse/nodes").queryParam("q", Helper.encodeJersey(queryAnnisQL)).get(new GenericType<List<QueryNode>>() {
    });
    Integer nodeCount = nodes.size();
    try (BufferedReader inReader = new BufferedReader(new InputStreamReader(matchStream, "UTF-8"))) {
      WebResource subgraphRes = annisResource.path("search/subgraph");
      MatchGroup currentMatches = new MatchGroup();
      String currentLine;
      int offset = 1;
      // 2. iterate over all matches and get the sub-graph for a group of matches
      while (!Thread.currentThread().isInterrupted() && (currentLine = inReader.readLine()) != null) {
        Match match = Match.parseFromString(currentLine);
        currentMatches.getMatches().add(match);
        if (currentMatches.getMatches().size() >= stepSize) {
          WebResource res = subgraphRes.queryParam("left", "" + contextLeft).queryParam("right", "" + contextRight);
          if (args.containsKey("segmentation")) {
            res = res.queryParam("segmentation", args.get("segmentation"));
          }
          SubgraphFilter filter = getSubgraphFilter();
          if (filter != null) {
            res = res.queryParam("filter", filter.name());
          }
          Stopwatch stopwatch = Stopwatch.createStarted();
          SaltProject p = res.post(SaltProject.class, currentMatches);
          stopwatch.stop();
          // the export was fast enough, so allow a larger batch next time
          if (stopwatch.elapsed(TimeUnit.MILLISECONDS) < 500 && stepSize < 50) {
            stepSize += 10;
          }
          convertSaltProject(p, keys, args, alignmc, offset - currentMatches.getMatches().size(), corpusConfigs, out, nodeCount);
          offsets.put(pCounter, offset - currentMatches.getMatches().size());
          cache.put(new Element(pCounter++, p));
          currentMatches.getMatches().clear();
          if (eventBus != null) {
            eventBus.post(offset + 1);
          }
        }
        offset++;
      }
      if (Thread.interrupted()) {
        return new InterruptedException("Exporter job was interrupted");
      }
      // query the left-over matches
      if (!currentMatches.getMatches().isEmpty()) {
        WebResource res = subgraphRes.queryParam("left", "" + contextLeft).queryParam("right", "" + contextRight);
        if (args.containsKey("segmentation")) {
          res = res.queryParam("segmentation", args.get("segmentation"));
        }
        SubgraphFilter filter = getSubgraphFilter();
        if (filter != null) {
          res = res.queryParam("filter", filter.name());
        }
        SaltProject p = res.post(SaltProject.class, currentMatches);
        convertSaltProject(p, keys, args, alignmc, offset - currentMatches.getMatches().size() - 1, corpusConfigs, out, nodeCount);
        offsets.put(pCounter, offset - currentMatches.getMatches().size() - 1);
        cache.put(new Element(pCounter++, p));
      }
      offset = 1;
    }
    // build the list of ordered match numbers (ordering by occurrence in text)
    getOrderedMatchNumbers();
    @SuppressWarnings("unchecked")
    List<Integer> cacheKeys = cache.getKeys();
    List<Integer> listOfKeys = new ArrayList<Integer>();
    for (Integer key : cacheKeys) {
      listOfKeys.add(key);
    }
    Collections.sort(listOfKeys);
    for (Integer key : listOfKeys) {
      SaltProject p = (SaltProject) cache.get(key).getObjectValue();
      convertSaltProject(p, keys, args, alignmc, offsets.get(key), corpusConfigs, out, null);
    }
    out.append(System.lineSeparator());
    return null;
  } catch (AnnisQLSemanticsException | AnnisQLSyntaxException | AnnisCorpusAccessException | UniformInterfaceException | IOException | CacheException | IllegalStateException | ClassCastException ex) {
    return ex;
  } finally {
    cacheManager.removalAll();
    cacheManager.shutdown();
  }
}
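Both exporters grow the batch of matches per subgraph request whenever the previous request finished in under 500 ms, capped at a batch size of 50. The same idea in isolation, sketched under the assumption that the timed request is handed in as a plain Supplier; the class and method names are hypothetical:

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

public class AdaptiveBatchSize {
  // start with batches of 10 matches, never exceed 50
  private int stepSize = 10;

  public int current() {
    return stepSize;
  }

  // run the request, measure it, and allow a larger batch if it was fast enough
  public <T> T timeAndAdjust(Supplier<T> request) {
    Stopwatch stopwatch = Stopwatch.createStarted();
    T result = request.get();
    stopwatch.stop();
    if (stopwatch.elapsed(TimeUnit.MILLISECONDS) < 500 && stepSize < 50) {
      stepSize += 10;
    }
    return result;
  }
}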
Use of annis.service.objects.MatchGroup in project ANNIS by korpling.
In the class SingleResultFetchJob, the method call:
@Override
public SaltProject call() throws Exception {
  WebResource subgraphRes = Helper.getAnnisWebResource().path("query/search/subgraph");
  if (Thread.interrupted()) {
    return null;
  }
  // fetch the subgraph for exactly one match, wrapped in a MatchGroup
  List<Match> subList = new LinkedList<>();
  subList.add(match);
  SaltProject p = executeQuery(subgraphRes, new MatchGroup(subList), query.getLeftContext(), query.getRightContext(), query.getSegmentation(), SubgraphFilter.all);
  return p;
}
Use of annis.service.objects.MatchGroup in project ANNIS by korpling.
In the class ResultFetchJob, the method run:
@Override
public void run() {
  WebResource subgraphRes = Helper.getAnnisWebResource().path("query/search/subgraph");
  // holds the IDs of the matches
  MatchGroup result;
  try {
    if (Thread.interrupted()) {
      return;
    }
    // update the progress bar to give the user some feedback about the loading process
    ui.accessSynchronously(new Runnable() {
      @Override
      public void run() {
        resultPanel.showMatchSearchInProgress(query);
      }
    });
    // get the matches
    result = futureMatches.get();
    // get the subgraph for each match if the result is not empty
    if (result.getMatches().isEmpty()) {
      // check if the thread was interrupted
      if (Thread.interrupted()) {
        return;
      }
      // nothing found, so inform the user about this
      ui.access(new Runnable() {
        @Override
        public void run() {
          resultPanel.showNoResult();
        }
      });
    } else {
      if (Thread.interrupted()) {
        return;
      }
      // since ANNIS found something, inform the user that the subgraphs are being created
      ui.access(new Runnable() {
        @Override
        public void run() {
          resultPanel.showSubgraphSearchInProgress(query, 0.0f);
        }
      });
      // prepare fetching subgraphs
      final BlockingQueue<SaltProject> queue = new ArrayBlockingQueue<>(result.getMatches().size());
      int current = 0;
      final ArrayList<Match> matchList = new ArrayList<>(result.getMatches());
      for (Match m : matchList) {
        if (Thread.interrupted()) {
          return;
        }
        List<Match> subList = new LinkedList<>();
        subList.add(m);
        final SaltProject p = executeQuery(subgraphRes, new MatchGroup(subList), query.getLeftContext(), query.getRightContext(), query.getSegmentation(), SubgraphFilter.all);
        queue.put(p);
        log.debug("added match {} to queue", current + 1);
        if (current == 0) {
          ui.access(new Runnable() {
            @Override
            public void run() {
              resultPanel.setQueryResultQueue(queue, query, matchList);
            }
          });
        }
        if (Thread.interrupted()) {
          return;
        }
        current++;
      }
    }
    // end if no results
  } catch (InterruptedException ex) {
    // just return
  } catch (final ExecutionException root) {
    ui.accessSynchronously(new Runnable() {
      @Override
      public void run() {
        if (resultPanel != null && resultPanel.getPaging() != null) {
          PagingComponent paging = resultPanel.getPaging();
          Throwable cause = root.getCause();
          if (cause instanceof UniformInterfaceException) {
            UniformInterfaceException ex = (UniformInterfaceException) cause;
            if (ex.getResponse().getStatus() == 400) {
              List<AqlParseError> errors = ex.getResponse().getEntity(new GenericType<List<AqlParseError>>() {
              });
              String errMsg = Joiner.on(" | ").join(errors);
              paging.setInfo("parsing error: " + errMsg);
            } else if (ex.getResponse().getStatus() == 504) {
              paging.setInfo("Timeout: query execution took too long");
            } else if (ex.getResponse().getStatus() == 403) {
              paging.setInfo("Not authorized to query this corpus.");
            } else {
              paging.setInfo("unknown error: " + ex);
            }
          } else {
            log.error("Unexcepted ExecutionException cause", root);
          }
          resultPanel.showFinishedSubgraphSearch();
        }
      }
    });
  }
  // end catch
}
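ResultFetchJob hands each fetched SaltProject to the result panel through a bounded BlockingQueue. A minimal, self-contained sketch of that producer/consumer hand-off, with a plain String standing in for the SaltProject payload and all names chosen for illustration:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class QueueHandOff {
  public static void main(String[] args) throws InterruptedException {
    BlockingQueue<String> queue = new ArrayBlockingQueue<>(3);

    // producer: the fetch thread puts results into the bounded queue
    Thread producer = new Thread(() -> {
      try {
        for (int i = 0; i < 3; i++) {
          queue.put("result " + i); // blocks if the consumer falls behind
        }
      } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
      }
    });
    producer.start();

    // consumer: the UI side takes results in arrival order
    for (int i = 0; i < 3; i++) {
      System.out.println(queue.take()); // blocks until a result is available
    }
    producer.join();
  }
}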