Use of annis.exceptions.AnnisCorpusAccessException in project ANNIS by korpling.
The class GeneralTextExporter, method convertText:
@Override
public Exception convertText(String queryAnnisQL, int contextLeft, int contextRight,
        Set<String> corpora, List<String> keys, String argsAsString, boolean alignmc,
        WebResource annisResource, Writer out, EventBus eventBus,
        Map<String, CorpusConfig> corpusConfigs) {
    try {
        if (keys == null || keys.isEmpty()) {
            // auto set
            keys = new LinkedList<>();
            keys.add("tok");
            List<AnnisAttribute> attributes = new LinkedList<>();
            for (String corpus : corpora) {
                attributes.addAll(annisResource.path("corpora")
                        .path(urlPathEscape.escape(corpus))
                        .path("annotations")
                        .queryParam("fetchvalues", "false")
                        .queryParam("onlymostfrequentvalues", "false")
                        .get(new AnnisAttributeListType()));
            }
            for (AnnisAttribute a : attributes) {
                if (a.getName() != null) {
                    String[] namespaceAndName = a.getName().split(":", 2);
                    if (namespaceAndName.length > 1) {
                        keys.add(namespaceAndName[1]);
                    } else {
                        keys.add(namespaceAndName[0]);
                    }
                }
            }
        }
        Map<String, String> args = new HashMap<>();
        for (String s : argsAsString.split("&|;")) {
            String[] splitted = s.split("=", 2);
            String key = splitted[0];
            String val = "";
            if (splitted.length > 1) {
                val = splitted[1];
            }
            args.put(key, val);
        }
        int stepSize = 10;
        // 1. Get all the matches as Salt ID
        InputStream matchStream = annisResource.path("search/find/")
                .queryParam("q", Helper.encodeJersey(queryAnnisQL))
                .queryParam("corpora", StringUtils.join(corpora, ","))
                .accept(MediaType.TEXT_PLAIN_TYPE)
                .get(InputStream.class);
        try (BufferedReader inReader = new BufferedReader(new InputStreamReader(matchStream, "UTF-8"))) {
            WebResource subgraphRes = annisResource.path("search/subgraph");
            MatchGroup currentMatches = new MatchGroup();
            String currentLine;
            int offset = 0;
            // 2. iterate over all matches and get the sub-graph for a group of matches
            while (!Thread.currentThread().isInterrupted()
                    && (currentLine = inReader.readLine()) != null) {
                Match match = Match.parseFromString(currentLine);
                currentMatches.getMatches().add(match);
                if (currentMatches.getMatches().size() >= stepSize) {
                    WebResource res = subgraphRes
                            .queryParam("left", "" + contextLeft)
                            .queryParam("right", "" + contextRight);
                    if (args.containsKey("segmentation")) {
                        res = res.queryParam("segmentation", args.get("segmentation"));
                    }
                    SubgraphFilter filter = getSubgraphFilter();
                    if (filter != null) {
                        res = res.queryParam("filter", filter.name());
                    }
                    Stopwatch stopwatch = Stopwatch.createUnstarted();
                    stopwatch.start();
                    SaltProject p = res.post(SaltProject.class, currentMatches);
                    stopwatch.stop();
                    // export was fast enough
                    if (stopwatch.elapsed(TimeUnit.MILLISECONDS) < 500 && stepSize < 50) {
                        stepSize += 10;
                    }
                    convertText(LegacyGraphConverter.convertToResultSet(p), keys, args, out,
                            offset - currentMatches.getMatches().size());
                    currentMatches.getMatches().clear();
                    if (eventBus != null) {
                        eventBus.post(offset + 1);
                    }
                }
                offset++;
            }
            if (Thread.interrupted()) {
                return new InterruptedException("Exporter job was interrupted");
            }
            // query the left over matches
            if (!currentMatches.getMatches().isEmpty()) {
                WebResource res = subgraphRes
                        .queryParam("left", "" + contextLeft)
                        .queryParam("right", "" + contextRight);
                if (args.containsKey("segmentation")) {
                    res = res.queryParam("segmentation", args.get("segmentation"));
                }
                SubgraphFilter filter = getSubgraphFilter();
                if (filter != null) {
                    res = res.queryParam("filter", filter.name());
                }
                SaltProject p = res.post(SaltProject.class, currentMatches);
                convertText(LegacyGraphConverter.convertToResultSet(p), keys, args, out,
                        offset - currentMatches.getMatches().size() - 1);
            }
            offset = 0;
        }
        out.append("\n");
        out.append("\n");
        out.append("finished");
        return null;
    } catch (AnnisQLSemanticsException | AnnisQLSyntaxException | AnnisCorpusAccessException
            | UniformInterfaceException | IOException ex) {
        return ex;
    }
}
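Note that this method reports failures by returning the caught exception instead of throwing it; a null return value means the export finished. The following is a minimal, hypothetical helper (not part of ANNIS) that a caller could use to turn the returned exception into a user-facing message. It only assumes the exception types named in the catch clause above and that the AQL exception classes live in the same annis.exceptions package as AnnisCorpusAccessException.

    import annis.exceptions.AnnisCorpusAccessException;
    import annis.exceptions.AnnisQLSemanticsException;
    import annis.exceptions.AnnisQLSyntaxException;

    // Hypothetical helper: interprets the Exception returned by convertText(...).
    // A null return value means the export finished successfully.
    public class ExportResultHandler {

        public static String describe(Exception ex) {
            if (ex == null) {
                return "export finished";
            } else if (ex instanceof AnnisCorpusAccessException) {
                // typically: the selected corpus does not exist or may not be accessed
                return "corpus access failed: " + ex.getMessage();
            } else if (ex instanceof AnnisQLSyntaxException
                    || ex instanceof AnnisQLSemanticsException) {
                return "invalid AQL query: " + ex.getMessage();
            } else if (ex instanceof InterruptedException) {
                return "export was cancelled";
            } else {
                return "export failed: " + ex.getMessage();
            }
        }
    }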
Use of annis.exceptions.AnnisCorpusAccessException in project ANNIS by korpling.
The class SaltBasedExporter, method convertText:
@Override
public Exception convertText(String queryAnnisQL, int contextLeft, int contextRight,
        Set<String> corpora, List<String> keys, String argsAsString, boolean alignmc,
        WebResource annisResource, Writer out, EventBus eventBus,
        Map<String, CorpusConfig> corpusConfigs) {
    CacheManager cacheManager = CacheManager.create();
    try {
        Cache cache = cacheManager.getCache("saltProjectsCache");
        if (keys == null || keys.isEmpty()) {
            // auto set
            keys = new LinkedList<>();
            keys.add("tok");
            List<AnnisAttribute> attributes = new LinkedList<>();
            for (String corpus : corpora) {
                attributes.addAll(annisResource.path("corpora")
                        .path(urlPathEscape.escape(corpus))
                        .path("annotations")
                        .queryParam("fetchvalues", "false")
                        .queryParam("onlymostfrequentvalues", "false")
                        .get(new AnnisAttributeListType()));
            }
            for (AnnisAttribute a : attributes) {
                if (a.getName() != null) {
                    String[] namespaceAndName = a.getName().split(":", 2);
                    if (namespaceAndName.length > 1) {
                        keys.add(namespaceAndName[1]);
                    } else {
                        keys.add(namespaceAndName[0]);
                    }
                }
            }
        }
        Map<String, String> args = new HashMap<>();
        for (String s : argsAsString.split("&|;")) {
            String[] splitted = s.split("=", 2);
            String key = splitted[0];
            String val = "";
            if (splitted.length > 1) {
                val = splitted[1];
            }
            args.put(key, val);
        }
        int stepSize = 10;
        int pCounter = 1;
        Map<Integer, Integer> offsets = new HashMap<Integer, Integer>();
        // 1. Get all the matches as Salt ID
        InputStream matchStream = annisResource.path("search/find/")
                .queryParam("q", Helper.encodeJersey(queryAnnisQL))
                .queryParam("corpora", StringUtils.join(corpora, ","))
                .accept(MediaType.TEXT_PLAIN_TYPE)
                .get(InputStream.class);
        // get node count for the query
        WebResource resource = Helper.getAnnisWebResource();
        List<QueryNode> nodes = resource.path("query/parse/nodes")
                .queryParam("q", Helper.encodeJersey(queryAnnisQL))
                .get(new GenericType<List<QueryNode>>() {
                });
        Integer nodeCount = nodes.size();
        try (BufferedReader inReader = new BufferedReader(new InputStreamReader(matchStream, "UTF-8"))) {
            WebResource subgraphRes = annisResource.path("search/subgraph");
            MatchGroup currentMatches = new MatchGroup();
            String currentLine;
            int offset = 1;
            // 2. iterate over all matches and get the sub-graph for a group of matches
            while (!Thread.currentThread().isInterrupted()
                    && (currentLine = inReader.readLine()) != null) {
                Match match = Match.parseFromString(currentLine);
                currentMatches.getMatches().add(match);
                if (currentMatches.getMatches().size() >= stepSize) {
                    WebResource res = subgraphRes
                            .queryParam("left", "" + contextLeft)
                            .queryParam("right", "" + contextRight);
                    if (args.containsKey("segmentation")) {
                        res = res.queryParam("segmentation", args.get("segmentation"));
                    }
                    SubgraphFilter filter = getSubgraphFilter();
                    if (filter != null) {
                        res = res.queryParam("filter", filter.name());
                    }
                    Stopwatch stopwatch = Stopwatch.createStarted();
                    SaltProject p = res.post(SaltProject.class, currentMatches);
                    stopwatch.stop();
                    // export was fast enough
                    if (stopwatch.elapsed(TimeUnit.MILLISECONDS) < 500 && stepSize < 50) {
                        stepSize += 10;
                    }
                    convertSaltProject(p, keys, args, alignmc,
                            offset - currentMatches.getMatches().size(), corpusConfigs, out, nodeCount);
                    offsets.put(pCounter, offset - currentMatches.getMatches().size());
                    cache.put(new Element(pCounter++, p));
                    currentMatches.getMatches().clear();
                    if (eventBus != null) {
                        eventBus.post(offset + 1);
                    }
                }
                offset++;
            }
            if (Thread.interrupted()) {
                return new InterruptedException("Exporter job was interrupted");
            }
            // query the left over matches
            if (!currentMatches.getMatches().isEmpty()) {
                WebResource res = subgraphRes
                        .queryParam("left", "" + contextLeft)
                        .queryParam("right", "" + contextRight);
                if (args.containsKey("segmentation")) {
                    res = res.queryParam("segmentation", args.get("segmentation"));
                }
                SubgraphFilter filter = getSubgraphFilter();
                if (filter != null) {
                    res = res.queryParam("filter", filter.name());
                }
                SaltProject p = res.post(SaltProject.class, currentMatches);
                convertSaltProject(p, keys, args, alignmc,
                        offset - currentMatches.getMatches().size() - 1, corpusConfigs, out, nodeCount);
                offsets.put(pCounter, offset - currentMatches.getMatches().size() - 1);
                cache.put(new Element(pCounter++, p));
            }
            offset = 1;
        }
        // build the list of ordered match numbers (ordering by occurrence in text)
        getOrderedMatchNumbers();
        @SuppressWarnings("unchecked")
        List<Integer> cacheKeys = cache.getKeys();
        List<Integer> listOfKeys = new ArrayList<Integer>();
        for (Integer key : cacheKeys) {
            listOfKeys.add(key);
        }
        Collections.sort(listOfKeys);
        for (Integer key : listOfKeys) {
            SaltProject p = (SaltProject) cache.get(key).getObjectValue();
            convertSaltProject(p, keys, args, alignmc, offsets.get(key), corpusConfigs, out, null);
        }
        out.append(System.lineSeparator());
        return null;
    } catch (AnnisQLSemanticsException | AnnisQLSyntaxException | AnnisCorpusAccessException
            | UniformInterfaceException | IOException | CacheException | IllegalStateException
            | ClassCastException ex) {
        return ex;
    } finally {
        cacheManager.removalAll();
        cacheManager.shutdown();
    }
}
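Unlike GeneralTextExporter, this variant stores each fetched SaltProject batch in an Ehcache cache under an increasing counter and then replays the cached batches in key order for a second conversion pass. The snippet below is a minimal, self-contained sketch of that two-pass caching pattern, assuming Ehcache 2.x; the cache name "batchCache" and the String demo data are hypothetical (SaltBasedExporter uses a "saltProjectsCache" configured for the application), and addCacheIfAbsent relies on Ehcache's default cache configuration.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    import net.sf.ehcache.Cache;
    import net.sf.ehcache.CacheManager;
    import net.sf.ehcache.Element;

    // Hypothetical illustration of the two-pass pattern above: batches are stored
    // under an increasing counter during the first pass and replayed in key order
    // during the second pass.
    public class TwoPassCacheDemo {

        public static void main(String[] args) {
            CacheManager cacheManager = CacheManager.create();
            try {
                // "batchCache" is a made-up name; it is created from Ehcache's
                // default cache configuration if it is not configured explicitly.
                cacheManager.addCacheIfAbsent("batchCache");
                Cache cache = cacheManager.getCache("batchCache");

                // first pass: store each batch under a running counter
                int pCounter = 1;
                for (String batch : new String[] { "batch-1", "batch-2", "batch-3" }) {
                    cache.put(new Element(pCounter++, batch));
                }

                // second pass: read the batches back in their original order
                @SuppressWarnings("unchecked")
                List<Integer> keys = new ArrayList<Integer>(cache.getKeys());
                Collections.sort(keys);
                for (Integer key : keys) {
                    String batch = (String) cache.get(key).getObjectValue();
                    System.out.println(key + " -> " + batch);
                }
            } finally {
                cacheManager.shutdown();
            }
        }
    }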