Use of org.eclipse.rdf4j.query.resultio.QueryResultParseException in project rdf4j by eclipse.
The class BooleanTextParser, method parse.
@Override
public synchronized boolean parse(InputStream in) throws IOException, QueryResultParseException {
	Reader reader = new InputStreamReader(in, Charset.forName("US-ASCII"));
	String value = IOUtil.readString(reader, 16);
	value = value.trim();

	boolean result = false;
	if (value.equalsIgnoreCase("true")) {
		result = true;
	} else if (value.equalsIgnoreCase("false")) {
		result = false;
	} else {
		throw new QueryResultParseException("Invalid value: " + value);
	}

	if (this.handler != null) {
		try {
			this.handler.handleBoolean(result);
		} catch (QueryResultHandlerException e) {
			if (e.getCause() != null && e.getCause() instanceof IOException) {
				throw (IOException) e.getCause();
			} else {
				throw new QueryResultParseException("Found an issue with the query result handler", e);
			}
		}
	}

	return result;
}
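For orientation, here is a minimal usage sketch of the parse method above (illustrative only, not taken from the rdf4j sources). It assumes the public no-argument BooleanTextParser constructor and the package location shown in the import; the input is a plain-text "true" document in US-ASCII, which is exactly what parse() reads.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.eclipse.rdf4j.query.resultio.QueryResultParseException;
import org.eclipse.rdf4j.query.resultio.text.BooleanTextParser; // package assumed

public class BooleanTextParseSketch {

	public static void main(String[] args) throws IOException, QueryResultParseException {
		// A SPARQL boolean result in the plain-text format is just "true" or "false".
		InputStream in = new ByteArrayInputStream("true".getBytes(StandardCharsets.US_ASCII));

		BooleanTextParser parser = new BooleanTextParser(); // no-arg constructor assumed
		// No handler is registered here, so only the return value is used; any
		// document other than "true"/"false" makes parse() throw QueryResultParseException.
		boolean result = parser.parse(in);
		System.out.println("ASK result: " + result);
	}
}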
Use of org.eclipse.rdf4j.query.resultio.QueryResultParseException in project rdf4j by eclipse.
The class AbstractSPARQLJSONParser, method parseQueryResultInternal.
protected boolean parseQueryResultInternal(InputStream in, boolean attemptParseBoolean, boolean attemptParseTuple)
		throws IOException, QueryResultParseException, QueryResultHandlerException {
	if (!attemptParseBoolean && !attemptParseTuple) {
		throw new IllegalArgumentException(
				"Internal error: Did not specify whether to parse as either boolean and/or tuple");
	}

	JsonParser jp = JSON_FACTORY.createParser(in);
	boolean result = false;

	if (jp.nextToken() != JsonToken.START_OBJECT) {
		throw new QueryResultParseException("Expected SPARQL Results JSON document to start with an Object",
				jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
	}

	List<String> varsList = new ArrayList<String>();
	boolean varsFound = false;
	Set<BindingSet> bindings = new HashSet<BindingSet>();

	while (jp.nextToken() != JsonToken.END_OBJECT) {
		final String baseStr = jp.getCurrentName();

		if (baseStr.equals(HEAD)) {
			if (jp.nextToken() != JsonToken.START_OBJECT) {
				throw new QueryResultParseException("Did not find object under " + baseStr + " field",
						jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
			}
			while (jp.nextToken() != JsonToken.END_OBJECT) {
				final String headStr = jp.getCurrentName();
				if (headStr.equals(VARS)) {
					if (!attemptParseTuple) {
						throw new QueryResultParseException(
								"Found tuple results variables when attempting to parse SPARQL Results JSON to boolean result");
					}
					if (jp.nextToken() != JsonToken.START_ARRAY) {
						throw new QueryResultParseException("Expected variable labels to be an array",
								jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
					}
					while (jp.nextToken() != JsonToken.END_ARRAY) {
						varsList.add(jp.getText());
					}
					if (this.handler != null) {
						handler.startQueryResult(varsList);
					}
					varsFound = true;
					// Any solutions that were buffered before the variables were
					// seen can be flushed to the handler now.
					if (!bindings.isEmpty() && this.handler != null) {
						for (BindingSet nextBinding : bindings) {
							handler.handleSolution(nextBinding);
						}
						bindings.clear();
					}
				} else if (headStr.equals(LINK)) {
					List<String> linksList = new ArrayList<String>();
					if (jp.nextToken() != JsonToken.START_ARRAY) {
						throw new QueryResultParseException("Expected links to be an array",
								jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
					}
					while (jp.nextToken() != JsonToken.END_ARRAY) {
						linksList.add(jp.getText());
					}
					if (this.handler != null) {
						handler.handleLinks(linksList);
					}
				} else {
					throw new QueryResultParseException("Found unexpected object in head field: " + headStr,
							jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
				}
			}
		} else if (baseStr.equals(RESULTS)) {
			if (!attemptParseTuple) {
				throw new QueryResultParseException(
						"Found tuple results bindings when attempting to parse SPARQL Results JSON to boolean result");
			}
			if (jp.nextToken() != JsonToken.START_OBJECT) {
				throw new QueryResultParseException("Found unexpected token in results object: " + jp.getCurrentName(),
						jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
			}
			while (jp.nextToken() != JsonToken.END_OBJECT) {
				if (jp.getCurrentName().equals(BINDINGS)) {
					if (jp.nextToken() != JsonToken.START_ARRAY) {
						throw new QueryResultParseException("Found unexpected token in bindings object",
								jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
					}
					while (jp.nextToken() != JsonToken.END_ARRAY) {
						MapBindingSet nextBindingSet = new MapBindingSet();
						if (jp.getCurrentToken() != JsonToken.START_OBJECT) {
							throw new QueryResultParseException("Did not find object in bindings array: " + jp.getCurrentName(),
									jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
						}
						while (jp.nextToken() != JsonToken.END_OBJECT) {
							if (jp.getCurrentToken() != JsonToken.FIELD_NAME) {
								throw new QueryResultParseException("Did not find binding name",
										jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
							}
							final String bindingStr = jp.getCurrentName();
							if (jp.nextToken() != JsonToken.START_OBJECT) {
								throw new QueryResultParseException("Did not find object for binding value",
										jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
							}
							String lang = null;
							String type = null;
							String datatype = null;
							String value = null;
							while (jp.nextToken() != JsonToken.END_OBJECT) {
								if (jp.getCurrentToken() != JsonToken.FIELD_NAME) {
									throw new QueryResultParseException(
											"Did not find value attribute under " + bindingStr + " field",
											jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
								}
								String fieldName = jp.getCurrentName();
								// move to the value token
								jp.nextToken();
								// set the appropriate state variable
								if (TYPE.equals(fieldName)) {
									type = jp.getText();
								} else if (XMLLANG.equals(fieldName)) {
									lang = jp.getText();
								} else if (DATATYPE.equals(fieldName)) {
									datatype = jp.getText();
								} else if (VALUE.equals(fieldName)) {
									value = jp.getText();
								} else {
									throw new QueryResultParseException("Unexpected field name: " + fieldName,
											jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
								}
							}
							nextBindingSet.addBinding(bindingStr, parseValue(type, value, lang, datatype));
						}
						if (!varsFound) {
							// Buffer the solution to fit the QueryResultHandler contract:
							// startQueryResult must always be called before handleSolution,
							// and the variables have not been seen yet.
							bindings.add(nextBindingSet);
						} else if (handler != null) {
							handler.handleSolution(nextBindingSet);
						}
					}
					if (handler != null) {
						handler.endQueryResult();
					}
				} else if (jp.getCurrentName().equals(DISTINCT) || jp.getCurrentName().equals(ORDERED)) {
					// The "distinct" and "ordered" fields are not used by this parser;
					// skip their values.
					jp.nextToken();
				} else {
					throw new QueryResultParseException("Found unexpected field in results: " + jp.getCurrentName(),
							jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
				}
			}
		} else if (baseStr.equals(BOOLEAN)) {
			if (!attemptParseBoolean) {
				throw new QueryResultParseException(
						"Found boolean results when attempting to parse SPARQL Results JSON to tuple results");
			}
			jp.nextToken();
			result = Boolean.parseBoolean(jp.getText());
			if (handler != null) {
				handler.handleBoolean(result);
			}
		} else {
			throw new QueryResultParseException("Found unexpected object in top level " + baseStr + " field",
					jp.getCurrentLocation().getLineNr(), jp.getCurrentLocation().getColumnNr());
		}
	}

	return result;
}
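A hedged sketch of how parseQueryResultInternal is normally reached from application code (illustrative, not part of the rdf4j sources): a tuple parser for the SPARQL Results JSON format is created through QueryResultIO, as in the tests below, and its public parseQueryResult method drives the logic above. The TupleQueryResultFormat.JSON constant and the QueryResultCollector.getBindingSets() accessor are assumed to be available in the rdf4j release used.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.resultio.QueryResultIO;
import org.eclipse.rdf4j.query.resultio.QueryResultParser;
import org.eclipse.rdf4j.query.resultio.TupleQueryResultFormat;
import org.eclipse.rdf4j.query.resultio.helpers.QueryResultCollector;

public class SparqlJsonParseSketch {

	public static void main(String[] args) throws Exception {
		// A minimal SPARQL Results JSON document: one variable, one solution.
		String json = "{\"head\":{\"vars\":[\"s\"]},"
				+ "\"results\":{\"bindings\":[{\"s\":{\"type\":\"uri\",\"value\":\"http://example.org/a\"}}]}}";

		QueryResultCollector collector = new QueryResultCollector();
		QueryResultParser parser = QueryResultIO.createTupleParser(TupleQueryResultFormat.JSON)
				.setQueryResultHandler(collector);

		// parseQueryResult ends up in parseQueryResultInternal with attemptParseTuple == true,
		// so the "head"/"vars" and "results"/"bindings" fields above are accepted.
		parser.parseQueryResult(new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)));

		List<BindingSet> solutions = collector.getBindingSets(); // accessor assumed
		System.out.println("Parsed " + solutions.size() + " solution(s)");
	}
}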
Use of org.eclipse.rdf4j.query.resultio.QueryResultParseException in project rdf4j by eclipse.
The class SPARQLXMLParserCustomTest, method testEntityExpansionUnrelatedSettings.
/**
 * Test with unrelated ParserConfig settings.
 *
 * @throws Exception
 */
@Test
public void testEntityExpansionUnrelatedSettings() throws Exception {
	ParserConfig config = new ParserConfig();
	QueryResultCollector handler = new QueryResultCollector();
	ParseErrorCollector errorCollector = new ParseErrorCollector();
	QueryResultParser aParser = QueryResultIO.createTupleParser(TupleQueryResultFormat.SPARQL)
			.setQueryResultHandler(handler)
			.setParserConfig(config)
			.setParseErrorListener(errorCollector);
	try {
		// this should trigger a SAX parse exception that will blow up at
		// the 64k entity limit rather than OOMing
		aParser.parseQueryResult(this.getClass().getResourceAsStream("/sparqlxml/bad-entity-expansion-limit.srx"));
		fail("Parser did not throw an exception");
	} catch (QueryResultParseException e) {
		// assertTrue(e.getMessage().contains(
		// "The parser has encountered more than \"64,000\" entity expansions in this document; this is the limit imposed by the "));
	}
	assertEquals(0, errorCollector.getWarnings().size());
	assertEquals(0, errorCollector.getErrors().size());
	assertEquals(1, errorCollector.getFatalErrors().size());
}
Use of org.eclipse.rdf4j.query.resultio.QueryResultParseException in project rdf4j by eclipse.
The class SPARQLXMLParserCustomTest, method testEntityExpansionDefaultSettings.
/**
 * Test with the default ParserConfig settings, i.e. setParserConfig is not called.
 *
 * @throws Exception
 */
@Test
public void testEntityExpansionDefaultSettings() throws Exception {
	QueryResultCollector handler = new QueryResultCollector();
	ParseErrorCollector errorCollector = new ParseErrorCollector();
	QueryResultParser aParser = QueryResultIO.createTupleParser(TupleQueryResultFormat.SPARQL)
			.setQueryResultHandler(handler)
			.setParseErrorListener(errorCollector);
	try {
		// this should trigger a SAX parse exception that will blow up at
		// the 64k entity limit rather than OOMing
		aParser.parseQueryResult(this.getClass().getResourceAsStream("/sparqlxml/bad-entity-expansion-limit.srx"));
		fail("Parser did not throw an exception");
	} catch (QueryResultParseException e) {
		// assertTrue(e.getMessage().contains(
		// "The parser has encountered more than \"64,000\" entity expansions in this document; this is the limit imposed by the "));
	}
	assertEquals(0, errorCollector.getWarnings().size());
	assertEquals(0, errorCollector.getErrors().size());
	assertEquals(1, errorCollector.getFatalErrors().size());
}
Use of org.eclipse.rdf4j.query.resultio.QueryResultParseException in project rdf4j by eclipse.
The class SPARQLXMLParserCustomTest, method testEntityExpansionSecureProcessing.
/**
 * Test with the Secure processing setting on.
 *
 * @throws Exception
 */
@Test
public void testEntityExpansionSecureProcessing() throws Exception {
	QueryResultCollector handler = new QueryResultCollector();
	ParseErrorCollector errorCollector = new ParseErrorCollector();
	QueryResultParser aParser = QueryResultIO.createTupleParser(TupleQueryResultFormat.SPARQL)
			.setQueryResultHandler(handler)
			.set(XMLParserSettings.SECURE_PROCESSING, true)
			.setParseErrorListener(errorCollector);
	try {
		// this should trigger a SAX parse exception that will blow up at
		// the 64k entity limit rather than OOMing
		aParser.parseQueryResult(this.getClass().getResourceAsStream("/sparqlxml/bad-entity-expansion-limit.srx"));
		fail("Parser did not throw an exception");
	} catch (QueryResultParseException e) {
		// assertTrue(e.getMessage().contains(
		// "The parser has encountered more than \"64,000\" entity expansions in this document; this is the limit imposed by the "));
	}
	assertEquals(0, errorCollector.getWarnings().size());
	assertEquals(0, errorCollector.getErrors().size());
	assertEquals(1, errorCollector.getFatalErrors().size());
}