Use of org.apache.jena.rdf.model.Statement in project webofneeds by researchstudio-sat.
In class GetProposalsTests, method condenseTestCaseIteratively:
private static void condenseTestCaseIteratively(String filename, String outputPath) throws Exception {
    RdfUtils.Pair<Dataset> inputAndExpectedOutput = loadDatasetPair(filename);
    try {
        if (!passesTest(inputAndExpectedOutput.getFirst(), inputAndExpectedOutput.getSecond())) {
            System.out.println("test does not pass, cannot condense: " + filename);
            return;
        }
    } catch (Exception e) {
        System.out.println("test throws an Exception, cannot condense: " + filename);
        return;
    }
    Dataset condensedDataset = inputAndExpectedOutput.getFirst();
    Dataset expectedOutput = inputAndExpectedOutput.getSecond();
    Iterator<String> graphNamesIt = condensedDataset.listNames();
    int deletedStatements = 0;
    while (graphNamesIt.hasNext()) {
        String graphName = graphNamesIt.next();
        System.out.println("trying to remove graph: " + graphName);
        Dataset backupDataset = RdfUtils.cloneDataset(condensedDataset);
        condensedDataset.removeNamedModel(graphName);
        if (!passesTest(condensedDataset, expectedOutput)) {
            System.out.println("cannot remove graph: " + graphName + ", trying individual triples");
            // the graph is needed as a whole: restore it and try removing individual triples
            condensedDataset = backupDataset;
            Model condensedModel = condensedDataset.getNamedModel(graphName);
            Model attemptedStatements = ModelFactory.createDefaultModel();
            boolean done = false;
            while (!done) {
                Model backupModel = RdfUtils.cloneModel(condensedModel);
                StmtIterator it = condensedModel.listStatements();
                done = true;
                while (it.hasNext()) {
                    Statement stmt = it.next();
                    if (attemptedStatements.contains(stmt)) {
                        System.out.println("attempted this before");
                        continue;
                    }
                    System.out.println("trying statement: " + stmt);
                    attemptedStatements.add(stmt);
                    // tentatively remove one statement per pass
                    it.remove();
                    deletedStatements++;
                    done = false;
                    break;
                }
                condensedDataset.removeNamedModel(graphName);
                if (!condensedModel.isEmpty()) {
                    condensedDataset.addNamedModel(graphName, condensedModel);
                }
                if (!passesTest(condensedDataset, expectedOutput)) {
                    System.out.println("could not delete statement");
                    // the statement is required: roll back to the backup model
                    condensedModel = backupModel;
                    condensedDataset.replaceNamedModel(graphName, condensedModel);
                    deletedStatements--;
                } else {
                    System.out.println("deleted a statement");
                }
            }
            if (!passesTest(condensedDataset, expectedOutput)) {
                System.out.println("test does not pass after removing statements!");
                condensedDataset = backupDataset;
            } else {
                System.out.println("removed " + deletedStatements + " statements");
            }
        } else {
            System.out.println("removed graph: " + graphName);
        }
        System.out.println("dataset has ");
    }
    RDFDataMgr.write(new FileOutputStream(Paths.get(outputPath + filename).toFile()), condensedDataset, Lang.TRIG);
    System.out.println("wrote condensed input file to: " + outputPath + filename);
}
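A minimal driver sketch, not code from webofneeds: the file name and output directory below are placeholder values, and it assumes the method is called from within the same test class (where Paths is already imported).

    // Hypothetical driver; "some-test-case.trig" and "target/condensed/" are placeholders.
    public static void main(String[] args) throws Exception {
        java.nio.file.Files.createDirectories(Paths.get("target/condensed/"));
        condenseTestCaseIteratively("some-test-case.trig", "target/condensed/");
    }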
Use of org.apache.jena.rdf.model.Statement in project webofneeds by researchstudio-sat.
In class AgreementProtocolTest, method condenseTestCaseIteratively:
private static void condenseTestCaseIteratively(String filename, String outputPath) throws Exception {
    RdfUtils.Pair<Dataset> inputAndExpectedOutput = loadDatasetPair(filename);
    try {
        if (!passesTest(inputAndExpectedOutput.getFirst(), inputAndExpectedOutput.getSecond())) {
            System.out.println("test does not pass, cannot condense: " + filename);
            return;
        }
    } catch (Exception e) {
        System.out.println("test throws an Exception, cannot condense: " + filename);
        return;
    }
    Dataset condensedDataset = inputAndExpectedOutput.getFirst();
    Dataset expectedOutput = inputAndExpectedOutput.getSecond();
    Iterator<String> graphNamesIt = condensedDataset.listNames();
    int deletedStatements = 0;
    while (graphNamesIt.hasNext()) {
        String graphName = graphNamesIt.next();
        System.out.println("trying to remove graph: " + graphName);
        Dataset backupDataset = RdfUtils.cloneDataset(condensedDataset);
        condensedDataset.removeNamedModel(graphName);
        if (!passesTest(condensedDataset, expectedOutput)) {
            System.out.println("cannot remove graph: " + graphName + ", trying individual triples");
            // the graph is needed as a whole: restore it and try removing individual triples
            condensedDataset = backupDataset;
            Model condensedModel = condensedDataset.getNamedModel(graphName);
            Model attemptedStatements = ModelFactory.createDefaultModel();
            boolean done = false;
            while (!done) {
                Model backupModel = RdfUtils.cloneModel(condensedModel);
                StmtIterator it = condensedModel.listStatements();
                done = true;
                while (it.hasNext()) {
                    Statement stmt = it.next();
                    if (attemptedStatements.contains(stmt)) {
                        System.out.println("attempted this before");
                        continue;
                    }
                    System.out.println("trying statement: " + stmt);
                    attemptedStatements.add(stmt);
                    // tentatively remove one statement per pass
                    it.remove();
                    deletedStatements++;
                    done = false;
                    break;
                }
                condensedDataset.removeNamedModel(graphName);
                if (!condensedModel.isEmpty()) {
                    condensedDataset.addNamedModel(graphName, condensedModel);
                }
                if (!passesTest(condensedDataset, expectedOutput)) {
                    System.out.println("could not delete statement");
                    // the statement is required: roll back to the backup model
                    condensedModel = backupModel;
                    condensedDataset.replaceNamedModel(graphName, condensedModel);
                    deletedStatements--;
                } else {
                    System.out.println("deleted a statement");
                }
            }
            if (!passesTest(condensedDataset, expectedOutput)) {
                System.out.println("test does not pass after removing statements!");
                condensedDataset = backupDataset;
            } else {
                System.out.println("removed " + deletedStatements + " statements");
            }
        } else {
            System.out.println("removed graph: " + graphName);
        }
        System.out.println("dataset has ");
    }
    RDFDataMgr.write(new FileOutputStream(Paths.get(outputPath + filename).toFile()), condensedDataset, Lang.TRIG);
    System.out.println("wrote condensed input file to: " + outputPath + filename);
}
Use of org.apache.jena.rdf.model.Statement in project opentheso by miledrousset.
In class SynchroSparql, method run:
@Override
public void run() {
    System.out.println("in the sparql run");
    // the Virtuoso JDBC endpoint is derived from the configured server address
    String url = sparqlStruct.getAdresseServeur().replaceAll("http", "jdbc:virtuoso").trim() + ":1111";
    VirtGraph graph = new VirtGraph(sparqlStruct.getGraph(), url, sparqlStruct.getNom_d_utilisateur(), sparqlStruct.getMot_de_passe());
    // clear the target graph before re-loading the thesaurus
    String str = "CLEAR GRAPH <" + sparqlStruct.getGraph() + ">";
    VirtuosoUpdateRequest vur = VirtuosoUpdateFactory.create(str, graph);
    vur.exec();
    Model m = ModelFactory.createDefaultModel();
    DownloadBean db = new DownloadBean();
    db.setConnect(conn);
    // export the thesaurus to an RDF stream and read it into an in-memory model
    StreamedContent file = db.thesoToFile(sparqlStruct.getThesaurus(), liste_lang, liste_group, 0);
    try {
        m.read(file.getStream(), null);
    } catch (Exception e) {
        // graph.close();
    }
    // copy every statement of the model into the remote Virtuoso graph
    StmtIterator iter = m.listStatements();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();
        Resource subject = stmt.getSubject();
        Property predicate = stmt.getPredicate();
        RDFNode object = stmt.getObject();
        Triple tri = new Triple(subject.asNode(), predicate.asNode(), object.asNode());
        graph.add(tri);
    }
    graph.close();
}
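As an alternative to the per-statement loop above, the whole in-memory model could be pushed to Virtuoso through Jena's model API. This is a sketch under the assumption that VirtGraph behaves as an ordinary Jena Graph; it is not code from opentheso and reuses the graph and m variables of the method above.

    // Sketch only: wrap the Virtuoso graph in a Model and add all statements in one call.
    Model remote = ModelFactory.createModelForGraph(graph);
    remote.add(m);
    graph.close();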
Use of org.apache.jena.rdf.model.Statement in project legato by DOREMUS-ANR.
In class ModelManager, method getAllPropFromModels:
/**
 * Get all properties from two models.
 */
public static List<Property> getAllPropFromModels(Model srcModel, Model tgtModel) {
    List<Property> propList = new ArrayList<Property>();
    StmtIterator iter1 = srcModel.listStatements();
    while (iter1.hasNext()) {
        Statement stmt = iter1.nextStatement();
        Property prop = stmt.getPredicate();
        if (!propList.contains(prop))
            propList.add(prop);
    }
    StmtIterator iter2 = tgtModel.listStatements();
    while (iter2.hasNext()) {
        Statement stmt = iter2.nextStatement();
        Property prop = stmt.getPredicate();
        if (!propList.contains(prop))
            propList.add(prop);
    }
    return propList;
}
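A minimal usage sketch, not code from legato: the two models are assumed to be loaded from Turtle files whose names are placeholders, with org.apache.jena.riot.RDFDataMgr imported.

    // Hypothetical usage; "src.ttl" and "tgt.ttl" are placeholder file names.
    Model src = RDFDataMgr.loadModel("src.ttl");
    Model tgt = RDFDataMgr.loadModel("tgt.ttl");
    List<Property> props = ModelManager.getAllPropFromModels(src, tgt);
    props.forEach(p -> System.out.println(p.getURI()));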
Use of org.apache.jena.rdf.model.Statement in project legato by DOREMUS-ANR.
In class ModelManager, method getFilteredTriples:
/**
 * Filter triples from an RDF model with specific properties.
 */
public static Model getFilteredTriples(Model model, List<Property> listProperties) {
    Model newModel = ModelFactory.createDefaultModel();
    StmtIterator iter = model.listStatements();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();
        Property property = stmt.getPredicate();
        if (listProperties.contains(property)) {
            newModel.add(stmt);
        }
    }
    // return only the statements whose predicate is in listProperties
    return newModel;
}
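A short usage sketch, again an assumption rather than code from legato; it presumes a loaded model and imports of java.util.Arrays and org.apache.jena.vocabulary.RDF and RDFS.

    // Hypothetical usage: keep only rdf:type and rdfs:label triples.
    List<Property> wanted = Arrays.asList(RDF.type, RDFS.label);
    Model filtered = ModelManager.getFilteredTriples(model, wanted);
    System.out.println("kept " + filtered.size() + " of " + model.size() + " triples");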
Aggregations