
Example 1 with RDFIteratingReader

Use of ambit2.rest.dataset.RDFIteratingReader in project ambit-mirror by ideaconsult.

In the class RepositoryWriterRDFTest, the method testWrite:

public void testWrite(String baseref) throws Exception {
    setUpDatabaseFromResource("src-datasets.xml");
    IDatabaseConnection c = getConnection();
    ITable chemicals = c.createQueryTable("EXPECTED", "SELECT * FROM chemicals");
    Assert.assertEquals(4, chemicals.getRowCount());
    ITable strucs = c.createQueryTable("EXPECTED", "SELECT * FROM structure");
    Assert.assertEquals(5, strucs.getRowCount());
    ITable srcdataset = c.createQueryTable("EXPECTED", "SELECT * FROM src_dataset");
    Assert.assertEquals(3, srcdataset.getRowCount());
    ITable struc_src = c.createQueryTable("EXPECTED", "SELECT * FROM struc_dataset");
    Assert.assertEquals(8, struc_src.getRowCount());
    ITable property = c.createQueryTable("EXPECTED", "SELECT * FROM properties");
    Assert.assertEquals(3, property.getRowCount());
    ITable tuples = c.createQueryTable("EXPECTED", "SELECT * FROM tuples");
    Assert.assertEquals(1, tuples.getRowCount());
    ITable property_values = c.createQueryTable("EXPECTED", "SELECT * FROM property_values");
    Assert.assertEquals(1, property_values.getRowCount());
    ITable p_tuples = c.createQueryTable("EXPECTED", "SELECT * FROM property_tuples");
    Assert.assertEquals(1, p_tuples.getRowCount());
    InputStream in = this.getClass().getClassLoader().getResourceAsStream("import_dataset2.rdf");
    Assert.assertNotNull(in);
    RDFIteratingReader reader = new RDFIteratingReader(in, SilentChemObjectBuilder.getInstance(), baseref, "RDF/XML");
    // reader.setReference(LiteratureEntry.getInstance("input.rdf"));
    write(reader, c.getConnection());
    c.close();
    c = getConnection();
    chemicals = c.createQueryTable("EXPECTED", "SELECT * FROM chemicals");
    Assert.assertEquals(4, chemicals.getRowCount());
    chemicals = c.createQueryTable("EXPECTED", "SELECT * FROM chemicals where smiles is not null and inchi is not null and formula is not null");
    Assert.assertEquals(0, chemicals.getRowCount());
    strucs = c.createQueryTable("EXPECTED", "SELECT * FROM structure");
    Assert.assertEquals(5, strucs.getRowCount());
    srcdataset = c.createQueryTable("EXPECTED", "SELECT * FROM src_dataset where name='TEST INPUT'");
    Assert.assertEquals(1, srcdataset.getRowCount());
    struc_src = c.createQueryTable("EXPECTED", "SELECT * FROM struc_dataset");
    Assert.assertEquals(8, struc_src.getRowCount());
    property = c.createQueryTable("EXPECTED", "SELECT * FROM properties");
    Assert.assertEquals(33, property.getRowCount());
    property_values = c.createQueryTable("EXPECTED", "SELECT * FROM property_values");
    Assert.assertEquals(57, property_values.getRowCount());
    tuples = c.createQueryTable("EXPECTED", "SELECT * FROM tuples");
    Assert.assertEquals(3, tuples.getRowCount());
    p_tuples = c.createQueryTable("EXPECTED", "SELECT * FROM property_tuples");
    Assert.assertEquals(57, p_tuples.getRowCount());
    p_tuples = c.createQueryTable("EXPECTED", "SELECT * FROM values_string where name=\"Property 1\" and value = \"XXXXX\" and idstructure=100215");
    Assert.assertEquals(1, p_tuples.getRowCount());
    p_tuples = c.createQueryTable("EXPECTED", "SELECT value_num FROM property_values join properties using(idproperty) where name='http://anotherservice.com:8080/feature/1' and idstructure=100215");
    Assert.assertEquals(1, p_tuples.getRowCount());
    Assert.assertEquals("3.14", p_tuples.getValue(0, "value_num").toString());
    c.close();
}
Also used: RDFIteratingReader(ambit2.rest.dataset.RDFIteratingReader) InputStream(java.io.InputStream) ITable(org.dbunit.dataset.ITable) IDatabaseConnection(org.dbunit.database.IDatabaseConnection)
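
For orientation, here is a minimal standalone sketch of the same reader usage outside the dbunit fixture. The constructor arguments mirror the test above (input stream, CDK builder, base reference, RDF serialization name); the hasNext()/next() loop follows how the reader is consumed as an Iterator in Example 2, while the close() call, the element type of next() and the base reference value are assumptions, not something this page confirms.

import java.io.InputStream;

import org.openscience.cdk.silent.SilentChemObjectBuilder;

import ambit2.rest.dataset.RDFIteratingReader;

public class RDFIteratingReaderSketch {
    public static void main(String[] args) throws Exception {
        // The same RDF/XML resource the tests load from the classpath.
        InputStream in = RDFIteratingReaderSketch.class.getClassLoader()
                .getResourceAsStream("import_dataset2.rdf");
        // Hypothetical base reference; the tests pass the service base URL here.
        String baseref = "http://localhost:8080/ambit2";
        RDFIteratingReader reader = new RDFIteratingReader(in,
                SilentChemObjectBuilder.getInstance(), baseref, "RDF/XML");
        try {
            // Assumption: the reader behaves as a plain Iterator over parsed
            // records, as in CallableFileImport.getIterator() (Example 2).
            while (reader.hasNext()) {
                Object record = reader.next();
                System.out.println(record);
            }
        } finally {
            // Assumption: close() releases the underlying stream, as on other
            // iterating readers.
            reader.close();
        }
    }
}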

Example 2 with RDFIteratingReader

Use of ambit2.rest.dataset.RDFIteratingReader in project ambit-mirror by ideaconsult.

In the class CallableFileImport, the method importFile:

public TaskResult importFile(File file) throws Exception {
    try {
        // if target dataset is not defined, create new dataset
        final SourceDataset dataset = targetDataset != null ? targetDataset : datasetMeta(file);
        if (targetDataset == null)
            dataset.setId(-1);
        final BatchDBProcessor<String> batch = new BatchDBProcessor<String>() {

            /** Serial version UID for the anonymous serializable BatchDBProcessor subclass. */
            private static final long serialVersionUID = -7971761364143510120L;

            @Override
            public Iterator<String> getIterator(IInputState target) throws AmbitException {
                try {
                    File file = ((FileInputState) target).getFile();
                    RDFIteratingReader i = getRDFIterator(file, getReporter().getBaseReference().toString());
                    if (i == null) {
                        IIteratingChemObjectReader ni = getNanoCMLIterator(file, getReporter().getBaseReference().toString());
                        if (ni == null)
                            return super.getIterator(target);
                        else
                            return ni;
                    } else {
                        /*
                         * RDFMetaDatasetIterator datasets = null;
                         * try {
                         *     datasets = new RDFMetaDatasetIterator(i.getJenaModel());
                         *     datasets.setBaseReference(getReporter().getBaseReference());
                         *     while (datasets.hasNext()) {
                         *         SourceDataset d = datasets.next();
                         *         dataset.setId(d.getId());
                         *         dataset.setName(d.getName());
                         *         dataset.setTitle(d.getTitle());
                         *         dataset.setURL(d.getURL());
                         *     }
                         * } catch (Exception x) {
                         *     x.printStackTrace();
                         * } finally {
                         *     try { datasets.close(); } catch (Exception x) {}
                         * }
                         */
                        return i;
                    }
                } catch (AmbitException x) {
                    throw x;
                } catch (Exception x) {
                    throw new AmbitException(x);
                }
            }

            @Override
            public void onItemProcessed(String input, Object output, IBatchStatistics stats) {
                super.onItemProcessed(input, output, stats);
                if (firstCompoundOnly && (stats.getRecords(RECORDS_STATS.RECORDS_PROCESSED) >= 1)) {
                    cancelled = true;
                    if (output != null)
                        if ((output instanceof ArrayList) && ((ArrayList) output).size() > 0) {
                            if (((ArrayList) output).get(0) instanceof IStructureRecord)
                                recordImported = (IStructureRecord) ((ArrayList) output).get(0);
                        } else if (output instanceof IStructureRecord)
                            recordImported = (IStructureRecord) output;
                }
            }
        };
        batch.setReference(dataset.getReference());
        batch.setConnection(connection);
        final RepositoryWriter writer = new RepositoryWriter();
        writer.setUseExistingStructure(isPropertyOnly());
        writer.setPropertyKey(getMatcher());
        writer.setDataset(dataset);
        final ProcessorsChain<String, IBatchStatistics, IProcessor> chain = new ProcessorsChain<String, IBatchStatistics, IProcessor>();
        chain.add(writer);
        batch.setProcessorChain(chain);
        writer.setConnection(connection);
        FileInputState fin = new FileInputState(file);
        IBatchStatistics stats = batch.process(fin);
        if (firstCompoundOnly) {
            if (recordImported == null)
                throw new Exception("No compound imported");
            if (compoundReporter == null)
                compoundReporter = new ConformerURIReporter("", null, false);
            try {
                batch.close();
            } catch (Exception xx) {
            }
            return new TaskResult(compoundReporter.getURI(recordImported));
        } else {
            ReadDataset q = new ReadDataset();
            q.setValue(dataset);
            QueryExecutor<ReadDataset> x = new QueryExecutor<ReadDataset>();
            x.setConnection(connection);
            ResultSet rs = x.process(q);
            ISourceDataset newDataset = null;
            while (rs.next()) {
                newDataset = q.getObject(rs);
                break;
            }
            x.closeResults(rs);
            x.setConnection(null);
            if (newDataset == null)
                throw new ResourceException(Status.SUCCESS_NO_CONTENT);
            if (reporter == null)
                reporter = new DatasetURIReporter<IQueryRetrieval<ISourceDataset>, ISourceDataset>();
            try {
                batch.close();
            } catch (Exception xx) {
            }
            return new TaskResult(reporter.getURI(newDataset));
        }
    } catch (ResourceException x) {
        throw x;
    } catch (Exception x) {
        throw new ResourceException(new Status(Status.SERVER_ERROR_INTERNAL, x.getMessage()));
    } finally {
        try {
            connection.close();
        } catch (Exception x) {
        }
        connection = null;
    }
}
Also used: RDFIteratingReader(ambit2.rest.dataset.RDFIteratingReader) ArrayList(java.util.ArrayList) RepositoryWriter(ambit2.db.processors.RepositoryWriter) IStructureRecord(ambit2.base.interfaces.IStructureRecord) ISourceDataset(ambit2.base.data.ISourceDataset) ProcessorsChain(net.idea.modbcum.i.processors.ProcessorsChain) IInputState(ambit2.core.io.IInputState) QueryExecutor(ambit2.db.search.QueryExecutor) ResultSet(java.sql.ResultSet) ResourceException(org.restlet.resource.ResourceException) Status(org.restlet.data.Status) IIteratingChemObjectReader(org.openscience.cdk.io.iterator.IIteratingChemObjectReader) IBatchStatistics(net.idea.modbcum.i.batch.IBatchStatistics) IProcessor(net.idea.modbcum.i.processors.IProcessor) BatchDBProcessor(ambit2.db.processors.BatchDBProcessor) AmbitException(net.idea.modbcum.i.exceptions.AmbitException) CDKException(org.openscience.cdk.exception.CDKException) ConformerURIReporter(ambit2.rest.structure.ConformerURIReporter) SourceDataset(ambit2.base.data.SourceDataset) File(java.io.File) FileInputState(ambit2.core.io.FileInputState) ReadDataset(ambit2.db.update.dataset.ReadDataset) DatasetURIReporter(ambit2.rest.dataset.DatasetURIReporter)
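
The getRDFIterator(...) and getNanoCMLIterator(...) helpers called inside getIterator() are not part of this example. Below is a hedged sketch of what a getRDFIterator-style helper might look like, assuming it only wires a FileInputStream into the RDFIteratingReader constructor from Examples 1 and 3 and returns null for non-RDF files so the caller can fall back to the other iterators; the signature, the extension check and the assumed imports (java.io.File, java.io.FileInputStream, java.io.InputStream, org.openscience.cdk.silent.SilentChemObjectBuilder) are hypothetical.

// Hypothetical helper, not the ambit-mirror implementation: it reuses the
// RDFIteratingReader constructor shown in Examples 1 and 3 and returns null
// when the file does not look like RDF/XML, so getIterator() can try the
// NanoCML or default iterators instead.
protected RDFIteratingReader getRDFIterator(File file, String baseReference) throws Exception {
    String name = file.getName().toLowerCase();
    if (!name.endsWith(".rdf") && !name.endsWith(".rdf.xml"))
        return null; // not RDF; let the caller fall back
    InputStream in = new FileInputStream(file);
    return new RDFIteratingReader(in, SilentChemObjectBuilder.getInstance(),
            baseReference, "RDF/XML");
}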

Example 3 with RDFIteratingReader

Use of ambit2.rest.dataset.RDFIteratingReader in project ambit-mirror by ideaconsult.

In the class RepositoryWriterRDFTest, the method testWriteForeign:

/**
 * Writes "Property 1" = "XXXXX" for structure 100215
 *
 * @throws Exception
 */
@Test
public void testWriteForeign() throws Exception {
    setUpDatabaseFromResource("src-datasets.xml");
    IDatabaseConnection c = getConnection();
    ITable chemicals = c.createQueryTable("EXPECTED", "SELECT * FROM chemicals");
    Assert.assertEquals(4, chemicals.getRowCount());
    ITable strucs = c.createQueryTable("EXPECTED", "SELECT * FROM structure");
    Assert.assertEquals(5, strucs.getRowCount());
    ITable srcdataset = c.createQueryTable("EXPECTED", "SELECT * FROM src_dataset");
    Assert.assertEquals(3, srcdataset.getRowCount());
    ITable struc_src = c.createQueryTable("EXPECTED", "SELECT * FROM struc_dataset");
    Assert.assertEquals(8, struc_src.getRowCount());
    ITable property = c.createQueryTable("EXPECTED", "SELECT * FROM properties");
    Assert.assertEquals(3, property.getRowCount());
    ITable tuples = c.createQueryTable("EXPECTED", "SELECT * FROM tuples");
    Assert.assertEquals(1, tuples.getRowCount());
    ITable property_values = c.createQueryTable("EXPECTED", "SELECT * FROM property_values");
    Assert.assertEquals(1, property_values.getRowCount());
    ITable p_tuples = c.createQueryTable("EXPECTED", "SELECT * FROM property_tuples");
    Assert.assertEquals(1, p_tuples.getRowCount());
    InputStream in = this.getClass().getClassLoader().getResourceAsStream("import_dataset2.rdf");
    Assert.assertNotNull(in);
    RDFIteratingReader reader = new RDFIteratingReader(in, SilentChemObjectBuilder.getInstance(), String.format("http://somethingelse.com:%d", port), "RDF/XML");
    // reader.setReference(LiteratureEntry.getInstance("input.rdf"));
    write(reader, c.getConnection());
    c.close();
    c = getConnection();
    chemicals = c.createQueryTable("EXPECTED", "SELECT * FROM chemicals");
    Assert.assertEquals(5, chemicals.getRowCount());
    chemicals = c.createQueryTable("EXPECTED", "SELECT * FROM chemicals where smiles is not null and inchi is not null and formula is not null");
    Assert.assertEquals(2, chemicals.getRowCount());
    strucs = c.createQueryTable("EXPECTED", "SELECT * FROM structure");
    Assert.assertEquals(7, strucs.getRowCount());
    srcdataset = c.createQueryTable("EXPECTED", "SELECT * FROM src_dataset where name='TEST INPUT'");
    Assert.assertEquals(1, srcdataset.getRowCount());
    struc_src = c.createQueryTable("EXPECTED", "SELECT * FROM struc_dataset");
    Assert.assertEquals(10, struc_src.getRowCount());
    property = c.createQueryTable("EXPECTED", "SELECT * FROM properties");
    Assert.assertEquals(34, property.getRowCount());
    property_values = c.createQueryTable("EXPECTED", "SELECT * FROM property_values");
    Assert.assertEquals(57, property_values.getRowCount());
    tuples = c.createQueryTable("EXPECTED", "SELECT * FROM tuples");
    Assert.assertEquals(3, tuples.getRowCount());
    p_tuples = c.createQueryTable("EXPECTED", "SELECT * FROM property_tuples");
    Assert.assertEquals(57, p_tuples.getRowCount());
    p_tuples = c.createQueryTable("EXPECTED", "SELECT * FROM values_string where name=\"http://localhost:8181/feature/1\" and value = \"XXXXX\"");
    Assert.assertEquals(1, p_tuples.getRowCount());
    p_tuples = c.createQueryTable("EXPECTED", "SELECT value_num FROM property_values join properties using(idproperty) where name='http://anotherservice.com:8080/feature/1'");
    Assert.assertEquals(1, p_tuples.getRowCount());
    Assert.assertEquals("3.14", p_tuples.getValue(0, "value_num").toString());
    c.close();
}
Also used: RDFIteratingReader(ambit2.rest.dataset.RDFIteratingReader) InputStream(java.io.InputStream) ITable(org.dbunit.dataset.ITable) IDatabaseConnection(org.dbunit.database.IDatabaseConnection) Test(org.junit.Test) ResourceTest(ambit2.rest.test.ResourceTest)
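
Both tests hand the reader to a write(reader, connection) helper defined in the test base class, which this page does not reproduce. The following is a rough sketch of what such a helper could do with the RepositoryWriter from Example 2; only setConnection(...) is confirmed above, while the write(IStructureRecord) and close() calls on the writer, the reader's close(), and the record type returned by next() are assumptions (imports assumed: java.sql.Connection, ambit2.db.processors.RepositoryWriter, ambit2.base.interfaces.IStructureRecord).

// Hypothetical sketch of the write(reader, connection) helper used by
// testWrite and testWriteForeign; only setConnection(...) is confirmed by
// Example 2, the remaining calls are assumptions.
protected void write(RDFIteratingReader reader, Connection connection) throws Exception {
    RepositoryWriter writer = new RepositoryWriter();
    writer.setConnection(connection);
    try {
        while (reader.hasNext()) {
            Object record = reader.next();
            if (record instanceof IStructureRecord)
                // Assumption: RepositoryWriter exposes a write(IStructureRecord)
                // method in the style of other ambit2 repository writers.
                writer.write((IStructureRecord) record);
        }
    } finally {
        reader.close();
        writer.close(); // assumed to flush any pending statements
    }
}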

Aggregations

RDFIteratingReader (ambit2.rest.dataset.RDFIteratingReader) 3
InputStream (java.io.InputStream) 2
IDatabaseConnection (org.dbunit.database.IDatabaseConnection) 2
ITable (org.dbunit.dataset.ITable) 2
ISourceDataset (ambit2.base.data.ISourceDataset) 1
SourceDataset (ambit2.base.data.SourceDataset) 1
IStructureRecord (ambit2.base.interfaces.IStructureRecord) 1
FileInputState (ambit2.core.io.FileInputState) 1
IInputState (ambit2.core.io.IInputState) 1
BatchDBProcessor (ambit2.db.processors.BatchDBProcessor) 1
RepositoryWriter (ambit2.db.processors.RepositoryWriter) 1
QueryExecutor (ambit2.db.search.QueryExecutor) 1
ReadDataset (ambit2.db.update.dataset.ReadDataset) 1
DatasetURIReporter (ambit2.rest.dataset.DatasetURIReporter) 1
ConformerURIReporter (ambit2.rest.structure.ConformerURIReporter) 1
ResourceTest (ambit2.rest.test.ResourceTest) 1
File (java.io.File) 1
ResultSet (java.sql.ResultSet) 1
ArrayList (java.util.ArrayList) 1
IBatchStatistics (net.idea.modbcum.i.batch.IBatchStatistics) 1