Usage of ambit2.db.processors.RepositoryWriter in the ambit-mirror project by ideaconsult.
Example: the importFile method of the CallableFileImport class.
/**
 * Imports a chemical structure file into the database as a batch job.
 * <p>
 * Reads records from {@code file} (RDF, NanoCML, or whatever the base batch
 * iterator supports) and persists them through a {@link RepositoryWriter}
 * attached to a processor chain. Behaviour then forks on {@code firstCompoundOnly}:
 * either the URI of the single imported compound is returned, or the whole
 * dataset is re-read from the database and its URI returned.
 *
 * @param file the input file to import
 * @return a TaskResult holding the URI of the imported compound or dataset
 * @throws Exception wrapped as ResourceException(SERVER_ERROR_INTERNAL) for
 *         anything that is not already a ResourceException
 */
public TaskResult importFile(File file) throws Exception {
try {
// if target dataset is not defined, create new dataset derived from the file metadata
final SourceDataset dataset = targetDataset != null ? targetDataset : datasetMeta(file);
if (targetDataset == null)
// id -1 marks the dataset as new, to be assigned an id on insert
dataset.setId(-1);
// Anonymous batch processor: overrides record iteration to try RDF first,
// then NanoCML, before falling back to the default format detection.
final BatchDBProcessor<String> batch = new BatchDBProcessor<String>() {
/**
*/
private static final long serialVersionUID = -7971761364143510120L;
@Override
public Iterator<String> getIterator(IInputState target) throws AmbitException {
try {
File file = ((FileInputState) target).getFile();
// Probe for RDF content; returns null if the file is not RDF.
RDFIteratingReader i = getRDFIterator(file, getReporter().getBaseReference().toString());
if (i == null) {
// Not RDF: probe for NanoCML next, else delegate to the superclass.
IIteratingChemObjectReader ni = getNanoCMLIterator(file, getReporter().getBaseReference().toString());
if (ni == null)
return super.getIterator(target);
else
return ni;
} else {
// Disabled legacy code: used to copy dataset metadata out of the RDF model.
/*
* RDFMetaDatasetIterator datasets = null; try {
* datasets = new
* RDFMetaDatasetIterator(i.getJenaModel());
* datasets
* .setBaseReference(getReporter().getBaseReference
* ()); while (datasets.hasNext()) { SourceDataset d
* = datasets.next(); dataset.setId(d.getId());
* dataset.setName(d.getName());
* dataset.setTitle(d.getTitle());
* dataset.setURL(d.getURL()); } } catch (Exception
* x) { x.printStackTrace(); } finally { try {
* datasets.close();} catch (Exception x) {} }
*/
return i;
}
} catch (AmbitException x) {
throw x;
} catch (Exception x) {
// Normalize unexpected failures to the declared exception type, keeping the cause.
throw new AmbitException(x);
}
}
@Override
public void onItemProcessed(String input, Object output, IBatchStatistics stats) {
super.onItemProcessed(input, output, stats);
// In single-compound mode, stop the batch after the first processed record
// and remember the imported structure so its URI can be reported below.
if (firstCompoundOnly && (stats.getRecords(RECORDS_STATS.RECORDS_PROCESSED) >= 1)) {
cancelled = true;
if (output != null)
// Output may be a list of records or a single record, depending on the chain.
if ((output instanceof ArrayList) && ((ArrayList) output).size() > 0) {
if (((ArrayList) output).get(0) instanceof IStructureRecord)
recordImported = (IStructureRecord) ((ArrayList) output).get(0);
} else if (output instanceof IStructureRecord)
recordImported = (IStructureRecord) output;
}
}
};
batch.setReference(dataset.getReference());
batch.setConnection(connection);
// Writer persists each parsed record into the repository under the target dataset.
final RepositoryWriter writer = new RepositoryWriter();
// Property-only mode reuses existing structures instead of inserting new ones.
writer.setUseExistingStructure(isPropertyOnly());
writer.setPropertyKey(getMatcher());
writer.setDataset(dataset);
final ProcessorsChain<String, IBatchStatistics, IProcessor> chain = new ProcessorsChain<String, IBatchStatistics, IProcessor>();
chain.add(writer);
batch.setProcessorChain(chain);
writer.setConnection(connection);
FileInputState fin = new FileInputState(file);
// Run the import; statistics are currently unused beyond the callback above.
IBatchStatistics stats = batch.process(fin);
if (firstCompoundOnly) {
if (recordImported == null)
throw new Exception("No compound imported");
if (compoundReporter == null)
compoundReporter = new ConformerURIReporter("", null, false);
// Best-effort close; failures here must not mask a successful import.
try {
batch.close();
} catch (Exception xx) {
}
return new TaskResult(compoundReporter.getURI(recordImported));
} else {
// Re-read the dataset from the database to obtain its persisted state/id.
ReadDataset q = new ReadDataset();
q.setValue(dataset);
QueryExecutor<ReadDataset> x = new QueryExecutor<ReadDataset>();
x.setConnection(connection);
ResultSet rs = x.process(q);
ISourceDataset newDataset = null;
// Only the first row is needed.
while (rs.next()) {
newDataset = q.getObject(rs);
break;
}
x.closeResults(rs);
x.setConnection(null);
if (newDataset == null)
// NOTE(review): 204 No Content is thrown as an "error" here; callers
// appear to rely on this to signal an empty import — confirm before changing.
throw new ResourceException(Status.SUCCESS_NO_CONTENT);
if (reporter == null)
reporter = new DatasetURIReporter<IQueryRetrieval<ISourceDataset>, ISourceDataset>();
// Best-effort close; failures here must not mask a successful import.
try {
batch.close();
} catch (Exception xx) {
}
return new TaskResult(reporter.getURI(newDataset));
}
} catch (ResourceException x) {
throw x;
} catch (Exception x) {
// NOTE(review): only x.getMessage() is kept — the cause chain is lost here.
throw new ResourceException(new Status(Status.SERVER_ERROR_INTERNAL, x.getMessage()));
} finally {
// The connection is owned by this callable: always close and null it out.
try {
connection.close();
} catch (Exception x) {
}
connection = null;
}
}
Usage of ambit2.db.processors.RepositoryWriter in the ambit-mirror project by ideaconsult.
Example: the createProcessors method of the CallableUpdateDataset class.
@Override
protected ProcessorsChain<String, IBatchStatistics, IProcessor> createProcessors() throws Exception {
// Build a single-stage chain whose only processor writes records into the target dataset.
final ProcessorsChain<String, IBatchStatistics, IProcessor> processors = new ProcessorsChain<String, IBatchStatistics, IProcessor>();
final RepositoryWriter repositoryWriter = new RepositoryWriter();
repositoryWriter.setDataset(dataset);
processors.add(repositoryWriter);
return processors;
}
Usage of ambit2.db.processors.RepositoryWriter in the ambit-mirror project by ideaconsult.
Example: the write method of the RepositoryWriterToXMLTest class.
/**
 * Writes every record supplied by {@code reader} into the repository and
 * returns the number of records written.
 * <p>
 * Fix: the original closed {@code reader} and {@code writer} only on the
 * success path, leaking both when {@code nextRecord()} or {@code write()}
 * threw; both are now closed in a finally block.
 *
 * @param reader     source of structure records; closed on return
 * @param connection database connection used by the writer (not closed here)
 * @param key        optional property matching key; ignored when null
 * @return the number of records written
 * @throws Exception on read or write failure
 */
public int write(IRawReader<IStructureRecord> reader, Connection connection, PropertyKey key) throws Exception {
RepositoryWriter writer = new RepositoryWriter();
if (key != null)
writer.setPropertyKey(key);
writer.setDataset(new SourceDataset("TEST INPUT", LiteratureEntry.getInstance("File", "file:study.toxml")));
writer.setConnection(connection);
writer.open();
int records = 0;
try {
while (reader.hasNext()) {
IStructureRecord record = reader.nextRecord();
writer.write(record);
records++;
}
} finally {
// Close both resources even on failure; swallow the reader's close error
// so the writer still gets closed (original order: reader first, then writer).
try {
reader.close();
} catch (Exception ignored) {
}
writer.close();
}
return records;
}
Usage of ambit2.db.processors.RepositoryWriter in the ambit-mirror project by ideaconsult.
Example: the write method of the RepositoryWriterTest class.
/**
 * Writes records supplied by {@code reader} into the repository, optionally
 * stopping after a record limit is exceeded, and returns the count written.
 * <p>
 * Fixes: (1) the original closed {@code reader}/{@code writer} only on the
 * success path, leaking both on error — now closed in a finally block;
 * (2) the convoluted {@code continue/else break} limit check is simplified
 * to a single break condition with identical semantics.
 *
 * @param reader               source of structure records; closed on return
 * @param connection           database connection used by the writer (not closed here)
 * @param key                  optional property matching key; ignored when null
 * @param useExistingStructure reuse existing structures instead of inserting new ones
 * @param maxrecords           record limit; non-positive means unlimited
 * @return the number of records written
 * @throws Exception on read or write failure
 */
public int write(IRawReader<IStructureRecord> reader, Connection connection, PropertyKey key, boolean useExistingStructure, int maxrecords) throws Exception {
RepositoryWriter writer = new RepositoryWriter();
writer.setUseExistingStructure(useExistingStructure);
if (key != null)
writer.setPropertyKey(key);
writer.setDataset(new SourceDataset("TEST INPUT", LiteratureEntry.getInstance("File", "file:input.sdf")));
writer.setConnection(connection);
writer.open();
int records = 0;
try {
while (reader.hasNext()) {
IStructureRecord record = reader.nextRecord();
writer.write(record);
records++;
// NOTE(review): the loop stops only once records EXCEEDS maxrecords, so a
// positive limit writes maxrecords+1 records — preserved as-is from the
// original; confirm whether the off-by-one is intentional before changing.
if (maxrecords > 0 && records > maxrecords)
break;
}
} finally {
// Close both resources even on failure; swallow the reader's close error
// so the writer still gets closed (original order: reader first, then writer).
try {
reader.close();
} catch (Exception ignored) {
}
writer.close();
}
return records;
}
Usage of ambit2.db.processors.RepositoryWriter in the ambit-mirror project by ideaconsult.
Example: the write method of the NanoWriterTest class.
/**
 * Writes every record supplied by {@code reader} into the repository and
 * returns the number of records written.
 * <p>
 * Fix: the original closed {@code reader} and {@code writer} only on the
 * success path, leaking both when {@code nextRecord()} or {@code write()}
 * threw; both are now closed in a finally block.
 *
 * @param reader     source of structure records; closed on return
 * @param connection database connection used by the writer (not closed here)
 * @param key        optional property matching key; ignored when null
 * @return the number of records written
 * @throws Exception on read or write failure
 */
public int write(IRawReader<IStructureRecord> reader, Connection connection, PropertyKey key) throws Exception {
RepositoryWriter writer = new RepositoryWriter();
if (key != null)
writer.setPropertyKey(key);
writer.setDataset(new SourceDataset("TEST INPUT", LiteratureEntry.getInstance("File", "file:input.nmx")));
writer.setConnection(connection);
writer.open();
int records = 0;
try {
while (reader.hasNext()) {
IStructureRecord record = reader.nextRecord();
writer.write(record);
records++;
}
} finally {
// Close both resources even on failure; swallow the reader's close error
// so the writer still gets closed (original order: reader first, then writer).
try {
reader.close();
} catch (Exception ignored) {
}
writer.close();
}
return records;
}
Aggregations: all usages of RepositoryWriter listed above.