Use of ambit2.db.update.dataset.ReadDataset in project ambit-mirror by ideaconsult.
The class MetadatasetResource, method getQuery.
protected IQueryRetrieval<M> getQuery(Context context, Request request, Response response, boolean IDcanBeEmpty) throws ResourceException {
    Form form = getResourceRef(request).getQueryAsForm();
    try {
        headless = Boolean.parseBoolean(form.getFirstValue("headless"));
    } catch (Exception x) {
        headless = false;
    }
    AbstractReadDataset query = null;
    structureParam = getStructureParameter();
    StringCondition condition;
    try {
        condition = StringCondition.getInstance(form.getFirstValue(QueryResource.condition));
    } catch (Exception x) {
        condition = StringCondition.getInstance(StringCondition.C_EQ);
    }
    Property property = new Property(null);
    property.setClazz(null);
    property.setLabel(null);
    property.setReference(null);
    for (search_features sf : search_features.values()) {
        Object id = form.getFirstValue(sf.name());
        if (id != null) {
            // because we are not storing full local references!
            if (search_features.feature_hassource.equals(sf)) {
                String parent = getRequest().getRootRef().toString();
                int p = id.toString().indexOf(parent);
                if (p >= 0) {
                    // yet one more hack ... should store at least prefixes
                    id = id.toString().substring(p + parent.length()).replace("/algorithm/", "").replace("/model/", "");
                }
            }
            sf.setProperty(property, id);
            if (query == null) {
                query = new QueryDatasetByFeatures(property, condition);
                ((QueryDatasetByFeatures) query).setStructure(structureParam);
            }
        }
    }
    if (query == null) {
        query = new ReadDataset();
        query.setValue(null);
    }
    Object id = request.getAttributes().get(DatasetStructuresResource.datasetKey);
    if (id != null)
        try {
            Integer idnum = new Integer(Reference.decode(id.toString()));
            dataset = (M) new SourceDataset();
            dataset.setID(idnum);
            query.setValue(dataset);
        } catch (NumberFormatException x) {
            if (id.toString().startsWith(DatasetStructuresResource.QR_PREFIX)) {
                String key = id.toString().substring(DatasetStructuresResource.QR_PREFIX.length());
                try {
                    IQueryRetrieval<M> q = (IQueryRetrieval<M>) new ReadStoredQuery(Integer.parseInt(key.toString()));
                    return q;
                } catch (NumberFormatException xx) {
                    throw new InvalidResourceIDException(id);
                }
            } else {
                dataset = (M) new SourceDataset();
                dataset.setName(id.toString());
                query.setValue(dataset);
            }
        } catch (Exception x) {
            throw new InvalidResourceIDException(id);
        }
    if (!IDcanBeEmpty && (query.getValue() == null))
        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Empty dataset ID!");
    return query;
}
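When no feature search parameters are present, getQuery falls back to a plain ReadDataset whose value is a SourceDataset built from the {dataset} URI attribute: a numeric id selects by database id, anything else is treated as a dataset name. A minimal sketch of that parameterization, using only calls that appear in the snippets on this page; the helper name and example input are invented for illustration.

// Hypothetical helper, not part of ambit-mirror: builds the fallback ReadDataset query
// for a {dataset} URI attribute that may be either a numeric id or a dataset name.
static ReadDataset datasetQueryFor(String idOrName) {
    ReadDataset query = new ReadDataset();
    SourceDataset value = new SourceDataset();
    try {
        value.setID(Integer.valueOf(idOrName)); // numeric ids select by database id
    } catch (NumberFormatException x) {
        value.setName(idOrName);                // non-numeric ids are treated as names
    }
    query.setValue(value);
    return query;
}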
Use of ambit2.db.update.dataset.ReadDataset in project ambit-mirror by ideaconsult.
The class CallableSubstanceImporter, method createReference.
@Override
protected TaskResult createReference(Connection connection) throws Exception {
    if ((importedRecord.getIdsubstance() > 0) || (importedRecord.getSubstanceUUID() != null)) {
        // the substance is already persisted: release resources and report its URI
        try {
            batch.close();
        } catch (Exception xx) {
        }
        try {
            if (file != null && file.exists())
                file.delete();
        } catch (Exception x) {
        }
        return new TaskResult(substanceReporter.getURI(importedRecord));
    } else {
        SourceDataset newDataset = dataset;
        if (newDataset.getId() <= 0) {
            // the target dataset has no database id yet: resolve it via ReadDataset
            ReadDataset q = new ReadDataset();
            q.setValue(newDataset);
            QueryExecutor<ReadDataset> x = new QueryExecutor<ReadDataset>();
            x.setConnection(connection);
            ResultSet rs = x.process(q);
            while (rs.next()) {
                newDataset = q.getObject(rs);
                if (newDataset.getId() > 0)
                    break;
            }
            x.closeResults(rs);
            x.setConnection(null);
        }
        if (newDataset == null || newDataset.getId() <= 0)
            throw new ResourceException(Status.SUCCESS_NO_CONTENT);
        try {
            batch.close();
        } catch (Exception xx) {
        }
        try {
            if (file != null && file.exists())
                file.delete();
        } catch (Exception x) {
        }
        return new TaskResult(datasetURIReporter.getURI(newDataset));
    }
}
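The block above shows the recurring ReadDataset idiom in these usages: a SourceDataset acts as the query-by-example value, the query runs through a QueryExecutor over an open JDBC connection, and the first row with a positive id wins. A condensed sketch of just that lookup; the helper is hypothetical, and imports and connection handling are assumed to be in place as in the surrounding code.

// Hypothetical helper, not part of ambit-mirror: resolves a dataset's database record
// by running a ReadDataset query-by-example through a QueryExecutor.
static SourceDataset findDataset(Connection connection, SourceDataset example) throws Exception {
    ReadDataset q = new ReadDataset();
    q.setValue(example);
    QueryExecutor<ReadDataset> executor = new QueryExecutor<ReadDataset>();
    ResultSet rs = null;
    try {
        executor.setConnection(connection);
        rs = executor.process(q);
        while (rs.next()) {
            SourceDataset found = q.getObject(rs);
            if (found.getId() > 0)
                return found;   // first persisted match wins
        }
        return null;            // not found; callers typically report "no content"
    } finally {
        try {
            if (rs != null)
                executor.closeResults(rs);
        } catch (Exception x) {
        }
        executor.setConnection(null);
    }
}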
Use of ambit2.db.update.dataset.ReadDataset in project ambit-mirror by ideaconsult.
The class CallableFileImport, method importFile.
public TaskResult importFile(File file) throws Exception {
    try {
        // if target dataset is not defined, create new dataset
        final SourceDataset dataset = targetDataset != null ? targetDataset : datasetMeta(file);
        if (targetDataset == null)
            dataset.setId(-1);
        final BatchDBProcessor<String> batch = new BatchDBProcessor<String>() {

            /**
             */
            private static final long serialVersionUID = -7971761364143510120L;

            @Override
            public Iterator<String> getIterator(IInputState target) throws AmbitException {
                try {
                    File file = ((FileInputState) target).getFile();
                    RDFIteratingReader i = getRDFIterator(file, getReporter().getBaseReference().toString());
                    if (i == null) {
                        IIteratingChemObjectReader ni = getNanoCMLIterator(file, getReporter().getBaseReference().toString());
                        if (ni == null)
                            return super.getIterator(target);
                        else
                            return ni;
                    } else {
                        /*
                         * RDFMetaDatasetIterator datasets = null; try {
                         * datasets = new
                         * RDFMetaDatasetIterator(i.getJenaModel());
                         * datasets
                         * .setBaseReference(getReporter().getBaseReference
                         * ()); while (datasets.hasNext()) { SourceDataset d
                         * = datasets.next(); dataset.setId(d.getId());
                         * dataset.setName(d.getName());
                         * dataset.setTitle(d.getTitle());
                         * dataset.setURL(d.getURL()); } } catch (Exception
                         * x) { x.printStackTrace(); } finally { try {
                         * datasets.close();} catch (Exception x) {} }
                         */
                        return i;
                    }
                } catch (AmbitException x) {
                    throw x;
                } catch (Exception x) {
                    throw new AmbitException(x);
                }
            }

            @Override
            public void onItemProcessed(String input, Object output, IBatchStatistics stats) {
                super.onItemProcessed(input, output, stats);
                if (firstCompoundOnly && (stats.getRecords(RECORDS_STATS.RECORDS_PROCESSED) >= 1)) {
                    cancelled = true;
                    if (output != null)
                        if ((output instanceof ArrayList) && ((ArrayList) output).size() > 0) {
                            if (((ArrayList) output).get(0) instanceof IStructureRecord)
                                recordImported = (IStructureRecord) ((ArrayList) output).get(0);
                        } else if (output instanceof IStructureRecord)
                            recordImported = (IStructureRecord) output;
                }
            }
        };
        batch.setReference(dataset.getReference());
        batch.setConnection(connection);
        final RepositoryWriter writer = new RepositoryWriter();
        writer.setUseExistingStructure(isPropertyOnly());
        writer.setPropertyKey(getMatcher());
        writer.setDataset(dataset);
        final ProcessorsChain<String, IBatchStatistics, IProcessor> chain = new ProcessorsChain<String, IBatchStatistics, IProcessor>();
        chain.add(writer);
        batch.setProcessorChain(chain);
        writer.setConnection(connection);
        FileInputState fin = new FileInputState(file);
        IBatchStatistics stats = batch.process(fin);
        if (firstCompoundOnly) {
            if (recordImported == null)
                throw new Exception("No compound imported");
            if (compoundReporter == null)
                compoundReporter = new ConformerURIReporter("", null, false);
            try {
                batch.close();
            } catch (Exception xx) {
            }
            return new TaskResult(compoundReporter.getURI(recordImported));
        } else {
            ReadDataset q = new ReadDataset();
            q.setValue(dataset);
            QueryExecutor<ReadDataset> x = new QueryExecutor<ReadDataset>();
            x.setConnection(connection);
            ResultSet rs = x.process(q);
            ISourceDataset newDataset = null;
            while (rs.next()) {
                newDataset = q.getObject(rs);
                break;
            }
            x.closeResults(rs);
            x.setConnection(null);
            if (newDataset == null)
                throw new ResourceException(Status.SUCCESS_NO_CONTENT);
            if (reporter == null)
                reporter = new DatasetURIReporter<IQueryRetrieval<ISourceDataset>, ISourceDataset>();
            try {
                batch.close();
            } catch (Exception xx) {
            }
            return new TaskResult(reporter.getURI(newDataset));
        }
    } catch (ResourceException x) {
        throw x;
    } catch (Exception x) {
        throw new ResourceException(new Status(Status.SERVER_ERROR_INTERNAL, x.getMessage()));
    } finally {
        try {
            connection.close();
        } catch (Exception x) {
        }
        connection = null;
    }
}
Use of ambit2.db.update.dataset.ReadDataset in project ambit-mirror by ideaconsult.
The class CallableFinder, method getDataset.
protected SourceDataset getDataset(SourceDataset dataset) {
    QueryExecutor exec = new QueryExecutor();
    Connection connection = null;
    try {
        ReadDataset q = new ReadDataset();
        q.setPage(0);
        q.setPageSize(1);
        q.setValue(dataset);
        DBConnection dbc = new DBConnection(context);
        connection = dbc.getConnection();
        exec.setConnection(connection);
        ResultSet rs = exec.process(q);
        while (rs.next()) {
            dataset = q.getObject(rs);
        }
        try {
            exec.closeResults(rs);
        } catch (Exception x) {
        }
    } catch (Exception x) {
        // if the lookup fails, the original dataset is returned unchanged
    } finally {
        try {
            exec.close();
        } catch (Exception x) {
        }
        try {
            connection.close();
        } catch (Exception x) {
        }
    }
    return dataset;
}
Use of ambit2.db.update.dataset.ReadDataset in project ambit-mirror by ideaconsult.
The class DbStructureWriter, method writeDataset.
protected void writeDataset(IStructureRecord structure) throws SQLException, AmbitException, OperationNotSupportedException {
    if (getDataset() == null)
        setDataset(new SourceDataset("Default"));
    if (getDataset().getID() <= 0) {
        if (readDatasetQuery == null)
            readDatasetQuery = new ReadDataset();
        readDatasetQuery.setValue(getDataset());
        readDatasetQuery.setCondition(StringCondition.getInstance(StringCondition.C_EQ));
        readDatasetQuery.setPageSize(2);
        readDatasetQuery.setPage(0);
        ResultSet rs = null;
        try {
            rs = queryexec.process(readDatasetQuery);
            // should be 1 only
            int record = 0;
            while (rs.next()) {
                SourceDataset d = readDatasetQuery.getObject(rs);
                getDataset().setID(d.getID());
                record++;
            }
            if (record > 1)
                // smth is wrong
                getDataset().setID(-1);
        } catch (Exception x) {
            getDataset().setID(-1);
        }
    }
    datasetAddStruc.setObject(structure);
    datasetAddStruc.setGroup(dataset);
    try {
        exec.process(datasetAddStruc);
    } catch (Exception x) {
        logger.log(Level.SEVERE, String.format("Error %s adding structure /compound/%d/conformer/%d to dataset [%d] %s", x.getMessage(), structure.getIdchemical(), structure.getIdstructure(), dataset.getId(), dataset.getName()));
        throw new AmbitException(x);
    }
    propertyWriter.setDataset(dataset);
}
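writeDataset tightens the same lookup: it pins the string condition to exact equality and requests a page of two rows, so a second match exposes an ambiguous dataset name, which is then signalled by resetting the id to -1. A minimal sketch of that check under the same assumptions; the helper is hypothetical, and a QueryExecutor<ReadDataset> already wired to an open connection is assumed, as queryexec is above.

// Hypothetical helper, not part of ambit-mirror: resolves a dataset name to a unique
// database id, treating zero or multiple exact matches as unresolved (returned as -1).
static int resolveUniqueDatasetId(QueryExecutor<ReadDataset> queryexec, String name) throws Exception {
    ReadDataset q = new ReadDataset();
    q.setValue(new SourceDataset(name));
    q.setCondition(StringCondition.getInstance(StringCondition.C_EQ)); // exact-equality condition, as in writeDataset
    q.setPageSize(2);  // two rows are enough to detect a duplicate name
    q.setPage(0);
    ResultSet rs = queryexec.process(q);
    int id = -1, matches = 0;
    while (rs.next()) {
        id = q.getObject(rs).getID();
        matches++;
    }
    queryexec.closeResults(rs);
    return matches == 1 ? id : -1;  // more than one match means the name is ambiguous
}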