Use of org.dkpro.lab.storage.impl.PropertiesAdapter in project dkpro-lab by dkpro.
In class BatchTaskTest, method importTest:
/**
 * Verifies that importing a key the producer never stored ("DATA1" instead of the
 * actual "DATA") makes the batch run fail with a RuntimeException.
 */
@Test(expected = RuntimeException.class)
public void importTest() throws Exception {
    Task producer = new ExecutableTaskBase() {
        @Override
        public void execute(TaskContext aContext) throws Exception {
            System.out.println("Running producer");
            Properties data = new Properties();
            data.setProperty("key", "value");
            aContext.storeBinary("DATA", new PropertiesAdapter(data));
        }
    };
    Task consumer = new ExecutableTaskBase() {
        @Override
        public void execute(TaskContext aContext) throws Exception {
            System.out.println("Running consumer");
            Properties data = new Properties();
            aContext.retrieveBinary("DATA", new PropertiesAdapter(data));
            // JUnit convention: expected value first, actual second (was swapped).
            Assert.assertEquals("value", data.getProperty("key"));
        }
    };
    // Deliberately import the non-existent key "DATA1" to trigger the expected failure.
    consumer.addImport(producer, "DATA1", "DATA");
    DefaultBatchTask batch = new DefaultBatchTask();
    batch.addTask(producer);
    batch.addTask(consumer);
    Lab.getInstance().run(batch);
}
Use of org.dkpro.lab.storage.impl.PropertiesAdapter in project dkpro-lab by dkpro.
In class MultiThreadBatchTaskTest, method importTest:
/**
 * Verifies that importing a key the producer never stored ("DATA1" instead of the
 * actual "DATA") makes the multi-threaded batch run fail with an
 * UnresolvedImportException.
 */
@Test(expected = UnresolvedImportException.class)
public void importTest() throws Exception {
    Task producer = new ExecutableTaskBase() {
        @Override
        public void execute(TaskContext aContext) throws Exception {
            System.out.println("Running producer");
            Properties data = new Properties();
            data.setProperty("key", "value");
            aContext.storeBinary("DATA", new PropertiesAdapter(data));
        }
    };
    Task consumer = new ExecutableTaskBase() {
        @Override
        public void execute(TaskContext aContext) throws Exception {
            System.out.println("Running consumer");
            Properties data = new Properties();
            aContext.retrieveBinary("DATA", new PropertiesAdapter(data));
            // JUnit convention: expected value first, actual second (was swapped).
            Assert.assertEquals("value", data.getProperty("key"));
        }
    };
    // Deliberately import the non-existent key "DATA1" to trigger the expected failure.
    consumer.addImport(producer, "DATA1", "DATA");
    DefaultBatchTask batch = new DefaultBatchTask();
    batch.addTask(producer);
    batch.addTask(consumer);
    Lab.getInstance().run(batch);
}
Use of org.dkpro.lab.storage.impl.PropertiesAdapter in project dkpro-lab by dkpro.
In class TaskBase, method persist:
/**
 * Writes this task's state to the given context: its attributes under
 * {@code ATTRIBUTES_KEY} and its resolved discriminators under
 * {@code DISCRIMINATORS_KEY}.
 */
@Override
public void persist(final TaskContext aContext) throws IOException {
    // Guard: persisting an uninitialized task would write incomplete state.
    if (!initialized) {
        throw new IllegalStateException("Task not initialized. Maybe forgot to call super.initialize(ctx) in [" + getClass().getName() + "]?");
    }
    PropertiesAdapter attributes = new PropertiesAdapter(getAttributes(), "Task properties");
    aContext.storeBinary(ATTRIBUTES_KEY, attributes);
    PropertiesAdapter discriminators = new PropertiesAdapter(getResolvedDescriminators(aContext));
    aContext.storeBinary(DISCRIMINATORS_KEY, discriminators);
}
Use of org.dkpro.lab.storage.impl.PropertiesAdapter in project dkpro-lab by dkpro.
In class BatchTaskEngine, method getLatestExecution:
/**
 * Locate the latest task execution compatible with the given task configuration.
 *
 * @param aContext
 *            the context of the current batch task.
 * @param aType
 *            the type of the task context to find.
 * @param aDiscriminators
 *            the discriminators of the task context to find.
 * @param aConfig
 *            the current parameter configuration.
 * @return the metadata of the most recent matching task context.
 * @throws TaskContextNotFoundException
 *             if a matching task context could not be found.
 * @see ImportUtil#matchConstraints(Map, Map, boolean)
 */
private TaskContextMetadata getLatestExecution(TaskContext aContext, String aType, Map<String, String> aDiscriminators, Map<String, Object> aConfig) {
    // The conversion service does not change per entry, so look it up once
    // instead of inside the loop.
    ConversionService cs = aContext.getConversionService();
    // Convert parameter values to strings. A value registered with the
    // conversion service overrides the plain string conversion as the
    // constraint for that parameter.
    Map<String, String> config = new HashMap<String, String>();
    for (Entry<String, Object> e : aConfig.entrySet()) {
        Object value = e.getValue();
        String constraint = cs.isRegistered(value) ? cs.getDiscriminableValue(value) : Util.toString(value);
        config.put(e.getKey(), constraint);
    }
    StorageService storage = aContext.getStorageService();
    List<TaskContextMetadata> metas = storage.getContexts(aType, aDiscriminators);
    for (TaskContextMetadata meta : metas) {
        Map<String, String> discriminators = storage.retrieveBinary(meta.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter()).getMap();
        // Interpret the stored discriminators as constraints on the current
        // configuration; the first compatible context wins.
        if (ImportUtil.matchConstraints(discriminators, config, false)) {
            return meta;
        }
    }
    throw ImportUtil.createContextNotFoundException(aType, aDiscriminators);
}
Use of org.dkpro.lab.storage.impl.PropertiesAdapter in project dkpro-tc by dkpro.
In class InnerBatchReport, method execute:
@Override
// Aggregates per-fold id2outcome files (plus optional majority-class and random
// baselines) from all machine-learning subtasks into combined reports, and collects
// the subtasks' discriminator settings into one Properties object along the way.
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();
    Properties prop = new Properties();
    List<File> id2outcomeFiles = new ArrayList<>();
    List<File> baselineMajorityClass2outcomeFiles = new ArrayList<>();
    List<File> baselineRandom2outcomeFiles = new ArrayList<>();
    Set<String> ids = getTaskIdsFromMetaData(getSubtasks());
    for (String id : ids) {
        // Only facade tasks are of interest at this level; skip everything else.
        if (!TcTaskTypeUtil.isFacadeTask(store, id)) {
            continue;
        }
        // collectTasks expands a task id to itself plus its nested subtasks;
        // remove the facade id so only the nested tasks remain.
        Set<String> wrap = new HashSet<>();
        wrap.add(id);
        Set<String> subTaskId = collectTasks(wrap);
        subTaskId.remove(id);
        // Should be only one anyway?
        for (String subId : subTaskId) {
            if (!TcTaskTypeUtil.isMachineLearningAdapterTask(store, subId)) {
                continue;
            }
            // NOTE(review): this retrieval depends only on the facade id, not subId,
            // so it is repeated per subtask — presumably harmless since there is
            // usually a single ML subtask (see comment above); verify before hoisting.
            Map<String, String> discriminatorsMap = store.retrieveBinary(id, Task.DISCRIMINATORS_KEY, new PropertiesAdapter()).getMap();
            File id2outcomeFile = store.locateKey(subId, ID_OUTCOME_KEY);
            id2outcomeFiles.add(id2outcomeFile);
            // The baseline files are optional; collect them only when present.
            File baselineMajority2outcomeFile = store.locateKey(subId, BASELINE_MAJORITIY_ID_OUTCOME_KEY);
            if (isAvailable(baselineMajority2outcomeFile)) {
                baselineMajorityClass2outcomeFiles.add(baselineMajority2outcomeFile);
            }
            File baselineRandom2outcomeFile = store.locateKey(subId, BASELINE_RANDOM_ID_OUTCOME_KEY);
            if (isAvailable(baselineRandom2outcomeFile)) {
                baselineRandom2outcomeFiles.add(baselineRandom2outcomeFile);
            }
            // Merge this task's discriminators into the shared properties;
            // later tasks overwrite earlier values for identical keys.
            for (Entry<String, String> e : discriminatorsMap.entrySet()) {
                String key = e.getKey();
                String value = e.getValue();
                prop.setProperty(key, value);
            }
        }
    }
    String learningMode = getDiscriminator(store, ids, DIM_LEARNING_MODE);
    writeCombinedOutcomeReport(FILE_COMBINED_ID_OUTCOME_KEY, aggregate(learningMode, id2outcomeFiles));
    writeCombinedOutcomeReport(FILE_COMBINED_BASELINE_MAJORITY_OUTCOME_KEY, aggregate(learningMode, baselineMajorityClass2outcomeFiles));
    writeCombinedOutcomeReport(FILE_COMBINED_BASELINE_RANDOM_OUTCOME_KEY, aggregate(learningMode, baselineRandom2outcomeFiles));
}
Aggregations