Use of org.dkpro.lab.storage.StorageService in the dkpro-tc project (by dkpro).
The class BatchCrossValidationReport, method execute:
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();
    // One row per cross-validation task: discriminator settings plus metric results
    // (and, where defined, baseline results). Missing cells default to "".
    TcFlexTable<String> table = TcFlexTable.forClass(String.class);
    table.setDefaultValue("");
    Set<String> idPool = getTaskIdsFromMetaData(getSubtasks());
    String learningMode = determineLearningMode(store, idPool);
    for (String id : idPool) {
        if (!TcTaskTypeUtil.isCrossValidationTask(store, id)) {
            continue;
        }
        Map<String, String> discriminatorsMap = getDiscriminatorsForContext(store, id, Task.DISCRIMINATORS_KEY);
        discriminatorsMap = ReportUtils.removeKeyRedundancy(discriminatorsMap);
        Map<String, String> values = new HashMap<String, String>();
        values.putAll(discriminatorsMap);
        // The classification result is always there
        File combinedId2outcome = store.locateKey(id, FILE_COMBINED_ID_OUTCOME_KEY);
        Map<String, String> results = MetricComputationUtil.getResults(combinedId2outcome, learningMode);
        values.putAll(results);
        // Majority/random baselines are not defined for regression, i.e. their
        // files might not exist; the helper silently skips missing files.
        File majBaseline = store.locateKey(id, FILE_COMBINED_BASELINE_MAJORITY_OUTCOME_KEY);
        addBaselineResults(values, majBaseline, learningMode, ".MajorityBaseline");
        File randomBaseline = store.locateKey(id, FILE_COMBINED_BASELINE_RANDOM_OUTCOME_KEY);
        addBaselineResults(values, randomBaseline, learningMode, ".RandomBaseline");
        table.addRow(getContextLabel(id), values);
    }
    /*
     * TODO: make rows to columns e.g. create a new table and set columns to rows of old table
     * and rows to columns but than must be class FlexTable in this case adapted accordingly:
     * enable setting
     */
    ReportUtils.writeExcelAndCSV(getContext(), getContextLabel(), table, EVAL_FILE_NAME, SUFFIX_EXCEL, SUFFIX_CSV);
}

/**
 * Adds the metric results computed from a baseline id2outcome file into
 * {@code values}, suffixing each metric key (e.g. ".MajorityBaseline").
 * Does nothing if the baseline file is not available.
 */
private void addBaselineResults(Map<String, String> values, File baseline, String learningMode, String keySuffix)
        throws Exception {
    if (isAvailable(baseline)) {
        Map<String, String> r = MetricComputationUtil.getResults(baseline, learningMode);
        for (Entry<String, String> e : r.entrySet()) {
            values.put(e.getKey() + keySuffix, e.getValue());
        }
    }
}
Use of org.dkpro.lab.storage.StorageService in the dkpro-tc project (by dkpro).
The class BatchTrainTestReport, method execute:
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();
    // One row per machine-learning adapter task reached through a facade task:
    // discriminator settings plus metric results (and baselines, if present).
    TcFlexTable<String> table = TcFlexTable.forClass(String.class);
    table.setDefaultValue("");
    Set<String> taskIds = getTaskIdsFromMetaData(getSubtasks());
    for (String facadeId : taskIds) {
        if (!TcTaskTypeUtil.isFacadeTask(store, facadeId)) {
            continue;
        }
        // Expand the facade into its subtasks, dropping the facade itself.
        Set<String> seed = new HashSet<>();
        seed.add(facadeId);
        Set<String> subTaskIds = collectTasks(seed);
        subTaskIds.remove(facadeId);
        // Should be only one anyway?
        for (String mlTaskId : subTaskIds) {
            if (!TcTaskTypeUtil.isMachineLearningAdapterTask(store, mlTaskId)) {
                continue;
            }
            Map<String, String> row = getDiscriminators(store, mlTaskId);
            row = ReportUtils.clearDiscriminatorsByExcludePattern(row, discriminatorsToExclude);
            row = ReportUtils.removeKeyRedundancy(row);
            // Fold the metric results into the same row as the discriminators.
            File id2o = getId2Outcome(mlTaskId);
            String mode = getDiscriminator(store, mlTaskId, DIM_LEARNING_MODE);
            row.putAll(MetricComputationUtil.getResults(id2o, mode));
            // Baselines might be absent (e.g. not defined for regression).
            File majBaseline = getBaselineMajorityClassId2Outcome(mlTaskId);
            if (isAvailable(majBaseline)) {
                MetricComputationUtil.getResults(majBaseline, mode)
                        .forEach((k, v) -> row.put(k + ".MajorityBaseline", v));
            }
            File randomBaseline = getBaselineRandomId2Outcome(mlTaskId);
            if (isAvailable(randomBaseline)) {
                MetricComputationUtil.getResults(randomBaseline, mode)
                        .forEach((k, v) -> row.put(k + ".RandomBaseline", v));
            }
            table.addRow(getContextLabel(mlTaskId), row);
        }
    }
    ReportUtils.writeExcelAndCSV(getContext(), getContextLabel(), table, EVAL_FILE_NAME, SUFFIX_EXCEL, SUFFIX_CSV);
}
Use of org.dkpro.lab.storage.StorageService in the dkpro-tc project (by dkpro).
The class DeepLearningInnerBatchReport, method execute:
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();
    // NOTE(review): a java.util.Properties object was previously populated here
    // with every task's discriminators but was never stored or read anywhere in
    // this method — removed as dead code. Restore and write it out if the
    // discriminators were intended to be part of the report.
    List<File> id2outcomeFiles = new ArrayList<>();
    Set<String> ids = getTaskIdsFromMetaData(getSubtasks());
    for (String id : ids) {
        if (!TcTaskTypeUtil.isMachineLearningAdapterTask(store, id)) {
            continue;
        }
        File id2outcomeFile = store.locateKey(id, Constants.ID_OUTCOME_KEY);
        id2outcomeFiles.add(id2outcomeFile);
    }
    // Merge all per-task id2outcome files into a single combined report.
    String learningMode = getDiscriminator(store, ids, DIM_LEARNING_MODE);
    ID2OutcomeCombiner<String> aggregator = new ID2OutcomeCombiner<>(learningMode);
    for (File id2o : id2outcomeFiles) {
        aggregator.add(id2o, learningMode);
    }
    writeCombinedOutcomeReport(aggregator.generateId2OutcomeFile());
}
Use of org.dkpro.lab.storage.StorageService in the dkpro-tc project (by dkpro).
The class BasicResultReport, method writeToDisk:
/**
 * Stores the given result properties under this context's OUTPUT_FILE key.
 * Uses try-with-resources instead of a manual finally/closeQuietly block,
 * so failures on close are no longer silently swallowed.
 */
private void writeToDisk(Properties pa) throws Exception {
    StorageService store = getContext().getStorageService();
    File key = store.locateKey(getContext().getId(), OUTPUT_FILE);
    try (FileOutputStream fos = new FileOutputStream(key)) {
        pa.store(fos, "Results");
    }
}
Use of org.dkpro.lab.storage.StorageService in the dkpro-tc project (by dkpro).
The class BatchRuntimeReport, method execute:
@Override
public void execute() throws Exception {
    // Collect per-task execution times, preserving discovery order for output.
    List<String> orderedIds = new ArrayList<>();
    StorageService store = getContext().getStorageService();
    Set<String> taskIds = getTaskIdsFromMetaData(getSubtasks());
    taskIds = readInnerTasksIfCrossValidation(taskIds);
    for (String id : taskIds) {
        if (!TcTaskTypeUtil.isFacadeTask(store, id)) {
            registerTime(id, getExecutionTime(id));
            orderedIds.add(id);
            continue;
        }
        // Facade tasks are not registered they are just a shell and do not much anyway
        // — record their subtasks instead.
        Set<String> subTasks = collectSubtasks(id);
        subTasks.remove(id);
        for (String subId : subTasks) {
            registerTime(subId, getExecutionTime(subId));
            orderedIds.add(subId);
        }
    }
    String output = buildOutput(orderedIds);
    File runtime = getContext().getFile(RUNTIME_KEY, AccessMode.READWRITE);
    FileUtils.writeStringToFile(runtime, output, "utf-8");
}
Aggregations