Use of io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.TrainingDatasetFilter in project hopsworks by logicalclocks.
From class TrainingDatasetControllerTest, method testconvertToFilterEntities_leftLogicRightFilter:
@Test
public void testconvertToFilterEntities_leftLogicRightFilter() throws Exception {
  // fg.feature > 1 and fg.feature > 2
  // "fg.feature > 1" is stored as a nested filter logic and "fg.feature > 2" as a plain filter
  TrainingDataset trainingDataset = new TrainingDataset();
  Feature f1 = new Feature("test_f", "fg0");

  FilterLogic head = new FilterLogic();
  head.setType(AND);

  FilterLogic left = new FilterLogic();
  Filter leftLeft = new Filter(f1, GREATER_THAN, "1");
  left.setLeftFilter(leftLeft);
  left.setType(SINGLE);
  head.setLeftLogic(left);

  Filter right = new Filter(f1, GREATER_THAN, "2");
  head.setRightFilter(right);

  List<TrainingDatasetFilter> actual = target.convertToFilterEntities(head, trainingDataset, "L");

  List<TrainingDatasetFilter> expected = new ArrayList<>();
  expected.add(createTrainingDatasetFilter(null, AND, "L"));
  expected.add(createTrainingDatasetFilter(
      createTrainingDatasetFilterCondition("test_f", GREATER_THAN, "2"), SINGLE, "L.R"));
  expected.add(createTrainingDatasetFilter(
      createTrainingDatasetFilterCondition("test_f", GREATER_THAN, "1"), SINGLE, "L.L"));

  Assert.assertEquals(expected.size(), actual.size());
  Assert.assertTrue(expected.containsAll(actual));
  Assert.assertTrue(actual.containsAll(expected));
}
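The helpers createTrainingDatasetFilter and createTrainingDatasetFilterCondition are defined elsewhere in TrainingDatasetControllerTest and are not part of this excerpt; note also that the containsAll assertions can only pass if TrainingDatasetFilter compares by value (condition, type, path) rather than by identity. A minimal sketch of what the helpers might look like, assuming no-arg entity constructors and setter names that mirror the fields used above, is:

// Hypothetical reconstruction of the test helpers; the real ones live in
// TrainingDatasetControllerTest and the actual entity setter names may differ.
private TrainingDatasetFilter createTrainingDatasetFilter(TrainingDatasetFilterCondition condition,
    SqlFilterLogic type, String path) {
  TrainingDatasetFilter filter = new TrainingDatasetFilter();  // assumed no-arg constructor
  filter.setCondition(condition);
  filter.setType(type);
  filter.setPath(path);
  return filter;
}

// SqlCondition is assumed to be the operator enum behind the statically imported GREATER_THAN.
private TrainingDatasetFilterCondition createTrainingDatasetFilterCondition(String featureName,
    SqlCondition operator, String value) {
  TrainingDatasetFilterCondition condition = new TrainingDatasetFilterCondition();  // assumed no-arg constructor
  condition.setFeature(featureName);   // assumed setters; the real entity may store the feature differently
  condition.setCondition(operator);
  condition.setValue(value);
  return condition;
}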
Use of io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.TrainingDatasetFilter in project hopsworks by logicalclocks.
From class TrainingDatasetControllerTest, method testconvertToFilterEntities_featureComparison:
@Test
public void testconvertToFilterEntities_featureComparison() throws Exception {
  // fg.feature > fg.otherFeature and fg.feature > fg1.otherFeature
  TrainingDataset trainingDataset = new TrainingDataset();
  Feature f1 = new Feature("test_f", "fg0");

  FilterLogic head = new FilterLogic();
  head.setType(AND);

  FilterValue filterValueLeft = new FilterValue(0, "fg0", "test_f1");
  Filter left = new Filter(f1, GREATER_THAN, filterValueLeft);
  head.setLeftFilter(left);

  FilterValue filterValueRight = new FilterValue(1, "fg1", "test_f2");
  Filter right = new Filter(f1, GREATER_THAN, filterValueRight);
  head.setRightFilter(right);

  List<TrainingDatasetFilter> actual = target.convertToFilterEntities(head, trainingDataset, "L");

  List<TrainingDatasetFilter> expected = new ArrayList<>();
  expected.add(createTrainingDatasetFilter(null, AND, "L"));
  expected.add(createTrainingDatasetFilter(
      createTrainingDatasetFilterCondition("test_f", GREATER_THAN, "test_f1", null, 0), SINGLE, "L.L"));
  expected.add(createTrainingDatasetFilter(
      createTrainingDatasetFilterCondition("test_f", GREATER_THAN, "test_f2", null, 1), SINGLE, "L.R"));

  Assert.assertEquals(expected.size(), actual.size());
  Assert.assertTrue(expected.containsAll(actual));
  Assert.assertTrue(actual.containsAll(expected));
}
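The feature-comparison expectations use a five-argument overload of createTrainingDatasetFilterCondition that is likewise not shown here. Judging only from the calls above and the FilterValue(index, featureGroupAlias, featureName) construction, the extra arguments appear to carry the compared feature's name, an unused literal value (null in both calls), and the integer passed to FilterValue; a hedged sketch under those assumptions:

// Hypothetical overload for feature-to-feature comparisons; the argument meaning is inferred,
// not taken from the real TrainingDatasetControllerTest.
private TrainingDatasetFilterCondition createTrainingDatasetFilterCondition(String featureName,
    SqlCondition operator, String valueFeatureName, String value, Integer valueFeatureGroupId) {
  TrainingDatasetFilterCondition condition = new TrainingDatasetFilterCondition();
  condition.setFeature(featureName);
  condition.setCondition(operator);
  // Assumption: for feature comparisons the compared feature's name is persisted as the value,
  // and the FilterValue integer identifies the feature group it belongs to. The fourth
  // argument is null in both calls above, so this sketch ignores it.
  condition.setValue(valueFeatureName);
  condition.setValueFeatureGroupId(valueFeatureGroupId);
  return condition;
}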
Use of io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.TrainingDatasetFilter in project hopsworks by logicalclocks.
From class TrainingDatasetController, method makeTrainingDatasetFilter:
/**
 * Builds one TrainingDatasetFilter entity for a node of the filter tree. The path encodes the node's
 * position (e.g. "L", "L.L", "L.R") and the type is the logic operator; a condition is only populated
 * when a Filter is given, while composite (AND/OR) nodes carry only type and path.
 */
private TrainingDatasetFilter makeTrainingDatasetFilter(String path, TrainingDataset trainingDataset,
    Filter filter, SqlFilterLogic type) {
  TrainingDatasetFilter trainingDatasetFilter = new TrainingDatasetFilter(trainingDataset);
  TrainingDatasetFilterCondition condition = filter == null ? null : convertFilter(filter, trainingDatasetFilter);
  trainingDatasetFilter.setCondition(condition);
  trainingDatasetFilter.setPath(path);
  trainingDatasetFilter.setType(type);
  return trainingDatasetFilter;
}
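The caller, convertToFilterEntities, is not included in this excerpt, but the two tests above pin down its contract: a composite node is stored with a null condition at the current path, and its children are stored under path + ".L" and path + ".R", with plain filters wrapped as SINGLE entries. The following is only a sketch of a recursive traversal that would satisfy those tests, not the actual Hopsworks implementation; it assumes FilterLogic getters (getType, getLeftFilter, getLeftLogic, getRightFilter, getRightLogic) matching the setters used in the tests:

// Sketch only: illustrates a flattening strategy consistent with the tests above.
List<TrainingDatasetFilter> convertToFilterEntities(FilterLogic logic, TrainingDataset trainingDataset, String path) {
  List<TrainingDatasetFilter> filters = new ArrayList<>();
  if (logic == null) {
    return filters;
  }
  if (SqlFilterLogic.SINGLE.equals(logic.getType())) {
    // A leaf logic node stores its single filter directly at the current path
    // (the tests place it on the left side of the SINGLE node).
    filters.add(makeTrainingDatasetFilter(path, trainingDataset, logic.getLeftFilter(), SqlFilterLogic.SINGLE));
    return filters;
  }
  // A composite node (AND/OR) is stored without a condition...
  filters.add(makeTrainingDatasetFilter(path, trainingDataset, null, logic.getType()));
  // ...and its children are stored under path + ".L" / path + ".R".
  if (logic.getLeftFilter() != null) {
    filters.add(makeTrainingDatasetFilter(path + ".L", trainingDataset, logic.getLeftFilter(), SqlFilterLogic.SINGLE));
  } else {
    filters.addAll(convertToFilterEntities(logic.getLeftLogic(), trainingDataset, path + ".L"));
  }
  if (logic.getRightFilter() != null) {
    filters.add(makeTrainingDatasetFilter(path + ".R", trainingDataset, logic.getRightFilter(), SqlFilterLogic.SINGLE));
  } else {
    filters.addAll(convertToFilterEntities(logic.getRightLogic(), trainingDataset, path + ".R"));
  }
  return filters;
}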
Use of io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.TrainingDatasetFilter in project hopsworks by logicalclocks.
From class TrainingDatasetController, method convertToFilterLogic:
/**
 * Reconstructs a {@link io.hops.hopsworks.common.featurestore.query.filter.FilterLogic} tree from a collection of
 * {@link io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.TrainingDatasetFilter} entities.
 * Logic:
 * 1. find the head node by its path
 * 2. if its type is SINGLE, set the single filter and return;
 *    otherwise collect the left/right children by path prefix and assign them as left/right filters or logics
 *
 * @param trainingDatasetFilters persisted filter entities of the training dataset
 * @param features features referenced by the persisted conditions, used when converting them back to filters
 * @param headPath path of the node to reconstruct (e.g. "L")
 * @return the reconstructed filter logic, or null if no filters are given
 * @throws FeaturestoreException if the head node cannot be found
 */
FilterLogic convertToFilterLogic(Collection<TrainingDatasetFilter> trainingDatasetFilters,
    Map<String, Feature> features, String headPath) throws FeaturestoreException {
  if (trainingDatasetFilters.size() == 0) {
    return null;
  }
  FilterLogic filterLogic = new FilterLogic();
  TrainingDatasetFilter headNode = trainingDatasetFilters.stream()
      .filter(filter -> filter.getPath().equals(headPath))
      .findFirst()
      .orElseThrow(() -> new FeaturestoreException(
          RESTCodes.FeaturestoreErrorCode.COULD_NOT_GET_QUERY_FILTER, Level.WARNING));
  filterLogic.setType(headNode.getType());
  if (headNode.getType().equals(SqlFilterLogic.SINGLE)) {
    Filter filter = convertToFilter(headNode.getCondition(), features);
    filterLogic.setLeftFilter(filter);
  } else {
    List<TrainingDatasetFilter> leftChildren = trainingDatasetFilters.stream()
        .filter(filter -> filter.getPath().startsWith(headPath + ".L"))
        .collect(Collectors.toList());
    List<TrainingDatasetFilter> rightChildren = trainingDatasetFilters.stream()
        .filter(filter -> filter.getPath().startsWith(headPath + ".R"))
        .collect(Collectors.toList());
    if (!leftChildren.isEmpty()) {
      if (leftChildren.size() == 1) {
        // A single child is a leaf filter; convert its condition directly.
        filterLogic.setLeftFilter(convertToFilter(leftChildren.get(0).getCondition(), features));
      } else {
        // Multiple children form a nested logic node; recurse with the extended path.
        filterLogic.setLeftLogic(convertToFilterLogic(leftChildren, features, headPath + ".L"));
      }
    }
    if (!rightChildren.isEmpty()) {
      if (rightChildren.size() == 1) {
        filterLogic.setRightFilter(convertToFilter(rightChildren.get(0).getCondition(), features));
      } else {
        filterLogic.setRightLogic(convertToFilterLogic(rightChildren, features, headPath + ".R"));
      }
    }
  }
  return filterLogic;
}
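Together, convertToFilterEntities and convertToFilterLogic round-trip a filter tree through its persisted form. A minimal usage sketch, assuming access to a TrainingDatasetController instance named controller, the same static imports (AND, GREATER_THAN) as the tests above, and a feature map keyed by feature name (the exact key format is an assumption), is:

// Build fg0.test_f > 1 AND fg0.test_f > 2, flatten it to entities, then rebuild it.
Feature feature = new Feature("test_f", "fg0");
FilterLogic logic = new FilterLogic();
logic.setType(AND);
logic.setLeftFilter(new Filter(feature, GREATER_THAN, "1"));
logic.setRightFilter(new Filter(feature, GREATER_THAN, "2"));

TrainingDataset trainingDataset = new TrainingDataset();
List<TrainingDatasetFilter> entities = controller.convertToFilterEntities(logic, trainingDataset, "L");
// Per the tests above: an AND node at path "L" plus two SINGLE nodes at "L.L" and "L.R".

Map<String, Feature> features = Collections.singletonMap("test_f", feature);  // assumed key: feature name
FilterLogic rebuilt = controller.convertToFilterLogic(entities, features, "L");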