Use of org.apache.spark.sql.sources.Filter in project Gaffer by gchq: the class FiltersToOperationConverter, method applyVertexSourceDestinationFilters.
/**
 * Inspects {@code filters} for an {@code EqualTo} on the vertex, source or destination
 * column. On the first match the view is narrowed to the relevant element type
 * (entities for a vertex filter, edges for a source/destination filter) and a
 * {@link GetRDDOfElements} is seeded with the filter value. If no such filter is
 * present, a {@link GetRDDOfAllElements} over the cloned view is returned instead.
 *
 * <p>NOTE(review): both branches seed with {@code new EntitySeed(equalTo.value())} —
 * presumably edges incident to that vertex are retrieved via the edge-only view;
 * confirm this is intentional for the source/destination case.
 *
 * @param view the view to restrict according to the filters
 * @return the operation to execute; never {@code null}
 */
private AbstractGetRDD<?> applyVertexSourceDestinationFilters(final View view) {
    View clonedView = view.clone();
    AbstractGetRDD<?> operation = null;
    for (final Filter filter : filters) {
        if (!(filter instanceof EqualTo)) {
            continue;
        }
        final EqualTo equalTo = (EqualTo) filter;
        final String attribute = equalTo.attribute();
        if (attribute.equals(SchemaToStructTypeConverter.VERTEX_COL_NAME)) {
            // Only entities are relevant, so remove any edge groups from the view
            LOGGER.info("Found EqualTo filter with attribute {}, setting views to only contain entity groups", attribute);
            clonedView = entityOnlyView(view);
        } else if (attribute.equals(SchemaToStructTypeConverter.SRC_COL_NAME)
                || attribute.equals(SchemaToStructTypeConverter.DST_COL_NAME)) {
            // Only edges are relevant, so remove any entity groups from the view
            LOGGER.info("Found EqualTo filter with attribute {}, setting views to only contain edge groups", attribute);
            clonedView = edgeOnlyView(view);
        } else {
            continue;
        }
        LOGGER.info("Setting operation to GetRDDOfElements");
        operation = new GetRDDOfElements<>(sqlContext.sparkContext(), new EntitySeed(equalTo.value()));
        operation.setView(clonedView);
        break;
    }
    if (operation == null) {
        // No vertex/source/destination filter found - fall back to a full scan.
        LOGGER.debug("Setting operation to GetRDDOfAllElements");
        operation = new GetRDDOfAllElements(sqlContext.sparkContext());
        operation.setView(clonedView);
    }
    return operation;
}

/** Builds a view containing only the entity groups of the supplied view. */
private static View entityOnlyView(final View view) {
    View.Builder viewBuilder = new View.Builder();
    for (final String entityGroup : view.getEntityGroups()) {
        viewBuilder = viewBuilder.entity(entityGroup);
    }
    return viewBuilder.build();
}

/** Builds a view containing only the edge groups of the supplied view. */
private static View edgeOnlyView(final View view) {
    View.Builder viewBuilder = new View.Builder();
    for (final String edgeGroup : view.getEdgeGroups()) {
        viewBuilder = viewBuilder.edge(edgeGroup);
    }
    return viewBuilder.build();
}
Use of org.apache.spark.sql.sources.Filter in project Gaffer by gchq: the class FilterToOperationConverterTest, method testSpecifySource.
/**
 * An EqualTo filter on the source column should produce a GetRDDOfElements whose view
 * contains only edge groups and whose single seed is the filtered vertex value.
 */
@Test
public void testSpecifySource() throws OperationException {
    final Schema schema = getSchema();
    final SQLContext sqlContext = getSqlContext("testSpecifySource");
    final Filter[] filters = {new EqualTo(SchemaToStructTypeConverter.SRC_COL_NAME, "0")};
    final FiltersToOperationConverter converter =
            new FiltersToOperationConverter(sqlContext, getViewFromSchema(schema), schema, filters);
    final AbstractGetRDD<?> operation = converter.getOperation();
    assertTrue(operation instanceof GetRDDOfElements);
    assertEquals(0, operation.getView().getEntityGroups().size());
    assertEquals(EDGE_GROUPS, operation.getView().getEdgeGroups());
    // Collect the operation's seeds so they can be compared as a set.
    final Set<EntitySeed> extractedSeeds = new HashSet<>();
    for (final Object rawSeed : ((GetRDDOfElements) operation).getSeeds()) {
        extractedSeeds.add((EntitySeed) rawSeed);
    }
    assertEquals(Collections.singleton(new EntitySeed("0")), extractedSeeds);
    sqlContext.sparkContext().stop();
}
Use of org.apache.spark.sql.sources.Filter in project Gaffer by gchq: the class FilterToOperationConverterTest, method testIncompatibleGroups.
/**
 * Two EqualTo filters demanding different group names can never both match, so the
 * converter should return null rather than an operation.
 */
@Test
public void testIncompatibleGroups() throws OperationException {
    final Schema schema = getSchema();
    final SQLContext sqlContext = getSqlContext("testIncompatibleGroups");
    final Filter[] filters = {
            new EqualTo(SchemaToStructTypeConverter.GROUP, "A"),
            new EqualTo(SchemaToStructTypeConverter.GROUP, "B")
    };
    final FiltersToOperationConverter converter =
            new FiltersToOperationConverter(sqlContext, getViewFromSchema(schema), schema, filters);
    assertNull(converter.getOperation());
    sqlContext.sparkContext().stop();
}
Use of org.apache.spark.sql.sources.Filter in project Gaffer by gchq: the class FilterToOperationConverterTest, method testSpecifyDestination.
/**
 * An EqualTo filter on the destination column should produce a GetRDDOfElements whose
 * view contains only edge groups and whose single seed is the filtered vertex value.
 */
@Test
public void testSpecifyDestination() throws OperationException {
    final Schema schema = getSchema();
    final SQLContext sqlContext = getSqlContext("testSpecifyDestination");
    final Filter[] filters = {new EqualTo(SchemaToStructTypeConverter.DST_COL_NAME, "0")};
    final FiltersToOperationConverter converter =
            new FiltersToOperationConverter(sqlContext, getViewFromSchema(schema), schema, filters);
    final AbstractGetRDD<?> operation = converter.getOperation();
    assertTrue(operation instanceof GetRDDOfElements);
    assertEquals(0, operation.getView().getEntityGroups().size());
    assertEquals(EDGE_GROUPS, operation.getView().getEdgeGroups());
    // Collect the operation's seeds so they can be compared as a set.
    final Set<EntitySeed> extractedSeeds = new HashSet<>();
    for (final Object rawSeed : ((GetRDDOfElements) operation).getSeeds()) {
        extractedSeeds.add((EntitySeed) rawSeed);
    }
    assertEquals(Collections.singleton(new EntitySeed("0")), extractedSeeds);
    sqlContext.sparkContext().stop();
}
Use of org.apache.spark.sql.sources.Filter in project Gaffer by gchq: the class FilterToOperationConverterTest, method testTwoGroups.
/**
 * An Or of two group filters (one entity group, one edge group) cannot be narrowed to a
 * seeded get, so the converter should fall back to GetRDDOfAllElements with a view
 * restricted to exactly those two groups.
 */
@Test
public void testTwoGroups() throws OperationException {
    final Schema schema = getSchema();
    final SQLContext sqlContext = getSqlContext("testTwoGroups");
    final Filter groupIsEntity = new EqualTo(SchemaToStructTypeConverter.GROUP, ENTITY_GROUP);
    final Filter groupIsEdge2 = new EqualTo(SchemaToStructTypeConverter.GROUP, EDGE_GROUP2);
    final Filter[] filters = {new Or(groupIsEntity, groupIsEdge2)};
    final FiltersToOperationConverter converter =
            new FiltersToOperationConverter(sqlContext, getViewFromSchema(schema), schema, filters);
    final AbstractGetRDD<?> operation = converter.getOperation();
    assertTrue(operation instanceof GetRDDOfAllElements);
    assertEquals(Collections.singleton(ENTITY_GROUP), operation.getView().getEntityGroups());
    assertEquals(Collections.singleton(EDGE_GROUP2), operation.getView().getEdgeGroups());
    sqlContext.sparkContext().stop();
}
Aggregations