Use of org.apache.spark.sql.SQLContext in project Gaffer by gchq.
In the class FilterToOperationConverterTest, method testSpecifyVertex.
@Test
public void testSpecifyVertex() throws OperationException {
    final Schema schema = getSchema();
    final SQLContext sqlContext = getSqlContext("testSpecifyVertex");

    // A single EqualTo filter on the vertex column should convert to a
    // GetRDDOfElements operation seeded with that vertex.
    final Filter[] filters = new Filter[1];
    filters[0] = new EqualTo(SchemaToStructTypeConverter.VERTEX_COL_NAME, "0");
    final FiltersToOperationConverter converter = new FiltersToOperationConverter(sqlContext, getViewFromSchema(schema), schema, filters);
    final AbstractGetRDD<?> operation = converter.getOperation();

    assertTrue(operation instanceof GetRDDOfElements);
    // Only the entity group should remain in the view; no edge groups.
    assertEquals(Collections.singleton(ENTITY_GROUP), operation.getView().getEntityGroups());
    assertEquals(0, operation.getView().getEdgeGroups().size());
    final Set<EntitySeed> seeds = new HashSet<>();
    for (final Object seed : ((GetRDDOfElements) operation).getSeeds()) {
        seeds.add((EntitySeed) seed);
    }
    assertEquals(Collections.singleton(new EntitySeed("0")), seeds);

    sqlContext.sparkContext().stop();
}
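The getSqlContext helper is defined elsewhere in the test class; a minimal sketch of what such a helper typically looks like for a local-mode Spark 1.x test follows. The master URL and construction details are assumptions, not the actual Gaffer helper.

    // Hypothetical helper: build a local SQLContext for a single test
    // (assumed implementation; the real one lives in the test superclass).
    private SQLContext getSqlContext(final String appName) {
        final SparkConf sparkConf = new SparkConf()
                .setMaster("local")
                .setAppName(appName);
        return new SQLContext(new SparkContext(sparkConf));
    }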
Use of org.apache.spark.sql.SQLContext in project Gaffer by gchq.
In the class FilterToOperationConverterTest, method testSpecifyMultiplePropertyFilters.
@Test
public void testSpecifyMultiplePropertyFilters() throws OperationException {
    final Schema schema = getSchema();
    final SQLContext sqlContext = getSqlContext("testSpecifyMultiplePropertyFilters");

    // Two property filters supplied together form an implicit conjunction.
    final Filter[] filters = new Filter[2];
    filters[0] = new GreaterThan("property1", 5);
    filters[1] = new LessThan("property4", 8L);
    final FiltersToOperationConverter converter = new FiltersToOperationConverter(sqlContext, getViewFromSchema(schema), schema, filters);
    final AbstractGetRDD<?> operation = converter.getOperation();
    assertTrue(operation instanceof GetRDDOfAllElements);

    // Only the groups ENTITY_GROUP and EDGE_GROUP should be in the view, as
    // only they have both property1 and property4.
    final View opView = operation.getView();
    final List<ConsumerFunctionContext<String, FilterFunction>> entityPostAggFilters = opView.getEntity(ENTITY_GROUP).getPostAggregationFilterFunctions();
    assertEquals(2, entityPostAggFilters.size());
    final List<String> expectedProperties = new ArrayList<>();
    expectedProperties.add("property1");
    expectedProperties.add("property4");
    assertEquals(1, entityPostAggFilters.get(0).getSelection().size());
    assertEquals(expectedProperties.get(0), entityPostAggFilters.get(0).getSelection().get(0));
    assertEquals(1, entityPostAggFilters.get(1).getSelection().size());
    assertEquals(expectedProperties.get(1), entityPostAggFilters.get(1).getSelection().get(0));

    // Each Spark filter should map to the corresponding Gaffer filter function.
    final List<FilterFunction> expectedFunctions = new ArrayList<>();
    expectedFunctions.add(new IsMoreThan(5, false));
    expectedFunctions.add(new IsLessThan(8L, false));
    assertEquals(expectedFunctions.get(0), entityPostAggFilters.get(0).getFunction());
    assertEquals(expectedFunctions.get(1), entityPostAggFilters.get(1).getFunction());

    final List<ConsumerFunctionContext<String, FilterFunction>> edgePostAggFilters = opView.getEdge(EDGE_GROUP).getPostAggregationFilterFunctions();
    assertEquals(2, edgePostAggFilters.size());
    assertEquals(1, edgePostAggFilters.get(0).getSelection().size());
    assertEquals(expectedProperties.get(0), edgePostAggFilters.get(0).getSelection().get(0));
    assertEquals(1, edgePostAggFilters.get(1).getSelection().size());
    assertEquals(expectedProperties.get(1), edgePostAggFilters.get(1).getSelection().get(0));

    sqlContext.sparkContext().stop();
}
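For context, GreaterThan and LessThan are the filter objects Spark's data sources API pushes down from a DataFrame predicate. A hedged sketch of a query that would yield the two filters used above; the DataFrame df and its Gaffer-backed relation are assumptions, not part of the test.

    // Hypothetical: given a DataFrame df backed by a Gaffer relation, this
    // predicate would be pushed down as GreaterThan("property1", 5) and
    // LessThan("property4", 8L) -- the same implicit conjunction the test builds.
    DataFrame filtered = df.filter(df.col("property1").gt(5)
            .and(df.col("property4").lt(8L)));
    filtered.show();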
Use of org.apache.spark.sql.SQLContext in project zeppelin by apache.
In the class SparkSqlInterpreter, method cancel.
@Override
public void cancel(InterpreterContext context) {
    SparkInterpreter sparkInterpreter = getSparkInterpreter();
    SQLContext sqlc = sparkInterpreter.getSQLContext();
    SparkContext sc = sqlc.sparkContext();
    // Cancel every Spark job belonging to this paragraph's job group.
    sc.cancelJobGroup(Utils.buildJobGroupId(context));
}
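Cancellation only works because the same job group id is attached to the jobs when the paragraph runs. A minimal sketch of the execution-side counterpart; the description string and interruptOnCancel flag here are assumptions, not Zeppelin's actual code.

    // Hypothetical execution-side pairing: tag jobs with the group id that
    // cancel(...) later targets.
    String jobGroup = Utils.buildJobGroupId(context);
    sc.setJobGroup(jobGroup, "Zeppelin SQL paragraph", false);  // assumed: no interrupt on cancel
    sqlc.sql(query).show();  // any jobs spawned here belong to jobGroup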
Use of org.apache.spark.sql.SQLContext in project geode by apache.
In the class OQLJavaDemo, method main.
public static void main(String[] argv) {
    if (argv.length != 1) {
        System.err.printf("Usage: OQLJavaDemo <locators>\n");
        return;
    }
    SparkConf conf = new SparkConf().setAppName("OQLJavaDemo");
    // Locator string, e.g. "192.168.1.47[10335]"
    conf.set(GeodeLocatorPropKey, argv[0]);
    JavaSparkContext sc = new JavaSparkContext(conf);
    SQLContext sqlContext = new org.apache.spark.sql.SQLContext(sc);
    // Run a Geode OQL query and expose the result as a Spark DataFrame.
    DataFrame df = javaFunctions(sqlContext).geodeOQL("select * from /str_str_region");
    System.out.println("======= DataFrame =======\n");
    df.show();
    sc.stop();
}
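Once the OQL result is a DataFrame, it can be queried with ordinary Spark SQL. A short hedged extension of the demo; the temp table name is an assumption.

    // Hypothetical follow-on: register the result and query it with Spark SQL
    // (Spark 1.x API, matching the DataFrame type used above).
    df.registerTempTable("str_str_region");
    DataFrame firstTen = sqlContext.sql("SELECT * FROM str_str_region LIMIT 10");
    firstTen.show();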
Use of org.apache.spark.sql.SQLContext in project geode by apache.
In the class JavaAPITest, method testSQLContextFunction.
@Test
public void testSQLContextFunction() throws Exception {
    SQLContext mockSQLContext = mock(SQLContext.class);
    GeodeJavaSQLContextFunctions wrapper = javaFunctions(mockSQLContext);
    assertTrue(wrapper.scf.getClass() == GeodeSQLContextFunctions.class);
}
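Here javaFunctions is the usual Scala-to-Java bridge: a static factory returning a thin wrapper around the Scala-side functions class. A sketch of the shape the assertion implies; the constructor and field wiring are assumptions, only the scf field and its class are confirmed by the test.

    // Hypothetical shape of the wrapper under test: it exposes the Scala
    // functions object through the scf field that the assertion inspects.
    public class GeodeJavaSQLContextFunctions {
        public final GeodeSQLContextFunctions scf;  // inspected by the test

        public GeodeJavaSQLContextFunctions(final SQLContext sqlContext) {
            this.scf = new GeodeSQLContextFunctions(sqlContext);  // assumed wiring
        }
    }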