Use of org.apache.spark.SparkContext in project Gaffer by gchq.
From the class GetDataFrameOfElementsExample, method runExamples:
@Override
public void runExamples() {
    // Spark emits some log output even when the level is set to OFF, so logging
    // is actively disabled for the duration of the examples.
    ROOT_LOGGER.setLevel(Level.OFF);
    final SparkConf sparkConf = new SparkConf()
            .setMaster("local")
            .setAppName("getDataFrameOfElementsWithEntityGroup")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final SparkContext sc = new SparkContext(sparkConf);
    sc.setLogLevel("OFF");
    final SQLContext sqlc = new SQLContext(sc);
    final Graph graph = getGraph();
    try {
        // Run both variants of the example against the same graph.
        getDataFrameOfElementsWithEntityGroup(sqlc, graph);
        getDataFrameOfElementsWithEdgeGroup(sqlc, graph);
    } catch (final OperationException e) {
        throw new RuntimeException(e);
    } finally {
        // Previously the context was only stopped and logging only restored on
        // the success path, leaking the SparkContext when an example failed.
        sc.stop();
        ROOT_LOGGER.setLevel(Level.INFO);
    }
}
Use of org.apache.spark.SparkContext in project geode by apache.
From the class JavaAPITest, method createCommonMocks:
@SuppressWarnings("unchecked")
public Tuple3<SparkContext, GeodeConnectionConf, GeodeConnection> createCommonMocks() {
    // Build the mocked object graph used by several tests: a SparkContext plus a
    // connection configuration that hands out the mocked connection.
    final GeodeConnection connection = mock(GeodeConnection.class);
    final GeodeConnectionConf connConf = mock(GeodeConnectionConf.class);
    final SparkContext sparkContext = mock(SparkContext.class);
    when(connConf.getConnection()).thenReturn(connection);
    // NOTE(review): the raw LinkedList appears deliberate — an empty locator list is
    // stubbed and the raw type sidesteps the generic return signature, which is
    // presumably why the method carries @SuppressWarnings("unchecked"). Confirm
    // before tightening to a parameterized type.
    when(connConf.locators()).thenReturn(new LinkedList());
    return new Tuple3<>(sparkContext, connConf, connection);
}
Use of org.apache.spark.SparkContext in project geode by apache.
From the class JavaAPITest, method testJavaSparkContextFunctions:
@Test
public void testJavaSparkContextFunctions() throws Exception {
    // The Java wrapper context delegates to an underlying Scala SparkContext.
    final SparkContext scalaContext = mock(SparkContext.class);
    final JavaSparkContext javaContext = mock(JavaSparkContext.class);
    when(javaContext.sc()).thenReturn(scalaContext);
    // javaFunctions(...) should capture exactly the SparkContext exposed by sc().
    final GeodeJavaSparkContextFunctions functions = javaFunctions(javaContext);
    assertTrue(scalaContext == functions.sc);
}
Use of org.apache.spark.SparkContext in project incubator-systemml by apache.
From the class MLContextUtil, method getSparkContextFromProxy:
/**
 * Obtains the Spark Context from the MLContextProxy.
 *
 * @return the Spark Context associated with the active MLContext
 */
public static SparkContext getSparkContextFromProxy() {
    // Resolve the currently active MLContext and delegate the lookup.
    final MLContext activeContext = MLContextProxy.getActiveMLContextForAPI();
    return getSparkContext(activeContext);
}
Use of org.apache.spark.SparkContext in project cdap by caskdata.
From the class NaiveBayesClassifier, method initialize:
@Override
public void initialize(SparkExecutionPluginContext context) throws Exception {
    // Locate the serialized model inside the configured file set.
    final FileSet modelFileSet = context.getDataset(config.fileSetName);
    final Location location = modelFileSet.getBaseLocation().append(config.path);
    if (!location.exists()) {
        throw new IllegalArgumentException(String.format("Failed to find model to use for classification. Location does not exist: %s.", location));
    }
    // load the model from a file in the model fileset
    final JavaSparkContext jsc = context.getSparkContext();
    final SparkContext sc = JavaSparkContext.toSparkContext(jsc);
    loadedModel = NaiveBayesModel.load(sc, location.toURI().getPath());
    // Feature hashing with a fixed dimensionality of 100 — must match training.
    tf = new HashingTF(100);
}
Aggregations