Search in sources :

Example 36 with SparkSession

use of org.apache.spark.sql.SparkSession in project net.jgp.labs.spark by jgperrin.

The start method of the ArrayToDatasetApp class.

private void start() {
    // Build (or reuse) a session attached to the remote standalone master.
    SparkSession spark = SparkSession.builder()
        .appName("Array to Dataset")
        .master("spark://10.0.100.81:7077")
        .getOrCreate();

    // Wrap the literal values in a List and turn it into a typed Dataset<String>.
    List<String> values = Arrays.asList("a", "b", "c", "d");
    Dataset<String> ds = spark.createDataset(values, Encoders.STRING());
    ds.show();
}
Also used : SparkSession(org.apache.spark.sql.SparkSession)

Example 37 with SparkSession

use of org.apache.spark.sql.SparkSession in project net.jgp.labs.spark by jgperrin.

The start method of the BooksCsvToDataset class.

private void start() {
    // Local session used to load a CSV file into an untyped Dataset<Row>.
    SparkSession spark = SparkSession.builder()
        .appName("Book CSV to Dataset")
        .master("local")
        .getOrCreate();

    String filename = "data/books.csv";

    Dataset<Row> df = spark.read()
        .format("csv")
        .option("inferSchema", "false") // we are not inferring the schema for now
        .option("header", "true")
        .load(filename);

    df.show();
    // Without schema inference, every column is read as a string.
    df.printSchema();
}
Also used : SparkSession(org.apache.spark.sql.SparkSession) Row(org.apache.spark.sql.Row)

Example 38 with SparkSession

use of org.apache.spark.sql.SparkSession in project net.jgp.labs.spark by jgperrin.

The start method of the JsonComplexArrayToDataset class.

private void start() {
    // Local session used to ingest a complex JSON array.
    SparkSession spark = SparkSession.builder()
        .appName("Complex JSON array to Dataset")
        .master("local")
        .getOrCreate();

    String filename = "data/array-complex.json";

    // Time the JSON ingestion (wall-clock, milliseconds).
    long t0 = System.currentTimeMillis();
    Dataset<Row> df = spark.read().json(filename);
    long t1 = System.currentTimeMillis();
    System.out.println("Processing took " + (t1 - t0) + " ms");

    df.show();
    df.printSchema();
}
Also used : SparkSession(org.apache.spark.sql.SparkSession) Row(org.apache.spark.sql.Row)

Example 39 with SparkSession

use of org.apache.spark.sql.SparkSession in project net.jgp.labs.spark by jgperrin.

The start method of the JsonToDataset class.

private void start() {
    // Local session used to ingest a nested JSON document.
    SparkSession spark = SparkSession.builder()
        .appName("JSON to Dataset")
        .master("local")
        .getOrCreate();

    String filename = "data/north-carolina-school-performance-data.json";

    // Time the JSON ingestion (wall-clock, milliseconds).
    long t0 = System.currentTimeMillis();
    Dataset<Row> df = spark.read().json(filename);
    long t1 = System.currentTimeMillis();
    System.out.println("Processing took " + (t1 - t0) + " ms");

    df.show();
    df.printSchema();

    // Flattening: promote the nested fields.district value to a top-level column.
    df = df.withColumn("district", df.col("fields.district"));
    // NOTE: dropping a nested column is a no-op — the column stays (Spark 2.0.0);
    // kept here to demonstrate that limitation.
    df = df.drop(df.col("fields.district"));
    df.show();
    df.printSchema();
}
Also used : SparkSession(org.apache.spark.sql.SparkSession) Row(org.apache.spark.sql.Row)

Example 40 with SparkSession

use of org.apache.spark.sql.SparkSession in project net.jgp.labs.spark by jgperrin.

The start method of the CustomDataSourceToDataset class.

private void start() {
    // Local session used to exercise a custom data source.
    SparkSession spark = SparkSession.builder()
        .appName("Custom data set to Dataset")
        .master("local")
        .getOrCreate();

    String filename = "data/array-complex.json";

    // Time the load through the custom substring-counting data source.
    long t0 = System.currentTimeMillis();
    Dataset<Row> df = spark.read()
        .format("net.jgp.labs.spark.x.datasource.SubStringCounterDataSource")
        .option(K.COUNT + "0", "a")     // count the number of 'a'
        .option(K.COUNT + "1", "b")     // count the number of 'b'
        .option(K.COUNT + "2", "color") // count the number of 'color'
        .load(filename);                // local file
    long t1 = System.currentTimeMillis();
    log.info("Processing took {} ms", t1 - t0);

    df.printSchema();
    df.show();
}
Also used : SparkSession(org.apache.spark.sql.SparkSession) Row(org.apache.spark.sql.Row)

Aggregations

SparkSession (org.apache.spark.sql.SparkSession)53 Row (org.apache.spark.sql.Row)43 StructType (org.apache.spark.sql.types.StructType)11 ArrayList (java.util.ArrayList)6 JavaSparkContext (org.apache.spark.api.java.JavaSparkContext)6 StructField (org.apache.spark.sql.types.StructField)6 SparkConf (org.apache.spark.SparkConf)4 JavaRDD (org.apache.spark.api.java.JavaRDD)3 Script (org.apache.sysml.api.mlcontext.Script)3 Test (org.junit.Test)3 Dataset (org.apache.spark.sql.Dataset)2 StreamingQuery (org.apache.spark.sql.streaming.StreamingQuery)2 StreamingQueryException (org.apache.spark.sql.streaming.StreamingQueryException)2 DMLScript (org.apache.sysml.api.DMLScript)2 RUNTIME_PLATFORM (org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM)2 MLContext (org.apache.sysml.api.mlcontext.MLContext)2 Matrix (org.apache.sysml.api.mlcontext.Matrix)2 MatrixBlock (org.apache.sysml.runtime.matrix.data.MatrixBlock)2 MatrixIndexes (org.apache.sysml.runtime.matrix.data.MatrixIndexes)2 File (java.io.File)1