Use of org.apache.spark.sql.Row in project net.jgp.labs.spark by jgperrin:
the start method of the JsonArrayToDataset class.
private void start() {
  // Spin up a local Spark session for this lab.
  SparkSession session = SparkSession.builder()
      .appName("JSON array to Dataset")
      .master("local")
      .getOrCreate();

  String source = "data/array.json";

  // Measure how long the JSON ingestion itself takes.
  long t0 = System.currentTimeMillis();
  Dataset<Row> dataframe = session.read().json(source);
  long t1 = System.currentTimeMillis();
  System.out.println("Processing took " + (t1 - t0) + " ms");

  dataframe.show();
  dataframe.printSchema();

  // Flatten the JSON array field into one row per element,
  // exposed as a proper column named "vals".
  dataframe = dataframe.select(explode(dataframe.col("valsInArrays"))).toDF("vals");
  dataframe.show();
  dataframe.printSchema();
}
Use of org.apache.spark.sql.Row in project net.jgp.labs.spark by jgperrin:
the start method of the JsonMapToDataset class.
private void start() {
  // Local Spark session for the demo.
  SparkSession session = SparkSession.builder()
      .appName("JSON map to Dataset")
      .master("local")
      .getOrCreate();

  String source = "data/map.json";

  // Time only the JSON read, then report it.
  long t0 = System.currentTimeMillis();
  Dataset<Row> dataframe = session.read().json(source);
  long t1 = System.currentTimeMillis();
  System.out.println("Processing took " + (t1 - t0) + " ms");

  // Display the data and the inferred schema.
  dataframe.show();
  dataframe.printSchema();
}
Use of org.apache.spark.sql.Row in project net.jgp.labs.spark by jgperrin:
the start method of the QuotedCsvWithHeaderToDataset class.
private void start() {
  // Local Spark session for the demo.
  SparkSession session = SparkSession.builder()
      .appName("CSV to Dataset")
      .master("local")
      .getOrCreate();

  // Read a quoted CSV file, letting Spark infer column types
  // and treat the first row as a header.
  Dataset<Row> dataframe = session.read()
      .option("inferSchema", "true")
      .option("header", "true")
      .csv("data/csv-quoted.txt");

  dataframe.show();
  dataframe.printSchema();
}
Use of org.apache.spark.sql.Row in project net.jgp.labs.spark by jgperrin:
the start method of the MySQLToDatasetApp class.
private void start() {
  // Local Spark session backed by a MySQL JDBC source.
  SparkSession spark = SparkSession.builder()
      .appName("Dataset from MySQL JDBC Connection")
      .master("local")
      .getOrCreate();

  // JDBC connection properties. setProperty() is preferred over put():
  // it enforces String keys/values, matching the Properties contract.
  // SECURITY NOTE(review): credentials are hard-coded for this lab only;
  // real applications must load them from configuration or a secret store,
  // and should not disable SSL.
  java.util.Properties props = new java.util.Properties();
  props.setProperty("user", "root");
  props.setProperty("password", "password");
  props.setProperty("useSSL", "false");

  // Pull the whole "actor" table from the sakila sample database.
  Dataset<Row> df = spark.read().jdbc(
      "jdbc:mysql://localhost:3306/sakila?serverTimezone=EST", "actor", props);

  // Sort by last name before displaying.
  df = df.orderBy(df.col("last_name"));
  df.show();
}
Use of org.apache.spark.sql.Row in project net.jgp.labs.spark by jgperrin:
the start method of the ArrayToDataframeApp class.
private void start() {
  // Local Spark session for the demo.
  SparkSession session = SparkSession.builder()
      .appName("Array to Dataframe")
      .master("local")
      .getOrCreate();

  // Wrap the raw strings in a list and build a typed Dataset<String>.
  String[] values = new String[] { "a", "b", "c", "d" };
  List<String> data = Arrays.asList(values);
  Dataset<String> ds = session.createDataset(data, Encoders.STRING());

  // Drop to the untyped dataframe view and display it.
  ds.toDF().show();
}
Aggregations