Example use of org.apache.spark.sql.AnalysisException in the project net.jgp.labs.spark by jgperrin,
taken from the start method of the Loader class.
/**
 * Loads a CSV file into a DataFrame, publishes it as a global temporary view,
 * then keeps the application alive for ten seconds so that other Spark
 * sessions can query the view (the point of this concurrency lab).
 */
private void start() {
  SparkConf conf = new SparkConf()
      .setAppName("Concurrency Lab 001")
      .setMaster(Config.MASTER)
      .set("hello", "world");
  // NOTE(review): this JavaSparkContext is never used directly and is never
  // closed; SparkSession below reuses the same underlying SparkContext.
  // Consider dropping it — TODO confirm no other code in Loader depends on it.
  JavaSparkContext sc = new JavaSparkContext(conf);
  SparkSession spark = SparkSession.builder().config(conf).getOrCreate();

  String filename = "data/tuple-data-file.csv";
  // Infer column types from the data; the file carries no header row.
  Dataset<Row> df = spark.read()
      .format("csv")
      .option("inferSchema", "true")
      .option("header", "false")
      .load(filename);
  df.show();

  try {
    // Global temp views are registered under the global_temp database and are
    // visible to every SparkSession in the application.
    df.createGlobalTempView("myView");
  } catch (AnalysisException e) {
    // Fail fast with context instead of the auto-generated TODO stub that
    // printed the stack trace and then slept as if the view existed.
    throw new IllegalStateException(
        "Could not create global temp view 'myView': " + e.getMessage(), e);
  }

  try {
    // Keep the driver alive so concurrent sessions have time to read the view.
    Thread.sleep(10000);
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers up the stack can observe it.
    Thread.currentThread().interrupt();
    System.out.println("Hmmm... Something interrupted the thread: " + e.getMessage());
  }
}
Aggregations