// DataFrame variant: register an in-memory collection as a temp view and
// inspect it through the catalog API.
val spark = SparkSession.builder().config(conf).getOrCreate()
// Catalog is empty at startup — the first listTables() output below has no rows.
spark.catalog.listTables().show()
// createOrReplaceTempView returns Unit, so binding its result to a val
// (the original `val view = ...`) was a misleading dead binding — dropped.
// Data made consistent with the pasted output (3 rows, count(1) = 3) and with
// the Dataset snippets later in this file.
spark.createDataFrame(List(Person("Jason", 34, "BigData"), Person("Tom", 20, "Dev"), Person("Jason", 28, "DBA")))
  .createOrReplaceTempView("V_Person")
// The temp view now appears in the catalog (tableType = TEMPORARY, database = null).
spark.catalog.listTables().show()
// Column metadata inferred from the Person case class: name/age/job.
spark.catalog.listColumns("V_Person").show()
spark.close()
+----+--------+-----------+---------+-----------+
|name|database|description|tableType|isTemporary|
+----+--------+-----------+---------+-----------+
+----+--------+-----------+---------+-----------+
+--------+--------+-----------+---------+-----------+
| name|database|description|tableType|isTemporary|
+--------+--------+-----------+---------+-----------+
|v_person| null| null|TEMPORARY| true|
+--------+--------+-----------+---------+-----------+
+----+-----------+--------+--------+-----------+--------+
|name|description|dataType|nullable|isPartition|isBucket|
+----+-----------+--------+--------+-----------+--------+
|name| null| string| true| false| false|
| age| null| int| true| false| false|
| job| null| string| true| false| false|
+----+-----------+--------+--------+-----------+--------+
// Run the demo queries against the V_Person temp view and print each result:
// a full scan, a row count, and a per-name rank over age (window function).
Seq(
  "SELECT * from V_Person",
  "SELECT count(1) from V_Person",
  "SELECT name,age,job,rank() over(partition by name order by age) as rk from V_Person"
).foreach(query => spark.sql(query).show())
+-----+---+-------+
| name|age| job|
+-----+---+-------+
|Jason| 34|BigData|
| Tom| 20| Dev|
|Jason| 28| DBA|
+-----+---+-------+
+--------+
|count(1)|
+--------+
| 3|
+--------+
+-----+---+-------+---+
| name|age| job| rk|
+-----+---+-------+---+
|Jason| 28| DBA| 1|
|Jason| 34|BigData| 2|
| Tom| 20| Dev| 1|
+-----+---+-------+---+
// Dataset variant of the same demo: typed construction instead of createDataFrame.
val spark = SparkSession.builder().config(conf).getOrCreate()
spark.catalog.listTables().show()
// Brings the implicit Encoder[Person] into scope for createDataset.
import spark.implicits._
// createDataset on a List[Person] already yields a Dataset[Person]; the
// original trailing `.as[Person]` was a redundant no-op cast and is removed.
spark.createDataset(List(Person("Jason", 34, "BigData"), Person("Tom", 20, "Dev"), Person("Jason", 28, "DBA")))
  .createOrReplaceTempView("V_Person")
spark.catalog.listTables().show()
spark.catalog.listColumns("V_Person").show()
// Same three queries as the DataFrame variant — output shown below is identical.
spark.sql("SELECT * from V_Person").show()
spark.sql("SELECT count(1) from V_Person").show()
spark.sql("SELECT name,age,job,rank() over(partition by name order by age) as rk from V_Person").show()
spark.close()
+-----+---+-------+
| name|age| job|
+-----+---+-------+
|Jason| 34|BigData|
| Tom| 20| Dev|
|Jason| 28| DBA|
+-----+---+-------+
+--------+
|count(1)|
+--------+
| 3|
+--------+
+-----+---+-------+---+
| name|age| job| rk|
+-----+---+-------+---+
|Jason| 28| DBA| 1|
|Jason| 34|BigData| 2|
| Tom| 20| Dev| 1|
+-----+---+-------+---+
// Write the ranking query result to local disk as '#'-separated CSV.
val spark = SparkSession.builder().config(conf).getOrCreate()
import spark.implicits._
// `.as[Person]` removed: createDataset(List[Person]) is already Dataset[Person].
spark.createDataset(List(Person("Jason", 34, "BigData"), Person("Tom", 20, "Dev"), Person("Jason", 28, "DBA")))
  .createOrReplaceTempView("V_Person")
// spark.sql already returns a DataFrame, so the original `.toDF()` was a no-op.
val df = spark.sql("SELECT name,age,job,rank() over(partition by name order by age) as rk from V_Person")
// NOTE(review): save() creates a DIRECTORY named result_jason.csv containing
// part-* files, not a single CSV file. The "d://" double slash also looks
// suspect — confirm the intended Windows path is "file:///d:/result_jason.csv".
df.write.format("CSV").option("sep", "#").save("file:///d://result_jason.csv")
spark.close()
// Caching demo: persist a Dataset, then keep the application alive so the
// cached data can be inspected in the Spark web UI Storage tab.
val spark = SparkSession.builder().config(conf).getOrCreate();
import spark.implicits._;
val ds = spark.createDataset(List(Person("Jason",34,"BigData"),Person("Tom",20,"Dev"),Person("Jason",28,"DBA"))).as[Person];
// cache() only marks the Dataset for persistence — it is lazy on its own.
ds.cache();
// count() is an action: it forces the Dataset to be computed and materialized
// into the cache so it shows up in the UI.
ds.count();
// Sleep 10 minutes so the driver (and its web UI on port 4040) stays up for
// inspection — demo-only; remove in real jobs.
Thread.sleep(600000);
spark.close();
Spark web UI "Storage" tab (shows the cached Dataset while the app above is sleeping): http://10.70.64.37:4040/storage/