object SparkSqlByText_Scala {
  /**
   * Reads a comma-separated text file, maps each line to a `Person` bean,
   * registers the result as a temporary table, and runs a `select *` over it,
   * printing every resulting Person to stdout.
   */
  def main(args: Array[String]): Unit = {
    // Needed on Windows so Hadoop can locate winutils.exe.
    System.setProperty("hadoop.home.dir", "C:\\Users\\student\\modules\\hadoop-2.6.0-cdh5.8.5")
    val conf = new SparkConf().setAppName("").setMaster(SparkUitl_Java2.master)
    val sc = new SparkContext(conf)
    val sqlCon = new SQLContext(sc)
    // Read the text input; each line is expected to be "id,name,age,sex,addr".
    val lines = sc.textFile(SparkUitl_Java2.textInput)
    // Build one Person per line. Use a local val inside the closure instead of
    // a shared driver-side var: the original shared `var person` is mutated
    // from inside a serialized closure, which is fragile and non-idiomatic.
    val persons = lines.map { line =>
      val items = line.split(",")
      val person = new Person
      person.setId(items(0).toInt)
      person.setName(items(1))
      person.setAge(items(2).toInt)
      person.setSex(items(3))
      person.setAddr(items(4))
      person
    }
    // Create a DataFrame via bean reflection; classOf[Person] avoids
    // allocating a throw-away instance just to call getClass on it.
    val df = sqlCon.createDataFrame(persons, classOf[Person])
    // Register as a temp table so it can be queried with SQL.
    df.registerTempTable(SparkUitl_Java2.tableName)
    // Plain select over the registered table.
    val sql: String = s"select * from ${SparkUitl_Java2.tableName}"
    val dfSql = sqlCon.sql(sql)
    // Bean reflection orders columns alphabetically (addr, age, id, name, sex),
    // which is why the row indices below do not match the input-file order.
    val personResult = dfSql.map { row =>
      val person = new Person
      person.setId(row.getInt(2))
      person.setName(row.getString(3))
      person.setAge(row.getInt(1))
      person.setSex(row.getString(4))
      person.setAddr(row.getString(0))
      person
    }
    // Pull the results back to the driver and print them.
    personResult.collect().foreach(println)
  }
}
public class SparkSqlByJson_Java {
    /**
     * Loads person.json into a DataFrame — first by schema inference, then
     * with an explicit StructType schema — and shows the resulting table.
     */
    public static void main(String[] args) {
        // Needed on Windows so Hadoop can locate winutils.exe.
        System.setProperty("hadoop.home.dir", "C:\\Users\\student\\modules\\hadoop-2.6.0-cdh5.8.5");
        // Create the SQLContext.
        SparkConf conf = new SparkConf().setAppName(SparkSqlByJson_Java.class.getName()).setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        SQLContext sqlCon = new SQLContext(sc);
        // Create a DataFrame from a JSON file.
        String path = "C:\\Users\\student\\modules\\datas\\person.json";
        // Sample 1: let Spark infer the schema from the JSON data.
        DataFrame df = sqlCon.read().json(path);
        // Sample 2: supply an explicit row schema.
        // Use the generic List<StructField> instead of a raw List to keep the
        // compiler's type checking (the original used raw types).
        List<StructField> fields = new ArrayList<>();
        fields.add(DataTypes.createStructField("id", DataTypes.IntegerType, true));
        fields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        fields.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));
        fields.add(DataTypes.createStructField("sex", DataTypes.StringType, true));
        fields.add(DataTypes.createStructField("addr", DataTypes.StringType, true));
        StructType st = DataTypes.createStructType(fields);
        // read().schema(st).json(path) replaces the deprecated jsonFile(path, st).
        df = sqlCon.read().schema(st).json(path);
        // Equivalent to: select * from person;
        df.show();
        System.out.println("===============================================");
        // //select name from xxx
        // df.select("name").show();
        // //select name, age+10 from xxx
        // df.select(df.col("name"), df.col("age").plus(10)).show();
        // //select * from xxx where age <=50
        // df.filter(df.col("age").leq(50)).show();
        // //select count from xxx group by sex
        // df.groupBy(df.col("sex")).count().show();
    }
}
Scala语言:
object SparkSqlByJson_Scala {
  /**
   * Loads person.json with an explicit schema and demonstrates basic
   * DataFrame operations (select, filter, groupBy).
   */
  def main(args: Array[String]): Unit = {
    // Needed on Windows so Hadoop can locate winutils.exe.
    System.setProperty("hadoop.home.dir", "C:\\Users\\student\\modules\\hadoop-2.6.0-cdh5.8.5")
    val conf = new SparkConf().setAppName("").setMaster("local[*]")
    val sc = new SparkContext(conf)
    val sqlCon = new SQLContext(sc)
    val path: String = "C:\\Users\\student\\modules\\datas\\person.json"
    // Sample 1: let Spark infer the schema.
    // val df = sqlCon.read.json(path)
    // Sample 2: explicit schema. Use the case-class-style apply consistently
    // instead of mixing `new StructField(...)` with `StructField(...)`.
    val st = StructType(Array(
      StructField("id", IntegerType, true),
      StructField("name", StringType, true),
      StructField("age", IntegerType, true),
      StructField("sex", StringType, true),
      StructField("addr", StringType, true)
    ))
    // read.schema(st).json(path) replaces the deprecated jsonFile(path, st).
    val df = sqlCon.read.schema(st).json(path)
    // ------------- select / show -----------------------------------------
    // select * from xxx
    df.show()
    // select name from xxx
    df.select("name").show()
    // select name, age+10 from xxx
    df.select(df.col("name"), df.col("age").plus(10)).show()
    // select * from xxx where age <= 50
    df.filter(df.col("age").leq(50)).show()
    // select count(*) from xxx group by sex
    df.groupBy(df.col("sex")).count().show()
  }
}
GetUrlParam:function GetUrlParam(param){
var reg = new RegExp("(^|&)"+ param +"=([^&]*)(&|$)");
var r = window.location.search.substr(1).match(reg);
if(r!=null
==================================================
1、打开PowerDesigner12,在菜单中按照如下方式进行操作
file->Reverse Engineer->DataBase
点击后,弹出 New Physical Data Model 的对话框
2、在General选项卡中
Model name:模板名字,可自定义
网站配置是apache+tomcat,tomcat没有报错,apache报错是:
The proxy server received an invalid response from an upstream server. The proxy server could not handle the request GET /. Reason: Error reading from remote server
Free variable: A free variable of an expression is a variable that's used inside the expression but not defined inside the expression. For instance, in the function literal expression (x: Int) => (x, y), both x and y are used, but only y is a free variable, because x is defined inside the expression.
Part Ⅰ:
《阿甘正传》Forrest Gump经典中英文对白
Forrest: Hello! My name's Forrest. Forrest Gump. You wanna chocolate? I could eat about a million and a half of these. My momma always said life was like a box of chocolates.
Json在数据传输中很好用,原因是JSON 比 XML 更小、更快,更易解析。
在Java程序中,如何使用处理JSON,现在有很多工具可以处理,比较流行常用的是google的gson和alibaba的fastjson,具体使用如下:
1、读取json然后处理
class ReadJSON
{
public static void main(String[] args)