Spark SQL supports reading data from relational databases such as MySQL via JDBC. The loaded data is still represented as a DataFrame, which can then be processed conveniently with the usual Spark Core operators.
Here is a tip from experience: processing JDBC data with Spark SQL is genuinely useful in practice. Suppose your MySQL production database holds a large amount of data, say 10 million rows, and you now need to write a program that applies some complex business logic to dirty data in that live table, perhaps even complex enough to involve repeatedly querying Hive through Spark SQL and joining against those results.
In that case, loading the MySQL data through the JDBC data source with Spark SQL and then processing it with the various operators is the best choice. Spark is a distributed computing framework, so those 10 million rows will be processed in parallel across the cluster. If you instead hand-wrote a plain Java program, you would have no option but to process the data in batches, 20,000 rows at a time, and by the time your program finished, days might have passed.
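The distributed part is not automatic, though: by default the JDBC source reads a table through a single connection into a single partition. To actually spread a 10-million-row read across the cluster, the source accepts partitioning options. A minimal sketch, assuming student_infos has a numeric primary-key column named id (the column name, bounds, and partition count here are illustrative assumptions, not part of the example below):

package cn.spark.study.core;

import java.util.HashMap;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

public class JDBCPartitionedRead {
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setAppName("JDBCPartitionedRead");
		JavaSparkContext sc = new JavaSparkContext(conf);
		SQLContext sqlcontext = new SQLContext(sc);

		Map<String, String> options = new HashMap<String, String>();
		options.put("url", "jdbc:mysql://master:3306/testdb");
		options.put("dbtable", "student_infos");
		// Spark turns these four options into parallel range queries,
		// one per partition, over the assumed numeric id column.
		options.put("partitionColumn", "id");
		options.put("lowerBound", "1");
		options.put("upperBound", "10000000");
		options.put("numPartitions", "10");

		DataFrame df = sqlcontext.read().format("jdbc").options(options).load();
		System.out.println("partitions: " + df.javaRDD().partitions().size());

		sc.close();
	}
}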
Example: query the information of students whose score is above 80. The code below joins the student_infos and student_scores tables in the testdb database and writes the qualifying students into a good_students table.
Note: first make sure the Spark nodes can log in to MySQL; here access is granted to an anonymous account connecting from host master:
grant all on testdb.* to ''@'master' with grant option;
flush privileges;
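The empty user name in ''@'master' is MySQL's anonymous account connecting from host master; it is what allows the code below to call DriverManager.getConnection with empty user and password strings. In a real deployment you would create a dedicated account and pass "user" and "password" entries in the options map of the JDBC data source instead.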
Java version:
package cn.spark.study.core;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;
public class jdbctest {

	public static void main(String[] args) {
		SparkConf conf = new SparkConf()
				.setAppName("jdbcsource");
		JavaSparkContext sc = new JavaSparkContext(conf);
		SQLContext sqlcontext = new SQLContext(sc);

		// Options for the JDBC data source. The anonymous account created by the
		// grant statement above is used, so no "user"/"password" entries are set.
		Map<String, String> option = new HashMap<String, String>();
		option.put("url", "jdbc:mysql://master:3306/testdb");
		option.put("dbtable", "student_infos");
		DataFrame df1 = sqlcontext.read().format("jdbc").options(option).load();

		option.clear();
		option.put("url", "jdbc:mysql://master:3306/testdb");
		option.put("dbtable", "student_scores");
		DataFrame df2 = sqlcontext.read().format("jdbc").options(option).load();

		// Turn both DataFrames into (name, value) pair RDDs and join them,
		// producing (name, (age, score)) tuples.
		JavaPairRDD<String, Tuple2<Integer, Integer>> joinrdd = df1.javaRDD().mapToPair(
				new PairFunction<Row, String, Integer>() {
					private static final long serialVersionUID = 1L;
					@Override
					public Tuple2<String, Integer> call(Row row) throws Exception {
						// The numeric column may come back as any numeric type,
						// so coerce it to Integer via its String form.
						return new Tuple2<String, Integer>(row.getString(0),
								Integer.valueOf(String.valueOf(row.get(1))));
					}
				}).join(df2.javaRDD().mapToPair(
				new PairFunction<Row, String, Integer>() {
					private static final long serialVersionUID = 1L;
					@Override
					public Tuple2<String, Integer> call(Row row) throws Exception {
						return new Tuple2<String, Integer>(row.getString(0),
								Integer.valueOf(String.valueOf(row.get(1))));
					}
				}));

		// Flatten each joined tuple into a Row of (name, age, score).
		JavaRDD<Row> rowrdd = joinrdd.map(
				new Function<Tuple2<String, Tuple2<Integer, Integer>>, Row>() {
					private static final long serialVersionUID = 1L;
					@Override
					public Row call(Tuple2<String, Tuple2<Integer, Integer>> v) throws Exception {
						return RowFactory.create(v._1, v._2._1, v._2._2);
					}
				});

		// Keep only students whose score is above 80.
		JavaRDD<Row> filterrdd = rowrdd.filter(new Function<Row, Boolean>() {
			private static final long serialVersionUID = 1L;
			@Override
			public Boolean call(Row row) throws Exception {
				return row.getInt(2) > 80;
			}
		});

		List<StructField> structfield = new ArrayList<StructField>();
		structfield.add(DataTypes.createStructField("name", DataTypes.StringType, true));
		structfield.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));
		structfield.add(DataTypes.createStructField("score", DataTypes.IntegerType, true));
		StructType structtype = DataTypes.createStructType(structfield);
		DataFrame df3 = sqlcontext.createDataFrame(filterrdd, structtype);

		// Writing back through the data-source API is left disabled here; note
		// that "option" still points at student_scores at this point, so enabling
		// this line as-is would target the wrong table:
		// df3.write().format("jdbc").options(option).save();

		// Insert the result rows into MySQL one by one. Opening a connection per
		// row is expensive; foreachPartition would amortize that cost, and a
		// PreparedStatement would avoid the SQL-injection risk of concatenation.
		df3.javaRDD().foreach(new VoidFunction<Row>() {
			private static final long serialVersionUID = 1L;
			@Override
			public void call(Row row) throws Exception {
				String sql = "insert into good_students values ('"
						+ row.getString(0) + "'," + row.getInt(1) + "," + row.getInt(2) + ")";
				Class.forName("com.mysql.jdbc.Driver");
				Connection conn = null;
				Statement stmt = null;
				try {
					// Empty user/password, matching the anonymous-account grant above.
					conn = DriverManager.getConnection("jdbc:mysql://master:3306/testdb", "", "");
					stmt = conn.createStatement();
					stmt.executeUpdate(sql);
				} catch (Exception e) {
					e.printStackTrace();
				} finally {
					if (stmt != null) {
						stmt.close();
					}
					if (conn != null) {
						conn.close();
					}
				}
			}
		});

		sc.close();
	}
}
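Instead of inserting row by row over a hand-managed JDBC connection, the result DataFrame can also be written back in a single call with the jdbc() writer of the Spark 1.x DataFrameWriter API. A minimal sketch of what could replace the foreach block above, assuming the good_students table already exists (it reuses the anonymous account from the grant statement; java.util.Properties and org.apache.spark.sql.SaveMode would need to be imported):

Properties props = new Properties();
props.setProperty("user", "");      // anonymous account from the grant above
props.setProperty("password", "");
df3.write()
	.mode(SaveMode.Append)      // append to the existing good_students table
	.jdbc("jdbc:mysql://master:3306/testdb", "good_students", props);

Whichever write path is used, the MySQL driver jar (mysql-connector-java) must be on the classpath of both the driver and the executors, for example via the --jars option of spark-submit.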