Spark Streaming foreachRDD

foreachRDD(func)
foreachRDD gives you access to each RDD in a DStream; the method itself returns nothing. It is a higher-order method: its parameter func is a function that takes an RDD and returns Unit, and foreachRDD applies func to every RDD the DStream produces. Inside func you can use any RDD operation. Note that func itself runs on the driver, while the RDD transformations and actions invoked inside it execute on the worker nodes.
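That driver/worker split is easy to see in a minimal sketch (the helper name logBatchSizes is made up for illustration): the body of foreachRDD runs once per batch interval on the driver, while the count() action it triggers is computed on the workers.

import org.apache.spark.streaming.dstream.DStream

// Hypothetical helper: shows where each piece of a foreachRDD body executes.
def logBatchSizes(dstream: DStream[String]): Unit = {
  dstream.foreachRDD { rdd =>
    // This closure runs on the driver, once per batch interval.
    val count = rdd.count()         // the action itself is executed on the workers
    println(s"batch size: $count")  // appears in the driver's stdout
  }
}

The full example below applies the same idea to writing words into MySQL: the connection is created inside foreachPartition, on the worker that actually uses it.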

package spark.examples.streaming

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreamingForPartition {

  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("NetCatWordCount")
    conf.setMaster("local[3]")
    val ssc = new StreamingContext(conf, Seconds(5))
    // A DStream is a sequence of RDDs, which is what makes foreachRDD meaningful
    val dstream = ssc.socketTextStream("192.168.26.140", 9999)
    dstream.foreachRDD(rdd => {
      // Writes one partition's records to MySQL over a single connection
      def func(records: Iterator[String]) {
        var conn: Connection = null
        var stmt: PreparedStatement = null
        try {
          val url = "jdbc:mysql://192.168.26.140:3306/person"
          val user = "root"
          val password = ""
          conn = DriverManager.getConnection(url, user, password)
          // Prepare the statement once and reuse it for every word;
          // preparing a new statement per word would leak all but the last one
          stmt = conn.prepareStatement("insert into TBL_WORDS(word) values (?)")
          records.flatMap(_.split(" ")).foreach(word => {
            stmt.setString(1, word)
            stmt.executeUpdate()
          })
        } catch {
          case e: Exception => e.printStackTrace()
        } finally {
          if (stmt != null) {
            stmt.close()
          }
          if (conn != null) {
            conn.close()
          }
        }
      }
      val repartitionedRDD = rdd.repartition(3)
      repartitionedRDD.foreachPartition(func)
    })
    ssc.start()
    ssc.awaitTermination()
  }
}
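One executeUpdate per word costs a network round trip to MySQL for every record. If throughput matters, JDBC batching can amortize that cost; below is a sketch of a hypothetical batched variant of func, reusing the URL, credentials, and table from the example above (with the MySQL driver, appending rewriteBatchedStatements=true to the JDBC URL is typically needed for the batch to be sent as a single statement).

import java.sql.DriverManager

// Hypothetical batched variant of func; same url/user/password as the example above.
def funcBatched(records: Iterator[String]): Unit = {
  val conn = DriverManager.getConnection(
    "jdbc:mysql://192.168.26.140:3306/person", "root", "")
  try {
    val stmt = conn.prepareStatement("insert into TBL_WORDS(word) values (?)")
    try {
      records.flatMap(_.split(" ")).foreach { word =>
        stmt.setString(1, word)
        stmt.addBatch()        // queue the insert instead of executing it immediately
      }
      stmt.executeBatch()      // flush the whole partition in one batch
    } finally {
      stmt.close()
    }
  } finally {
    conn.close()
  }
}

It wires in exactly as before: repartitionedRDD.foreachPartition(funcBatched).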
