Integrating Spring Boot with Scala and Spark for big data prediction

I previously built a framework that drove Spark computations using Jetty and RESTful services. Recently I wanted to rebuild it with Spring Boot + Scala + Spark. Below are the pom.xml and the code.
pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

  <modelVersion>4.0.0</modelVersion>

  <groupId>com.i-tudou.bd</groupId>
  <artifactId>spring-spark-demo</artifactId>
  <packaging>jar</packaging>
  <version>1.0-SNAPSHOT</version>

  <name>A Camel Scala Route</name>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
    <java.version>1.8</java.version>
    <scala.version>2.11</scala.version>
    <spark.version>2.4.0</spark.version>
  </properties>

  <parent>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-parent</artifactId>
    <version>1.5.3.RELEASE</version>
    <relativePath/>
  </parent>

  <dependencies>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-aop</artifactId>
    </dependency>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-web</artifactId>
    </dependency>

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.version}</artifactId>
      <version>${spark.version}</version>
      <exclusions>
        <exclusion>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
        <exclusion>
          <groupId>log4j</groupId>
          <artifactId>log4j</artifactId>
        </exclusion>
      </exclusions>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
    </dependency>
    <dependency>
      <groupId>org.codehaus.janino</groupId>
      <artifactId>janino</artifactId>
      <version>3.0.8</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.version}</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>

    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-configuration-processor</artifactId>
      <optional>true</optional>
    </dependency>
  </dependencies>
</project>

application.scala

package com.itudou.bd

import org.springframework.boot.SpringApplication
import org.springframework.boot.autoconfigure.SpringBootApplication

// @SpringBootApplication already combines @Configuration, @EnableAutoConfiguration
// and @ComponentScan, so listing them separately is redundant.
@SpringBootApplication
class Config

object springsparkdemoApplication extends App {
  // Forward the command-line arguments captured by extends App
  SpringApplication.run(classOf[Config], args: _*)
}
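
If you prefer to avoid the DelayedInit semantics of extends App, an explicit main method is an equivalent entry point (a sketch, same package and imports as above):

object springsparkdemoApplication {
  // Explicit main instead of extends App; Spring Boot receives the args directly
  def main(args: Array[String]): Unit =
    SpringApplication.run(classOf[Config], args: _*)
}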

sparkconfig.scala

package com.itudou.bd.config

import org.apache.spark.{SparkConf, SparkContext}
import org.springframework.context.annotation.{Bean, Configuration}

@Configuration
class Sparkconfig {
  // sparkHome is currently unused; kept from the original setup
  private val sparkHome = "."

  private val appName = "sparkTest"

  private val master = "local"

  @Bean
  def sparkConf: SparkConf =
    new SparkConf().setAppName(appName).setMaster(master)

  @Bean
  def sparkContext: SparkContext = new SparkContext(sparkConf)
}
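
On Spark 2.x it is common to expose a SparkSession rather than a bare SparkContext. A minimal alternative sketch (my addition, not part of the original project; the class and bean names are hypothetical):

package com.itudou.bd.config

import org.apache.spark.sql.SparkSession
import org.springframework.context.annotation.{Bean, Configuration}

// Hypothetical alternative config: a SparkSession bean.
// The underlying SparkContext remains reachable via sparkSession.sparkContext.
@Configuration
class SparkSessionConfig {
  @Bean
  def sparkSession: SparkSession =
    SparkSession.builder()
      .appName("sparkTest")
      .master("local")
      .getOrCreate()
}

A controller could then @Autowired a SparkSession and call sparkSession.read.jdbc(...) without constructing an SQLContext by hand.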

DataController.scala

package com.itudou.bd.Controller


import java.util.Properties

import org.apache.spark.sql.SQLContext
import org.apache.spark.SparkContext
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.web.bind.annotation._

import scala.util.parsing.json.JSON

@RestController
@RequestMapping (value = Array("DataController/data/"))
@CrossOrigin
class DataController {

  @Autowired
  var sc:SparkContext = _
  @GetMapping(value = Array("test"))
  def test = {
    val url = "jdbc:mysql://10.1.3.49:3309/tdgistaskDB?useUnicode=true&characterEncoding=UTF-8&user=root&password=123"
    val prop = new Properties()

    // Load the t_task table over JDBC into a DataFrame
    val sqlContext = new SQLContext(sc)
    val df = sqlContext.read.jdbc(url, "t_task", prop)

    df.createOrReplaceTempView("t_task")
    // Query the temp view with SQL
    val df1 = sqlContext.sql("select * from t_task where parent_id = 0")

    // show() prints the frame and returns Unit, so call it on its own
    // instead of concatenating its result into a string
    df1.show()
    //println("1.------------->" + df1.rdd.partitions.size)

    // parseFull requires valid JSON (quoted keys) and returns an Option
    JSON.parseFull("""{"lige": 1}""")
  }

}
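
Once the application is up, a GET to http://localhost:8080/DataController/data/test (default port assumed) triggers the query. As a variation, the JDBC credentials can also be passed through the Properties object instead of the URL, which makes them easier to externalize later; a hypothetical helper, not from the original code:

import java.util.Properties
import org.apache.spark.sql.{DataFrame, SQLContext}

// Same t_task query, with user/password supplied via Properties rather than the URL.
def loadTasks(sqlContext: SQLContext): DataFrame = {
  val prop = new Properties()
  prop.setProperty("user", "root")
  prop.setProperty("password", "123")
  sqlContext.read.jdbc(
    "jdbc:mysql://10.1.3.49:3309/tdgistaskDB?useUnicode=true&characterEncoding=UTF-8",
    "t_task",
    prop)
}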
