// Spark connecting to Oracle
//
// Requires the Oracle JDBC driver jar on the classpath.

package com.agm.database



import java.sql.DriverManager
import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.log4j.{ Level, Logger }
import org.apache.spark.sql.SQLContext
import java.util.Properties
import java.text.DecimalFormat
import java.text.SimpleDateFormat
import java.util.Date


/**
 * Created by LLS on 2017/03/14.
 */
object JdbcTest {
  
   // Core working-time computation (handles late arrival / early leaving, etc.)
    def getCoreTime(start_time:String,end_Time:String)={
      var df:SimpleDateFormat=new SimpleDateFormat("HH:mm:ss")
      var begin:Date=df.parse(start_time)
      var end:Date = df.parse(end_Time)
      var between:Long=(end.getTime()-begin.getTime())/1000//转化成秒
      var hour:Float=between.toFloat/3600
      var decf:DecimalFormat=new DecimalFormat("#.00")
      decf.format(hour)//格式化


    }
    
  def main(args: Array[String]) {
    Logger.getLogger("org").setLevel(Level.ERROR)
    val conf = new SparkConf().setAppName("Simple Application") //给Application命名    
    conf.setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc);


  /*  //这种方法没成功
val url = "jdbc:oracle:thin:@IP:1521:数据库?user=***&password=***";
    val prop = new Properties();
    val df = sqlContext.read.jdbc(url, "pat_master_index", prop);
     df.registerTempTable("patient")
     val res = sqlContext.sql("select  patient_id ,name from patient where petient_id like '9000220%'")
    res.show()
    */


    val jdbcMap = Map("url" -> "jdbc:oracle:thin:@//IP:端口/数据库名",
      "user" -> "***",
      "password" -> "***",
      "dbtable" -> "***",
      "driver" -> "oracle.jdbc.driver.OracleDriver")
    val jdbcDF = sqlContext.read.options(jdbcMap).format("jdbc").load
    jdbcDF.registerTempTable("patient")
    
    val t0 = System.nanoTime : Double
 
    val res = sqlContext.sql("select PATIENT_ID,NAME from patient where PATIENT_ID like '9000112%'")
    val t1 = System.nanoTime : Double
    println("Elapsed time " + (t1 - t0) / 1000000.0 + " msecs")
    res.show()


    sc.stop()
  }
}

// You may also be interested in: (spark, oracle)