Big Data Series, Lesson 5: Scala Basics

Stage 1: Spark Streaming, Spark SQL, Kafka, and Spark internals (requires experience with at least one large project);

Stage 2: the various environments Spark runs in, troubleshooting failures of every kind, and performance tuning (mastery of Spark internals and how Spark runs);

Stage 3: stream processing and machine learning take center stage, and they require the first two stages as a foundation;

Follow teacher Wang Jialin's from-zero lectures, focus on hands-on practice, become a Spark expert, and stand tall in the big data world!

Part 1: Study Notes

/**
 * Scala implicit conversions and concurrent programming
 * 1. Scala implicits in depth: implicit parameters, implicit classes, implicit methods, implicit objects
 * 2. Scala concurrent programming in detail
 */
object Essay {
  def main(args: Array[String]): Unit = {
    // Class extension via an implicit conversion function (e.g., fileToRichFile):
    // the compiler silently rewrites a call on one type into a call on a richer type.

    // Excerpts from org.apache.spark.SparkContext (Spark 1.x): implicit conversions that
    // graft extra methods onto pair RDDs. Left commented out because they need Spark's own
    // imports (RDD, Writable, ClassTag, SequenceFileRDDFunctions, OrderedRDDFunctions):
    //
    // implicit def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
    //     rdd: RDD[(K, V)]) = new SequenceFileRDDFunctions(rdd)
    //
    // implicit def rddToOrderedRDDFunctions[K: Ordering : ClassTag, V: ClassTag](
    //     rdd: RDD[(K, V)]) = new OrderedRDDFunctions[K, V, (K, V)](rdd)
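    // What these conversions buy you (hypothetical pairRdd of type RDD[(K, V)];
    // only compiles with Spark on the classpath):
    //   pairRdd.sortByKey()               // added by rddToOrderedRDDFunctions
    //   pairRdd.saveAsSequenceFile("out") // added by rddToSequenceFileRDDFunctions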
    
    class Person(val name: String) // a val parameter is readable on instances of the class
    //class Engineer(val name: String, val salary: Double)
    //new Person("Spark").code   // does not compile: Person has no code method
    class Engineer(val name: String, val salary: Double) {
      def code = println("Coding ....")
    }
    // Implicit conversion found in the current scope: Person -> Engineer
    implicit def person2Engineer(p: Person): Engineer =
      new Engineer(p.name, 1000)
    def toCode(p: Person): Unit = {
      p.code   // compiles because p is implicitly converted to an Engineer
    }
    toCode(new Person("Scala"))   // prints: Coding ....
    // Implicit parameters: when the argument list is omitted, the compiler supplies it from
    //   1) implicit values visible in the current scope (injected via implicit val), or
    //   2) the companion object of the implicit parameter's type
    //      (see the Priority sketch after Implicit_Parameters below)
    class Level(val level: Int)
    def toWorker(name: String)(implicit l: Level): Unit =
      println(name + " : " + l.level)
    
    implicit val level = new Level(8)
    toWorker("Spark")
    
  }
}

// The implicit String value imported by Implicit_Parameters below lives in its own object:
object Context_Implicits {
  implicit val default: String = "Flink"
}
 
object Param {
  def print(content: String)(implicit language: String): Unit =
    println(language + ":" + content)
}
object Implicit_Parameters {

  def main(args: Array[String]): Unit = {
    Param.print("Spark")("Scala")   // explicit argument wins: prints Scala:Spark

    import Context_Implicits._
    Param.print("Hadoop")           // implicit default injected: prints Flink:Hadoop
  }
}
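
The companion-object rule mentioned inside Essay deserves its own runnable example: when no implicit value is in scope and nothing is imported, the compiler still finds one in the companion object of the implicit parameter's type. A minimal sketch with hypothetical Priority and schedule names:

class Priority(val value: Int)
object Priority {
  // Found by implicit search because this is the companion object of Priority
  implicit val default: Priority = new Priority(3)
}

object Companion_Implicits {
  def schedule(job: String)(implicit p: Priority): Unit =
    println(job + " at priority " + p.value)

  def main(args: Array[String]): Unit = {
    schedule("ETL")   // prints: ETL at priority 3 -- no import needed
  }
}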
import java.io.File
import scala.io.Source

class RicherFile(val file: File) {
  def read: String = Source.fromFile(file.getPath).mkString
}
 
class File_Implicits(path: String) extends File(path)
object File_Implicits {
  // File -> RicherFile: lives in the companion object, so it is found whenever a
  // File_Implicits is missing a member such as read
  implicit def file2RicherFile(file: File): RicherFile = new RicherFile(file)
}

object Implicits_Internals {
  def main(args: Array[String]): Unit = {
    val file = new File_Implicits("content.txt")
    println(file.read)   // read resolves via the companion-object implicit above
  }
}
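
Since Scala 2.10, this wrapper-plus-conversion pattern is usually written as a single implicit class. A minimal sketch of the same File extension (RichFile and the enclosing object name are illustrative):

object RichFile_ImplicitClass {
  implicit class RichFile(val file: File) extends AnyVal {
    def read: String = Source.fromFile(file.getPath).mkString
  }

  def main(args: Array[String]): Unit = {
    println(new File("content.txt").read)   // read is supplied by the implicit class
  }
}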
import scala.actors.Actor   // the pre-Akka standard-library actors (deprecated since Scala 2.10)

// An Actor is similar to a Thread, but where Java threads coordinate by locking shared
// global state, Actors coordinate purely by passing messages.
// The core of Akka is the Actor model.
// Spark 1.6.0 introduced its own unified memory management.

object Actor_Basics {
  def main(args: Array[String]): Unit = {
    // Note: a bare `case ...` block inside while(true) does not compile; pattern
    // matching on the mailbox must go through receive { ... }.
    class HiActor extends Actor {
      def act(): Unit = {      // the Actor's counterpart of Thread.run
        while (true) {
          receive {            // the method an Actor uses to take a message from its mailbox
            case name: String => println(name)
          }
        }
      }
    }
    val actor = new HiActor
    actor.start()
    actor ! "Spark"            // asynchronous send; prints: Spark
  }
}
  
// In Spark's standalone cluster (pre-2.0), Master and Worker are each an Actor.

case class Basic(name: String, age: Int)
case class Worker(name: String, age: Int)

class BasicActor extends Actor {
  def act(): Unit = {
    while (true) {
      receive {
        case Basic(name, age)  => println("Basic information " + name + " " + age)
        case Worker(name, age) => println("Worker information " + name + " " + age)
      }
    }
  }
}

object Case_Class_Messages {
  def main(args: Array[String]): Unit = {
    val b = new BasicActor
    b.start()
    b ! Worker("Spark", 7)   // prints: Worker information Spark 7
  }
}
  
// !?  sends a message and blocks until the Actor has processed it and replied (synchronous)
// !!  sends a message and immediately returns a Future for the eventual reply
// Early Spark is built on this model internally: DAGScheduler, Master, and Worker all
// exchanged messages through (Akka) actors before Spark grew its own RPC layer.
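
A minimal sketch of the synchronous round trip that !? performs (EchoActor, Sync_Send, and the message text are illustrative names, not from the course; the receiving actor must call reply for !? or !! to produce an answer):

class EchoActor extends Actor {
  def act(): Unit = {
    while (true) {
      receive {
        case msg: String => reply("echo: " + msg)   // reply is what answers !? and !!
      }
    }
  }
}

object Sync_Send {
  def main(args: Array[String]): Unit = {
    val e = new EchoActor
    e.start()
    val answer = e !? "Spark"   // blocks here until the actor replies
    println(answer)             // prints: echo: Spark
  }
}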

