(1) Transforming an immutable Map with for/yield
// Course -> score table; derive a new map with every score reduced by 10%.
val map = Map("Spark" -> 30, "Scala" -> 50, "BigData" -> 80)
// Same as `for ((k, v) <- map) yield (k, v * 0.9)`: values widen Int -> Double.
map.map { case (k, v) => (k, v * 0.9) }
map: scala.collection.immutable.Map[String,Int] = Map(Spark -> 30, Scala -> 50, BigData -> 80)
res0: scala.collection.immutable.Map[String,Double] = Map(Spark -> 27.0, Scala -> 45.0, BigData -> 72.0)
(2) Mutable Map: lookup with default, in-place add and remove
// Mutable score table exercised in place: read, insert, then delete a key.
val scores = scala.collection.mutable.Map("Spark" -> 50, "Hadoop" -> 60, "Cloud" -> 90)
// Safe lookup with a fallback of 0; "Hadoop" is present, so this is 60.
val hadoopScores = scores.get("Hadoop").getOrElse(0)
// In-place insert and delete; both `+=` and `-=` return the map itself.
scores += "R" -> 40
scores -= "Hadoop"
scores: scala.collection.mutable.Map[String,Int] = Map(Hadoop -> 60, Spark -> 50, Cloud -> 90)
hadoopScores: Int = 60
res0: scala.collection.mutable.Map[String,Int] = Map(Hadoop -> 60, R -> 40, Spark -> 50, Cloud -> 90)
res1: scala.collection.mutable.Map[String,Int] = Map(R -> 40, Spark -> 50, Cloud -> 90)
(3) SortedMap, tuples, zip, and partition
// SortedMap keeps entries ordered by key (String ordering: Hadoop, Scala, Spark).
val sortedScore=scala.collection.immutable.SortedMap("Scala"->30,"Hadoop"->50,"Spark"->80)
sortedScore: scala.collection.immutable.SortedMap[String,Int] = Map(Hadoop -> 50, Scala -> 30, Spark -> 80)
// A 5-element tuple mixing Int, Double, and String components.
val tuple=(1,2,3.14,"Scala","Spark")
// Positional access: tuple fields are 1-based (_1 .. _5).
val third=tuple._3
// Destructure every component into a named val.
val (first,second,thirda,fourth,fifth)=tuple
// Underscores discard the components that are not needed.
val (f,s,_,_,_)=tuple
tuple: (Int, Int, Double, String, String) = (1,2,3.14,Scala,Spark)
third: Double = 3.14
first: Int = 1
second: Int = 2
thirda: Double = 3.14
fourth: String = Scala
fifth: String = Spark
f: Int = 1
s: Int = 2
// Pair each bracket symbol with a repeat count, then print symbol repeated count times.
val symbols = Array("[", "-", "]")
val counts = Array(2, 5, 2)
val pairs = symbols.zip(counts)
// String `*` repeats the string; this prints "[[-----]]".
pairs.foreach { case (sym, n) => print(sym * n) }
symbols: Array[String] = Array([, -, ])
counts: Array[Int] = Array(2, 5, 2)
pairs: Array[(String, Int)] = Array(([,2), (-,5), (],2))
[[-----]]res0: Unit = ()
// Split the string into (uppercase chars, everything else), preserving order.
"Scala Spark".partition(ch => ch.isUpper)
res0: (String, String) = (SS,cala park)
// --- Consolidated replay of snippets (1)-(3); the combined REPL output follows below. ---
// (1) Discount every value of an immutable map by 10% (Int values widen to Double).
val map=Map("Spark"->30,"Scala"->50,"BigData"->80)
for((k,v)<-map)yield(k,v*0.9)
// (2) Mutable map: lookup with a default, then add and remove entries in place.
val scores=scala.collection.mutable.Map("Spark"->50,"Hadoop"->60,"Cloud"->90)
val hadoopScores=scores.getOrElse("Hadoop",0)
scores+=("R"->40)
scores-="Hadoop"
// (3) SortedMap keeps entries ordered by key.
val sortedScore=scala.collection.immutable.SortedMap("Scala"->30,"Hadoop"->50,"Spark"->80)
// Tuples: 1-based positional access and full/partial destructuring (_ skips a slot).
val tuple=(1,2,3.14,"Scala","Spark")
val third=tuple._3
val (first,second,thirda,fourth,fifth)=tuple
val (f,s,_,_,_)=tuple
// partition splits characters by predicate into (matching, non-matching).
"Scala Spark".partition(_.isUpper)
// zip pairs the two arrays; String * Int repeats the string, printing "[[-----]]".
val symbols=Array("[","-","]")
val counts=Array(2,5,2)
val pairs=symbols.zip(counts)
for((x,y)<-pairs)print(x*y)
map: scala.collection.immutable.Map[String,Int] = Map(Spark -> 30, Scala -> 50, BigData -> 80)
res0: scala.collection.immutable.Map[String,Double] = Map(Spark -> 27.0, Scala -> 45.0, BigData -> 72.0)
scores: scala.collection.mutable.Map[String,Int] = Map(Hadoop -> 60, Spark -> 50, Cloud -> 90)
hadoopScores: Int = 60
res1: scala.collection.mutable.Map[String,Int] = Map(Hadoop -> 60, R -> 40, Spark -> 50, Cloud -> 90)
res2: scala.collection.mutable.Map[String,Int] = Map(R -> 40, Spark -> 50, Cloud -> 90)
sortedScore: scala.collection.immutable.SortedMap[String,Int] = Map(Hadoop -> 50, Scala -> 30, Spark -> 80)
tuple: (Int, Int, Double, String, String) = (1,2,3.14,Scala,Spark)
third: Double = 3.14
first: Int = 1
second: Int = 2
thirda: Double = 3.14
fourth: String = Scala
fifth: String = Spark
f: Int = 1
s: Int = 2
res3: (String, String) = (SS,cala park)
symbols: Array[String] = Array([, -, ])
counts: Array[Int] = Array(2, 5, 2)
pairs: Array[(String, Int)] = Array(([,2), (-,5), (],2))
[[-----]]res4: Unit = ()