SparkML in Practice, Part 4: Regression

package MLlib

import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.regression.LinearRegressionModel
import org.apache.spark.mllib.regression.LinearRegressionWithSGD
import org.apache.spark.mllib.linalg.Vectors
/**
 * Created by root on 16-1-12.
 * Ordinary least squares regression uses no regularization --- LinearRegressionWithSGD
 * Ridge regression uses L2 regularization                  --- RidgeRegressionWithSGD
 * Lasso uses L1 regularization                             --- LassoWithSGD
 *
 * All of these regression methods use the same training error metric: mean squared error (MSE).
 */
object Regression {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("Regression").setMaster("local[4]")
    val sc = new SparkContext(conf)

    // Load and parse the data into an RDD[LabeledPoint]
    val data = sc.textFile("/usr/local/spark/spark-1.6.0-bin-hadoop2.4" +
      "/data/mllib/ridge-data/lpsa.data")
    val parsedData = data.map { line =>
      val parts = line.split(',')
      LabeledPoint(parts(0).toDouble, Vectors.dense(parts(1).split(' ').map(_.toDouble)))
    }.cache()

    // Build the model: ordinary least squares regression (no regularization)
    val numIterations = 100
    val model = LinearRegressionWithSGD.train(parsedData, numIterations)

    // Evaluate model on training examples and compute training error
    val valuesAndPreds = parsedData.map { point =>
      val prediction = model.predict(point.features)
      (point.label, prediction)
    }
    val MSE = valuesAndPreds.map{case(v, p) => math.pow((v - p), 2)}.mean()
    println("training Mean Squared Error = " + MSE)

    // Save and load model
//    model.save(sc, "myModelPath")
//    val sameModel = LinearRegressionModel.load(sc, "myModelPath")
  }

}
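
The header comment also mentions ridge regression and the Lasso, but the listing above only trains the unregularized model. Below is a minimal sketch of the two regularized variants, assuming it runs inside the same main method (so parsedData and numIterations are in scope) and using illustrative stepSize = 1.0 and regParam = 0.01 values that are not tuned for this data set:

import org.apache.spark.mllib.regression.{RidgeRegressionWithSGD, LassoWithSGD}

// Ridge regression: least squares loss plus an L2 penalty (regParam is the L2 weight)
val ridgeModel = RidgeRegressionWithSGD.train(parsedData, numIterations, 1.0, 0.01)

// Lasso: least squares loss plus an L1 penalty (regParam is the L1 weight)
val lassoModel = LassoWithSGD.train(parsedData, numIterations, 1.0, 0.01)

// Both models expose the same predict(features) API as LinearRegressionModel,
// so the MSE evaluation above applies unchanged, e.g. for the ridge model:
val ridgeMSE = parsedData
  .map(p => math.pow(p.label - ridgeModel.predict(p.features), 2))
  .mean()
println("Ridge training Mean Squared Error = " + ridgeMSE)

Because all three trainers minimize the same squared-error loss and differ only in the penalty term, comparing their training MSE this way shows how much accuracy on the training set is traded for the regularization.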
