Reading an Excel spreadsheet with Spark

Reference: https://blog.csdn.net/qq_38689769/article/details/79471332

Reference: https://blog.csdn.net/Dr_Guo/article/details/77374403?locationNum=9&fps=1

pom.xml:

        <dependency>
            <groupId>org.apache.poi</groupId>
            <artifactId>poi</artifactId>
            <version>3.10-FINAL</version>
        </dependency>

        <dependency>
            <groupId>org.apache.poi</groupId>
            <artifactId>poi-ooxml</artifactId>
            <version>3.10-FINAL</version>
        </dependency>

Data:

(screenshot of the sample Excel sheet from the original post)

Code:


import java.io.FileInputStream

import com.emg.join.model.{AA, BB}
import org.apache.poi.ss.usermodel.Cell
import org.apache.poi.xssf.usermodel.XSSFWorkbook
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer

object Excels {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("join")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .setMaster("local[*]")
      .registerKryoClasses(Array[Class[_]](AA.getClass, BB.getClass))
    val spark = SparkSession.builder().config(conf).getOrCreate()
    val sc = spark.sparkContext

    import spark.implicits._

    val filePath = "c:\\user\\id.xlsx"
    //val filePath1 = "hdfs://192.168.40.0:9000/user/id.xlsx"
    val outPath = "c:\\user\\out" // output directory (not defined in the original post; adjust as needed)

    val fis = new FileInputStream(filePath)
    val workbook: XSSFWorkbook = new XSSFWorkbook(fis)
    val sheet = workbook.getSheetAt(0)            // first sheet
    val rowCount = sheet.getPhysicalNumberOfRows  // number of physically defined rows

    val data = new ListBuffer[BB]()

    // Skip row 0 (the header) and read the remaining rows.
    for (i <- 1 until rowCount) {
      val row = sheet.getRow(i)

      // Cell in the first column of the current row.
      val cellwellname: Cell = row.getCell(0)

      // The same column can hold different cell types; only numeric cells are read here.
      var wellname = 0L
      if (cellwellname.getCellType == Cell.CELL_TYPE_NUMERIC) {
        wellname = cellwellname.getNumericCellValue.toLong
      }
      data += BB(wellname)
    }

    fis.close()

    // Turn the collected rows into a Dataset and query it with Spark SQL.
    val data1 = spark.createDataset(data)
    data1.createTempView("data1")

    val result = spark.sql("select * from data1").coalesce(1)
    result.rdd.saveAsTextFile(outPath)

    spark.stop()
  }
}
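
The loop above only handles numeric cells, even though the comment mentions dealing with different data types in the same column. As a minimal sketch (not from the original post), string cells could be covered as well by branching on the POI 3.10 cell-type constants; the parseWellName helper name is made up for illustration and assumes that a text cell contains only digits:

import org.apache.poi.ss.usermodel.Cell

// Hypothetical helper (not in the original post): read the first-column value
// whether the cell is stored as a number or as text.
def parseWellName(cell: Cell): Long = cell.getCellType match {
  case Cell.CELL_TYPE_NUMERIC => cell.getNumericCellValue.toLong
  case Cell.CELL_TYPE_STRING  => cell.getStringCellValue.trim.toLong
  case _                      => 0L
}

Inside the loop, data += BB(parseWellName(cellwellname)) would then replace the if-block.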

Note:

When the path is local, this runs fine. When the path points to HDFS, it fails with a path-not-found error; there seems to be an escape-character issue with the path, and after looking into it I still have not solved it!

If you have a solution, please leave a reply.
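
For what it's worth, one likely cause is that java.io.FileInputStream can only open local files, so an HDFS path has to be opened through Hadoop's FileSystem API instead. A rough sketch (untested, assuming the hdfs:// URI from the commented-out line and that the Hadoop client libraries are on the classpath):

import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.poi.xssf.usermodel.XSSFWorkbook

// Open the workbook from HDFS via Hadoop's FileSystem instead of FileInputStream.
val hdfsPath = "hdfs://192.168.40.0:9000/user/id.xlsx"
val fs = FileSystem.get(new URI(hdfsPath), new Configuration())
val in = fs.open(new Path(hdfsPath))   // FSDataInputStream, a java.io.InputStream
val workbook = new XSSFWorkbook(in)    // XSSFWorkbook accepts any InputStream
// ... read the sheet as before ...
in.close()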
