使用 Commons CLI 实现参数解析，并支持按指定前缀解析动态参数

解析器

import org.apache.commons.cli.{CommandLine, HelpFormatter, Option, Options, PosixParser}
import org.slf4j.LoggerFactory

/**
 * Command-line parameter parser for the SQL-file Spark executor.
 *
 * Wraps Apache Commons CLI: registers the three required options
 * (job name, SQL file path, Yarn queue) plus an optional repeatable
 * "-d key=value" dynamic-property option, and parses `args` against them.
 */
class ParameterTool(args: Array[String]) {

  // Lookup keys for CommandLine.getOptionValue(...) in callers.
  // They double as the option names registered in initOptions so the
  // two can never drift apart.
  val JOB_NAME = "jobName"
  val QUEUE = "queue"
  val SQL_FILE_PATH = "sqlFilePath"

  private val log = LoggerFactory.getLogger(classOf[ParameterTool])

  /**
   * Parses the constructor-supplied arguments.
   *
   * On a parse failure the usage help is printed and the JVM exits with
   * status 1, so callers always observe a non-null CommandLine.
   */
  def parser(): CommandLine = {
    val options: Options = new Options()
    initOptions(options)

    // NOTE(review): PosixParser is deprecated since commons-cli 1.3;
    // prefer DefaultParser if the project is on 1.3+ — confirm version.
    val cliParser: PosixParser = new PosixParser()
    try {
      cliParser.parse(options, args)
    } catch {
      case ex: Exception =>
        // Pass the throwable to the logger so the stack trace lands in the
        // log output instead of raw stderr via printStackTrace().
        log.error("Failed to parse the parameters.", ex)
        val formatter = new HelpFormatter()
        formatter.setWidth(100)
        formatter.printHelp("sqlFileExecutor", options)
        System.exit(1)
        // Unreachable (System.exit never returns); satisfies the return type.
        throw ex
    }
  }

  /** Registers all supported options on the given Options instance. */
  private def initOptions(options: Options): Unit = {
    // Required: Spark job name (-jobName / --jobName <value>).
    val jobName = new Option(
      JOB_NAME,
      JOB_NAME,
      true,
      "Name of the Spark job."
    )
    jobName.setArgName(JOB_NAME)
    jobName.setRequired(true)
    options.addOption(jobName)

    // Required: SQL file path (-sqlFile / --sqlFilePath <value>).
    // Callers look this up by the long name (SQL_FILE_PATH).
    val sqlFilePath = new Option(
      "sqlFile",
      SQL_FILE_PATH,
      true,
      "Path of the sql file to be executed by the Spark job."
    )
    sqlFilePath.setArgName(SQL_FILE_PATH)
    sqlFilePath.setRequired(true)
    options.addOption(sqlFilePath)

    // Required: Yarn queue (-queue / --YarnQueue <value>).
    // Callers look this up by the short name (QUEUE); the long name is
    // kept as-is for CLI backward compatibility.
    val queue = new Option(
      QUEUE,
      "YarnQueue",
      true,
      "Yarn queue for executing the Spark job."
    )
    queue.setArgName(QUEUE)
    queue.setRequired(true)
    options.addOption(queue)

    // Optional, repeatable: dynamic properties in "-d key=value" form.
    // With '=' as value separator, CommandLine.getOptionProperties("D")
    // exposes all occurrences as a java.util.Properties.
    val dynamicOption = new Option(
      "d",
      "D",
      true,
      "dynamic parameters")
    dynamicOption.setArgs(Option.UNLIMITED_VALUES)
    dynamicOption.setValueSeparator('=')
    dynamicOption.setRequired(false)
    options.addOption(dynamicOption)
  }
}

测试

  /** Smoke test: parse the CLI arguments and echo a few of them. */
  def main(args: Array[String]): Unit = {
    val tool = new ParameterTool(args)
    val cmd = tool.parser()

    // Required single-valued options.
    val sqlFilePath = cmd.getOptionValue(tool.SQL_FILE_PATH)
    val jobName = cmd.getOptionValue(tool.JOB_NAME)
    val queue = cmd.getOptionValue(tool.QUEUE)

    // All "-d key=value" occurrences, collected as Properties.
    val configs = cmd.getOptionProperties("D")

    println(jobName)
    println(configs.get("spark.executor.core"))
    println(configs.get("spark.executor.memory"))
  }

输出

tetJob
12
12G

相关标签：web、bigData、java、scala