Resolving Spark 2.0 History Server Out-of-Memory Errors

As the command-builder class below (SparkClassCommandBuilder) shows, the only way to increase the history server's heap is to set SPARK_DAEMON_MEMORY. Setting "-Xmx" in SPARK_HISTORY_OPTS does not work: the builder rejects any value containing "Xmx" in the *_OPTS variables and throws an IllegalArgumentException. A spark-env.sh example follows the class.

class SparkClassCommandBuilder extends AbstractCommandBuilder {

  private final String className;
  private final List<String> classArgs;

  SparkClassCommandBuilder(String className, List<String> classArgs) {
    this.className = className;
    this.classArgs = classArgs;
  }

  @Override
  public List<String> buildCommand(Map<String, String> env)
      throws IOException, IllegalArgumentException {
    List<String> javaOptsKeys = new ArrayList<>();
    String memKey = null;
    String extraClassPath = null;

    // Master, Worker, HistoryServer, ExternalShuffleService, MesosClusterDispatcher use
    // SPARK_DAEMON_JAVA_OPTS (and specific opts) + SPARK_DAEMON_MEMORY.
    if (className.equals("org.apache.spark.deploy.master.Master")) {
      javaOptsKeys.add("SPARK_DAEMON_JAVA_OPTS");
      javaOptsKeys.add("SPARK_MASTER_OPTS");
      memKey = "SPARK_DAEMON_MEMORY";
    } else if (className.equals("org.apache.spark.deploy.worker.Worker")) {
      javaOptsKeys.add("SPARK_DAEMON_JAVA_OPTS");
      javaOptsKeys.add("SPARK_WORKER_OPTS");
      memKey = "SPARK_DAEMON_MEMORY";
    } else if (className.equals("org.apache.spark.deploy.history.HistoryServer")) {
      javaOptsKeys.add("SPARK_DAEMON_JAVA_OPTS");
      javaOptsKeys.add("SPARK_HISTORY_OPTS");
      memKey = "SPARK_DAEMON_MEMORY";
    } else if (className.equals("org.apache.spark.executor.CoarseGrainedExecutorBackend")) {
      javaOptsKeys.add("SPARK_JAVA_OPTS");
      javaOptsKeys.add("SPARK_EXECUTOR_OPTS");
      memKey = "SPARK_EXECUTOR_MEMORY";
    } else if (className.equals("org.apache.spark.executor.MesosExecutorBackend")) {
      javaOptsKeys.add("SPARK_EXECUTOR_OPTS");
      memKey = "SPARK_EXECUTOR_MEMORY";
    } else if (className.equals("org.apache.spark.deploy.mesos.MesosClusterDispatcher")) {
      javaOptsKeys.add("SPARK_DAEMON_JAVA_OPTS");
    } else if (className.equals("org.apache.spark.deploy.ExternalShuffleService") ||
        className.equals("org.apache.spark.deploy.mesos.MesosExternalShuffleService")) {
      javaOptsKeys.add("SPARK_DAEMON_JAVA_OPTS");
      javaOptsKeys.add("SPARK_SHUFFLE_OPTS");
      memKey = "SPARK_DAEMON_MEMORY";
    } else {
      javaOptsKeys.add("SPARK_JAVA_OPTS");
      memKey = "SPARK_DRIVER_MEMORY";
    }

    List<String> cmd = buildJavaCommand(extraClassPath);

    for (String key : javaOptsKeys) {
      String envValue = System.getenv(key);
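      // Any value containing "Xmx" in these *_OPTS variables is rejected;
      // the heap size can only come from the memory env var resolved below.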
      if (!isEmpty(envValue) && envValue.contains("Xmx")) {
        String msg = String.format("%s is not allowed to specify max heap(Xmx) memory settings " +
                "(was %s). Use the corresponding configuration instead.", key, envValue);
        throw new IllegalArgumentException(msg);
      }
      addOptionString(cmd, envValue);
    }

    // memKey is SPARK_DAEMON_MEMORY for the history server, so only that
    // variable (or DEFAULT_MEM) ever determines the -Xmx value.
    String mem = firstNonEmpty(memKey != null ? System.getenv(memKey) : null, DEFAULT_MEM);
    cmd.add("-Xmx" + mem);
    addPermGenSizeOpt(cmd);
    cmd.add(className);
    cmd.addAll(classArgs);
    return cmd;
  }

}
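
The fix is therefore to raise SPARK_DAEMON_MEMORY, typically in conf/spark-env.sh (which the history server's start script sources). Below is a minimal sketch; the 4g heap size and the hdfs:///spark-logs event-log directory are placeholder values, not recommendations.

# conf/spark-env.sh -- minimal sketch for the history server

# Heap size for Spark daemons (Master, Worker, HistoryServer, shuffle service, ...).
# For the history server this is the only variable the launcher maps to -Xmx.
export SPARK_DAEMON_MEMORY=4g

# Non-heap JVM options and -D system properties still belong here.
export SPARK_HISTORY_OPTS="-Dspark.history.fs.logDirectory=hdfs:///spark-logs"

# This, by contrast, would fail at startup with the IllegalArgumentException above:
# export SPARK_HISTORY_OPTS="-Xmx4g"

After editing spark-env.sh, restart the history server (sbin/stop-history-server.sh, then sbin/start-history-server.sh) so the new heap size takes effect.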