Spark log4j logging configuration

1. Spark launch parameters

First, put the log4j configuration file on HDFS: hdfs://R2/projects/log4j-debug.properties
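
A minimal upload sketch, assuming the file exists locally as log4j-debug.properties and the target directory already exists:

# upload the config; -f overwrites an existing file at the destination
hdfs dfs -put -f log4j-debug.properties hdfs://R2/projects/log4j-debug.properties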

--conf spark.yarn.dist.files=hdfs://R2/projects/log4j-debug.properties#log4j-first.properties \
--conf "spark.driver.extraJavaOptions=-Dlog4j.configuration=file:log4j-first.properties" \
--conf "spark.executor.extraJavaOptions=-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/heapdump.hprof -Dlog4j.configuration=file:log4j-first.properties" \

2. log4j.properties (INFO logging)

# Set everything to be logged to the console
log4j.rootCategory=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n

# Set the default spark-shell log level to INFO. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
# the user can have different defaults for the shell and regular Spark apps.
log4j.logger.org.apache.spark.repl.Main=INFO

# Settings to quiet third party logs that are too verbose
log4j.logger.org.spark_project.jetty=ERROR
log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN
log4j.logger.org.apache.parquet=ERROR
log4j.logger.org.apache=WARN
log4j.logger.parquet=ERROR
log4j.logger.org.apache.spark.deploy.yarn=INFO

log4j.logger.org.apache.hudi=INFO

log4j.logger.org.apache.hadoop.hive.metastore.HiveMetaStoreClient=INFO
log4j.logger.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient=INFO
log4j.logger.hive.metastore=INFO

# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
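
Since the console appender writes to System.err, on YARN this output ends up in each container's stderr log. After the application finishes, it can be pulled back with the yarn CLI (the application ID below is a placeholder):

yarn logs -applicationId application_1234567890123_0001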

3. log4j-debug.properties (DEBUG logging)

# Set everything to be logged to the console
log4j.rootCategory=DEBUG, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n

# Set the default spark-shell log level to INFO. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
# the user can have different defaults for the shell and regular Spark apps.
log4j.logger.org.apache.spark.repl.Main=INFO

# Settings to quiet third party logs that are too verbose
log4j.logger.org.spark_project.jetty=ERROR
log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN
log4j.logger.org.apache.parquet=ERROR
log4j.logger.org.apache=WARN
log4j.logger.parquet=ERROR
log4j.logger.org.apache.spark.deploy.yarn=INFO

log4j.logger.org.apache.hudi=INFO

log4j.logger.org.apache.hadoop.hive.metastore.HiveMetaStoreClient=INFO
log4j.logger.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient=INFO
log4j.logger.hive.metastore=INFO

# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
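
Because the JVM options reference the alias log4j-first.properties rather than the HDFS file name, switching between the INFO and DEBUG configurations only requires changing the source of spark.yarn.dist.files. Assuming the INFO file from section 2 was uploaded as hdfs://R2/projects/log4j.properties, the launch parameter would become:

--conf spark.yarn.dist.files=hdfs://R2/projects/log4j.properties#log4j-first.properties \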
