Blame view

templates/hadoop121/log4j.properties 4.34 KB
f795df3ae   Thanasis Naskos   initial commit
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
  # Define some default values that can be overridden by system properties
  hadoop.root.logger=INFO,console
  hadoop.log.dir=.
  hadoop.log.file=hadoop.log
  
  #
  # Job Summary Appender 
  #
  # Use following logger to send summary to separate file defined by 
  # hadoop.mapreduce.jobsummary.log.file rolled daily:
  # hadoop.mapreduce.jobsummary.logger=INFO,JSA
  # 
  hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
  hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
  
  # Define the root logger to the system property "hadoop.root.logger".
  log4j.rootLogger=${hadoop.root.logger}, EventCounter
  
  # Logging Threshold
  log4j.threshold=ALL
  
  #
  # Daily Rolling File Appender
  #
  
  log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
  log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
  
  # Rollover at midnight
  log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
  
  # 30-day backup
  #log4j.appender.DRFA.MaxBackupIndex=30
  log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
  
  # Pattern format: Date LogLevel LoggerName LogMessage
  log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
  # Debugging Pattern format
  #log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
  
  
  #
  # console
  # Add "console" to the root logger above if you want to use this
  #
  
  log4j.appender.console=org.apache.log4j.ConsoleAppender
  log4j.appender.console.target=System.err
  log4j.appender.console.layout=org.apache.log4j.PatternLayout
  log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
  
  #
  # TaskLog Appender
  #
  
  #Default values
  hadoop.tasklog.taskid=null
  hadoop.tasklog.iscleanup=false
  hadoop.tasklog.noKeepSplits=4
  hadoop.tasklog.totalLogFileSize=100
  hadoop.tasklog.purgeLogSplits=true
  hadoop.tasklog.logsRetainHours=12
  
  log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
  log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
  log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
  log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
  
  log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
  log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
  
  #
  #Security audit appender
  #
  hadoop.security.log.file=SecurityAuth.audit
  log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
  log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
  
  log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
  log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
  #new logger
  log4j.logger.SecurityLogger=OFF,console
  log4j.logger.SecurityLogger.additivity=false
  
  #
  # Rolling File Appender
  #
  
  #log4j.appender.RFA=org.apache.log4j.RollingFileAppender
  #log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
  
  # Logfile size and 30-day backups
  #log4j.appender.RFA.MaxFileSize=1MB
  #log4j.appender.RFA.MaxBackupIndex=30
  
  #log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
  #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
  #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
  
  #
  # FSNamesystem Audit logging
  # All audit events are logged at INFO level
  #
  log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN
  
  # Custom Logging levels
  
  hadoop.metrics.log.level=INFO
  #log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
  #log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
  #log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
  log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
  
  # Jets3t library
  log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
  
  #
  # Null Appender
  # Trap security logger on the hadoop client side
  #
  log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
  
  #
  # Event Counter Appender
  # Sends counts of logging messages at different severity levels to Hadoop Metrics.
  #
  log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
  
  #
  # Job Summary Appender
  #
  log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
  log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
  log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
  log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
  log4j.appender.JSA.DatePattern=.yyyy-MM-dd
  log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
  log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false