Commit e32bf0f0
Authored Jun 03, 2015 by Christos Kanellopoulos

    Merge pull request #22 from ARGOeu/devel

    Sprint 32

Parents: e9dfeecc, 3c51d8a7
Changes: 8 files

roles/consumer/tasks/main.yml

@@ -5,12 +5,17 @@
     yum: name=ar-consumer state=latest
     notify: restart consumer
 
-- name: Install packages from ar project
+- name: Install avro from ar project
   tags: ar-packages
-  yum: name={{ item }} state=latest
-  with_items:
-    - ar-sync
-    - ar-compute
+  yum: name=avro state=present
+
+- name: Install argo-egi-connectors from ar project
+  tags: ar-packages
+  yum: name=argo-egi-connectors state=latest
+
+- name: Install ar-compute from ar project
+  tags: ar-packages
+  yum: name=ar-compute state=latest
 
 - name: Configure ar-compute stuff 1
   tags: compute_config
@@ -36,6 +41,22 @@
     state=present
     backup=yes
 
+- name: Configure ar-compute stuff 4
+  tags: consumer_config
+  lineinfile: dest=/etc/ar-compute-engine.conf
+              regexp="^sync_path="
+              line="sync_path=/var/lib/argo-connectors"
+              state=present
+              backup=yes
+
+- name: Configure ar-compute stuff 5
+  tags: consumer_config
+  lineinfile: dest=/etc/ar-compute-engine.conf
+              regexp="^sync_exec="
+              line="sync_exec=/usr/libexec/argo-egi-connectors"
+              state=present
+              backup=yes
+
 - name: Configure ar-compute job cycle daily cron
   tags: compute_config
   cron: cron_file=ar_job_cycle_daily
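
The two new lineinfile tasks are idempotent: each replaces the line in /etc/ar-compute-engine.conf that matches regexp (the last one, if several match) or appends the line when no match exists, so repeated runs converge on the same result. An illustrative fragment of the resulting file:

    sync_path=/var/lib/argo-connectors
    sync_exec=/usr/libexec/argo-egi-connectors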
@@ -60,14 +81,14 @@
   tags: sync_config
   file: path={{ item }} owner=root group=root mode=0755 state=directory
   with_items:
-    - /var/lib/ar-sync/EGI/Cloudmon
-    - /var/lib/ar-sync/EGI/Critical
+    - /var/lib/argo-connectors/EGI/Cloudmon
+    - /var/lib/argo-connectors/EGI/Critical
 
 - name: Make sure ownerships are OK
   tags: consumer_config
   file: path={{ item }} owner=root group=arstats mode=0775 state=directory
   with_items:
-    - /var/lib/ar-sync
+    - /var/lib/argo-connectors
     - /var/lib/ar-consumer
 
 - name: Enable and start consumer service
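
The notify: restart consumer in the context lines above assumes a matching handler elsewhere in the consumer role; that handler is not part of this diff. A minimal sketch of what roles/consumer/handlers/main.yml presumably contains, mirroring the rsyslog handler added later in this commit (the service name ar-consumer is an assumption):

    ---
    # Hypothetical handler, not shown in this diff; service name assumed
    # from the ar-consumer package installed above.
    - name: restart consumer
      service: name=ar-consumer state=restarted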

roles/consumer/templates/ar-data-retention.conf.j2

@@ -41,16 +41,16 @@ mongoDBRetentionInDays: {{ mongo_retention_in_days }}
 fileTemplate_1: /var/lib/ar-consumer/ar-consumer_log_%Y_%m_%d.txt
 fileTemplate_2: /var/lib/ar-consumer/ar-consumer_log_%Y_%m_%d.avro
 fileTemplate_3: /var/lib/ar-consumer/ar-consumer_error_%Y_%m_%d.txt
-fileTemplate_4: /var/lib/ar-sync/downtimes_%Y-%m-%d.out
-fileTemplate_5: /var/lib/ar-sync/downtimes_%Y-%m-%d.avro
-fileTemplate_6: /var/lib/ar-sync/poem_sync_%Y_%m_%d.out
-fileTemplate_7: /var/lib/ar-sync/poem_sync_%Y_%m_%d.avro
-fileTemplate_8: /var/lib/ar-sync/prefilter_%Y_%m_%d.out
-fileTemplate_9: /var/lib/ar-sync/prefilter_%Y_%m_%d.avro
-fileTemplate_10: /var/lib/ar-sync/hepspec_sync_%Y_%m_%d.out
-fileTemplate_11: /var/lib/ar-sync/weights_sync_%Y_%m_%d.avro
-fileTemplate_12: /var/lib/ar-sync/sites_%Y_%m_%d.out
-fileTemplate_13: /var/lib/ar-sync/sites_%Y_%m_%d.avro
-fileTemplate_14: /var/lib/ar-sync/groups_%Y_%m_%d.out
-fileTemplate_15: /var/lib/ar-sync/groups_%Y_%m_%d.avro
+fileTemplate_4: /var/lib/argo-connectors/downtimes_%Y-%m-%d.out
+fileTemplate_5: /var/lib/argo-connectors/downtimes_%Y-%m-%d.avro
+fileTemplate_6: /var/lib/argo-connectors/poem_sync_%Y_%m_%d.out
+fileTemplate_7: /var/lib/argo-connectors/poem_sync_%Y_%m_%d.avro
+fileTemplate_8: /var/lib/argo-connectors/prefilter_%Y_%m_%d.out
+fileTemplate_9: /var/lib/argo-connectors/prefilter_%Y_%m_%d.avro
+fileTemplate_10: /var/lib/argo-connectors/hepspec_sync_%Y_%m_%d.out
+fileTemplate_11: /var/lib/argo-connectors/weights_sync_%Y_%m_%d.avro
+fileTemplate_12: /var/lib/argo-connectors/sites_%Y_%m_%d.out
+fileTemplate_13: /var/lib/argo-connectors/sites_%Y_%m_%d.avro
+fileTemplate_14: /var/lib/argo-connectors/groups_%Y_%m_%d.out
+fileTemplate_15: /var/lib/argo-connectors/groups_%Y_%m_%d.avro
 fileRetentionInDays: {{ files_retention_in_days }}
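
The mongo_retention_in_days and files_retention_in_days placeholders are Jinja2 variables filled in when Ansible renders this template; their definitions are not part of this commit. A hypothetical example of where they could live, with made-up values:

    # e.g. roles/consumer/defaults/main.yml -- file location and values are
    # illustrative assumptions, not taken from this diff
    mongo_retention_in_days: 30
    files_retention_in_days: 5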

roles/logging_config/files/hadoop/log4j.properties (new file, 0 → 100644)

# Define some default values that can be overridden by system properties
hadoop.root.logger=INFO,console,SYSLOG
hadoop.log.dir=.
hadoop.log.file=hadoop.log

# Define the root logger to the system property "hadoop.root.logger".
log4j.rootLogger=${hadoop.root.logger}, EventCounter

# Logging Threshold
log4j.threshold=ALL

# Null Appender
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender

#
# Rolling File Appender - cap space usage at 5gb.
#
hadoop.log.maxfilesize=256MB
hadoop.log.maxbackupindex=20
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
log4j.appender.RFA.MaxFileSize=${hadoop.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hadoop.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout

# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
# Debugging Pattern format
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n

#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}

# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd

# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout

# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
# Debugging Pattern format
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n

#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n

#
# configure Syslog facility appender
#
log4j.appender.SYSLOG=org.apache.log4j.net.SyslogAppender
log4j.appender.SYSLOG.syslogHost=localhost
log4j.appender.SYSLOG.facility=LOCAL4
log4j.appender.SYSLOG.layout=org.apache.log4j.PatternLayout
log4j.appender.SYSLOG.layout.conversionPattern=%t: %p %c:%L - %m%n

#
# TaskLog Appender
#
# Default values
hadoop.tasklog.taskid=null
hadoop.tasklog.iscleanup=false
hadoop.tasklog.noKeepSplits=4
hadoop.tasklog.totalLogFileSize=100
hadoop.tasklog.purgeLogSplits=true
hadoop.tasklog.logsRetainHours=12
log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n

#
# HDFS block state change log from block manager
#
# Uncomment the following to suppress normal block state change
# messages from BlockManager in NameNode.
#log4j.logger.BlockStateChange=WARN

#
# Security appender
#
hadoop.security.logger=INFO,NullAppender
hadoop.security.log.maxfilesize=256MB
hadoop.security.log.maxbackupindex=20
log4j.category.SecurityLogger=${hadoop.security.logger}
hadoop.security.log.file=SecurityAuth-${user.name}.audit
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}

#
# Daily Rolling Security appender
#
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd

#
# hdfs audit logging
#
hdfs.audit.logger=INFO,NullAppender
hdfs.audit.log.maxfilesize=256MB
hdfs.audit.log.maxbackupindex=20
log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
log4j.appender.RFAAUDIT=org.apache.log4j.RollingFileAppender
log4j.appender.RFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
log4j.appender.RFAAUDIT.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.RFAAUDIT.MaxFileSize=${hdfs.audit.log.maxfilesize}
log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}

#
# mapred audit logging
#
mapred.audit.logger=INFO,NullAppender
mapred.audit.log.maxfilesize=256MB
mapred.audit.log.maxbackupindex=20
log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
log4j.appender.MRAUDIT=org.apache.log4j.RollingFileAppender
log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.MRAUDIT.MaxFileSize=${mapred.audit.log.maxfilesize}
log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}

# Custom Logging levels
#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
#log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=DEBUG

# Jets3t library
log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR

#
# Event Counter Appender
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
#
log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter

#
# Job Summary Appender
#
# Use following logger to send summary to separate file defined by
# hadoop.mapreduce.jobsummary.log.file :
# hadoop.mapreduce.jobsummary.logger=INFO,JSA
#
hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
hadoop.mapreduce.jobsummary.log.maxfilesize=256MB
hadoop.mapreduce.jobsummary.log.maxbackupindex=20
log4j.appender.JSA=org.apache.log4j.RollingFileAppender
log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
log4j.appender.JSA.MaxFileSize=${hadoop.mapreduce.jobsummary.log.maxfilesize}
log4j.appender.JSA.MaxBackupIndex=${hadoop.mapreduce.jobsummary.log.maxbackupindex}
log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false

#
# Yarn ResourceManager Application Summary Log
#
# Set the ResourceManager summary log filename
#yarn.server.resourcemanager.appsummary.log.file=rm-appsummary.log
# Set the ResourceManager summary log level and appender
#yarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY

# Appender for ResourceManager Application Summary Log
# Requires the following properties to be set
#    - hadoop.log.dir (Hadoop Log directory)
#    - yarn.server.resourcemanager.appsummary.log.file (resource manager app summary log filename)
#    - yarn.server.resourcemanager.appsummary.logger (resource manager app summary log level and appender)
#log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=${yarn.server.resourcemanager.appsummary.logger}
#log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=false
#log4j.appender.RMSUMMARY=org.apache.log4j.RollingFileAppender
#log4j.appender.RMSUMMARY.File=${hadoop.log.dir}/${yarn.server.resourcemanager.appsummary.log.file}
#log4j.appender.RMSUMMARY.MaxFileSize=256MB
#log4j.appender.RMSUMMARY.MaxBackupIndex=20
#log4j.appender.RMSUMMARY.layout=org.apache.log4j.PatternLayout
#log4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
\ No newline at end of file
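
With hadoop.root.logger=INFO,console,SYSLOG, Hadoop client logs are also shipped through the SyslogAppender, which targets facility LOCAL4 on localhost; the rsyslog UDP listener that receives them is opened by the tasks in roles/logging_config/tasks/main.yml below. This commit does not route LOCAL4 anywhere specific; a hypothetical extra task in the same style, if one wanted those messages in a dedicated file (task name and log path are assumptions, not part of this diff):

    - name: Route LOCAL4 facility to a dedicated log file
      tags: syslog_config
      lineinfile: dest=/etc/rsyslog.conf
                  regexp='^local4\.\*'
                  line='local4.*    /var/log/argo-hadoop.log'
                  state=present
                  backup=yes
      notify: restart syslog service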

roles/logging_config/files/pig/log4j.properties (new file, 0 → 100644)

# ***** Set the org.apache.pig logger level to info and its appenders to A and SYSLOG.
log4j.logger.org.apache.pig=info, A, SYSLOG

# ***** A is set to be a ConsoleAppender.
log4j.appender.A=org.apache.log4j.ConsoleAppender

# ***** A uses PatternLayout.
log4j.appender.A.layout=org.apache.log4j.PatternLayout
log4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n

# configure Syslog facility appender
log4j.appender.SYSLOG=org.apache.log4j.net.SyslogAppender
log4j.appender.SYSLOG.syslogHost=localhost
log4j.appender.SYSLOG.facility=LOCAL4
log4j.appender.SYSLOG.layout=org.apache.log4j.PatternLayout
log4j.appender.SYSLOG.layout.conversionPattern=%t: %p %c:%L - %m%n

roles/logging_config/handlers/main.yml (new file, 0 → 100644)

---
- name: restart syslog service
  service: name=rsyslog state=restarted
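
This handler runs only when a task notifies it, and only once at the end of the play, however many of the rsyslog tasks below report a change; a single run therefore restarts rsyslog at most once.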

roles/logging_config/tasks/main.yml (new file, 0 → 100644)

---
- name: Configure syslog to accept UDP reception 1
  tags: syslog_config
  lineinfile: dest=/etc/rsyslog.conf
              regexp='^\$ModLoad imudp'
              line='$ModLoad imudp'
              insertafter='^#\$ModLoad imudp'
              state=present
              backup=yes
  notify: restart syslog service

- name: Configure syslog to accept UDP reception 2
  tags: syslog_config
  lineinfile: dest=/etc/rsyslog.conf
              regexp='^\$UDPServerRun'
              line='$UDPServerRun 514'
              insertafter='^#\$UDPServerRun'
              state=present
              backup=yes
  notify: restart syslog service

- name: Bind syslog UDP service to localhost interface
  tags: syslog_config
  lineinfile: dest=/etc/rsyslog.conf
              regexp='^\$UDPServerAddress'
              line='$UDPServerAddress 127.0.0.1'
              insertbefore='^#\$UDPServerRun'
              state=present
              backup=yes
  notify: restart syslog service

- name: Configure pig related parameter
  tags: pig_client_config
  lineinfile: dest=/etc/pig/conf/pig.properties
              regexp="^pig.logfile"
              line="pig.logfile=/tmp/pig-err.log"
              state=present
              backup=yes

- name: Insert comment for pig related parameter
  tags: pig_client_config
  lineinfile: dest=/etc/pig/conf/pig.properties
              regexp="^# File Parameter"
              insertbefore="^pig.logfile"
              line="# File Parameter for pig exception dump."
              state=present
              backup=yes

- name: Insert log4j pig config file
  tags: pig_client_config
  lineinfile: dest=/etc/pig/conf/pig.properties
              regexp="^log4jconf="
              insertbefore="# log4jconf=./conf/log4j.properties"
              line="log4jconf=/etc/pig/conf/log4j.properties"
              state=present
              backup=yes

- name: Copy pig log4j.properties file
  tags: pig_client_config
  copy: src=pig/log4j.properties
        dest=/etc/pig/conf/log4j.properties backup=yes
        owner=root group=root mode=0644

- name: Copy hadoop log4j.properties file
  tags: hadoop_client_logging_config
  copy: src=hadoop/log4j.properties
        dest=/etc/hadoop/conf/log4j.properties backup=yes
        owner=root group=root mode=0644

- name: Add syslog appender to compute engine configuration
  tags: hadoop_client_logging_config
  lineinfile: dest=/etc/ar-compute-engine.conf
              regexp="^hadoop_log_root="
              line="hadoop_log_root=INFO,SYSLOG"
              state=present
              backup=yes
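
Because every task above carries a tag, the logging changes can be applied selectively. An illustrative invocation (inventory and connection details assumed to be configured already):

    ansible-playbook standalone.yml --tags syslog_config,pig_client_config,hadoop_client_logging_config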

roles/pig_client/tasks/main.yml (deleted, 100644 → 0)

The role's two tasks are not lost: they reappear verbatim in roles/logging_config/tasks/main.yml above.

- name: Configure pig related parameter
  tags: pig_client_config
  lineinfile: dest=/etc/pig/conf/pig.properties
              regexp="^pig.logfile"
              line="pig.logfile=/tmp/pig-err.log"
              state=present
              backup=yes

- name: Insert comment for pig related parameter
  tags: pig_client_config
  lineinfile: dest=/etc/pig/conf/pig.properties
              regexp="^# File Parameter"
              insertbefore="^pig.logfile"
              line="# File Parameter for pig exception dump."
              state=present
              backup=yes

standalone.yml

@@ -8,6 +8,6 @@
   - { role: ca_bundle, tags: ca_bundle }
   - { role: has_certificate, tags: has_certificate }
   - { role: consumer, tags: consumer }
+  - { role: logging_config, tags: logging_config }
   - { role: mongodb, tags: mongodb }
-  - { role: pig_client, tags: pig_client }
   - { role: webapi, tags: webapi }