manifests/templates.pp @ 38:a1960fb961c5 (puppet-3.6)

Move APF refresh command to node template, as it won't work on boxes without it!

author:   IBBoard <dev@ibboard.co.uk>
date:     Sat, 14 Mar 2015 20:22:36 +0000
parents:  1bb941522ebf
children: 222904296578
# Make sure packages come after their repos
YumRepo<| |> -> Package<| |>
# Make sure all files are in place before starting services
File<| |> -> Service<| |>

class basenode {
  $os    = $operatingsystem
  $osver = "v${operatingsystemrelease}"

  include sudo
  include defaultusers
  include logwatch

  file { '/etc/puppet/hiera.yaml':
    ensure  => present,
    content => "---
:backends: yaml
:yaml:
  :datadir: /var/lib/hiera
:hierarchy: common
:logger: console",
  }
}

class basevpsnode (
  $primary_ip,
  $secondary_ip,
  $mailserver,
  $imapserver,
  $firewall_cmd = 'iptables',
) {
  # VPS is a self-mastered Puppet machine, so bodge a hosts file
  file { '/etc/hosts':
    ensure  => present,
    content => "127.0.0.1 localhost puppet
$primary_ip ${fqdn}",
  }

  require repos
  include basenode
  include ssh::server
  include vcs::server
  include vcs::client

  class { 'webserver':
    primary_ip   => $primary_ip,
    secondary_ip => $secondary_ip,
  }

  include cronjobs
  include logrotate

  class { 'fail2ban':
    firewall_cmd => $firewall_cmd,
  }

  include tools

  class { 'email':
    mailserver => $mailserver,
    imapserver => $imapserver,
  }
}

## Classes to allow facet behaviour using preconfigured setups of classes

class repos {
  yumrepo { 'epel':
    mirrorlist     => 'https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch',
    descr          => 'Extra Packages for Enterprise Linux',
    enabled        => 1,
    failovermethod => 'priority',
    gpgcheck       => 1,
    gpgkey         => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-6',
  }
  file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-6':
    ensure => present,
    source => 'puppet:///common/RPM-GPG-KEY-EPEL-6',
  }

  yumrepo { 'ibboard':
    baseurl  => 'http://download.opensuse.org/repositories/home:/IBBoard:/server/CentOS_CentOS-$releasever/',
    descr    => 'IBBoard Server',
    enabled  => 1,
    gpgcheck => 1,
    gpgkey   => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-IBBoard-OBS',
  }
  file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-IBBoard-OBS':
    ensure => present,
    source => 'puppet:///common/RPM-GPG-KEY-IBBoard-OBS',
    before => YumRepo['ibboard'],
  }

  yumrepo { 'webtatic':
    mirrorlist     => 'http://mirror.webtatic.com/yum/el6/$basearch/mirrorlist',
    descr          => 'Webtatic Yum Repository for Enterprise Linux',
    enabled        => 1,
    failovermethod => 'priority',
    gpgcheck       => 1,
    gpgkey         => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-andy',
  }
  file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-andy':
    ensure => present,
    source => 'puppet:///common/RPM-GPG-KEY-webtatic-andy',
    before => YumRepo['webtatic'],
  }
}

class tools {
  $packages = [ 'sqlite', 'bash-completion', 'nano' ]

  package { $packages:
    ensure => latest;
  }
}

class logrotate {
  package { 'logrotate':
    ensure => latest;
  }

  file { '/etc/logrotate.d/httpd':
    ensure  => present,
    source  => 'puppet:///common/logrotate-httpd',
    require => Package['logrotate'],
  }
  file { '/etc/logrotate.d/trac':
    ensure  => present,
    source  => 'puppet:///common/logrotate-trac',
    require => Package['logrotate'],
  }
}

class logwatch {
  package { 'logwatch':
    ensure => latest;
  }

  # Defaults for all file resources in this class
  File {
    ensure  => present,
    require => Package['logwatch'],
  }

  file { '/etc/cron.daily/0logwatch':
    source => 'puppet:///common/0logwatch';
  }
  file { '/etc/logwatch/scripts/shared/':
    ensure => directory,
  }
  file { '/etc/logwatch/scripts/services/http-error':
    source => 'puppet:///common/logwatch/http-error',
  }
  file { '/etc/logwatch/scripts/services/php':
    source => 'puppet:///common/logwatch/scripts_php',
  }
  file { '/etc/logwatch/scripts/services/mysql':
    source => 'puppet:///common/logwatch/scripts_mysql',
  }
  file { '/etc/logwatch/scripts/services/dovecot':
    source => 'puppet:///common/logwatch/dovecot',
  }
  file { '/etc/logwatch/scripts/services/postfix':
    source => 'puppet:///common/logwatch/postfix',
  }
  file { '/etc/logwatch/scripts/shared/applyhttperrordate':
    source => 'puppet:///common/logwatch/applyhttperrordate',
  }
  file { '/etc/logwatch/conf/logwatch.conf':
    content => 'Detail = Med',
  }
  file { '/etc/logwatch/conf/logfiles/http.conf':
    content => 'LogFile = apache/access_*.log',
  }
  file { '/etc/logwatch/conf/logfiles/http-error.conf':
    source => 'puppet:///common/logwatch/log-http-error.conf',
  }
  file { '/etc/logwatch/conf/services/http-error.conf':
    source => 'puppet:///common/logwatch/services-http-error.conf',
  }
  file { '/etc/logwatch/conf/logfiles/php.conf':
    source => 'puppet:///common/logwatch/logfiles_php.conf',
  }
  file { '/etc/logwatch/conf/services/php.conf':
    source => 'puppet:///common/logwatch/services_php.conf',
  }
  file { '/etc/logwatch/conf/logfiles/mysql.conf':
    source => 'puppet:///common/logwatch/logfiles_mysql.conf',
  }
  file { '/etc/logwatch/conf/services/mysql.conf':
    source => 'puppet:///common/logwatch/services_mysql.conf',
  }
}
class fail2ban (
  $firewall_cmd,
) {
  package { 'fail2ban':
    ensure => latest,
  }
  service { 'fail2ban':
    ensure => running,
    enable => true,
  }

  # Defaults for all file resources in this class
  File {
    ensure  => present,
    require => Package['fail2ban'],
    notify  => Service['fail2ban'],
  }

  file { '/etc/fail2ban/jail.local':
    source => 'puppet:///common/fail2ban/jail.local',
  }
  file { '/etc/fail2ban/action.d/apf.conf':
    source => 'puppet:///common/fail2ban/apf.conf',
  }
  file { '/etc/fail2ban/action.d/firewall-ban.conf':
    ensure => link,
    target => "/etc/fail2ban/action.d/${firewall_cmd}.conf",
  }
  file { '/etc/fail2ban/filter.d/ibb-apache-exploits-instaban.conf':
    source => 'puppet:///common/fail2ban/ibb-apache-exploits-instaban.conf',
  }
  file { '/etc/fail2ban/filter.d/ibb-apache-shellshock.conf':
    source => 'puppet:///common/fail2ban/ibb-apache-shellshock.conf',
  }
  file { '/etc/fail2ban/filter.d/ibb-repeat-offender.conf':
    source => 'puppet:///common/fail2ban/ibb-repeat-offender.conf',
  }
  file { '/etc/fail2ban/filter.d/ibb-postfix-spammers.conf':
    source => 'puppet:///common/fail2ban/ibb-postfix-spammers.conf',
  }
  file { '/etc/fail2ban/filter.d/ibb-postfix-malicious.conf':
    source => 'puppet:///common/fail2ban/ibb-postfix-malicious.conf',
  }
  file { '/etc/fail2ban/filter.d/ibb-postfix.conf':
    source => 'puppet:///common/fail2ban/ibb-postfix.conf',
  }
  file { '/etc/fail2ban/fail2ban.local':
    content => '[Definition]
logtarget = /var/log/fail2ban.log',
  }

  # Because one of our rules checks fail2ban's log, but the service dies without the file
  file { '/var/log/fail2ban.log':
    ensure => present,
    owner  => 'root',
    group  => 'root',
    mode   => '0600',
  }
}

# Our web server with our configs, not just a stock one
class webserver (
  $primary_ip,
  $secondary_ip,
) {
  # Setup base website parameters
  class { 'website':
    base_dir           => '/srv/sites',
    primary_ip         => $primary_ip,
    secondary_ip       => $secondary_ip,
    default_owner      => $defaultusers::default_user,
    default_group      => $defaultusers::default_user,
    default_tld        => 'co.uk',
    default_extra_tlds => [ 'com' ],
  }

  # Configure the PHP version to use
  class { 'website::php':
    suffix  => '55ibb', # IBBoard's rebuild of Webtatic's PHP 5.5
    opcache => 'opcache',
  }

  # Setup MySQL, using (private) templates to make sure that we set non-std passwords and a default user
  if $operatingsystem == 'CentOS' and versioncmp($operatingsystemrelease, 7) >= 0 {
    $mysqlpackage = 'mariadb'
    $mysqlsuffix  = ''
  } else {
    $mysqlpackage = 'mysql'
    $mysqlsuffix  = '55w'
  }

  class { 'website::mysql':
    mysqluser      => template('defaultusers/mysql-user'),
    mysqlpassword  => template('defaultusers/mysql-password'),
    mysqlprefix    => $mysqlpackage,
    mysqlsuffix    => $mysqlsuffix,
    phpsuffix      => '55ibb',
    phpmysqlsuffix => 'nd',
  }
}
class ibboardvpsnode (
  $primary_ip,
  $secondary_ip,
  $mailserver,
  $imapserver,
  $firewall_cmd = 'iptables',
) {
  class { 'basevpsnode':
    primary_ip   => $primary_ip,
    secondary_ip => $secondary_ip,
    mailserver   => $mailserver,
    imapserver   => $imapserver,
    firewall_cmd => $firewall_cmd,
  }

  # Common modules used by multiple sites (mod_auth_basic is safe because we HTTPS all the things)
  apache::mod {
    'auth_basic':;
    'authn_file':;
    'authz_user':;
    'auth_token':;
    'deflate':;
    'version':;
  }

  if $operatingsystem == 'CentOS' and versioncmp($operatingsystemrelease, 7) >= 0 {
    apache::mod { 'authn_core':; }
  }

  $apache_packages = [ 'mod_auth_token' ]

  package { $apache_packages:
    ensure => present;
  }

  # Configure our sites, using templates for the custom fragments where the extra content is too long
  include adminsite

  website::https::multitld { 'www.ibboard':
    custom_fragment => template("private/apache/ibboard.fragment"),
  }

  include hiveworldterrasite
  include glittergothsite
  include devsite

  website::https::multitld { 'www.abiknight':
    custom_fragment => "$website::htmlphpfragment
ErrorDocument 404 /error.php",
  }

  website::https::multitld { 'www.gracebertram':
    main_tld        => 'com',
    extra_tlds      => [ 'co.uk' ],
    docroot_owner   => $defaultusers::secondary_user,
    docroot_group   => 'editors',
    custom_fragment => template("private/apache/gracebertram.fragment"),
  }

  website::https { 'www.realmrunner.com':
    docroot         => "${website::basedir}/gracebertram", # Don't give it a separate docroot because it is a redirect via the fragment
    docroot_owner   => $defaultusers::secondary_user,
    docroot_group   => 'editors',
    serveraliases   => 'realmrunner.com',
    custom_fragment => template("private/apache/realmrunner.fragment"),
  }

  include webmailpimsite

  website::http { 'lktutoring.co.uk':
    docroot_owner   => $defaultusers::secondary_user,
    docroot_group   => 'editors',
    serveraliases   => [ 'www.lktutoring.co.uk', 'lktutoring.com', 'www.lktutoring.com' ],
    ensure          => 'present',
    custom_fragment => 'Include conf.extra/no-index.conf
Include conf.custom/filter-core.conf
Include conf.extra/no-www.conf
Include conf.extra/no-com.conf
Include conf.extra/html-php.conf
#Additional custom fragment
ErrorDocument 404 /error.php',
  }
}

class adminsite {
  apache::mod {
    'info':;
    'status':;
    'cgi':;
  }

  website::https::multitld { 'admin.ibboard':
    force_no_index  => false,
    ssl_ca_chain    => '',
    custom_fragment => template("private/apache/admin.fragment"),
  }

  cron { 'loadavg':
    command => '/usr/local/bin/run-loadavg-logger',
    user    => 'apache',
    minute  => '*/6',
  }
  cron { 'awstats':
    command => '/usr/local/bin/update-awstats > /srv/sites/admin/awstats.log',
    user    => 'apache',
    hour    => '*/6',
    minute  => '0',
  }
}

class hiveworldterrasite {
  website::https::multitld { 'www.hiveworldterra':
    force_no_www    => false,
    custom_fragment => template("private/apache/hwt.fragment"),
  }
  website::https::multitld { 'forums.hiveworldterra':
    custom_fragment => 'ErrorDocument 404 /error.php',
  }
  website::https::multitld { 'skins.hiveworldterra':
    custom_fragment => template("private/apache/skins.fragment"),
  }
  website::https::redir { 'hiveworldterra.ibboard.co.uk':
    redir        => 'https://www.hiveworldterra.co.uk/',
    docroot      => "${website::basedir}/hiveworldterra",
    separate_log => true,
  }
}

class devsite {
  apache::mod {
    # mod_wsgi for Python support
    'wsgi':;
  }

  include python::venv

  # Create Python virtualenvs for the dev site apps
  python::venv::isolate {
    "/srv/rhodecode/virtualenv":;
    "/srv/trac/virtualenv":;
  }

  # Graphviz for Trac "master ticket" graphs
  package { 'graphviz':
    ensure => latest,
  }

  website::https::multitld { 'www.warfoundry':
    custom_fragment => template("private/apache/warfoundry.fragment"),
  }

  website::https::multitld { 'dev.ibboard':
    # Make sure we're the first one hit for the tiny fraction of "no support" cases we care about (potentially Python for Mercurial!)
    # http://en.wikipedia.org/wiki/Server_Name_Indication#No_support
    priority        => 1,
    custom_fragment => template("private/apache/dev.fragment"),
  }
}
class glittergothsite {
  website::https::multitld { 'www.glittergoth':
    ip              => $website::secondary_ip,
    priority        => 1,
    ssl_ca_chain    => 'glittergoth.ca-bundle',
    docroot_owner   => $defaultusers::secondary_user,
    docroot_group   => 'editors',
    force_no_index  => false,
    custom_fragment => template("private/apache/glittergoth.fragment"),
  }
  website::https { 'test.glittergoth.co.uk':
    docroot         => "${website::basedir}/glittergoth-test",
    docroot_owner   => $defaultusers::secondary_user,
    docroot_group   => 'editors',
    ip              => $website::secondary_ip,
    force_no_index  => false,
    custom_fragment => template("private/apache/glittergoth-test.fragment"),
  }

  # Website specific cron jobs
  cron { 'backupopencart':
    command => "/usr/local/bin/backupdb opencart",
    user    => 'root',
    hour    => '*/6',
    minute  => '15',
  }
  cron { 'requestreviews':
    command => '/usr/local/bin/request-reviews 2> /srv/sites/admin/request-reviews.log',
    user    => 'apache',
    hour    => 4,
    minute  => 5,
  }
}

class webmailpimsite {
  # Webmail and Personal Information Management (PIM) sites
  website::https { 'webmail.ibboard.co.uk':
    force_no_index  => false,
    ssl_ca_chain    => '',
    custom_fragment => template("private/apache/webmail.fragment"),
  }
  website::https { 'pim.ibboard.co.uk':
    force_no_index    => false,
    lockdown_requests => false,
    ssl_ca_chain      => '',
    custom_fragment   => template("private/apache/pim.fragment"),
  }

  cron { 'owncloudcron':
    command => "/usr/local/bin/owncloud-cron",
    user    => 'apache',
    minute  => '*/15',
  }
}

class email (
  $mailserver,
  $imapserver,
) {
  class { 'postfix':
    mailserver => $mailserver,
  }
  class { 'dovecot':
    imapserver => $imapserver,
  }
}

class cronjobs {
  # Add Mutt for scripts that send emails, but stop it clogging the disk by keeping copies of emails
  package { 'mutt':
    ensure => latest,
  }
  file { '/etc/Muttrc.local':
    content => 'set copy = no',
    require => Package['mutt'],
  }

  # General server-wide cron jobs
  Cron {
    user => 'root',
  }

  cron { 'backupalldbs':
    command  => "/usr/local/bin/backupalldbs",
    monthday => "*/2",
    hour     => "4",
    minute   => "9",
  }
  cron { 'greatfirewallofchina':
    command => '/usr/local/bin/update-great-firewall-of-china',
    hour    => 3,
    minute  => 30,
  }
  cron { 'permissions':
    command => '/usr/local/bin/set-permissions',
    hour    => 3,
    minute  => 2,
  }
  # Since we're only managing the local server, use "puppet apply" instead of PuppetMaster
  cron { 'puppet':
    command => 'puppet apply /etc/puppet/manifests/site.pp | grep -v "Finished catalog run in"',
    hour    => '*/6',
    minute  => 5,
  }
  cron { 'purgecaches':
    command => "/usr/local/bin/purge-caches",
    hour    => '4',
    minute  => '15',
    weekday => '1',
  }
  # Notify of uncommitted files
  cron { 'check-mercurial-committed':
    command => "/usr/local/bin/check-hg-status",
    hour    => '4',
    minute  => '20',
    weekday => '0-6/3', # Sunday, Wednesday and Saturday morning
  }
}
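
# For reference, a minimal sketch of how a node might declare these classes from the
# site.pp that the 'puppet' cron job above applies. The node name and all parameter
# values below are hypothetical placeholders, not values from the real site.pp
# (which is not part of this file); only the class and its parameters come from the
# definitions above:
#
#   node 'vps.example.com' {
#     class { 'ibboardvpsnode':
#       primary_ip   => '192.0.2.10',
#       secondary_ip => '192.0.2.11',
#       mailserver   => 'mail.example.com',
#       imapserver   => 'imap.example.com',
#       firewall_cmd => 'iptables',
#     }
#   }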