view manifests/templates.pp @ 131:0dd899a10ee1 puppet-3.6

Change all "latest" packages to "installed" Having Puppet update packages is dangerous and not correct sysadmin. We have a script for checking for updates. Let that run and let the sysadmin update when appropriate. This will prevent any potential issues from faulty service restarts in the middle of the night.
author IBBoard <dev@ibboard.co.uk>
date Wed, 26 Oct 2016 19:40:37 +0100
parents 16a931df5fd7
children b1815d10eb91
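
For illustration, the change described in the commit message replaces "ensure => latest" with "ensure => installed" on package resources, e.g. (the package name here is a hypothetical example, not one taken from this manifest):

	package { 'example-package':
		ensure => installed, # was: ensure => latest
	}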

# Make sure packages come after their repos
YumRepo<| |> -> Package<| |>

# Make sure all files are in place before starting services
File<| |> -> Service<| |>


class basenode {
	$os = $operatingsystem
	$osver = "v${operatingsystemrelease}"
	include sudo

	include defaultusers
	include logwatch

	file { '/etc/puppet/hiera.yaml':
		ensure => present,
		content => "---
:backends: yaml
:yaml:
  :datadir: /var/lib/hiera
:hierarchy: common
:logger: console",
	}
}

class basevpsnode (
	$primary_ip,
	$secondary_ip,
	$mailserver,
	$imapserver,
	$firewall_cmd = 'iptables',
	) {

	if $firewall_cmd == 'iptables' {
		include vpsfirewall
	}

	#VPS is a self-mastered Puppet machine, so bodge a Hosts file
	file { '/etc/hosts':
		ensure => present,
		content => "127.0.0.1   localhost
$primary_ip ${fqdn}",
	}

	require repos
	include basenode
	include private
	include dnsresolver
	include ssh::server
	include vcs::server
	include vcs::client
	class { 'webserver':
		primary_ip => $primary_ip,
		secondary_ip => $secondary_ip,
	}
	include cronjobs
	include logrotate
	class { 'fail2ban':
		firewall_cmd => $firewall_cmd,
	}
	include tools
	class { 'email':
		mailserver => $mailserver,
		imapserver => $imapserver,
	}
}

## Classes to allow facet behaviour using preconfigured setups of classes

class vpsfirewall {
	resources { "firewall":
		purge => false,
	}
	firewallchain { 'INPUT:filter:IPv4':
		purge => true,		
		ignore => [
			'-j f2b-[^ ]+$',
			'^(:|-A )f2b-',
			'--comment "Great Firewall of China"',
			'--comment "Do not purge',
			],
	}
	Firewall {
		before => Class['my_fw::post'],
		require => Class['my_fw::pre'],
	}
	class { ['my_fw::pre', 'my_fw::post']: }
	class { 'firewall': }
	firewall { '010 Whitelist Googlebot':
		source => '66.249.64.0/19',
		dport => [80,443],
		proto => tcp,
		action => accept,
	}
	firewall { '099 Blacklist spammers 1':
		source => '146.0.229.80/28',
		dport => [465, 25],
		proto => tcp,
		action => 'reject',
	}
	firewall { '099 Blacklist spammers 2':
		source => '89.43.62.0/24',
		dport => [465, 25],
		proto => tcp,
		action => 'reject',
	}
	firewall { '099 Blacklist IODC bot':
		# IODC bot makes too many bad requests, and contact form is broken
		# They don't publish a robots.txt name, so firewall it!
		source => '86.153.145.149',
		dport => [ 80, 443 ],
		proto => tcp,
		action => 'reject',
	}		
	firewallchain { 'GREATFIREWALLOFCHINA:filter:IPv4':
		ensure => present,
	}
	firewall { '050 Check our Great Firewall Against China':
		chain => 'INPUT',
		jump => 'GREATFIREWALLOFCHINA',
	}
	firewallchain { 'Fail2Ban:filter:IPv4':
		ensure => present,
	}
	firewall { '060 Check Fail2Ban':
		chain => 'INPUT',
		jump => 'Fail2Ban',
	}
	firewall { '100 allow https and http':
		dport => [80, 443],
		proto => tcp,
		action => accept,
	}
	firewall { '101 allow SMTP':
		dport => [25, 465],
		proto => tcp,
		action => accept,
	}
	firewall { '102 allow IMAPS':
		dport => 993,
		proto => tcp,
		action => accept,
	}
	# Note: SSH port will be managed separately as we 
	# put it on a different port to hide from script kiddy noise
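	# Illustrative sketch only (commented out, not applied): a rule along these
	# lines would open the alternate SSH port. The rule name and port number are
	# placeholders, not the real values used on this host.
	#firewall { '011 allow SSH on alternate port':
	#	dport => 2222,
	#	proto => tcp,
	#	action => accept,
	#}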
}

class dnsresolver {
	package { 'bind':
		ensure => present,
	}

	service { 'named':
		ensure => running,
		enable => true,
	}

	file { '/etc/NetworkManager/conf.d/local-dns-resolver.conf':
		ensure => present,
		content => "[main]
dns=none",
	}

	file { '/etc/sysconfig/named':
		ensure => present,
		content => 'OPTIONS="-4"',
	}

	file { '/etc/resolv.conf':
		ensure => present,
		content => "nameserver 127.0.0.1"
	}
}

class repos {
	yumrepo { 'epel':
		mirrorlist => 'https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch',
		descr => "Extra Packages for Enterprise Linux",
		enabled => 1,
		failovermethod => 'priority',
		gpgcheck => 1,
		gpgkey => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-6',
	}
	file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-6':
		ensure => present,
		source => 'puppet:///common/RPM-GPG-KEY-EPEL-6'
	}
	yumrepo { 'ibboard':
		baseurl => 'http://download.opensuse.org/repositories/home:/IBBoard:/server/CentOS_CentOS-$releasever/',
		descr => 'IBBoard Server',
		enabled => 1,
		gpgcheck => 1,
		gpgkey => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-IBBoard-OBS',
	}
	file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-IBBoard-OBS':
		ensure => present,
		source => 'puppet:///common/RPM-GPG-KEY-IBBoard-OBS',
		before => YumRepo['ibboard'],
	}
	yumrepo { 'webtatic':
		mirrorlist => 'http://mirror.webtatic.com/yum/el$releasever/$basearch/mirrorlist',
		descr => "Webtatic Packages for Enterprise Linux",
		enabled => 1,
		failovermethod => 'priority',
		gpgcheck => 1,
		gpgkey => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-el7',
	}
	file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-andy':
		ensure => absent,
	}
	file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-el7':
		ensure => present,
		source => 'puppet:///common/RPM-GPG-KEY-webtatic-el7',
		before => YumRepo['webtatic'],
	}
}

class tools {
	$packages = [ 'sqlite', 'bash-completion', 'nano', 'bzip2', 'mlocate', 'patch' ]
	package { $packages:
		ensure => installed;
	}
}

class logrotate {
	package { 'logrotate':
		ensure => installed;
	}
	file { '/etc/logrotate.d/httpd':
		ensure => present,
		source => 'puppet:///common/logrotate-httpd',
		require => Package['logrotate'],
	}
	file { '/etc/logrotate.d/trac':
		ensure => present,
		source => 'puppet:///common/logrotate-trac',
		require => Package['logrotate'],
	}
}

class logwatch {
	package { 'logwatch':
		ensure => installed;
	}
	File {
		ensure => present,
		require => Package['logwatch'],
	}
	file { '/etc/cron.daily/0logwatch':
		source => 'puppet:///common/0logwatch';
	}
	file { '/etc/logwatch/scripts/shared/':
		ensure => directory,
	}
	file { '/etc/logwatch/scripts/services/fail2ban':
		source => 'puppet:///common/logwatch/services-fail2ban',
	}
	file { '/etc/logwatch/scripts/services/http-error':
		source => 'puppet:///common/logwatch/http-error',
	}
	file { '/etc/logwatch/scripts/services/php':
		source => 'puppet:///common/logwatch/scripts_php',
	}
	file { '/etc/logwatch/scripts/services/mysql':
		source => 'puppet:///common/logwatch/scripts_mysql',
	}
	file { '/etc/logwatch/scripts/services/dovecot':
		source => 'puppet:///common/logwatch/dovecot',
	}
	file { '/etc/logwatch/scripts/services/postfix':
		source => 'puppet:///common/logwatch/postfix',
	}
	file { '/etc/logwatch/scripts/shared/applyhttperrordate':
		source => 'puppet:///common/logwatch/applyhttperrordate',
	}
	file { '/etc/logwatch/conf/logwatch.conf':
		content => 'Detail = Med',
	}
	file { '/etc/logwatch/conf/logfiles/http.conf':
		content => 'LogFile = apache/access_*.log',
	}
	file { '/etc/logwatch/conf/logfiles/http-error-24.conf':
		source => 'puppet:///common/logwatch/log-http-error.conf',
	}
	file { '/etc/logwatch/conf/logfiles/http-error.conf':
		ensure => absent,
	}
	file { '/etc/logwatch/conf/services/http-error.conf':
		source => 'puppet:///common/logwatch/services-http-error.conf',
	}
	file { '/etc/logwatch/conf/logfiles/php.conf':
		source => 'puppet:///common/logwatch/logfiles_php.conf',
	}
	file { '/etc/logwatch/conf/services/php.conf':
		source => 'puppet:///common/logwatch/services_php.conf',
	}
	file { '/etc/logwatch/conf/logfiles/mysql.conf':
		source => 'puppet:///common/logwatch/logfiles_mysql.conf',
	}
	file { '/etc/logwatch/conf/services/mysql.conf':
		source => 'puppet:///common/logwatch/services_mysql.conf',
	}
}

class fail2ban (
	$firewall_cmd,
	) {
	package { 'fail2ban':
		ensure => installed,
	}
	service { 'fail2ban':
		ensure => running,
		enable => true
	}
	File {
		ensure => present,
		require => Package['fail2ban'],
		notify => Service['fail2ban'],
	}
	file { '/etc/fail2ban/fail2ban.local':
		source => 'puppet:///common/fail2ban/fail2ban.local',
	}
	file { '/etc/fail2ban/jail.local':
		source => 'puppet:///common/fail2ban/jail.local',
	}
	file { '/etc/fail2ban/action.d/apf.conf':
		source => 'puppet:///common/fail2ban/apf.conf',
	}

	if $firewall_cmd == 'iptables' {
		$firewall_ban_cmd = 'iptables-multiport'
	} else {
		$firewall_ban_cmd = $firewall_cmd
	}

	file { '/etc/fail2ban/action.d/firewall-ban.conf':
		ensure => link,
		target => "/etc/fail2ban/action.d/${firewall_ban_cmd}.conf",
	}
	file { '/etc/fail2ban/filter.d/ibb-apache-exploits-instaban.conf':
		source => 'puppet:///common/fail2ban/ibb-apache-exploits-instaban.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-apache-shellshock.conf':
		source => 'puppet:///common/fail2ban/ibb-apache-shellshock.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-repeat-offender.conf':
		source => 'puppet:///common/fail2ban/ibb-repeat-offender.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-postfix-spammers.conf':
		source => 'puppet:///common/fail2ban/ibb-postfix-spammers.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-postfix-malicious.conf':
		source => 'puppet:///common/fail2ban/ibb-postfix-malicious.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-postfix.conf':
		source => 'puppet:///common/fail2ban/ibb-postfix.conf',
	}
	# One of our rules checks fail2ban's own log, but the service dies if the log file doesn't exist
	file { '/var/log/fail2ban.log':
		ensure => present,
		owner => 'root',
		group => 'root',
		mode => '0600',
	}
}

#Our web server with our configs, not just a stock one
class webserver (
	$primary_ip,
	$secondary_ip,
	) {
	#Setup base website parameters
	class { 'website':
		base_dir => '/srv/sites',
		primary_ip => $primary_ip,
		secondary_ip => $secondary_ip,
		default_owner => $defaultusers::default_user,
		default_group => $defaultusers::default_user,
		default_tld => 'co.uk',
		default_extra_tlds => [ 'com' ],
	}

	# Use Webtatic's PHP 7
	$php_suffix = '70w'

	#Configure the PHP version to use
	class { 'website::php':
		suffix => $php_suffix, 
		opcache => 'opcache',
		extras => [ 'process' ],
	}

	#Setup MySQL, using (private) templates to make sure that we set non-std passwords and a default user

	if $operatingsystem == 'CentOS' and versioncmp($operatingsystemrelease, 7) >= 0 {
		$mysqlpackage = 'mariadb'
		$mysqlsuffix = ''

		$extra_packages = [
			'policycoreutils-python', # Required for SELinux
			'subversion-python', #Required for Trac
			'perl-Sys-Syslog', #Required for Perl SPF checking
		]

		package { $extra_packages:
			 ensure => installed
		}
	}
	else {
		$mysqlpackage = 'mysql'
		$mysqlsuffix = '55w'
	}
	class { 'website::mysql':
		mysqluser => template('defaultusers/mysql-user'),
		mysqlpassword => template('defaultusers/mysql-password'),
		mysqlprefix => $mysqlpackage,
		mysqlsuffix => $mysqlsuffix,
		phpsuffix => $php_suffix,
		phpmysqlsuffix => 'nd'
	}
}

class ibboardvpsnode (
	$primary_ip,
	$secondary_ip,
	$mailserver,
	$imapserver,
	$firewall_cmd = 'iptables',
	){
	class { 'basevpsnode':
		primary_ip => $primary_ip,
		secondary_ip => $secondary_ip,
		mailserver => $mailserver,
		imapserver => $imapserver,
		firewall_cmd => $firewall_cmd,
	}

	# Common modules used by multiple sites (mod_auth_basic is safe because we HTTPS all the things)
	apache::mod {
		'auth_basic':; 'authn_file':; 'authz_user':; 'auth_token':; 'deflate':;
	}
	if $operatingsystem == 'CentOS' and versioncmp($operatingsystemrelease, 7) >= 0 {
		apache::mod { 
			'authn_core':;
		}
	}
	$apache_packages = [ 'mod_auth_token' ]
	package { $apache_packages:
		ensure => present;
	}

	#Configure our sites, using templates for the custom fragments where the extra content is too long
	include adminsite
	website::https::multitld { 'www.ibboard':
		custom_fragment => template("private/apache/ibboard.fragment"),
	}
	include hiveworldterrasite
	include glittergothsite
	include devsite
	website::https::multitld { 'www.abiknight':
		custom_fragment => "$website::htmlphpfragment
	ErrorDocument 404 /error.php",
	}
	website::https::multitld { 'www.gracebertram':
		main_tld => 'com',
		extra_tlds => [ 'co.uk' ],
		docroot_owner => $defaultusers::secondary_user,
		docroot_group => 'editors',
		custom_fragment => template("private/apache/gracebertram.fragment"),
	}
	website::https { 'www.realmrunner.com':
		docroot => "${website::basedir}/gracebertram", # Don't give it a separate docroot because it is a redirect via the fragment
		docroot_owner => $defaultusers::secondary_user,
		docroot_group => 'editors',
		serveraliases => 'realmrunner.com',
		custom_fragment => template("private/apache/realmrunner.fragment"),
	}
	include webmailpimsite
	website::http { 'lktutoring.co.uk':
		docroot_owner      => $defaultusers::secondary_user,
		docroot_group      => 'editors',
		serveraliases      => [ 'www.lktutoring.co.uk', 'lktutoring.com', 'www.lktutoring.com' ],
		ensure             => 'present',
		custom_fragment    => 'Include conf.extra/no-index.conf
Include conf.custom/filter-core.conf
Include conf.extra/no-www.conf
Include conf.extra/no-com.conf
Include conf.extra/html-php.conf
#Additional custom fragment
ErrorDocument 404 /error.php',
	}
}

class adminsite{
	apache::mod { 'info':; 'status':; 'cgi':; }
	website::https::multitld { 'admin.ibboard':
		force_no_index => false,
		ssl_ca_chain => '',
		custom_fragment => template("private/apache/admin.fragment"),
	}
	cron { 'loadavg':
		command => '/usr/local/bin/run-loadavg-logger',
		user => apache,
		minute => '*/6'
	}
	cron { 'awstats':
		command => '/usr/local/bin/update-awstats > /srv/sites/admin/awstats.log',
		user => apache,
		hour => '*/6',
		minute => '0'
	}
}

class hiveworldterrasite {
	website::https::multitld { 'www.hiveworldterra':
		force_no_www => false,
		custom_fragment => template("private/apache/hwt.fragment"),
	}
	website::https::multitld { 'forums.hiveworldterra': 
		custom_fragment => template("private/apache/forums.fragment"),
	}
	website::https::multitld { 'skins.hiveworldterra':
		custom_fragment => template("private/apache/skins.fragment"),
	}
	website::https::redir { 'hiveworldterra.ibboard.co.uk':
		redir => 'https://www.hiveworldterra.co.uk/',
		docroot => "${website::basedir}/hiveworldterra",
		separate_log => true,
	}
}

class devsite {
	apache::mod {
		# mod_wsgi for Python support
		'wsgi':;
	}

	include python::venv

	# Create Python virtualenvs for the dev site apps
	python::venv::isolate {
		"/srv/rhodecode/virtualenv":;
		"/srv/trac/virtualenv":;
	}

	# Graphviz for Trac "master ticket" graphs
	package { 'graphviz':
		ensure => installed,
	}

	website::https::multitld { 'www.warfoundry':
		custom_fragment => template("private/apache/warfoundry.fragment"),
	}
	website::https::multitld { 'dev.ibboard':
		#Make sure we're the first one hit for the tiny fraction of "no support" cases we care about (potentially Python for Mercurial!)
		# http://en.wikipedia.org/wiki/Server_Name_Indication#No_support
		priority => 1,
		custom_fragment => template("private/apache/dev.fragment"),
		force_no_index => false,
	}
}
class glittergothsite {
	website::https::multitld { 'www.glittergoth':
		ip => $website::secondary_ip,
		priority => 1,
		ssl_ca_chain => 'glittergoth.ca-bundle',
		docroot_owner => $defaultusers::secondary_user,
		docroot_group => 'editors',
		force_no_index => false,
		custom_fragment => template("private/apache/glittergoth.fragment"),
	}
	website::https { 'test.glittergoth.co.uk':
		docroot => "${website::basedir}/glittergoth-test",
		docroot_owner => $defaultusers::secondary_user,
		docroot_group => 'editors',
		ip => $website::secondary_ip,
		force_no_index => false,
		custom_fragment => template("private/apache/glittergoth-test.fragment"),
	}

	# Website specific cron jobs
	cron { 'backupopencart':
		command => "/usr/local/bin/backupdb opencart",
		user => 'root',
		hour => '*/6',
		minute => '15',
	}
	cron { 'requestreviews':
		command => '/usr/local/bin/request-reviews 2> /srv/sites/admin/request-reviews.log',
		user => 'apache',
		hour => 4,
		minute => 5,
		ensure => absent,
	}
}

class webmailpimsite {
	# Webmail and Personal Information Management (PIM) sites
	website::https { 'webmail.ibboard.co.uk':
		force_no_index => false,
		ssl_ca_chain => '',
		custom_fragment => template("private/apache/webmail.fragment"),
	}
	website::https { 'pim.ibboard.co.uk':
		force_no_index => false,
		lockdown_requests => false,
		ssl_ca_chain => '',
		custom_fragment => template("private/apache/pim.fragment"),
	}
	cron { 'owncloudcron':
		command => "/usr/local/bin/owncloud-cron",
		user => 'apache',
		minute => '*/15',
	}
}

class email (
	$mailserver,
	$imapserver,
	){
	class { 'postfix':
		mailserver => $mailserver,
	}
	class { 'dovecot':
		imapserver => $imapserver,
	}
	package { [ 'amavisd-new', 'clamav', 'clamav-server-systemd', 'clamav-update' ]:
		ensure => installed,
		tag => 'av',
	}
	service { 'amavisd':
		ensure => 'running',
		enable => 'true',
	}
	file { '/etc/freshclam.conf':
		ensure => present,
		source => 'puppet:///common/freshclam.conf',
		tag => 'av',
	}
	file { '/etc/sysconfig/freshclam':
		ensure => present,
		source => 'puppet:///common/freshclam',
		tag => 'av',
	}
	file { '/etc/amavisd/amavisd.conf':
		ensure => present,
		source => 'puppet:///private/postfix/amavisd.conf',
		tag => 'av',
	}
	Package<| tag == 'av' |> -> File<| tag == 'av' |>
	File<| tag == 'av' |> {
		notify => Service['amavisd'],
	}
	cron { 'Postwhite':
		command => "/usr/local/bin/postwhite 2>&1| grep -vE '^(Starting|Recursively|Getting|Querying|Removing|Sorting|$)'",
		user => 'root',
		weekday => 0,
		hour => 2,
		minute => 0,
	}
}

class cronjobs {
	# Add Mutt for scripts that send emails, but stop it from clogging the disk with saved copies of sent emails
	package { 'mutt':
		ensure => installed,
	}
	file { '/etc/Muttrc.local':
		content => 'set copy = no',
		require => Package['mutt'],
	}

	# General server-wide cron jobs
	Cron { user => 'root' }
	cron { 'backupalldbs':
		command => "/usr/local/bin/backupalldbs",
		monthday => "*/2",
		hour => "4",
		minute => "9"
	}
	cron { 'greatfirewallofchina':
		command => '/usr/local/bin/update-great-firewall-of-china',
		hour => 3,
		minute => 30
	}
	cron { 'permissions':
		command => '/usr/local/bin/set-permissions',
		hour => 3,
		minute => 2
	}
	# Since we're only managing the local server, use our script that wraps "puppet apply" instead of PuppetMaster
	cron { 'puppet':
		command => '/usr/local/bin/puppet-apply | grep -v "Compiled catalog for\|Finished catalog run in"',
		hour => '*/6',
		minute => 5
	}
	cron { 'purgecaches':
		command => "/usr/local/bin/purge-caches",
		hour => '4',
		minute => '15',
		weekday => '1',
	}
	# Notify of uncommitted files
	cron { 'check-mercurial-committed':
		command => "/usr/local/bin/check-hg-status",
		hour => '4',
		minute => '20',
		weekday => '0-6/3', #Sunday, Wednesday and Saturday morning
	}
	# Notify of available updates
	cron { 'check-yum-updates':
		command => '/usr/bin/yum check-update | tail -2 | grep -Ev "^ \* \w+: \w+"',
		hour => '4',
		minute => '30',
		weekday => '0-6/3', #Sunday, Wednesday and Saturday morning
	}
	# And check whether anything needs restarting
	cron { 'check-needs-restarting':
		command => '/usr/bin/needs-restarting|grep -v "/usr/lib/systemd\|/usr/sbin/lvmetad\|/usr/lib/polkit-1/polkitd"',
		hour => '4',
		minute => '45',
		weekday => '0-6/3', #Sunday, Wednesday and Saturday morning
	}
}