view manifests/templates.pp @ 176:048bc4d6af43 puppet-3.6

Make Postfix IPv4 only. We have IPv6 on the server, but it is not publicly routable, and disabling IPv6 through sysctl didn't work.
author IBBoard <dev@ibboard.co.uk>
date Sat, 31 Mar 2018 10:19:53 +0100

# Make sure packages come after their repos
YumRepo<| |> -> Package<| |>

# Make sure all files are in place before starting services
File<| |> -> Service<| |>


class basenode {
	$os = $operatingsystem
	$osver = "v${operatingsystemrelease}"
	include sudo

	include defaultusers
	include logwatch

	file { '/etc/puppet/hiera.yaml':
		ensure => present,
		content => "---
:backends: yaml
:yaml:
  :datadir: /var/lib/hiera
:hierarchy: common
:logger: console",
	}
}

class basevpsnode (
	$primary_ip,
	$secondary_ip,
	$mailserver,
	$imapserver,
	$firewall_cmd = 'iptables',
	) {

	if $firewall_cmd == 'iptables' {
		include vpsfirewall
	}

	#VPS is a self-mastered Puppet machine, so bodge a Hosts file
	file { '/etc/hosts':
		ensure => present,
		content => "127.0.0.1   localhost
$primary_ip ${fqdn}",
	}

	require repos
	include basenode
	include private
	include dnsresolver
	include ssh::server
	include vcs::server
	include vcs::client
	class { 'webserver':
		primary_ip => $primary_ip,
		secondary_ip => $secondary_ip,
	}
	include cronjobs
	include logrotate
	class { 'fail2ban':
		firewall_cmd => $firewall_cmd,
	}
	include tools
	class { 'email':
		mailserver => $mailserver,
		imapserver => $imapserver,
	}
}

## Classes to allow facet behaviour using preconfigured setups of classes

class vpsfirewall {
	resources { "firewall":
		purge => false,
	}
	firewallchain { 'INPUT:filter:IPv4':
		purge => true,
		ignore => [
			'-j f2b-[^ ]+$',
			'^(:|-A )f2b-',
			'--comment "Great Firewall of China"',
			'--comment "Do not purge',
			],
	}
	Firewall {
		before => Class['my_fw::post'],
		require => Class['my_fw::pre'],
	}
	class { ['my_fw::pre', 'my_fw::post']: }
	class { 'firewall': }
	firewall { '010 Whitelist Googlebot':
		source => '66.249.64.0/19',
		dport => [80,443],
		proto => tcp,
		action => accept,
	}
	firewall { '099 Blacklist spammers 1':
		source => '146.0.229.80/28',
		dport => [465, 25],
		proto => tcp,
		action => 'reject',
	}
	firewall { '099 Blacklist spammers 2':
		source => '89.43.62.0/24',
		dport => [465, 25],
		proto => tcp,
		action => 'reject',
	}
	# German server that did 5000+ HEAD requests in <10 days to "/" on one site
	firewall { '099 Blacklist spammers 3':
		source => '78.47.182.152',
		dport => [465, 25],
		proto => tcp,
		action => 'reject',
	}
	firewall { '099 Blacklist IODC bot':
		# IODC bot makes too many bad requests, and contact form is broken
		# They don't publish a robots.txt name, so firewall it!
		source => '86.153.145.149',
		dport => [ 80, 443 ],
		proto => tcp,
		action => 'reject',
	}
	firewall { '099 Blacklist Baidu Brazil':
		#Baidu got a Brazilian netblock and are hitting us hard
		#Baidu doesn't honour "crawl-delay" in robots.txt
		#Baidu gets firewalled
		source => '131.161.8.0/22',
		dport => [ 80, 443 ],
		proto => tcp,
		action => 'reject',
	}
	firewallchain { 'GREATFIREWALLOFCHINA:filter:IPv4':
		ensure => present,
	}
	firewall { '050 Check our Great Firewall Against China':
		chain => 'INPUT',
		jump => 'GREATFIREWALLOFCHINA',
	}
	firewallchain { 'Fail2Ban:filter:IPv4':
		ensure => present,
	}
	firewall { '060 Check Fail2Ban':
		chain => 'INPUT',
		jump => 'Fail2Ban',
	}
	firewall { '100 allow https and http':
		dport => [80, 443],
		proto => tcp,
		action => accept,
	}
	firewall { '101 allow SMTP':
		dport => [25, 465],
		proto => tcp,
		action => accept,
	}
	firewall { '102 allow IMAPS':
		dport => 993,
		proto => tcp,
		action => accept,
	}
	# Note: the SSH port is managed separately because we run sshd on a
	# non-standard port to hide it from script-kiddie noise
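	# A sketch of what that separate rule could look like; the port number
	# below is a placeholder, not the real one:
	# firewall { '020 allow SSH on alternate port':
	# 	dport  => 2222,
	# 	proto  => tcp,
	# 	action => accept,
	# }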
}

class dnsresolver {
	package { 'bind':
		ensure => present,
	}

	service { 'named':
		ensure => running,
		enable => true,
	}

	file { '/etc/NetworkManager/conf.d/local-dns-resolver.conf':
		ensure => present,
		content => "[main]
dns=none",
	}

	file { '/etc/sysconfig/named':
		ensure => present,
		content => 'OPTIONS="-4"',
	}

	file { '/etc/resolv.conf':
		ensure => present,
		content => "nameserver 127.0.0.1"
	}
}

class repos {
	yumrepo { 'epel':
		mirrorlist => 'https://mirrors.fedoraproject.org/metalink?repo=epel-$releasever&arch=$basearch',
		descr => "Extra Packages for Enterprise Linux",
		enabled => 1,
		failovermethod => 'priority',
		gpgcheck => 1,
		gpgkey => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-6',
	}
	file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-6':
		ensure => present,
		source => 'puppet:///common/RPM-GPG-KEY-EPEL-6'
	}
	yumrepo { 'ibboard':
		ensure => absent
	}
	yumrepo { 'webtatic':
		mirrorlist => 'http://mirror.webtatic.com/yum/el$releasever/$basearch/mirrorlist',
		descr => "Webtatic Packages for Enterprise Linux",
		enabled => 1,
		failovermethod => 'priority',
		gpgcheck => 1,
		gpgkey => 'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-el7',
	}
	file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-andy':
		ensure => absent,
	}
	file { '/etc/pki/rpm-gpg/RPM-GPG-KEY-webtatic-el7':
		ensure => present,
		source => 'puppet:///common/RPM-GPG-KEY-webtatic-el7',
		before => YumRepo['webtatic'],
	}

	# Install Pip and symlink it so we can use it as a package provider
	package { 'python2-pip':
		ensure => installed;
	}
	->
	file { '/usr/bin/pip-python':
		ensure => link,
		target => '/usr/bin/pip',
	} -> Package <| provider == 'pip' |>
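	# With the symlink in place, packages can declare pip as their provider,
	# e.g. (hypothetical module name):
	# package { 'some-python-module':
	# 	ensure   => installed,
	# 	provider => 'pip',
	# }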
}

class tools {
	$packages = [ 'sqlite', 'bash-completion', 'nano', 'bzip2', 'mlocate', 'patch' ]
	package { $packages:
		ensure => installed;
	}
}

class logrotate {
	package { 'logrotate':
		ensure => installed;
	}
	file { '/etc/logrotate.d/httpd':
		ensure => present,
		source => 'puppet:///common/logrotate-httpd',
		require => Package['logrotate'],
	}
	file { '/etc/logrotate.d/trac':
		ensure => present,
		source => 'puppet:///common/logrotate-trac',
		require => Package['logrotate'],
	}
}

class logwatch {
	package { 'logwatch':
		ensure => installed;
	}
	File {
		ensure => present,
		require => Package['logwatch'],
	}
	file { '/etc/cron.daily/0logwatch':
		source => 'puppet:///common/0logwatch';
	}
	file { '/etc/logwatch/scripts/shared/':
		ensure => directory,
	}
	file { '/etc/logwatch/scripts/services/fail2ban':
		source => 'puppet:///common/logwatch/services-fail2ban',
	}
	file { '/etc/logwatch/scripts/services/http-error':
		source => 'puppet:///common/logwatch/http-error',
	}
	file { '/etc/logwatch/scripts/services/php':
		source => 'puppet:///common/logwatch/scripts_php',
	}
	file { '/etc/logwatch/scripts/services/mysql':
		source => 'puppet:///common/logwatch/scripts_mysql',
	}
	file { '/etc/logwatch/scripts/services/dovecot':
		source => 'puppet:///common/logwatch/dovecot',
	}
	file { '/etc/logwatch/scripts/services/postfix':
		source => 'puppet:///common/logwatch/postfix',
	}
	file { '/etc/logwatch/scripts/shared/applyhttperrordate':
		source => 'puppet:///common/logwatch/applyhttperrordate',
	}
	file { '/etc/logwatch/conf/logwatch.conf':
		content => 'Detail = Med',
	}
	file { '/etc/logwatch/conf/logfiles/http.conf':
		content => 'LogFile = apache/access_*.log',
	}
	file { '/etc/logwatch/conf/logfiles/http-error-24.conf':
		source => 'puppet:///common/logwatch/log-http-error.conf',
	}
	file { '/etc/logwatch/conf/logfiles/http-error.conf':
		ensure => absent,
	}
	file { '/etc/logwatch/conf/services/http-error.conf':
		source => 'puppet:///common/logwatch/services-http-error.conf',
	}
	file { '/etc/logwatch/conf/logfiles/php.conf':
		source => 'puppet:///common/logwatch/logfiles_php.conf',
	}
	file { '/etc/logwatch/conf/services/php.conf':
		source => 'puppet:///common/logwatch/services_php.conf',
	}
	file { '/etc/logwatch/conf/logfiles/mysql.conf':
		source => 'puppet:///common/logwatch/logfiles_mysql.conf',
	}
	file { '/etc/logwatch/conf/services/mysql.conf':
		source => 'puppet:///common/logwatch/services_mysql.conf',
	}
}

class fail2ban (
	$firewall_cmd,
	) {
	package { 'fail2ban':
		ensure => installed,
	}
	service { 'fail2ban':
		ensure => running,
		enable => true
	}
	File {
		ensure => present,
		require => Package['fail2ban'],
		notify => Service['fail2ban'],
	}
	file { '/etc/fail2ban/fail2ban.local':
		source => 'puppet:///common/fail2ban/fail2ban.local',
	}
	file { '/etc/fail2ban/jail.local':
		source => 'puppet:///common/fail2ban/jail.local',
	}
	file { '/etc/fail2ban/action.d/apf.conf':
		source => 'puppet:///common/fail2ban/apf.conf',
	}

	if $firewall_cmd == 'iptables' {
		$firewall_ban_cmd = 'iptables-multiport'
	} else {
		$firewall_ban_cmd = $firewall_cmd
	}

	file { '/etc/fail2ban/action.d/firewall-ban.conf':
		ensure => link,
		target => "/etc/fail2ban/action.d/${firewall_ban_cmd}.conf",
	}
	file { '/etc/fail2ban/filter.d/ibb-apache-exploits-instaban.conf':
		source => 'puppet:///common/fail2ban/ibb-apache-exploits-instaban.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-apache-shellshock.conf':
		source => 'puppet:///common/fail2ban/ibb-apache-shellshock.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-repeat-offender.conf':
		source => 'puppet:///common/fail2ban/ibb-repeat-offender.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-postfix-spammers.conf':
		source => 'puppet:///common/fail2ban/ibb-postfix-spammers.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-postfix-malicious.conf':
		source => 'puppet:///common/fail2ban/ibb-postfix-malicious.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-postfix.conf':
		source => 'puppet:///common/fail2ban/ibb-postfix.conf',
	}
	file { '/etc/fail2ban/filter.d/ibb-sshd.conf':
		source => 'puppet:///common/fail2ban/ibb-sshd.conf',
	}
	# Because one of our rules checks fail2ban's log, but the service dies without the file
	file { '/var/log/fail2ban.log':
		ensure => present,
		owner => 'root',
		group => 'root',
		mode => '0600',
	}
}

#Our web server with our configs, not just a stock one
class webserver (
	$primary_ip,
	$secondary_ip,
	) {
	#Setup base website parameters
	class { 'website':
		base_dir => '/srv/sites',
		primary_ip => $primary_ip,
		secondary_ip => $secondary_ip,
		default_owner => $defaultusers::default_user,
		default_group => $defaultusers::default_user,
		default_tld => 'co.uk',
		default_extra_tlds => [ 'com' ],
	}

	# Use Webtatic's PHP 7
	$php_suffix = '70w'

	#Configure the PHP version to use
	class { 'website::php':
		suffix => $php_suffix, 
		opcache => 'opcache',
		extras => [ 'process' ],
	}

	#Setup MySQL, using (private) templates to make sure that we set non-std passwords and a default user

	if $operatingsystem == 'CentOS' and versioncmp($operatingsystemrelease, '7') >= 0 {
		$mysqlpackage = 'mariadb'
		$mysqlsuffix = ''

		$extra_packages = [
			'policycoreutils-python', # Required for SELinux
			'subversion-python', #Required for Trac
			'perl-Sys-Syslog', #Required for Perl SPF checking
		]

		package { $extra_packages:
			 ensure => installed
		}
	}
	else {
		$mysqlpackage = 'mysql'
		$mysqlsuffix = '55w'
	}
	class { 'website::mysql':
		mysqluser => template('defaultusers/mysql-user'),
		mysqlpassword => template('defaultusers/mysql-password'),
		mysqlprefix => $mysqlpackage,
		mysqlsuffix => $mysqlsuffix,
		phpsuffix => $php_suffix,
		phpmysqlsuffix => 'nd'
	}
}

class ibboardvpsnode (
	$primary_ip,
	$secondary_ip,
	$mailserver,
	$imapserver,
	$firewall_cmd = 'iptables',
	){
	class { 'basevpsnode':
		primary_ip => $primary_ip,
		secondary_ip => $secondary_ip,
		mailserver => $mailserver,
		imapserver => $imapserver,
		firewall_cmd => $firewall_cmd,
	}

	# Common modules used by multiple sites (mod_auth_basic is safe because we HTTPS all the things)
	$mods = [ 'auth_basic',
		'authn_file',
		'authz_user',
		'deflate',
		'xsendfile'
		]
	apache::mod {
		$mods:;
	}
	if $operatingsystem == 'CentOS' and versioncmp($operatingsystemrelease, '7') >= 0 {
		apache::mod { 
			'authn_core':;
		}
	}
	$apache_packages = [ 'mod_xsendfile' ]
	package { $apache_packages:
		ensure => present;
	}

	#Configure our sites, using templates for the custom fragments where the extra content is too long
	include adminsite
	website::https::multitld { 'www.ibboard':
		custom_fragment => template("private/apache/ibboard.fragment"),
		letsencrypt_name => 'ibboard.co.uk',
	}
	include hiveworldterrasite
	include bdstrikesite
	include devsite
	website::https::multitld { 'www.abiknight':
		custom_fragment => "$website::htmlphpfragment
	ErrorDocument 404 /error.php",
		letsencrypt_name => 'abiknight.co.uk',
	}
	include webmailpimsite
}

class adminsite{
	apache::mod { 'info':; 'status':; 'cgi':; }
	website::https::multitld { 'admin.ibboard':
		force_no_index => false,
		ssl_ca_chain => '',
		custom_fragment => template("private/apache/admin.fragment"),
	}
	cron { 'loadavg':
		command => '/usr/local/bin/run-loadavg-logger',
		user => apache,
		minute => '*/6'
	}
	cron { 'awstats':
		command => '/usr/local/bin/update-awstats > /srv/sites/admin/awstats.log',
		user => apache,
		hour => '*/6',
		minute => '0'
	}
}

class hiveworldterrasite {
	website::https::multitld { 'www.hiveworldterra':
		force_no_www => false,
		letsencrypt_name => 'hiveworldterra.co.uk',
		custom_fragment => template("private/apache/hwt.fragment"),
	}
	website::https::multitld { 'forums.hiveworldterra': 
		letsencrypt_name => 'hiveworldterra.co.uk',
		custom_fragment => template("private/apache/forums.fragment"),
	}
	website::https::multitld { 'skins.hiveworldterra':
		letsencrypt_name => 'hiveworldterra.co.uk',
		custom_fragment => template("private/apache/skins.fragment"),
	}
	website::https::redir { 'hiveworldterra.ibboard.co.uk':
		redir => 'https://www.hiveworldterra.co.uk/',
		docroot => "${website::basedir}/hiveworldterra",
		letsencrypt_name => 'hiveworldterra.co.uk',
		separate_log => true,
	}
}
class bdstrikesite {
	website::https::multitld { 'www.bdstrike': 
		docroot_owner => $defaultusers::secondary_user,
		docroot_group => 'editors',
		letsencrypt_name => 'bdstrike.co.uk',
		custom_fragment => template("private/apache/bdstrike.fragment"),
	}
	$aliases = [
		'strikecreations.co.uk',
		'strikecreations.com',
		'www.strikecreations.com' ]

	website::https::redir { 'www.strikecreations.co.uk':
		redir => 'https://bdstrike.co.uk/',
		serveraliases => $aliases,
		docroot => "${website::basedir}/bdstrike",
		docroot_owner => $defaultusers::secondary_user,
		docroot_group => 'editors',
		letsencrypt_name => 'bdstrike.co.uk',
		separate_log => true,
	}
}
class devsite {
	apache::mod {
		# mod_wsgi for Python support
		'wsgi':;
	}

	include python::venv

	# Create Python virtualenvs for the dev site apps
	python::venv::isolate {
		"/srv/rhodecode/virtualenv":;
		"/srv/trac/virtualenv":;
	}

	# Graphviz for Trac "master ticket" graphs
	package { 'graphviz':
		ensure => installed,
	}

	website::https::multitld { 'www.warfoundry':
		letsencrypt_name => 'warfoundry.co.uk',
		custom_fragment => template("private/apache/warfoundry.fragment"),
	}
	website::https::multitld { 'dev.ibboard':
		#Make sure we're the first one hit for the tiny fraction of "no support" cases we care about (potentially Python for Mercurial!)
		# http://en.wikipedia.org/wiki/Server_Name_Indication#No_support
		priority => 1,
		letsencrypt_name => 'dev.ibboard.co.uk',
		custom_fragment => template("private/apache/dev.fragment"),
		force_no_index => false,
	}
}

class webmailpimsite {
	# Webmail and Personal Information Management (PIM) sites
	website::https { 'webmail.ibboard.co.uk':
		force_no_index => false,
		ssl_ca_chain => '',
		custom_fragment => template("private/apache/webmail.fragment"),
	}
	website::https { 'pim.ibboard.co.uk':
		force_no_index => false,
		lockdown_requests => false,
		ssl_ca_chain => '',
		custom_fragment => template("private/apache/pim.fragment"),
	}
	cron { 'owncloudcron':
		command => "/usr/local/bin/owncloud-cron",
		user => 'apache',
		minute => '*/15',
	}
}

class email (
	$mailserver,
	$imapserver,
	){
	class { 'postfix':
		mailserver => $mailserver,
		protocols  => 'ipv4',
	}
	class { 'dovecot':
		imapserver => $imapserver,
	}
	package { [ 'amavisd-new' ]:
		ensure => installed,
		tag => 'av',
	}
	service { 'amavisd':
		ensure => 'running',
		enable => true,
	}
	file { '/etc/amavisd/amavisd.conf':
		ensure => present,
		source => 'puppet:///private/postfix/amavisd.conf',
		tag => 'av',
	}
	file { '/etc/mail/spamassassin/local.cf':
		ensure => present,
		source => 'puppet:///private/postfix/spamassassin-local.cf',
		tag => 'av',
	}
	file { '/etc/mail/spamassassin/ole2macro.cf':
		ensure => present,
		source => 'puppet:///common/ole2macro.cf',
		tag => 'av',
	}
	file { '/etc/mail/spamassassin/ole2macro.pm':
		ensure => present,
		source => 'puppet:///common/spamassassin-vba-macro-master/ole2macro.pm',
		tag => 'av',
	}
	Package<| tag == 'av' |> -> File<| tag == 'av' |>
	File<| tag == 'av' |> {
		notify => Service['amavisd'],
	}
	cron { 'Postwhite':
		command => "/usr/local/bin/postwhite 2>&1| grep -vE '^(Starting|Recursively|Getting|Querying|Removing|Sorting|$)'",
		user => 'root',
		weekday => 0,
		hour => 2,
		minute => 0,
	}
}

class cronjobs {
	# Add Mutt for scripts that send emails, but stop it clogging the disk by keeping copies of emails
	package { 'mutt':
		ensure => installed,
	}
	file { '/etc/Muttrc.local':
		content => 'set copy = no',
		require => Package['mutt'],
	}

	# General server-wide cron jobs
	Cron { user => 'root' }
	cron { 'backupalldbs':
		command => "/usr/local/bin/backupalldbs",
		monthday => "*/2",
		hour => "4",
		minute => "9"
	}
	cron { 'greatfirewallofchina':
		command => '/usr/local/bin/update-great-firewall-of-china',
		hour => 3,
		minute => 30
	}
	cron { 'permissions':
		command => '/usr/local/bin/set-permissions',
		hour => 3,
		minute => 2
	}
	# Since we're only managing the local server, use our script that wraps "puppet apply" instead of PuppetMaster
	cron { 'puppet':
		command => '/usr/local/bin/puppet-apply | grep -v "Compiled catalog for\|Finished catalog run in"',
		hour => '*/6',
		minute => 5
	}
	cron { 'purgecaches':
		command => "/usr/local/bin/purge-caches",
		hour => '4',
		minute => '15',
		weekday => '1',
	}
	# Notify of uncommitted files
	cron { 'check-mercurial-committed':
		command => "/usr/local/bin/check-hg-status",
		hour => '4',
		minute => '20',
		weekday => '0-6/3', #Sunday, Wednesday and Saturday morning
	}
	# Notify of available updates
	cron { 'check-yum-updates':
		command => '/usr/bin/yum check-update | tail -2 | grep -Ev "^ \* \w+: \w+"',
		hour => '4',
		minute => '30',
		weekday => '0-6/3', #Sunday, Wednesday and Saturday morning
	}
	# And check whether anything needs restarting
	cron { 'check-needs-restarting':
		command => '/usr/bin/needs-restarting|grep -v "/usr/lib/systemd\|/usr/sbin/lvmetad\|/usr/lib/polkit-1/polkitd"',
		hour => '4',
		minute => '45',
		weekday => '0-6/3', #Sunday, Wednesday and Saturday morning
	}
}