begin
cookbook = run_context.cookbook_collection[cookbook_name]
files = cookbook.relative_filenames_in_preferred_directory(node, :files, name.to_s)
- not files.empty?
+ !files.empty?
rescue Chef::Exceptions::FileNotFound
false
end
group "root"
mode 0644
variables new_resource.variables
- notifies :reload, "service[apache2]" if enabled? or available_name == enabled_name
+ notifies :reload, "service[apache2]" if enabled? || available_name == enabled_name
end
new_resource.updated_by_last_action(t.updated_by_last_action?)
end
action :install do
- if not installed?
+ if !installed?
package package_name
updated = true
end
action :enable do
- if not enabled?
+ unless enabled?
link enabled_name("load") do
to available_name("load")
owner "root"
notifies :restart, "service[apache2]"
end
else
- ["event", "itk", "prefork", "worker"].each do |mpm|
+ %w(event itk prefork worker).each do |mpm|
if mpm == node[:apache][:mpm]
apache_module "mpm_#{mpm}" do
action [ :enable ]
class Chef
class Util
def self.compare_versions(a, b)
- if a.kind_of?(String)
+ if a.is_a?(String)
a = a.split(".").map(&:to_i)
end
- if b.kind_of?(String)
+ if b.is_a?(String)
b = b.split(".").map(&:to_i)
end
command = git('ls-remote', @new_resource.repository, @new_resource.revision, "#{@new_resource.revision}^{}")
@resolved_reference = shell_out!(command, run_options).stdout.split("\n").last
if @resolved_reference =~ /^([0-9a-f]{40})\s+(\S+)/
- $1
+ Regexp.last_match[1]
else
nil
end
def repo_attrs
return {} unless ::File.exist?(::File.join(@new_resource.destination, ".svn"))
- @repo_attrs ||= svn_info.lines.inject({}) do |attrs, line|
+ # NOTE: each_with_object yields (element, memo) — opposite order to inject
+ @repo_attrs ||= svn_info.lines.each_with_object({}) do |line, attrs|
if line =~ SVN_INFO_PATTERN
- property, value = $1, $2
+ property, value = Regexp.last_match[1], Regexp.last_match[2]
attrs[property] = value
else
- raise "Could not parse `svn info` data: #{line}"
+ fail "Could not parse `svn info` data: #{line}"
end
- attrs
end
end
chef_package = "chef_#{node[:chef][:client][:version]}_amd64.deb"
chef_platform = case node[:platform_version]
- when "12.10" then "12.04"
- when "14.04" then "13.04"
- else node[:platform_version]
+ when "12.10" then "12.04"
+ when "14.04" then "13.04"
+ else node[:platform_version]
end
directory "/var/cache/chef" do
include_recipe "apache::ssl"
chef_platform = case node[:platform_version]
- when "12.10" then "12.04"
- when "14.04" then "12.04"
- else node[:platform_version]
+ when "12.10" then "12.04"
+ when "14.04" then "12.04"
+ else node[:platform_version]
end
chef_package = "chef-server_#{node[:chef][:server][:version]}_amd64.deb"
postgresql_extension "btree_gist" do
cluster node[:db][:cluster]
database "openstreetmap"
- only_if { node[:postgresql][:clusters][node[:db][:cluster]] and node[:postgresql][:clusters][node[:db][:cluster]][:version] >= 9.0 }
+ only_if { node[:postgresql][:clusters][node[:db][:cluster]] && node[:postgresql][:clusters][node[:db][:cluster]][:version] >= 9.0 }
end
template "/etc/cron.daily/rails-db" do
details = node[:accounts][:users][name] || {}
port = 7000 + account["uid"].to_i
- if ["user", "administrator"].include?(details[:status])
+ if %w(user administrator).include?(details[:status])
user_home = details[:home] || account["home"] || "#{node[:accounts][:home]}/#{name}"
if File.directory?("#{user_home}/public_html")
files_mode 0644
end
-zones = Array.new
+zones = []
Dir.glob("/var/lib/dns/json/*.json").each do |kmlfile|
zone = File.basename(kmlfile, ".json")
group "root"
mode 0644
not_if do
- File.exist?("/etc/ssl/certs/exim.pem") and File.exist?("/etc/ssl/private/exim.key")
+ File.exist?("/etc/ssl/certs/exim.pem") && File.exist?("/etc/ssl/private/exim.key")
end
end
user "root"
group "ssl-cert"
not_if do
- File.exist?("/etc/ssl/certs/exim.pem") and File.exist?("/etc/ssl/private/exim.key")
+ File.exist?("/etc/ssl/certs/exim.pem") && File.exist?("/etc/ssl/private/exim.key")
end
end
name = account["id"]
details = node[:accounts][:users][name] || {}
- if details[:status] and account["email"]
+ if details[:status] && account["email"]
node.default[:exim][:aliases][name] = account["email"]
end
end
mode 0755
end
- if node[:recipes].include?("trace") and repository != "dns.git"
+ if node[:recipes].include?("trace") && repository != "dns.git"
template "#{git_directory}/#{repository}/hooks/post-receive" do
source "post-receive.erb"
owner "root"
-default[:hardware][:modules] = [ "loop", "lp", "rtc" ]
+default[:hardware][:modules] = %w(loop lp rtc)
default[:hardware][:sensors] = {}
-if node[:dmi] and node[:dmi][:system]
+if node[:dmi] && node[:dmi][:system]
case dmi.system.manufacturer
when "HP"
default[:apt][:sources] |= [ "management-component-pack" ]
case dmi.system.product_name
- when "ProLiant DL360 G6", "ProLiant DL360 G7"
- default[:hardware][:sensors]["power_meter-*"][:power]["power1"] = { :ignore => true }
+ when "ProLiant DL360 G6", "ProLiant DL360 G7"
+ default[:hardware][:sensors]["power_meter-*"][:power]["power1"] = { :ignore => true }
end
end
end
end
end
-if node[:kernel] and node[:kernel][:modules]
- raidmods = node[:kernel][:modules].keys & ["cciss", "hpsa", "mptsas", "mpt2sas", "megaraid_mm", "megaraid_sas", "aacraid"]
+if node[:kernel] && node[:kernel][:modules]
+ raidmods = node[:kernel][:modules].keys & %w(cciss hpsa mptsas mpt2sas megaraid_mm megaraid_sas aacraid)
unless raidmods.empty?
default[:apt][:sources] |= [ "hwraid" ]
end
end
-if node[:dmi] and node[:dmi][:system]
+if node[:dmi] && node[:dmi][:system]
case node[:dmi][:system][:manufacturer]
when "empty"
manufacturer = node[:dmi][:base_board][:manufacturer]
speed = "115200"
end
-if manufacturer == "HP" and node[:lsb][:release].to_f > 11.10
+if manufacturer == "HP" && node[:lsb][:release].to_f > 11.10
include_recipe "git"
git "/opt/hp/hp-legacy" do
status_packages["cciss-vol-status"] ||= []
when "mptsas"
tools_packages << "lsiutil"
- #status_packages["mpt-status"] ||= []
+ # status_packages["mpt-status"] ||= []
when "mpt2sas"
tools_packages << "sas2ircu"
status_packages["sas2ircu-status"] ||= []
end
node[:block_device].each do |name, attributes|
- if attributes[:vendor] == "HP" and attributes[:model] == "LOGICAL VOLUME"
+ if attributes[:vendor] == "HP" && attributes[:model] == "LOGICAL VOLUME"
if name =~ /^cciss!(c[0-9]+)d[0-9]+$/
- status_packages["cciss-vol-status"] |= [ "cciss/#{$1}d0" ]
+ status_packages["cciss-vol-status"] |= [ "cciss/#{Regexp.last_match[1]}d0" ]
else
Dir.glob("/sys/block/#{name}/device/scsi_generic/*").each do |sg|
status_packages["cciss-vol-status"] |= [ File.basename(sg) ]
end
end
-["hpacucli", "lsiutil", "sas2ircu", "megactl", "megacli", "arcconf"].each do |tools_package|
+%w(hpacucli lsiutil sas2ircu megactl megacli arcconf).each do |tools_package|
if tools_packages.include?(tools_package)
package tools_package
else
action :sync
repository repository
reference reference
- #depth 1
+ # depth 1
enable_submodules true
user node[:mediawiki][:user]
group node[:mediawiki][:group]
group node[:mediawiki][:group]
mode 0664
content "<?php require_once('#{extension_directory}/#{name}.php');\n"
- only_if do File.exist?("#{extension_directory}/#{name}.php") end
+ only_if { File.exist?("#{extension_directory}/#{name}.php") }
notifies :create, resources(:template => "#{mediawiki_directory}/LocalSettings.php")
end
end
define :mediawiki_site, :action => [ :create, :enable ] do
name = params[:name]
- #/etc/cron.d names cannot contain a dot
+ # /etc/cron.d names cannot contain a dot
cron_name = name.tr(".", "_")
aliases = Array(params[:aliases])
:recaptcha_private => params[:recaptcha_private_key]
}
-#----------------
+ #----------------
node.set_unless[:mediawiki][:sites][name] = {}
node.set[:mediawiki][:sites][name][:site_directory] = site_directory
node.set[:mediawiki][:sites][name][:version] = mediawiki_version
node.set_unless[:mediawiki][:sites][name][:wgSecretKey] = random_password(64)
-#----------------
+ #----------------
mysql_user "#{database_params[:username]}@localhost" do
password database_params[:password]
execute "#{mediawiki[:directory]}/maintenance/install.php" do
action :nothing
- #Use metanamespace as Site Name to ensure correct set namespace
+ # Use metanamespace as Site Name to ensure correct set namespace
command "php maintenance/install.php --server '#{name}' --dbtype 'mysql' --dbname '#{database_params[:name]}' --dbuser '#{database_params[:username]}' --dbpass '#{database_params[:password]}' --dbserver '#{database_params[:host]}' --scriptpath /w --pass '#{mediawiki[:site_admin_pw]}' '#{mediawiki[:metanamespace]}' '#{mediawiki[:site_admin_user]}'"
cwd mediawiki[:directory]
user node[:mediawiki][:user]
action :sync
repository mediawiki_repository
reference mediawiki_reference
- #depth 1
+ # depth 1
user node[:mediawiki][:user]
group node[:mediawiki][:group]
notifies :run, resources(:execute => "#{mediawiki[:directory]}/maintenance/install.php"), :immediately
notifies :run, resources(:execute => "#{mediawiki[:directory]}/maintenance/update.php")
end
- #Safety catch if git doesn't update but install.php hasn't run
+ # Safety catch if git doesn't update but install.php hasn't run
ruby_block "catch-installer-localsettings-run" do
block do
#
owner node[:mediawiki][:user]
group node[:mediawiki][:group]
mode 0664
- variables({
- :name => name,
- :database_params => database_params,
- :mediawiki => mediawiki
- })
+ variables :name => name, :database_params => database_params, :mediawiki => mediawiki
notifies :run, resources(:execute => "#{mediawiki[:directory]}/maintenance/update.php")
end
owner "root"
group "root"
mode 0644
- variables({
- :name => name,
- :directory => site_directory,
- :user => node[:mediawiki][:user]
- })
+ variables :name => name, :directory => site_directory, :user => node[:mediawiki][:user]
end
template "/etc/cron.daily/mediawiki-#{cron_name}-backup" do
owner "root"
group "root"
mode 0700
- variables({
- :name => name,
- :directory => site_directory,
- :database_params => database_params
- })
+ variables :name => name, :directory => site_directory, :database_params => database_params
end
- #MediaWiki Default Extension
+ # MediaWiki Default Extension
mediawiki_extension "Cite" do
site name
end
# MediaWiki Language Extension Bundle
- #fixme should automatically resolve tag
+ # FIXME: should automatically resolve tag
mw_lang_ext_bundle_tag = "2014.09"
mediawiki_extension "Babel" do
tag mw_lang_ext_bundle_tag
end
- #LocalisationUpdate Update Cron
- #template "/etc/cron.d/mediawiki-#{name}-LocalisationUpdate" do
- # cookbook "mediawiki"
- # source "mediawiki-LocalisationUpdate.cron.erb"
- # owner "root"
- # group "root"
- # mode 0755
- # variables({
- # :name => name,
- # :directory => site_directory,
- # :user => node[:mediawiki][:user]
- # })
- #end
-
- #mediawiki_extension "Translate" do
- # site name
- # template "mw-ext-Translate.inc.php.erb"
- # tag mw_lang_ext_bundle_tag
- #end
+ # LocalisationUpdate Update Cron
+ # template "/etc/cron.d/mediawiki-#{name}-LocalisationUpdate" do
+ # cookbook "mediawiki"
+ # source "mediawiki-LocalisationUpdate.cron.erb"
+ # owner "root"
+ # group "root"
+ # mode 0755
+ # variables :name => name, :directory => site_directory, :user => node[:mediawiki][:user]
+ # end
+
+ # mediawiki_extension "Translate" do
+ # site name
+ # template "mw-ext-Translate.inc.php.erb"
+ # tag mw_lang_ext_bundle_tag
+ # end
mediawiki_extension "UniversalLanguageSelector" do
site name
template "mw-ext-CirrusSearch.inc.php.erb"
end
- #OSM specifc extensions
+ # OSM specific extensions
mediawiki_extension "osmtaginfo" do
site name
cookbook "mediawiki"
template "apache.erb"
directory site_directory
- variables({
- :aliases => aliases,
- :mediawiki => mediawiki
- })
+ variables :aliases => aliases, :mediawiki => mediawiki
notifies :reload, "service[apache2]"
end
- #Fixme - Needs to run once
+ # FIXME: needs to run once
execute "#{mediawiki[:directory]}/extensions/CirrusSearch/maintenance/updateSearchIndexConfig.php" do
action :nothing
command "php extensions/CirrusSearch/maintenance/updateSearchIndexConfig.php"
user node[:mediawiki][:user]
group node[:mediawiki][:group]
end
-
end
include_recipe "mysql"
include_recipe "git"
-#Mediawiki Base Requirements
+# Mediawiki Base Requirements
package "php5"
package "php5-cli"
package "php5-curl"
package "php-wikidiff2"
-#Mediawiki Image + SVG support
+# Mediawiki Image + SVG support
package "imagemagick"
package "librsvg2-bin"
-#Mediawiki PDF support via Extension:PdfHandler
+# Mediawiki PDF support via Extension:PdfHandler
package "ghostscript"
package "poppler-utils"
-#Mediawiki backup
+# Mediawiki backup
package "xz-utils"
link "/etc/php5/apache2/conf.d/20-wikidiff2.ini" do
CALL_TYPES.each { |k, v| puts "#{k}.label #{v}" }
else
- counts = uris_from_status(server).
- collect { |x| categorise_uri(x) }.
- inject(Hash.new) do |h, e|
- if h.has_key? e
+ counts = uris_from_status(server)
+ .collect { |x| categorise_uri(x) }
+ # each_with_object yields (element, memo) — opposite order to inject
+ .each_with_object({}) do |e, h|
+ if h.key? e
h[e] += 1
else
h[e] = 1
end
- h
end
CALL_TYPES.keys.each do |type|
NUM_LINES = 10000
def uris_from_logs
- lines = Array.new
+ lines = []
max_time = nil
min_time = nil
parser = ApacheLogRegex.new('%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %x')
min_time = [min_time, t].compact.min
max_time = [max_time, t].compact.max
lines << uri
- rescue ApacheLogRegex::ParseError => e
+ rescue ApacheLogRegex::ParseError
# nil
end
end
else
min_time, max_time, lines = uris_from_logs
delta_t = (max_time - min_time).to_f * 24 * 60
- counts = lines.
- collect { |x| categorise_uri(x) }.
- inject(Hash.new) do |h, e|
- if h.has_key? e
+ counts = lines
+ .collect { |x| categorise_uri(x) }
+ # each_with_object yields (element, memo) — opposite order to inject
+ .each_with_object({}) do |e, h|
+ if h.key? e
h[e] += 1
else
h[e] = 1
end
- h
end
CALL_TYPES.keys.each do |type|
CALL_TYPES.each { |k, v| puts "#{k}.label #{v}" }
else
- counts = uri_and_times_from_status(server).
- collect { |x, y| [categorise_uri(x), y] }.
- inject(Hash.new) do |h, e|
+ counts = uri_and_times_from_status(server)
+ .collect { |x, y| [categorise_uri(x), y] }
+ # each_with_object yields (element, memo) — opposite order to inject
+ .each_with_object({}) do |e, h|
category, time = e
- if h.has_key? category
+ if h.key? category
h[category] += [time]
else
h[category] = [time]
end
- h
end
CALL_TYPES.keys.each do |type|
count = counts[type] || [0]
- avg = count.inject(0) { |x, y|x + y } / (1.0 * count.length)
+ avg = count.inject(0) { |a, e| a + e } / (1.0 * count.length)
puts "#{type}.value #{avg}"
end
end
exit 1
end
status =~ /(\d+\.\d+)/
- puts "memory.value #{($1.to_f * 1024 * 1024).round}"
+ puts "memory.value #{(Regexp.last_match[1].to_f * 1024 * 1024).round}"
end
if ARGV[0] == "config"
exit 1
end
status =~ /active\s+=\s+(\d+)/
- puts "active.value #{$1}"
+ puts "active.value #{Regexp.last_match[1]}"
status =~ /inactive\s+=\s+(\d+)/
- puts "inactive.value #{$1}"
+ puts "inactive.value #{Regexp.last_match[1]}"
end
if ARGV[0] == "config"
exit 1
end
status =~ /Waiting on global queue:\s+(\d+)/
- puts "global.value #{$1}"
+ puts "global.value #{Regexp.last_match[1]}"
end
if ARGV[0] == "config"
class Munin
def self.expand(template, nodes)
nodes.map do |node|
- if node.kind_of?(Hash)
- template.gsub(/%%([^%]+)%%/) { node[$1.to_sym] }
+ if node.is_a?(Hash)
+ template.gsub(/%%([^%]+)%%/) { node[Regexp.last_match[1].to_sym] }
else
template.gsub("%%", node)
end
end
node[:network][:interfaces].each do |ifname, ifattr|
- if ifattr[:encapsulation] == "Ethernet" and ifattr[:state] == "up"
+ if ifattr[:encapsulation] == "Ethernet" && ifattr[:state] == "up"
munin_plugin "if_err_#{ifname}" do
target "if_err_"
end
end
def users
- @users ||= query("SELECT * FROM user").inject({}) do |users, user|
+ # each_with_object yields (element, memo) — opposite order to inject
+ @users ||= query("SELECT * FROM user").each_with_object({}) do |user, users|
name = "'#{user[:user]}'@'#{user[:host]}'"
- users[name] = USER_PRIVILEGES.inject({}) do |privileges, privilege|
+ users[name] = USER_PRIVILEGES.each_with_object({}) do |privilege, privileges|
privileges[privilege] = user["#{privilege}_priv".to_sym] == "Y"
- privileges
end
-
- users
end
end
def databases
- @databases ||= query("SHOW databases").inject({}) do |databases, database|
+ # each_with_object yields (element, memo) — opposite order to inject
+ @databases ||= query("SHOW databases").each_with_object({}) do |database, databases|
databases[database[:database]] = {
:permissions => {}
}
- databases
end
query("SELECT * FROM db").each do |record|
if database = @databases[record[:db]]
user = "'#{record[:user]}'@'#{record[:host]}'"
- database[:permissions][user] = DATABASE_PRIVILEGES.inject([]) do |privileges, privilege|
+ database[:permissions][user] = DATABASE_PRIVILEGES.each_with_object([]) do |privilege, privileges|
privileges << privilege if record["#{privilege}_priv".to_sym] == "Y"
- privileges
end
end
end
networking_interfaces = networking[:interfaces] || []
networking_interfaces.each_value do |interface|
- if options[:role].nil? or interface[:role].to_s == options[:role].to_s
- if options[:family].nil? or interface[:family].to_s == options[:family].to_s
+ if options[:role].nil? || interface[:role].to_s == options[:role].to_s
+ if options[:family].nil? || interface[:family].to_s == options[:family].to_s
if block.nil?
interfaces << interface
else
require "ipaddr"
node[:networking][:interfaces].each do |name, interface|
- if interface[:role] and role = node[:networking][:roles][interface[:role]]
+ if interface[:role] && role = node[:networking][:roles][interface[:role]]
if role[interface[:family]]
node.default[:networking][:interfaces][name][:prefix] = role[interface[:family]][:prefix]
node.default[:networking][:interfaces][name][:gateway] = role[interface[:family]][:gateway]
end
node.interfaces(:role => :internal) do |interface|
- if interface[:gateway] and interface[:gateway] != interface[:address]
+ if interface[:gateway] && interface[:gateway] != interface[:address]
search(:node, "networking_interfaces*address:#{interface[:gateway]}") do |gateway|
if gateway[:openvpn]
gateway[:openvpn][:tunnels].each_value do |tunnel|
end
end
-zones = Hash.new
+zones = {}
search(:node, "networking:interfaces").collect do |n|
if n[:fqdn] != node[:fqdn]
n.interfaces.each do |interface|
- if interface[:role] == "external" and interface[:zone]
+ if interface[:role] == "external" && interface[:zone]
- zones[interface[:zone]] ||= Hash.new
- zones[interface[:zone]][interface[:family]] ||= Array.new
+ # literal {} / [] for consistency with the Hash.new/Array.new cleanup elsewhere
+ zones[interface[:zone]] ||= {}
+ zones[interface[:zone]][interface[:family]] ||= []
zones[interface[:zone]][interface[:family]] << interface[:address]
rate_limit "s:1/sec:5"
end
-[ "ucl", "ic", "bm" ].each do |zone|
+%w(ucl ic bm).each do |zone|
firewall_rule "accept-openvpn-#{zone}" do
action :accept
family :inet
end
end
-if not node.interfaces(:family => :inet6).empty?
+unless node.interfaces(:family => :inet6).empty?
package "shorewall6"
service "shorewall6" do
supports :status => true, :restart => true, :reload => true
end
-#service "nfs-kernel-server" do
-# action [ :enable, :start ]
-# supports :status => true, :restart => true, :reload => true
-#end
+# service "nfs-kernel-server" do
+# action [ :enable, :start ]
+# supports :status => true, :restart => true, :reload => true
+# end
exports = {}
-#default[:nginx][:mpm] = "worker"
+# default[:nginx][:mpm] = "worker"
package "nginx"
-#admins = data_bag_item("nginx", "admins")
+# admins = data_bag_item("nginx", "admins")
template "/etc/nginx/nginx.conf" do
source "nginx.conf.erb"
mode 0644
end
-
package "osmosis"
package "gcc"
package "proj-bin"
mode 0755
end
-
template "#{source_directory}/.git/hooks/post-merge" do
source "update_source.erb"
owner "nominatim"
variables :bin_directory => "#{source_directory}/utils", :mailto => email_errors
end
-
template "#{source_directory}/utils/nominatim-update" do
source "updater.erb"
user "nominatim"
recipe "ntp", "Installs and configures ntp either as a server or client"
-%w{ ubuntu debian redhat centos fedora }.each do |os|
+%w(ubuntu debian redhat centos fedora).each do |os|
supports os
end
notifies :restart, "service[ntp]"
end
-munin_plugins = [ "ntp_kernel_err", "ntp_kernel_pll_freq", "ntp_kernel_pll_off", "ntp_offset" ]
+munin_plugins = %w(ntp_kernel_err ntp_kernel_pll_freq ntp_kernel_pll_off ntp_offset)
munin_plugin "ntp_kernel_err"
munin_plugin "ntp_kernel_pll_freq"
if File.directory?("/etc/munin/plugins")
Dir.new("/etc/munin/plugins").each do |plugin|
- if plugin.match(/^ntp_/) and not munin_plugins.include?(plugin)
+ if plugin.match(/^ntp_/) && !munin_plugins.include?(plugin)
munin_plugin plugin do
action :delete
end
supports :status => true, :restart => true, :reload => true
end
-hosts = search(:node, "networking:interfaces").sort_by do |node|
- node[:hostname]
-end.collect do |node|
+hosts = search(:node, "networking:interfaces").sort_by { |n| n[:hostname] }.collect do |node|
names = [ node[:hostname] ]
node.interfaces(:role => :external).each do |interface|
node[:openvpn][:tunnels].each do |name, details|
if peer = search(:node, "fqdn:#{details[:peer][:host]}").first
- if peer[:openvpn] and not details[:peer][:address]
+ if peer[:openvpn] && !details[:peer][:address]
node.default[:openvpn][:tunnels][name][:peer][:address] = peer[:openvpn][:address]
end
if File.exist?("/etc/openvpn/#{name}.key")
node.set[:openvpn][:keys][name] = IO.read("/etc/openvpn/#{name}.key")
end
- elsif peer and peer[:openvpn]
+ elsif peer && peer[:openvpn]
file "/etc/openvpn/#{name}.key" do
owner "root"
group "root"
group "root"
mode 0644
variables :name => name,
- :address => node[:openvpn][:address],
- :port => node[:openvpn][:tunnels][name][:port],
- :mode => node[:openvpn][:tunnels][name][:mode],
- :peer => node[:openvpn][:tunnels][name][:peer]
+ :address => node[:openvpn][:address],
+ :port => node[:openvpn][:tunnels][name][:port],
+ :mode => node[:openvpn][:tunnels][name][:mode],
+ :peer => node[:openvpn][:tunnels][name][:peer]
notifies :restart, "service[openvpn]"
end
else
default[:piwik][:version] = "2.10.0"
-default[:piwik][:plugins] = [
- "Actions", "API", "BulkTracking", "Contents", "CoreAdminHome",
- "CoreConsole", "CoreHome", "CorePluginsAdmin", "CoreUpdater",
- "CoreVisualizations", "CustomVariables", "Dashboard",
- "DevicesDetection", "DevicePlugins", "DoNotTrack", "Feedback",
- "Goals", "ImageGraph", "Installation", "LanguagesManager", "Live",
- "Login", "Morpheus", "MultiSites", "PrivacyManager", "Provider",
- "Proxy", "Referrers", "Resolution", "SEO", "SitesManager",
- "UserCountry", "UserCountryMap", "UserSettings", "UsersManager",
- "Widgetize", "VisitFrequency", "VisitorInterest", "VisitsSummary",
- "VisitTime"
-]
+default[:piwik][:plugins] = %w(
+ Actions API BulkTracking Contents CoreAdminHome CoreConsole CoreHome
+ CorePluginsAdmin CoreUpdater CoreVisualizations CustomVariables
+ Dashboard DevicesDetection DevicePlugins DoNotTrack Feedback Goals
+ ImageGraph Installation LanguagesManager Live Login Morpheus MultiSites
+ PrivacyManager Provider Proxy Referrers Resolution SEO SitesManager
+ UserCountry UserCountryMap UserSettings UsersManager Widgetize
+ VisitFrequency VisitorInterest VisitsSummary VisitTime
+)
end
def open?(t)
- not closed?(t)
+ !closed?(t)
end
def activity_between?(t1, t2)
# time (see rails_port's changeset model). so it is probably enough
# for us to look at anything that was closed recently, and filter from
# there.
- @conn.
- exec("select id, created_at, closed_at, num_changes from changesets where closed_at > ((now() at time zone 'utc') - '1 hour'::interval)").
- map { |row| Changeset.new(row) }.
- select { |cs| cs.activity_between?(last_run, @now) }
+ @conn
+ .exec("select id, created_at, closed_at, num_changes from changesets where closed_at > ((now() at time zone 'utc') - '1 hour'::interval)")
+ .map { |row| Changeset.new(row) }
+ .select { |cs| cs.activity_between?(last_run, @now) }
end
# creates an XML file containing the changeset information from the
'generator' => 'replicate_changesets.rb',
'copyright' => "OpenStreetMap and contributors",
'attribution' => "http://www.openstreetmap.org/copyright",
- 'license' => "http://opendatacommons.org/licenses/odbl/1-0/" }.
- each { |k, v| doc.root[k] = v }
+ 'license' => "http://opendatacommons.org/licenses/odbl/1-0/" }
+ .each { |k, v| doc.root[k] = v }
changesets.each do |cs|
xml = XML::Node.new("changeset")
xml['user'] = res[0]['display_name']
xml['uid'] = res[0]['id']
- unless (res[0]['min_lat'].nil? ||
- res[0]['max_lat'].nil? ||
- res[0]['min_lon'].nil? ||
- res[0]['max_lon'].nil?)
+ unless res[0]['min_lat'].nil? ||
+ res[0]['max_lat'].nil? ||
+ res[0]['min_lon'].nil? ||
+ res[0]['max_lon'].nil?
xml['min_lat'] = (res[0]['min_lat'].to_f / GEO_SCALE).to_s
xml['max_lat'] = (res[0]['max_lat'].to_f / GEO_SCALE).to_s
xml['min_lon'] = (res[0]['min_lon'].to_f / GEO_SCALE).to_s
File.open(@config['state_file'], "r") do |fl|
fl.flock(File::LOCK_EX)
- sequence = (@state.has_key?('sequence') ? @state['sequence'] + 1 : 0)
+ sequence = (@state.key?('sequence') ? @state['sequence'] + 1 : 0)
data_file = @config['data_dir'] + sprintf("/%03d/%03d/%03d.osm.gz", sequence / 1000000, (sequence / 1000) % 1000, (sequence % 1000))
tmp_state = @config['state_file'] + ".tmp"
tmp_data = "/tmp/changeset_data.osm.tmp"
node.default[:incron][:planetdump] = {
:user => "www-data",
:path => "/store/backup",
- :events => [ "IN_CREATE", "IN_MOVED_TO" ],
+ :events => %w(IN_CREATE IN_MOVED_TO),
:command => "/usr/local/bin/planetdump $#"
}
suffix = cluster.tr("/", ":")
database = params[:database]
- if node[:postgresql][:clusters] and node[:postgresql][:clusters][cluster]
+ if node[:postgresql][:clusters] && node[:postgresql][:clusters][cluster]
munin_plugin "postgres_cache_#{database}:#{suffix}" do
action params[:action]
target "postgres_cache_"
end
def users
- @users ||= query("SELECT * FROM pg_user").inject({}) do |users, user|
+ # each_with_object (note spelling) yields (element, memo) — opposite order to inject
+ @users ||= query("SELECT * FROM pg_user").each_with_object({}) do |user, users|
users[user[:usename]] = {
:superuser => user[:usesuper] == "t",
:createdb => user[:usercreatedb] == "t",
:createrole => user[:usecatupd] == "t",
:replication => user[:userepl] == "t"
}
- users
end
end
def databases
- @databases ||= query("SELECT d.datname, u.usename, d.encoding, d.datcollate, d.datctype FROM pg_database AS d INNER JOIN pg_user AS u ON d.datdba = u.usesysid").inject({}) do |databases, database|
+ # each_with_object yields (element, memo) — opposite order to inject
+ @databases ||= query("SELECT d.datname, u.usename, d.encoding, d.datcollate, d.datctype FROM pg_database AS d INNER JOIN pg_user AS u ON d.datdba = u.usesysid").each_with_object({}) do |database, databases|
databases[database[:datname]] = {
:owner => database[:usename],
:encoding => database[:encoding],
:collate => database[:datcollate],
:ctype => database[:datctype]
}
- databases
end
end
def extensions(database)
@extensions ||= {}
- @extensions[database] ||= query("SELECT extname, extversion FROM pg_extension", :database => database).inject({}) do |extensions, extension|
+ # each_with_object yields (element, memo) — opposite order to inject
+ @extensions[database] ||= query("SELECT extname, extversion FROM pg_extension", :database => database).each_with_object({}) do |extension, extensions|
extensions[extension[:extname]] = {
:version => extension[:extversion]
}
- databases
end
end
def tables(database)
@tables ||= {}
- @tables[database] ||= query("SELECT n.nspname, c.relname, u.usename, c.relacl FROM pg_class AS c INNER JOIN pg_user AS u ON c.relowner = u.usesysid INNER JOIN pg_namespace AS n ON c.relnamespace = n.oid", :database => database).inject({}) do |tables, table|
+ # each_with_object yields (element, memo) — opposite order to inject
+ @tables[database] ||= query("SELECT n.nspname, c.relname, u.usename, c.relacl FROM pg_class AS c INNER JOIN pg_user AS u ON c.relowner = u.usesysid INNER JOIN pg_namespace AS n ON c.relnamespace = n.oid", :database => database).each_with_object({}) do |table, tables|
name = "#{table[:nspname]}.#{table[:relname]}"
tables[name] = {
:owner => table[:usename],
:permissions => parse_acl(table[:relacl] || "{}")
}
-
- tables
end
end
private
def parse_acl(acl)
- acl.sub(/^\{(.*)\}$/, "\\1").split(",").inject({}) do |permissions, entry|
- entry = entry.sub(/^"(.*)"$/) { $1.gsub(/\\"/, '"') }.sub(/\/.*$/, "")
+ # each_with_object yields (element, memo) — opposite order to inject
+ acl.sub(/^\{(.*)\}$/, "\\1").split(",").each_with_object({}) do |entry, permissions|
+ entry = entry.sub(/^"(.*)"$/) { Regexp.last_match[1].gsub(/\\"/, '"') }.sub(/\/.*$/, "")
user, privileges = entry.split("=")
user = user.sub(/^"(.*)"$/, "\\1")
"a" => :insert, "r" => :select, "w" => :update, "d" => :delete,
"D" => :truncate, "x" => :references, "t" => :trigger
}.values_at(*(privileges.chars)).compact
-
- permissions
end
end
end
end
include_recipe "networking"
-hosts_allow = Hash.new
-hosts_deny = Hash.new
+hosts_allow = {}
+hosts_deny = {}
node[:rsyncd][:modules].each do |name, details|
hosts_allow[name] = details[:hosts_allow] || []
description "Configures kernel parameters"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.1"
-%w{redhat centos debian ubuntu}.each do |os|
+%w(redhat centos debian ubuntu).each do |os|
supports os
end
recipe "sysctl", "Configure kernel parameters"
line.gsub!(/^(\$THINKUP_CFG\['site_root_path'\] *=) '[^']*';$/, "\\1 '/';")
line.gsub!(/^(\$THINKUP_CFG\['timezone'\] *=) '[^']*';$/, "\\1 'Europe/London';")
line.gsub!(/^(\$THINKUP_CFG\['db_user'\] *=) '[^']*';$/, "\\1 'thinkup';")
- line.gsub!(/^(\$THINKUP_CFG\['db_password'\] *=) '[^']*';$/, "\\1 '#{passwords["database"]}';")
+ line.gsub!(/^(\$THINKUP_CFG\['db_password'\] *=) '[^']*';$/, "\\1 '#{passwords['database']}';")
line.gsub!(/^(\$THINKUP_CFG\['db_name'\] *=) '[^']*';$/, "\\1 'thinkup';")
line
thinkup_cron = edit_file "/srv/thinkup.openstreetmap.org/extras/cron/config.sample" do |line|
line.gsub!(/^thinkup="[^"]*"$/, "thinkup=\"/srv/thinkup.openstreetmap.org\"")
line.gsub!(/^thinkup_username="[^"]*"$/, "thinkup_username=\"openstreetmap@jonno.cix.co.uk\"")
- line.gsub!(/^thinkup_password="[^"]*"$/, "thinkup_password=\"#{passwords["admin"]}\"")
+ line.gsub!(/^thinkup_password="[^"]*"$/, "thinkup_password=\"#{passwords['admin']}\"")
line.gsub!(/^php="[^"]*"$/, "php=\"/usr/bin/php\"")
line.gsub!(/^#crawl_interval=[0-9]+$/, "crawl_interval=30")
NODE_CACHE_FILE = "/store/database/nodes"
# turns a spherical mercator coord into a tile coord
- def Expire.tile_from_merc(point, zoom)
+ def self.tile_from_merc(point, zoom)
# renormalise into unit space [0,1]
point.x = 0.5 + point.x / SIZE
point.y = 0.5 - point.y / SIZE
end
# turns a latlon -> tile x,y given a zoom level
- def Expire.tile_from_latlon(latlon, zoom)
+ def self.tile_from_latlon(latlon, zoom)
# first convert to spherical mercator
point = PROJ.forward(latlon)
tile_from_merc(point, zoom)
end
# this must match the definition of xyz_to_meta in mod_tile
- def Expire.xyz_to_meta(x, y, z)
+ def self.xyz_to_meta(x, y, z)
# mask off the final few bits
x &= ~(METATILE - 1)
y &= ~(METATILE - 1)
# generate the path
- hash_path = (0..4).collect { |i|
+ hash_path = (0..4).collect do |i|
(((x >> 4 * i) & 0xf) << 4) | ((y >> 4 * i) & 0xf)
- }.reverse.join('/')
+ end.reverse.join('/')
z.to_s + '/' + hash_path + ".meta"
end
EXPIRY_TIME = Time.parse("2000-01-01 00:00:00")
# expire the meta tile by setting the modified time back
- def Expire.expire_meta(meta)
+ def self.expire_meta(meta)
puts "Expiring #{meta}"
File.utime(EXPIRY_TIME, EXPIRY_TIME, meta)
end
- def Expire.expire(change_file, min_zoom, max_zoom, tile_dirs)
+ def self.expire(change_file, min_zoom, max_zoom, tile_dirs)
do_expire(change_file, min_zoom, max_zoom) do |set|
new_set = Set.new
meta_set = Set.new
end
end
- def Expire.do_expire(change_file, min_zoom, max_zoom, &_)
+ def self.do_expire(change_file, min_zoom, max_zoom, &_)
# read in the osm change file
doc = XML::Document.file(change_file)
# hash map to contain all the nodes
- nodes = Hash.new
+ nodes = {}
# we put all the nodes into the hash, as it doesn't matter whether the node was
# added, deleted or modified - the tile will need updating anyway.
database "gis"
end
-[ "geography_columns",
- "planet_osm_nodes",
- "planet_osm_rels",
- "planet_osm_ways",
- "raster_columns",
- "raster_overviews",
- "spatial_ref_sys" ].each do |table|
+%w(geography_columns planet_osm_nodes planet_osm_rels planet_osm_ways raster_columns raster_overviews spatial_ref_sys).each do |table|
postgresql_table table do
cluster node[:tile][:database][:cluster]
database "gis"
end
end
-[ "geometry_columns",
- "planet_osm_line",
- "planet_osm_point",
- "planet_osm_polygon",
- "planet_osm_roads" ].each do |table|
+%w(geometry_columns planet_osm_line planet_osm_point planet_osm_polygon planet_osm_roads).each do |table|
postgresql_table table do
cluster node[:tile][:database][:cluster]
database "gis"
default[:tilecache][:tile_parent] = "parent.tile.openstreetmap.org"
default[:tilecache][:tile_siblings] = []
-#Per IP bucket refill rate
+# Per IP bucket refill rate
default[:tilecache][:ip_bucket_refill] = "4096"
-#Per IP bucket size
+# Per IP bucket size
default[:tilecache][:ip_bucket_size] = "67108864"
-#Per Class C refill rate
+# Per Class C refill rate
default[:tilecache][:net_bucket_refill] = "8192"
-#Per Class C bucket size
+# Per Class C bucket size
default[:tilecache][:net_bucket_size] = "134217728"
default[:tilecache][:ssl][:certificate] = "tile.openstreetmap"
supports :status => true, :restart => true, :reload => true
end
-#Remove unused base package
+# Remove unused base package
package "mlocate" do
action :purge
end
-#Remove ubuntu "desktop" vestigal package
+# Remove ubuntu "desktop" vestigial package
package "whoopsie" do
action :purge
end
response = Net::HTTP.new("api.openstreetmap.org").request(request)
-exit!(0) if response.kind_of?(Net::HTTPSuccess)
+exit!(0) if response.is_a?(Net::HTTPSuccess)
exit!(1)
cwd "#{rails_directory}/lib/quad_tile"
user rails_user
group rails_group
- not_if { File.exist?("#{rails_directory}/lib/quad_tile/Makefile") and File.mtime("#{rails_directory}/lib/quad_tile/Makefile") >= File.mtime("#{rails_directory}/lib/quad_tile/extconf.rb") }
+ not_if { File.exist?("#{rails_directory}/lib/quad_tile/Makefile") && File.mtime("#{rails_directory}/lib/quad_tile/Makefile") >= File.mtime("#{rails_directory}/lib/quad_tile/extconf.rb") }
end
execute "#{rails_directory}/lib/quad_tile/Makefile" do
user rails_user
group rails_group
not_if do
- File.exist?("#{rails_directory}/lib/quad_tile/quad_tile_so.so") and
- File.mtime("#{rails_directory}/lib/quad_tile/quad_tile_so.so") >= File.mtime("#{rails_directory}/lib/quad_tile/Makefile") and
- File.mtime("#{rails_directory}/lib/quad_tile/quad_tile_so.so") >= File.mtime("#{rails_directory}/lib/quad_tile/quad_tile.c") and
- File.mtime("#{rails_directory}/lib/quad_tile/quad_tile_so.so") >= File.mtime("#{rails_directory}/lib/quad_tile/quad_tile.h")
+ File.exist?("#{rails_directory}/lib/quad_tile/quad_tile_so.so") &&
+ File.mtime("#{rails_directory}/lib/quad_tile/quad_tile_so.so") >= File.mtime("#{rails_directory}/lib/quad_tile/Makefile") &&
+ File.mtime("#{rails_directory}/lib/quad_tile/quad_tile_so.so") >= File.mtime("#{rails_directory}/lib/quad_tile/quad_tile.c") &&
+ File.mtime("#{rails_directory}/lib/quad_tile/quad_tile_so.so") >= File.mtime("#{rails_directory}/lib/quad_tile/quad_tile.h")
end
notifies :touch, "file[#{rails_directory}/tmp/restart.txt]"
end
content cgimap_init
end
-if ["database_offline", "api_offline"].include?(node[:web][:status])
+if %w(database_offline api_offline).include?(node[:web][:status])
service "cgimap" do
action :stop
end
:database_password => db_passwords["gpximport"]
end
-if ["database_offline", "database_readonly", "gpx_offline"].include?(node[:web][:status])
+if %w(database_offline database_readonly gpx_offline).include?(node[:web][:status])
service "gpx-import" do
action :stop
end
rails_directory = "#{node[:web][:base_directory]}/rails"
piwik_configuration = data_bag_item("web", "piwik").to_hash.reject do |k, _|
- ["chef_type", "data_bag", "id"].include?(k)
+ %w(chef_type data_bag id).include?(k)
end
rails_port "www.openstreetmap.org" do
gpx_dir "/store/rails/gpx"
attachments_dir "/store/rails/attachments"
log_path "#{node[:web][:log_directory]}/rails.log"
- memcache_servers [ "rails1", "rails2", "rails3" ]
+ memcache_servers %w(rails1 rails2 rails3)
potlatch2_key web_passwords["potlatch2_key"]
id_key web_passwords["id_key"]
oauth_key web_passwords["oauth_key"]
-#Force apache to listen only on localhost
-#default[:apache][:listen_address] = "127.0.0.1"
+# Force apache to listen only on localhost
+# default[:apache][:listen_address] = "127.0.0.1"
# limitations under the License.
#
-#include_recipe "squid"
+# include_recipe "squid"
include_recipe "mediawiki"
recaptcha_public_key "6LdFIQATAAAAAMwtHeI8KDgPqvRbXeNYSq1gujKz"
recaptcha_private_key passwords["recaptcha"]
- #site_readonly "MAINTENANCE UPDATE: WIKI READ-ONLY. ETA: Tuesday 8:00am UTC/GMT."
-
+ # site_readonly "MAINTENANCE UPDATE: WIKI READ-ONLY. ETA: Tuesday 8:00am UTC/GMT."
end
cookbook_file "/srv/wiki.openstreetmap.org/osm_logo_wiki.png" do
apache_site "dump.wiki.openstreetmap.org" do
template "apache_wiki_dump.erb"
directory "/srv/dump.wiki.openstreetmap.org"
- variables({
- :aliases => "dump.wiki.osm.org"
- })
+ variables :aliases => "dump.wiki.osm.org"
end
template "/etc/cron.d/wiki-osm-org-dump" do
default[:apache][:mpm] = "prefork"
# Make sure httpclient and php_serialize are installed
-default[:chef][:gems] |= [ "httpclient", "php_serialize" ]
+default[:chef][:gems] |= %w(httpclient php_serialize)
# Set wordpress defaults
default[:wordpress][:user] = "wordpress"
def self.current_plugin_version(name)
if svn_cat("http://plugins.svn.wordpress.org/#{name}/trunk/readme.txt") =~ /Stable tag:\s*([^\s\r]*)[\s\r]*/
- $1
+ Regexp.last_match[1]
else
"trunk"
end
package "php-apc"
-#Required for osmosis
+# Required for osmosis
package "default-jre-headless"
-#Required for building gosmore
+# Required for building gosmore
package "build-essential"
package "libxml2-dev"
package "libgtk2.0-dev"
name "crm"
description "Role applied to CRM server"
-
default_attributes(
:exim => {
:local_domains => [ "crm.osmfoundation.org" ],
:parameters => {
"block/cciss\!c0d0/queue/nr_requests" => "512",
"block/cciss\!c0d1/queue/nr_requests" => "512",
- "block/cciss\!c0d0/queue/scheduler" => "noop",
- "block/cciss\!c0d1/queue/scheduler" => "noop"
+ "block/cciss\!c0d0/queue/scheduler" => "noop",
+ "block/cciss\!c0d1/queue/scheduler" => "noop"
}
}
},
:tilecache => {
:tile_parent => "germany.render.openstreetmap.org",
:tile_siblings => [
- "tabaluga.openstreetmap.org",
- "konqi.openstreetmap.org",
- "trogdor.openstreetmap.org",
- "nepomuk.openstreetmap.org",
- "ridgeback.openstreetmap.org",
- "fume.openstreetmap.org",
- "gorynych.openstreetmap.org",
- "simurgh.openstreetmap.org"
+ "tabaluga.openstreetmap.org",
+ "konqi.openstreetmap.org",
+ "trogdor.openstreetmap.org",
+ "nepomuk.openstreetmap.org",
+ "ridgeback.openstreetmap.org",
+ "fume.openstreetmap.org",
+ "gorynych.openstreetmap.org",
+ "simurgh.openstreetmap.org"
]
}
)
:tilecache => {
:tile_parent => "germany.render.openstreetmap.org",
:tile_siblings => [
- "tabaluga.openstreetmap.org",
- "katie.openstreetmap.org",
- "trogdor.openstreetmap.org",
- "nepomuk.openstreetmap.org",
- "ridgeback.openstreetmap.org",
- "fume.openstreetmap.org",
- "gorynych.openstreetmap.org",
- "simurgh.openstreetmap.org"
+ "tabaluga.openstreetmap.org",
+ "katie.openstreetmap.org",
+ "trogdor.openstreetmap.org",
+ "nepomuk.openstreetmap.org",
+ "ridgeback.openstreetmap.org",
+ "fume.openstreetmap.org",
+ "gorynych.openstreetmap.org",
+ "simurgh.openstreetmap.org"
]
}
)
:network_conntrack_max => {
:comment => "Increase max number of connections tracked",
:parameters => {
- "net.netfilter.nf_conntrack_max" => "131072"
+ "net.netfilter.nf_conntrack_max" => "131072"
}
}
},
:parameters => {
"block/cciss\!c0d0/queue/nr_requests" => "512",
"block/cciss\!c0d1/queue/nr_requests" => "512",
- "block/cciss\!c0d0/queue/scheduler" => "noop",
- "block/cciss\!c0d1/queue/scheduler" => "noop",
- "block/sda/queue/nr_requests" => "512",
- "block/sda/queue/scheduler" => "deadline"
+ "block/cciss\!c0d0/queue/scheduler" => "noop",
+ "block/cciss\!c0d1/queue/scheduler" => "noop",
+ "block/sda/queue/nr_requests" => "512",
+ "block/sda/queue/scheduler" => "deadline"
}
}
}
:enabled => false,
:flatnode_file => "/ssd-old/nominatim/nodes.store",
:database => {
- :cluster => "9.3/main",
- :dbname => "nominatim",
- :postgis => "2.1"
+ :cluster => "9.3/main",
+ :dbname => "nominatim",
+ :postgis => "2.1"
},
:fpm_pools => {
- :www => {
- :port => "8000",
- :pm => "dynamic",
- :max_children => "60"
- },
- :bulk => {
- :port => "8001",
- :pm => "static",
- :max_children => "10"
- }
+ :www => {
+ :port => "8000",
+ :pm => "dynamic",
+ :max_children => "60"
+ },
+ :bulk => {
+ :port => "8001",
+ :pm => "static",
+ :max_children => "10"
+ }
},
:tablespaces => {
- "Osm2pgsql_Data" => "aux",
- "Osm2pgsql_Index" => "data",
- "Place_Data" => "ssd2",
- "Place_Index" => "ssd1",
- "Address_Data" => "ssd2",
- "Address_Index" => "ssd1",
- "Search_Data" => "ssd1",
- "Search_Index" => "ssd1",
- "Aux_Data" => "aux",
- "Aux_Index" => "aux"
+ "Osm2pgsql_Data" => "aux",
+ "Osm2pgsql_Index" => "data",
+ "Place_Data" => "ssd2",
+ "Place_Index" => "ssd1",
+ "Address_Data" => "ssd2",
+ "Address_Index" => "ssd1",
+ "Search_Data" => "ssd1",
+ "Search_Index" => "ssd1",
+ "Aux_Data" => "aux",
+ "Aux_Index" => "aux"
}
}
)
:enabled => true,
:flatnode_file => "/ssd/nominatim/nodes.store",
:database => {
- :cluster => "9.3/main",
- :dbname => "nominatim",
- :postgis => "2.1"
+ :cluster => "9.3/main",
+ :dbname => "nominatim",
+ :postgis => "2.1"
},
:fpm_pools => {
- :www => {
- :port => "8000",
- :pm => "dynamic",
- :max_children => "70"
- },
- :bulk => {
- :port => "8001",
- :pm => "static",
- :max_children => "10"
- }
+ :www => {
+ :port => "8000",
+ :pm => "dynamic",
+ :max_children => "70"
+ },
+ :bulk => {
+ :port => "8001",
+ :pm => "static",
+ :max_children => "10"
+ }
},
:tablespaces => {
- "Osm2pgsql_Data" => "ssd",
- "Osm2pgsql_Index" => "ssd",
- "Place_Data" => "ssd",
- "Place_Index" => "ssd",
- "Address_Data" => "ssd",
- "Address_Index" => "ssd",
- "Search_Data" => "ssd",
- "Search_Index" => "ssd",
- "Aux_Data" => "data",
- "Aux_Index" => "ssd"
+ "Osm2pgsql_Data" => "ssd",
+ "Osm2pgsql_Index" => "ssd",
+ "Place_Data" => "ssd",
+ "Place_Index" => "ssd",
+ "Address_Data" => "ssd",
+ "Address_Index" => "ssd",
+ "Search_Data" => "ssd",
+ "Search_Index" => "ssd",
+ "Aux_Data" => "data",
+ "Aux_Index" => "ssd"
}
}
)
:parameters => {
"block/sda/queue/nr_requests" => "512",
"block/sdb/queue/nr_requests" => "512",
- "block/sda/queue/scheduler" => "noop",
- "block/sdb/queue/scheduler" => "noop"
+ "block/sda/queue/scheduler" => "noop",
+ "block/sdb/queue/scheduler" => "noop"
}
}
}
:network_conntrack_max => {
:comment => "Increase max number of connections tracked",
:parameters => {
- "net.netfilter.nf_conntrack_max" => "131072"
+ "net.netfilter.nf_conntrack_max" => "131072"
}
},
:squid_swappiness => {
default_attributes(
:hardware => {
- :modules => [
- "i2c_i801", "jc42", "w83793"
- ],
+ :modules => %w(i2c_i801 jc42 w83793),
:sensors => {
"jc42-*" => {
:temps => {