Current version of Astute and mcagents from product.git repo

This commit is contained in:
Mike Scherbakov 2013-03-15 19:16:49 +04:00
parent 15e6d5d9fd
commit d74ba017ec
59 changed files with 7802 additions and 0 deletions

24
astute/astute.gemspec Normal file
View File

@@ -0,0 +1,24 @@
# Gem specification for the Astute deployment orchestrator.
$:.unshift File.expand_path('lib', File.dirname(__FILE__))
require 'astute/version'

Gem::Specification.new do |spec|
  spec.name        = 'astute'
  spec.version     = Astute::VERSION
  spec.summary     = 'Orchestrator for OpenStack deployment'
  spec.description = 'Deployment Orchestrator of Puppet via MCollective. Works as a library or from CLI.'
  spec.authors     = ['Mike Scherbakov']
  spec.email       = ['mscherbakov@mirantis.com']

  # Runtime dependencies.
  spec.add_dependency 'mcollective-client', '> 2.0.0'
  spec.add_dependency 'symboltable', '>= 1.0.2'
  spec.add_dependency 'activesupport', '~> 3.0.10'

  # Test-only dependencies.
  spec.add_development_dependency 'rspec'
  spec.add_development_dependency 'mocha'

  spec.files        = Dir.glob("{bin,lib,spec}/**/*")
  spec.executables  = ['astute']
  spec.require_path = 'lib'
end

55
astute/bin/astute Executable file
View File

@@ -0,0 +1,55 @@
#!/usr/bin/env ruby
require 'optparse'
require 'yaml'
begin
require 'astute'
rescue LoadError
require 'rubygems'
require 'astute'
end
# Minimal reporter: dumps each status message structure to stdout.
class ConsoleReporter
  # Prints the inspected message; returns nil (from puts).
  def report(msg)
    $stdout.puts(msg.inspect)
  end
end
# Collects the command-line options into a plain hash.
opts = {}
optparse = OptionParser.new do |parser|
  parser.banner = "Usage: bin/astute -f FILENAME"

  parser.on("-v", "--[no-]verbose", "Run verbosely") { |flag| opts[:verbose] = flag }

  parser.on("-f FILENAME", "Environment in YAML format. Samples are in examples directory.") do |path|
    opts[:filename] = path
  end

  # -h prints usage and stops immediately.
  parser.on("-h") do
    puts parser
    exit
  end
end
optparse.parse!(ARGV)

# An environment file is mandatory; without it, show usage and stop.
unless opts[:filename]
  puts optparse
  exit
end
reporter = ConsoleReporter.new
Astute.logger = Logger.new(STDOUT) if opts[:verbose]

# Load the environment description (task id, node list, attributes).
environment = YAML.load_file(opts[:filename])

# Choose the deployment engine from the environment attributes.
# Guard against a missing 'attributes' section so a sparse YAML file
# falls through to the default engine instead of raising NoMethodError.
attributes = environment['attributes'] || {}
case attributes['deployment_engine']
when 'nailyfact'
  deploy_engine = Astute::DeploymentEngine::NailyFact
when 'simplepuppet'
  deploy_engine = Astute::DeploymentEngine::SimplePuppet # It just calls puppet and doesn't do any magic
else
  deploy_engine = nil # Orchestrator will use its default
end

# Second positional argument disables log parsing. The original code
# wrote `log_parsing=false`, which only assigned a throwaway local
# before passing it positionally; pass the literal directly.
orchestrator = Astute::Orchestrator.new(deploy_engine, false)
orchestrator.deploy(reporter, environment['task_uuid'], environment['nodes'], environment['attributes'])
#orchestrator.verify_networks(reporter, task_id, nodes, networks)

View File

@@ -0,0 +1,474 @@
2013-01-23T09:24:16 info: 09:23:58,565 INFO : kernel command line: initrd=/images/centos63-x86_64/initrd.img ksdevice=bootif lang= locale=en_US text priority=critical kssendmac ks=http://10.0.168.2/cblr/svc/op/ks/system/slave-1 BOOT_IMAGE=/images/centos63-x86_64/vmlinuz BOOTIF=01-52-54-00-9a-db-f8
2013-01-23T09:24:16 info:
2013-01-23T09:24:16 info: 09:23:58,565 INFO : text mode forced from cmdline
2013-01-23T09:24:16 debug: 09:23:58,565 DEBUG : readNetInfo /tmp/s390net not found, early return
2013-01-23T09:24:16 info: 09:23:58,565 INFO : anaconda version 13.21.176 on x86_64 starting
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module ipv6
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module iscsi_ibft
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module iscsi_boot_sysfs
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module pcspkr
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module edd
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module floppy
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module iscsi_tcp
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module libiscsi_tcp
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module libiscsi
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module scsi_transport_iscsi
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module squashfs
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module cramfs
2013-01-23T09:24:16 debug: 09:23:58,730 DEBUG : probing buses
2013-01-23T09:24:16 debug: 09:23:58,863 DEBUG : waiting for hardware to initialize
2013-01-23T09:24:16 debug: 09:24:01,290 DEBUG : probing buses
2013-01-23T09:24:16 debug: 09:24:01,412 DEBUG : waiting for hardware to initialize
2013-01-23T09:24:16 info: 09:24:04,507 INFO : getting kickstart file
2013-01-23T09:24:16 info: 09:24:04,530 INFO : doing kickstart... setting it up
2013-01-23T09:24:16 debug: 09:24:04,531 DEBUG : activating device eth0
2013-01-23T09:24:16 info: 09:24:10,548 INFO : wait_for_iface_activation (2309): device eth0 activated
2013-01-23T09:24:16 info: 09:24:10,550 INFO : file location: http://10.0.168.2/cblr/svc/op/ks/system/slave-1
2013-01-23T09:24:16 info: 09:24:10,551 INFO : transferring http://10.0.168.2/cblr/svc/op/ks/system/slave-1
2013-01-23T09:24:16 info: 09:24:11,511 INFO : setting up kickstart
2013-01-23T09:24:16 info: 09:24:11,511 INFO : kickstart forcing text mode
2013-01-23T09:24:16 info: 09:24:11,511 INFO : kickstartFromUrl
2013-01-23T09:24:16 info: 09:24:11,511 INFO : results of url ks, url http://10.0.168.2:8080/centos/6.3/nailgun/x86_64
2013-01-23T09:24:16 err: 09:24:11,512 ERROR : got to setupCdrom without a CD device
2013-01-23T09:24:16 info: 09:24:11,512 INFO : no stage2= given, assuming http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 debug: 09:24:11,512 DEBUG : going to set language to en_US.UTF-8
2013-01-23T09:24:16 info: 09:24:11,512 INFO : setting language to en_US.UTF-8
2013-01-23T09:24:16 info: 09:24:11,551 INFO : starting STEP_METHOD
2013-01-23T09:24:16 debug: 09:24:11,551 DEBUG : loaderData->method is set, adding skipMethodDialog
2013-01-23T09:24:16 debug: 09:24:11,551 DEBUG : skipMethodDialog is set
2013-01-23T09:24:16 info: 09:24:11,560 INFO : starting STEP_STAGE2
2013-01-23T09:24:16 info: 09:24:11,560 INFO : URL_STAGE_MAIN: url is http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 info: 09:24:11,560 INFO : transferring http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/updates.img
2013-01-23T09:24:16 err: 09:24:11,563 ERROR : Error downloading http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/updates.img: HTTP response code said error
2013-01-23T09:24:16 info: 09:24:11,565 INFO : transferring http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/product.img
2013-01-23T09:24:16 err: 09:24:11,568 ERROR : Error downloading http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/product.img: HTTP response code said error
2013-01-23T09:24:16 info: 09:24:11,569 INFO : transferring http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 info: 09:24:12,077 INFO : mounted loopback device /mnt/runtime on /dev/loop0 as /tmp/install.img
2013-01-23T09:24:16 info: 09:24:12,078 INFO : got stage2 at url http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 info: 09:24:12,133 INFO : Loading SELinux policy
2013-01-23T09:24:16 info: 09:24:13,072 INFO : getting ready to spawn shell now
2013-01-23T09:24:16 info: 09:24:13,436 INFO : Running anaconda script /usr/bin/anaconda
2013-01-23T09:24:16 info: 09:24:16,109 INFO : CentOS Linux is the highest priority installclass, using it
2013-01-23T09:24:16 warning: 09:24:16,164 WARNING : /usr/lib/python2.6/site-packages/pykickstart/parser.py:713: DeprecationWarning: Script does not end with %end. This syntax has been deprecated. It may be removed from future releases, which will result in a fatal error from kickstart. Please modify your kickstart file to use this updated syntax.
2013-01-23T09:24:17 info: warnings.warn(_("%s does not end with %%end. This syntax has been deprecated. It may be removed from future releases, which will result in a fatal error from kickstart. Please modify your kickstart file to use this updated syntax.") % _("Script"), DeprecationWarning)
2013-01-23T09:24:17 info:
2013-01-23T09:24:17 info: 09:24:16,164 INFO : Running kickstart %%pre script(s)
2013-01-23T09:24:17 warning: 09:24:16,165 WARNING : '/bin/sh' specified as full path
2013-01-23T09:24:17 info: 09:24:17,369 INFO : All kickstart %%pre script(s) have been run
2013-01-23T09:24:17 info: 09:24:17,441 INFO : ISCSID is /usr/sbin/iscsid
2013-01-23T09:24:17 info: 09:24:17,442 INFO : no initiator set
2013-01-23T09:24:17 warning: 09:24:17,646 WARNING : '/usr/libexec/fcoe/fcoe_edd.sh' specified as full path
2013-01-23T09:24:18 info: 09:24:17,674 INFO : No FCoE EDD info found: No FCoE boot disk information is found in EDD!
2013-01-23T09:24:18 info:
2013-01-23T09:24:18 info: 09:24:17,674 INFO : no /etc/zfcp.conf; not configuring zfcp
2013-01-23T09:24:18 info: 09:24:17,776 INFO : created new libuser.conf at /tmp/libuser.JtvFQd with instPath="/mnt/sysimage"
2013-01-23T09:24:18 info: 09:24:17,777 INFO : anaconda called with cmdline = ['/usr/bin/anaconda', '--stage2', 'http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img', '--kickstart', '/tmp/ks.cfg', '-T', '--selinux', '--lang', 'en_US.UTF-8', '--keymap', 'us', '--repo', 'http://10.0.168.2:8080/centos/6.3/nailgun/x86_64']
2013-01-23T09:24:18 info: 09:24:17,777 INFO : Display mode = t
2013-01-23T09:24:18 info: 09:24:17,777 INFO : Default encoding = utf-8
2013-01-23T09:24:18 info: 09:24:17,898 INFO : Detected 752M of memory
2013-01-23T09:24:18 info: 09:24:17,899 INFO : Swap attempt of 1504M
2013-01-23T09:24:18 info: 09:24:18,372 INFO : ISCSID is /usr/sbin/iscsid
2013-01-23T09:24:18 info: 09:24:18,373 INFO : no initiator set
2013-01-23T09:24:19 warning: 09:24:18,893 WARNING : Timezone UTC set in kickstart is not valid.
2013-01-23T09:24:19 info: 09:24:19,012 INFO : Detected 752M of memory
2013-01-23T09:24:19 info: 09:24:19,012 INFO : Swap attempt of 1504M
2013-01-23T09:24:19 info: 09:24:19,064 INFO : setting installation environment hostname to slave-1.mirantis.com
2013-01-23T09:24:19 warning: 09:24:19,076 WARNING : step installtype does not exist
2013-01-23T09:24:19 warning: 09:24:19,076 WARNING : step confirminstall does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,079 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,079 WARNING : step complete does not exist
2013-01-23T09:24:19 info: 09:24:19,080 INFO : moving (1) to step setuptime
2013-01-23T09:24:19 debug: 09:24:19,081 DEBUG : setuptime is a direct step
2013-01-23T09:24:19 warning: 09:24:19,081 WARNING : '/usr/sbin/hwclock' specified as full path
2013-01-23T09:24:20 info: 09:24:20,002 INFO : leaving (1) step setuptime
2013-01-23T09:24:20 info: 09:24:20,003 INFO : moving (1) to step autopartitionexecute
2013-01-23T09:24:20 debug: 09:24:20,003 DEBUG : autopartitionexecute is a direct step
2013-01-23T09:24:20 info: 09:24:20,143 INFO : leaving (1) step autopartitionexecute
2013-01-23T09:24:20 info: 09:24:20,143 INFO : moving (1) to step storagedone
2013-01-23T09:24:20 debug: 09:24:20,144 DEBUG : storagedone is a direct step
2013-01-23T09:24:20 info: 09:24:20,144 INFO : leaving (1) step storagedone
2013-01-23T09:24:20 info: 09:24:20,144 INFO : moving (1) to step enablefilesystems
2013-01-23T09:24:20 debug: 09:24:20,144 DEBUG : enablefilesystems is a direct step
2013-01-23T09:25:01 debug: 09:25:00,646 DEBUG : notifying kernel of 'change' event on device /sys/class/block/vda1
2013-01-23T09:25:01 info: 09:25:01,684 INFO : failed to set SELinux context for /mnt/sysimage: [Errno 95] Operation not supported
2013-01-23T09:25:01 debug: 09:25:01,684 DEBUG : isys.py:mount()- going to mount /dev/vda1 on /mnt/sysimage as ext4 with options defaults
2013-01-23T09:25:01 debug: 09:25:01,704 DEBUG : isys.py:mount()- going to mount //dev on /mnt/sysimage/dev as bind with options defaults,bind
2013-01-23T09:25:01 debug: 09:25:01,715 DEBUG : isys.py:mount()- going to mount devpts on /mnt/sysimage/dev/pts as devpts with options gid=5,mode=620
2013-01-23T09:25:02 debug: 09:25:01,728 DEBUG : isys.py:mount()- going to mount tmpfs on /mnt/sysimage/dev/shm as tmpfs with options defaults
2013-01-23T09:25:02 info: 09:25:01,742 INFO : failed to get default SELinux context for /proc: [Errno 2] No such file or directory
2013-01-23T09:25:02 debug: 09:25:01,742 DEBUG : isys.py:mount()- going to mount proc on /mnt/sysimage/proc as proc with options defaults
2013-01-23T09:25:02 info: 09:25:01,746 INFO : failed to get default SELinux context for /proc: [Errno 2] No such file or directory
2013-01-23T09:25:02 debug: 09:25:01,755 DEBUG : isys.py:mount()- going to mount sysfs on /mnt/sysimage/sys as sysfs with options defaults
2013-01-23T09:25:02 info: 09:25:01,762 INFO : leaving (1) step enablefilesystems
2013-01-23T09:25:02 info: 09:25:01,762 INFO : moving (1) to step bootloadersetup
2013-01-23T09:25:02 debug: 09:25:01,762 DEBUG : bootloadersetup is a direct step
2013-01-23T09:25:02 info: 09:25:01,765 INFO : leaving (1) step bootloadersetup
2013-01-23T09:25:02 info: 09:25:01,765 INFO : moving (1) to step reposetup
2013-01-23T09:25:02 debug: 09:25:01,766 DEBUG : reposetup is a direct step
2013-01-23T09:25:02 err: 09:25:01,779 ERROR : Error downloading treeinfo file: [Errno 14] PYCURL ERROR 22 - "The requested URL returned error: 404"
2013-01-23T09:25:02 err: 09:25:01,917 ERROR : Error downloading treeinfo file: [Errno 14] PYCURL ERROR 22 - "The requested URL returned error: 404"
2013-01-23T09:25:02 err: 09:25:01,921 ERROR : Error downloading treeinfo file: [Errno 14] PYCURL ERROR 22 - "The requested URL returned error: 404"
2013-01-23T09:25:02 info: 09:25:01,922 INFO : added repository Nailgun with URL http://10.0.168.2:8080/centos/6.3/nailgun/x86_64
2013-01-23T09:25:02 debug: 09:25:01,930 DEBUG : Grabbing http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/repodata/repomd.xml
2013-01-23T09:25:02 debug: 09:25:01,937 DEBUG : Grabbing http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/repodata/primary.xml.gz
2013-01-23T09:25:02 debug: 09:25:01,944 DEBUG : Grabbing http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/repodata/comps.xml
2013-01-23T09:25:04 info: 09:25:04,547 INFO : leaving (1) step reposetup
2013-01-23T09:25:04 info: 09:25:04,547 INFO : moving (1) to step basepkgsel
2013-01-23T09:25:04 debug: 09:25:04,547 DEBUG : basepkgsel is a direct step
2013-01-23T09:25:04 warning: 09:25:04,665 WARNING : not adding Base group
2013-01-23T09:25:05 info: 09:25:04,810 INFO : leaving (1) step basepkgsel
2013-01-23T09:25:05 info: 09:25:04,811 INFO : moving (1) to step postselection
2013-01-23T09:25:05 debug: 09:25:04,811 DEBUG : postselection is a direct step
2013-01-23T09:25:05 info: 09:25:04,814 INFO : selected kernel package for kernel
2013-01-23T09:25:05 debug: 09:25:05,546 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/ext4/ext4.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,546 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/mbcache.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/jbd2/jbd2.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/fcoe/fcoe.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/fcoe/libfcoe.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/libfc/libfc.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/scsi_transport_fc.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/scsi_tgt.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/xts.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/lrw.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/gf128mul.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/sha256_generic.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/cbc.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-crypt.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-round-robin.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-multipath.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-snapshot.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-mirror.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-region-hash.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-log.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-zero.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-mod.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/linear.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid10.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid456.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_raid6_recov.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_pq.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/lib/raid6/raid6_pq.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_xor.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/xor.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_memcpy.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_tx.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid1.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid0.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/hw/mlx4/mlx4_ib.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/net/mlx4/mlx4_en.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/net/mlx4/mlx4_core.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/ulp/ipoib/ib_ipoib.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_cm.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_sa.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_mad.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_core.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/block/virtio_blk.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/net/virtio_net.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/ata/pata_acpi.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/ata/ata_generic.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/ata/ata_piix.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/virtio/virtio_pci.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/virtio/virtio_ring.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/virtio/virtio.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/net/ipv6/ipv6.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/firmware/iscsi_ibft.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/iscsi_boot_sysfs.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/input/misc/pcspkr.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/firmware/edd.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/block/floppy.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/iscsi_tcp.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/libiscsi_tcp.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/libiscsi.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/scsi_transport_iscsi.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,553 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/squashfs/squashfs.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,553 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/cramfs/cramfs.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,553 DEBUG : selecting kernel-devel
2013-01-23T09:25:05 debug: 09:25:05,561 DEBUG : no package matching kernel-devel.x86_64
2013-01-23T09:25:05 debug: 09:25:05,571 DEBUG : no package matching authconfig
2013-01-23T09:25:05 debug: 09:25:05,580 DEBUG : no package matching system-config-firewall-base
2013-01-23T09:25:08 info: 09:25:08,036 INFO : leaving (1) step postselection
2013-01-23T09:25:08 info: 09:25:08,037 INFO : moving (1) to step install
2013-01-23T09:25:08 info: 09:25:08,039 INFO : leaving (1) step install
2013-01-23T09:25:08 info: 09:25:08,040 INFO : moving (1) to step preinstallconfig
2013-01-23T09:25:08 debug: 09:25:08,040 DEBUG : preinstallconfig is a direct step
2013-01-23T09:25:08 debug: 09:25:08,045 DEBUG : isys.py:mount()- going to mount /selinux on /mnt/sysimage/selinux as selinuxfs with options defaults
2013-01-23T09:25:08 debug: 09:25:08,055 DEBUG : isys.py:mount()- going to mount /proc/bus/usb on /mnt/sysimage/proc/bus/usb as usbfs with options defaults
2013-01-23T09:25:08 info: 09:25:08,069 INFO : copy_to_sysimage: source '/etc/multipath/wwids' does not exist.
2013-01-23T09:25:08 info: 09:25:08,069 INFO : copy_to_sysimage: source '/etc/multipath/bindings' does not exist.
2013-01-23T09:25:08 info: 09:25:08,081 INFO : copy_to_sysimage: source '/etc/multipath/wwids' does not exist.
2013-01-23T09:25:08 info: 09:25:08,081 INFO : copy_to_sysimage: source '/etc/multipath/bindings' does not exist.
2013-01-23T09:25:08 info: 09:25:08,086 INFO : leaving (1) step preinstallconfig
2013-01-23T09:25:08 info: 09:25:08,086 INFO : moving (1) to step installpackages
2013-01-23T09:25:08 debug: 09:25:08,086 DEBUG : installpackages is a direct step
2013-01-23T09:25:08 info: 09:25:08,087 INFO : Preparing to install packages
2013-01-23T09:25:10 info: Installing libgcc-4.4.6-4.el6.x86_64
2013-01-23T09:25:10 info: warning: libgcc-4.4.6-4.el6.x86_64: Header V3 RSA/SHA1 Signature, key ID c105b9de: NOKEY
2013-01-23T09:25:10 info: Installing setup-2.8.14-16.el6.noarch
2013-01-23T09:25:10 info: Installing filesystem-2.4.30-3.el6.x86_64
2013-01-23T09:25:11 info: Installing basesystem-10.0-4.el6.noarch
2013-01-23T09:25:11 info: Installing kernel-headers-2.6.32-279.19.1.el6.centos.plus.x86_64
2013-01-23T09:25:11 info: Installing ca-certificates-2010.63-3.el6_1.5.noarch
2013-01-23T09:25:11 info: Installing ncurses-base-5.7-3.20090208.el6.x86_64
2013-01-23T09:25:12 info: Installing tzdata-2012i-2.el6.noarch
2013-01-23T09:25:13 info: Installing glibc-common-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:25 info: Installing nss-softokn-freebl-3.12.9-11.el6.x86_64
2013-01-23T09:25:25 info: Installing glibc-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:28 info: Installing ncurses-libs-5.7-3.20090208.el6.x86_64
2013-01-23T09:25:28 info: Installing bash-4.1.2-9.el6_2.x86_64
2013-01-23T09:25:28 info: Installing libattr-2.4.44-7.el6.x86_64
2013-01-23T09:25:29 info: Installing libcap-2.16-5.5.el6.x86_64
2013-01-23T09:25:29 info: Installing zlib-1.2.3-27.el6.x86_64
2013-01-23T09:25:29 info: Installing info-4.13a-8.el6.x86_64
2013-01-23T09:25:29 info: Installing db4-4.7.25-17.el6.x86_64
2013-01-23T09:25:29 info: Installing libacl-2.2.49-6.el6.x86_64
2013-01-23T09:25:29 info: Installing audit-libs-2.2-2.el6.x86_64
2013-01-23T09:25:29 info: Installing libcom_err-1.41.12-12.el6.x86_64
2013-01-23T09:25:29 info: Installing nspr-4.9.1-2.el6_3.x86_64
2013-01-23T09:25:29 info: Installing popt-1.13-7.el6.x86_64
2013-01-23T09:25:29 info: Installing chkconfig-1.3.49.3-2.el6.x86_64
2013-01-23T09:25:29 info: Installing nss-util-3.13.5-1.el6_3.x86_64
2013-01-23T09:25:30 info: Installing bzip2-libs-1.0.5-7.el6_0.x86_64
2013-01-23T09:25:30 info: Installing libsepol-2.0.41-4.el6.x86_64
2013-01-23T09:25:30 info: Installing libselinux-2.0.94-5.3.el6.x86_64
2013-01-23T09:25:30 info: Installing shadow-utils-4.1.4.2-13.el6.x86_64
2013-01-23T09:25:30 info: Installing sed-4.2.1-10.el6.x86_64
2013-01-23T09:25:30 info: Installing glib2-2.22.5-7.el6.x86_64
2013-01-23T09:25:30 info: Installing gamin-0.1.10-9.el6.x86_64
2013-01-23T09:25:31 info: Installing libstdc++-4.4.6-4.el6.x86_64
2013-01-23T09:25:31 info: Installing gmp-4.3.1-7.el6_2.2.x86_64
2013-01-23T09:25:31 info: Installing readline-6.0-4.el6.x86_64
2013-01-23T09:25:31 info: Installing sqlite-3.6.20-1.el6.x86_64
2013-01-23T09:25:31 info: Installing file-libs-5.04-13.el6.x86_64
2013-01-23T09:25:31 info: Installing dbus-libs-1.2.24-7.el6_3.x86_64
2013-01-23T09:25:32 info: Installing lua-5.1.4-4.1.el6.x86_64
2013-01-23T09:25:32 info: Installing pcre-7.8-4.el6.x86_64
2013-01-23T09:25:32 info: Installing grep-2.6.3-3.el6.x86_64
2013-01-23T09:25:32 info: Installing libidn-1.18-2.el6.x86_64
2013-01-23T09:25:32 info: Installing gawk-3.1.7-9.el6.x86_64
2013-01-23T09:25:32 info: Installing libuuid-2.17.2-12.7.el6_3.x86_64
2013-01-23T09:25:32 info: Installing libblkid-2.17.2-12.7.el6_3.x86_64
2013-01-23T09:25:32 info: Installing xz-libs-4.999.9-0.3.beta.20091007git.el6.x86_64
2013-01-23T09:25:32 info: Installing elfutils-libelf-0.152-1.el6.x86_64
2013-01-23T09:25:32 info: Installing gdbm-1.8.0-36.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-Pod-Escapes-1.04-127.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-libs-5.10.1-127.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-Module-Pluggable-3.90-127.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-version-0.77-127.el6.x86_64
2013-01-23T09:25:33 info: Installing perl-Pod-Simple-3.13-127.el6.x86_64
2013-01-23T09:25:33 info: Installing perl-5.10.1-127.el6.x86_64
2013-01-23T09:25:39 info: Installing libgpg-error-1.7-4.el6.x86_64
2013-01-23T09:25:39 info: Installing findutils-4.4.2-6.el6.x86_64
2013-01-23T09:25:39 info: Installing libselinux-utils-2.0.94-5.3.el6.x86_64
2013-01-23T09:25:39 info: Installing iptables-1.4.7-5.1.el6_2.x86_64
2013-01-23T09:25:39 info: Installing cyrus-sasl-lib-2.1.23-13.el6_3.1.x86_64
2013-01-23T09:25:39 info: Installing cpio-2.10-11.el6_3.x86_64
2013-01-23T09:25:39 info: Installing binutils-2.20.51.0.2-5.34.el6.x86_64
2013-01-23T09:25:40 info: Installing which-2.19-6.el6.x86_64
2013-01-23T09:25:40 info: Installing libedit-2.11-4.20080712cvs.1.el6.x86_64
2013-01-23T09:25:40 info: Installing sysvinit-tools-2.87-4.dsf.el6.x86_64
2013-01-23T09:25:40 info: Installing tcp_wrappers-libs-7.6-57.el6.x86_64
2013-01-23T09:25:40 info: Installing expat-2.0.1-11.el6_2.x86_64
2013-01-23T09:25:40 info: Installing pth-2.0.7-9.3.el6.x86_64
2013-01-23T09:25:41 info: Installing dbus-glib-0.86-5.el6.x86_64
2013-01-23T09:25:41 info: Installing iproute-2.6.32-20.el6.x86_64
2013-01-23T09:25:41 info: Installing libgcrypt-1.4.5-9.el6_2.2.x86_64
2013-01-23T09:25:41 info: Installing grubby-7.0.15-3.el6.x86_64
2013-01-23T09:25:41 info: Installing libnih-1.0.1-7.el6.x86_64
2013-01-23T09:25:41 info: Installing upstart-0.6.5-12.el6.x86_64
2013-01-23T09:25:41 info: Installing file-5.04-13.el6.x86_64
2013-01-23T09:25:41 info: Installing nss-softokn-3.12.9-11.el6.x86_64
2013-01-23T09:25:41 info: Installing ppl-0.10.2-11.el6.x86_64
2013-01-23T09:25:41 info: Installing cloog-ppl-0.15.7-1.2.el6.x86_64
2013-01-23T09:25:42 info: Installing mpfr-2.4.1-6.el6.x86_64
2013-01-23T09:25:42 info: Installing cpp-4.4.6-4.el6.x86_64
2013-01-23T09:25:43 info: Installing libusb-0.1.12-23.el6.x86_64
2013-01-23T09:25:43 info: Installing libutempter-1.1.5-4.1.el6.x86_64
2013-01-23T09:25:43 info: Installing MAKEDEV-3.24-6.el6.x86_64
2013-01-23T09:25:43 info: Installing vim-minimal-7.2.411-1.8.el6.x86_64
2013-01-23T09:25:43 info: Installing procps-3.2.8-23.el6.x86_64
2013-01-23T09:25:43 info: Installing psmisc-22.6-15.el6_0.1.x86_64
2013-01-23T09:25:43 info: Installing net-tools-1.60-110.el6_2.x86_64
2013-01-23T09:25:43 info: Installing checkpolicy-2.0.22-1.el6.x86_64
2013-01-23T09:25:44 info: Installing libselinux-ruby-2.0.94-5.3.el6.x86_64
2013-01-23T09:25:44 info: Installing augeas-libs-0.9.0-4.el6.x86_64
2013-01-23T09:25:44 info: Installing tar-1.23-7.el6.x86_64
2013-01-23T09:25:44 info: Installing bzip2-1.0.5-7.el6_0.x86_64
2013-01-23T09:25:44 info: Installing pinentry-0.7.6-6.el6.x86_64
2013-01-23T09:25:46 info: Installing libss-1.41.12-12.el6.x86_64
2013-01-23T09:25:46 info: Installing e2fsprogs-libs-1.41.12-12.el6.x86_64
2013-01-23T09:25:46 info: Installing db4-utils-4.7.25-17.el6.x86_64
2013-01-23T09:25:46 info: Installing libgomp-4.4.6-4.el6.x86_64
2013-01-23T09:25:46 info: Installing diffutils-2.8.1-28.el6.x86_64
2013-01-23T09:25:46 info: Installing libxml2-2.7.6-8.el6_3.3.x86_64
2013-01-23T09:25:47 info: Installing glibc-headers-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:48 info: Installing glibc-devel-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:49 info: Installing ncurses-5.7-3.20090208.el6.x86_64
2013-01-23T09:25:49 info: Installing groff-1.18.1.4-21.el6.x86_64
2013-01-23T09:25:50 info: Installing less-436-10.el6.x86_64
2013-01-23T09:25:50 info: Installing coreutils-libs-8.4-19.el6.x86_64
2013-01-23T09:25:50 info: Installing gzip-1.3.12-18.el6.x86_64
2013-01-23T09:25:50 info: Installing cracklib-2.8.16-4.el6.x86_64
2013-01-23T09:25:50 info: Installing cracklib-dicts-2.8.16-4.el6.x86_64
2013-01-23T09:25:51 info: Installing coreutils-8.4-19.el6.x86_64
2013-01-23T09:25:52 info: Installing pam-1.1.1-10.el6_2.1.x86_64
2013-01-23T09:25:54 info: Installing module-init-tools-3.9-20.el6.x86_64
2013-01-23T09:25:55 info: Installing hwdata-0.233-7.8.el6.noarch
2013-01-23T09:25:57 info: Installing redhat-logos-60.0.14-12.el6.centos.noarch
2013-01-23T09:25:59 info: Installing plymouth-scripts-0.8.3-24.el6.centos.x86_64
2013-01-23T09:25:59 info: Installing logrotate-3.7.8-15.el6.x86_64
2013-01-23T09:25:59 info: Installing nss-3.13.5-1.el6_3.x86_64
2013-01-23T09:25:59 info: Installing nss-sysinit-3.13.5-1.el6_3.x86_64
2013-01-23T09:25:59 info: Installing nss-tools-3.13.5-1.el6_3.x86_64
2013-01-23T09:26:00 info: Installing openldap-2.4.23-26.el6_3.2.x86_64
2013-01-23T09:26:00 info: Installing compat-readline5-5.2-17.1.el6.x86_64
2013-01-23T09:26:00 info: Installing libcap-ng-0.6.4-3.el6_0.1.x86_64
2013-01-23T09:26:00 info: Installing ethtool-2.6.33-0.3.el6.x86_64
2013-01-23T09:26:00 info: Installing mingetty-1.08-5.el6.x86_64
2013-01-23T09:26:00 info: Installing vconfig-1.9-8.1.el6.x86_64
2013-01-23T09:26:00 info: Installing dmidecode-2.11-2.el6.x86_64
2013-01-23T09:26:00 info: Installing keyutils-libs-1.4-4.el6.x86_64
2013-01-23T09:26:00 info: Installing krb5-libs-1.9-33.el6_3.3.x86_64
2013-01-23T09:26:01 info: Installing openssl-1.0.0-25.el6_3.1.x86_64
2013-01-23T09:26:01 info: Installing ruby-libs-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:03 info: Installing ruby-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:03 info: Installing libssh2-1.2.2-11.el6_3.x86_64
2013-01-23T09:26:03 info: Installing libcurl-7.19.7-26.el6_2.4.x86_64
2013-01-23T09:26:03 info: Installing curl-7.19.7-26.el6_2.4.x86_64
2013-01-23T09:26:03 info: Installing rpm-libs-4.8.0-27.el6.x86_64
2013-01-23T09:26:04 info: Installing rpm-4.8.0-27.el6.x86_64
2013-01-23T09:26:04 info: Installing gnupg2-2.0.14-4.el6.x86_64
2013-01-23T09:26:04 info: Installing gpgme-1.1.8-3.el6.x86_64
2013-01-23T09:26:05 info: Installing ruby-irb-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:05 info: Installing ruby-rdoc-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:06 info: Installing rubygems-1.3.7-1.el6.noarch
2013-01-23T09:26:06 info: Installing rubygem-stomp-1.1.8-1.el6.noarch
2013-01-23T09:26:06 info: warning: rubygem-stomp-1.1.8-1.el6.noarch: Header V3 RSA/SHA256 Signature, key ID 0608b895: NOKEY
2013-01-23T09:26:06 info: Installing mcollective-common-2.2.2-1.el6.noarch
2013-01-23T09:26:06 info: warning: mcollective-common-2.2.2-1.el6.noarch: Header V4 RSA/SHA1 Signature, key ID 4bd6ec30: NOKEY
2013-01-23T09:26:07 info: Installing mcollective-2.2.2-1.el6.noarch
2013-01-23T09:26:07 info: Installing ruby-augeas-0.4.1-1.el6.x86_64
2013-01-23T09:26:07 info: Installing ruby-shadow-1.4.1-13.el6.x86_64
2013-01-23T09:26:07 info: Installing fipscheck-lib-1.2.0-7.el6.x86_64
2013-01-23T09:26:07 info: Installing fipscheck-1.2.0-7.el6.x86_64
2013-01-23T09:26:07 info: Installing ustr-1.0.4-9.1.el6.x86_64
2013-01-23T09:26:07 info: Installing libsemanage-2.0.43-4.1.el6.x86_64
2013-01-23T09:26:07 info: Installing libffi-3.0.5-3.2.el6.x86_64
2013-01-23T09:26:07 info: Installing python-libs-2.6.6-29.el6_3.3.x86_64
2013-01-23T09:26:08 info: Installing python-2.6.6-29.el6_3.3.x86_64
2013-01-23T09:26:12 info: Installing scapy-2.0.0.10-5.el6.noarch
2013-01-23T09:26:13 info: Installing yum-metadata-parser-1.1.2-16.el6.x86_64
2013-01-23T09:26:13 info: Installing pygpgme-0.1-18.20090824bzr68.el6.x86_64
2013-01-23T09:26:13 info: Installing rpm-python-4.8.0-27.el6.x86_64
2013-01-23T09:26:13 info: Installing python-iniparse-0.3.1-2.1.el6.noarch
2013-01-23T09:26:13 info: Installing python-pycurl-7.19.0-8.el6.x86_64
2013-01-23T09:26:13 info: Installing python-urlgrabber-3.9.1-8.el6.noarch
2013-01-23T09:26:13 info: Installing yum-plugin-fastestmirror-1.1.30-14.el6.noarch
2013-01-23T09:26:13 info: Installing yum-3.2.29-30.el6.centos.noarch
2013-01-23T09:26:13 info: Installing dash-0.5.5.1-3.1.el6.x86_64
2013-01-23T09:26:14 info: Installing pciutils-libs-3.1.4-11.el6.x86_64
2013-01-23T09:26:14 info: Installing pciutils-3.1.4-11.el6.x86_64
2013-01-23T09:26:14 info: Installing facter-1.6.17-1.el6.x86_64
2013-01-23T09:26:14 info: Installing plymouth-core-libs-0.8.3-24.el6.centos.x86_64
2013-01-23T09:26:14 info: Installing kbd-misc-1.15-11.el6.noarch
2013-01-23T09:26:14 info: Installing centos-release-6-3.el6.centos.9.x86_64
2013-01-23T09:26:14 info: Installing iputils-20071127-16.el6.x86_64
2013-01-23T09:26:14 info: Installing util-linux-ng-2.17.2-12.7.el6_3.x86_64
2013-01-23T09:26:15 info: Installing initscripts-9.03.31-2.el6.centos.1.x86_64
2013-01-23T09:26:16 info: Installing udev-147-2.42.el6.x86_64
2013-01-23T09:26:16 info: Installing openssh-5.3p1-81.el6_3.x86_64
2013-01-23T09:26:16 info: Installing kbd-1.15-11.el6.x86_64
2013-01-23T09:26:16 info: Installing rsyslog-5.8.10-2.el6.x86_64
2013-01-23T09:26:17 info: Installing exim-4.72-4.el6.x86_64
2013-01-23T09:26:17 info: Installing crontabs-1.10-33.el6.noarch
2013-01-23T09:26:17 info: Installing cronie-anacron-1.4.4-7.el6.x86_64
2013-01-23T09:26:17 info: Installing cronie-1.4.4-7.el6.x86_64
2013-01-23T09:26:17 info: Installing ntpdate-4.2.4p8-2.el6.centos.x86_64
2013-01-23T09:26:17 info: Installing dhcp-common-4.1.1-31.0.1.P1.el6.centos.1.x86_64
2013-01-23T09:26:17 info: Installing kernel-firmware-2.6.32-279.19.1.el6.centos.plus.noarch
2013-01-23T09:26:19 info: Installing libdrm-2.4.25-2.el6.x86_64
2013-01-23T09:26:19 info: Installing plymouth-0.8.3-24.el6.centos.x86_64
2013-01-23T09:26:19 info: Installing dracut-004-284.el6_3.1.noarch
2013-01-23T09:26:19 info: Installing dracut-kernel-004-284.el6_3.1.noarch
2013-01-23T09:26:19 info: Installing kernel-2.6.32-279.19.1.el6.centos.plus.x86_64
2013-01-23T09:26:27 info: Installing dhclient-4.1.1-31.0.1.P1.el6.centos.1.x86_64
2013-01-23T09:26:27 info: Installing ntp-4.2.4p8-2.el6.centos.x86_64
2013-01-23T09:26:27 info: Installing openssh-clients-5.3p1-81.el6_3.x86_64
2013-01-23T09:26:27 info: Installing openssh-server-5.3p1-81.el6_3.x86_64
2013-01-23T09:26:28 info: Installing puppet-2.7.19-1.el6.noarch
2013-01-23T09:26:30 info: Installing policycoreutils-2.0.83-19.24.el6.x86_64
2013-01-23T09:26:31 info: Installing nailgun-net-check-0.0.2-1.x86_64
2013-01-23T09:26:31 info: Installing grub-0.97-77.el6.x86_64
2013-01-23T09:26:31 info: Installing nailgun-mcagents-0.1.0-1.x86_64
2013-01-23T09:26:31 info: Installing ruby-devel-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:31 info: Installing wget-1.12-1.4.el6.x86_64
2013-01-23T09:26:31 info: Installing sudo-1.7.4p5-13.el6_3.x86_64
2013-01-23T09:26:31 info: Installing nailgun-agent-0.1.0-1.x86_64
2013-01-23T09:26:31 info: Installing gcc-4.4.6-4.el6.x86_64
2013-01-23T09:26:35 info: Installing e2fsprogs-1.41.12-12.el6.x86_64
2013-01-23T09:26:35 info: Installing iptables-ipv6-1.4.7-5.1.el6_2.x86_64
2013-01-23T09:26:35 info: Installing acl-2.2.49-6.el6.x86_64
2013-01-23T09:26:35 info: Installing make-3.81-20.el6.x86_64
2013-01-23T09:26:35 info: Installing attr-2.4.44-7.el6.x86_64
2013-01-23T09:27:14 info: 09:27:14,602 INFO : leaving (1) step installpackages
2013-01-23T09:27:14 info: 09:27:14,603 INFO : moving (1) to step postinstallconfig
2013-01-23T09:27:14 debug: 09:27:14,604 DEBUG : postinstallconfig is a direct step
2013-01-23T09:27:14 info: 09:27:14,628 INFO : leaving (1) step postinstallconfig
2013-01-23T09:27:14 info: 09:27:14,628 INFO : moving (1) to step writeconfig
2013-01-23T09:27:14 debug: 09:27:14,629 DEBUG : writeconfig is a direct step
2013-01-23T09:27:14 info: 09:27:14,629 INFO : Writing main configuration
2013-01-23T09:27:14 warning: 09:27:14,638 WARNING : '/usr/sbin/authconfig' specified as full path
2013-01-23T09:27:14 err: 09:27:14,661 ERROR : Error running /usr/sbin/authconfig: No such file or directory
2013-01-23T09:27:14 err: 09:27:14,662 ERROR : Error running ['--update', '--nostart', '--enableshadow', '--passalgo=sha512']: Error running /usr/sbin/authconfig: No such file or directory
2013-01-23T09:27:14 warning: 09:27:14,665 WARNING : '/usr/sbin/lokkit' specified as full path
2013-01-23T09:27:14 err: 09:27:14,680 ERROR : Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 err: 09:27:14,681 ERROR : lokkit run failed: Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 warning: 09:27:14,681 WARNING : '/usr/sbin/lokkit' specified as full path
2013-01-23T09:27:14 err: 09:27:14,694 ERROR : Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 err: 09:27:14,695 ERROR : lokkit run failed: Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 info: 09:27:14,798 INFO : removing libuser.conf at /tmp/libuser.JtvFQd
2013-01-23T09:27:14 info: 09:27:14,799 INFO : created new libuser.conf at /tmp/libuser.JtvFQd with instPath="/mnt/sysimage"
2013-01-23T09:27:14 info: 09:27:14,821 INFO : leaving (1) step writeconfig
2013-01-23T09:27:14 info: 09:27:14,821 INFO : moving (1) to step firstboot
2013-01-23T09:27:14 debug: 09:27:14,821 DEBUG : firstboot is a direct step
2013-01-23T09:27:14 info: 09:27:14,821 INFO : leaving (1) step firstboot
2013-01-23T09:27:14 info: 09:27:14,822 INFO : moving (1) to step instbootloader
2013-01-23T09:27:14 debug: 09:27:14,822 DEBUG : instbootloader is a direct step
2013-01-23T09:27:14 info: *** FINISHED INSTALLING PACKAGES ***
2013-01-23T09:27:15 warning: 09:27:14,989 WARNING : '/sbin/grub-install' specified as full path
2013-01-23T09:27:15 warning: 09:27:15,038 WARNING : '/sbin/grub' specified as full path
2013-01-23T09:27:17 info: 09:27:17,176 INFO : leaving (1) step instbootloader
2013-01-23T09:27:17 info: 09:27:17,177 INFO : moving (1) to step reipl
2013-01-23T09:27:17 debug: 09:27:17,177 DEBUG : reipl is a direct step
2013-01-23T09:27:17 info: 09:27:17,177 INFO : leaving (1) step reipl
2013-01-23T09:27:17 info: 09:27:17,177 INFO : moving (1) to step writeksconfig
2013-01-23T09:27:17 debug: 09:27:17,177 DEBUG : writeksconfig is a direct step
2013-01-23T09:27:17 info: 09:27:17,177 INFO : Writing autokickstart file
2013-01-23T09:27:17 info: 09:27:17,183 INFO : leaving (1) step writeksconfig
2013-01-23T09:27:17 info: 09:27:17,183 INFO : moving (1) to step setfilecon
2013-01-23T09:27:17 debug: 09:27:17,183 DEBUG : setfilecon is a direct step
2013-01-23T09:27:17 info: 09:27:17,184 INFO : setting SELinux contexts for anaconda created files
2013-01-23T09:27:19 info: 09:27:18,940 INFO : leaving (1) step setfilecon
2013-01-23T09:27:19 info: 09:27:18,940 INFO : moving (1) to step copylogs
2013-01-23T09:27:19 debug: 09:27:18,941 DEBUG : copylogs is a direct step
2013-01-23T09:27:19 info: 09:27:18,941 INFO : Copying anaconda logs
2013-01-23T09:27:19 info: 09:27:18,943 INFO : leaving (1) step copylogs
2013-01-23T09:27:19 info: 09:27:18,943 INFO : moving (1) to step methodcomplete
2013-01-23T09:27:19 debug: 09:27:18,943 DEBUG : methodcomplete is a direct step
2013-01-23T09:27:19 info: 09:27:18,943 INFO : leaving (1) step methodcomplete
2013-01-23T09:27:19 info: 09:27:18,943 INFO : moving (1) to step postscripts
2013-01-23T09:27:19 debug: 09:27:18,944 DEBUG : postscripts is a direct step
2013-01-23T09:27:19 info: 09:27:18,944 INFO : Running kickstart %%post script(s)
2013-01-23T09:27:19 warning: 09:27:18,946 WARNING : '/bin/sh' specified as full path
2013-01-23T09:28:30 info: 09:28:30,453 INFO : All kickstart %%post script(s) have been run
2013-01-23T09:28:30 info: 09:28:30,454 INFO : leaving (1) step postscripts
2013-01-23T09:28:30 info: 09:28:30,454 INFO : moving (1) to step dopostaction
2013-01-23T09:28:30 debug: 09:28:30,455 DEBUG : dopostaction is a direct step
2013-01-23T09:28:30 info: 09:28:30,455 INFO : leaving (1) step dopostaction

View File

@ -0,0 +1,22 @@
# This is example config file for Astute. Your config file should be placed
# to /opt/astute/astute.conf. You can check default values in config.rb file.
---
# MC_RETRIES is used in mclient.rb file.
# MClient tries MC_RETRIES times to call MCagent before failure.
MC_RETRIES: 5
# PUPPET_TIMEOUT is used in puppetd.rb file.
# Maximum time (in seconds) Astute waits for the whole deployment.
PUPPET_TIMEOUT: 3600
# PUPPET_DEPLOY_INTERVAL is used in puppetd.rb file.
# Astute sleeps for PUPPET_DEPLOY_INTERVAL seconds, then check Puppet agents
# statuses again.
PUPPET_DEPLOY_INTERVAL: 2
# PUPPET_FADE_TIMEOUT is used in puppetd.rb file.
# After the Puppet agent has finished its real work it spends some time on a
# graceful exit. PUPPET_FADE_TIMEOUT is how long (in seconds) Astute waits
# for Puppet to exit after the real work has finished.
PUPPET_FADE_TIMEOUT: 60
# PUPPET_FADE_INTERVAL is used in puppetd.rb file.
# Retry every PUPPET_FADE_INTERVAL seconds to check puppet state if it was
# in 'running' state.
PUPPET_FADE_INTERVAL: 1

View File

@ -0,0 +1,73 @@
# This is example environment configuration file for Astute.
---
# `task_uuid' is used for logging purposes only. You can use it to tag log
# messages which relate to different Astute runs.
task_uuid: deployment_task
# `attributes' section describe attributes of OpenStack installation.
attributes:
# `deployment_mode' shows what type of installation you choose.
# Can be:
# `singlenode_compute' - means only one node will be deployed. It will
# contain Controller and Compute components of OpenStack.
# `multinode_compute' - means one Controller node and some Compute nodes
# will be deployed.
# `ha_compute' - means at least three Controller nodes and some Compute
# nodes will be deployed in High Availability mode of Controller
# components.
# In the last two cases Astute first deploys Controller components,
# after that it deploys Compute components, and finally deploys any other
# components if they exist.
deployment_mode: multinode_compute
# `deployment_engine' - shows how you will handle attributes for Puppet
# manifests.
# `simplepuppet' - means you should manually set up all necessary attributes
# in your site.pp file. This engine simply runs the Puppet agent on the
# client's side. In this case the Puppet agent uses attributes from site.pp.
# `nailyfact' - means it automatically calculates some necessary attributes,
# e.g. the `controller_node_public' address. It also stores the necessary
# attributes on the node's side. In this case the Puppet agent uses these
# attributes via `facts'.
deployment_engine: simplepuppet
# All other attributes are optional and make sense for the `nailyfact'
# deployment engine only.
glance:
db_password: glance
user_password: glance
mysql:
root_password: nova
keystone:
db_password: admin
admin_tenant: admin
admin_token: admin
rabbit:
password: nova
user: nova
nova:
db_password: nova
user_password: nova
auto_assign_floating_ip: false
fixed_network_range: 10.0.0.0/24
storage_network_range: 172.16.0.0/24
floating_network_range: 240.0.0.0/24
management_network_range: 192.168.0.0/24
public_network_range: 240.0.1.0/24
# `nodes' section obviously describe your nodes.
nodes:
# `role' attribute can be:
# `controller' - node should include Controller component of OpenStack.
# `compute' - node should include Compute component of OpenStack.
# other values - any other components.
# Nodes with different roles should be deployed in determinate order.
# See `deployment_mode' for explanations.
- role: controller
# `uid' is unique identifier of node. Can be any string. It's used to call
# particular node via MCollective.
uid: devnailgun1.mirantis.com
# All other node attributes are optional and make sense for the `nailyfact'
# deployment engine only.
- role: compute
uid: devnailgun2.mirantis.com

View File

@ -0,0 +1,12 @@
---
nodes:
- status: provisioned
role: controller
uid: devnailgun.mirantis.com
- status: provisioned
role: compute
uid: devnailgun.mirantis.com
attributes:
deployment_mode: multinode_compute
deployment_engine: simplepuppet
task_uuid: deployment_task

View File

@ -0,0 +1,85 @@
---
nodes:
- status: provisioned
network_data:
- gateway: 192.168.0.1
name: management
netmask: 255.255.255.0
vlan: 102
ip: 192.168.0.2/24
dev: eth0
brd: 192.168.0.255
- gateway: 240.0.1.1
name: public
netmask: 255.255.255.0
vlan: 101
ip: 240.0.1.2/24
dev: eth0
brd: 240.0.1.255
- name: floating
vlan: 120
dev: eth0
- name: fixed
vlan: 103
dev: eth0
- name: storage
vlan: 104
dev: eth0
role: controller
ip: 10.20.0.200
uid: devnailgun.mirantis.com
mac: 52:54:00:0E:B8:F5
- status: provisioned
network_data:
- gateway: 192.168.0.1
name: management
netmask: 255.255.255.0
vlan: 102
ip: 192.168.0.3/24
dev: eth0
brd: 192.168.0.255
- gateway: 240.0.1.1
name: public
netmask: 255.255.255.0
vlan: 101
ip: 240.0.1.3/24
dev: eth0
brd: 240.0.1.255
- name: floating
vlan: 120
dev: eth0
- name: fixed
vlan: 103
dev: eth0
- name: storage
vlan: 104
dev: eth0
role: compute
ip: 10.20.0.221
uid: devnailgun.mirantis.com
mac: 52:54:00:50:91:DD
attributes:
deployment_mode: multinode_compute
deployment_engine: nailyfact
glance:
db_password: glance
user_password: glance
mysql:
root_password: nova
keystone:
db_password: admin
admin_tenant: admin
admin_token: admin
rabbit:
password: nova
user: nova
nova:
db_password: nova
user_password: nova
auto_assign_floating_ip: false
fixed_network_range: 10.0.0.0/24
storage_network_range: 172.16.0.0/24
floating_network_range: 240.0.0.0/24
management_network_range: 192.168.0.0/24
public_network_range: 240.0.1.0/24
task_uuid: deployment_task

44
astute/lib/astute.rb Normal file
View File

@ -0,0 +1,44 @@
require 'rubygems'
require 'json'
require 'logger'
require 'astute/config'
require 'astute/logparser'
require 'astute/orchestrator'
require 'astute/metadata'
require 'astute/deployment_engine'
require 'astute/network'
require 'astute/puppetd'
require 'astute/rpuppet'
require 'astute/deployment_engine/simple_puppet'
require 'astute/deployment_engine/nailyfact'
module Astute
  autoload 'Context', 'astute/context'
  autoload 'MClient', 'astute/mclient'
  autoload 'ProxyReporter', 'astute/reporter'
  autoload 'NodesRemover', 'astute/nodes_remover'
  autoload 'Node', 'astute/node'
  autoload 'NodesHash', 'astute/node'
  LogParser.autoload :ParseDeployLogs, 'astute/logparser/deployment'
  LogParser.autoload :ParseProvisionLogs, 'astute/logparser/provision'
  LogParser.autoload :Patterns, 'astute/logparser/parser_patterns'

  # Process-wide logger; lazily opens /var/log/astute.log on first access.
  # The formatter rewrites Ruby severities into syslog-style level names
  # ("warning", "err", "crit") so Astute's own output uses the same level
  # vocabulary as the remote logs it parses.
  def self.logger
    @logger ||= Logger.new('/var/log/astute.log')
    @logger.formatter = proc {|severity, datetime, progname, msg|
      severity_map = {'DEBUG' => 'debug', 'INFO' => 'info', 'WARN' => 'warning',
                      'ERROR' => 'err', 'FATAL' => 'crit'}
      "#{datetime.strftime("%Y-%m-%dT%H:%M:%S")} #{severity_map[severity]}: #{msg}\n"
    }
    @logger
  end

  # Lets embedders (e.g. bin/astute) substitute their own logger.
  def self.logger=(logger)
    @logger = logger
  end

  # Apply operator overrides from the optional config file at load time.
  # File.exist? replaces the deprecated File.exists? alias (the alias was
  # removed in Ruby 3.2); behaviour is identical.
  config_file = '/opt/astute/astute.conf'
  Astute.config.update(YAML.load(File.read(config_file))) if File.exist?(config_file)
end

View File

@ -0,0 +1,49 @@
require 'symboltable'
require 'singleton'
module Astute
  # Base error for configuration problems.
  class ConfigError < StandardError; end

  # Raised when an unrecognised option name is used.
  class UnknownOptionError < ConfigError
    attr_reader :name

    def initialize(name)
      super("Unknown config option #{name}")
      @name = name
    end
  end

  # Singleton holder for the mutable configuration table.
  class MyConfig
    include Singleton
    attr_reader :configtable

    def initialize
      @configtable = SymbolTable.new
    end
  end

  # Raised when a configuration file cannot be parsed.
  class ParseError < ConfigError
    attr_reader :line

    def initialize(message, line)
      super(message)
      @line = line
    end
  end

  # Global accessor: lazily seeds the singleton table with the defaults
  # on first use, then keeps returning the same (mutable) table.
  def self.config
    table = MyConfig.instance.configtable
    table.update(default_config) if table.empty?
    table
  end

  # Built-in defaults; each value may be overridden via /opt/astute/astute.conf.
  def self.default_config
    {
      :PUPPET_TIMEOUT         => 60 * 60, # maximum time to wait for the whole deployment
      :PUPPET_DEPLOY_INTERVAL => 2,       # sleep for ## sec, then check puppet status again
      :PUPPET_FADE_TIMEOUT    => 60,      # how long puppet may take to exit after dumping last_run_summary
      :MC_RETRIES             => 5,       # MClient tries to call mcagent this many times before failure
      :MC_RETRY_INTERVAL      => 1,       # MClient sleeps for ## sec between retries
      :PUPPET_FADE_INTERVAL   => 1        # retry every ## seconds to check puppet state if it was running
    }
  end
end

View File

@ -0,0 +1,11 @@
module Astute
  # Lightweight value object that threads per-run state (task id, reporter,
  # optional deployment log parser) through the deployment machinery.
  class Context
    attr_accessor :task_id, :reporter, :deploy_log_parser

    # task_id           - identifier used to tag reports and log messages
    # reporter          - object that receives progress reports
    # deploy_log_parser - optional parser used to estimate node progress
    def initialize(task_id, reporter, deploy_log_parser = nil)
      @task_id, @reporter, @deploy_log_parser = task_id, reporter, deploy_log_parser
    end
  end
end

View File

@ -0,0 +1,177 @@
require 'json'
require 'timeout'
module Astute
  # Abstract base class for deployment engines. Subclasses (NailyFact,
  # SimplePuppet) implement deploy_piece and any attribute preparation;
  # this class owns the deployment-mode dispatch and shared helpers.
  class DeploymentEngine
    # context - Astute::Context carrying task_id, reporter and log parser.
    def initialize(context)
      # Guard against direct instantiation: only subclasses are usable.
      if self.class.superclass.name == 'Object'
        raise "Instantiation of this superclass is not allowed. Please subclass from #{self.class.name}."
      end
      @ctx = context
    end

    # Entry point: dispatches to deploy_<deployment_mode>.
    # This may be overridden in subclasses.
    def deploy(nodes, attrs)
      attrs['deployment_mode'] ||= 'multinode_compute' # simple multinode deployment is the default
      @ctx.deploy_log_parser.deploy_type = attrs['deployment_mode']
      Astute.logger.info "Deployment mode #{attrs['deployment_mode']}"
      self.send("deploy_#{attrs['deployment_mode']}", nodes, attrs)
    end

    # Turns an unknown deployment mode into a clear error instead of a
    # bare NoMethodError.
    def method_missing(method, *args)
      Astute.logger.error "Method #{method} is not implemented for #{self.class}, raising exception."
      raise "Method #{method} is not implemented for #{self.class}"
    end

    # Mixes controller addresses into attrs for a single-node installation.
    def attrs_singlenode_compute(nodes, attrs)
      ctrl_management_ip = nodes[0]['network_data'].find {|nd| nd['name'] == 'management'}['ip']
      ctrl_public_ip = nodes[0]['network_data'].find {|nd| nd['name'] == 'public'}['ip']
      # IPs arrive in CIDR form ("10.0.0.2/24"); strip the prefix length.
      attrs['controller_node_address'] = ctrl_management_ip.split('/')[0]
      attrs['controller_node_public'] = ctrl_public_ip.split('/')[0]
      attrs
    end

    def deploy_singlenode_compute(nodes, attrs)
      # TODO(mihgen) some real stuff is needed
      Astute.logger.info "Starting deployment of single node OpenStack"
      deploy_piece(nodes, attrs)
    end

    # we mix all attrs and prepare them for Puppet
    # Works for multinode_compute deployment mode
    def attrs_multinode_compute(nodes, attrs)
      ctrl_nodes = nodes.select {|n| n['role'] == 'controller'}
      # TODO(mihgen): we should report error back if there are not enough metadata passed
      ctrl_management_ips = []
      ctrl_public_ips = []
      ctrl_nodes.each do |n|
        ctrl_management_ips << n['network_data'].find {|nd| nd['name'] == 'management'}['ip']
        ctrl_public_ips << n['network_data'].find {|nd| nd['name'] == 'public'}['ip']
      end
      # Only the first controller's addresses are exposed to Puppet.
      attrs['controller_node_address'] = ctrl_management_ips[0].split('/')[0]
      attrs['controller_node_public'] = ctrl_public_ips[0].split('/')[0]
      attrs
    end

    # This method is called by Ruby metaprogramming magic from deploy method
    # It should not contain any magic with attributes, and should not directly run any type of MC plugins
    # It does only support of deployment sequence. See deploy_piece implementation in subclasses.
    def deploy_multinode_compute(nodes, attrs)
      ctrl_nodes = nodes.select {|n| n['role'] == 'controller'}
      Astute.logger.info "Starting deployment of controllers"
      deploy_piece(ctrl_nodes, attrs)

      compute_nodes = nodes.select {|n| n['role'] == 'compute'}
      Astute.logger.info "Starting deployment of computes"
      deploy_piece(compute_nodes, attrs)

      other_nodes = nodes - ctrl_nodes - compute_nodes
      Astute.logger.info "Starting deployment of other nodes"
      deploy_piece(other_nodes, attrs)
      return
    end

    # Prepares HA-specific attrs: per-controller hostname => address maps.
    def attrs_ha_compute(nodes, attrs)
      # TODO(mihgen): we should report error back if there are not enough metadata passed
      ctrl_nodes = nodes.select {|n| n['role'] == 'controller'}
      ctrl_manag_addrs = {}
      ctrl_public_addrs = {}
      ctrl_nodes.each do |n|
        # current puppet modules require `hostname -s`
        hostname = n['fqdn'].split(/\./)[0]
        ctrl_manag_addrs.merge!({hostname =>
          n['network_data'].find {|nd| nd['name'] == 'management'}['ip'].split(/\//)[0]})
        ctrl_public_addrs.merge!({hostname =>
          n['network_data'].find {|nd| nd['name'] == 'public'}['ip'].split(/\//)[0]})
      end
      attrs['ctrl_hostnames'] = ctrl_nodes.map {|n| n['fqdn'].split(/\./)[0]}
      attrs['master_hostname'] = ctrl_nodes[0]['fqdn'].split(/\./)[0]
      attrs['ctrl_public_addresses'] = ctrl_public_addrs
      attrs['ctrl_management_addresses'] = ctrl_manag_addrs
      attrs
    end

    # HA deployment: controllers are converged through several Puppet passes
    # (failures tolerated on the early passes), then computes and the rest.
    def deploy_ha_compute(nodes, attrs)
      ctrl_nodes = nodes.select {|n| n['role'] == 'controller'}
      Astute.logger.info "Starting deployment of all controllers one by one, ignoring failure"
      ctrl_nodes.each {|n| deploy_piece([n], attrs, 0, false)}

      Astute.logger.info "Starting deployment of all controllers, ignoring failure"
      deploy_piece(ctrl_nodes, attrs, 0, false)

      Astute.logger.info "Starting deployment of 1st controller again, ignoring failure"
      deploy_piece([ctrl_nodes[0]], attrs, 0, false)

      Astute.logger.info "Starting deployment of all controllers, retries=0"
      deploy_piece(ctrl_nodes, attrs, 0, false)

      retries = 3
      Astute.logger.info "Starting deployment of all controllers until it completes, "\
                         "allowed retries: #{retries}"
      deploy_piece(ctrl_nodes, attrs, retries)

      compute_nodes = nodes.select {|n| n['role'] == 'compute'}
      Astute.logger.info "Starting deployment of computes"
      deploy_piece(compute_nodes, attrs)

      other_nodes = nodes - ctrl_nodes - compute_nodes
      Astute.logger.info "Starting deployment of other nodes"
      deploy_piece(other_nodes, attrs)
      return
    end

    private

    # Builds the standard reporter payload for a set of nodes.
    def nodes_status(nodes, status, data_to_merge)
      {'nodes' => nodes.map { |n| {'uid' => n['uid'], 'status' => status}.merge(data_to_merge) }}
    end

    # Returns false (and logs) when there is nothing to deploy.
    def validate_nodes(nodes)
      if nodes.empty?
        Astute.logger.info "#{@ctx.task_id}: Nodes to deploy are not provided. Do nothing."
        return false
      end
      return true
    end

    # Converts nailgun-style network_data into a hash of per-interface
    # settings suitable for the Puppet network manifests.
    # Example of return:
    # {"eth0":{"ensure":"present","bootproto":"dhcp"},"lo":{},
    #  "eth0.102":{"ipaddr":"10.20.20.20","ensure":"present","vlan":"yes",
    #  "netmask":"255.255.255.0","broadcast":"10.20.20.255","bootproto":"static"}}
    def calculate_networks(data)
      interfaces = {}
      data ||= []
      Astute.logger.info "calculate_networks function was provided with #{data.size} interfaces"
      data.each do |iface|
        Astute.logger.debug "Calculating network for #{iface.inspect}"
        if iface['vlan'] && iface['vlan'] != 0
          # Tagged network: configure a VLAN sub-interface like "eth0.102".
          name = [iface['dev'], iface['vlan']].join('.')
          interfaces[name] = {"vlan" => "yes"}
        else
          name = iface['dev']
          interfaces[name] = {}
        end
        interfaces[name]['bootproto'] = 'none'
        if iface['ip']
          ipaddr = iface['ip'].split('/')[0]
          interfaces[name]['ipaddr'] = ipaddr
          interfaces[name]['netmask'] = iface['netmask'] #=IPAddr.new('255.255.255.255').mask(ipmask[1]).to_s
          interfaces[name]['bootproto'] = 'static'
          if iface['brd']
            interfaces[name]['broadcast'] = iface['brd']
          end
        end
        # Only the public network carries the default gateway.
        if iface['gateway'] && iface['name'] =~ /^public$/i
          interfaces[name]['gateway'] = iface['gateway']
        end
        interfaces[name]['ensure'] = 'present'
        Astute.logger.debug "Calculated network for interface: #{name}, data: #{interfaces[name].inspect}"
      end
      interfaces['lo'] = {} unless interfaces.has_key?('lo')
      # eth0 falls back to DHCP so the node stays reachable by default.
      interfaces['eth0'] = {'bootproto' => 'dhcp',
                            'ensure' => 'present'} unless interfaces.has_key?('eth0')
      interfaces
    end
  end
end

View File

@ -0,0 +1,58 @@
class Astute::DeploymentEngine::NailyFact < Astute::DeploymentEngine
  # Deployment engine that computes per-node metadata and publishes it as
  # Puppet facts (via MCollective) before triggering the agents.
  def deploy(nodes, attrs)
    # attrs_<mode> preparation helpers are defined in the superclass.
    attrs_for_mode = self.send("attrs_#{attrs['deployment_mode']}", nodes, attrs)
    super(nodes, attrs_for_mode)
  end

  # Builds the fact hash for a single node and publishes it.
  def create_facts(node, attrs)
    metapublisher = Astute::Metadata.method(:publish_facts)
    # calculate_networks method is common and you can find it in superclass
    # if node['network_data'] is undefined, we use empty list because we later try to iterate over it
    # otherwise we will get KeyError
    node_network_data = node['network_data'].nil? ? [] : node['network_data']
    network_data_puppet = calculate_networks(node_network_data)
    metadata = {
      'role' => node['role'],
      'uid' => node['uid'],
      'network_data' => network_data_puppet.to_json
    }
    attrs.each do |k, v|
      if v.is_a? String
        metadata[k] = v
      else
        # And it's the problem on the puppet side now to decode json
        metadata[k] = v.to_json
      end
    end
    # Let's calculate interface settings we need for OpenStack:
    node_network_data.each do |iface|
      device = if iface['vlan'] && iface['vlan'] > 0
        [iface['dev'], iface['vlan']].join('.')
      else
        iface['dev']
      end
      metadata["#{iface['name']}_interface"] = device
    end
    # internal_address is required for HA..
    # Use the nil-guarded list here as well: the original indexed
    # node['network_data'] directly and crashed with NoMethodError when
    # network_data was missing or contained no management network.
    management = node_network_data.find { |nd| nd['name'] == 'management' }
    metadata['internal_address'] = management['ip'].split('/')[0] if management
    metapublisher.call(@ctx, node['uid'], metadata)
  end

  # Publishes facts for every node, then runs puppet on them.
  def deploy_piece(nodes, attrs, retries=2, change_node_status=true)
    return false unless validate_nodes(nodes)
    @ctx.reporter.report nodes_status(nodes, 'deploying', {'progress' => 0})

    Astute.logger.info "#{@ctx.task_id}: Calculation of required attributes to pass, include netw.settings"
    nodes.each do |node|
      create_facts(node, attrs)
    end
    Astute.logger.info "#{@ctx.task_id}: All required attrs/metadata passed via facts extension. Starting deployment."

    Astute::PuppetdDeployer.deploy(@ctx, nodes, retries, change_node_status)
    nodes_roles = nodes.map { |n| { n['uid'] => n['role'] } }
    Astute.logger.info "#{@ctx.task_id}: Finished deployment of nodes => roles: #{nodes_roles.inspect}"
  end
end

View File

@ -0,0 +1,11 @@
class Astute::DeploymentEngine::SimplePuppet < Astute::DeploymentEngine
  # Trivial engine: it assumes the user has prepared site.pp with all
  # required module parameters, so it only triggers a Puppet run per node.
  def deploy_piece(nodes, *args)
    return false unless validate_nodes(nodes)
    @ctx.reporter.report nodes_status(nodes, 'deploying', {'progress' => 0})
    Astute::PuppetdDeployer.deploy(@ctx, nodes)
    deployed = nodes.map { |node| { node['uid'] => node['role'] } }
    Astute.logger.info "#{@ctx.task_id}: Finished deployment of nodes => roles: #{deployed.inspect}"
  end
end

View File

@ -0,0 +1,132 @@
module Astute
  module LogParser
    # Bytes of log read per chunk while scanning the file backwards.
    LOG_PORTION = 10000
    # Default values. Can be overrided by pattern_spec.
    # E.g. pattern_spec = {'separator' => 'new_separator', ...}
    PATH_PREFIX = '/var/log/remote/'
    SEPARATOR = "SEPARATOR\n"

    # Null-object parser used when log-based progress tracking is disabled:
    # it swallows every call and always reports an empty progress list.
    class NoParsing
      def initialize(*args)
      end

      def method_missing(*args)
        # We just eat the call if we don't want to deal with logs
      end

      def progress_calculate(*args)
        []
      end
    end

    # Estimates per-node deployment progress by scanning remote log files.
    # Concrete subclasses must implement `calculate(fo, pattern_spec)`.
    class ParseNodeLogs
      attr_reader :pattern_spec

      def initialize(pattern_spec)
        @nodes_states = {}
        @pattern_spec = pattern_spec
        @pattern_spec['path_prefix'] ||= PATH_PREFIX.to_s
        @pattern_spec['separator'] ||= SEPARATOR.to_s
      end

      # Returns [{'uid' => ..., 'progress' => 0..100}, ...] for each uid in
      # uids_to_calc; `nodes` supplies the ip used to locate the log file.
      def progress_calculate(uids_to_calc, nodes)
        nodes_progress = []
        uids_to_calc.each do |uid|
          node = nodes.select {|n| n['uid'] == uid}[0] # NOTE: use nodes hash
          node_pattern_spec = @nodes_states[uid]
          unless node_pattern_spec
            # Deep copy keeps per-node parser state (file offsets, partial
            # matches) isolated from the shared template spec.
            node_pattern_spec = Marshal.load(Marshal.dump(@pattern_spec))
            @nodes_states[uid] = node_pattern_spec
          end
          path = "#{@pattern_spec['path_prefix']}#{node['ip']}/#{@pattern_spec['filename']}"
          begin
            progress = (get_log_progress(path, node_pattern_spec)*100).to_i # Return percent of progress
          rescue => e
            # Was `rescue Exception`, which also swallowed signals and
            # SystemExit; StandardError covers parsing failures.
            Astute.logger.warn "Some error occurred when calculate progress for node '#{uid}': #{e.message}, trace: #{e.backtrace.inspect}"
            progress = 0
          end
          nodes_progress << {
            'uid' => uid,
            'progress' => progress
          }
        end
        nodes_progress
      end

      # Writes a separator line into each node's log so later parsing can
      # ignore output from previous runs; also resets per-node state.
      def prepare(nodes)
        @nodes_states = {}
        nodes.each do |node|
          path = "#{@pattern_spec['path_prefix']}#{node['ip']}/#{@pattern_spec['filename']}"
          File.open(path, 'a') {|fo| fo.write @pattern_spec['separator'] } if File.writable?(path)
        end
      end

      # Replaces the pattern spec, applying the same normalisation as
      # initialize and dropping accumulated per-node state.
      # (The original body called the misspelled `initialise`, which
      # raised NameError on every use.)
      def pattern_spec=(pattern_spec)
        @nodes_states = {}
        @pattern_spec = pattern_spec
        @pattern_spec['path_prefix'] ||= PATH_PREFIX.to_s
        @pattern_spec['separator'] ||= SEPARATOR.to_s
      end

      private

      # Returns progress in [0.0, 1.0] for one log file, 0 when the file
      # is unreadable or the pattern spec is missing.
      def get_log_progress(path, node_pattern_spec)
        unless File.readable?(path)
          Astute.logger.debug "Can't read file with logs: #{path}"
          return 0
        end
        if node_pattern_spec.nil?
          Astute.logger.warn "Can't parse logs. Pattern_spec is empty."
          return 0
        end
        progress = nil
        File.open(path) do |fo|
          # Try to find well-known ends of log.
          endlog = find_endlog_patterns(fo, node_pattern_spec)
          return endlog if endlog
          # Start reading from end of file.
          fo.pos = fo.stat.size
          # Method 'calculate' should be defined at child classes.
          progress = calculate(fo, node_pattern_spec)
          node_pattern_spec['file_pos'] = fo.pos
        end
        unless progress
          Astute.logger.warn("Wrong pattern #{node_pattern_spec.inspect} defined for calculating progress via logs.")
          return 0
        end
        progress
      end

      # Checks the last 100 bytes of the log for a terminal pattern.
      # Pattern example:
      # pattern_spec = {...,
      # 'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
      # }
      def find_endlog_patterns(fo, pattern_spec)
        endlog_patterns = pattern_spec['endlog_patterns']
        return nil unless endlog_patterns
        fo.pos = fo.stat.size
        chunk = get_chunk(fo, 100)
        return nil unless chunk
        endlog_patterns.each do |pattern|
          marker = pattern['pattern']
          # String#end_with? raises TypeError for Regexp markers (the
          # documented form above), so regexps are matched against the
          # tail chunk instead.
          matched = marker.is_a?(Regexp) ? chunk =~ marker : chunk.end_with?(marker)
          return pattern['progress'] if matched
        end
        nil
      end

      # Reads `size` bytes ending at the current position (or everything
      # from `pos` when given), restoring fo.pos to the chunk start so the
      # caller can keep scanning backwards.
      def get_chunk(fo, size=nil, pos=nil)
        if pos
          fo.pos = pos
          return fo.read
        end
        size = LOG_PORTION unless size
        return nil if fo.pos == 0
        size = fo.pos if fo.pos < size
        next_pos = fo.pos - size
        fo.pos = next_pos
        block = fo.read(size)
        fo.pos = next_pos
        block
      end
    end
  end
end

View File

@ -0,0 +1,156 @@
module Astute
  module LogParser
    # Progress parser for puppet deployment logs. Picks a pattern spec per
    # node from the deployment type and the node's role.
    class ParseDeployLogs < ParseNodeLogs
      attr_reader :deploy_type

      def initialize(deploy_type='multinode_compute')
        @deploy_type = deploy_type
        pattern_spec = Patterns::get_default_pattern(
          "puppet-log-components-list-#{@deploy_type}-controller")
        super(pattern_spec)
      end

      # Change the deployment type and drop per-node state so the matching
      # pattern specs are rebuilt on the next progress_calculate call.
      def deploy_type= (deploy_type)
        @deploy_type = deploy_type
        @nodes_states = {}
      end

      def progress_calculate(uids_to_calc, nodes)
        # Just create correct pattern for each node and then call parent method.
        uids_to_calc.each do |uid|
          node = nodes.select {|n| n['uid'] == uid}[0]
          unless @nodes_states[uid]
            pattern_spec = Patterns::get_default_pattern(
              "puppet-log-components-list-#{@deploy_type}-#{node['role']}")
            pattern_spec['path_prefix'] ||= PATH_PREFIX.to_s
            pattern_spec['separator'] ||= SEPARATOR.to_s
            @nodes_states[uid] = pattern_spec
          end
        end
        super(uids_to_calc, nodes)
      end

      private

      # Dispatch to the concrete algorithm selected by the spec's 'type'.
      # Returns nil for unknown types (caller treats nil as a bad pattern).
      def calculate(fo, node_pattern_spec)
        case node_pattern_spec['type']
        when 'count-lines'
          progress = simple_line_counter(fo, node_pattern_spec)
        when 'components-list'
          progress = component_parser(fo, node_pattern_spec)
        end
        return progress
      end

      # Estimate progress as (lines since last separator) / expected_line_number,
      # reading the file backwards chunk by chunk.
      # Pattern specification example:
      # pattern_spec = {'type' => 'count-lines',
      #   'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
      #   'expected_line_number' => 500}
      def simple_line_counter(fo, pattern_spec)
        # Use custom separator if defined.
        separator = pattern_spec['separator']
        counter = 0
        end_of_scope = false
        previous_subchunk = ''
        until end_of_scope
          chunk = get_chunk(fo, pattern_spec['chunk_size'])
          break unless chunk
          # Trying to find separator on border between chunks.
          subchunk = chunk.slice((1-separator.size)..-1)
          # End of file reached. Exit from cycle.
          end_of_scope = true unless subchunk
          if subchunk and (subchunk + previous_subchunk).include?(separator)
            # Separator found on border between chunks. Exit from cycle.
            # Fix: was 'continue', which is not Ruby and raised NoMethodError.
            # NOTE(review): previous_subchunk is never reassigned in this
            # loop, so this border check only ever sees '' -- confirm intent.
            end_of_scope = true
            next
          end
          pos = chunk.rindex(separator)
          if pos
            end_of_scope = true
            chunk = chunk.slice((pos + separator.size)..-1)
          end
          counter += chunk.count("\n")
        end
        number = pattern_spec['expected_line_number']
        unless number
          Astute.logger.warn("Wrong pattern #{pattern_spec.inspect} defined for calculating progress via log.")
          return 0
        end
        progress = counter.to_f / number
        progress = 1 if progress > 1
        return progress
      end

      # Scan log lines (newest first) for per-component milestone patterns and
      # combine the best match of each component into a weighted total in 0..1.
      # Pattern specification example:
      # pattern_spec = {'type' => 'components-list',
      #   'chunk_size' => 40000,
      #   'components_list' => [
      #     {'name' => 'Horizon', 'weight' => 10, 'patterns' => [
      #        {'pattern' => '/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created', 'progress' => 0.1},
      #        {'pattern' => '/Stage[main]/Horizon/File_line[horizon_redirect_rule]/ensure) created', 'progress' => 0.3},
      #        {'pattern' => '/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/group)', 'progress' => 0.7},
      #        {'pattern' => '/Stage[main]/Horizon/Service[$::horizon::params::http_service]/ensure)'\
      #                      ' ensure changed \'stopped\' to \'running\'', 'progress' => 1},
      #        ]
      #     },
      #   ]
      # }
      def component_parser(fo, pattern_spec)
        # Use custom separator if defined.
        separator = pattern_spec['separator']
        components_list = pattern_spec['components_list']
        unless components_list
          Astute.logger.warn("Wrong pattern #{pattern_spec.inspect} defined for calculating progress via logs.")
          return 0
        end
        # Fix: the original wrote get_chunk(fo, pos=...), a Python-style call
        # that actually passed 'file_pos' as the *size* argument. Pass it as
        # the 'pos' parameter so we read from the saved offset to EOF.
        chunk = get_chunk(fo, nil, pattern_spec['file_pos'])
        return 0 unless chunk
        pos = chunk.rindex(separator)
        chunk = chunk.slice((pos + separator.size)..-1) if pos
        block = chunk.split("\n")
        # Update progress of each component.
        while block.any?
          string = block.pop
          components_list.each do |component|
            matched_pattern = nil
            component['patterns'].each do |pattern|
              if pattern['regexp']
                matched_pattern = pattern if string.match(pattern['pattern'])
              else
                matched_pattern = pattern if string.include?(pattern['pattern'])
              end
              break if matched_pattern
            end
            if matched_pattern and
                (not component['_progress'] or matched_pattern['progress'] > component['_progress'])
              component['_progress'] = matched_pattern['progress']
            end
          end
        end
        # Calculate integral progress.
        weighted_components = components_list.select{|n| n['weight']}
        weight_sum = 0.0
        if weighted_components.any?
          weighted_components.each{|n| weight_sum += n['weight']}
          # Scale so weighted and unweighted components share one scale.
          weight_sum = weight_sum * components_list.length / weighted_components.length
          raise "Total weight of weighted components equal to zero." if weight_sum == 0
        end
        nonweighted_delta = 1.0 / components_list.length
        progress = 0
        components_list.each do |component|
          component['_progress'] = 0.0 unless component['_progress']
          weight = component['weight']
          if weight
            progress += component['_progress'] * weight / weight_sum
          else
            progress += component['_progress'] * nonweighted_delta
          end
        end
        return progress
      end
    end
  end
end

View File

@ -0,0 +1,501 @@
module Astute
module LogParser
module Patterns
# Return a deep copy of the default pattern registered under +key+, so the
# caller may mutate it freely without touching the shared template.
def self.get_default_pattern(key)
  template = @default_patterns[key]
  Marshal.load(Marshal.dump(template))
end
# Names of all pattern specs that get_default_pattern can serve.
def self.list_default_patterns
  @default_patterns.keys
end
@default_patterns = {
'anaconda-log-supposed-time-baremetal' => # key for default baremetal provision pattern
{'type' => 'supposed-time',
'chunk_size' => 10000,
'date_format' => '%Y-%m-%dT%H:%M:%S',
'date_regexp' => '^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}',
'pattern_list' => [
{'pattern' => 'Running anaconda script', 'supposed_time' => 60},
{'pattern' => 'moving (1) to step enablefilesystems', 'supposed_time' => 3},
{'pattern' => "notifying kernel of 'change' event on device", 'hdd_size_multiplier' => 0.3},
{'pattern' => 'Preparing to install packages', 'supposed_time' => 9},
{'pattern' => 'Installing glibc-common-2.12', 'supposed_time' => 9},
{'pattern' => 'Installing bash-4.1.2', 'supposed_time' => 11},
{'pattern' => 'Installing coreutils-8.4-19', 'supposed_time' => 20},
{'pattern' => 'Installing centos-release-6-3', 'supposed_time' => 21},
{'pattern' => 'Installing attr-2.4.44', 'supposed_time' => 23},
{'pattern' => 'leaving (1) step installpackages', 'supposed_time' => 60},
{'pattern' => 'moving (1) to step postscripts', 'supposed_time' => 4},
{'pattern' => 'leaving (1) step postscripts', 'supposed_time' => 130},
{'pattern' => 'wait while node rebooting', 'supposed_time' => 20},
].reverse,
'filename' => 'install/anaconda.log'
},
'anaconda-log-supposed-time-kvm' => # key for default kvm provision pattern
{'type' => 'supposed-time',
'chunk_size' => 10000,
'date_format' => '%Y-%m-%dT%H:%M:%S',
'date_regexp' => '^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}',
'pattern_list' => [
{'pattern' => 'Running anaconda script', 'supposed_time' => 60},
{'pattern' => 'moving (1) to step enablefilesystems', 'supposed_time' => 3},
{'pattern' => "notifying kernel of 'change' event on device", 'hdd_size_multiplier' => 1.5},
{'pattern' => 'Preparing to install packages', 'supposed_time' => 12},
{'pattern' => 'Installing glibc-common-2.12', 'supposed_time' => 15},
{'pattern' => 'Installing bash-4.1.2', 'supposed_time' => 15},
{'pattern' => 'Installing coreutils-8.4-19', 'supposed_time' => 33},
{'pattern' => 'Installing centos-release-6-3', 'supposed_time' => 21},
{'pattern' => 'Installing attr-2.4.44', 'supposed_time' => 48},
{'pattern' => 'leaving (1) step installpackages', 'supposed_time' => 100},
{'pattern' => 'moving (1) to step postscripts', 'supposed_time' => 4},
{'pattern' => 'leaving (1) step postscripts', 'supposed_time' => 200},
{'pattern' => 'wait while node rebooting', 'supposed_time' => 20},
].reverse,
'filename' => 'install/anaconda.log'
},
'puppet-log-components-list-ha_compute-controller' => # key for default HA deploy pattern
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'components_list' => [
{'name' => 'Galera', 'weight' => 5, 'patterns' => [
{'pattern' => '/Stage[main]/Galera/File[/etc/mysql]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Galera/Package[galera]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Galera/Package[MySQL-client]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Galera/Package[MySQL-server]/ensure) created', 'progress' => 0.6},
{'pattern' => "/Stage[main]/Galera/Service[mysql-galera]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.8},
{'pattern' => '/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully', 'progress' => 0.9},
{'pattern' => '/Stage[main]/Galera::Galera_master_final_config/Exec'\
'[first-galera-node-final-config]/returns) executed successfully', 'progress' => 1},
]
},
{'name' => 'Glance', 'weight' => 5, 'patterns' => [
{'pattern' => '/Stage[main]/Glance/Package[glance]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database[glance]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/'\
'Database_user[glance@]/ensure) created', 'progress' => 0.7},
{'pattern' => '/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/'\
'admin_user]/value) value changed', 'progress' => 0.71},
{'pattern' => '/Stage[main]/Glance::Keystone::Auth/Keystone_endpoint[glance]/ensure) created', 'progress' => 0.8},
{'pattern' => "/Stage[main]/Glance::Registry/Service[glance-registry]/ensure)"\
" ensure changed 'stopped' to 'running'", 'progress' => 0.95},
{'pattern' => "/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed"\
" 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Haproxy', 'weight' => 5, 'patterns' => [
{'pattern' => '/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/'\
'concat/_etc_haproxy_haproxy.cfg]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/'\
'concat/_etc_haproxy_haproxy.cfg/fragments.concat.out]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/Exec[concat_/etc/haproxy/'\
'haproxy.cfg]/returns) executed successfully', 'progress' => 0.8},
{'pattern' => "/Stage[main]/Haproxy/Service[haproxy]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Horizon', 'weight' => 5, 'patterns' => [
{'pattern' => '/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Horizon/File[/etc/openstack-dashboard/'\
'local_settings]/content) content changed', 'progress' => 0.8},
{'pattern' => "/Stage[main]/Horizon/Service[\$::horizon::params::http_service]/"\
"ensure) ensure changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Keepalived', 'weight' => 1, 'patterns' => [
{'pattern' => '/Stage[main]/Keepalived::Install/Package[keepalived]/ensure) created', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/'\
'File[/etc/keepalived/keepalived.conf]/content) content changed', 'progress' => 0.6},
{'pattern' => "/Stage[main]/Keepalived::Service/Service[keepalived]/ensure) ensure"\
" changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Keystone', 'weight' => 1, 'patterns' => [
{'pattern' => '/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database[keystone]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Keystone/Package[keystone]/ensure) created', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created', 'progress' => 0.7},
{'pattern' => "/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.8},
{'pattern' => '/Stage[main]/Keystone::Roles::Admin/Keystone_user_role[admin@admin]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Memcached', 'weight' => 1, 'patterns' => [
{'pattern' => '/Stage[main]/Memcached/User[memcached]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Memcached/Package[memcached]/ensure) created', 'progress' => 0.4},
{'pattern' => "/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Rabbitmq', 'weight' => 1, 'patterns' => [
{'pattern' => '/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created', 'progress' => 0.3},
{'pattern' => "/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running", 'progress' => 0.7},
{'pattern' => '/Stage[main]/Rabbitmq::Server/Rabbitmq_user[guest]/ensure) removed', 'progress' => 1},
]
},
{'name' => 'Rsync/Xinetd', 'weight' => 1, 'patterns' => [
{'pattern' => '/Stage[main]/Xinetd/Package[xinetd]/ensure) created', 'progress' => 0.2},
{'pattern' => '(/Stage[main]/Xinetd/File[/etc/xinetd.conf]/content) content changed', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Rsync::Server/File[/etc/rsync.d]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/content) content changed', 'progress' => 1},
]
},
{'name' => 'Swift', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Swift::Xfs/Package[xfsprogs]/ensure) created', 'progress' => 0.01},
{'pattern' => '/Stage[main]/Swift/File[/etc/swift/swift.conf]/content) content changed', 'progress' => 0.05},
{'pattern' => '/Stage[main]/Swift/File[/home/swift]/ensure) created', 'progress' => 0.07},
{'pattern' => '/Stage[main]/Swift::Storage::All/File[/srv/node]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File'\
'[/etc/swift/account-server/]/ensure) created', 'progress' => 0.12},
{'pattern' => '/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Package'\
'[swift-object]/ensure) created', 'progress' => 0.15},
{'pattern' => "/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Service"\
"[swift-account]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.18},
{'pattern' => "/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Service"\
"[swift-object]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.2},
{'pattern' => '/Stage[main]/Swift::Keystone::Auth/Keystone_service[swift]/ensure) created', 'progress' => 0.23},
{'pattern' => '/Stage[main]/Swift::Keystone::Auth/Keystone_user_role[swift@services]/ensure) created', 'progress' => 0.28},
{'pattern' => '/Stage\[main\]/Swift::Storage::Container/Ring_container_device\[[0-9.:]+\]/ensure\) created',
'regexp' => true, 'progress' => 0.33},
{'pattern' => "/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/"\
"account-server/]/group) group changed 'root' to 'swift'", 'progress' => 0.36},
{'pattern' => '/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec'\
'[hours_passed_object]/returns) executed successfully', 'progress' => 0.39},
{'pattern' => '/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec'\
'[hours_passed_account]/returns) executed successfully', 'progress' => 0.42},
{'pattern' => '/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec'\
'[rebalance_account]/returns) executed successfully', 'progress' => 0.44},
{'pattern' => '/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec'\
'[hours_passed_container]/returns) executed successfully', 'progress' => 0.49},
{'pattern' => '/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec'\
'[rebalance_container]/returns) executed successfully', 'progress' => 0.52},
{'pattern' => '/Stage[main]/Swift::Proxy/Package[swift-proxy]/ensure) created', 'progress' => 0.55},
{'pattern' => '/Service[swift-container-replicator]/ensure) ensure changed \'stopped\'', 'progress' => 0.9},
{'pattern' => '/Service[swift-accaunt-replicator]/ensure) ensure changed \'stopped\'', 'progress' => 0.95},
{'pattern' => '/Service[swift-object-replicator]/ensure) ensure changed \'stopped\'', 'progress' => 1},
]
},
{'name' => 'Nova', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[parted]/ensure) created', 'progress' => 0.11},
{'pattern' => '/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created', 'progress' => 0.13},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created', 'progress' => 0.15},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created', 'progress' => 0.2},
{'pattern' => '(/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created', 'progress' => 0.25},
{'pattern' => '/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[nova@/]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database[nova]/ensure) created', 'progress' => 0.35},
{'pattern' => "/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_grant"\
"[nova@127.0.0.1/nova]/privileges) privileges changed '' to 'all'", 'progress' => 0.4},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package'\
'[nova-vncproxy]/ensure) created', 'progress' => 0.45},
{'pattern' => '/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_volume]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Nova::Keystone::Auth/Keystone_user_role[nova@services]/ensure) created', 'progress' => 0.55},
{'pattern' => '/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully', 'progress' => 0.7},
{'pattern' => "/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service"\
"[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.85},
{'pattern' => '/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network'\
'nova-vm-net]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Openstack', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Openstack::Firewall/File[iptables]/ensure) defined content as', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Openstack::Glance/Package[swift]/ensure) created', 'progress' => 0.15},
{'pattern' => '/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Openstack::Controller_ha/Package[socat]/ensure) created', 'progress' => 0.25},
{'pattern' => '/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/File[/srv/loopback-device]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created', 'progress' => 0.45},
{'pattern' => '/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully', 'progress' => 0.65},
{'pattern' => '/Stage[main]/Openstack::Swift::Proxy/Ring_object_device\[[0-9.:]+\]/ensure\) created',
'regexp' => true, 'progress' => 0.75},
{'pattern' => '/Stage[main]/Openstack::Swift::Proxy/Ring_container_device\[[0-9.:]+\]/ensure\) created',
'regexp' => true, 'progress' => 0.8},
{'pattern' => '/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]/ensure) created', 'progress' => 1},
]
},
]
},
'puppet-log-components-list-ha_compute-compute' =>
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'components_list' => [
{'name' => 'Keystone', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Mysql', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Nova', 'weight' => 5, 'patterns' => [
{'pattern' => '/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[parted]/ensure) created', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created', 'progress' => 0.28},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created', 'progress' => 0.32},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created', 'progress' => 0.35},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[nova@/]/ensure) created', 'progress' => 0.43},
{'pattern' => '/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed', 'progress' => 0.8},
{'pattern' => '/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully', 'progress' => 0.85},
{'pattern' => '/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network'\
'nova-vm-net]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Nova::Compute', 'weight' => 15, 'patterns' => [
{'pattern' => '/Stage[main]/Nova::Compute/Package[bridge-utils]/ensure) created', 'progress' => 0.15},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Exec[symlink-qemu-kvm]/returns) executed successfully', 'progress' => 0.25},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Package[libvirt]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Package[dnsmasq-utils]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/vncserver_listen]/ensure) created', 'progress' => 0.55},
{'pattern' => '/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Package[nova-compute]/ensure) created', 'progress' => 0.88},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Package[avahi]/ensure) created', 'progress' => 0.9},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Service[messagebus]/ensure) ensure changed', 'progress' => 0.93},
{'pattern' => '/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]/ensure) ensure changed', 'progress' => 0.97},
{'pattern' => '/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]) Triggered', 'progress' => 1},
]
},
{'name' => 'Openstack', 'weight' => 2, 'patterns' => [
{'pattern' => '/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/metadata_host]/ensure) created', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/memcached_servers]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Openstack::Compute/Augeas[sysconfig-libvirt]/returns) executed successfully', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/multi_host]/ensure) created', 'progress' => 0.8},
{'pattern' => '/Stage[main]/Openstack::Compute/Augeas[libvirt-conf]/returns) executed successfully', 'progress' => 1},
]
},
]
},
'puppet-log-components-list-singlenode_compute-controller' =>
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'components_list' => [
{'name' => 'Glance', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Glance/Package[glance]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database[glance]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/'\
'Database_user[glance@]/ensure) created', 'progress' => 0.7},
{'pattern' => '/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/'\
'admin_user]/value) value changed', 'progress' => 0.71},
{'pattern' => '/Stage[main]/Glance::Keystone::Auth/Keystone_endpoint[glance]/ensure) created', 'progress' => 0.8},
{'pattern' => "/Stage[main]/Glance::Registry/Service[glance-registry]/ensure)"\
" ensure changed 'stopped' to 'running'", 'progress' => 0.95},
{'pattern' => "/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed"\
" 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Horizon', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Horizon/File[/etc/openstack-dashboard/'\
'local_settings]/content) content changed', 'progress' => 0.8},
{'pattern' => "/Stage[main]/Horizon/Service[\$::horizon::params::http_service]/"\
"ensure) ensure changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Keystone', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database[keystone]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Keystone/Package[keystone]/ensure) created', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created', 'progress' => 0.7},
{'pattern' => "/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.8},
{'pattern' => '/Stage[main]/Keystone::Roles::Admin/Keystone_user_role[admin@admin]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Memcached', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Memcached/User[memcached]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Memcached/Package[memcached]/ensure) created', 'progress' => 0.6},
{'pattern' => "/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Rabbitmq', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created', 'progress' => 0.3},
{'pattern' => "/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running", 'progress' => 0.7},
{'pattern' => '/Stage[main]/Rabbitmq::Server/Rabbitmq_user[guest]/ensure) removed', 'progress' => 1},
]
},
{'name' => 'Nova', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[parted]/ensure) created', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created', 'progress' => 0.28},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created', 'progress' => 0.32},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created', 'progress' => 0.35},
{'pattern' => '(/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[nova@/]/ensure) created', 'progress' => 0.43},
{'pattern' => '/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database[nova]/ensure) created', 'progress' => 0.48},
{'pattern' => "/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_grant"\
"[nova@127.0.0.1/nova]/privileges) privileges changed '' to 'all'", 'progress' => 0.51},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package'\
'[nova-vncproxy]/ensure) created', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_volume]/ensure) created', 'progress' => 0.68},
{'pattern' => '/Stage[main]/Nova::Keystone::Auth/Keystone_user_role[nova@services]/ensure) created', 'progress' => 0.75},
{'pattern' => '/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed', 'progress' => 0.8},
{'pattern' => '/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully', 'progress' => 0.85},
{'pattern' => "/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service"\
"[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.9},
{'pattern' => '/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network'\
'nova-vm-net]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Openstack', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Openstack::Firewall/File[iptables]/ensure) defined content as', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Openstack::Glance/Package[swift]/ensure) created', 'progress' => 0.15},
{'pattern' => '/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created', 'progress' => 0.45},
{'pattern' => '/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully', 'progress' => 0.65},
{'pattern' => '/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]/ensure) created', 'progress' => 1},
]
},
]
},
'puppet-log-components-list-multinode_compute-controller' =>
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'components_list' => [
{'name' => 'Glance', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Glance/Package[glance]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database[glance]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/'\
'Database_user[glance@]/ensure) created', 'progress' => 0.7},
{'pattern' => '/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/'\
'admin_user]/value) value changed', 'progress' => 0.71},
{'pattern' => '/Stage[main]/Glance::Keystone::Auth/Keystone_endpoint[glance]/ensure) created', 'progress' => 0.8},
{'pattern' => "/Stage[main]/Glance::Registry/Service[glance-registry]/ensure)"\
" ensure changed 'stopped' to 'running'", 'progress' => 0.95},
{'pattern' => "/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed"\
" 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Horizon', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Horizon/File[/etc/openstack-dashboard/'\
'local_settings]/content) content changed', 'progress' => 0.8},
{'pattern' => "/Stage[main]/Horizon/Service[\$::horizon::params::http_service]/"\
"ensure) ensure changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Keystone', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database[keystone]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Keystone/Package[keystone]/ensure) created', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created', 'progress' => 0.7},
{'pattern' => "/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.8},
{'pattern' => '/Stage[main]/Keystone::Roles::Admin/Keystone_user_role[admin@admin]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Memcached', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Memcached/User[memcached]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Memcached/Package[memcached]/ensure) created', 'progress' => 0.6},
{'pattern' => "/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 1},
]
},
{'name' => 'Rabbitmq', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created', 'progress' => 0.3},
{'pattern' => "/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running", 'progress' => 0.7},
{'pattern' => '/Stage[main]/Rabbitmq::Server/Rabbitmq_user[guest]/ensure) removed', 'progress' => 1},
]
},
{'name' => 'Nova', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[parted]/ensure) created', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created', 'progress' => 0.28},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created', 'progress' => 0.32},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created', 'progress' => 0.35},
{'pattern' => '(/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[nova@/]/ensure) created', 'progress' => 0.43},
{'pattern' => '/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database[nova]/ensure) created', 'progress' => 0.48},
{'pattern' => "/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_grant"\
"[nova@127.0.0.1/nova]/privileges) privileges changed '' to 'all'", 'progress' => 0.51},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package'\
'[nova-vncproxy]/ensure) created', 'progress' => 0.6},
{'pattern' => '/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_volume]/ensure) created', 'progress' => 0.68},
{'pattern' => '/Stage[main]/Nova::Keystone::Auth/Keystone_user_role[nova@services]/ensure) created', 'progress' => 0.75},
{'pattern' => '/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed', 'progress' => 0.8},
{'pattern' => '/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully', 'progress' => 0.85},
{'pattern' => "/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service"\
"[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'", 'progress' => 0.9},
{'pattern' => '/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network'\
'nova-vm-net]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Openstack', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Openstack::Firewall/File[iptables]/ensure) defined content as', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Openstack::Glance/Package[swift]/ensure) created', 'progress' => 0.15},
{'pattern' => '/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created', 'progress' => 0.45},
{'pattern' => '/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully', 'progress' => 0.65},
{'pattern' => '/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]/ensure) created', 'progress' => 1},
]
},
]
},
'puppet-log-components-list-multinode_compute-compute' =>
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'components_list' => [
{'name' => 'Keystone', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Mysql', 'weight' => 10, 'patterns' => [
{'pattern' => '/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Nova', 'weight' => 5, 'patterns' => [
{'pattern' => '/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created', 'progress' => 0.1},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[parted]/ensure) created', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created', 'progress' => 0.28},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created', 'progress' => 0.32},
{'pattern' => '/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created', 'progress' => 0.35},
{'pattern' => '/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[nova@/]/ensure) created', 'progress' => 0.43},
{'pattern' => '/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed', 'progress' => 0.8},
{'pattern' => '/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully', 'progress' => 0.85},
{'pattern' => '/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network'\
'nova-vm-net]/ensure) created', 'progress' => 1},
]
},
{'name' => 'Nova::Compute', 'weight' => 15, 'patterns' => [
{'pattern' => '/Stage[main]/Nova::Compute/Package[bridge-utils]/ensure) created', 'progress' => 0.15},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Exec[symlink-qemu-kvm]/returns) executed successfully', 'progress' => 0.25},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Package[libvirt]/ensure) created', 'progress' => 0.3},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Package[dnsmasq-utils]/ensure) created', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/vncserver_listen]/ensure) created', 'progress' => 0.55},
{'pattern' => '/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Package[nova-compute]/ensure) created', 'progress' => 0.88},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Package[avahi]/ensure) created', 'progress' => 0.9},
{'pattern' => '/Stage[main]/Nova::Compute::Libvirt/Service[messagebus]/ensure) ensure changed', 'progress' => 0.93},
{'pattern' => '/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]/ensure) ensure changed', 'progress' => 0.97},
{'pattern' => '/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]) Triggered', 'progress' => 1},
]
},
{'name' => 'Openstack', 'weight' => 2, 'patterns' => [
{'pattern' => '/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/metadata_host]/ensure) created', 'progress' => 0.2},
{'pattern' => '/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/memcached_servers]/ensure) created', 'progress' => 0.4},
{'pattern' => '/Stage[main]/Openstack::Compute/Augeas[sysconfig-libvirt]/returns) executed successfully', 'progress' => 0.5},
{'pattern' => '/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/multi_host]/ensure) created', 'progress' => 0.8},
{'pattern' => '/Stage[main]/Openstack::Compute/Augeas[libvirt-conf]/returns) executed successfully', 'progress' => 1},
]
},
]
},
}
end
end
end

View File

@ -0,0 +1,239 @@
require 'date'
module Astute
module LogParser
class ParseProvisionLogs <ParseNodeLogs
# Boot the parser with the default KVM provisioning pattern; the proper
# per-node pattern is substituted later in #progress_calculate.
def initialize
  super(Patterns::get_default_pattern('anaconda-log-supposed-time-kvm'))
end
# Ensure every requested node has a pattern spec cached in @nodes_states,
# then delegate the actual progress computation to the parent class.
def progress_calculate(uids_to_calc, nodes)
  uids_to_calc.each do |uid|
    next if @nodes_states[uid]
    matching_node = nodes.find { |n| n['uid'] == uid }
    @nodes_states[uid] = get_pattern_for_node(matching_node)
  end
  super(uids_to_calc, nodes)
end
private
# Dispatch to the parser matching the spec's 'type'.
# Returns nil for unrecognized types (same as before: the case expression
# itself evaluates to nil when no branch matches).
def calculate(fo, node_pattern_spec)
  case node_pattern_spec['type']
  when 'pattern-list' then simple_pattern_finder(fo, node_pattern_spec)
  when 'supposed-time' then supposed_time_parser(fo, node_pattern_spec)
  end
end
# Build a per-node pattern spec: pick the KVM or baremetal defaults and
# scale the disk-dependent installation step by the node's first
# non-removable disk size (in GB).
def get_pattern_for_node(node)
  pattern_name = node['manufacturer'] == 'KVM' ? 'anaconda-log-supposed-time-kvm' : 'anaconda-log-supposed-time-baremetal'
  pattern_spec = Patterns::get_default_pattern(pattern_name)
  pattern_spec['path_prefix'] ||= PATH_PREFIX.to_s
  pattern_spec['separator'] ||= SEPARATOR.to_s
  fixed_disk = node['meta']['disks'].find { |disk| !disk['removable'] }
  # Disk size is reported in bytes; convert to GB. Fall back to 20 GB when
  # no fixed disk is reported.
  hdd_size = fixed_disk ? fixed_disk['size'] / 10 ** 9 : 20
  hdd_pattern = pattern_spec['pattern_list'].find { |el| el['hdd_size_multiplier'] }
  hdd_pattern['supposed_time'] = (hdd_pattern['hdd_size_multiplier'] * hdd_size).to_i
  pattern_spec
end
# Estimate provisioning progress from an anaconda log whose steps carry
# known expected ("supposed") durations. Progress is linearly interpolated
# between the most recently recognized log message and the total time still
# expected for the remaining steps.
#
# State is cached *inside* pattern_spec between calls: '_prev_time',
# '_prev_progress' and '_elapsed_time' at top level, plus '_progress' on
# each already-matched entry of 'pattern_list'.
#
# @param fo           [IO]   log file handle passed to get_chunk
# @param pattern_spec [Hash] see the example below
# @return [Numeric] progress in [0, 1]; 0 on a malformed spec or empty chunk
def supposed_time_parser(fo, pattern_spec)
  # Pattern specification example:
  # pattern_spec = {'type' => 'supposed-time',
  # 'chunk_size' => 10000,
  # 'date_format' => '%Y-%m-%dT%H:%M:%S',
  # 'date_regexp' => '^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}',
  # 'pattern_list' => [
  # {'pattern' => 'Running anaconda script', 'supposed_time' => 60},
  # {'pattern' => 'moving (1) to step enablefilesystems', 'supposed_time' => 3},
  # {'pattern' => "notifying kernel of 'change' event on device", 'supposed_time' => 200},
  # {'pattern' => 'Preparing to install packages', 'supposed_time' => 9},
  # {'pattern' => 'Installing glibc-common-2.12', 'supposed_time' => 9},
  # {'pattern' => 'Installing bash-4.1.2', 'supposed_time' => 11},
  # {'pattern' => 'Installing coreutils-8.4-19', 'supposed_time' => 20},
  # {'pattern' => 'Installing centos-release-6-3', 'supposed_time' => 21},
  # {'pattern' => 'Installing attr-2.4.44', 'supposed_time' => 23},
  # {'pattern' => 'leaving (1) step installpackages', 'supposed_time' => 60},
  # {'pattern' => 'moving (1) to step postscripts', 'supposed_time' => 4},
  # {'pattern' => 'leaving (1) step postscripts', 'supposed_time' => 130},
  # ].reverse,
  # 'filename' => 'install/anaconda.log'
  # }
  # Use custom separator if defined.
  separator = pattern_spec['separator']
  log_patterns = pattern_spec['pattern_list']
  date_format = pattern_spec['date_format']
  date_regexp = pattern_spec['date_regexp']
  unless date_regexp and date_format and log_patterns
    Astute.logger.warn("Wrong pattern_spec #{pattern_spec.inspect} defined for calculating progress via logs.")
    return 0
  end

  # NOTE(review): 'def self.' inside an instance method defines these
  # helpers as singleton methods on this particular parser instance,
  # re-defined on every call. They effectively act as local helpers.

  # Sum 'supposed_time' over patterns not yet matched. The pattern list is
  # ordered newest-first (see .reverse in the example above), so iteration
  # stops at the first entry that already has a cached '_progress'.
  def self.get_elapsed_time(patterns)
    elapsed_time = 0
    patterns.each do |p|
      if p['_progress']
        break
      else
        elapsed_time += p['supposed_time']
      end
    end
    return elapsed_time
  end

  # Interpolate progress. 'k' converts remaining seconds into remaining
  # progress fraction; time spent beyond the current step's supposed_time
  # ('surplus') accrues progress at one third of the normal rate so a slow
  # step slows down — but never stalls — the reported progress.
  def self.get_progress(base_progress, elapsed_time, delta_time, supposed_time=nil)
    return 1.0 if elapsed_time.zero?
    k = (1.0 - base_progress) / elapsed_time
    supposed_time ? surplus = delta_time - supposed_time : surplus = nil
    if surplus and surplus > 0
      progress = supposed_time * k + surplus * k/3 + base_progress
    else
      progress = delta_time * k + base_progress
    end
    progress = 1.0 if progress > 1
    return progress
  end

  # Convert a Date/DateTime difference (a day fraction) into whole seconds.
  def self.get_seconds_from_time(date)
    hours, mins, secs, frac = Date::day_fraction_to_time(date)
    return hours*60*60 + mins*60 + secs
  end

  chunk = get_chunk(fo, pattern_spec['chunk_size'])
  return 0 unless chunk
  # Only look at the tail after the last separator, if one is present.
  pos = chunk.rindex(separator)
  chunk = chunk.slice((pos + separator.size)..-1) if pos
  block = chunk.split("\n")
  now = DateTime.now()
  prev_time = pattern_spec['_prev_time'] ||= now
  prev_progress = pattern_spec['_prev_progress'] ||= 0
  elapsed_time = pattern_spec['_elapsed_time'] ||= get_elapsed_time(log_patterns)
  seconds_since_prev = get_seconds_from_time(now - prev_time)
  # Scan the newest lines first for the most recent recognizable message.
  until block.empty?
    string = block.pop
    log_patterns.each do |pattern|
      if string.include?(pattern['pattern'])
        if pattern['_progress']
          # We not found any new messages. Calculate progress with old data.
          progress = get_progress(prev_progress, elapsed_time,
            seconds_since_prev, pattern['supposed_time'])
          return progress
        else
          # We found message that we never find before. We need to:
          # calculate progress for this message;
          # recalculate control point and elapsed_time;
          # calculate progress for current time.
          # Trying to find timestamp of message.
          date_string = string.match(date_regexp)
          if date_string
            # Get relative time when the message realy occured.
            date = DateTime.strptime(date_string[0], date_format) - prev_time.offset
            real_time = get_seconds_from_time(date - prev_time)
            # Update progress of the message.
            prev_supposed_time = log_patterns.select{|n| n['_progress'] == prev_progress}[0]
            prev_supposed_time = prev_supposed_time['supposed_time'] if prev_supposed_time
            progress = get_progress(prev_progress, elapsed_time, real_time, prev_supposed_time)
            pattern['_progress'] = progress
            # Recalculate elapsed time.
            elapsed_time = pattern_spec['_elapsed_time'] = get_elapsed_time(log_patterns)
            # Update time and progress for control point.
            prev_time = pattern_spec['_prev_time'] = date
            prev_progress = pattern_spec['_prev_progress'] = progress
            seconds_since_prev = get_seconds_from_time(now - date)
            # Calculate progress for current time.
            progress = get_progress(prev_progress, elapsed_time,
              seconds_since_prev, pattern['supposed_time'])
            return progress
          else
            Astute.logger.info("Can't gather date (format: '#{date_regexp}') from string: #{string}")
          end
        end
      end
    end
  end
  # We found nothing.
  progress = get_progress(prev_progress, elapsed_time, seconds_since_prev, log_patterns[0]['supposed_time'])
  return progress
end
# Walk the freshest chunk of the log backwards (newest line first) and
# return the 'progress' of the first matching pattern found. Patterns with
# a 'number' key instead count pattern occurrences (e.g. installed
# packages) and interpolate the ratio into ['p_min', 'p_max'].
#
# @param fo           [IO]   log file handle passed to get_chunk
# @param pattern_spec [Hash] see the example below
# @return [Numeric] progress in [0, 1]; 0 when nothing matched
def simple_pattern_finder(fo, pattern_spec)
  # Pattern specification example:
  # pattern_spec = {'type' => 'pattern-list', 'separator' => "custom separator\n",
  # 'chunk_size' => 40000,
  # 'pattern_list' => [
  # {'pattern' => 'Running kickstart %%pre script', 'progress' => 0.08},
  # {'pattern' => 'to step enablefilesystems', 'progress' => 0.09},
  # {'pattern' => 'to step reposetup', 'progress' => 0.13},
  # {'pattern' => 'to step installpackages', 'progress' => 0.16},
  # {'pattern' => 'Installing',
  # 'number' => 210, # Now it install 205 packets. Add 5 packets for growth in future.
  # 'p_min' => 0.16, # min percent
  # 'p_max' => 0.87 # max percent
  # },
  # {'pattern' => 'to step postinstallconfig', 'progress' => 0.87},
  # {'pattern' => 'to step dopostaction', 'progress' => 0.92},
  # ]
  # }
  # Use custom separator if defined.
  separator = pattern_spec['separator']
  log_patterns = pattern_spec['pattern_list']
  unless log_patterns
    Astute.logger.warn("Wrong pattern #{pattern_spec.inspect} defined for calculating progress via logs.")
    return 0
  end
  chunk = get_chunk(fo, pattern_spec['chunk_size'])
  # NOTE(mihgen): Following line fixes "undefined method `rindex' for nil:NilClass" for empty log file
  return 0 unless chunk
  # Only look at the tail after the last separator, if one is present.
  pos = chunk.rindex(separator)
  chunk = chunk.slice((pos + separator.size)..-1) if pos
  block = chunk.split("\n")
  return 0 unless block
  # Pop lines from the end of the chunk, i.e. scan newest-first.
  while true
    string = block.pop
    return 0 unless string # If we found nothing
    log_patterns.each do |pattern|
      if string.include?(pattern['pattern'])
        return pattern['progress'] if pattern['progress']
        if pattern['number']
          # Count how many earlier lines match the same pattern (the line
          # just matched counts as 1), then map counter/number into
          # the [p_min, p_max] progress range.
          string = block.pop
          counter = 1
          while string
            counter += 1 if string.include?(pattern['pattern'])
            string = block.pop
          end
          progress = counter.to_f / pattern['number']
          progress = 1 if progress > 1
          progress = pattern['p_min'] + progress * (pattern['p_max'] - pattern['p_min'])
          return progress
        end
        # A matched pattern carrying neither 'progress' nor 'number' is a
        # spec error; warn and keep scanning older lines.
        Astute.logger.warn("Wrong pattern #{pattern_spec.inspect} defined for calculating progress via log.")
      end
    end
  end
end
end
end
end

View File

@ -0,0 +1,101 @@
require 'mcollective'
require 'active_support/core_ext/class/attribute_accessors'

module Astute
  # Thin wrapper around an MCollective SimpleRPC client.
  #
  # Unknown methods are forwarded to the underlying rpcclient via
  # method_missing. When @check_result is true, nodes that did not answer
  # are retried up to @retries times, and a RuntimeError is raised if any
  # node is still missing or returned a non-zero status code.
  class MClient
    include MCollective::RPC

    # Number of times to re-send an RPC call to nodes that did not answer.
    attr_accessor :retries

    # Optional class-wide mutex. When set, all rpcclient interactions are
    # serialized through it (rpcclient is not thread-safe).
    cattr_accessor :semaphore

    # @param ctx          context object carrying task_id
    # @param agent        [String] MCollective agent name
    # @param nodes        [Array, nil] node uids to address; nil leaves discovery to MCollective
    # @param check_result [Boolean] validate responses and retry missing nodes
    # @param timeout      [Numeric, nil] RPC timeout override
    def initialize(ctx, agent, nodes=nil, check_result=true, timeout=nil)
      @task_id = ctx.task_id
      @agent = agent
      # BUGFIX: 'nodes' defaults to nil, but the old code called nodes.map
      # unconditionally, raising NoMethodError whenever the default was
      # used — contradicting the "unless @nodes.nil?" guard below.
      @nodes = nodes.map { |n| n.to_s } if nodes
      @check_result = check_result
      try_synchronize do
        @mc = rpcclient(agent, :exit_on_failure => false)
      end
      @mc.timeout = timeout if timeout
      @mc.progress = false
      @retries = Astute.config.MC_RETRIES
      unless @nodes.nil?
        @mc.discover(:nodes => @nodes)
      end
    end

    # Forward any call to the MCollective agent. :discover updates the
    # remembered node list; every other call is logged and (when
    # @check_result is true) validated and retried.
    # NOTE(review): result validation compares against @nodes.length, so
    # constructing the client with nodes=nil requires check_result=false —
    # confirm all callers honor that.
    def method_missing(method, *args)
      try_synchronize do
        @mc_res = @mc.send(method, *args)
      end
      if method == :discover
        @nodes = args[0][:nodes]
        return @mc_res
      end
      # Enable if needed. In normal case it eats the screen pretty fast
      log_result(@mc_res, method)
      return @mc_res unless @check_result
      err_msg = ''
      # Following error might happen because of misconfiguration, ex. direct_addressing = 1 only on client
      # or.. could be just some hang? Let's retry if @retries is set
      if @mc_res.length < @nodes.length
        # some nodes didn't respond
        retry_index = 1
        while retry_index <= @retries
          sleep rand
          nodes_responded = @mc_res.map { |n| n.results[:sender] }
          not_responded = @nodes - nodes_responded
          Astute.logger.debug "Retry ##{retry_index} to run mcollective agent on nodes: '#{not_responded.join(',')}'"
          @mc.discover(:nodes => not_responded)
          try_synchronize do
            @new_res = @mc.send(method, *args)
          end
          log_result(@new_res, method)
          # @new_res can have some nodes which finally responded
          @mc_res += @new_res
          break if @mc_res.length == @nodes.length
          retry_index += 1
        end
        if @mc_res.length < @nodes.length
          nodes_responded = @mc_res.map { |n| n.results[:sender] }
          not_responded = @nodes - nodes_responded
          err_msg += "MCollective agents '#{not_responded.join(',')}' didn't respond. \n"
        end
      end
      failed = @mc_res.select{|x| x.results[:statuscode] != 0 }
      if failed.any?
        err_msg += "MCollective call failed in agent '#{@agent}', "\
                   "method '#{method}', failed nodes: #{failed.map{|x| x.results[:sender]}.join(',')} \n"
      end
      unless err_msg.empty?
        Astute.logger.error err_msg
        raise "#{@task_id}: #{err_msg}"
      end
      @mc_res
    end

    private

    # Debug-log every node's result for the given method.
    def log_result(result, method)
      result.each do |node|
        Astute.logger.debug "#{@task_id}: MC agent '#{node.agent}', method '#{method}', "\
                            "results: #{node.results.inspect}"
      end
    end

    # Run the block under the class-wide semaphore when one is configured,
    # otherwise run it directly.
    def try_synchronize
      if self.semaphore
        self.semaphore.synchronize do
          yield
        end
      else
        yield
      end
    end
  end
end

View File

@ -0,0 +1,15 @@
require 'json'
require 'ipaddr'

module Astute
  # Helper for pushing node facts to the 'nailyfact' MCollective agent.
  module Metadata
    # Store 'metadata' as facts for the node with the given uid.
    # This is a synchronous RPC call, so we are sure that data were sent
    # and processed remotely by the time this method returns.
    #
    # @param ctx      context carrying task_id
    # @param uid      node uid to publish facts for
    # @param metadata [Hash] facts, serialized to JSON before sending
    def self.publish_facts(ctx, uid, metadata)
      Astute.logger.info "#{ctx.task_id}: nailyfact - storing metadata for node uid=#{uid}"
      Astute.logger.debug "#{ctx.task_id}: nailyfact stores metadata: #{metadata.inspect}"
      nailyfact = MClient.new(ctx, "nailyfact", [uid])
      # TODO(mihgen) check results!
      stats = nailyfact.post(:value => metadata.to_json)
    end
  end
end

View File

@ -0,0 +1,50 @@
module Astute
  # Network verification: asks nodes to exchange VLAN-tagged probing frames
  # and reports which VLANs actually carried traffic between nodes.
  module Network
    # Check L2 connectivity between the given nodes on the given VLANs.
    #
    # @param ctx      context carrying task_id and reporter
    # @param nodes    [Array<Hash>] nodes, each with a 'uid'
    # @param networks [Array<Hash>] networks, each with a 'vlan_id'
    # @return [Hash] {'nodes' => [...]} on success or a status/error hash
    def self.check_network(ctx, nodes, networks)
      if nodes.empty?
        Astute.logger.info "#{ctx.task_id}: Network checker: nodes list is empty. Nothing to check."
        return {'status' => 'error', 'error' => "Nodes list is empty. Nothing to check."}
      elsif nodes.size == 1
        # A single node trivially "sees" all its own VLANs.
        Astute.logger.info "#{ctx.task_id}: Network checker: nodes list contains one node only. Do nothing."
        return {'nodes' =>
          [{'uid'=>nodes[0]['uid'],
            'networks'=>[{'vlans' => networks.map {|n| n['vlan_id'].to_i}, 'iface'=>'eth0'}]
          }]
        }
      end
      uids = nodes.map {|n| n['uid']}
      # TODO Everything breaks if agent not found. We have to handle that
      net_probe = MClient.new(ctx, "net_probe", uids)
      # Interface name is hardcoded for now. Later we expect it to be passed from Nailgun backend.
      # FIX: build the probe payload once — it was previously duplicated
      # verbatim before the listener and the sender calls.
      data_to_send = {'eth0' => networks.map {|n| n['vlan_id']}.join(',')}
      net_probe.start_frame_listeners(:interfaces => data_to_send.to_json)
      ctx.reporter.report({'progress' => 30})
      net_probe.send_probing_frames(:interfaces => data_to_send.to_json)
      ctx.reporter.report({'progress' => 60})
      stats = net_probe.get_probing_info
      result = stats.map {|node| {'uid' => node.results[:sender],
                                  'networks' => check_vlans_by_traffic(
                                    node.results[:sender],
                                    node.results[:data][:neighbours])} }
      Astute.logger.debug "#{ctx.task_id}: Network checking is done. Results: #{result.inspect}"
      {'nodes' => result}
    end

    private

    # For each interface, keep only VLANs on which traffic from at least one
    # OTHER node was seen: a vlan whose neighbour map contains solely this
    # node's own uid is dropped.
    # NOTE(review): 'private' has no effect on 'def self.' singleton
    # methods, so this remains technically callable from outside.
    def self.check_vlans_by_traffic(uid, data)
      data.map do |iface, vlans|
        {
          'iface' => iface,
          'vlans' => vlans.reject{ |k, v|
            v.size == 1 && v.has_key?(uid)
          }.keys.map(&:to_i)
        }
      end
    end
  end
end

78
astute/lib/astute/node.rb Normal file
View File

@ -0,0 +1,78 @@
require 'active_support/core_ext/hash/indifferent_access'
require 'ostruct'
module Astute
# A single node record. Behaves like an OpenStruct whose 'uid' attribute is
# mandatory, normalized to a String, and immutable after construction.
class Node < OpenStruct
  # @param hash [Hash, nil] node attributes; must contain a truthy 'uid'
  # @raise [TypeError] when hash is nil or carries no 'uid'
  def initialize(hash=nil)
    raise TypeError.new("Invalid data: #{hash.inspect}") unless hash && hash['uid']
    # Work on a copy so the caller's hash is never mutated.
    super hash.merge('uid' => hash['uid'].to_s)
  end

  # Hash-style reader delegating to the generated accessor.
  def [](key)
    send(key)
  end

  # Hash-style writer delegating to the generated mutator.
  def []=(key, value)
    send("#{key}=", value)
  end

  # Stored uid, always a String.
  def uid
    @table[:uid]
  end

  # uid is fixed at construction time.
  def uid=(_value)
    raise TypeError, 'Read-only attribute'
  end

  # Indifferent-access copy of all attributes (requires ActiveSupport).
  def to_hash
    @table.with_indifferent_access
  end
end
# Hash of uid (String) => Node. Offers bulk construction helpers and
# normalizes keys/values on insertion.
class NodesHash < Hash
  alias uids keys
  alias nodes values

  # Build a NodesHash from any enumerable of node-like objects.
  # Returns the argument untouched when it is already a NodesHash.
  def self.build(nodes)
    return nodes if nodes.kind_of? self
    nodes.each_with_object(self.new) { |node, hash| hash << node }
  end

  # Insert one node, coercing plain hashes into Node instances.
  def <<(node)
    normalized = normalize_value(node)
    self[normalized.uid] = normalized
    self
  end

  # Insert several nodes at once.
  def push(*nodes)
    nodes.each { |node| self << node }
    self
  end

  # Lookup is indifferent to key type: keys are always stored as strings.
  def [](key)
    super key.to_s
  end

  private

  # Direct assignment is private so all writes funnel through #<<.
  def []=(*args)
    super
  end

  # Coerce anything node-like into a Node instance.
  def normalize_value(node)
    node.kind_of?(Node) ? node : Node.new(node.to_hash)
  end
end
end

View File

@ -0,0 +1,72 @@
module Astute
  # Erases and reboots a set of nodes via the 'erase_node' MCollective
  # agent, retrying nodes that failed or did not answer.
  class NodesRemover
    # @param ctx   context carrying task_id
    # @param nodes [Array, NodesHash] nodes to remove
    def initialize(ctx, nodes)
      @ctx = ctx
      @nodes = NodesHash.build(nodes)
    end

    # Remove all nodes, retrying failures, and build the report hash:
    #   {'nodes' => [...]} on full success, or
    #   {'status' => 'error', 'nodes' => [...], 'error_nodes' => [...]}
    # when some nodes could not be erased.
    def remove
      # TODO(mihgen): 1. Nailgun should process node error message
      # 2. Should we rename nodes -> removed_nodes array?
      # 3. If exception is raised here, we should not fully fall into error, but only failed node
      erased_nodes, error_nodes, inaccessible_nodes = remove_nodes(@nodes)
      # Unanswered nodes are treated the same as failed ones for retrying.
      error_nodes.merge! inaccessible_nodes
      retry_remove_nodes error_nodes, erased_nodes, Astute.config[:MC_RETRIES], Astute.config[:MC_RETRY_INTERVAL]
      if error_nodes.empty?
        answer = {'nodes' => erased_nodes.nodes.map(&:to_hash)}
      else
        answer = {'status' => 'error', 'nodes' => erased_nodes.nodes.map(&:to_hash), 'error_nodes' => error_nodes.nodes.map(&:to_hash)}
        Astute.logger.error "#{@ctx.task_id}: Removing of nodes #{@nodes.uids.inspect} finished "\
                            "with errors: #{error_nodes.nodes.inspect}"
      end
      Astute.logger.info "#{@ctx.task_id}: Finished removing of nodes: #{@nodes.uids.inspect}"
      answer
    end

    private

    # Run 'erase_node' once on the given nodes and partition the outcome.
    # @return [Array(NodesHash, NodesHash, NodesHash)]
    #   erased, failed, unanswered nodes (in that order)
    def remove_nodes(nodes)
      if nodes.empty?
        Astute.logger.info "#{@ctx.task_id}: Nodes to remove are not provided. Do nothing."
        return Array.new(3){ NodesHash.new }
      end
      Astute.logger.info "#{@ctx.task_id}: Starting removing of nodes: #{nodes.uids.inspect}"
      # 'check_result=false' is a plain local assignment used as an inline
      # keyword-like annotation; result checking is done manually below.
      remover = MClient.new(@ctx, "erase_node", nodes.uids.sort, check_result=false)
      responses = remover.erase_node(:reboot => true)
      Astute.logger.debug "#{@ctx.task_id}: Data received from nodes: #{responses.inspect}"
      # Nodes that sent no response at all are recorded as inaccessible.
      inaccessible_uids = nodes.uids - responses.map{|response| response.results[:sender] }
      inaccessible_nodes = NodesHash.build(inaccessible_uids.map do |uid|
        {'uid' => uid, 'error' => 'Node not answered by RPC.'}
      end)
      error_nodes = NodesHash.new
      erased_nodes = NodesHash.new
      responses.each do |response|
        node = Node.new('uid' => response.results[:sender])
        if response.results[:statuscode] != 0
          node['error'] = "RPC agent 'erase_node' failed. Result: #{response.results.inspect}"
          error_nodes << node
        elsif not response.results[:data][:rebooted]
          node['error'] = "RPC method 'erase_node' failed with message: #{response.results[:data][:error_msg]}"
          error_nodes << node
        else
          erased_nodes << node
        end
      end
      [erased_nodes, error_nodes, inaccessible_nodes]
    end

    # Re-run removal for failed nodes up to 'retries' times, moving nodes
    # that eventually succeed from error_nodes into erased_nodes.
    def retry_remove_nodes(error_nodes, erased_nodes, retries=3, interval=1)
      until retries == 0
        retries -= 1
        retried_erased_nodes = remove_nodes(error_nodes)[0]
        retried_erased_nodes.each do |uid, node|
          error_nodes.delete uid
          erased_nodes << node
        end
        return if error_nodes.empty?
        sleep interval if interval > 0
      end
    end
  end
end

View File

@ -0,0 +1,49 @@
module Astute
  # High-level facade over deployment, node typing, removal and network
  # verification operations.
  class Orchestrator
    # @param deploy_engine [Class, nil] deployment engine class; defaults to NailyFact
    # @param log_parsing   [Boolean] enable deployment progress parsing from logs
    def initialize(deploy_engine=nil, log_parsing=false)
      @deploy_engine = deploy_engine || Astute::DeploymentEngine::NailyFact
      # Use a no-op parser when log parsing is disabled so callers never branch.
      if log_parsing
        @log_parser = LogParser::ParseDeployLogs.new
      else
        @log_parser = LogParser::NoParsing.new
      end
    end

    # Ask each node what it currently runs (via the 'systemtype' agent).
    # @return [Array<Hash>] [{'uid' => ..., 'node_type' => ...}, ...]
    def node_type(reporter, task_id, nodes, timeout=nil)
      context = Context.new(task_id, reporter)
      uids = nodes.map {|n| n['uid']}
      # 'check_result=false' is a plain local assignment used as an inline
      # keyword-like annotation for readability.
      systemtype = MClient.new(context, "systemtype", uids, check_result=false, timeout)
      systems = systemtype.get_type
      systems.map do |n|
        {
          'uid' => n.results[:sender],
          'node_type' => n.results[:data][:node_type].chomp
        }
      end
    end

    # Deploy the given nodes with the configured engine, reporting progress
    # through a ProxyReporter wrapped around up_reporter.
    # @raise [RuntimeError] when nodes is empty
    def deploy(up_reporter, task_id, nodes, attrs)
      raise "Nodes to deploy are not provided!" if nodes.empty?
      # Following line fixes issues with uids: it should always be string
      # FIX: use each — the map return value was discarded; this loop exists
      # only for its side effect.
      nodes.each { |x| x['uid'] = x['uid'].to_s } # NOTE: perform that on environment['nodes'] initialization
      proxy_reporter = ProxyReporter.new(up_reporter)
      context = Context.new(task_id, proxy_reporter, @log_parser)
      deploy_engine_instance = @deploy_engine.new(context)
      Astute.logger.info "Using #{deploy_engine_instance.class} for deployment."
      begin
        # Log parser preparation is best-effort: a failure here must not
        # abort the deployment itself.
        @log_parser.prepare(nodes)
      rescue => e
        # FIX: was 'rescue Exception', which also swallowed SignalException
        # and SystemExit; StandardError is the correct net here.
        Astute.logger.warn "Some error occurred when prepare LogParser: #{e.message}, trace: #{e.backtrace.inspect}"
      end
      deploy_engine_instance.deploy(nodes, attrs)
    end

    # Erase and reboot the given nodes.
    def remove_nodes(reporter, task_id, nodes)
      NodesRemover.new(Context.new(task_id, reporter), nodes).remove
    end

    # Verify L2 connectivity between nodes on the given networks.
    def verify_networks(reporter, task_id, nodes, networks)
      Network.check_network(Context.new(task_id, reporter), nodes, networks)
    end
  end
end

View File

@ -0,0 +1,137 @@
require 'json'
require 'timeout'

module Astute
  # Drives puppet runs on a set of nodes via the 'puppetd' MCollective
  # agent: kicks off runs, polls last_run_summary until every node finishes
  # (or retries are exhausted), and reports node statuses/progress through
  # the context reporter.
  module PuppetdDeployer
    private

    # NOTE(review): 'private' has no effect on 'def self.' singleton
    # methods; the keyword documents intent only.

    # Runs puppetd.runonce only if puppet is not running on the host at the time.
    # If it is running, it waits a bit and tries again, bounded by
    # PUPPET_FADE_TIMEOUT. Returns the list of node uids which appear to
    # have hung puppet.
    def self.puppetd_runonce(puppetd, uids)
      started = Time.now.to_i
      # FIX: initialize 'running' so the references after the loop cannot hit
      # an undefined local if the loop body never executes.
      running = []
      while Time.now.to_i - started < Astute.config.PUPPET_FADE_TIMEOUT
        puppetd.discover(:nodes => uids)
        last_run = puppetd.last_run_summary
        running = last_run.select {|x| x.results[:data][:status] == 'running'}.map {|n| n.results[:sender]}
        not_running = uids - running
        if not_running.any?
          puppetd.discover(:nodes => not_running)
          puppetd.runonce
        end
        uids = running
        break if uids.empty?
        sleep Astute.config.PUPPET_FADE_INTERVAL
      end
      Astute.logger.debug "puppetd_runonce completed within #{Time.now.to_i - started} seconds."
      Astute.logger.debug "Following nodes have puppet hung: '#{running.join(',')}'" if running.any?
      running
    end

    # Classify nodes into succeed/error/running by comparing two
    # last_run_summary snapshots.
    # NOTE(review): assumes every sender present in last_run also appears
    # in prev_run — a missing sender would raise NoMethodError on nil.
    def self.calc_nodes_status(last_run, prev_run)
      # Finished are those which are not in running state,
      # and changed their last_run time, which is changed after application of catalog,
      # at the time of updating last_run_summary file. At that particular time puppet is
      # still running, and will finish in a couple of seconds.
      finished = last_run.select {|x| x.results[:data][:time]['last_run'] !=
        prev_run.select {|ps|
          ps.results[:sender] == x.results[:sender]
        }[0].results[:data][:time]['last_run'] and x.results[:data][:status] != 'running'}
      # Looking for error_nodes among only finished - we don't bother previous failures
      error_nodes = finished.select { |n|
        n.results[:data][:resources]['failed'] != 0}.map {|x| x.results[:sender]}
      succeed_nodes = finished.select { |n|
        n.results[:data][:resources]['failed'] == 0}.map {|x| x.results[:sender]}
      # Running are all which didn't appear in finished
      running_nodes = last_run.map {|n| n.results[:sender]} - finished.map {|n| n.results[:sender]}
      nodes_to_check = running_nodes + succeed_nodes + error_nodes
      unless nodes_to_check.size == last_run.size
        # FIX: the two halves of this message were separate statements before
        # (the second string literal was dead code and never part of the
        # raised message); also fixed the 'Shoud' typo.
        raise "Should never happen. Internal error in nodes statuses calculation. "\
              "Statuses calculated for: #{nodes_to_check.inspect}, "\
              "nodes passed to check statuses of: #{last_run.map {|n| n.results[:sender]}}"
      end
      {'succeed' => succeed_nodes, 'error' => error_nodes, 'running' => running_nodes}
    end

    public

    # Deploy via puppet: kick off runs, poll statuses until all nodes
    # finish, retry failed nodes up to 'retries' times, and push per-node
    # statuses/progress through ctx.reporter.
    # @param change_node_status [Boolean] report ready/error per node when true
    def self.deploy(ctx, nodes, retries=2, change_node_status=true)
      # TODO: can we hide retries, ignore_failure into @ctx ?
      uids = nodes.map {|n| n['uid']}
      # TODO(mihgen): handle exceptions from mclient, raised if agent does not respond or responded with error
      puppetd = MClient.new(ctx, "puppetd", uids)
      prev_summary = puppetd.last_run_summary
      # Keep info about retries for each node
      node_retries = {}
      uids.each {|x| node_retries.merge!({x => retries}) }
      Astute.logger.debug "Waiting for puppet to finish deployment on all nodes (timeout = #{Astute.config.PUPPET_TIMEOUT} sec)..."
      time_before = Time.now
      Timeout::timeout(Astute.config.PUPPET_TIMEOUT) do
        puppetd_runonce(puppetd, uids)
        nodes_to_check = uids
        last_run = prev_summary
        while nodes_to_check.any?
          calc_nodes = calc_nodes_status(last_run, prev_summary)
          Astute.logger.debug "Nodes statuses: #{calc_nodes.inspect}"
          # At least we will report about successfully deployed nodes
          nodes_to_report = []
          nodes_to_report.concat(calc_nodes['succeed'].map { |n| {'uid' => n, 'status' => 'ready'} }) if change_node_status
          # Process retries
          nodes_to_retry = []
          calc_nodes['error'].each do |uid|
            if node_retries[uid] > 0
              node_retries[uid] -= 1
              Astute.logger.debug "Puppet on node #{uid.inspect} will be restarted. "\
                                  "#{node_retries[uid]} retries remained."
              nodes_to_retry << uid
            else
              Astute.logger.debug "Node #{uid.inspect} has failed to deploy. There is no more retries for puppet run."
              nodes_to_report << {'uid' => uid, 'status' => 'error', 'error_type' => 'deploy'} if change_node_status
            end
          end
          if nodes_to_retry.any?
            Astute.logger.info "Retrying to run puppet for following error nodes: #{nodes_to_retry.join(',')}"
            puppetd_runonce(puppetd, nodes_to_retry)
            # We need this magic with prev_summary to reflect new puppetd run statuses..
            prev_summary.delete_if { |x| nodes_to_retry.include?(x.results[:sender]) }
            prev_summary += last_run.select { |x| nodes_to_retry.include?(x.results[:sender]) }
          end
          # /end of processing retries
          if calc_nodes['running'].any?
            begin
              # Pass nodes because logs calculation needs IP address of node, not just uid
              nodes_progress = ctx.deploy_log_parser.progress_calculate(calc_nodes['running'], nodes)
              if nodes_progress.any?
                Astute.logger.debug "Got progress for nodes: #{nodes_progress.inspect}"
                # Nodes with progress are running, so they are not included in nodes_to_report yet
                nodes_progress.map! {|x| x.merge!({'status' => 'deploying'})}
                nodes_to_report += nodes_progress
              end
            rescue Exception => e
              Astute.logger.warn "Some error occurred when parse logs for nodes progress: #{e.message}, "\
                                 "trace: #{e.backtrace.inspect}"
            end
          end
          ctx.reporter.report('nodes' => nodes_to_report) if nodes_to_report.any?
          # we will iterate only over running nodes and those that we restart deployment for
          nodes_to_check = calc_nodes['running'] + nodes_to_retry
          break if nodes_to_check.empty?
          sleep Astute.config.PUPPET_DEPLOY_INTERVAL
          puppetd.discover(:nodes => nodes_to_check)
          last_run = puppetd.last_run_summary
        end
      end
      time_spent = Time.now - time_before
      Astute.logger.info "#{ctx.task_id}: Spent #{time_spent} seconds on puppet run "\
                         "for following nodes(uids): #{nodes.map {|n| n['uid']}.join(',')}"
    end
  end
end

View File

@ -0,0 +1,109 @@
require 'set'

# Node lifecycle states mapped to monotonically increasing weights.
# The numeric values encode progression order so that reporters can detect
# and reject attempts to move a node to an "earlier" state (the weights are
# compared in ProxyReporter#node_validate). Frozen to prevent accidental
# mutation of a shared constant.
STATES = {
  'offline' => 0,
  'discover' => 10,
  'provisioning' => 30,
  'provisioned' => 40,
  'deploying' => 50,
  'ready' => 60,
  'error' => 70
}.freeze
module Astute
  # Sits between deployment code and an upstream reporter. It validates each
  # node hash, clamps out-of-range progress values, suppresses reports that
  # would move a node "backwards" (lower status weight or lower progress) or
  # that contain no changes, and forwards only the nodes that remain.
  class ProxyReporter
    # up_reporter - any object responding to #report(data); receives the
    #               filtered payload.
    def initialize(up_reporter)
      @up_reporter = up_reporter
      # Last reported state per node: array of node hashes keyed by 'uid'.
      # TODO: store as a hash keyed by uid for O(1) lookup (was noted in code).
      @nodes = []
    end

    # Validate data['nodes'] and forward the data upstream if any node
    # actually changed, remembering what was sent for future comparison.
    def report(data)
      nodes = data['nodes'] || []
      # node_validate returns nil for nodes that must be skipped.
      nodes_to_report = nodes.map { |node| node_validate(node) }.compact

      # Let's report only if nodes updated
      if nodes_to_report.any?
        data['nodes'] = nodes_to_report
        @up_reporter.report(data)
        # Merge the reported attributes into the saved snapshot in @nodes.
        nodes_to_report.each do |node|
          saved_node = @nodes.find { |x| x['uid'] == node['uid'] }
          if saved_node
            node.each { |k, v| saved_node[k] = v }
          else
            @nodes << node
          end
        end
      end
    end

    private

    # Validate a single node hash.
    # Returns the node (with 'progress' possibly clamped to 0..100, and forced
    # to 100 for 'provisioned'/'ready' statuses) when it should be reported,
    # or nil when the report must be skipped.
    # Raises RuntimeError when the hash is structurally invalid: unknown
    # status, progress supplied without status, or missing uid.
    def node_validate(node)
      # Validate basic correctness of attributes.
      err = []
      if node['status'].nil?
        err << "progress value provided, but no status" unless node['progress'].nil?
      else
        err << "Status provided #{node['status']} is not supported" if STATES[node['status']].nil?
      end
      err << "Node uid is not provided" unless node['uid']
      if err.any?
        msg = "Validation of node: #{node.inspect} for report failed: #{err.join('; ')}."
        Astute.logger.error(msg)
        raise msg
      end

      # Clamp progress into the 0..100 range, warning on bad input.
      unless node['progress'].nil?
        if node['progress'] > 100
          Astute.logger.warn("Passed report for node with progress > 100: "\
                             "#{node.inspect}. Adjusting progress to 100.")
          node['progress'] = 100
        end
        if node['progress'] < 0
          Astute.logger.warn("Passed report for node with progress < 0: "\
                             "#{node.inspect}. Adjusting progress to 0.")
          node['progress'] = 0
        end
      end
      # Terminal-ish states imply completion; force progress to 100.
      if node['status'] && ['provisioned', 'ready'].include?(node['status']) && node['progress'] != 100
        Astute.logger.warn("In #{node['status']} state node should have progress 100, "\
                           "but node passed: #{node.inspect}. Setting it to 100")
        node['progress'] = 100
      end

      # Comparison with previously reported state, if any.
      saved_node = @nodes.find { |x| x['uid'] == node['uid'] }
      unless saved_node.nil?
        saved_status = STATES[saved_node['status']] || 0
        node_status = STATES[node['status']] || saved_status
        saved_progress = saved_node['progress'] || 0
        node_progress = node['progress'] || saved_progress

        if node_status < saved_status
          Astute.logger.warn("Attempt to assign lower status detected: "\
                             "Status was: #{saved_status}, attempted to "\
                             "assign: #{node_status}. Skipping this node (id=#{node['uid']})")
          return
        end
        if node_progress < saved_progress && node_status == saved_status
          Astute.logger.warn("Attempt to assign lesser progress detected: "\
                             "Progress was: #{saved_progress}, attempted to "\
                             "assign: #{node_progress}. Skipping this node (id=#{node['uid']})")
          return
        end
        # Report only if progress grew or some attribute changed.
        return if node.all? { |k, v| saved_node[k].eql?(v) }
      end
      node
    end
  end
end

View File

@ -0,0 +1,27 @@
require 'json'
require 'timeout'

module Astute
  # Drives an "rpuppet" MCollective agent run across a set of nodes.
  module RpuppetDeployer
    # Trigger an rpuppet run on every node in +nodes+.
    #
    # ctx        - context object responding to #task_id (used for logging).
    # nodes      - array of node hashes, each carrying a 'uid' key.
    # parameters - parameters passed through to the agent as JSON.
    # classes    - puppet classes passed through to the agent as JSON.
    # env        - puppet environment name (defaults to "production").
    #
    # Returns false immediately when +nodes+ is empty.
    # The agent run is bounded by Astute.config.PUPPET_TIMEOUT seconds.
    def self.rpuppet_deploy(ctx, nodes, parameters, classes, env="production")
      if nodes.empty?
        Astute.logger.info "#{ctx.task_id}: Nodes to deploy are not provided. Do nothing."
        return false
      end

      node_uids = nodes.map { |node| node['uid'] }
      agent = MClient.new(ctx, "rpuppet", node_uids)
      payload = {
        "parameters" => parameters,
        "classes" => classes,
        "environment" => env
      }

      Astute.logger.debug "Waiting for puppet to finish deployment on all nodes (timeout = #{Astute.config.PUPPET_TIMEOUT} sec)..."
      started_at = Time.now
      Timeout::timeout(Astute.config.PUPPET_TIMEOUT) { agent.run(:data => payload.to_json) }
      elapsed = Time.now - started_at
      Astute.logger.info "#{ctx.task_id}: Spent #{elapsed} seconds on puppet run for following nodes(uids): #{node_uids.join(',')}"
    end
  end
end

View File

@ -0,0 +1,3 @@
module Astute
  # Gem version. Read by astute.gemspec and by the packaging makefile
  # (module.mk shells out to this file to name the built gem).
  # Frozen so the shared constant cannot be mutated at runtime.
  VERSION = '0.0.1'.freeze
end

10
astute/module.mk Normal file
View File

@ -0,0 +1,10 @@
# Read the gem version straight from the Ruby source so packaging can never
# drift from lib/astute/version.rb.
ASTUTE_VERSION:=$(shell ruby -e "require '$(SOURCE_DIR)/astute/lib/astute/version.rb'; puts Astute::VERSION")
# Build the astute gem into $(BUILD_DIR)/packages/gems/. The target is
# rebuilt whenever the gemspec or any file under bin/, lib/ or spec/ changes.
# NOTE(review): recipe lines below must be tab-indented in the real file.
$(BUILD_DIR)/packages/gems/astute-$(ASTUTE_VERSION).gem: \
		$(SOURCE_DIR)/astute/astute.gemspec \
		$(call find-files,astute/bin) \
		$(call find-files,astute/lib) \
		$(call find-files,astute/spec)
	@mkdir -p $(@D)
	cd $(SOURCE_DIR)/astute && gem build astute.gemspec
	mv $(SOURCE_DIR)/astute/astute-$(ASTUTE_VERSION).gem $@

10
astute/setup-env.sh Executable file
View File

@ -0,0 +1,10 @@
#!/bin/bash
# Development helper: symlink the in-tree mcollective agents, the nailytest
# puppet module and net_probe into their system locations, then restart
# mcollective so it picks the new agents up. Expects to be run from the
# astute/ directory (paths are relative) with root privileges.

# Iterate over the files directly instead of parsing `ls` output
# (safe for filenames with spaces; see ShellCheck SC2045).
for agent_path in ../mcagent/*; do
  agent=$(basename "$agent_path")
  echo "Linking agent $agent of mcollective.."
  ln -sf "$(readlink -f "$agent_path")" "/usr/libexec/mcollective/mcollective/agent/$agent"
done
# -T: treat the destination as a normal file, never descend into it.
ln -sfT "$(readlink -f ../puppet/nailytest)" /etc/puppet/modules/nailytest
ln -sf "$(readlink -f ../puppet/nailytest/examples/site.pp)" /etc/puppet/manifests/site.pp
ln -sf "$(readlink -f ../bootstrap/sync/usr/bin/net_probe.py)" /usr/bin/net_probe.py
uuidgen > /etc/bootif # for net_probe plugin
service mcollective restart

View File

@ -0,0 +1,474 @@
2013-01-23T09:24:16 info: 09:23:58,565 INFO : kernel command line: initrd=/images/centos63-x86_64/initrd.img ksdevice=bootif lang= locale=en_US text priority=critical kssendmac ks=http://10.0.168.2/cblr/svc/op/ks/system/slave-1 BOOT_IMAGE=/images/centos63-x86_64/vmlinuz BOOTIF=01-52-54-00-9a-db-f8
2013-01-23T09:24:16 info:
2013-01-23T09:24:16 info: 09:23:58,565 INFO : text mode forced from cmdline
2013-01-23T09:24:16 debug: 09:23:58,565 DEBUG : readNetInfo /tmp/s390net not found, early return
2013-01-23T09:24:16 info: 09:23:58,565 INFO : anaconda version 13.21.176 on x86_64 starting
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module ipv6
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module iscsi_ibft
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module iscsi_boot_sysfs
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module pcspkr
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module edd
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module floppy
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module iscsi_tcp
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module libiscsi_tcp
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module libiscsi
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module scsi_transport_iscsi
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module squashfs
2013-01-23T09:24:16 debug: 09:23:58,729 DEBUG : Saving module cramfs
2013-01-23T09:24:16 debug: 09:23:58,730 DEBUG : probing buses
2013-01-23T09:24:16 debug: 09:23:58,863 DEBUG : waiting for hardware to initialize
2013-01-23T09:24:16 debug: 09:24:01,290 DEBUG : probing buses
2013-01-23T09:24:16 debug: 09:24:01,412 DEBUG : waiting for hardware to initialize
2013-01-23T09:24:16 info: 09:24:04,507 INFO : getting kickstart file
2013-01-23T09:24:16 info: 09:24:04,530 INFO : doing kickstart... setting it up
2013-01-23T09:24:16 debug: 09:24:04,531 DEBUG : activating device eth0
2013-01-23T09:24:16 info: 09:24:10,548 INFO : wait_for_iface_activation (2309): device eth0 activated
2013-01-23T09:24:16 info: 09:24:10,550 INFO : file location: http://10.0.168.2/cblr/svc/op/ks/system/slave-1
2013-01-23T09:24:16 info: 09:24:10,551 INFO : transferring http://10.0.168.2/cblr/svc/op/ks/system/slave-1
2013-01-23T09:24:16 info: 09:24:11,511 INFO : setting up kickstart
2013-01-23T09:24:16 info: 09:24:11,511 INFO : kickstart forcing text mode
2013-01-23T09:24:16 info: 09:24:11,511 INFO : kickstartFromUrl
2013-01-23T09:24:16 info: 09:24:11,511 INFO : results of url ks, url http://10.0.168.2:8080/centos/6.3/nailgun/x86_64
2013-01-23T09:24:16 err: 09:24:11,512 ERROR : got to setupCdrom without a CD device
2013-01-23T09:24:16 info: 09:24:11,512 INFO : no stage2= given, assuming http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 debug: 09:24:11,512 DEBUG : going to set language to en_US.UTF-8
2013-01-23T09:24:16 info: 09:24:11,512 INFO : setting language to en_US.UTF-8
2013-01-23T09:24:16 info: 09:24:11,551 INFO : starting STEP_METHOD
2013-01-23T09:24:16 debug: 09:24:11,551 DEBUG : loaderData->method is set, adding skipMethodDialog
2013-01-23T09:24:16 debug: 09:24:11,551 DEBUG : skipMethodDialog is set
2013-01-23T09:24:16 info: 09:24:11,560 INFO : starting STEP_STAGE2
2013-01-23T09:24:16 info: 09:24:11,560 INFO : URL_STAGE_MAIN: url is http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 info: 09:24:11,560 INFO : transferring http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/updates.img
2013-01-23T09:24:16 err: 09:24:11,563 ERROR : Error downloading http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/updates.img: HTTP response code said error
2013-01-23T09:24:16 info: 09:24:11,565 INFO : transferring http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/product.img
2013-01-23T09:24:16 err: 09:24:11,568 ERROR : Error downloading http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/product.img: HTTP response code said error
2013-01-23T09:24:16 info: 09:24:11,569 INFO : transferring http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 info: 09:24:12,077 INFO : mounted loopback device /mnt/runtime on /dev/loop0 as /tmp/install.img
2013-01-23T09:24:16 info: 09:24:12,078 INFO : got stage2 at url http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img
2013-01-23T09:24:16 info: 09:24:12,133 INFO : Loading SELinux policy
2013-01-23T09:24:16 info: 09:24:13,072 INFO : getting ready to spawn shell now
2013-01-23T09:24:16 info: 09:24:13,436 INFO : Running anaconda script /usr/bin/anaconda
2013-01-23T09:24:16 info: 09:24:16,109 INFO : CentOS Linux is the highest priority installclass, using it
2013-01-23T09:24:16 warning: 09:24:16,164 WARNING : /usr/lib/python2.6/site-packages/pykickstart/parser.py:713: DeprecationWarning: Script does not end with %end. This syntax has been deprecated. It may be removed from future releases, which will result in a fatal error from kickstart. Please modify your kickstart file to use this updated syntax.
2013-01-23T09:24:17 info: warnings.warn(_("%s does not end with %%end. This syntax has been deprecated. It may be removed from future releases, which will result in a fatal error from kickstart. Please modify your kickstart file to use this updated syntax.") % _("Script"), DeprecationWarning)
2013-01-23T09:24:17 info:
2013-01-23T09:24:17 info: 09:24:16,164 INFO : Running kickstart %%pre script(s)
2013-01-23T09:24:17 warning: 09:24:16,165 WARNING : '/bin/sh' specified as full path
2013-01-23T09:24:17 info: 09:24:17,369 INFO : All kickstart %%pre script(s) have been run
2013-01-23T09:24:17 info: 09:24:17,441 INFO : ISCSID is /usr/sbin/iscsid
2013-01-23T09:24:17 info: 09:24:17,442 INFO : no initiator set
2013-01-23T09:24:17 warning: 09:24:17,646 WARNING : '/usr/libexec/fcoe/fcoe_edd.sh' specified as full path
2013-01-23T09:24:18 info: 09:24:17,674 INFO : No FCoE EDD info found: No FCoE boot disk information is found in EDD!
2013-01-23T09:24:18 info:
2013-01-23T09:24:18 info: 09:24:17,674 INFO : no /etc/zfcp.conf; not configuring zfcp
2013-01-23T09:24:18 info: 09:24:17,776 INFO : created new libuser.conf at /tmp/libuser.JtvFQd with instPath="/mnt/sysimage"
2013-01-23T09:24:18 info: 09:24:17,777 INFO : anaconda called with cmdline = ['/usr/bin/anaconda', '--stage2', 'http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/images/install.img', '--kickstart', '/tmp/ks.cfg', '-T', '--selinux', '--lang', 'en_US.UTF-8', '--keymap', 'us', '--repo', 'http://10.0.168.2:8080/centos/6.3/nailgun/x86_64']
2013-01-23T09:24:18 info: 09:24:17,777 INFO : Display mode = t
2013-01-23T09:24:18 info: 09:24:17,777 INFO : Default encoding = utf-8
2013-01-23T09:24:18 info: 09:24:17,898 INFO : Detected 752M of memory
2013-01-23T09:24:18 info: 09:24:17,899 INFO : Swap attempt of 1504M
2013-01-23T09:24:18 info: 09:24:18,372 INFO : ISCSID is /usr/sbin/iscsid
2013-01-23T09:24:18 info: 09:24:18,373 INFO : no initiator set
2013-01-23T09:24:19 warning: 09:24:18,893 WARNING : Timezone UTC set in kickstart is not valid.
2013-01-23T09:24:19 info: 09:24:19,012 INFO : Detected 752M of memory
2013-01-23T09:24:19 info: 09:24:19,012 INFO : Swap attempt of 1504M
2013-01-23T09:24:19 info: 09:24:19,064 INFO : setting installation environment hostname to slave-1.mirantis.com
2013-01-23T09:24:19 warning: 09:24:19,076 WARNING : step installtype does not exist
2013-01-23T09:24:19 warning: 09:24:19,076 WARNING : step confirminstall does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,077 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,078 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,079 WARNING : step complete does not exist
2013-01-23T09:24:19 warning: 09:24:19,079 WARNING : step complete does not exist
2013-01-23T09:24:19 info: 09:24:19,080 INFO : moving (1) to step setuptime
2013-01-23T09:24:19 debug: 09:24:19,081 DEBUG : setuptime is a direct step
2013-01-23T09:24:19 warning: 09:24:19,081 WARNING : '/usr/sbin/hwclock' specified as full path
2013-01-23T09:24:20 info: 09:24:20,002 INFO : leaving (1) step setuptime
2013-01-23T09:24:20 info: 09:24:20,003 INFO : moving (1) to step autopartitionexecute
2013-01-23T09:24:20 debug: 09:24:20,003 DEBUG : autopartitionexecute is a direct step
2013-01-23T09:24:20 info: 09:24:20,143 INFO : leaving (1) step autopartitionexecute
2013-01-23T09:24:20 info: 09:24:20,143 INFO : moving (1) to step storagedone
2013-01-23T09:24:20 debug: 09:24:20,144 DEBUG : storagedone is a direct step
2013-01-23T09:24:20 info: 09:24:20,144 INFO : leaving (1) step storagedone
2013-01-23T09:24:20 info: 09:24:20,144 INFO : moving (1) to step enablefilesystems
2013-01-23T09:24:20 debug: 09:24:20,144 DEBUG : enablefilesystems is a direct step
2013-01-23T09:25:01 debug: 09:25:00,646 DEBUG : notifying kernel of 'change' event on device /sys/class/block/vda1
2013-01-23T09:25:01 info: 09:25:01,684 INFO : failed to set SELinux context for /mnt/sysimage: [Errno 95] Operation not supported
2013-01-23T09:25:01 debug: 09:25:01,684 DEBUG : isys.py:mount()- going to mount /dev/vda1 on /mnt/sysimage as ext4 with options defaults
2013-01-23T09:25:01 debug: 09:25:01,704 DEBUG : isys.py:mount()- going to mount //dev on /mnt/sysimage/dev as bind with options defaults,bind
2013-01-23T09:25:01 debug: 09:25:01,715 DEBUG : isys.py:mount()- going to mount devpts on /mnt/sysimage/dev/pts as devpts with options gid=5,mode=620
2013-01-23T09:25:02 debug: 09:25:01,728 DEBUG : isys.py:mount()- going to mount tmpfs on /mnt/sysimage/dev/shm as tmpfs with options defaults
2013-01-23T09:25:02 info: 09:25:01,742 INFO : failed to get default SELinux context for /proc: [Errno 2] No such file or directory
2013-01-23T09:25:02 debug: 09:25:01,742 DEBUG : isys.py:mount()- going to mount proc on /mnt/sysimage/proc as proc with options defaults
2013-01-23T09:25:02 info: 09:25:01,746 INFO : failed to get default SELinux context for /proc: [Errno 2] No such file or directory
2013-01-23T09:25:02 debug: 09:25:01,755 DEBUG : isys.py:mount()- going to mount sysfs on /mnt/sysimage/sys as sysfs with options defaults
2013-01-23T09:25:02 info: 09:25:01,762 INFO : leaving (1) step enablefilesystems
2013-01-23T09:25:02 info: 09:25:01,762 INFO : moving (1) to step bootloadersetup
2013-01-23T09:25:02 debug: 09:25:01,762 DEBUG : bootloadersetup is a direct step
2013-01-23T09:25:02 info: 09:25:01,765 INFO : leaving (1) step bootloadersetup
2013-01-23T09:25:02 info: 09:25:01,765 INFO : moving (1) to step reposetup
2013-01-23T09:25:02 debug: 09:25:01,766 DEBUG : reposetup is a direct step
2013-01-23T09:25:02 err: 09:25:01,779 ERROR : Error downloading treeinfo file: [Errno 14] PYCURL ERROR 22 - "The requested URL returned error: 404"
2013-01-23T09:25:02 err: 09:25:01,917 ERROR : Error downloading treeinfo file: [Errno 14] PYCURL ERROR 22 - "The requested URL returned error: 404"
2013-01-23T09:25:02 err: 09:25:01,921 ERROR : Error downloading treeinfo file: [Errno 14] PYCURL ERROR 22 - "The requested URL returned error: 404"
2013-01-23T09:25:02 info: 09:25:01,922 INFO : added repository Nailgun with URL http://10.0.168.2:8080/centos/6.3/nailgun/x86_64
2013-01-23T09:25:02 debug: 09:25:01,930 DEBUG : Grabbing http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/repodata/repomd.xml
2013-01-23T09:25:02 debug: 09:25:01,937 DEBUG : Grabbing http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/repodata/primary.xml.gz
2013-01-23T09:25:02 debug: 09:25:01,944 DEBUG : Grabbing http://10.0.168.2:8080/centos/6.3/nailgun/x86_64/repodata/comps.xml
2013-01-23T09:25:04 info: 09:25:04,547 INFO : leaving (1) step reposetup
2013-01-23T09:25:04 info: 09:25:04,547 INFO : moving (1) to step basepkgsel
2013-01-23T09:25:04 debug: 09:25:04,547 DEBUG : basepkgsel is a direct step
2013-01-23T09:25:04 warning: 09:25:04,665 WARNING : not adding Base group
2013-01-23T09:25:05 info: 09:25:04,810 INFO : leaving (1) step basepkgsel
2013-01-23T09:25:05 info: 09:25:04,811 INFO : moving (1) to step postselection
2013-01-23T09:25:05 debug: 09:25:04,811 DEBUG : postselection is a direct step
2013-01-23T09:25:05 info: 09:25:04,814 INFO : selected kernel package for kernel
2013-01-23T09:25:05 debug: 09:25:05,546 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/ext4/ext4.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,546 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/mbcache.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/jbd2/jbd2.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/fcoe/fcoe.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/fcoe/libfcoe.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/libfc/libfc.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/scsi_transport_fc.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/scsi_tgt.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/xts.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/lrw.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,547 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/gf128mul.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/sha256_generic.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/cbc.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-crypt.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-round-robin.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-multipath.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-snapshot.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-mirror.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,548 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-region-hash.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-log.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-zero.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/dm-mod.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/linear.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid10.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid456.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_raid6_recov.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_pq.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/lib/raid6/raid6_pq.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_xor.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,549 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/xor.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_memcpy.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/crypto/async_tx/async_tx.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid1.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/md/raid0.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/hw/mlx4/mlx4_ib.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/net/mlx4/mlx4_en.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/net/mlx4/mlx4_core.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/ulp/ipoib/ib_ipoib.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_cm.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,550 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_sa.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_mad.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/infiniband/core/ib_core.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/block/virtio_blk.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/net/virtio_net.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/ata/pata_acpi.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/ata/ata_generic.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/ata/ata_piix.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/virtio/virtio_pci.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/virtio/virtio_ring.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/virtio/virtio.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/net/ipv6/ipv6.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,551 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/firmware/iscsi_ibft.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/iscsi_boot_sysfs.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/input/misc/pcspkr.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/firmware/edd.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/block/floppy.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/iscsi_tcp.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/libiscsi_tcp.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/libiscsi.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,552 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/drivers/scsi/scsi_transport_iscsi.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,553 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/squashfs/squashfs.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,553 DEBUG : Checking for DUD module /lib/modules/2.6.32-279.el6.x86_64/kernel/fs/cramfs/cramfs.ko.gz
2013-01-23T09:25:05 debug: 09:25:05,553 DEBUG : selecting kernel-devel
2013-01-23T09:25:05 debug: 09:25:05,561 DEBUG : no package matching kernel-devel.x86_64
2013-01-23T09:25:05 debug: 09:25:05,571 DEBUG : no package matching authconfig
2013-01-23T09:25:05 debug: 09:25:05,580 DEBUG : no package matching system-config-firewall-base
2013-01-23T09:25:08 info: 09:25:08,036 INFO : leaving (1) step postselection
2013-01-23T09:25:08 info: 09:25:08,037 INFO : moving (1) to step install
2013-01-23T09:25:08 info: 09:25:08,039 INFO : leaving (1) step install
2013-01-23T09:25:08 info: 09:25:08,040 INFO : moving (1) to step preinstallconfig
2013-01-23T09:25:08 debug: 09:25:08,040 DEBUG : preinstallconfig is a direct step
2013-01-23T09:25:08 debug: 09:25:08,045 DEBUG : isys.py:mount()- going to mount /selinux on /mnt/sysimage/selinux as selinuxfs with options defaults
2013-01-23T09:25:08 debug: 09:25:08,055 DEBUG : isys.py:mount()- going to mount /proc/bus/usb on /mnt/sysimage/proc/bus/usb as usbfs with options defaults
2013-01-23T09:25:08 info: 09:25:08,069 INFO : copy_to_sysimage: source '/etc/multipath/wwids' does not exist.
2013-01-23T09:25:08 info: 09:25:08,069 INFO : copy_to_sysimage: source '/etc/multipath/bindings' does not exist.
2013-01-23T09:25:08 info: 09:25:08,081 INFO : copy_to_sysimage: source '/etc/multipath/wwids' does not exist.
2013-01-23T09:25:08 info: 09:25:08,081 INFO : copy_to_sysimage: source '/etc/multipath/bindings' does not exist.
2013-01-23T09:25:08 info: 09:25:08,086 INFO : leaving (1) step preinstallconfig
2013-01-23T09:25:08 info: 09:25:08,086 INFO : moving (1) to step installpackages
2013-01-23T09:25:08 debug: 09:25:08,086 DEBUG : installpackages is a direct step
2013-01-23T09:25:08 info: 09:25:08,087 INFO : Preparing to install packages
2013-01-23T09:25:10 info: Installing libgcc-4.4.6-4.el6.x86_64
2013-01-23T09:25:10 info: warning: libgcc-4.4.6-4.el6.x86_64: Header V3 RSA/SHA1 Signature, key ID c105b9de: NOKEY
2013-01-23T09:25:10 info: Installing setup-2.8.14-16.el6.noarch
2013-01-23T09:25:10 info: Installing filesystem-2.4.30-3.el6.x86_64
2013-01-23T09:25:11 info: Installing basesystem-10.0-4.el6.noarch
2013-01-23T09:25:11 info: Installing kernel-headers-2.6.32-279.19.1.el6.centos.plus.x86_64
2013-01-23T09:25:11 info: Installing ca-certificates-2010.63-3.el6_1.5.noarch
2013-01-23T09:25:11 info: Installing ncurses-base-5.7-3.20090208.el6.x86_64
2013-01-23T09:25:12 info: Installing tzdata-2012i-2.el6.noarch
2013-01-23T09:25:13 info: Installing glibc-common-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:25 info: Installing nss-softokn-freebl-3.12.9-11.el6.x86_64
2013-01-23T09:25:25 info: Installing glibc-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:28 info: Installing ncurses-libs-5.7-3.20090208.el6.x86_64
2013-01-23T09:25:28 info: Installing bash-4.1.2-9.el6_2.x86_64
2013-01-23T09:25:28 info: Installing libattr-2.4.44-7.el6.x86_64
2013-01-23T09:25:29 info: Installing libcap-2.16-5.5.el6.x86_64
2013-01-23T09:25:29 info: Installing zlib-1.2.3-27.el6.x86_64
2013-01-23T09:25:29 info: Installing info-4.13a-8.el6.x86_64
2013-01-23T09:25:29 info: Installing db4-4.7.25-17.el6.x86_64
2013-01-23T09:25:29 info: Installing libacl-2.2.49-6.el6.x86_64
2013-01-23T09:25:29 info: Installing audit-libs-2.2-2.el6.x86_64
2013-01-23T09:25:29 info: Installing libcom_err-1.41.12-12.el6.x86_64
2013-01-23T09:25:29 info: Installing nspr-4.9.1-2.el6_3.x86_64
2013-01-23T09:25:29 info: Installing popt-1.13-7.el6.x86_64
2013-01-23T09:25:29 info: Installing chkconfig-1.3.49.3-2.el6.x86_64
2013-01-23T09:25:29 info: Installing nss-util-3.13.5-1.el6_3.x86_64
2013-01-23T09:25:30 info: Installing bzip2-libs-1.0.5-7.el6_0.x86_64
2013-01-23T09:25:30 info: Installing libsepol-2.0.41-4.el6.x86_64
2013-01-23T09:25:30 info: Installing libselinux-2.0.94-5.3.el6.x86_64
2013-01-23T09:25:30 info: Installing shadow-utils-4.1.4.2-13.el6.x86_64
2013-01-23T09:25:30 info: Installing sed-4.2.1-10.el6.x86_64
2013-01-23T09:25:30 info: Installing glib2-2.22.5-7.el6.x86_64
2013-01-23T09:25:30 info: Installing gamin-0.1.10-9.el6.x86_64
2013-01-23T09:25:31 info: Installing libstdc++-4.4.6-4.el6.x86_64
2013-01-23T09:25:31 info: Installing gmp-4.3.1-7.el6_2.2.x86_64
2013-01-23T09:25:31 info: Installing readline-6.0-4.el6.x86_64
2013-01-23T09:25:31 info: Installing sqlite-3.6.20-1.el6.x86_64
2013-01-23T09:25:31 info: Installing file-libs-5.04-13.el6.x86_64
2013-01-23T09:25:31 info: Installing dbus-libs-1.2.24-7.el6_3.x86_64
2013-01-23T09:25:32 info: Installing lua-5.1.4-4.1.el6.x86_64
2013-01-23T09:25:32 info: Installing pcre-7.8-4.el6.x86_64
2013-01-23T09:25:32 info: Installing grep-2.6.3-3.el6.x86_64
2013-01-23T09:25:32 info: Installing libidn-1.18-2.el6.x86_64
2013-01-23T09:25:32 info: Installing gawk-3.1.7-9.el6.x86_64
2013-01-23T09:25:32 info: Installing libuuid-2.17.2-12.7.el6_3.x86_64
2013-01-23T09:25:32 info: Installing libblkid-2.17.2-12.7.el6_3.x86_64
2013-01-23T09:25:32 info: Installing xz-libs-4.999.9-0.3.beta.20091007git.el6.x86_64
2013-01-23T09:25:32 info: Installing elfutils-libelf-0.152-1.el6.x86_64
2013-01-23T09:25:32 info: Installing gdbm-1.8.0-36.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-Pod-Escapes-1.04-127.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-libs-5.10.1-127.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-Module-Pluggable-3.90-127.el6.x86_64
2013-01-23T09:25:32 info: Installing perl-version-0.77-127.el6.x86_64
2013-01-23T09:25:33 info: Installing perl-Pod-Simple-3.13-127.el6.x86_64
2013-01-23T09:25:33 info: Installing perl-5.10.1-127.el6.x86_64
2013-01-23T09:25:39 info: Installing libgpg-error-1.7-4.el6.x86_64
2013-01-23T09:25:39 info: Installing findutils-4.4.2-6.el6.x86_64
2013-01-23T09:25:39 info: Installing libselinux-utils-2.0.94-5.3.el6.x86_64
2013-01-23T09:25:39 info: Installing iptables-1.4.7-5.1.el6_2.x86_64
2013-01-23T09:25:39 info: Installing cyrus-sasl-lib-2.1.23-13.el6_3.1.x86_64
2013-01-23T09:25:39 info: Installing cpio-2.10-11.el6_3.x86_64
2013-01-23T09:25:39 info: Installing binutils-2.20.51.0.2-5.34.el6.x86_64
2013-01-23T09:25:40 info: Installing which-2.19-6.el6.x86_64
2013-01-23T09:25:40 info: Installing libedit-2.11-4.20080712cvs.1.el6.x86_64
2013-01-23T09:25:40 info: Installing sysvinit-tools-2.87-4.dsf.el6.x86_64
2013-01-23T09:25:40 info: Installing tcp_wrappers-libs-7.6-57.el6.x86_64
2013-01-23T09:25:40 info: Installing expat-2.0.1-11.el6_2.x86_64
2013-01-23T09:25:40 info: Installing pth-2.0.7-9.3.el6.x86_64
2013-01-23T09:25:41 info: Installing dbus-glib-0.86-5.el6.x86_64
2013-01-23T09:25:41 info: Installing iproute-2.6.32-20.el6.x86_64
2013-01-23T09:25:41 info: Installing libgcrypt-1.4.5-9.el6_2.2.x86_64
2013-01-23T09:25:41 info: Installing grubby-7.0.15-3.el6.x86_64
2013-01-23T09:25:41 info: Installing libnih-1.0.1-7.el6.x86_64
2013-01-23T09:25:41 info: Installing upstart-0.6.5-12.el6.x86_64
2013-01-23T09:25:41 info: Installing file-5.04-13.el6.x86_64
2013-01-23T09:25:41 info: Installing nss-softokn-3.12.9-11.el6.x86_64
2013-01-23T09:25:41 info: Installing ppl-0.10.2-11.el6.x86_64
2013-01-23T09:25:41 info: Installing cloog-ppl-0.15.7-1.2.el6.x86_64
2013-01-23T09:25:42 info: Installing mpfr-2.4.1-6.el6.x86_64
2013-01-23T09:25:42 info: Installing cpp-4.4.6-4.el6.x86_64
2013-01-23T09:25:43 info: Installing libusb-0.1.12-23.el6.x86_64
2013-01-23T09:25:43 info: Installing libutempter-1.1.5-4.1.el6.x86_64
2013-01-23T09:25:43 info: Installing MAKEDEV-3.24-6.el6.x86_64
2013-01-23T09:25:43 info: Installing vim-minimal-7.2.411-1.8.el6.x86_64
2013-01-23T09:25:43 info: Installing procps-3.2.8-23.el6.x86_64
2013-01-23T09:25:43 info: Installing psmisc-22.6-15.el6_0.1.x86_64
2013-01-23T09:25:43 info: Installing net-tools-1.60-110.el6_2.x86_64
2013-01-23T09:25:43 info: Installing checkpolicy-2.0.22-1.el6.x86_64
2013-01-23T09:25:44 info: Installing libselinux-ruby-2.0.94-5.3.el6.x86_64
2013-01-23T09:25:44 info: Installing augeas-libs-0.9.0-4.el6.x86_64
2013-01-23T09:25:44 info: Installing tar-1.23-7.el6.x86_64
2013-01-23T09:25:44 info: Installing bzip2-1.0.5-7.el6_0.x86_64
2013-01-23T09:25:44 info: Installing pinentry-0.7.6-6.el6.x86_64
2013-01-23T09:25:46 info: Installing libss-1.41.12-12.el6.x86_64
2013-01-23T09:25:46 info: Installing e2fsprogs-libs-1.41.12-12.el6.x86_64
2013-01-23T09:25:46 info: Installing db4-utils-4.7.25-17.el6.x86_64
2013-01-23T09:25:46 info: Installing libgomp-4.4.6-4.el6.x86_64
2013-01-23T09:25:46 info: Installing diffutils-2.8.1-28.el6.x86_64
2013-01-23T09:25:46 info: Installing libxml2-2.7.6-8.el6_3.3.x86_64
2013-01-23T09:25:47 info: Installing glibc-headers-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:48 info: Installing glibc-devel-2.12-1.80.el6_3.6.x86_64
2013-01-23T09:25:49 info: Installing ncurses-5.7-3.20090208.el6.x86_64
2013-01-23T09:25:49 info: Installing groff-1.18.1.4-21.el6.x86_64
2013-01-23T09:25:50 info: Installing less-436-10.el6.x86_64
2013-01-23T09:25:50 info: Installing coreutils-libs-8.4-19.el6.x86_64
2013-01-23T09:25:50 info: Installing gzip-1.3.12-18.el6.x86_64
2013-01-23T09:25:50 info: Installing cracklib-2.8.16-4.el6.x86_64
2013-01-23T09:25:50 info: Installing cracklib-dicts-2.8.16-4.el6.x86_64
2013-01-23T09:25:51 info: Installing coreutils-8.4-19.el6.x86_64
2013-01-23T09:25:52 info: Installing pam-1.1.1-10.el6_2.1.x86_64
2013-01-23T09:25:54 info: Installing module-init-tools-3.9-20.el6.x86_64
2013-01-23T09:25:55 info: Installing hwdata-0.233-7.8.el6.noarch
2013-01-23T09:25:57 info: Installing redhat-logos-60.0.14-12.el6.centos.noarch
2013-01-23T09:25:59 info: Installing plymouth-scripts-0.8.3-24.el6.centos.x86_64
2013-01-23T09:25:59 info: Installing logrotate-3.7.8-15.el6.x86_64
2013-01-23T09:25:59 info: Installing nss-3.13.5-1.el6_3.x86_64
2013-01-23T09:25:59 info: Installing nss-sysinit-3.13.5-1.el6_3.x86_64
2013-01-23T09:25:59 info: Installing nss-tools-3.13.5-1.el6_3.x86_64
2013-01-23T09:26:00 info: Installing openldap-2.4.23-26.el6_3.2.x86_64
2013-01-23T09:26:00 info: Installing compat-readline5-5.2-17.1.el6.x86_64
2013-01-23T09:26:00 info: Installing libcap-ng-0.6.4-3.el6_0.1.x86_64
2013-01-23T09:26:00 info: Installing ethtool-2.6.33-0.3.el6.x86_64
2013-01-23T09:26:00 info: Installing mingetty-1.08-5.el6.x86_64
2013-01-23T09:26:00 info: Installing vconfig-1.9-8.1.el6.x86_64
2013-01-23T09:26:00 info: Installing dmidecode-2.11-2.el6.x86_64
2013-01-23T09:26:00 info: Installing keyutils-libs-1.4-4.el6.x86_64
2013-01-23T09:26:00 info: Installing krb5-libs-1.9-33.el6_3.3.x86_64
2013-01-23T09:26:01 info: Installing openssl-1.0.0-25.el6_3.1.x86_64
2013-01-23T09:26:01 info: Installing ruby-libs-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:03 info: Installing ruby-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:03 info: Installing libssh2-1.2.2-11.el6_3.x86_64
2013-01-23T09:26:03 info: Installing libcurl-7.19.7-26.el6_2.4.x86_64
2013-01-23T09:26:03 info: Installing curl-7.19.7-26.el6_2.4.x86_64
2013-01-23T09:26:03 info: Installing rpm-libs-4.8.0-27.el6.x86_64
2013-01-23T09:26:04 info: Installing rpm-4.8.0-27.el6.x86_64
2013-01-23T09:26:04 info: Installing gnupg2-2.0.14-4.el6.x86_64
2013-01-23T09:26:04 info: Installing gpgme-1.1.8-3.el6.x86_64
2013-01-23T09:26:05 info: Installing ruby-irb-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:05 info: Installing ruby-rdoc-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:06 info: Installing rubygems-1.3.7-1.el6.noarch
2013-01-23T09:26:06 info: Installing rubygem-stomp-1.1.8-1.el6.noarch
2013-01-23T09:26:06 info: warning: rubygem-stomp-1.1.8-1.el6.noarch: Header V3 RSA/SHA256 Signature, key ID 0608b895: NOKEY
2013-01-23T09:26:06 info: Installing mcollective-common-2.2.2-1.el6.noarch
2013-01-23T09:26:06 info: warning: mcollective-common-2.2.2-1.el6.noarch: Header V4 RSA/SHA1 Signature, key ID 4bd6ec30: NOKEY
2013-01-23T09:26:07 info: Installing mcollective-2.2.2-1.el6.noarch
2013-01-23T09:26:07 info: Installing ruby-augeas-0.4.1-1.el6.x86_64
2013-01-23T09:26:07 info: Installing ruby-shadow-1.4.1-13.el6.x86_64
2013-01-23T09:26:07 info: Installing fipscheck-lib-1.2.0-7.el6.x86_64
2013-01-23T09:26:07 info: Installing fipscheck-1.2.0-7.el6.x86_64
2013-01-23T09:26:07 info: Installing ustr-1.0.4-9.1.el6.x86_64
2013-01-23T09:26:07 info: Installing libsemanage-2.0.43-4.1.el6.x86_64
2013-01-23T09:26:07 info: Installing libffi-3.0.5-3.2.el6.x86_64
2013-01-23T09:26:07 info: Installing python-libs-2.6.6-29.el6_3.3.x86_64
2013-01-23T09:26:08 info: Installing python-2.6.6-29.el6_3.3.x86_64
2013-01-23T09:26:12 info: Installing scapy-2.0.0.10-5.el6.noarch
2013-01-23T09:26:13 info: Installing yum-metadata-parser-1.1.2-16.el6.x86_64
2013-01-23T09:26:13 info: Installing pygpgme-0.1-18.20090824bzr68.el6.x86_64
2013-01-23T09:26:13 info: Installing rpm-python-4.8.0-27.el6.x86_64
2013-01-23T09:26:13 info: Installing python-iniparse-0.3.1-2.1.el6.noarch
2013-01-23T09:26:13 info: Installing python-pycurl-7.19.0-8.el6.x86_64
2013-01-23T09:26:13 info: Installing python-urlgrabber-3.9.1-8.el6.noarch
2013-01-23T09:26:13 info: Installing yum-plugin-fastestmirror-1.1.30-14.el6.noarch
2013-01-23T09:26:13 info: Installing yum-3.2.29-30.el6.centos.noarch
2013-01-23T09:26:13 info: Installing dash-0.5.5.1-3.1.el6.x86_64
2013-01-23T09:26:14 info: Installing pciutils-libs-3.1.4-11.el6.x86_64
2013-01-23T09:26:14 info: Installing pciutils-3.1.4-11.el6.x86_64
2013-01-23T09:26:14 info: Installing facter-1.6.17-1.el6.x86_64
2013-01-23T09:26:14 info: Installing plymouth-core-libs-0.8.3-24.el6.centos.x86_64
2013-01-23T09:26:14 info: Installing kbd-misc-1.15-11.el6.noarch
2013-01-23T09:26:14 info: Installing centos-release-6-3.el6.centos.9.x86_64
2013-01-23T09:26:14 info: Installing iputils-20071127-16.el6.x86_64
2013-01-23T09:26:14 info: Installing util-linux-ng-2.17.2-12.7.el6_3.x86_64
2013-01-23T09:26:15 info: Installing initscripts-9.03.31-2.el6.centos.1.x86_64
2013-01-23T09:26:16 info: Installing udev-147-2.42.el6.x86_64
2013-01-23T09:26:16 info: Installing openssh-5.3p1-81.el6_3.x86_64
2013-01-23T09:26:16 info: Installing kbd-1.15-11.el6.x86_64
2013-01-23T09:26:16 info: Installing rsyslog-5.8.10-2.el6.x86_64
2013-01-23T09:26:17 info: Installing exim-4.72-4.el6.x86_64
2013-01-23T09:26:17 info: Installing crontabs-1.10-33.el6.noarch
2013-01-23T09:26:17 info: Installing cronie-anacron-1.4.4-7.el6.x86_64
2013-01-23T09:26:17 info: Installing cronie-1.4.4-7.el6.x86_64
2013-01-23T09:26:17 info: Installing ntpdate-4.2.4p8-2.el6.centos.x86_64
2013-01-23T09:26:17 info: Installing dhcp-common-4.1.1-31.0.1.P1.el6.centos.1.x86_64
2013-01-23T09:26:17 info: Installing kernel-firmware-2.6.32-279.19.1.el6.centos.plus.noarch
2013-01-23T09:26:19 info: Installing libdrm-2.4.25-2.el6.x86_64
2013-01-23T09:26:19 info: Installing plymouth-0.8.3-24.el6.centos.x86_64
2013-01-23T09:26:19 info: Installing dracut-004-284.el6_3.1.noarch
2013-01-23T09:26:19 info: Installing dracut-kernel-004-284.el6_3.1.noarch
2013-01-23T09:26:19 info: Installing kernel-2.6.32-279.19.1.el6.centos.plus.x86_64
2013-01-23T09:26:27 info: Installing dhclient-4.1.1-31.0.1.P1.el6.centos.1.x86_64
2013-01-23T09:26:27 info: Installing ntp-4.2.4p8-2.el6.centos.x86_64
2013-01-23T09:26:27 info: Installing openssh-clients-5.3p1-81.el6_3.x86_64
2013-01-23T09:26:27 info: Installing openssh-server-5.3p1-81.el6_3.x86_64
2013-01-23T09:26:28 info: Installing puppet-2.7.19-1.el6.noarch
2013-01-23T09:26:30 info: Installing policycoreutils-2.0.83-19.24.el6.x86_64
2013-01-23T09:26:31 info: Installing nailgun-net-check-0.0.2-1.x86_64
2013-01-23T09:26:31 info: Installing grub-0.97-77.el6.x86_64
2013-01-23T09:26:31 info: Installing nailgun-mcagents-0.1.0-1.x86_64
2013-01-23T09:26:31 info: Installing ruby-devel-1.8.7.352-7.el6_2.x86_64
2013-01-23T09:26:31 info: Installing wget-1.12-1.4.el6.x86_64
2013-01-23T09:26:31 info: Installing sudo-1.7.4p5-13.el6_3.x86_64
2013-01-23T09:26:31 info: Installing nailgun-agent-0.1.0-1.x86_64
2013-01-23T09:26:31 info: Installing gcc-4.4.6-4.el6.x86_64
2013-01-23T09:26:35 info: Installing e2fsprogs-1.41.12-12.el6.x86_64
2013-01-23T09:26:35 info: Installing iptables-ipv6-1.4.7-5.1.el6_2.x86_64
2013-01-23T09:26:35 info: Installing acl-2.2.49-6.el6.x86_64
2013-01-23T09:26:35 info: Installing make-3.81-20.el6.x86_64
2013-01-23T09:26:35 info: Installing attr-2.4.44-7.el6.x86_64
2013-01-23T09:27:14 info: 09:27:14,602 INFO : leaving (1) step installpackages
2013-01-23T09:27:14 info: 09:27:14,603 INFO : moving (1) to step postinstallconfig
2013-01-23T09:27:14 debug: 09:27:14,604 DEBUG : postinstallconfig is a direct step
2013-01-23T09:27:14 info: 09:27:14,628 INFO : leaving (1) step postinstallconfig
2013-01-23T09:27:14 info: 09:27:14,628 INFO : moving (1) to step writeconfig
2013-01-23T09:27:14 debug: 09:27:14,629 DEBUG : writeconfig is a direct step
2013-01-23T09:27:14 info: 09:27:14,629 INFO : Writing main configuration
2013-01-23T09:27:14 warning: 09:27:14,638 WARNING : '/usr/sbin/authconfig' specified as full path
2013-01-23T09:27:14 err: 09:27:14,661 ERROR : Error running /usr/sbin/authconfig: No such file or directory
2013-01-23T09:27:14 err: 09:27:14,662 ERROR : Error running ['--update', '--nostart', '--enableshadow', '--passalgo=sha512']: Error running /usr/sbin/authconfig: No such file or directory
2013-01-23T09:27:14 warning: 09:27:14,665 WARNING : '/usr/sbin/lokkit' specified as full path
2013-01-23T09:27:14 err: 09:27:14,680 ERROR : Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 err: 09:27:14,681 ERROR : lokkit run failed: Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 warning: 09:27:14,681 WARNING : '/usr/sbin/lokkit' specified as full path
2013-01-23T09:27:14 err: 09:27:14,694 ERROR : Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 err: 09:27:14,695 ERROR : lokkit run failed: Error running /usr/sbin/lokkit: No such file or directory
2013-01-23T09:27:14 info: 09:27:14,798 INFO : removing libuser.conf at /tmp/libuser.JtvFQd
2013-01-23T09:27:14 info: 09:27:14,799 INFO : created new libuser.conf at /tmp/libuser.JtvFQd with instPath="/mnt/sysimage"
2013-01-23T09:27:14 info: 09:27:14,821 INFO : leaving (1) step writeconfig
2013-01-23T09:27:14 info: 09:27:14,821 INFO : moving (1) to step firstboot
2013-01-23T09:27:14 debug: 09:27:14,821 DEBUG : firstboot is a direct step
2013-01-23T09:27:14 info: 09:27:14,821 INFO : leaving (1) step firstboot
2013-01-23T09:27:14 info: 09:27:14,822 INFO : moving (1) to step instbootloader
2013-01-23T09:27:14 debug: 09:27:14,822 DEBUG : instbootloader is a direct step
2013-01-23T09:27:14 info: *** FINISHED INSTALLING PACKAGES ***
2013-01-23T09:27:15 warning: 09:27:14,989 WARNING : '/sbin/grub-install' specified as full path
2013-01-23T09:27:15 warning: 09:27:15,038 WARNING : '/sbin/grub' specified as full path
2013-01-23T09:27:17 info: 09:27:17,176 INFO : leaving (1) step instbootloader
2013-01-23T09:27:17 info: 09:27:17,177 INFO : moving (1) to step reipl
2013-01-23T09:27:17 debug: 09:27:17,177 DEBUG : reipl is a direct step
2013-01-23T09:27:17 info: 09:27:17,177 INFO : leaving (1) step reipl
2013-01-23T09:27:17 info: 09:27:17,177 INFO : moving (1) to step writeksconfig
2013-01-23T09:27:17 debug: 09:27:17,177 DEBUG : writeksconfig is a direct step
2013-01-23T09:27:17 info: 09:27:17,177 INFO : Writing autokickstart file
2013-01-23T09:27:17 info: 09:27:17,183 INFO : leaving (1) step writeksconfig
2013-01-23T09:27:17 info: 09:27:17,183 INFO : moving (1) to step setfilecon
2013-01-23T09:27:17 debug: 09:27:17,183 DEBUG : setfilecon is a direct step
2013-01-23T09:27:17 info: 09:27:17,184 INFO : setting SELinux contexts for anaconda created files
2013-01-23T09:27:19 info: 09:27:18,940 INFO : leaving (1) step setfilecon
2013-01-23T09:27:19 info: 09:27:18,940 INFO : moving (1) to step copylogs
2013-01-23T09:27:19 debug: 09:27:18,941 DEBUG : copylogs is a direct step
2013-01-23T09:27:19 info: 09:27:18,941 INFO : Copying anaconda logs
2013-01-23T09:27:19 info: 09:27:18,943 INFO : leaving (1) step copylogs
2013-01-23T09:27:19 info: 09:27:18,943 INFO : moving (1) to step methodcomplete
2013-01-23T09:27:19 debug: 09:27:18,943 DEBUG : methodcomplete is a direct step
2013-01-23T09:27:19 info: 09:27:18,943 INFO : leaving (1) step methodcomplete
2013-01-23T09:27:19 info: 09:27:18,943 INFO : moving (1) to step postscripts
2013-01-23T09:27:19 debug: 09:27:18,944 DEBUG : postscripts is a direct step
2013-01-23T09:27:19 info: 09:27:18,944 INFO : Running kickstart %%post script(s)
2013-01-23T09:27:19 warning: 09:27:18,946 WARNING : '/bin/sh' specified as full path
2013-01-23T09:28:30 info: 09:28:30,453 INFO : All kickstart %%post script(s) have been run
2013-01-23T09:28:30 info: 09:28:30,454 INFO : leaving (1) step postscripts
2013-01-23T09:28:30 info: 09:28:30,454 INFO : moving (1) to step dopostaction
2013-01-23T09:28:30 debug: 09:28:30,455 DEBUG : dopostaction is a direct step
2013-01-23T09:28:30 info: 09:28:30,455 INFO : leaving (1) step dopostaction

View File

@ -0,0 +1,86 @@
2013-01-30T11:46:13 notice: Reopening log files
2013-01-30T11:46:37 notice: (/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created
2013-01-30T11:46:39 notice: (/Stage[main]/Nova::Compute/Package[bridge-utils]/ensure) created
2013-01-30T11:46:51 notice: (/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created
2013-01-30T11:46:54 notice: (/Stage[main]/Nova::Utilities/Package[screen]/ensure) created
2013-01-30T11:46:54 notice: (/Stage[main]/Nova::Compute::Libvirt/Exec[symlink-qemu-kvm]/returns) executed successfully
2013-01-30T11:46:59 notice: (/Stage[main]/Nova::Utilities/Package[parted]/ensure) created
2013-01-30T11:48:03 notice: (/Stage[main]/Nova::Compute::Libvirt/Package[libvirt]/ensure) created
2013-01-30T11:48:33 notice: (/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created
2013-01-30T11:49:33 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Compute::Libvirt/Package[dnsmasq-utils]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/network_manager]/value) value changed 'nova.network.manager.FlatDHCPManager' to 'nova.network.manager.VlanManager'
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Compute/Nova_config[DEFAULT/vncserver_proxyclient_address]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_hosts]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_compute_listen]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/ec2_listen]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'nova'
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/glance_api_servers]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_userid]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_ha_queues]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_password]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/verbose]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/enabled_apis]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/sql_connection]/value) value changed 'mysql://nova:nova@localhost/nova' to 'mysql://nova:nova@192.168.0.6/nova'
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Compute/Nova_config[DEFAULT/vnc_enabled]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_virtual_host]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/image_service]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/volume_api_class]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/force_dhcp_release]/value) value changed 'True' to 'true'
2013-01-30T11:49:35 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_port]/ensure) created
2013-01-30T11:49:35 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_start]/ensure) created
2013-01-30T11:49:36 notice: (/Stage[main]/Nova/User[nova]/shell) shell changed '/sbin/nologin' to '/bin/bash'
2013-01-30T11:49:36 notice: (/Stage[main]/Nova/File[/var/log/nova]/group) group changed 'root' to 'nova'
2013-01-30T11:49:36 notice: (/Stage[main]/Nova/File[/var/log/nova]/mode) mode changed '0755' to '0751'
2013-01-30T11:49:36 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/api_paste_config]/ensure) created
2013-01-30T11:49:36 notice: (/Stage[main]/Nova::Compute/Nova_config[DEFAULT/novncproxy_base_url]/ensure) created
2013-01-30T11:49:36 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/public_interface]/ensure) created
2013-01-30T11:49:36 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/service_down_time]/ensure) created
2013-01-30T11:49:36 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T11:49:38 notice: (/Stage[main]/Nova/Package[python-amqp]/ensure) created
2013-01-30T11:49:39 notice: (/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/vncserver_listen]/ensure) created
2013-01-30T11:50:44 notice: (/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Package[nova-compute]/ensure) created
2013-01-30T11:50:44 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_volume_listen]/ensure) created
2013-01-30T11:50:44 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T11:50:44 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/metadata_listen]/ensure) created
2013-01-30T11:50:44 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/auth_strategy]/ensure) created
2013-01-30T11:50:48 notice: (/Stage[main]/Nova::Compute::Libvirt/Package[avahi]/ensure) created
2013-01-30T11:50:53 notice: (/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created
2013-01-30T11:50:53 notice: (/Stage[main]/Osnailyfacter::Test_compute/File[/tmp/compute-file]/ensure) defined content as '{md5}1b7628cdc1398d36048932f1eff47a63'
2013-01-30T11:50:53 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rpc_backend]/value) value changed 'nova.openstack.common.rpc.impl_qpid' to 'nova.rpc.impl_kombu'
2013-01-30T11:50:53 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/fixed_range]/ensure) created
2013-01-30T11:50:53 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/dhcp_domain]/ensure) created
2013-01-30T11:50:53 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Sysctl[net.ipv4.ip_forward]/val) val changed '0' to '1'
2013-01-30T11:50:56 notice: (/Stage[main]/Nova::Network/Nova::Generic_service[network]/Package[nova-network]/ensure) created
2013-01-30T11:50:56 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/metadata_host]/ensure) created
2013-01-30T11:50:56 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/memcached_servers]/ensure) created
2013-01-30T11:50:58 notice: (/Stage[main]/Openstack::Compute/Augeas[sysconfig-libvirt]/returns) executed successfully
2013-01-30T11:50:58 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/send_arp_for_ha]/ensure) created
2013-01-30T11:50:58 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/multi_host]/ensure) created
2013-01-30T11:50:58 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/allow_resize_to_same_host]/ensure) created
2013-01-30T11:50:58 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) net.ipv4.ip_forward = 1
2013-01-30T11:50:58 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) executed successfully
2013-01-30T11:50:59 notice: (/Stage[main]/Openstack::Compute/Augeas[libvirt-conf]/returns) executed successfully
2013-01-30T11:50:59 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[messagebus]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:50:59 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[avahi-daemon]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:50:59 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[libvirt]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:51:00 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[libvirt]) Triggered 'refresh' from 1 events
2013-01-30T11:51:00 notice: (/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/libvirt_type]/ensure) created
2013-01-30T11:51:00 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_interface]/ensure) created
2013-01-30T11:51:00 notice: (/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/connection_type]/ensure) created
2013-01-30T11:51:00 notice: (/Stage[main]/Nova/File[/etc/nova/nova.conf]/owner) owner changed 'root' to 'nova'
2013-01-30T11:51:00 notice: (/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed
2013-01-30T11:51:00 notice: (/Stage[main]/Nova/Exec[post-nova_config]) Triggered 'refresh' from 86 events
2013-01-30T11:51:01 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:51:01 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=4334) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:51:01 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:51:02 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:51:02 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=4398) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:51:02 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]) Triggered 'refresh' from 1 events
2013-01-30T11:51:02 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:51:03 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 47 events
2013-01-30T11:51:05 notice: (/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:51:07 notice: (/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]) Triggered 'refresh' from 4 events
2013-01-30T11:51:09 notice: (/Stage[main]/Nova::Network/Nova::Generic_service[network]/Service[nova-network]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:51:11 notice: (/Stage[main]/Nova::Network/Nova::Generic_service[network]/Service[nova-network]) Triggered 'refresh' from 4 events
2013-01-30T11:51:12 notice: Finished catalog run in 287.81 seconds

View File

@ -0,0 +1,950 @@
2013-01-30T10:46:55 notice: Reopening log files
2013-01-30T10:47:46 notice: (/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created
2013-01-30T10:47:46 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat]/ensure) created
2013-01-30T10:47:49 notice: (/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created
2013-01-30T10:47:49 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat/bin]/ensure) created
2013-01-30T10:47:49 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat/bin/concatfragments.sh]/ensure) defined content as '{md5}256169ee61115a6b717b2844d2ea3128'
2013-01-30T10:47:51 notice: (/Stage[main]/Nova::Utilities/Package[screen]/ensure) created
2013-01-30T10:47:53 notice: (/Stage[main]/Xinetd/Package[xinetd]/ensure) created
2013-01-30T10:47:54 notice: (/Stage[main]/Memcached/User[memcached]/ensure) created
2013-01-30T10:47:57 notice: (/Stage[main]/Memcached/Package[memcached]/ensure) created
2013-01-30T10:47:57 notice: (/Stage[main]/Memcached/File[/etc/sysconfig/memcached]/content) content changed '{md5}05503957e3796fbe6fddd756a7a102a0' to '{md5}3a3961445528bdeda6d7b8b5564dfcfc'
2013-01-30T10:47:58 notice: (/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:47:59 notice: (/Stage[main]/Memcached/Service[memcached]) Triggered 'refresh' from 1 events
2013-01-30T10:47:59 notice: (/Stage[main]/Osnailyfacter::Test_controller/File[/tmp/controller-file]/ensure) defined content as '{md5}7f5c51282c4b1242e12addba8cc331fa'
2013-01-30T10:48:04 notice: (/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created
2013-01-30T10:48:08 notice: (/Stage[main]/Swift::Xfs/Package[xfsprogs]/ensure) created
2013-01-30T10:48:12 notice: (/Stage[main]/Nova::Utilities/Package[parted]/ensure) created
2013-01-30T10:48:12 notice: (/Stage[main]/Openstack::Firewall/File[iptables]/ensure) defined content as '{md5}7efd2b2c624fb433a1538c229ae20e1f'
2013-01-30T10:48:12 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T10:48:33 notice: (/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created
2013-01-30T10:48:39 notice: (/Stage[main]/Openstack::Glance/Package[swift]/ensure) created
2013-01-30T10:49:38 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created
2013-01-30T10:49:38 notice: (/Stage[main]/Openstack::Swift::Proxy/File[/tmp/swift_keystone_test.rb]/ensure) defined content as '{md5}9921c28fffe90ef152603443c7a9a4d3'
2013-01-30T10:49:45 notice: (/Stage[main]/Nova/Package[python-amqp]/ensure) created
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/var/lib/swift]/ensure) created
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/etc/swift]/owner) owner changed 'root' to 'swift'
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/etc/swift]/group) group changed 'root' to 'swift'
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/etc/swift]/mode) mode changed '0755' to '2770'
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/etc/swift/swift.conf]/content) content changed '{md5}5f2a4640b3293fedc9e8e249408f3044' to '{md5}32a608237a8eeb3fb48db3fda8a239e0'
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/etc/swift/swift.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/var/cache/swift]/ensure) created
2013-01-30T10:49:45 notice: (/Stage[main]/Swift/File[/var/run/swift]/group) group changed 'root' to 'swift'
2013-01-30T10:49:45 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/content) content changed '{md5}9ff8cc688dd9f0dfc45e5afd25c427a7' to '{md5}1680192de4cef61a23dca13cdfff07c7'
2013-01-30T10:49:45 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T10:49:45 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/group) group changed 'root' to 'swift'
2013-01-30T10:49:45 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/mode) mode changed '0600' to '0664'
2013-01-30T10:49:48 notice: (/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created
2013-01-30T10:49:48 notice: (/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as '{md5}70358e4313c4c6852303bf9d0f24fb81'
2013-01-30T10:50:08 notice: (/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created
2013-01-30T10:50:08 notice: (/Stage[main]/Horizon/File[/var/log/horizon]/ensure) created
2013-01-30T10:50:08 notice: (/Stage[main]/Horizon/File_line[horizon_redirect_rule]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Package[socat]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[mysqld]/Haproxy::Balancermember[mysqld]/Concat::Fragment[mysqld_balancermember_mysqld]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/95-mysqld_mysqld_mysqld_balancermember_mysqld]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-api]/Haproxy::Balancermember[glance-api]/Concat::Fragment[glance-api_balancermember_glance-api]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/80-glance-api_glance-api_glance-api_balancermember_glance-api]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-1]/Haproxy::Balancermember[nova-api-1]/Concat::Fragment[nova-api-1_balancermember_nova-api-1]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/40-nova-api-1_nova-api-1_nova-api-1_balancermember_nova-api-1]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[quantum]/Haproxy::Balancermember[quantum]/Concat::Fragment[quantum_balancermember_quantum]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/85-quantum_quantum_quantum_balancermember_quantum]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-reg]/Haproxy::Balancermember[glance-reg]/Concat::Fragment[glance-reg_balancermember_glance-reg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/90-glance-reg_glance-reg_glance-reg_balancermember_glance-reg]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-1]/Haproxy::Listen[nova-api-1]/Concat::Fragment[nova-api-1-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/39-nova-api-1_nova-api-1-listen_block]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[mysqld]/Haproxy::Listen[mysqld]/Concat::Fragment[mysqld-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/94-mysqld_mysqld-listen_block]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[swift]/Haproxy::Listen[swift]/Concat::Fragment[swift-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/95-swift_swift-listen_block]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments.concat]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Haproxy/Concat::Fragment[haproxy-base]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/10_haproxy-base]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Haproxy/Concat::Fragment[00-header]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/01_00-header]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-api]/Haproxy::Listen[glance-api]/Concat::Fragment[glance-api-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/79-glance-api_glance-api-listen_block]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[swift]/Haproxy::Balancermember[swift]/Concat::Fragment[swift_balancermember_swift]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/96-swift_swift_swift_balancermember_swift]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments.concat.out]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-4]/Haproxy::Listen[nova-api-4]/Concat::Fragment[nova-api-4-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/69-nova-api-4_nova-api-4-listen_block]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-2]/Haproxy::Balancermember[nova-api-2]/Concat::Fragment[nova-api-2_balancermember_nova-api-2]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/50-nova-api-2_nova-api-2_nova-api-2_balancermember_nova-api-2]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-2]/Haproxy::Listen[nova-api-2]/Concat::Fragment[nova-api-2-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/49-nova-api-2_nova-api-2-listen_block]/ensure) created
2013-01-30T10:50:11 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-1]/Haproxy::Listen[keystone-1]/Concat::Fragment[keystone-1-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/19-keystone-1_keystone-1-listen_block]/ensure) created
2013-01-30T10:50:12 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/content) content changed '{md5}d114fa06522fa1016ab2bdede4cfd7bf' to '{md5}6f16dcbc1a76f8ae51dd6fe7157af42d'
2013-01-30T10:50:12 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/owner) owner changed 'root' to 'apache'
2013-01-30T10:50:12 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/group) group changed 'root' to 'apache'
2013-01-30T10:50:12 notice: (/Stage[main]/Swift/File[/home/swift]/ensure) created
2013-01-30T10:50:12 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Create[container]/Exec[create_container]/returns) executed successfully
2013-01-30T10:50:12 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Create[object]/Exec[create_object]/returns) executed successfully
2013-01-30T10:50:12 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) node name: 192.168.0.2:6001
2013-01-30T10:50:12 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) available devs: []
2013-01-30T10:50:12 err: (/Stage[main]/Swift::Storage::Container/Ring_container_device[192.168.0.2:6001]) Could not evaluate: Device not found check device on 192.168.0.2:6001
2013-01-30T10:50:12 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:50:12 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]) Skipping because of failed dependencies
2013-01-30T10:50:12 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:50:12 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]) Skipping because of failed dependencies
2013-01-30T10:50:13 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) node name: 192.168.0.2:6000
2013-01-30T10:50:13 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) available devs: []
2013-01-30T10:50:13 err: (/Stage[main]/Swift::Storage::Object/Ring_object_device[192.168.0.2:6000]) Could not evaluate: Device not found check device on 192.168.0.2:6000
2013-01-30T10:50:13 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:50:13 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]) Skipping because of failed dependencies
2013-01-30T10:50:13 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:50:13 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]) Skipping because of failed dependencies
2013-01-30T10:50:22 notice: (/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/File[/srv/node]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments.concat]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments.concat]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments.concat.out]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat::Fragment[swift-account-6002]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments/00_swift-account-6002]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments.concat.out]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-2]/Haproxy::Balancermember[keystone-2]/Concat::Fragment[keystone-2_balancermember_keystone-2]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/30-keystone-2_keystone-2_keystone-2_balancermember_keystone-2]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Rsync::Server/File[/etc/rsync.d]/ensure) created
2013-01-30T10:51:58 notice: (/Stage[main]/Rsync::Server/File[/etc/rsync.d/header]/ensure) defined content as '{md5}81c93e6021cb444faaac0cd902198ce2'
2013-01-30T10:51:58 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Rsync::Server::Module[container]/File[/etc/rsync.d/frag-container]/ensure) defined content as '{md5}f34ae17bc92c6dac2f9e4535cc52d6f4'
2013-01-30T10:51:58 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.102]/File[/etc/dhcp/dhclient-eth0.102-down-hooks]/ensure) defined content as '{md5}d5bdb51453a200b5822441bcc6072cae'
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf]/ensure) created
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments]/ensure) created
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments.concat.out]/ensure) created
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat::Fragment[swift-object-6000]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments/00_swift-object-6000]/ensure) created
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Rsync::Server::Module[object]/File[/etc/rsync.d/frag-object]/ensure) defined content as '{md5}6599fe18f7573876515cdb791157facc'
2013-01-30T10:51:59 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Sysctl[net.ipv4.ip_forward]/val) val changed '0' to '1'
2013-01-30T10:51:59 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.101]/File[/etc/dhcp/dhclient-eth0.101-down-hooks]/ensure) defined content as '{md5}150162dde6c8e637d6192de84fd26fde'
2013-01-30T10:51:59 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.101]/File[/etc/dhcp/dhclient-eth0.101-up-hooks]/ensure) defined content as '{md5}beeb771a9fc99bc9f3b01258a2c947a2'
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Create[account]/Exec[create_account]/returns) executed successfully
2013-01-30T10:51:59 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) node name: 192.168.0.2:6002
2013-01-30T10:51:59 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) available devs: []
2013-01-30T10:51:59 err: (/Stage[main]/Swift::Storage::Account/Ring_account_device[192.168.0.2:6002]) Could not evaluate: Device not found check device on 192.168.0.2:6002
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:51:59 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]) Skipping because of failed dependencies
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:51:59 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]) Skipping because of failed dependencies
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:51:59 warning: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Skipping because of failed dependencies
2013-01-30T10:51:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat::Fragment[swift-container-6001]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments/00_swift-container-6001]/ensure) created
2013-01-30T10:52:00 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/Exec[concat_/etc/swift/container-server.conf]/returns) executed successfully
2013-01-30T10:52:00 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/Exec[concat_/etc/swift/container-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T10:52:00 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/etc/swift/container-server.conf]/ensure) defined content as '{md5}a6951489e072481239e499a6eb753933'
2013-01-30T10:52:00 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-2]/Haproxy::Listen[keystone-2]/Concat::Fragment[keystone-2-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/29-keystone-2_keystone-2-listen_block]/ensure) created
2013-01-30T10:52:00 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) net.ipv4.ip_forward = 1
2013-01-30T10:52:00 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) executed successfully
2013-01-30T10:52:00 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/File[/srv/loopback-device]/ensure) created
2013-01-30T10:52:00 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[horizon]/Haproxy::Listen[horizon]/Concat::Fragment[horizon-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/14-horizon_horizon-listen_block]/ensure) created
2013-01-30T10:52:00 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/content) content changed '{md5}fd32314f3157aaf15712e6da7758060d' to '{md5}b5745641b26d03ebddd24b143319f385'
2013-01-30T10:52:00 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/owner) owner changed 'root' to 'swift'
2013-01-30T10:52:00 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/group) group changed 'root' to 'swift'
2013-01-30T10:52:00 notice: (/Stage[main]/Xinetd/Service[xinetd]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:52:00 notice: (/Stage[main]/Xinetd/Service[xinetd]) Triggered 'refresh' from 3 events
2013-01-30T10:52:01 notice: (/Stage[main]/Horizon/File_line[httpd_listen_on_internal_network_only]/ensure) created
2013-01-30T10:52:01 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:52:02 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]) Triggered 'refresh' from 6 events
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0+0 records in
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0+0 records out
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0 bytes (0 B) copied, 1.4742e-05 s, 0.0 kB/s
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) executed successfully
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) meta-data=/srv/loopback-device/1 isize=1024 agcount=4, agsize=65548 blks
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sectsz=512 attr=2
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) data = bsize=4096 blocks=262189, imaxpct=25
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sunit=0 swidth=0 blks
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) naming =version 2 bsize=4096 ascii-ci=0
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) log =internal log bsize=4096 blocks=2560, version=2
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sectsz=512 sunit=0 blks, lazy-count=1
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) realtime =none extsz=4096 blocks=0, rtextents=0
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]) Triggered 'refresh' from 1 events
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/File[/srv/node/1]/ensure) created
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Mount[/srv/node/1]/ensure) defined 'ensure' as 'defined'
2013-01-30T10:52:03 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Mount[/srv/node/1]) Triggered 'refresh' from 2 events
2013-01-30T10:52:04 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[mount_1]/returns) executed successfully
2013-01-30T10:52:04 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[mount_1]) Triggered 'refresh' from 1 events
2013-01-30T10:52:04 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[fix_mount_permissions_1]) Triggered 'refresh' from 3 events
2013-01-30T10:52:04 notice: (/Stage[main]/Glance/Group[glance]/ensure) created
2013-01-30T10:52:04 notice: (/Stage[main]/Glance/User[glance]/ensure) created
2013-01-30T10:52:08 notice: (/Stage[main]/Glance/Package[glance]/ensure) created
2013-01-30T10:52:08 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_auth_address]/value) value changed '127.0.0.1:5000/v2.0/' to 'http://192.168.0.6:5000/v2.0/'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dirname]/ensure) created
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_user]/ensure) created
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:nova@192.168.0.6/glance'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/bind_host]/value) value changed '0.0.0.0' to '192.168.0.2'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/default_store]/value) value changed 'file' to 'swift'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_create_container_on_put]/value) value changed 'False' to 'True'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[paste_deploy/flavor]/ensure) created
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_user]/value) value changed 'jdoe:jdoe' to 'services:glance'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_tenant_name]/ensure) created
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_key]/value) value changed 'a86850deb2742ec3cb41518e26aa2d89' to 'nova'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_password]/ensure) created
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '192.168.0.6'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_uri]/ensure) created
2013-01-30T10:52:09 notice: (/Stage[main]/Glance/File[/etc/glance/]/owner) owner changed 'root' to 'glance'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance/File[/etc/glance/]/mode) mode changed '0755' to '0770'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '192.168.0.6'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance/File[glance-logging.conf]/ensure) defined content as '{md5}71fa2daa8e89a992f4225e850fd879e4'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-cache.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-api.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T10:52:09 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T10:52:11 notice: (/Stage[main]/Keepalived::Install/Package[keepalived]/ensure) created
2013-01-30T10:52:11 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf]/ensure) created
2013-01-30T10:52:11 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments]/ensure) created
2013-01-30T10:52:11 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived::Instance[2]/Concat::Fragment[keepalived_2]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/50_keepalived_2]/ensure) created
2013-01-30T10:52:11 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments.concat]/ensure) created
2013-01-30T10:52:11 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived::Instance[1]/Concat::Fragment[keepalived_1]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/50_keepalived_1]/ensure) created
2013-01-30T10:52:11 notice: (/Stage[main]/Keepalived::Config/Concat::Fragment[global_config]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/01_global_config]/ensure) created
2013-01-30T10:52:12 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]/returns) /var/lib/puppet/concat/bin/concatfragments.sh: line 108: [: too many arguments
2013-01-30T10:52:12 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]/returns) executed successfully
2013-01-30T10:52:12 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]) Triggered 'refresh' from 5 events
2013-01-30T10:52:12 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/etc/keepalived/keepalived.conf]/content) content changed '{md5}e79dca9e58978e8035e7d4ad25e2ce67' to '{md5}6c3df9d008e205fc97ac1501178e7a76'
2013-01-30T10:52:12 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/etc/keepalived/keepalived.conf]/mode) mode changed '0640' to '0644'
2013-01-30T10:52:12 notice: (/Stage[main]/Keepalived::Service/Service[keepalived]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:52:12 notice: (/Stage[main]/Keepalived::Service/Service[keepalived]) Triggered 'refresh' from 1 events
2013-01-30T10:52:12 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-1]/Haproxy::Balancermember[keystone-1]/Concat::Fragment[keystone-1_balancermember_keystone-1]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/20-keystone-1_keystone-1_keystone-1_balancermember_keystone-1]/ensure) created
2013-01-30T10:52:12 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-4]/Haproxy::Balancermember[nova-api-4]/Concat::Fragment[nova-api-4_balancermember_nova-api-4]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/70-nova-api-4_nova-api-4_nova-api-4_balancermember_nova-api-4]/ensure) created
2013-01-30T10:52:12 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[quantum]/Haproxy::Listen[quantum]/Concat::Fragment[quantum-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/84-quantum_quantum-listen_block]/ensure) created
2013-01-30T10:52:12 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/ensure) created
2013-01-30T10:52:12 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/ensure) created
2013-01-30T10:52:15 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Package[swift-account]/ensure) created
2013-01-30T10:52:18 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Package[swift-container]/ensure) created
2013-01-30T10:52:19 err: (/Service[swift-container-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-container-replicator]: Execution of '/usr/bin/swift-init container-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T10:52:19 notice: (/Service[swift-container-replicator]) Triggered 'refresh' from 2 events
2013-01-30T10:52:19 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/File[/etc/swift/container-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T10:52:19 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/File[/etc/swift/container-server/]/group) group changed 'root' to 'swift'
2013-01-30T10:52:21 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Package[swift-object]/ensure) created
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Service[swift-container]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Service[swift-container]) Triggered 'refresh' from 2 events
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:22 warning: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Skipping because of failed dependencies
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:22 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Skipping because of failed dependencies
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:22 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Skipping because of failed dependencies
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:22 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Skipping because of failed dependencies
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:22 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Skipping because of failed dependencies
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:22 warning: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Skipping because of failed dependencies
2013-01-30T10:52:22 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:52:23 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:52:23 warning: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Skipping because of failed dependencies
2013-01-30T10:52:23 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T10:54:24 notice: (/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created
2013-01-30T10:54:24 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) Stopping rabbitmq-server: RabbitMQ is not running
2013-01-30T10:54:24 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) rabbitmq-server.
2013-01-30T10:54:24 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) executed successfully
2013-01-30T10:54:24 notice: (/Stage[main]/Rabbitmq::Server/File[erlang_cookie]/content) content changed '{md5}025d224a10b6a0babb8f5f7407f77a96' to '{md5}b28788594da393660db1e4f20d296c10'
2013-01-30T10:54:24 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq.config]/ensure) defined content as '{md5}f22d1aa923c4727590fa559e8643fcf8'
2013-01-30T10:54:24 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq-env.config]/ensure) defined content as '{md5}2980dac99b8f2195a50ef6e459ffedae'
2013-01-30T10:54:28 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:54:36 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]) Triggered 'refresh' from 1 events
2013-01-30T10:54:36 notice: (/Stage[main]/Rabbitmq::Server/Rabbitmq_user[guest]/ensure) removed
2013-01-30T10:54:37 notice: (/Stage[main]/Nova::Rabbitmq/Rabbitmq_user[nova]/ensure) created
2013-01-30T10:54:38 notice: (/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[nova@/]/ensure) created
2013-01-30T10:54:38 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-reg]/Haproxy::Listen[glance-reg]/Concat::Fragment[glance-reg-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/89-glance-reg_glance-reg-listen_block]/ensure) created
2013-01-30T10:54:39 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/Exec[concat_/etc/swift/account-server.conf]/returns) executed successfully
2013-01-30T10:54:39 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/Exec[concat_/etc/swift/account-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T10:54:39 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/content) content changed '{md5}68d75de69955941c8fd8f40b0dbbb31b' to '{md5}58d15925aa4d741323ce5bfd41680fe2'
2013-01-30T10:54:39 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T10:54:39 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/mode) mode changed '0660' to '0640'
2013-01-30T10:54:40 err: (/Service[swift-account-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-account-replicator]: Execution of '/usr/bin/swift-init account-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T10:54:40 notice: (/Service[swift-account-replicator]) Triggered 'refresh' from 2 events
2013-01-30T10:54:41 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Service[swift-account]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:54:42 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Service[swift-account]) Triggered 'refresh' from 2 events
2013-01-30T10:54:43 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0+0 records in
2013-01-30T10:54:43 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0+0 records out
2013-01-30T10:54:43 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0 bytes (0 B) copied, 1.4652e-05 s, 0.0 kB/s
2013-01-30T10:54:43 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) executed successfully
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) meta-data=/srv/loopback-device/2 isize=1024 agcount=4, agsize=65548 blks
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sectsz=512 attr=2
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) data = bsize=4096 blocks=262189, imaxpct=25
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sunit=0 swidth=0 blks
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) naming =version 2 bsize=4096 ascii-ci=0
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) log =internal log bsize=4096 blocks=2560, version=2
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sectsz=512 sunit=0 blks, lazy-count=1
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) realtime =none extsz=4096 blocks=0, rtextents=0
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]) Triggered 'refresh' from 1 events
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/File[/srv/node/2]/ensure) created
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Mount[/srv/node/2]/ensure) defined 'ensure' as 'defined'
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Mount[/srv/node/2]) Triggered 'refresh' from 2 events
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[mount_2]/returns) executed successfully
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[mount_2]) Triggered 'refresh' from 1 events
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[fix_mount_permissions_2]) Triggered 'refresh' from 3 events
2013-01-30T10:54:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Rsync::Server::Module[account]/File[/etc/rsync.d/frag-account]/ensure) defined content as '{md5}5c536b07e578ed18d4c7eedd2d76a225'
2013-01-30T10:54:45 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:54:45 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:54:45 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:54:45 warning: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Skipping because of failed dependencies
2013-01-30T10:54:45 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Triggered 'refresh' from 4 events
2013-01-30T10:54:45 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[horizon]/Haproxy::Balancermember[horizon]/Concat::Fragment[horizon_balancermember_horizon]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/15-horizon_horizon_horizon_balancermember_horizon]/ensure) created
2013-01-30T10:54:46 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/Exec[concat_/etc/haproxy/haproxy.cfg]/returns) executed successfully
2013-01-30T10:54:46 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/Exec[concat_/etc/haproxy/haproxy.cfg]) Triggered 'refresh' from 26 events
2013-01-30T10:54:46 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/etc/haproxy/haproxy.cfg]/content) content changed '{md5}1f337186b0e1ba5ee82760cb437fb810' to '{md5}fdf25f95cf3036f7752f4a95de32a2ad'
2013-01-30T10:54:46 notice: (/Stage[main]/Haproxy/Service[haproxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:54:46 notice: (/Stage[main]/Haproxy/Service[haproxy]) Triggered 'refresh' from 1 events
2013-01-30T10:54:46 notice: (/Stage[main]/Galera/File[/etc/mysql]/ensure) created
2013-01-30T10:54:47 notice: (/Stage[main]/Galera/File[/etc/my.cnf]/ensure) defined content as '{md5}9c703ee12b3b1cef32501cdeeb24feef'
2013-01-30T10:54:50 notice: (/Stage[main]/Galera/Package[openssl098e]/ensure) created
2013-01-30T10:54:54 notice: (/Stage[main]/Galera/Package[galera]/ensure) created
2013-01-30T10:55:02 notice: (/Stage[main]/Galera/Package[MySQL-client]/ensure) created
2013-01-30T10:55:02 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T10:55:02 notice: (/Stage[main]/Galera/File[/etc/mysql/conf.d]/ensure) created
2013-01-30T10:55:02 notice: (/Stage[main]/Galera/File[/etc/mysql/conf.d/wsrep.cnf]/ensure) created
2013-01-30T10:55:27 notice: (/Stage[main]/Galera/Package[MySQL-server]/ensure) created
2013-01-30T10:55:28 notice: (/Stage[main]/Galera/Exec[set-mysql-password]) Triggered 'refresh' from 2 events
2013-01-30T10:55:43 notice: (/Stage[main]/Galera/Exec[wait-initial-sync]) Triggered 'refresh' from 1 events
2013-01-30T10:55:59 notice: (/Stage[main]/Galera/Exec[kill-initial-mysql]/returns) mysqld_safe: no process killed
2013-01-30T10:55:59 notice: (/Stage[main]/Galera/Exec[kill-initial-mysql]) Triggered 'refresh' from 1 events
2013-01-30T10:55:59 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T10:56:32 notice: (/Stage[main]/Galera/Service[mysql-galera]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:56:32 notice: (/Stage[main]/Galera/Service[mysql-galera]) Triggered 'refresh' from 1 events
2013-01-30T10:56:47 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T10:56:48 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T10:56:48 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database[glance]/ensure) created
2013-01-30T10:56:48 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database[nova]/ensure) created
2013-01-30T10:56:48 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[]/Database_user[nova@]/ensure) created
2013-01-30T10:56:48 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_user[nova@127.0.0.1]/ensure) created
2013-01-30T10:56:49 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[]/Database_grant[nova@/nova]/privileges) privileges changed '' to 'all'
2013-01-30T10:56:49 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-1]/Database_user[nova@slave-1]/ensure) created
2013-01-30T10:56:49 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-1]/Database_grant[nova@slave-1/nova]/privileges) privileges changed '' to 'all'
2013-01-30T10:56:50 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-1]/Database_user[glance@slave-1]/ensure) created
2013-01-30T10:56:50 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-1]/Database_grant[glance@slave-1/glance]/privileges) privileges changed '' to 'all'
2013-01-30T10:57:05 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_grant[nova@127.0.0.1/nova]/privileges) privileges changed '' to 'all'
2013-01-30T10:57:05 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/network_manager]/value) value changed 'nova.network.manager.FlatDHCPManager' to 'nova.network.manager.VlanManager'
2013-01-30T10:57:05 notice: (/Stage[main]/Nova::Network/Nova_config[DEFAULT/floating_range]/ensure) created
2013-01-30T10:57:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_hosts]/ensure) created
2013-01-30T10:57:05 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_volumes]/ensure) created
2013-01-30T10:57:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_compute_listen]/ensure) created
2013-01-30T10:57:05 notice: (/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created
2013-01-30T10:57:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/ec2_listen]/ensure) created
2013-01-30T10:57:05 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T10:57:05 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_content_bytes]/ensure) created
2013-01-30T10:57:05 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'nova'
2013-01-30T10:57:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/glance_api_servers]/ensure) created
2013-01-30T10:57:06 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_host]/ensure) created
2013-01-30T10:57:06 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_userid]/ensure) created
2013-01-30T10:57:06 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_ha_queues]/ensure) created
2013-01-30T10:57:06 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_password]/ensure) created
2013-01-30T10:57:06 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/verbose]/ensure) created
2013-01-30T10:57:06 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_cores]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Package[nova-consoleauth]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/enabled_apis]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/sql_connection]/value) value changed 'mysql://nova:nova@localhost/nova' to 'mysql://nova:nova@192.168.0.6/nova'
2013-01-30T10:57:08 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_virtual_host]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/image_service]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/volume_api_class]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_files]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T10:57:08 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_port]/ensure) created
2013-01-30T10:57:08 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/force_dhcp_release]/value) value changed 'True' to 'true'
2013-01-30T10:57:08 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_port]/ensure) created
2013-01-30T10:57:48 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package[nova-vncproxy]/ensure) created
2013-01-30T10:57:48 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_start]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/User[nova]/shell) shell changed '/sbin/nologin' to '/bin/bash'
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/File[/var/log/nova]/group) group changed 'root' to 'nova'
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/File[/var/log/nova]/mode) mode changed '0755' to '0751'
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_path_bytes]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/api_paste_config]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_floating_ips]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/public_interface]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/service_down_time]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_gigabytes]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_instances]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_volume_listen]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/metadata_listen]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/auth_strategy]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_metadata_items]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rpc_backend]/value) value changed 'nova.openstack.common.rpc.impl_qpid' to 'nova.rpc.impl_kombu'
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/fixed_range]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/dhcp_domain]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/allow_resize_to_same_host]/ensure) created
2013-01-30T10:57:49 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/Database_user[glance@]/ensure) created
2013-01-30T10:57:51 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/Database_grant[glance@/glance]/privileges) privileges changed '' to 'all'
2013-01-30T10:57:51 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_interface]/ensure) created
2013-01-30T10:57:51 notice: (/Stage[main]/Nova/File[/etc/nova/nova.conf]/owner) owner changed 'root' to 'nova'
2013-01-30T10:57:52 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database[keystone]/ensure) created
2013-01-30T10:57:52 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database_user[keystone@127.0.0.1]/ensure) created
2013-01-30T10:57:52 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[]/Database_user[keystone@]/ensure) created
2013-01-30T10:57:52 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database_grant[keystone@127.0.0.1/keystone]/privileges) privileges changed '' to 'all'
2013-01-30T10:57:53 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[]/Database_grant[keystone@/keystone]/privileges) privileges changed '' to 'all'
2013-01-30T10:57:53 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-1]/Database_user[keystone@slave-1]/ensure) created
2013-01-30T10:57:53 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-1]/Database_grant[keystone@slave-1/keystone]/privileges) privileges changed '' to 'all'
2013-01-30T10:57:57 notice: (/Stage[main]/Keystone/Package[keystone]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[sql/connection]/value) value changed 'mysql://keystone:keystone@localhost/keystone' to 'mysql://keystone:nova@192.168.0.6/keystone'
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/public_port]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/compute_port]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/debug]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[policy/driver]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[sql/idle_timeout]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_token]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/bind_host]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/File[/etc/keystone]/owner) owner changed 'root' to 'keystone'
2013-01-30T10:57:58 notice: (/Stage[main]/Keystone/File[/etc/keystone]/group) group changed 'root' to 'keystone'
2013-01-30T10:57:58 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database_user[glance@127.0.0.1]/ensure) created
2013-01-30T10:57:58 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database_grant[glance@127.0.0.1/glance]/privileges) privileges changed '' to 'all'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/bind_host]/value) value changed '0.0.0.0' to '192.168.0.2'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/File[/etc/glance/glance-registry.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[paste_deploy/flavor]/ensure) created
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dirname]/ensure) created
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:nova@192.168.0.6/glance'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T10:57:59 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-01-30T10:57:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments.concat]/ensure) created
2013-01-30T10:57:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/Exec[concat_/etc/swift/object-server.conf]/returns) executed successfully
2013-01-30T10:57:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/Exec[concat_/etc/swift/object-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T10:57:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/content) content changed '{md5}bd9ae72e0cc70c89655071d96b32b172' to '{md5}515231e92e97604524c99a0610532d32'
2013-01-30T10:57:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T10:57:59 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/mode) mode changed '0660' to '0640'
2013-01-30T10:57:59 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Service[swift-object]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:58:00 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Service[swift-object]) Triggered 'refresh' from 2 events
2013-01-30T10:58:00 err: (/Service[swift-object-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-object-replicator]: Execution of '/usr/bin/swift-init object-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T10:58:00 notice: (/Service[swift-object-replicator]) Triggered 'refresh' from 2 events
2013-01-30T10:58:00 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/verbose]/ensure) created
2013-01-30T10:58:03 notice: (/Stage[main]/Keystone/Exec[keystone-manage db_sync]) Triggered 'refresh' from 12 events
2013-01-30T10:58:05 notice: (/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:58:06 notice: (/Stage[main]/Keystone/Service[keystone]) Triggered 'refresh' from 13 events
2013-01-30T10:58:18 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_volume]/ensure) created
2013-01-30T10:58:19 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_tenant[admin]/ensure) created
2013-01-30T10:58:19 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_role[Member]/ensure) created
2013-01-30T10:58:20 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_ec2]/ensure) created
2013-01-30T10:58:21 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova_volume]/ensure) created
2013-01-30T10:58:21 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova_ec2]/ensure) created
2013-01-30T10:58:22 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_tenant[services]/ensure) created
2013-01-30T10:58:23 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_user[nova]/ensure) created
2013-01-30T10:58:23 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_user[glance]/ensure) created
2013-01-30T10:58:24 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_role[admin]/ensure) created
2013-01-30T10:58:27 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_user_role[glance@services]/ensure) created
2013-01-30T10:58:28 notice: (/Stage[main]/Keystone::Endpoint/Keystone_service[keystone]/ensure) created
2013-01-30T10:58:28 notice: (/Stage[main]/Keystone::Endpoint/Keystone_endpoint[keystone]/ensure) created
2013-01-30T10:58:28 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_service[glance]/ensure) created
2013-01-30T10:58:29 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_endpoint[glance]/ensure) created
2013-01-30T10:58:31 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_user_role[nova@services]/ensure) created
2013-01-30T10:58:31 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_user[admin]/ensure) created
2013-01-30T10:58:33 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_user_role[admin@admin]/ensure) created
2013-01-30T10:58:33 notice: (/Stage[main]/Swift::Keystone::Auth/Keystone_service[swift_s3]/ensure) created
2013-01-30T10:58:34 notice: (/Stage[main]/Swift::Keystone::Auth/Keystone_endpoint[swift_s3]/ensure) created
2013-01-30T10:58:34 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova]/ensure) created
2013-01-30T10:58:36 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova]/ensure) created
2013-01-30T10:58:36 notice: (/Stage[main]/Swift::Keystone::Auth/Keystone_service[swift]/ensure) created
2013-01-30T10:58:37 notice: (/Stage[main]/Swift::Keystone::Auth/Keystone_endpoint[swift]/ensure) created
2013-01-30T10:58:38 notice: (/Stage[main]/Swift::Keystone::Auth/Keystone_user[swift]/ensure) created
2013-01-30T10:58:41 notice: (/Stage[main]/Swift::Keystone::Auth/Keystone_user_role[swift@services]/ensure) created
2013-01-30T10:58:41 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/auth_url]/ensure) created
2013-01-30T10:58:48 notice: (/Stage[main]/Glance::Registry/Exec[glance-manage db_sync]) Triggered 'refresh' from 39 events
2013-01-30T10:58:49 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:58:49 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]) Triggered 'refresh' from 13 events
2013-01-30T10:58:51 notice: (/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:58:51 notice: (/Stage[main]/Glance::Api/Service[glance-api]) Triggered 'refresh' from 29 events
2013-01-30T10:58:51 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:58:51 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:58:51 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:58:51 warning: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Skipping because of failed dependencies
2013-01-30T10:58:51 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_object_device[192.168.0.2:6000] has failures: true
2013-01-30T10:58:51 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_container_device[192.168.0.2:6001] has failures: true
2013-01-30T10:58:51 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_account_device[192.168.0.2:6002] has failures: true
2013-01-30T10:58:51 warning: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Skipping because of failed dependencies
2013-01-30T10:58:52 notice: (/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed
2013-01-30T10:58:52 notice: (/Stage[main]/Nova/Exec[post-nova_config]) Triggered 'refresh' from 94 events
2013-01-30T10:59:33 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 10:58:53 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=7520) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T10:59:33 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T10:59:34 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 10:59:33 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=7587) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T10:59:34 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]) Triggered 'refresh' from 2 events
2013-01-30T10:59:34 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:59:34 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 3 events
2013-01-30T10:59:35 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:59:36 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 4 events
2013-01-30T10:59:36 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:59:37 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 3 events
2013-01-30T10:59:41 notice: (/Stage[main]/Nova::Network/Nova::Manage::Floating[nova-vm-floating]/Nova_floating[nova-vm-floating]/ensure) created
2013-01-30T10:59:41 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:59:42 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 4 events
2013-01-30T10:59:42 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:59:43 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 51 events
2013-01-30T10:59:43 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T10:59:44 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 3 events
2013-01-30T10:59:50 notice: (/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network[nova-vm-net]/ensure) created
2013-01-30T10:59:50 notice: Finished catalog run in 731.03 seconds
SEPARATOR
2013-01-30T11:28:15 notice: Reopening log files
2013-01-30T11:29:52 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:29:52 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-private-interface]/returns) executed successfully
2013-01-30T11:29:52 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-internal-interface]/returns) executed successfully
2013-01-30T11:29:53 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-public-interface]/returns) executed successfully
2013-01-30T11:29:54 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) node name: 192.168.0.4:6000
2013-01-30T11:29:54 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) available devs: []
2013-01-30T11:29:54 err: (/Stage[main]/Openstack::Swift::Proxy/Ring_object_device[192.168.0.4:6000]) Could not evaluate: Device not found check device on 192.168.0.4:6000
2013-01-30T11:29:54 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) node name: 192.168.0.2:6001
2013-01-30T11:29:54 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:29:55 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:29:55 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:29:55 notice: (/Stage[main]/Swift::Storage::Container/Ring_container_device[192.168.0.2:6001]/ensure) created
2013-01-30T11:29:55 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) node name: 192.168.0.3:6001
2013-01-30T11:29:55 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) available devs: []
2013-01-30T11:29:55 err: (/Stage[main]/Openstack::Swift::Proxy/Ring_container_device[192.168.0.3:6001]) Could not evaluate: Device not found check device on 192.168.0.3:6001
2013-01-30T11:29:55 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) node name: 192.168.0.3:6000
2013-01-30T11:29:55 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) available devs: []
2013-01-30T11:29:55 err: (/Stage[main]/Openstack::Swift::Proxy/Ring_object_device[192.168.0.3:6000]) Could not evaluate: Device not found check device on 192.168.0.3:6000
2013-01-30T11:29:55 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) node name: 192.168.0.2:6000
2013-01-30T11:29:55 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:29:56 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:29:56 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:29:56 notice: (/Stage[main]/Swift::Storage::Object/Ring_object_device[192.168.0.2:6000]/ensure) created
2013-01-30T11:29:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:29:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:29:56 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]) Skipping because of failed dependencies
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) Traceback (most recent call last):
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) File "/usr/bin/swift-ring-builder", line 730, in <module>
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) Commands.__dict__.get(command, Commands.unknown)()
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) File "/usr/bin/swift-ring-builder", line 651, in pretend_min_part_hours_passed
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) builder.pretend_min_part_hours_passed()
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) File "/usr/lib/python2.6/site-packages/swift/common/ring/builder.py", line 408, in pretend_min_part_hours_passed
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) self._last_part_moves[part] = 0xff
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) TypeError: 'NoneType' object does not support item assignment
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]) Triggered 'refresh' from 1 events
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:29:57 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:29:57 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]) Skipping because of failed dependencies
2013-01-30T11:30:54 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 0.00.
2013-01-30T11:30:54 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]) Triggered 'refresh' from 1 events
2013-01-30T11:30:55 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) node name: 192.168.0.4:6002
2013-01-30T11:30:55 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) available devs: []
2013-01-30T11:30:55 err: (/Stage[main]/Openstack::Swift::Proxy/Ring_account_device[192.168.0.4:6002]) Could not evaluate: Device not found check device on 192.168.0.4:6002
2013-01-30T11:30:55 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) node name: 192.168.0.3:6002
2013-01-30T11:30:55 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) available devs: []
2013-01-30T11:30:55 err: (/Stage[main]/Openstack::Swift::Proxy/Ring_account_device[192.168.0.3:6002]) Could not evaluate: Device not found check device on 192.168.0.3:6002
2013-01-30T11:30:55 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) node name: 192.168.0.2:6002
2013-01-30T11:30:55 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:30:56 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:30:56 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Storage::Account/Ring_account_device[192.168.0.2:6002]/ensure) created
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:30:56 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]) Skipping because of failed dependencies
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) Traceback (most recent call last):
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) File "/usr/bin/swift-ring-builder", line 730, in <module>
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) Commands.__dict__.get(command, Commands.unknown)()
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) File "/usr/bin/swift-ring-builder", line 651, in pretend_min_part_hours_passed
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) builder.pretend_min_part_hours_passed()
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) File "/usr/lib/python2.6/site-packages/swift/common/ring/builder.py", line 408, in pretend_min_part_hours_passed
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) self._last_part_moves[part] = 0xff
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) TypeError: 'NoneType' object does not support item assignment
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]) Triggered 'refresh' from 1 events
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:30:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:30:56 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]) Skipping because of failed dependencies
2013-01-30T11:31:54 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 0.00.
2013-01-30T11:31:54 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]) Triggered 'refresh' from 1 events
2013-01-30T11:31:56 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) node name: 192.168.0.4:6001
2013-01-30T11:31:56 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) available devs: []
2013-01-30T11:31:56 err: (/Stage[main]/Openstack::Swift::Proxy/Ring_container_device[192.168.0.4:6001]) Could not evaluate: Device not found check device on 192.168.0.4:6001
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:31:56 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]) Skipping because of failed dependencies
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) Traceback (most recent call last):
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) File "/usr/bin/swift-ring-builder", line 730, in <module>
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) Commands.__dict__.get(command, Commands.unknown)()
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) File "/usr/bin/swift-ring-builder", line 651, in pretend_min_part_hours_passed
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) builder.pretend_min_part_hours_passed()
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) File "/usr/lib/python2.6/site-packages/swift/common/ring/builder.py", line 408, in pretend_min_part_hours_passed
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) self._last_part_moves[part] = 0xff
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) TypeError: 'NoneType' object does not support item assignment
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]) Triggered 'refresh' from 1 events
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:31:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:31:56 warning: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]) Skipping because of failed dependencies
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 0.00.
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]) Triggered 'refresh' from 1 events
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:53 warning: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]) Skipping because of failed dependencies
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:32:53 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:32:54 notice: (/Service[swift-container-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy/Package[swift-proxy]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]) Skipping because of failed dependencies
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:54 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:54 warning: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]) Skipping because of failed dependencies
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:55 warning: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]) Skipping because of failed dependencies
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:55 warning: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]) Skipping because of failed dependencies
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:55 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Skipping because of failed dependencies
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:55 warning: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]) Skipping because of failed dependencies
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:55 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:55 warning: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Skipping because of failed dependencies
2013-01-30T11:32:55 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:32:57 notice: (/Service[swift-account-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:32:57 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:32:57 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:32:57 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:32:57 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:32:57 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:32:57 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:32:57 warning: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Skipping because of failed dependencies
2013-01-30T11:32:58 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:32:59 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:33:09 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:33:09 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:33:40 notice: (/Stage[main]/Galera::Galera_master_final_config/Exec[first-galera-node-final-config]/returns) executed successfully
2013-01-30T11:33:46 notice: (/Service[swift-object-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:33:58 warning: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]) Skipping because of failed dependencies
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_container_device[192.168.0.4:6001] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_account_device[192.168.0.3:6002] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_account_device[192.168.0.4:6002] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_object_device[192.168.0.4:6000] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_object_device[192.168.0.3:6000] has failures: true
2013-01-30T11:33:58 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Dependency Ring_container_device[192.168.0.3:6001] has failures: true
2013-01-30T11:33:58 warning: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]) Skipping because of failed dependencies
2013-01-30T11:33:59 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:33:59 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=11462) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:33:59 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:34:00 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 1 events
2013-01-30T11:34:00 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:34:02 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 1 events
2013-01-30T11:34:04 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 1 events
2013-01-30T11:34:06 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 1 events
2013-01-30T11:34:07 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 1 events
2013-01-30T11:34:11 notice: Finished catalog run in 261.96 seconds
SEPARATOR
2013-01-30T11:34:20 notice: Reopening log files
2013-01-30T11:35:00 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:35:01 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-private-interface]/returns) executed successfully
2013-01-30T11:35:01 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-internal-interface]/returns) executed successfully
2013-01-30T11:35:01 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-public-interface]/returns) executed successfully
2013-01-30T11:35:03 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) node name: 192.168.0.4:6000
2013-01-30T11:35:03 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:35:04 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:35:04 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:35:04 notice: (/Stage[main]/Openstack::Swift::Proxy/Ring_object_device[192.168.0.4:6000]/ensure) created
2013-01-30T11:35:05 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) node name: 192.168.0.2:6001
2013-01-30T11:35:05 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:35:05 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) node name: 192.168.0.3:6001
2013-01-30T11:35:05 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:35:06 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:35:06 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:35:06 notice: (/Stage[main]/Openstack::Swift::Proxy/Ring_container_device[192.168.0.3:6001]/ensure) created
2013-01-30T11:35:06 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) node name: 192.168.0.3:6000
2013-01-30T11:35:06 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:35:07 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:35:07 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:35:08 notice: (/Stage[main]/Openstack::Swift::Proxy/Ring_object_device[192.168.0.3:6000]/ensure) created
2013-01-30T11:35:08 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) node name: 192.168.0.2:6000
2013-01-30T11:35:08 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:35:08 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) executed successfully
2013-01-30T11:35:09 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]) Triggered 'refresh' from 1 events
2013-01-30T11:35:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 100.15.
2013-01-30T11:35:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) -------------------------------------------------------------------------------
2013-01-30T11:35:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) NOTE: Balance of 100.15 indicates you should push this
2013-01-30T11:35:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) ring, wait at least 1 hours, and rebalance/repush.
2013-01-30T11:35:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) -------------------------------------------------------------------------------
2013-01-30T11:35:56 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) executed successfully
2013-01-30T11:36:17 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) No partitions could be reassigned.
2013-01-30T11:36:17 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) Either none need to be or none can be due to min_part_hours [1].
2013-01-30T11:36:17 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]) Triggered 'refresh' from 1 events
2013-01-30T11:36:18 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) node name: 192.168.0.4:6002
2013-01-30T11:36:18 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:36:18 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:36:18 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:36:19 notice: (/Stage[main]/Openstack::Swift::Proxy/Ring_account_device[192.168.0.4:6002]/ensure) created
2013-01-30T11:36:19 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) node name: 192.168.0.3:6002
2013-01-30T11:36:19 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:36:20 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:36:20 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:36:21 notice: (/Stage[main]/Openstack::Swift::Proxy/Ring_account_device[192.168.0.3:6002]/ensure) created
2013-01-30T11:36:21 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) node name: 192.168.0.2:6002
2013-01-30T11:36:21 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:36:22 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) executed successfully
2013-01-30T11:36:22 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]) Triggered 'refresh' from 1 events
2013-01-30T11:37:10 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 100.27.
2013-01-30T11:37:10 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) -------------------------------------------------------------------------------
2013-01-30T11:37:10 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) NOTE: Balance of 100.27 indicates you should push this
2013-01-30T11:37:10 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) ring, wait at least 1 hours, and rebalance/repush.
2013-01-30T11:37:10 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) -------------------------------------------------------------------------------
2013-01-30T11:37:10 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) executed successfully
2013-01-30T11:37:32 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) No partitions could be reassigned.
2013-01-30T11:37:32 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) Either none need to be or none can be due to min_part_hours [1].
2013-01-30T11:37:32 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]) Triggered 'refresh' from 1 events
2013-01-30T11:37:34 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) node name: 192.168.0.4:6001
2013-01-30T11:37:34 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:37:34 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) *** create device: 2
2013-01-30T11:37:35 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) *** create device: 1
2013-01-30T11:37:35 notice: (/Stage[main]/Openstack::Swift::Proxy/Ring_container_device[192.168.0.4:6001]/ensure) created
2013-01-30T11:37:35 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) executed successfully
2013-01-30T11:37:36 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]) Triggered 'refresh' from 1 events
2013-01-30T11:38:23 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 100.13.
2013-01-30T11:38:23 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) -------------------------------------------------------------------------------
2013-01-30T11:38:23 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) NOTE: Balance of 100.13 indicates you should push this
2013-01-30T11:38:23 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) ring, wait at least 1 hours, and rebalance/repush.
2013-01-30T11:38:23 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) -------------------------------------------------------------------------------
2013-01-30T11:38:23 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) executed successfully
2013-01-30T11:38:46 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) No partitions could be reassigned.
2013-01-30T11:38:46 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) Either none need to be or none can be due to min_part_hours [1].
2013-01-30T11:38:46 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]) Triggered 'refresh' from 1 events
2013-01-30T11:38:46 notice: (/Stage[main]/Swift::Ringserver/Rsync::Server::Module[swift_server]/File[/etc/rsync.d/frag-swift_server]/ensure) defined content as '{md5}7fae4818c72b8e958b2f3fcbe3c76d82'
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]/ensure) created
2013-01-30T11:38:48 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]/ensure) created
2013-01-30T11:38:49 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]/ensure) created
2013-01-30T11:38:49 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]/returns) executed successfully
2013-01-30T11:38:49 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Triggered 'refresh' from 11 events
2013-01-30T11:38:49 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]/content) content changed '{md5}b6581fa5a48beaa38dae9450d67b0d28' to '{md5}e46a68b307fb3b8cf307b15f9eefd9af'
2013-01-30T11:38:49 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:38:50 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:38:50 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Triggered 'refresh' from 1 events
2013-01-30T11:38:51 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:38:54 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Triggered 'refresh' from 1 events
2013-01-30T11:38:54 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:38:54 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:39:05 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:39:05 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:39:36 notice: (/Stage[main]/Galera::Galera_master_final_config/Exec[first-galera-node-final-config]/returns) executed successfully
2013-01-30T11:39:56 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]/ensure) created
2013-01-30T11:40:09 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]/returns) Added new image with ID: 38024dfd-7583-4fcb-86c7-56efc8b922e0
2013-01-30T11:40:09 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]/returns) executed successfully
2013-01-30T11:40:10 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:40:10 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=13971) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:40:10 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:40:11 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 1 events
2013-01-30T11:40:11 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:40:13 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 1 events
2013-01-30T11:40:16 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 1 events
2013-01-30T11:40:20 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 1 events
2013-01-30T11:40:24 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 1 events
2013-01-30T11:40:27 notice: Finished catalog run in 329.72 seconds
SEPARATOR
2013-01-30T11:40:36 notice: Reopening log files
2013-01-30T11:42:08 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:42:09 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-private-interface]/returns) executed successfully
2013-01-30T11:42:09 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-internal-interface]/returns) executed successfully
2013-01-30T11:42:09 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-public-interface]/returns) executed successfully
2013-01-30T11:42:10 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) node name: 192.168.0.4:6000
2013-01-30T11:42:10 notice: (Ring_object_device[192.168.0.4:6000](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:42:11 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) node name: 192.168.0.2:6001
2013-01-30T11:42:11 notice: (Ring_container_device[192.168.0.2:6001](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:42:11 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) node name: 192.168.0.3:6001
2013-01-30T11:42:11 notice: (Ring_container_device[192.168.0.3:6001](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:42:12 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) node name: 192.168.0.3:6000
2013-01-30T11:42:12 notice: (Ring_object_device[192.168.0.3:6000](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:42:12 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) node name: 192.168.0.2:6000
2013-01-30T11:42:12 notice: (Ring_object_device[192.168.0.2:6000](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:42:13 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[hours_passed_object]/returns) executed successfully
2013-01-30T11:43:00 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 0.00.
2013-01-30T11:43:00 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[object]/Exec[rebalance_object]/returns) executed successfully
2013-01-30T11:43:01 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) node name: 192.168.0.4:6002
2013-01-30T11:43:01 notice: (Ring_account_device[192.168.0.4:6002](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:43:01 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) node name: 192.168.0.3:6002
2013-01-30T11:43:01 notice: (Ring_account_device[192.168.0.3:6002](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:43:01 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) node name: 192.168.0.2:6002
2013-01-30T11:43:01 notice: (Ring_account_device[192.168.0.2:6002](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:43:02 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[hours_passed_account]/returns) executed successfully
2013-01-30T11:43:47 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 0.00.
2013-01-30T11:43:47 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[account]/Exec[rebalance_account]/returns) executed successfully
2013-01-30T11:43:51 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) node name: 192.168.0.4:6001
2013-01-30T11:43:51 notice: (Ring_container_device[192.168.0.4:6001](provider=swift_ring_builder)) available devs: ["1", "2"]
2013-01-30T11:43:51 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[hours_passed_container]/returns) executed successfully
2013-01-30T11:44:43 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) Reassigned 262144 (100.00%) partitions. Balance is now 0.00.
2013-01-30T11:44:43 notice: (/Stage[main]/Swift::Ringbuilder/Swift::Ringbuilder::Rebalance[container]/Exec[rebalance_container]/returns) executed successfully
2013-01-30T11:44:46 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:44:50 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:44:50 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:45:00 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:45:00 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:45:30 notice: (/Stage[main]/Galera::Galera_master_final_config/Exec[first-galera-node-final-config]/returns) executed successfully
2013-01-30T11:45:51 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:45:51 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=24869) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:45:51 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:45:52 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 1 events
2013-01-30T11:45:53 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:45:55 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 1 events
2013-01-30T11:45:57 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 1 events
2013-01-30T11:45:59 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 1 events
2013-01-30T11:46:01 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 1 events
2013-01-30T11:46:04 notice: Finished catalog run in 239.73 seconds

View File

@ -0,0 +1,451 @@
2013-01-30T11:00:03 notice: Reopening log files
2013-01-30T11:00:53 notice: (/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created
2013-01-30T11:00:53 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat]/ensure) created
2013-01-30T11:00:57 notice: (/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created
2013-01-30T11:00:57 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat/bin]/ensure) created
2013-01-30T11:00:57 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat/bin/concatfragments.sh]/ensure) defined content as '{md5}256169ee61115a6b717b2844d2ea3128'
2013-01-30T11:00:59 notice: (/Stage[main]/Nova::Utilities/Package[screen]/ensure) created
2013-01-30T11:01:03 notice: (/Stage[main]/Xinetd/Package[xinetd]/ensure) created
2013-01-30T11:01:03 notice: (/Stage[main]/Memcached/User[memcached]/ensure) created
2013-01-30T11:01:05 notice: (/Stage[main]/Memcached/Package[memcached]/ensure) created
2013-01-30T11:01:05 notice: (/Stage[main]/Memcached/File[/etc/sysconfig/memcached]/content) content changed '{md5}05503957e3796fbe6fddd756a7a102a0' to '{md5}3a3961445528bdeda6d7b8b5564dfcfc'
2013-01-30T11:01:06 notice: (/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:01:06 notice: (/Stage[main]/Memcached/Service[memcached]) Triggered 'refresh' from 1 events
2013-01-30T11:01:06 notice: (/Stage[main]/Osnailyfacter::Test_controller/File[/tmp/controller-file]/ensure) defined content as '{md5}7f5c51282c4b1242e12addba8cc331fa'
2013-01-30T11:01:14 notice: (/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created
2013-01-30T11:01:18 notice: (/Stage[main]/Swift::Xfs/Package[xfsprogs]/ensure) created
2013-01-30T11:01:24 notice: (/Stage[main]/Nova::Utilities/Package[parted]/ensure) created
2013-01-30T11:01:24 notice: (/Stage[main]/Openstack::Firewall/File[iptables]/ensure) defined content as '{md5}7efd2b2c624fb433a1538c229ae20e1f'
2013-01-30T11:01:25 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:01:55 notice: (/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created
2013-01-30T11:02:05 notice: (/Stage[main]/Openstack::Glance/Package[swift]/ensure) created
2013-01-30T11:03:03 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created
2013-01-30T11:03:04 notice: (/Stage[main]/Openstack::Swift::Proxy/File[/tmp/swift_keystone_test.rb]/ensure) defined content as '{md5}9921c28fffe90ef152603443c7a9a4d3'
2013-01-30T11:03:06 notice: (/Stage[main]/Nova/Package[python-amqp]/ensure) created
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/var/lib/swift]/ensure) created
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/etc/swift]/owner) owner changed 'root' to 'swift'
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/etc/swift]/group) group changed 'root' to 'swift'
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/etc/swift]/mode) mode changed '0755' to '2770'
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/etc/swift/swift.conf]/content) content changed '{md5}5f2a4640b3293fedc9e8e249408f3044' to '{md5}32a608237a8eeb3fb48db3fda8a239e0'
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/etc/swift/swift.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/var/cache/swift]/ensure) created
2013-01-30T11:03:06 notice: (/Stage[main]/Swift/File[/var/run/swift]/group) group changed 'root' to 'swift'
2013-01-30T11:03:06 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/content) content changed '{md5}9ff8cc688dd9f0dfc45e5afd25c427a7' to '{md5}1680192de4cef61a23dca13cdfff07c7'
2013-01-30T11:03:06 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:03:06 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/group) group changed 'root' to 'swift'
2013-01-30T11:03:06 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/mode) mode changed '0600' to '0664'
2013-01-30T11:03:10 notice: (/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created
2013-01-30T11:03:10 notice: (/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as '{md5}70358e4313c4c6852303bf9d0f24fb81'
2013-01-30T11:03:25 notice: (/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created
2013-01-30T11:03:25 notice: (/Stage[main]/Horizon/File[/var/log/horizon]/ensure) created
2013-01-30T11:03:25 notice: (/Stage[main]/Horizon/File_line[horizon_redirect_rule]/ensure) created
2013-01-30T11:03:27 notice: (/Stage[main]/Openstack::Controller_ha/Package[socat]/ensure) created
2013-01-30T11:03:27 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[mysqld]/Haproxy::Balancermember[mysqld]/Concat::Fragment[mysqld_balancermember_mysqld]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/95-mysqld_mysqld_mysqld_balancermember_mysqld]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-api]/Haproxy::Balancermember[glance-api]/Concat::Fragment[glance-api_balancermember_glance-api]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/80-glance-api_glance-api_glance-api_balancermember_glance-api]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-1]/Haproxy::Balancermember[nova-api-1]/Concat::Fragment[nova-api-1_balancermember_nova-api-1]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/40-nova-api-1_nova-api-1_nova-api-1_balancermember_nova-api-1]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[quantum]/Haproxy::Balancermember[quantum]/Concat::Fragment[quantum_balancermember_quantum]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/85-quantum_quantum_quantum_balancermember_quantum]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-reg]/Haproxy::Balancermember[glance-reg]/Concat::Fragment[glance-reg_balancermember_glance-reg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/90-glance-reg_glance-reg_glance-reg_balancermember_glance-reg]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-1]/Haproxy::Listen[nova-api-1]/Concat::Fragment[nova-api-1-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/39-nova-api-1_nova-api-1-listen_block]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[mysqld]/Haproxy::Listen[mysqld]/Concat::Fragment[mysqld-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/94-mysqld_mysqld-listen_block]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[swift]/Haproxy::Listen[swift]/Concat::Fragment[swift-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/95-swift_swift-listen_block]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments.concat]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Haproxy/Concat::Fragment[haproxy-base]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/10_haproxy-base]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Haproxy/Concat::Fragment[00-header]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/01_00-header]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-api]/Haproxy::Listen[glance-api]/Concat::Fragment[glance-api-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/79-glance-api_glance-api-listen_block]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[swift]/Haproxy::Balancermember[swift]/Concat::Fragment[swift_balancermember_swift]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/96-swift_swift_swift_balancermember_swift]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments.concat.out]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-4]/Haproxy::Listen[nova-api-4]/Concat::Fragment[nova-api-4-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/69-nova-api-4_nova-api-4-listen_block]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-2]/Haproxy::Balancermember[nova-api-2]/Concat::Fragment[nova-api-2_balancermember_nova-api-2]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/50-nova-api-2_nova-api-2_nova-api-2_balancermember_nova-api-2]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-2]/Haproxy::Listen[nova-api-2]/Concat::Fragment[nova-api-2-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/49-nova-api-2_nova-api-2-listen_block]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-1]/Haproxy::Listen[keystone-1]/Concat::Fragment[keystone-1-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/19-keystone-1_keystone-1-listen_block]/ensure) created
2013-01-30T11:03:28 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/content) content changed '{md5}d114fa06522fa1016ab2bdede4cfd7bf' to '{md5}6f16dcbc1a76f8ae51dd6fe7157af42d'
2013-01-30T11:03:28 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/owner) owner changed 'root' to 'apache'
2013-01-30T11:03:28 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/group) group changed 'root' to 'apache'
2013-01-30T11:03:28 notice: (/Stage[main]/Swift/File[/home/swift]/ensure) created
2013-01-30T11:03:35 notice: (/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created
2013-01-30T11:05:13 notice: (/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created
2013-01-30T11:05:13 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf]/ensure) created
2013-01-30T11:05:13 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments]/ensure) created
2013-01-30T11:05:13 notice: (/Stage[main]/Swift::Storage::All/File[/srv/node]/ensure) created
2013-01-30T11:05:13 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments.concat]/ensure) created
2013-01-30T11:05:14 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf]/ensure) created
2013-01-30T11:05:14 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments.concat]/ensure) created
2013-01-30T11:05:14 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:05:14 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments]/ensure) created
2013-01-30T11:05:14 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat::Fragment[swift-account-6002]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments/00_swift-account-6002]/ensure) created
2013-01-30T11:05:14 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:05:14 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-2]/Haproxy::Balancermember[keystone-2]/Concat::Fragment[keystone-2_balancermember_keystone-2]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/30-keystone-2_keystone-2_keystone-2_balancermember_keystone-2]/ensure) created
2013-01-30T11:05:24 notice: (/Stage[main]/Rsync::Server/File[/etc/rsync.d]/ensure) created
2013-01-30T11:05:24 notice: (/Stage[main]/Rsync::Server/File[/etc/rsync.d/header]/ensure) defined content as '{md5}81c93e6021cb444faaac0cd902198ce2'
2013-01-30T11:05:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Rsync::Server::Module[container]/File[/etc/rsync.d/frag-container]/ensure) defined content as '{md5}f34ae17bc92c6dac2f9e4535cc52d6f4'
2013-01-30T11:05:24 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.102]/File[/etc/dhcp/dhclient-eth0.102-down-hooks]/ensure) defined content as '{md5}d5bdb51453a200b5822441bcc6072cae'
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf]/ensure) created
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments]/ensure) created
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat::Fragment[swift-object-6000]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments/00_swift-object-6000]/ensure) created
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Rsync::Server::Module[object]/File[/etc/rsync.d/frag-object]/ensure) defined content as '{md5}6599fe18f7573876515cdb791157facc'
2013-01-30T11:05:34 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Sysctl[net.ipv4.ip_forward]/val) val changed '0' to '1'
2013-01-30T11:05:34 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.101]/File[/etc/dhcp/dhclient-eth0.101-down-hooks]/ensure) defined content as '{md5}150162dde6c8e637d6192de84fd26fde'
2013-01-30T11:05:34 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.101]/File[/etc/dhcp/dhclient-eth0.101-up-hooks]/ensure) defined content as '{md5}beeb771a9fc99bc9f3b01258a2c947a2'
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat::Fragment[swift-container-6001]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments/00_swift-container-6001]/ensure) created
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/Exec[concat_/etc/swift/container-server.conf]/returns) executed successfully
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/Exec[concat_/etc/swift/container-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T11:05:34 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/etc/swift/container-server.conf]/ensure) defined content as '{md5}f0497d0e2e8f6a42dc405f29ce78b1e3'
2013-01-30T11:05:34 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-2]/Haproxy::Listen[keystone-2]/Concat::Fragment[keystone-2-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/29-keystone-2_keystone-2-listen_block]/ensure) created
2013-01-30T11:05:35 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) net.ipv4.ip_forward = 1
2013-01-30T11:05:35 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) executed successfully
2013-01-30T11:05:35 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/File[/srv/loopback-device]/ensure) created
2013-01-30T11:05:35 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[horizon]/Haproxy::Listen[horizon]/Concat::Fragment[horizon-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/14-horizon_horizon-listen_block]/ensure) created
2013-01-30T11:05:35 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/content) content changed '{md5}fd32314f3157aaf15712e6da7758060d' to '{md5}b5745641b26d03ebddd24b143319f385'
2013-01-30T11:05:35 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/owner) owner changed 'root' to 'swift'
2013-01-30T11:05:35 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/group) group changed 'root' to 'swift'
2013-01-30T11:05:35 notice: (/Stage[main]/Xinetd/Service[xinetd]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:05:36 notice: (/Stage[main]/Xinetd/Service[xinetd]) Triggered 'refresh' from 3 events
2013-01-30T11:05:36 notice: (/Stage[main]/Horizon/File_line[httpd_listen_on_internal_network_only]/ensure) created
2013-01-30T11:05:37 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:05:39 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]) Triggered 'refresh' from 6 events
2013-01-30T11:05:39 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0+0 records in
2013-01-30T11:05:39 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0+0 records out
2013-01-30T11:05:39 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0 bytes (0 B) copied, 1.562e-05 s, 0.0 kB/s
2013-01-30T11:05:39 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) executed successfully
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) meta-data=/srv/loopback-device/1 isize=1024 agcount=4, agsize=65548 blks
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sectsz=512 attr=2
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) data = bsize=4096 blocks=262189, imaxpct=25
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sunit=0 swidth=0 blks
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) naming =version 2 bsize=4096 ascii-ci=0
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) log =internal log bsize=4096 blocks=2560, version=2
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sectsz=512 sunit=0 blks, lazy-count=1
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) realtime =none extsz=4096 blocks=0, rtextents=0
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]) Triggered 'refresh' from 1 events
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/File[/srv/node/1]/ensure) created
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Mount[/srv/node/1]/ensure) defined 'ensure' as 'defined'
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Mount[/srv/node/1]) Triggered 'refresh' from 2 events
2013-01-30T11:05:40 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[mount_1]/returns) executed successfully
2013-01-30T11:05:41 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[mount_1]) Triggered 'refresh' from 1 events
2013-01-30T11:05:41 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[fix_mount_permissions_1]) Triggered 'refresh' from 3 events
2013-01-30T11:05:41 notice: (/Stage[main]/Glance/Group[glance]/ensure) created
2013-01-30T11:05:41 notice: (/Stage[main]/Glance/User[glance]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance/Package[glance]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_auth_address]/value) value changed '127.0.0.1:5000/v2.0/' to 'http://192.168.0.6:5000/v2.0/'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dirname]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_user]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:nova@192.168.0.6/glance'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/bind_host]/value) value changed '0.0.0.0' to '192.168.0.3'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/default_store]/value) value changed 'file' to 'swift'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_create_container_on_put]/value) value changed 'False' to 'True'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[paste_deploy/flavor]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_user]/value) value changed 'jdoe:jdoe' to 'services:glance'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_tenant_name]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_key]/value) value changed 'a86850deb2742ec3cb41518e26aa2d89' to 'nova'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_password]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '192.168.0.6'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_uri]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance/File[/etc/glance/]/owner) owner changed 'root' to 'glance'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance/File[/etc/glance/]/mode) mode changed '0755' to '0770'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T11:05:47 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '192.168.0.6'
2013-01-30T11:05:48 notice: (/Stage[main]/Glance/File[glance-logging.conf]/ensure) defined content as '{md5}71fa2daa8e89a992f4225e850fd879e4'
2013-01-30T11:05:48 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-cache.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T11:05:48 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-api.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T11:05:48 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T11:05:48 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T11:05:50 notice: (/Stage[main]/Keepalived::Install/Package[keepalived]/ensure) created
2013-01-30T11:05:50 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf]/ensure) created
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments]/ensure) created
2013-01-30T11:05:51 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived::Instance[2]/Concat::Fragment[keepalived_2]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/50_keepalived_2]/ensure) created
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments.concat]/ensure) created
2013-01-30T11:05:51 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived::Instance[1]/Concat::Fragment[keepalived_1]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/50_keepalived_1]/ensure) created
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat::Fragment[global_config]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/01_global_config]/ensure) created
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]/returns) /var/lib/puppet/concat/bin/concatfragments.sh: line 108: [: too many arguments
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]/returns) executed successfully
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]) Triggered 'refresh' from 5 events
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/etc/keepalived/keepalived.conf]/content) content changed '{md5}e79dca9e58978e8035e7d4ad25e2ce67' to '{md5}b0c0fed15bce5900037840ca219adfb5'
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/etc/keepalived/keepalived.conf]/mode) mode changed '0640' to '0644'
2013-01-30T11:05:51 notice: (/Stage[main]/Keepalived::Service/Service[keepalived]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:05:52 notice: (/Stage[main]/Keepalived::Service/Service[keepalived]) Triggered 'refresh' from 1 events
2013-01-30T11:05:52 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-1]/Haproxy::Balancermember[keystone-1]/Concat::Fragment[keystone-1_balancermember_keystone-1]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/20-keystone-1_keystone-1_keystone-1_balancermember_keystone-1]/ensure) created
2013-01-30T11:05:52 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-4]/Haproxy::Balancermember[nova-api-4]/Concat::Fragment[nova-api-4_balancermember_nova-api-4]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/70-nova-api-4_nova-api-4_nova-api-4_balancermember_nova-api-4]/ensure) created
2013-01-30T11:06:02 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[quantum]/Haproxy::Listen[quantum]/Concat::Fragment[quantum-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/84-quantum_quantum-listen_block]/ensure) created
2013-01-30T11:06:02 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/ensure) created
2013-01-30T11:06:02 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/ensure) created
2013-01-30T11:06:05 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Package[swift-account]/ensure) created
2013-01-30T11:06:07 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Package[swift-container]/ensure) created
2013-01-30T11:06:07 err: (/Service[swift-container-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-container-replicator]: Execution of '/usr/bin/swift-init container-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:06:07 notice: (/Service[swift-container-replicator]) Triggered 'refresh' from 2 events
2013-01-30T11:06:07 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/File[/etc/swift/container-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:06:07 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/File[/etc/swift/container-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:06:10 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Package[swift-object]/ensure) created
2013-01-30T11:06:11 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Service[swift-container]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:06:12 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Service[swift-container]) Triggered 'refresh' from 2 events
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]/ensure) created
2013-01-30T11:06:15 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]/ensure) created
2013-01-30T11:06:16 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]/ensure) created
2013-01-30T11:06:16 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]/returns) executed successfully
2013-01-30T11:06:16 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Triggered 'refresh' from 11 events
2013-01-30T11:06:16 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]/content) content changed '{md5}b6581fa5a48beaa38dae9450d67b0d28' to '{md5}9d2d408cec205ea9bbb5e5e22691dc76'
2013-01-30T11:06:16 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:06:16 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:06:17 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Triggered 'refresh' from 1 events
2013-01-30T11:06:17 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:08:31 notice: (/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created
2013-01-30T11:08:32 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) Stopping rabbitmq-server: RabbitMQ is not running
2013-01-30T11:08:32 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) rabbitmq-server.
2013-01-30T11:08:32 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) executed successfully
2013-01-30T11:08:32 notice: (/Stage[main]/Rabbitmq::Server/File[erlang_cookie]/content) content changed '{md5}2a03a34108a295ae469819b1abfcb101' to '{md5}b28788594da393660db1e4f20d296c10'
2013-01-30T11:08:32 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq.config]/ensure) defined content as '{md5}f22d1aa923c4727590fa559e8643fcf8'
2013-01-30T11:08:32 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq-env.config]/ensure) defined content as '{md5}2980dac99b8f2195a50ef6e459ffedae'
2013-01-30T11:08:38 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:08:44 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]) Triggered 'refresh' from 1 events
2013-01-30T11:08:45 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-reg]/Haproxy::Listen[glance-reg]/Concat::Fragment[glance-reg-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/89-glance-reg_glance-reg-listen_block]/ensure) created
2013-01-30T11:08:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/Exec[concat_/etc/swift/account-server.conf]/returns) executed successfully
2013-01-30T11:08:46 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/Exec[concat_/etc/swift/account-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T11:08:46 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/content) content changed '{md5}68d75de69955941c8fd8f40b0dbbb31b' to '{md5}e9c2995f2c596f4cbc11f6639eb5c0fb'
2013-01-30T11:08:46 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:08:46 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/mode) mode changed '0660' to '0640'
2013-01-30T11:08:46 err: (/Service[swift-account-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-account-replicator]: Execution of '/usr/bin/swift-init account-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:08:46 notice: (/Service[swift-account-replicator]) Triggered 'refresh' from 2 events
2013-01-30T11:08:48 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Service[swift-account]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:08:49 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Service[swift-account]) Triggered 'refresh' from 2 events
2013-01-30T11:08:49 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0+0 records in
2013-01-30T11:08:49 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0+0 records out
2013-01-30T11:08:49 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0 bytes (0 B) copied, 1.4634e-05 s, 0.0 kB/s
2013-01-30T11:08:49 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) executed successfully
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) meta-data=/srv/loopback-device/2 isize=1024 agcount=4, agsize=65548 blks
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sectsz=512 attr=2
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) data = bsize=4096 blocks=262189, imaxpct=25
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sunit=0 swidth=0 blks
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) naming =version 2 bsize=4096 ascii-ci=0
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) log =internal log bsize=4096 blocks=2560, version=2
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sectsz=512 sunit=0 blks, lazy-count=1
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) realtime =none extsz=4096 blocks=0, rtextents=0
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]) Triggered 'refresh' from 1 events
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/File[/srv/node/2]/ensure) created
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Mount[/srv/node/2]/ensure) defined 'ensure' as 'defined'
2013-01-30T11:08:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Mount[/srv/node/2]) Triggered 'refresh' from 2 events
2013-01-30T11:08:51 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[mount_2]/returns) executed successfully
2013-01-30T11:08:51 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[mount_2]) Triggered 'refresh' from 1 events
2013-01-30T11:08:51 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[fix_mount_permissions_2]) Triggered 'refresh' from 3 events
2013-01-30T11:08:51 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Rsync::Server::Module[account]/File[/etc/rsync.d/frag-account]/ensure) defined content as '{md5}5c536b07e578ed18d4c7eedd2d76a225'
2013-01-30T11:08:51 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Triggered 'refresh' from 4 events
2013-01-30T11:08:51 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[horizon]/Haproxy::Balancermember[horizon]/Concat::Fragment[horizon_balancermember_horizon]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/15-horizon_horizon_horizon_balancermember_horizon]/ensure) created
2013-01-30T11:08:52 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/Exec[concat_/etc/haproxy/haproxy.cfg]/returns) executed successfully
2013-01-30T11:08:52 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/Exec[concat_/etc/haproxy/haproxy.cfg]) Triggered 'refresh' from 26 events
2013-01-30T11:08:52 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/etc/haproxy/haproxy.cfg]/content) content changed '{md5}1f337186b0e1ba5ee82760cb437fb810' to '{md5}9f86f0ac91477b48d33f24cf1d560774'
2013-01-30T11:08:52 notice: (/Stage[main]/Haproxy/Service[haproxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:08:52 notice: (/Stage[main]/Haproxy/Service[haproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:08:52 notice: (/Stage[main]/Galera/File[/etc/mysql]/ensure) created
2013-01-30T11:08:53 notice: (/Stage[main]/Galera/File[/etc/my.cnf]/ensure) defined content as '{md5}9c703ee12b3b1cef32501cdeeb24feef'
2013-01-30T11:08:56 notice: (/Stage[main]/Galera/Package[openssl098e]/ensure) created
2013-01-30T11:09:00 notice: (/Stage[main]/Galera/Package[galera]/ensure) created
2013-01-30T11:09:08 notice: (/Stage[main]/Galera/Package[MySQL-client]/ensure) created
2013-01-30T11:09:08 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:09:08 notice: (/Stage[main]/Galera/File[/etc/mysql/conf.d]/ensure) created
2013-01-30T11:09:08 notice: (/Stage[main]/Galera/File[/etc/mysql/conf.d/wsrep.cnf]/ensure) created
2013-01-30T11:09:35 notice: (/Stage[main]/Galera/Package[MySQL-server]/ensure) created
2013-01-30T11:09:36 notice: (/Stage[main]/Galera/Exec[set-mysql-password]) Triggered 'refresh' from 2 events
2013-01-30T11:10:08 notice: (/Stage[main]/Galera/Exec[wait-initial-sync]) Triggered 'refresh' from 1 events
2013-01-30T11:10:25 notice: (/Stage[main]/Galera/Exec[kill-initial-mysql]/returns) mysqld_safe: no process killed
2013-01-30T11:10:25 notice: (/Stage[main]/Galera/Exec[kill-initial-mysql]) Triggered 'refresh' from 1 events
2013-01-30T11:10:25 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:10:58 notice: (/Stage[main]/Galera/Service[mysql-galera]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:12:06 notice: (/Stage[main]/Galera/Service[mysql-galera]) Triggered 'refresh' from 1 events
2013-01-30T11:12:23 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:12:23 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:12:24 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database[nova]/charset) charset changed 'utf8' to 'latin1'
2013-01-30T11:12:24 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-2]/Database_user[nova@slave-2]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-2]/Database_grant[nova@slave-2/nova]/privileges) privileges changed '' to 'all'
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/network_manager]/value) value changed 'nova.network.manager.FlatDHCPManager' to 'nova.network.manager.VlanManager'
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Network/Nova_config[DEFAULT/floating_range]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_hosts]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_volumes]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_compute_listen]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/ec2_listen]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_content_bytes]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'nova'
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/glance_api_servers]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_host]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_userid]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_ha_queues]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_password]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/verbose]/ensure) created
2013-01-30T11:12:25 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_cores]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Package[nova-consoleauth]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/enabled_apis]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/sql_connection]/value) value changed 'mysql://nova:nova@localhost/nova' to 'mysql://nova:nova@192.168.0.6/nova'
2013-01-30T11:12:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_virtual_host]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/image_service]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/volume_api_class]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_files]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T11:12:28 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_port]/ensure) created
2013-01-30T11:12:28 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/force_dhcp_release]/value) value changed 'True' to 'true'
2013-01-30T11:12:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_port]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package[nova-vncproxy]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_start]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/User[nova]/shell) shell changed '/sbin/nologin' to '/bin/bash'
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/File[/var/log/nova]/group) group changed 'root' to 'nova'
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/File[/var/log/nova]/mode) mode changed '0755' to '0751'
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_path_bytes]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/api_paste_config]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_floating_ips]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/public_interface]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/service_down_time]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_gigabytes]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_instances]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_volume_listen]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/metadata_listen]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/auth_strategy]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_metadata_items]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rpc_backend]/value) value changed 'nova.openstack.common.rpc.impl_qpid' to 'nova.rpc.impl_kombu'
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/fixed_range]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/dhcp_domain]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/allow_resize_to_same_host]/ensure) created
2013-01-30T11:13:10 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-2]/Database_user[glance@slave-2]/ensure) created
2013-01-30T11:13:12 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-2]/Database_grant[glance@slave-2/glance]/privileges) privileges changed '' to 'all'
2013-01-30T11:13:13 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_interface]/ensure) created
2013-01-30T11:13:13 notice: (/Stage[main]/Nova/File[/etc/nova/nova.conf]/owner) owner changed 'root' to 'nova'
2013-01-30T11:13:13 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-2]/Database_user[keystone@slave-2]/ensure) created
2013-01-30T11:13:14 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-2]/Database_grant[keystone@slave-2/keystone]/privileges) privileges changed '' to 'all'
2013-01-30T11:13:17 notice: (/Stage[main]/Keystone/Package[keystone]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[sql/connection]/value) value changed 'mysql://keystone:keystone@localhost/keystone' to 'mysql://keystone:nova@192.168.0.6/keystone'
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/public_port]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/compute_port]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/debug]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[policy/driver]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[sql/idle_timeout]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_token]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/bind_host]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/File[/etc/keystone]/owner) owner changed 'root' to 'keystone'
2013-01-30T11:13:18 notice: (/Stage[main]/Keystone/File[/etc/keystone]/group) group changed 'root' to 'keystone'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/bind_host]/value) value changed '0.0.0.0' to '192.168.0.3'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/File[/etc/glance/glance-registry.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[paste_deploy/flavor]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dirname]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:nova@192.168.0.6/glance'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T11:13:18 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-01-30T11:13:18 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments.concat]/ensure) created
2013-01-30T11:13:18 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/Exec[concat_/etc/swift/object-server.conf]/returns) executed successfully
2013-01-30T11:13:19 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/Exec[concat_/etc/swift/object-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T11:13:19 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/content) content changed '{md5}bd9ae72e0cc70c89655071d96b32b172' to '{md5}d9963f6c96a2e062ca131508db7a2158'
2013-01-30T11:13:19 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:13:19 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/mode) mode changed '0660' to '0640'
2013-01-30T11:13:19 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Service[swift-object]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:19 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Service[swift-object]) Triggered 'refresh' from 2 events
2013-01-30T11:13:20 err: (/Service[swift-object-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-object-replicator]: Execution of '/usr/bin/swift-init object-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:13:20 notice: (/Service[swift-object-replicator]) Triggered 'refresh' from 2 events
2013-01-30T11:13:20 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/verbose]/ensure) created
2013-01-30T11:13:21 notice: (/Stage[main]/Keystone/Exec[keystone-manage db_sync]) Triggered 'refresh' from 11 events
2013-01-30T11:13:23 notice: (/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:24 notice: (/Stage[main]/Keystone/Service[keystone]) Triggered 'refresh' from 13 events
2013-01-30T11:13:45 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/auth_url]/ensure) created
2013-01-30T11:13:45 notice: (/Stage[main]/Glance::Registry/Exec[glance-manage db_sync]) Triggered 'refresh' from 38 events
2013-01-30T11:13:46 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:46 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]) Triggered 'refresh' from 13 events
2013-01-30T11:13:47 notice: (/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:47 notice: (/Stage[main]/Glance::Api/Service[glance-api]) Triggered 'refresh' from 29 events
2013-01-30T11:13:48 notice: (/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed
2013-01-30T11:13:48 notice: (/Stage[main]/Nova/Exec[post-nova_config]) Triggered 'refresh' from 94 events
2013-01-30T11:13:50 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:13:49 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=8202) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:13:50 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:13:51 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:13:51 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=8214) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:13:51 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]) Triggered 'refresh' from 2 events
2013-01-30T11:13:52 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:52 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 3 events
2013-01-30T11:13:54 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:55 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 4 events
2013-01-30T11:13:56 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:57 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 3 events
2013-01-30T11:13:59 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:13:59 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 4 events
2013-01-30T11:14:00 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:14:01 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 51 events
2013-01-30T11:14:02 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:14:03 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 3 events
2013-01-30T11:14:07 notice: Finished catalog run in 802.32 seconds
SEPARATOR
2013-01-30T11:28:15 notice: Reopening log files
2013-01-30T11:29:44 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:29:45 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-private-interface]/returns) executed successfully
2013-01-30T11:29:45 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-internal-interface]/returns) executed successfully
2013-01-30T11:29:45 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-public-interface]/returns) executed successfully
2013-01-30T11:30:19 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:30:19 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:30:19 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:30:19 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:30:20 err: (/Service[swift-container-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-container-replicator]: Execution of '/usr/bin/swift-init container-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:30:21 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:30:22 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:30:23 err: (/Service[swift-account-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-account-replicator]: Execution of '/usr/bin/swift-init account-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:30:25 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:30:25 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:30:35 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:30:35 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:30:41 err: (/Service[swift-object-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-object-replicator]: Execution of '/usr/bin/swift-init object-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:30:51 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:30:50 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=19300) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:30:51 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:30:51 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 1 events
2013-01-30T11:30:52 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:30:54 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 1 events
2013-01-30T11:30:56 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 1 events
2013-01-30T11:30:58 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 1 events
2013-01-30T11:30:59 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 1 events
2013-01-30T11:31:03 notice: Finished catalog run in 81.92 seconds
SEPARATOR
2013-01-30T11:40:36 notice: Reopening log files
2013-01-30T11:42:01 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:42:01 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-private-interface]/returns) executed successfully
2013-01-30T11:42:02 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-internal-interface]/returns) executed successfully
2013-01-30T11:42:02 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-public-interface]/returns) executed successfully
2013-01-30T11:42:24 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Ringsync[account]/Rsync::Get[/etc/swift/account.ring.gz]/Exec[rsync /etc/swift/account.ring.gz]/returns) executed successfully
2013-01-30T11:42:44 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Ringsync[object]/Rsync::Get[/etc/swift/object.ring.gz]/Exec[rsync /etc/swift/object.ring.gz]/returns) executed successfully
2013-01-30T11:43:06 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Ringsync[container]/Rsync::Get[/etc/swift/container.ring.gz]/Exec[rsync /etc/swift/container.ring.gz]/returns) executed successfully
2013-01-30T11:43:07 notice: (/Service[swift-container-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:08 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:09 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Triggered 'refresh' from 3 events
2013-01-30T11:43:09 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:43:11 notice: (/Service[swift-account-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:13 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:43:13 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:43:23 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:43:23 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:43:30 notice: (/Service[swift-object-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:42 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:43:41 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=21513) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:43:42 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:43:42 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 1 events
2013-01-30T11:43:43 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:43:45 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 1 events
2013-01-30T11:43:47 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 1 events
2013-01-30T11:43:49 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 1 events
2013-01-30T11:43:51 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 1 events
2013-01-30T11:43:54 notice: Finished catalog run in 116.05 seconds

View File

@ -0,0 +1,450 @@
2013-01-30T11:14:18 notice: Reopening log files
2013-01-30T11:15:05 notice: (/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created
2013-01-30T11:15:05 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat]/ensure) created
2013-01-30T11:15:09 notice: (/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created
2013-01-30T11:15:09 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat/bin]/ensure) created
2013-01-30T11:15:10 notice: (/Stage[main]/Concat::Setup/File[/var/lib/puppet/concat/bin/concatfragments.sh]/ensure) defined content as '{md5}256169ee61115a6b717b2844d2ea3128'
2013-01-30T11:15:12 notice: (/Stage[main]/Nova::Utilities/Package[screen]/ensure) created
2013-01-30T11:15:15 notice: (/Stage[main]/Xinetd/Package[xinetd]/ensure) created
2013-01-30T11:15:16 notice: (/Stage[main]/Memcached/User[memcached]/ensure) created
2013-01-30T11:15:19 notice: (/Stage[main]/Memcached/Package[memcached]/ensure) created
2013-01-30T11:15:19 notice: (/Stage[main]/Memcached/File[/etc/sysconfig/memcached]/content) content changed '{md5}05503957e3796fbe6fddd756a7a102a0' to '{md5}3a3961445528bdeda6d7b8b5564dfcfc'
2013-01-30T11:15:20 notice: (/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:15:20 notice: (/Stage[main]/Memcached/Service[memcached]) Triggered 'refresh' from 1 events
2013-01-30T11:15:20 notice: (/Stage[main]/Osnailyfacter::Test_controller/File[/tmp/controller-file]/ensure) defined content as '{md5}7f5c51282c4b1242e12addba8cc331fa'
2013-01-30T11:15:27 notice: (/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created
2013-01-30T11:15:31 notice: (/Stage[main]/Swift::Xfs/Package[xfsprogs]/ensure) created
2013-01-30T11:15:37 notice: (/Stage[main]/Nova::Utilities/Package[parted]/ensure) created
2013-01-30T11:15:37 notice: (/Stage[main]/Openstack::Firewall/File[iptables]/ensure) defined content as '{md5}7efd2b2c624fb433a1538c229ae20e1f'
2013-01-30T11:15:38 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:16:05 notice: (/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created
2013-01-30T11:16:15 notice: (/Stage[main]/Openstack::Glance/Package[swift]/ensure) created
2013-01-30T11:17:26 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created
2013-01-30T11:17:27 notice: (/Stage[main]/Openstack::Swift::Proxy/File[/tmp/swift_keystone_test.rb]/ensure) defined content as '{md5}9921c28fffe90ef152603443c7a9a4d3'
2013-01-30T11:17:31 notice: (/Stage[main]/Nova/Package[python-amqp]/ensure) created
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/var/lib/swift]/ensure) created
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/etc/swift]/owner) owner changed 'root' to 'swift'
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/etc/swift]/group) group changed 'root' to 'swift'
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/etc/swift]/mode) mode changed '0755' to '2770'
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/etc/swift/swift.conf]/content) content changed '{md5}5f2a4640b3293fedc9e8e249408f3044' to '{md5}32a608237a8eeb3fb48db3fda8a239e0'
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/etc/swift/swift.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/var/cache/swift]/ensure) created
2013-01-30T11:17:31 notice: (/Stage[main]/Swift/File[/var/run/swift]/group) group changed 'root' to 'swift'
2013-01-30T11:17:32 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/content) content changed '{md5}9ff8cc688dd9f0dfc45e5afd25c427a7' to '{md5}1680192de4cef61a23dca13cdfff07c7'
2013-01-30T11:17:32 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:17:32 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/group) group changed 'root' to 'swift'
2013-01-30T11:17:32 notice: (/Stage[main]/Xinetd/File[/etc/xinetd.conf]/mode) mode changed '0600' to '0664'
2013-01-30T11:17:36 notice: (/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created
2013-01-30T11:17:36 notice: (/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as '{md5}70358e4313c4c6852303bf9d0f24fb81'
2013-01-30T11:17:53 notice: (/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created
2013-01-30T11:17:53 notice: (/Stage[main]/Horizon/File[/var/log/horizon]/ensure) created
2013-01-30T11:17:53 notice: (/Stage[main]/Horizon/File_line[horizon_redirect_rule]/ensure) created
2013-01-30T11:17:55 notice: (/Stage[main]/Openstack::Controller_ha/Package[socat]/ensure) created
2013-01-30T11:17:55 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg]/ensure) created
2013-01-30T11:17:55 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments]/ensure) created
2013-01-30T11:17:55 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[mysqld]/Haproxy::Balancermember[mysqld]/Concat::Fragment[mysqld_balancermember_mysqld]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/95-mysqld_mysqld_mysqld_balancermember_mysqld]/ensure) created
2013-01-30T11:17:55 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-api]/Haproxy::Balancermember[glance-api]/Concat::Fragment[glance-api_balancermember_glance-api]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/80-glance-api_glance-api_glance-api_balancermember_glance-api]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-1]/Haproxy::Balancermember[nova-api-1]/Concat::Fragment[nova-api-1_balancermember_nova-api-1]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/40-nova-api-1_nova-api-1_nova-api-1_balancermember_nova-api-1]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[quantum]/Haproxy::Balancermember[quantum]/Concat::Fragment[quantum_balancermember_quantum]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/85-quantum_quantum_quantum_balancermember_quantum]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-reg]/Haproxy::Balancermember[glance-reg]/Concat::Fragment[glance-reg_balancermember_glance-reg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/90-glance-reg_glance-reg_glance-reg_balancermember_glance-reg]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-1]/Haproxy::Listen[nova-api-1]/Concat::Fragment[nova-api-1-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/39-nova-api-1_nova-api-1-listen_block]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[mysqld]/Haproxy::Listen[mysqld]/Concat::Fragment[mysqld-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/94-mysqld_mysqld-listen_block]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[swift]/Haproxy::Listen[swift]/Concat::Fragment[swift-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/95-swift_swift-listen_block]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments.concat]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Haproxy/Concat::Fragment[haproxy-base]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/10_haproxy-base]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Haproxy/Concat::Fragment[00-header]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/01_00-header]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-api]/Haproxy::Listen[glance-api]/Concat::Fragment[glance-api-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/79-glance-api_glance-api-listen_block]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[swift]/Haproxy::Balancermember[swift]/Concat::Fragment[swift_balancermember_swift]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/96-swift_swift_swift_balancermember_swift]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments.concat.out]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-4]/Haproxy::Listen[nova-api-4]/Concat::Fragment[nova-api-4-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/69-nova-api-4_nova-api-4-listen_block]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-2]/Haproxy::Balancermember[nova-api-2]/Concat::Fragment[nova-api-2_balancermember_nova-api-2]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/50-nova-api-2_nova-api-2_nova-api-2_balancermember_nova-api-2]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-2]/Haproxy::Listen[nova-api-2]/Concat::Fragment[nova-api-2-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/49-nova-api-2_nova-api-2-listen_block]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-1]/Haproxy::Listen[keystone-1]/Concat::Fragment[keystone-1-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/19-keystone-1_keystone-1-listen_block]/ensure) created
2013-01-30T11:17:56 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/content) content changed '{md5}d114fa06522fa1016ab2bdede4cfd7bf' to '{md5}6f16dcbc1a76f8ae51dd6fe7157af42d'
2013-01-30T11:17:56 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/owner) owner changed 'root' to 'apache'
2013-01-30T11:17:56 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/group) group changed 'root' to 'apache'
2013-01-30T11:17:56 notice: (/Stage[main]/Swift/File[/home/swift]/ensure) created
2013-01-30T11:18:02 notice: (/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/File[/srv/node]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments.concat]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments.concat]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments]/ensure) created
2013-01-30T11:19:24 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat::Fragment[swift-account-6002]/File[/var/lib/puppet/concat/_etc_swift_account-server.conf/fragments/00_swift-account-6002]/ensure) created
2013-01-30T11:19:25 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:19:25 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-2]/Haproxy::Balancermember[keystone-2]/Concat::Fragment[keystone-2_balancermember_keystone-2]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/30-keystone-2_keystone-2_keystone-2_balancermember_keystone-2]/ensure) created
2013-01-30T11:19:35 notice: (/Stage[main]/Rsync::Server/File[/etc/rsync.d]/ensure) created
2013-01-30T11:19:35 notice: (/Stage[main]/Rsync::Server/File[/etc/rsync.d/header]/ensure) defined content as '{md5}81c93e6021cb444faaac0cd902198ce2'
2013-01-30T11:19:35 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Rsync::Server::Module[container]/File[/etc/rsync.d/frag-container]/ensure) defined content as '{md5}f34ae17bc92c6dac2f9e4535cc52d6f4'
2013-01-30T11:19:35 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.102]/File[/etc/dhcp/dhclient-eth0.102-down-hooks]/ensure) defined content as '{md5}d5bdb51453a200b5822441bcc6072cae'
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf]/ensure) created
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments]/ensure) created
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat::Fragment[swift-object-6000]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments/00_swift-object-6000]/ensure) created
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Rsync::Server::Module[object]/File[/etc/rsync.d/frag-object]/ensure) defined content as '{md5}6599fe18f7573876515cdb791157facc'
2013-01-30T11:19:45 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Sysctl[net.ipv4.ip_forward]/val) val changed '0' to '1'
2013-01-30T11:19:45 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.101]/File[/etc/dhcp/dhclient-eth0.101-down-hooks]/ensure) defined content as '{md5}150162dde6c8e637d6192de84fd26fde'
2013-01-30T11:19:45 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived_dhcp_hook[eth0.101]/File[/etc/dhcp/dhclient-eth0.101-up-hooks]/ensure) defined content as '{md5}beeb771a9fc99bc9f3b01258a2c947a2'
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat::Fragment[swift-container-6001]/File[/var/lib/puppet/concat/_etc_swift_container-server.conf/fragments/00_swift-container-6001]/ensure) created
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/Exec[concat_/etc/swift/container-server.conf]/returns) executed successfully
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/Exec[concat_/etc/swift/container-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T11:19:45 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6001]/Concat[/etc/swift/container-server.conf]/File[/etc/swift/container-server.conf]/ensure) defined content as '{md5}5b79511c6c52bc963aa08f0a3e60654e'
2013-01-30T11:19:45 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-2]/Haproxy::Listen[keystone-2]/Concat::Fragment[keystone-2-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/29-keystone-2_keystone-2-listen_block]/ensure) created
2013-01-30T11:19:46 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) net.ipv4.ip_forward = 1
2013-01-30T11:19:46 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) executed successfully
2013-01-30T11:19:46 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/File[/srv/loopback-device]/ensure) created
2013-01-30T11:19:46 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[horizon]/Haproxy::Listen[horizon]/Concat::Fragment[horizon-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/14-horizon_horizon-listen_block]/ensure) created
2013-01-30T11:19:46 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/content) content changed '{md5}fd32314f3157aaf15712e6da7758060d' to '{md5}b5745641b26d03ebddd24b143319f385'
2013-01-30T11:19:46 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/owner) owner changed 'root' to 'swift'
2013-01-30T11:19:46 notice: (/Stage[main]/Rsync::Server/Xinetd::Service[rsync]/File[/etc/xinetd.d/rsync]/group) group changed 'root' to 'swift'
2013-01-30T11:19:46 notice: (/Stage[main]/Xinetd/Service[xinetd]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:19:46 notice: (/Stage[main]/Xinetd/Service[xinetd]) Triggered 'refresh' from 3 events
2013-01-30T11:19:46 notice: (/Stage[main]/Horizon/File_line[httpd_listen_on_internal_network_only]/ensure) created
2013-01-30T11:19:46 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:19:48 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]) Triggered 'refresh' from 6 events
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0+0 records in
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0+0 records out
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) 0 bytes (0 B) copied, 1.487e-05 s, 0.0 kB/s
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Exec[create_partition-1]/returns) executed successfully
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) meta-data=/srv/loopback-device/1 isize=1024 agcount=4, agsize=65548 blks
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sectsz=512 attr=2
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) data = bsize=4096 blocks=262189, imaxpct=25
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sunit=0 swidth=0 blks
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) naming =version 2 bsize=4096 ascii-ci=0
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) log =internal log bsize=4096 blocks=2560, version=2
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) = sectsz=512 sunit=0 blks, lazy-count=1
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]/returns) realtime =none extsz=4096 blocks=0, rtextents=0
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Exec[mkfs-1]) Triggered 'refresh' from 1 events
2013-01-30T11:19:48 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/File[/srv/node/1]/ensure) created
2013-01-30T11:19:49 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Mount[/srv/node/1]/ensure) defined 'ensure' as 'defined'
2013-01-30T11:19:49 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Mount[/srv/node/1]) Triggered 'refresh' from 2 events
2013-01-30T11:19:49 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[mount_1]/returns) executed successfully
2013-01-30T11:19:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[mount_1]) Triggered 'refresh' from 1 events
2013-01-30T11:19:50 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[1]/Swift::Storage::Xfs[1]/Swift::Storage::Mount[1]/Exec[fix_mount_permissions_1]) Triggered 'refresh' from 3 events
2013-01-30T11:19:50 notice: (/Stage[main]/Glance/Group[glance]/ensure) created
2013-01-30T11:19:50 notice: (/Stage[main]/Glance/User[glance]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance/Package[glance]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_auth_address]/value) value changed '127.0.0.1:5000/v2.0/' to 'http://192.168.0.6:5000/v2.0/'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dirname]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_user]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:nova@192.168.0.6/glance'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/bind_host]/value) value changed '0.0.0.0' to '192.168.0.4'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/default_store]/value) value changed 'file' to 'swift'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_create_container_on_put]/value) value changed 'False' to 'True'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[paste_deploy/flavor]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_user]/value) value changed 'jdoe:jdoe' to 'services:glance'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_tenant_name]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Backend::Swift/Glance_api_config[DEFAULT/swift_store_key]/value) value changed 'a86850deb2742ec3cb41518e26aa2d89' to 'nova'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_password]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '192.168.0.6'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_uri]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance/File[/etc/glance/]/owner) owner changed 'root' to 'glance'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance/File[/etc/glance/]/mode) mode changed '0755' to '0770'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T11:19:56 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '192.168.0.6'
2013-01-30T11:19:57 notice: (/Stage[main]/Glance/File[glance-logging.conf]/ensure) defined content as '{md5}71fa2daa8e89a992f4225e850fd879e4'
2013-01-30T11:19:57 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-cache.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T11:19:57 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-api.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T11:19:57 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T11:19:57 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T11:20:00 notice: (/Stage[main]/Keepalived::Install/Package[keepalived]/ensure) created
2013-01-30T11:20:00 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf]/ensure) created
2013-01-30T11:20:00 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments]/ensure) created
2013-01-30T11:20:00 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived::Instance[2]/Concat::Fragment[keepalived_2]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/50_keepalived_2]/ensure) created
2013-01-30T11:20:00 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments.concat]/ensure) created
2013-01-30T11:20:00 notice: (/Stage[main]/Openstack::Controller_ha/Keepalived::Instance[1]/Concat::Fragment[keepalived_1]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/50_keepalived_1]/ensure) created
2013-01-30T11:20:00 notice: (/Stage[main]/Keepalived::Config/Concat::Fragment[global_config]/File[/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/01_global_config]/ensure) created
2013-01-30T11:20:00 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]/returns) /var/lib/puppet/concat/bin/concatfragments.sh: line 108: [: too many arguments
2013-01-30T11:20:00 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]/returns) executed successfully
2013-01-30T11:20:01 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/Exec[concat_/etc/keepalived/keepalived.conf]) Triggered 'refresh' from 5 events
2013-01-30T11:20:01 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/etc/keepalived/keepalived.conf]/content) content changed '{md5}e79dca9e58978e8035e7d4ad25e2ce67' to '{md5}0f64db8e75be1d62a1c1894ea185f050'
2013-01-30T11:20:01 notice: (/Stage[main]/Keepalived::Config/Concat[/etc/keepalived/keepalived.conf]/File[/etc/keepalived/keepalived.conf]/mode) mode changed '0640' to '0644'
2013-01-30T11:20:01 notice: (/Stage[main]/Keepalived::Service/Service[keepalived]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:20:02 notice: (/Stage[main]/Keepalived::Service/Service[keepalived]) Triggered 'refresh' from 1 events
2013-01-30T11:20:02 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[keystone-1]/Haproxy::Balancermember[keystone-1]/Concat::Fragment[keystone-1_balancermember_keystone-1]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/20-keystone-1_keystone-1_keystone-1_balancermember_keystone-1]/ensure) created
2013-01-30T11:20:02 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[nova-api-4]/Haproxy::Balancermember[nova-api-4]/Concat::Fragment[nova-api-4_balancermember_nova-api-4]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/70-nova-api-4_nova-api-4_nova-api-4_balancermember_nova-api-4]/ensure) created
2013-01-30T11:20:12 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[quantum]/Haproxy::Listen[quantum]/Concat::Fragment[quantum-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/84-quantum_quantum-listen_block]/ensure) created
2013-01-30T11:20:12 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/ensure) created
2013-01-30T11:20:12 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/ensure) created
2013-01-30T11:20:15 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Package[swift-account]/ensure) created
2013-01-30T11:20:18 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Package[swift-container]/ensure) created
2013-01-30T11:20:19 err: (/Service[swift-container-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-container-replicator]: Execution of '/usr/bin/swift-init container-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:20:19 notice: (/Service[swift-container-replicator]) Triggered 'refresh' from 2 events
2013-01-30T11:20:19 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/File[/etc/swift/container-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:20:19 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/File[/etc/swift/container-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:20:21 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Package[swift-object]/ensure) created
2013-01-30T11:20:22 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Service[swift-container]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:20:22 notice: (/Stage[main]/Swift::Storage::Container/Swift::Storage::Generic[container]/Service[swift-container]) Triggered 'refresh' from 2 events
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy/Package[swift-proxy]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments.concat.out]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy/Concat::Fragment[swift_proxy]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/00_swift_proxy]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::S3token/Concat::Fragment[swift_s3token]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/28_swift_s3token]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::Catch_errors/Concat::Fragment[swift_catch_errors]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/24_swift_catch_errors]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::Healthcheck/Concat::Fragment[swift_healthcheck]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/25_swift_healthcheck]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::Cache/Concat::Fragment[swift_cache]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/23_swift_cache]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::Keystone/Concat::Fragment[swift_keystone]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/79_swift_keystone]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::Ratelimit/Concat::Fragment[swift_ratelimit]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/26_swift_ratelimit]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::Swift3/Concat::Fragment[swift_swift3]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/27_swift_swift3]/ensure) created
2013-01-30T11:20:25 notice: (/Stage[main]/Swift::Proxy::Authtoken/Keystone::Client::Authtoken[/etc/swift/proxy-server.conf]/Concat::Fragment[_etc_swift_proxy-server.conf_authtoken]/File[/var/lib/puppet/concat/_etc_swift_proxy-server.conf/fragments/80__etc_swift_proxy-server.conf_authtoken]/ensure) created
2013-01-30T11:20:26 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]/returns) executed successfully
2013-01-30T11:20:26 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/Exec[concat_/etc/swift/proxy-server.conf]) Triggered 'refresh' from 11 events
2013-01-30T11:20:26 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]/content) content changed '{md5}b6581fa5a48beaa38dae9450d67b0d28' to '{md5}8d2d03495da02438bd24cc04f7a90b2a'
2013-01-30T11:20:26 notice: (/Stage[main]/Swift::Proxy/Concat[/etc/swift/proxy-server.conf]/File[/etc/swift/proxy-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:20:27 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:20:27 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Triggered 'refresh' from 1 events
2013-01-30T11:20:28 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:22:30 notice: (/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created
2013-01-30T11:22:30 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) Stopping rabbitmq-server: RabbitMQ is not running
2013-01-30T11:22:30 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) rabbitmq-server.
2013-01-30T11:22:30 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) executed successfully
2013-01-30T11:22:30 notice: (/Stage[main]/Rabbitmq::Server/File[erlang_cookie]/content) content changed '{md5}5ca36383f027148bc87612d81fec252a' to '{md5}b28788594da393660db1e4f20d296c10'
2013-01-30T11:22:30 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq.config]/ensure) defined content as '{md5}f22d1aa923c4727590fa559e8643fcf8'
2013-01-30T11:22:30 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq-env.config]/ensure) defined content as '{md5}2980dac99b8f2195a50ef6e459ffedae'
2013-01-30T11:22:42 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:22:49 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]) Triggered 'refresh' from 1 events
2013-01-30T11:22:50 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[glance-reg]/Haproxy::Listen[glance-reg]/Concat::Fragment[glance-reg-listen_block]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/89-glance-reg_glance-reg-listen_block]/ensure) created
2013-01-30T11:22:50 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/Exec[concat_/etc/swift/account-server.conf]/returns) executed successfully
2013-01-30T11:22:50 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/Exec[concat_/etc/swift/account-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T11:22:50 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/content) content changed '{md5}68d75de69955941c8fd8f40b0dbbb31b' to '{md5}47ca0c31c97ed0dd8a6c6cc38cce3160'
2013-01-30T11:22:50 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:22:50 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Concat[/etc/swift/account-server.conf]/File[/etc/swift/account-server.conf]/mode) mode changed '0660' to '0640'
2013-01-30T11:22:51 err: (/Service[swift-account-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-account-replicator]: Execution of '/usr/bin/swift-init account-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:22:51 notice: (/Service[swift-account-replicator]) Triggered 'refresh' from 2 events
2013-01-30T11:22:51 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Service[swift-account]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:22:52 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/Service[swift-account]) Triggered 'refresh' from 2 events
2013-01-30T11:22:52 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0+0 records in
2013-01-30T11:22:52 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0+0 records out
2013-01-30T11:22:52 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) 0 bytes (0 B) copied, 1.4544e-05 s, 0.0 kB/s
2013-01-30T11:22:52 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Exec[create_partition-2]/returns) executed successfully
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) meta-data=/srv/loopback-device/2 isize=1024 agcount=4, agsize=65548 blks
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sectsz=512 attr=2
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) data = bsize=4096 blocks=262189, imaxpct=25
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sunit=0 swidth=0 blks
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) naming =version 2 bsize=4096 ascii-ci=0
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) log =internal log bsize=4096 blocks=2560, version=2
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) = sectsz=512 sunit=0 blks, lazy-count=1
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]/returns) realtime =none extsz=4096 blocks=0, rtextents=0
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Exec[mkfs-2]) Triggered 'refresh' from 1 events
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/File[/srv/node/2]/ensure) created
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Mount[/srv/node/2]/ensure) defined 'ensure' as 'defined'
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Mount[/srv/node/2]) Triggered 'refresh' from 2 events
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[mount_2]/returns) executed successfully
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[mount_2]) Triggered 'refresh' from 1 events
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Storage::Loopback[2]/Swift::Storage::Xfs[2]/Swift::Storage::Mount[2]/Exec[fix_mount_permissions_2]) Triggered 'refresh' from 3 events
2013-01-30T11:22:53 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6002]/Rsync::Server::Module[account]/File[/etc/rsync.d/frag-account]/ensure) defined content as '{md5}5c536b07e578ed18d4c7eedd2d76a225'
2013-01-30T11:22:53 notice: (/Stage[main]/Rsync::Server/Exec[compile fragments]) Triggered 'refresh' from 4 events
2013-01-30T11:22:53 notice: (/Stage[main]/Openstack::Controller_ha/Haproxy_service[horizon]/Haproxy::Balancermember[horizon]/Concat::Fragment[horizon_balancermember_horizon]/File[/var/lib/puppet/concat/_etc_haproxy_haproxy.cfg/fragments/15-horizon_horizon_horizon_balancermember_horizon]/ensure) created
2013-01-30T11:22:53 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/Exec[concat_/etc/haproxy/haproxy.cfg]/returns) executed successfully
2013-01-30T11:22:53 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/Exec[concat_/etc/haproxy/haproxy.cfg]) Triggered 'refresh' from 26 events
2013-01-30T11:22:53 notice: (/Stage[main]/Haproxy/Concat[/etc/haproxy/haproxy.cfg]/File[/etc/haproxy/haproxy.cfg]/content) content changed '{md5}1f337186b0e1ba5ee82760cb437fb810' to '{md5}88cfb25e046978e0c16fa67068edb72e'
2013-01-30T11:22:54 notice: (/Stage[main]/Haproxy/Service[haproxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:22:54 notice: (/Stage[main]/Haproxy/Service[haproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:22:54 notice: (/Stage[main]/Galera/File[/etc/mysql]/ensure) created
2013-01-30T11:22:54 notice: (/Stage[main]/Galera/File[/etc/my.cnf]/ensure) defined content as '{md5}9c703ee12b3b1cef32501cdeeb24feef'
2013-01-30T11:22:57 notice: (/Stage[main]/Galera/Package[openssl098e]/ensure) created
2013-01-30T11:23:01 notice: (/Stage[main]/Galera/Package[galera]/ensure) created
2013-01-30T11:23:11 notice: (/Stage[main]/Galera/Package[MySQL-client]/ensure) created
2013-01-30T11:23:11 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:23:11 notice: (/Stage[main]/Galera/File[/etc/mysql/conf.d]/ensure) created
2013-01-30T11:23:11 notice: (/Stage[main]/Galera/File[/etc/mysql/conf.d/wsrep.cnf]/ensure) created
2013-01-30T11:23:34 notice: (/Stage[main]/Galera/Package[MySQL-server]/ensure) created
2013-01-30T11:23:34 notice: (/Stage[main]/Galera/Exec[set-mysql-password]) Triggered 'refresh' from 2 events
2013-01-30T11:24:10 notice: (/Stage[main]/Galera/Exec[wait-initial-sync]) Triggered 'refresh' from 1 events
2013-01-30T11:24:25 notice: (/Stage[main]/Galera/Exec[kill-initial-mysql]/returns) mysqld_safe: no process killed
2013-01-30T11:24:25 notice: (/Stage[main]/Galera/Exec[kill-initial-mysql]) Triggered 'refresh' from 1 events
2013-01-30T11:24:25 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:24:58 notice: (/Stage[main]/Galera/Service[mysql-galera]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:26:04 notice: (/Stage[main]/Galera/Service[mysql-galera]) Triggered 'refresh' from 1 events
2013-01-30T11:26:25 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:26:25 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:26:25 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-3]/Database_user[nova@slave-3]/ensure) created
2013-01-30T11:26:25 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-3]/Database_user[glance@slave-3]/ensure) created
2013-01-30T11:26:26 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-3]/Database_grant[glance@slave-3/glance]/privileges) privileges changed '' to 'all'
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-3]/Database_grant[nova@slave-3/nova]/privileges) privileges changed '' to 'all'
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/network_manager]/value) value changed 'nova.network.manager.FlatDHCPManager' to 'nova.network.manager.VlanManager'
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Network/Nova_config[DEFAULT/floating_range]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_hosts]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_volumes]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_compute_listen]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/ec2_listen]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_content_bytes]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'nova'
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/glance_api_servers]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_host]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_userid]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_ha_queues]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_password]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/verbose]/ensure) created
2013-01-30T11:26:28 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_cores]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Package[nova-consoleauth]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/enabled_apis]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/sql_connection]/value) value changed 'mysql://nova:nova@localhost/nova' to 'mysql://nova:nova@192.168.0.6/nova'
2013-01-30T11:26:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_virtual_host]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/image_service]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/volume_api_class]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_files]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T11:26:31 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_port]/ensure) created
2013-01-30T11:26:31 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/force_dhcp_release]/value) value changed 'True' to 'true'
2013-01-30T11:26:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_port]/ensure) created
2013-01-30T11:27:04 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package[nova-vncproxy]/ensure) created
2013-01-30T11:27:04 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_start]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/User[nova]/shell) shell changed '/sbin/nologin' to '/bin/bash'
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/File[/var/log/nova]/group) group changed 'root' to 'nova'
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/File[/var/log/nova]/mode) mode changed '0755' to '0751'
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_path_bytes]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/api_paste_config]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_floating_ips]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/public_interface]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/service_down_time]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_gigabytes]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_instances]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_volume_listen]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/metadata_listen]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/auth_strategy]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_metadata_items]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rpc_backend]/value) value changed 'nova.openstack.common.rpc.impl_qpid' to 'nova.rpc.impl_kombu'
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/fixed_range]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/dhcp_domain]/ensure) created
2013-01-30T11:27:05 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/allow_resize_to_same_host]/ensure) created
2013-01-30T11:27:06 notice: (/Stage[main]/Nova::Network::Vlan/Nova_config[DEFAULT/vlan_interface]/ensure) created
2013-01-30T11:27:06 notice: (/Stage[main]/Nova/File[/etc/nova/nova.conf]/owner) owner changed 'root' to 'nova'
2013-01-30T11:27:06 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-3]/Database_user[keystone@slave-3]/ensure) created
2013-01-30T11:27:07 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-3]/Database_grant[keystone@slave-3/keystone]/privileges) privileges changed '' to 'all'
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Package[keystone]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[sql/connection]/value) value changed 'mysql://keystone:keystone@localhost/keystone' to 'mysql://keystone:nova@192.168.0.6/keystone'
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/public_port]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/compute_port]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/debug]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[policy/driver]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[sql/idle_timeout]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_token]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/bind_host]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/File[/etc/keystone]/owner) owner changed 'root' to 'keystone'
2013-01-30T11:27:09 notice: (/Stage[main]/Keystone/File[/etc/keystone]/group) group changed 'root' to 'keystone'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dir]/ensure) created
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.0.6'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/bind_host]/value) value changed '0.0.0.0' to '192.168.0.4'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/File[/etc/glance/glance-registry.conf]/owner) owner changed 'root' to 'glance'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[paste_deploy/flavor]/ensure) created
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dirname]/ensure) created
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:nova@192.168.0.6/glance'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-01-30T11:27:10 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-01-30T11:27:10 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/var/lib/puppet/concat/_etc_swift_object-server.conf/fragments.concat]/ensure) created
2013-01-30T11:27:10 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/Exec[concat_/etc/swift/object-server.conf]/returns) executed successfully
2013-01-30T11:27:10 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/Exec[concat_/etc/swift/object-server.conf]) Triggered 'refresh' from 3 events
2013-01-30T11:27:10 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/content) content changed '{md5}bd9ae72e0cc70c89655071d96b32b172' to '{md5}cbf8e9bca187c13fef5b7c3130e1d6b0'
2013-01-30T11:27:10 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/owner) owner changed 'root' to 'swift'
2013-01-30T11:27:10 notice: (/Stage[main]/Swift::Storage::All/Swift::Storage::Server[6000]/Concat[/etc/swift/object-server.conf]/File[/etc/swift/object-server.conf]/mode) mode changed '0660' to '0640'
2013-01-30T11:27:10 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Service[swift-object]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:11 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/Service[swift-object]) Triggered 'refresh' from 2 events
2013-01-30T11:27:11 err: (/Service[swift-object-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-object-replicator]: Execution of '/usr/bin/swift-init object-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:27:11 notice: (/Service[swift-object-replicator]) Triggered 'refresh' from 2 events
2013-01-30T11:27:12 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/verbose]/ensure) created
2013-01-30T11:27:13 notice: (/Stage[main]/Keystone/Exec[keystone-manage db_sync]) Triggered 'refresh' from 11 events
2013-01-30T11:27:14 notice: (/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:15 notice: (/Stage[main]/Keystone/Service[keystone]) Triggered 'refresh' from 13 events
2013-01-30T11:27:36 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/auth_url]/ensure) created
2013-01-30T11:27:37 notice: (/Stage[main]/Glance::Registry/Exec[glance-manage db_sync]) Triggered 'refresh' from 38 events
2013-01-30T11:27:37 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:38 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]) Triggered 'refresh' from 13 events
2013-01-30T11:27:38 notice: (/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:39 notice: (/Stage[main]/Glance::Api/Service[glance-api]) Triggered 'refresh' from 29 events
2013-01-30T11:27:39 notice: (/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed
2013-01-30T11:27:39 notice: (/Stage[main]/Nova/Exec[post-nova_config]) Triggered 'refresh' from 94 events
2013-01-30T11:27:41 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:27:41 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=8486) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:27:41 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:27:43 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:27:42 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=8506) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:27:43 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]) Triggered 'refresh' from 1 events
2013-01-30T11:27:44 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:47 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 3 events
2013-01-30T11:27:50 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:51 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 4 events
2013-01-30T11:27:52 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:53 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 3 events
2013-01-30T11:27:55 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:55 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 4 events
2013-01-30T11:27:56 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:57 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 51 events
2013-01-30T11:27:58 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:27:59 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 3 events
2013-01-30T11:28:03 notice: Finished catalog run in 786.94 seconds
SEPARATOR
2013-01-30T11:28:15 notice: Reopening log files
2013-01-30T11:29:42 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:29:43 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-private-interface]/returns) executed successfully
2013-01-30T11:29:43 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-internal-interface]/returns) executed successfully
2013-01-30T11:29:43 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-public-interface]/returns) executed successfully
2013-01-30T11:30:16 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:30:16 notice: (/Stage[main]/Swift::Storage::Object/Swift::Storage::Generic[object]/File[/etc/swift/object-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:30:16 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/owner) owner changed 'root' to 'swift'
2013-01-30T11:30:16 notice: (/Stage[main]/Swift::Storage::Account/Swift::Storage::Generic[account]/File[/etc/swift/account-server/]/group) group changed 'root' to 'swift'
2013-01-30T11:30:17 err: (/Service[swift-container-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-container-replicator]: Execution of '/usr/bin/swift-init container-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:30:18 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:30:19 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:30:21 err: (/Service[swift-account-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-account-replicator]: Execution of '/usr/bin/swift-init account-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:30:22 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:30:23 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:30:33 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:30:33 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:30:38 err: (/Service[swift-object-replicator]/ensure) change from stopped to running failed: Could not start Service[swift-object-replicator]: Execution of '/usr/bin/swift-init object-replicator start' returned 1: at /etc/puppet/modules/swift/manifests/storage/generic.pp:61
2013-01-30T11:30:49 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:30:48 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=10387) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:30:49 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:30:50 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 1 events
2013-01-30T11:30:52 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:30:53 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 1 events
2013-01-30T11:30:56 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 1 events
2013-01-30T11:30:58 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 1 events
2013-01-30T11:31:00 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 1 events
2013-01-30T11:31:02 notice: Finished catalog run in 83.22 seconds
SEPARATOR
2013-01-30T11:40:36 notice: Reopening log files
2013-01-30T11:42:02 notice: (/Stage[main]/Openstack::Firewall/Exec[startup-firewall]/returns) executed successfully
2013-01-30T11:42:03 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-private-interface]/returns) executed successfully
2013-01-30T11:42:03 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-internal-interface]/returns) executed successfully
2013-01-30T11:42:03 notice: (/Stage[main]/Openstack::Controller_ha/Exec[up-public-interface]/returns) executed successfully
2013-01-30T11:42:25 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Ringsync[account]/Rsync::Get[/etc/swift/account.ring.gz]/Exec[rsync /etc/swift/account.ring.gz]/returns) executed successfully
2013-01-30T11:42:46 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Ringsync[object]/Rsync::Get[/etc/swift/object.ring.gz]/Exec[rsync /etc/swift/object.ring.gz]/returns) executed successfully
2013-01-30T11:43:08 notice: (/Stage[main]/Openstack::Swift::Storage-node/Swift::Ringsync[container]/Rsync::Get[/etc/swift/container.ring.gz]/Exec[rsync /etc/swift/container.ring.gz]/returns) executed successfully
2013-01-30T11:43:09 notice: (/Service[swift-container-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:10 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:10 notice: (/Stage[main]/Swift::Proxy/Service[swift-proxy]) Triggered 'refresh' from 3 events
2013-01-30T11:43:11 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-01-30T11:43:13 notice: (/Service[swift-account-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:14 notice: (/Stage[main]/Galera/File[/tmp/wsrep-init-file]/ensure) created
2013-01-30T11:43:14 notice: (/Stage[main]/Galera/Exec[rm-init-file]/returns) executed successfully
2013-01-30T11:43:25 notice: (/Stage[main]/Galera/Exec[wait-for-synced-state]/returns) executed successfully
2013-01-30T11:43:25 notice: (/Stage[main]/Openstack::Controller_ha/Exec[wait-for-haproxy-mysql-backend]/returns) executed successfully
2013-01-30T11:43:30 notice: (/Service[swift-object-replicator]/ensure) ensure changed 'stopped' to 'running'
2013-01-30T11:43:42 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-01-30 11:43:42 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=12561) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-01-30T11:43:42 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-01-30T11:43:42 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 1 events
2013-01-30T11:43:43 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 1 events
2013-01-30T11:43:45 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 1 events
2013-01-30T11:43:47 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 1 events
2013-01-30T11:43:50 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 1 events
2013-01-30T11:43:54 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 1 events
2013-01-30T11:43:57 notice: Finished catalog run in 117.64 seconds

View File

@ -0,0 +1,90 @@
2013-02-07T11:54:29 notice: Reopening log files
2013-02-07T11:54:45 notice: (/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created
2013-02-07T11:54:46 notice: (/Stage[main]/Nova::Compute/Package[bridge-utils]/ensure) created
2013-02-07T11:54:49 notice: (/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created
2013-02-07T11:54:50 notice: (/Stage[main]/Nova::Utilities/Package[screen]/ensure) created
2013-02-07T11:55:21 notice: (/Stage[main]/Nova::Compute::Libvirt/Package[qemu-kvm]/ensure) created
2013-02-07T11:55:21 notice: (/Stage[main]/Nova::Compute::Libvirt/Exec[symlink-qemu-kvm]/returns) executed successfully
2013-02-07T11:55:26 notice: (/Stage[main]/Nova::Utilities/Package[parted]/ensure) created
2013-02-07T11:56:12 notice: (/Stage[main]/Nova::Compute::Libvirt/Package[libvirt]/ensure) created
2013-02-07T11:56:33 notice: (/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created
2013-02-07T11:56:35 notice: (/Stage[main]/Osnailyfacter::Network_setup/Network_config[eth0.109]/ensure) created
2013-02-07T11:56:38 notice: (/Stage[main]/Osnailyfacter::Network_setup/Network_config[eth0.105]/ensure) created
2013-02-07T11:56:44 notice: (/Stage[main]/Osnailyfacter::Network_setup/Network_config[eth0.107]/ensure) created
2013-02-07T11:57:30 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Compute::Libvirt/Package[dnsmasq-utils]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_interface]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Compute/Nova_config[DEFAULT/vncserver_proxyclient_address]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_hosts]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_compute_listen]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_network_bridge]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/ec2_listen]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova_pass'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'nova'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/glance_api_servers]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_userid]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_ha_queues]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_password]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/verbose]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/enabled_apis]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/sql_connection]/value) value changed 'mysql://nova:nova@localhost/nova' to 'mysql://nova:nova_pass@192.168.1.2/nova'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Compute/Nova_config[DEFAULT/vnc_enabled]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_virtual_host]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/image_service]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/volume_api_class]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/auth_host]/value) value changed '127.0.0.1' to '192.168.1.2'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/force_dhcp_release]/value) value changed 'True' to 'true'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_port]/ensure) created
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/User[nova]/shell) shell changed '/sbin/nologin' to '/bin/bash'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/File[/var/log/nova]/group) group changed 'root' to 'nova'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova/File[/var/log/nova]/mode) mode changed '0755' to '0751'
2013-02-07T11:57:31 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/api_paste_config]/ensure) created
2013-02-07T11:57:32 notice: (/Stage[main]/Nova::Compute/Nova_config[DEFAULT/novncproxy_base_url]/ensure) created
2013-02-07T11:57:32 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/public_interface]/ensure) created
2013-02-07T11:57:32 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/service_down_time]/ensure) created
2013-02-07T11:57:32 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-02-07T11:57:33 notice: (/Stage[main]/Nova/Package[python-amqp]/ensure) created
2013-02-07T11:57:33 notice: (/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/vncserver_listen]/ensure) created
2013-02-07T11:57:52 notice: (/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Package[nova-compute]/ensure) created
2013-02-07T11:57:52 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_volume_listen]/ensure) created
2013-02-07T11:57:52 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/signing_dir]/ensure) created
2013-02-07T11:57:52 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/metadata_listen]/ensure) created
2013-02-07T11:57:52 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/auth_strategy]/ensure) created
2013-02-07T11:57:57 notice: (/Stage[main]/Nova::Compute::Libvirt/Package[avahi]/ensure) created
2013-02-07T11:57:59 notice: (/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created
2013-02-07T11:57:59 notice: (/Stage[main]/Osnailyfacter::Test_compute/File[/tmp/compute-file]/ensure) defined content as '{md5}1b7628cdc1398d36048932f1eff47a63'
2013-02-07T11:57:59 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rpc_backend]/value) value changed 'nova.openstack.common.rpc.impl_qpid' to 'nova.rpc.impl_kombu'
2013-02-07T11:57:59 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/fixed_range]/ensure) created
2013-02-07T11:57:59 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/dhcp_domain]/ensure) created
2013-02-07T11:57:59 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Sysctl[net.ipv4.ip_forward]/val) val changed '0' to '1'
2013-02-07T11:58:01 notice: (/Stage[main]/Nova::Network/Nova::Generic_service[network]/Package[nova-network]/ensure) created
2013-02-07T11:58:01 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/metadata_host]/ensure) created
2013-02-07T11:58:01 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/memcached_servers]/ensure) created
2013-02-07T11:58:02 notice: (/Stage[main]/Openstack::Compute/Augeas[sysconfig-libvirt]/returns) executed successfully
2013-02-07T11:58:02 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/send_arp_for_ha]/ensure) created
2013-02-07T11:58:02 notice: (/Stage[main]/Openstack::Compute/Nova_config[DEFAULT/multi_host]/ensure) created
2013-02-07T11:58:02 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/allow_resize_to_same_host]/ensure) created
2013-02-07T11:58:02 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) net.ipv4.ip_forward = 1
2013-02-07T11:58:02 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) executed successfully
2013-02-07T11:58:03 notice: (/Stage[main]/Openstack::Compute/Augeas[libvirt-conf]/returns) executed successfully
2013-02-07T11:58:03 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[messagebus]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:58:03 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[avahi-daemon]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:58:03 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[libvirt]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:58:03 notice: (/Stage[main]/Nova::Compute::Libvirt/Service[libvirt]) Triggered 'refresh' from 1 events
2013-02-07T11:58:03 notice: (/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/libvirt_type]/ensure) created
2013-02-07T11:58:03 notice: (/Stage[main]/Nova::Compute::Libvirt/Nova_config[DEFAULT/connection_type]/ensure) created
2013-02-07T11:58:03 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_injected]/ensure) created
2013-02-07T11:58:03 notice: (/Stage[main]/Nova/File[/etc/nova/nova.conf]/owner) owner changed 'root' to 'nova'
2013-02-07T11:58:04 notice: (/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed
2013-02-07T11:58:04 notice: (/Stage[main]/Nova/Exec[post-nova_config]) Triggered 'refresh' from 86 events
2013-02-07T11:58:06 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-02-07 11:58:05 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=3464) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-02-07T11:58:06 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-02-07T11:58:06 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-02-07 11:58:06 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=3531) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-02-07T11:58:06 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]) Triggered 'refresh' from 1 events
2013-02-07T11:58:06 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:58:07 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 47 events
2013-02-07T11:58:08 notice: (/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:58:09 notice: (/Stage[main]/Nova::Compute/Nova::Generic_service[compute]/Service[nova-compute]) Triggered 'refresh' from 4 events
2013-02-07T11:58:10 notice: (/Stage[main]/Nova::Network/Nova::Generic_service[network]/Service[nova-network]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:58:12 notice: (/Stage[main]/Nova::Network/Nova::Generic_service[network]/Service[nova-network]) Triggered 'refresh' from 4 events
2013-02-07T11:58:12 notice: Finished catalog run in 212.07 seconds

View File

@ -0,0 +1,233 @@
2013-02-07T11:44:48 notice: Reopening log files
2013-02-07T11:45:13 notice: (/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created
2013-02-07T11:45:16 notice: (/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created
2013-02-07T11:45:18 notice: (/Stage[main]/Nova::Utilities/Package[screen]/ensure) created
2013-02-07T11:45:18 notice: (/Stage[main]/Memcached/User[memcached]/ensure) created
2013-02-07T11:45:20 notice: (/Stage[main]/Memcached/Package[memcached]/ensure) created
2013-02-07T11:45:20 notice: (/Stage[main]/Memcached/File[/etc/sysconfig/memcached]/content) content changed '{md5}05503957e3796fbe6fddd756a7a102a0' to '{md5}3a3961445528bdeda6d7b8b5564dfcfc'
2013-02-07T11:45:20 notice: (/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:45:21 notice: (/Stage[main]/Memcached/Service[memcached]) Triggered 'refresh' from 1 events
2013-02-07T11:45:21 notice: (/Stage[main]/Osnailyfacter::Test_controller/File[/tmp/controller-file]/ensure) defined content as '{md5}7f5c51282c4b1242e12addba8cc331fa'
2013-02-07T11:45:28 notice: (/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created
2013-02-07T11:45:32 notice: (/Stage[main]/Nova::Utilities/Package[parted]/ensure) created
2013-02-07T11:45:36 notice: (/Stage[main]/Mysql/Package[mysql_client]/ensure) created
2013-02-07T11:45:51 notice: (/Stage[main]/Mysql::Server/Package[mysql-server]/ensure) created
2013-02-07T11:46:25 notice: (/Stage[main]/Mysql::Server/Service[mysqld]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:46:44 notice: (/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created
2013-02-07T11:46:46 notice: (/Stage[main]/Osnailyfacter::Network_setup/Network_config[eth0.109]/ensure) created
2013-02-07T11:46:48 notice: (/Stage[main]/Osnailyfacter::Network_setup/Network_config[eth0.105]/ensure) created
2013-02-07T11:46:54 notice: (/Stage[main]/Osnailyfacter::Network_setup/Network_config[eth0.107]/ensure) created
2013-02-07T11:47:39 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created
2013-02-07T11:47:39 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database[glance]/ensure) created
2013-02-07T11:47:40 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database[nova]/ensure) created
2013-02-07T11:47:40 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_user[nova@127.0.0.1]/ensure) created
2013-02-07T11:47:40 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-2]/Database_user[nova@slave-2]/ensure) created
2013-02-07T11:47:40 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-2]/Database_grant[nova@slave-2/nova]/privileges) privileges changed '' to 'all'
2013-02-07T11:47:42 notice: (/Stage[main]/Nova/Package[python-amqp]/ensure) created
2013-02-07T11:47:44 notice: (/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created
2013-02-07T11:47:44 notice: (/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as '{md5}8d7aea4ac8f2f9ce168ab7c04420308c'
2013-02-07T11:47:58 notice: (/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created
2013-02-07T11:47:58 notice: (/Stage[main]/Horizon/File[/var/log/horizon]/ensure) created
2013-02-07T11:47:58 notice: (/Stage[main]/Horizon/File_line[horizon_redirect_rule]/ensure) created
2013-02-07T11:47:58 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/content) content changed '{md5}d114fa06522fa1016ab2bdede4cfd7bf' to '{md5}4966b1e7a0c22c25b1c69f149d9ae92b'
2013-02-07T11:47:58 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/owner) owner changed 'root' to 'apache'
2013-02-07T11:47:58 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/group) group changed 'root' to 'apache'
2013-02-07T11:48:05 notice: (/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created
2013-02-07T11:49:17 notice: (/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created
2013-02-07T11:49:17 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Sysctl[net.ipv4.ip_forward]/val) val changed '0' to '1'
2013-02-07T11:49:17 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_grant[nova@127.0.0.1/nova]/privileges) privileges changed '' to 'all'
2013-02-07T11:49:17 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) net.ipv4.ip_forward = 1
2013-02-07T11:49:17 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) executed successfully
2013-02-07T11:49:17 notice: (/Stage[main]/Mysql::Config/File[/etc/mysql]/ensure) created
2013-02-07T11:49:17 notice: (/Stage[main]/Mysql::Config/File[/etc/my.cnf]/ensure) defined content as '{md5}7ae09a894eebe7ed3b5fc3b8ca8b445e'
2013-02-07T11:49:17 notice: (/Stage[main]/Mysql::Config/File[/etc/mysql/conf.d]/ensure) created
2013-02-07T11:50:21 notice: (/Stage[main]/Mysql::Config/Exec[mysqld-restart]) Triggered 'refresh' from 3 events
2013-02-07T11:50:21 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-2]/Database_user[glance@slave-2]/ensure) created
2013-02-07T11:50:22 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-2]/Database_grant[glance@slave-2/glance]/privileges) privileges changed '' to 'all'
2013-02-07T11:50:22 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/Database_user[glance@]/ensure) created
2013-02-07T11:50:22 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/Database_grant[glance@/glance]/privileges) privileges changed '' to 'all'
2013-02-07T11:50:22 notice: (/Stage[main]/Horizon/File_line[httpd_listen_on_internal_network_only]/ensure) created
2013-02-07T11:50:22 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:50:24 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]) Triggered 'refresh' from 6 events
2013-02-07T11:50:24 notice: (/Stage[main]/Glance/Group[glance]/ensure) created
2013-02-07T11:50:24 notice: (/Stage[main]/Glance/User[glance]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance/Package[glance]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dirname]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_user]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:glance_pass@127.0.0.1/glance'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[paste_deploy/flavor]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'glance_pass'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_tenant_name]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_password]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '127.0.0.1'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_uri]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance/File[/etc/glance/]/owner) owner changed 'root' to 'glance'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance/File[/etc/glance/]/mode) mode changed '0755' to '0770'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dir]/ensure) created
2013-02-07T11:50:29 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '127.0.0.1'
2013-02-07T11:50:30 notice: (/Stage[main]/Glance/File[glance-logging.conf]/ensure) defined content as '{md5}71fa2daa8e89a992f4225e850fd879e4'
2013-02-07T11:50:30 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-cache.conf]/owner) owner changed 'root' to 'glance'
2013-02-07T11:50:30 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-api.conf]/owner) owner changed 'root' to 'glance'
2013-02-07T11:50:30 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-02-07T11:50:30 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-02-07T11:50:30 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[]/Database_user[nova@]/ensure) created
2013-02-07T11:50:31 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[]/Database_grant[nova@/nova]/privileges) privileges changed '' to 'all'
2013-02-07T11:50:31 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database[keystone]/ensure) created
2013-02-07T11:50:31 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database_user[keystone@127.0.0.1]/ensure) created
2013-02-07T11:50:31 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-2]/Database_user[keystone@slave-2]/ensure) created
2013-02-07T11:50:31 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[]/Database_user[keystone@]/ensure) created
2013-02-07T11:50:31 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-2]/Database_grant[keystone@slave-2/keystone]/privileges) privileges changed '' to 'all'
2013-02-07T11:50:32 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database_grant[keystone@127.0.0.1/keystone]/privileges) privileges changed '' to 'all'
2013-02-07T11:50:32 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[]/Database_grant[keystone@/keystone]/privileges) privileges changed '' to 'all'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database_user[glance@127.0.0.1]/ensure) created
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database_grant[glance@127.0.0.1/glance]/privileges) privileges changed '' to 'all'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dir]/ensure) created
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/File[/etc/glance/glance-registry.conf]/owner) owner changed 'root' to 'glance'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'glance_pass'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[paste_deploy/flavor]/ensure) created
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dirname]/ensure) created
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:glance_pass@127.0.0.1/glance'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-02-07T11:50:32 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Package[keystone]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[sql/connection]/value) value changed 'mysql://keystone:keystone@localhost/keystone' to 'mysql://keystone:keystone_db_pass@127.0.0.1/keystone'
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/public_port]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/compute_port]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/debug]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[policy/driver]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[sql/idle_timeout]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_token]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/bind_host]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/File[/etc/keystone]/owner) owner changed 'root' to 'keystone'
2013-02-07T11:50:34 notice: (/Stage[main]/Keystone/File[/etc/keystone]/group) group changed 'root' to 'keystone'
2013-02-07T11:50:34 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[root@slave-2.domain.tld]/ensure) removed
2013-02-07T11:50:34 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[@localhost]/ensure) removed
2013-02-07T11:50:34 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[@slave-2.domain.tld]/ensure) removed
2013-02-07T11:50:35 notice: (/Stage[main]/Mysql::Server::Account_security/Database[test]/ensure) removed
2013-02-07T11:50:35 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[root@127.0.0.1]/ensure) removed
2013-02-07T11:50:35 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-02-07T11:52:10 notice: (/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created
2013-02-07T11:52:10 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) Stopping rabbitmq-server: RabbitMQ is not running
2013-02-07T11:52:10 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) rabbitmq-server.
2013-02-07T11:52:10 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) executed successfully
2013-02-07T11:52:10 notice: (/Stage[main]/Rabbitmq::Server/File[erlang_cookie]/content) content changed '{md5}a2aaad5829075918ba3a2915808071c7' to '{md5}b28788594da393660db1e4f20d296c10'
2013-02-07T11:52:10 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq.config]/ensure) defined content as '{md5}017a023ed429c071bf945a706be7fb13'
2013-02-07T11:52:10 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq-env.config]/ensure) defined content as '{md5}2980dac99b8f2195a50ef6e459ffedae'
2013-02-07T11:52:13 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:52:17 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]) Triggered 'refresh' from 1 events
2013-02-07T11:52:17 notice: (/Stage[main]/Rabbitmq::Server/Rabbitmq_user[guest]/ensure) removed
2013-02-07T11:52:18 notice: (/Stage[main]/Nova::Rabbitmq/Rabbitmq_user[openstack_rabbit_user]/ensure) created
2013-02-07T11:52:18 notice: (/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[openstack_rabbit_user@/]/ensure) created
2013-02-07T11:52:18 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_interface]/ensure) created
2013-02-07T11:52:18 notice: (/Stage[main]/Nova::Network/Nova_config[DEFAULT/floating_range]/ensure) created
2013-02-07T11:52:18 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_hosts]/ensure) created
2013-02-07T11:52:18 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_volumes]/ensure) created
2013-02-07T11:52:18 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_compute_listen]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_network_bridge]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/ec2_listen]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova_pass'
2013-02-07T11:52:19 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_content_bytes]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'nova'
2013-02-07T11:52:19 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/glance_api_servers]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_host]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_userid]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_ha_queues]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_password]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/verbose]/ensure) created
2013-02-07T11:52:19 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_cores]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Package[nova-consoleauth]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/enabled_apis]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/sql_connection]/value) value changed 'mysql://nova:nova@localhost/nova' to 'mysql://nova:nova_pass@127.0.0.1/nova'
2013-02-07T11:52:20 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_virtual_host]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/image_service]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/volume_api_class]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_files]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_port]/ensure) created
2013-02-07T11:52:20 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/force_dhcp_release]/value) value changed 'True' to 'true'
2013-02-07T11:52:20 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_port]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package[nova-vncproxy]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/User[nova]/shell) shell changed '/sbin/nologin' to '/bin/bash'
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/File[/var/log/nova]/group) group changed 'root' to 'nova'
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/File[/var/log/nova]/mode) mode changed '0755' to '0751'
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_path_bytes]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/api_paste_config]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_floating_ips]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/public_interface]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/service_down_time]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_gigabytes]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_instances]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_volume_listen]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/signing_dir]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/metadata_listen]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/auth_strategy]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_metadata_items]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rpc_backend]/value) value changed 'nova.openstack.common.rpc.impl_qpid' to 'nova.rpc.impl_kombu'
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/fixed_range]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/dhcp_domain]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/allow_resize_to_same_host]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_injected]/ensure) created
2013-02-07T11:52:54 notice: (/Stage[main]/Nova/File[/etc/nova/nova.conf]/owner) owner changed 'root' to 'nova'
2013-02-07T11:52:54 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/verbose]/ensure) created
2013-02-07T11:52:59 notice: (/Stage[main]/Keystone/Exec[keystone-manage db_sync]) Triggered 'refresh' from 12 events
2013-02-07T11:53:00 notice: (/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:53:00 notice: (/Stage[main]/Keystone/Service[keystone]) Triggered 'refresh' from 13 events
2013-02-07T11:53:11 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_volume]/ensure) created
2013-02-07T11:53:12 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_tenant[admin]/ensure) created
2013-02-07T11:53:12 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_role[Member]/ensure) created
2013-02-07T11:53:12 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_ec2]/ensure) created
2013-02-07T11:53:13 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova_volume]/ensure) created
2013-02-07T11:53:13 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova_ec2]/ensure) created
2013-02-07T11:53:13 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_tenant[services]/ensure) created
2013-02-07T11:53:14 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_user[nova]/ensure) created
2013-02-07T11:53:14 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_user[glance]/ensure) created
2013-02-07T11:53:14 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_role[admin]/ensure) created
2013-02-07T11:53:16 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_user_role[glance@services]/ensure) created
2013-02-07T11:53:16 notice: (/Stage[main]/Keystone::Endpoint/Keystone_service[keystone]/ensure) created
2013-02-07T11:53:16 notice: (/Stage[main]/Keystone::Endpoint/Keystone_endpoint[keystone]/ensure) created
2013-02-07T11:53:16 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_service[glance]/ensure) created
2013-02-07T11:53:17 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_endpoint[glance]/ensure) created
2013-02-07T11:53:17 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_user_role[nova@services]/ensure) created
2013-02-07T11:53:18 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_user[admin]/ensure) created
2013-02-07T11:53:19 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_user_role[admin@admin]/ensure) created
2013-02-07T11:53:19 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova]/ensure) created
2013-02-07T11:53:19 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova]/ensure) created
2013-02-07T11:53:19 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/auth_url]/ensure) created
2013-02-07T11:53:27 notice: (/Stage[main]/Glance::Registry/Exec[glance-manage db_sync]) Triggered 'refresh' from 30 events
2013-02-07T11:53:27 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:53:28 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]) Triggered 'refresh' from 11 events
2013-02-07T11:53:29 notice: (/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:53:30 notice: (/Stage[main]/Glance::Api/Service[glance-api]) Triggered 'refresh' from 21 events
2013-02-07T11:53:33 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]/ensure) created
2013-02-07T11:53:35 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]/returns) Added new image with ID: db616923-a62b-43dc-9924-25620186f567
2013-02-07T11:53:35 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]/returns) executed successfully
2013-02-07T11:53:35 notice: (/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed
2013-02-07T11:53:35 notice: (/Stage[main]/Nova/Exec[post-nova_config]) Triggered 'refresh' from 92 events
2013-02-07T11:54:15 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-02-07 11:53:36 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=5871) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-02-07T11:54:15 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-02-07T11:54:15 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-02-07 11:54:15 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=5929) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-02-07T11:54:15 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]) Triggered 'refresh' from 2 events
2013-02-07T11:54:15 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:54:16 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 3 events
2013-02-07T11:54:16 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:54:16 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 4 events
2013-02-07T11:54:17 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:54:17 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 3 events
2013-02-07T11:54:19 notice: (/Stage[main]/Nova::Network/Nova::Manage::Floating[nova-vm-floating]/Nova_floating[nova-vm-floating]/ensure) created
2013-02-07T11:54:19 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:54:20 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 4 events
2013-02-07T11:54:20 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:54:20 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 50 events
2013-02-07T11:54:21 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T11:54:21 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 3 events
2013-02-07T11:54:24 notice: (/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network[nova-vm-net]/ensure) created
2013-02-07T11:54:24 notice: Finished catalog run in 557.67 seconds

View File

@ -0,0 +1,231 @@
2013-02-07T09:48:00 notice: Reopening log files
2013-02-07T09:48:24 notice: (/Stage[main]/Nova::Utilities/Package[euca2ools]/ensure) created
2013-02-07T09:48:27 notice: (/Stage[main]/Mysql::Python/Package[python-mysqldb]/ensure) created
2013-02-07T09:48:30 notice: (/Stage[main]/Nova::Utilities/Package[screen]/ensure) created
2013-02-07T09:48:30 notice: (/Stage[main]/Memcached/User[memcached]/ensure) created
2013-02-07T09:48:33 notice: (/Stage[main]/Memcached/Package[memcached]/ensure) created
2013-02-07T09:48:33 notice: (/Stage[main]/Memcached/File[/etc/sysconfig/memcached]/content) content changed '{md5}05503957e3796fbe6fddd756a7a102a0' to '{md5}3a3961445528bdeda6d7b8b5564dfcfc'
2013-02-07T09:48:34 notice: (/Stage[main]/Memcached/Service[memcached]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:48:34 notice: (/Stage[main]/Memcached/Service[memcached]) Triggered 'refresh' from 1 events
2013-02-07T09:48:34 notice: (/Stage[main]/Osnailyfacter::Test_controller/File[/tmp/controller-file]/ensure) defined content as '{md5}7f5c51282c4b1242e12addba8cc331fa'
2013-02-07T09:48:43 notice: (/Stage[main]/Horizon/Package[mod_wsgi]/ensure) created
2013-02-07T09:48:48 notice: (/Stage[main]/Nova::Utilities/Package[parted]/ensure) created
2013-02-07T09:48:54 notice: (/Stage[main]/Mysql/Package[mysql_client]/ensure) created
2013-02-07T09:49:19 notice: (/Stage[main]/Mysql::Server/Package[mysql-server]/ensure) created
2013-02-07T09:49:52 notice: (/Stage[main]/Mysql::Server/Service[mysqld]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:50:21 notice: (/Stage[main]/Keystone::Python/Package[python-keystone]/ensure) created
2013-02-07T09:50:23 notice: (/Stage[main]/Osnailyfacter::Network_setup/Network_config[eth0.104]/ensure) created
2013-02-07T09:51:25 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Package[nova-api]/ensure) created
2013-02-07T09:51:26 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database[glance]/ensure) created
2013-02-07T09:51:26 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database[nova]/ensure) created
2013-02-07T09:51:26 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_user[nova@127.0.0.1]/ensure) created
2013-02-07T09:51:28 notice: (/Stage[main]/Nova/Package[python-amqp]/ensure) created
2013-02-07T09:51:28 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-1]/Database_user[nova@slave-1]/ensure) created
2013-02-07T09:51:29 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[slave-1]/Database_grant[nova@slave-1/nova]/privileges) privileges changed '' to 'all'
2013-02-07T09:51:31 notice: (/Stage[main]/Nova::Utilities/Package[unzip]/ensure) created
2013-02-07T09:51:31 notice: (/Stage[main]/Openstack::Auth_file/File[/root/openrc]/ensure) defined content as '{md5}08262b4756f32b16740cb81596c8c607'
2013-02-07T09:51:46 notice: (/Stage[main]/Horizon/Package[openstack-dashboard]/ensure) created
2013-02-07T09:51:46 notice: (/Stage[main]/Horizon/File[/var/log/horizon]/ensure) created
2013-02-07T09:51:46 notice: (/Stage[main]/Horizon/File_line[horizon_redirect_rule]/ensure) created
2013-02-07T09:51:46 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-1]/Database_user[glance@slave-1]/ensure) created
2013-02-07T09:51:46 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[slave-1]/Database_grant[glance@slave-1/glance]/privileges) privileges changed '' to 'all'
2013-02-07T09:51:46 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/content) content changed '{md5}d114fa06522fa1016ab2bdede4cfd7bf' to '{md5}4966b1e7a0c22c25b1c69f149d9ae92b'
2013-02-07T09:51:46 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/owner) owner changed 'root' to 'apache'
2013-02-07T09:51:46 notice: (/Stage[main]/Horizon/File[/etc/openstack-dashboard/local_settings]/group) group changed 'root' to 'apache'
2013-02-07T09:51:54 notice: (/Stage[main]/Nova::Vncproxy/Package[python-numpy]/ensure) created
2013-02-07T09:53:27 notice: (/Stage[main]/Nova::Utilities/Package[libguestfs-tools-c]/ensure) created
2013-02-07T09:53:27 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Sysctl[net.ipv4.ip_forward]/val) val changed '0' to '1'
2013-02-07T09:53:28 notice: (/Stage[main]/Nova::Db::Mysql/Mysql::Db[nova]/Database_grant[nova@127.0.0.1/nova]/privileges) privileges changed '' to 'all'
2013-02-07T09:53:28 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) net.ipv4.ip_forward = 1
2013-02-07T09:53:28 notice: (/Stage[main]/Nova::Network/Sysctl::Value[net.ipv4.ip_forward]/Exec[exec_sysctl_net.ipv4.ip_forward]/returns) executed successfully
2013-02-07T09:53:28 notice: (/Stage[main]/Mysql::Config/File[/etc/mysql]/ensure) created
2013-02-07T09:53:28 notice: (/Stage[main]/Mysql::Config/File[/etc/my.cnf]/ensure) defined content as '{md5}7ae09a894eebe7ed3b5fc3b8ca8b445e'
2013-02-07T09:53:28 notice: (/Stage[main]/Mysql::Config/File[/etc/mysql/conf.d]/ensure) created
2013-02-07T09:54:31 notice: (/Stage[main]/Mysql::Config/Exec[mysqld-restart]) Triggered 'refresh' from 3 events
2013-02-07T09:54:31 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/Database_user[glance@]/ensure) created
2013-02-07T09:54:32 notice: (/Stage[main]/Glance::Db::Mysql/Glance::Db::Mysql::Host_access[]/Database_grant[glance@/glance]/privileges) privileges changed '' to 'all'
2013-02-07T09:54:32 notice: (/Stage[main]/Horizon/File_line[httpd_listen_on_internal_network_only]/ensure) created
2013-02-07T09:54:32 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:54:34 notice: (/Stage[main]/Horizon/Service[$::horizon::params::http_service]) Triggered 'refresh' from 6 events
2013-02-07T09:54:34 notice: (/Stage[main]/Glance/Group[glance]/ensure) created
2013-02-07T09:54:34 notice: (/Stage[main]/Glance/User[glance]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance/Package[glance]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dirname]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_user]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:glance_pass@127.0.0.1/glance'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[paste_deploy/flavor]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'glance_pass'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_tenant_name]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/admin_password]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '127.0.0.1'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/auth_uri]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance/File[/etc/glance/]/owner) owner changed 'root' to 'glance'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance/File[/etc/glance/]/mode) mode changed '0755' to '0770'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[keystone_authtoken/signing_dir]/ensure) created
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/registry_host]/value) value changed '0.0.0.0' to '127.0.0.1'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance/File[glance-logging.conf]/ensure) defined content as '{md5}71fa2daa8e89a992f4225e850fd879e4'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-cache.conf]/owner) owner changed 'root' to 'glance'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/File[/etc/glance/glance-api.conf]/owner) owner changed 'root' to 'glance'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-02-07T09:54:40 notice: (/Stage[main]/Glance::Api/Glance_api_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-02-07T09:54:41 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[]/Database_user[nova@]/ensure) created
2013-02-07T09:54:41 notice: (/Stage[main]/Nova::Db::Mysql/Nova::Db::Mysql::Host_access[]/Database_grant[nova@/nova]/privileges) privileges changed '' to 'all'
2013-02-07T09:54:42 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database[keystone]/ensure) created
2013-02-07T09:54:42 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database_user[keystone@127.0.0.1]/ensure) created
2013-02-07T09:54:42 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[]/Database_user[keystone@]/ensure) created
2013-02-07T09:54:42 notice: (/Stage[main]/Keystone::Db::Mysql/Mysql::Db[keystone]/Database_grant[keystone@127.0.0.1/keystone]/privileges) privileges changed '' to 'all'
2013-02-07T09:54:42 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[]/Database_grant[keystone@/keystone]/privileges) privileges changed '' to 'all'
2013-02-07T09:54:42 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-1]/Database_user[keystone@slave-1]/ensure) created
2013-02-07T09:54:43 notice: (/Stage[main]/Keystone::Db::Mysql/Keystone::Db::Mysql::Host_access[slave-1]/Database_grant[keystone@slave-1/keystone]/privileges) privileges changed '' to 'all'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database_user[glance@127.0.0.1]/ensure) created
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Db::Mysql/Mysql::Db[glance]/Database_grant[glance@127.0.0.1/glance]/privileges) privileges changed '' to 'all'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dir]/ensure) created
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/File[/etc/glance/glance-registry.conf]/owner) owner changed 'root' to 'glance'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'glance_pass'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[paste_deploy/flavor]/ensure) created
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/signing_dirname]/ensure) created
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/sql_connection]/value) value changed 'mysql://glance:glance@localhost/glance' to 'mysql://glance:glance_pass@127.0.0.1/glance'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/debug]/value) value changed 'False' to 'true'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[DEFAULT/verbose]/value) value changed 'True' to 'true'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-02-07T09:54:43 notice: (/Stage[main]/Glance::Registry/Glance_registry_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'glance'
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Package[keystone]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[sql/connection]/value) value changed 'mysql://keystone:keystone@localhost/keystone' to 'mysql://keystone:keystone_db_pass@127.0.0.1/keystone'
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/public_port]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/compute_port]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/debug]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[policy/driver]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[sql/idle_timeout]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_token]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/bind_host]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/admin_port]/ensure) created
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/File[/etc/keystone]/owner) owner changed 'root' to 'keystone'
2013-02-07T09:54:45 notice: (/Stage[main]/Keystone/File[/etc/keystone]/group) group changed 'root' to 'keystone'
2013-02-07T09:54:45 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[@slave-1.domain.tld]/ensure) removed
2013-02-07T09:54:45 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[@localhost]/ensure) removed
2013-02-07T09:54:45 notice: (/Stage[main]/Mysql::Server::Account_security/Database[test]/ensure) removed
2013-02-07T09:54:45 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[root@slave-1.domain.tld]/ensure) removed
2013-02-07T09:54:45 notice: (/Stage[main]/Mysql::Server::Account_security/Database_user[root@127.0.0.1]/ensure) removed
2013-02-07T09:54:46 notice: (/Stage[main]/Rabbitmq::Server/Package[qpid-cpp-server]/ensure) created
2013-02-07T09:56:47 notice: (/Stage[main]/Rabbitmq::Server/Package[rabbitmq-server]/ensure) created
2013-02-07T09:56:48 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) Stopping rabbitmq-server: RabbitMQ is not running
2013-02-07T09:56:48 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) rabbitmq-server.
2013-02-07T09:56:48 notice: (/Stage[main]/Rabbitmq::Server/Exec[rabbitmq_stop]/returns) executed successfully
2013-02-07T09:56:48 notice: (/Stage[main]/Rabbitmq::Server/File[erlang_cookie]/content) content changed '{md5}8be1bf5048c306fa9b40c631203d8300' to '{md5}b28788594da393660db1e4f20d296c10'
2013-02-07T09:56:48 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq.config]/ensure) defined content as '{md5}017a023ed429c071bf945a706be7fb13'
2013-02-07T09:56:48 notice: (/Stage[main]/Rabbitmq::Server/File[rabbitmq-env.config]/ensure) defined content as '{md5}2980dac99b8f2195a50ef6e459ffedae'
2013-02-07T09:56:51 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:56:56 notice: (/Stage[main]/Rabbitmq::Service/Service[rabbitmq-server]) Triggered 'refresh' from 1 events
2013-02-07T09:56:56 notice: (/Stage[main]/Rabbitmq::Server/Rabbitmq_user[guest]/ensure) removed
2013-02-07T09:56:57 notice: (/Stage[main]/Nova::Rabbitmq/Rabbitmq_user[openstack_rabbit_user]/ensure) created
2013-02-07T09:56:57 notice: (/Stage[main]/Nova::Rabbitmq/Rabbitmq_user_permissions[openstack_rabbit_user@/]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_interface]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova::Network/Nova_config[DEFAULT/floating_range]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_hosts]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_volumes]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_compute_listen]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_network_bridge]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Openstack::Controller/Nova_config[DEFAULT/memcached_servers]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/ec2_listen]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_password]/value) value changed '%SERVICE_PASSWORD%' to 'nova_pass'
2013-02-07T09:56:58 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_content_bytes]/ensure) created
2013-02-07T09:56:58 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_user]/value) value changed '%SERVICE_USER%' to 'nova'
2013-02-07T09:56:59 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/glance_api_servers]/ensure) created
2013-02-07T09:56:59 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_host]/ensure) created
2013-02-07T09:56:59 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_userid]/ensure) created
2013-02-07T09:56:59 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_ha_queues]/ensure) created
2013-02-07T09:56:59 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_password]/ensure) created
2013-02-07T09:56:59 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/verbose]/ensure) created
2013-02-07T09:56:59 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_cores]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Package[nova-consoleauth]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/enabled_apis]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/sql_connection]/value) value changed 'mysql://nova:nova@localhost/nova' to 'mysql://nova:nova_pass@127.0.0.1/nova'
2013-02-07T09:57:01 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_virtual_host]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/image_service]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/volume_api_class]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_files]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova::Vncproxy/Nova_config[DEFAULT/novncproxy_port]/ensure) created
2013-02-07T09:57:01 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/force_dhcp_release]/value) value changed 'True' to 'true'
2013-02-07T09:57:01 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rabbit_port]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Package[nova-vncproxy]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/User[nova]/shell) shell changed '/sbin/nologin' to '/bin/bash'
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/File[/var/log/nova]/group) group changed 'root' to 'nova'
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/File[/var/log/nova]/mode) mode changed '0755' to '0751'
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_max_injected_file_path_bytes]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Api/Nova_config[DEFAULT/api_paste_config]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_floating_ips]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Openstack::Nova::Controller/Nova_config[DEFAULT/multi_host]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/public_interface]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/service_down_time]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/admin_tenant_name]/value) value changed '%SERVICE_TENANT_NAME%' to 'services'
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_gigabytes]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_instances]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/osapi_volume_listen]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Api/Nova_config[keystone_authtoken/signing_dir]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/metadata_listen]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/auth_strategy]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Quota/Nova_config[DEFAULT/quota_metadata_items]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/rpc_backend]/value) value changed 'nova.openstack.common.rpc.impl_qpid' to 'nova.rpc.impl_kombu'
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/fixed_range]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/dhcp_domain]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/Nova_config[DEFAULT/allow_resize_to_same_host]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova::Network::Flatdhcp/Nova_config[DEFAULT/flat_injected]/ensure) created
2013-02-07T09:57:42 notice: (/Stage[main]/Nova/File[/etc/nova/nova.conf]/owner) owner changed 'root' to 'nova'
2013-02-07T09:57:42 notice: (/Stage[main]/Keystone/Keystone_config[DEFAULT/verbose]/ensure) created
2013-02-07T09:57:46 notice: (/Stage[main]/Keystone/Exec[keystone-manage db_sync]) Triggered 'refresh' from 12 events
2013-02-07T09:57:47 notice: (/Stage[main]/Keystone/Service[keystone]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:57:47 notice: (/Stage[main]/Keystone/Service[keystone]) Triggered 'refresh' from 13 events
2013-02-07T09:57:59 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_volume]/ensure) created
2013-02-07T09:57:59 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_tenant[admin]/ensure) created
2013-02-07T09:57:59 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_role[Member]/ensure) created
2013-02-07T09:58:00 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova_ec2]/ensure) created
2013-02-07T09:58:00 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova_volume]/ensure) created
2013-02-07T09:58:00 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova_ec2]/ensure) created
2013-02-07T09:58:01 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_tenant[services]/ensure) created
2013-02-07T09:58:01 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_user[nova]/ensure) created
2013-02-07T09:58:02 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_user[glance]/ensure) created
2013-02-07T09:58:02 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_role[admin]/ensure) created
2013-02-07T09:58:04 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_user_role[glance@services]/ensure) created
2013-02-07T09:58:05 notice: (/Stage[main]/Keystone::Endpoint/Keystone_service[keystone]/ensure) created
2013-02-07T09:58:05 notice: (/Stage[main]/Keystone::Endpoint/Keystone_endpoint[keystone]/ensure) created
2013-02-07T09:58:05 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_service[glance]/ensure) created
2013-02-07T09:58:05 notice: (/Stage[main]/Glance::Keystone::Auth/Keystone_endpoint[glance]/ensure) created
2013-02-07T09:58:06 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_user_role[nova@services]/ensure) created
2013-02-07T09:58:07 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_user[admin]/ensure) created
2013-02-07T09:58:07 notice: (/Stage[main]/Keystone::Roles::Admin/Keystone_user_role[admin@admin]/ensure) created
2013-02-07T09:58:08 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_service[nova]/ensure) created
2013-02-07T09:58:08 notice: (/Stage[main]/Nova::Keystone::Auth/Keystone_endpoint[nova]/ensure) created
2013-02-07T09:58:08 notice: (/Stage[main]/Glance::Api/Glance_cache_config[DEFAULT/auth_url]/ensure) created
2013-02-07T09:58:22 notice: (/Stage[main]/Glance::Registry/Exec[glance-manage db_sync]) Triggered 'refresh' from 30 events
2013-02-07T09:58:22 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:58:22 notice: (/Stage[main]/Glance::Registry/Service[glance-registry]) Triggered 'refresh' from 11 events
2013-02-07T09:58:24 notice: (/Stage[main]/Glance::Api/Service[glance-api]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:58:24 notice: (/Stage[main]/Glance::Api/Service[glance-api]) Triggered 'refresh' from 21 events
2013-02-07T09:58:27 notice: (/Stage[main]/Openstack::Img::Cirros/Package[cirros-testvm]/ensure) created
2013-02-07T09:58:29 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]/returns) Added new image with ID: 71bfad1f-74d4-4b6f-8b08-605bc3e782c7
2013-02-07T09:58:29 notice: (/Stage[main]/Openstack::Img::Cirros/Exec[upload-img]/returns) executed successfully
2013-02-07T09:58:30 notice: (/Stage[main]/Nova/Exec[post-nova_config]/returns) Nova config has changed
2013-02-07T09:58:30 notice: (/Stage[main]/Nova/Exec[post-nova_config]) Triggered 'refresh' from 92 events
2013-02-07T09:59:20 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-02-07 09:58:30 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=6121) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-02-07T09:59:20 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) executed successfully
2013-02-07T09:59:21 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]/returns) 2013-02-07 09:59:21 DEBUG nova.utils [-] backend <module 'nova.db.sqlalchemy.migration' from '/usr/lib/python2.6/site-packages/nova/db/sqlalchemy/migration.pyc'> from (pid=6191) __get_backend /usr/lib/python2.6/site-packages/nova/utils.py:494
2013-02-07T09:59:21 notice: (/Stage[main]/Nova::Api/Exec[nova-db-sync]) Triggered 'refresh' from 2 events
2013-02-07T09:59:21 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:59:22 notice: (/Stage[main]/Nova::Cert/Nova::Generic_service[cert]/Service[nova-cert]) Triggered 'refresh' from 3 events
2013-02-07T09:59:22 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:59:22 notice: (/Stage[main]/Nova::Vncproxy/Nova::Generic_service[vncproxy]/Service[nova-vncproxy]) Triggered 'refresh' from 4 events
2013-02-07T09:59:23 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:59:24 notice: (/Stage[main]/Nova::Objectstore/Nova::Generic_service[objectstore]/Service[nova-objectstore]) Triggered 'refresh' from 3 events
2013-02-07T09:59:26 notice: (/Stage[main]/Nova::Network/Nova::Manage::Floating[nova-vm-floating]/Nova_floating[nova-vm-floating]/ensure) created
2013-02-07T09:59:26 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:59:27 notice: (/Stage[main]/Nova::Consoleauth/Nova::Generic_service[consoleauth]/Service[nova-consoleauth]) Triggered 'refresh' from 4 events
2013-02-07T09:59:27 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:59:27 notice: (/Stage[main]/Nova::Api/Nova::Generic_service[api]/Service[nova-api]) Triggered 'refresh' from 50 events
2013-02-07T09:59:28 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]/ensure) ensure changed 'stopped' to 'running'
2013-02-07T09:59:28 notice: (/Stage[main]/Nova::Scheduler/Nova::Generic_service[scheduler]/Service[nova-scheduler]) Triggered 'refresh' from 3 events
2013-02-07T09:59:31 notice: (/Stage[main]/Nova::Network/Nova::Manage::Network[nova-vm-net]/Nova_network[nova-vm-net]/ensure) created
2013-02-07T09:59:31 notice: Finished catalog run in 674.93 seconds

View File

@ -0,0 +1,43 @@
require File.join(File.dirname(__FILE__), "..", "spec_helper")
require 'mcollective'
require 'json'
include MCollective::RPC
NODE = "devnailgun.mirantis.com"
# Integration specs: these require a live MCollective agent reachable at NODE.
describe "MCollective" do
  context "When MC agent is up and running" do
    it "it should send echo message to MC agent and get it back" do
      payload = "simple message of node '#{NODE}'"
      client = rpcclient("fake")
      client.progress = false
      client.discover(:nodes => [NODE])
      responses = client.echo(:msg => payload)
      check_mcollective_result(responses)
      responses[0].results[:data][:msg].should eql("Hello, it is my reply: #{payload}")
    end

    it "it should update facts file with new key-value and could get it back" do
      # One random value plus one static value: proves both fresh writes and
      # re-reads round-trip through the nailyfact agent.
      facts = {"anykey" => rand(2**30).to_s, "other" => "static"}
      client = rpcclient("nailyfact")
      client.progress = false
      client.discover(:nodes => [NODE])
      responses = client.post(:value => facts.to_json)
      check_mcollective_result(responses)
      facts.each_key do |key|
        responses = client.get(:key => key)
        check_mcollective_result(responses)
        responses[0].results[:data][:value].should eql(facts[key])
      end
    end
  end
end
private

# Shared assertion helper: exactly one reply came back, and that reply
# reported success (MCollective statuscode 0).
def check_mcollective_result(stats)
  stats.length.should eql(1)
  stats.first.results[:statuscode].should eql(0)
end

View File

@ -0,0 +1,44 @@
$LOAD_PATH << File.join(File.dirname(__FILE__),"..","lib")
require 'rspec'
# Following require is needed for rcov to provide valid results
require 'rspec/autorun'
require 'yaml'
require 'astute'
# Use mocha (not rspec-mocks) as the mocking framework for every spec.
RSpec.configure do |config|
  config.mock_with :mocha
end
# NOTE(mihgen): I hate to wait for unit tests to complete,
# resetting time to sleep significantly increases tests speed
# Zeroing these intervals removes every polling/retry sleep in unit tests.
Astute.config.PUPPET_DEPLOY_INTERVAL = 0
Astute.config.PUPPET_FADE_INTERVAL = 0
Astute.config.MC_RETRY_INTERVAL = 0
# Helpers mixed into specs for stubbing the MCollective RPC plumbing (mocha).
module SpecHelpers
  # Build a mocha mock standing in for an MCollective rpcclient and wire it
  # into every Astute::MClient instance.
  #
  # discover_nodes - when given, the mock *expects* discovery to be performed
  #                  at least once with exactly these node uids (stringified);
  #                  when nil, discovery is merely stubbed.
  # timeout        - when given, the mock expects the timeout to be set to it.
  def mock_rpcclient(discover_nodes=nil, timeout=nil)
    fake_client = mock('rpcclient') do
      stubs(:progress=)
      expects(:timeout=).with(timeout) unless timeout.nil?
      if discover_nodes.nil?
        stubs(:discover)
      else
        uids = discover_nodes.map { |node| node['uid'].to_s }
        expects(:discover).with(:nodes => uids).at_least_once
      end
    end
    Astute::MClient.any_instance.stubs(:rpcclient).returns(fake_client)
    fake_client
  end

  # Build a canned MCollective reply: success (statuscode 0) from sender '1'
  # with an empty data hash, unless overridden through +result+.
  def mock_mc_result(result={})
    reply = {:statuscode => 0, :data => {}, :sender => '1'}.merge(result)
    mock('mc_result') do
      stubs(:results).returns(reply)
      stubs(:agent).returns('mc_stubbed_agent')
    end
  end
end

View File

@ -0,0 +1,267 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
require 'tempfile'
require 'tmpdir'
require 'date'
include Astute
describe LogParser do
# Compute statistics over a progress table (array of {:date, :progress}
# entries): time-weighted expectancy and standard deviation of the real
# progress, the same for an "ideal" linear progress, and the Pearson
# correlation coefficient (PCC) between the two. Mutates each entry, adding
# :time_delta and :ideal_progress. Returns a hash of all computed values.
def get_statistics_variables(progress_table)
  # Calculate some statistics variables: expectancy, standart deviation and
  # correlation coefficient between real and ideal progress calculation.
  total_time = 0
  real_expectancy = 0
  real_sqr_expectancy = 0
  prev_event_date = nil
  progress_table.each do |el|
    date = el[:date]
    prev_event_date = date unless prev_event_date
    progress = el[:progress].to_f
    # Dates are DateTimes, so subtraction yields a day fraction (Rational).
    period = date - prev_event_date
    # NOTE(review): Date::day_fraction_to_time is a legacy/private Date API —
    # TODO confirm it still exists on the Ruby version in use.
    hours, mins, secs, frac = Date::day_fraction_to_time(period)
    period_in_sec = hours * 60 * 60 + mins * 60 + secs
    total_time += period_in_sec
    # Each sample is weighted by how long it was the current progress value.
    real_expectancy += period_in_sec * progress
    real_sqr_expectancy += period_in_sec * progress ** 2
    el[:time_delta] = period_in_sec
    prev_event_date = date
  end

  # Calculate standart deviation for real progress distibution.
  real_expectancy = real_expectancy.to_f / total_time
  real_sqr_expectancy = real_sqr_expectancy.to_f / total_time
  real_standart_deviation = Math.sqrt(real_sqr_expectancy - real_expectancy ** 2)

  # Calculate PCC (correlation coefficient).
  ideal_sqr_expectancy = 0
  ideal_expectancy = 0
  t = 0
  # The ideal progress grows linearly from 0 to 100 over total_time seconds.
  ideal_delta = 100.0 / total_time
  mixed_expectancy = 0
  progress_table.each do |el|
    t += el[:time_delta]
    ideal_progress = t * ideal_delta
    ideal_expectancy += ideal_progress * el[:time_delta]
    ideal_sqr_expectancy += ideal_progress ** 2 * el[:time_delta]
    el[:ideal_progress] = ideal_progress
    mixed_expectancy += el[:progress] * ideal_progress * el[:time_delta]
  end
  ideal_expectancy = ideal_expectancy / total_time
  ideal_sqr_expectancy = ideal_sqr_expectancy / total_time
  mixed_expectancy = mixed_expectancy / total_time
  ideal_standart_deviation = Math.sqrt(ideal_sqr_expectancy - ideal_expectancy ** 2)
  # PCC = covariance / (sigma_ideal * sigma_real); close to 1.0 means the
  # reported progress tracks linear time well.
  covariance = mixed_expectancy - ideal_expectancy * real_expectancy
  pcc = covariance / (ideal_standart_deviation * real_standart_deviation)
  statistics = {
    'real_expectancy' => real_expectancy,
    'real_sqr_expectancy' => real_sqr_expectancy,
    'real_standart_deviation' => real_standart_deviation,
    'ideal_expectancy' => ideal_expectancy,
    'ideal_sqr_expectancy' => ideal_sqr_expectancy,
    'ideal_standart_deviation' => ideal_standart_deviation,
    'mixed_expectancy' => mixed_expectancy,
    'covariance' => covariance,
    'pcc' => pcc,
    'total_time' => total_time,
  }
  return statistics
end
# Read lines from +fo+ until one carries a timestamp matching +date_regexp+.
# Returns [line, DateTime] for the first such line; lines without a timestamp
# are consumed and discarded. Returns nil when EOF is hit without a match.
def get_next_line(fo, date_regexp, date_format)
  while !fo.eof?
    candidate = fo.readline
    stamp = candidate.match(date_regexp)
    next unless stamp
    return candidate, DateTime.strptime(stamp[0], date_format)
  end
end
# Accumulate consecutive log lines whose timestamp is <= +now+. Stops (and
# rewinds the file position) at the first line dated after +now+, returning
# everything gathered so far as one string.
def get_next_lines_by_date(fo, now, date_regexp, date_format)
  lines = ''
  until fo.eof?
    pos = fo.pos
    line, date = get_next_line(fo, date_regexp, date_format)
    # Bug fix: if the remaining tail of the file contains no timestamped line,
    # get_next_line consumes it and returns nil; the old code then crashed
    # with NoMethodError on `nil <= now`. Treat that as end of input.
    break if line.nil?
    if date <= now
      lines << line
    else
      fo.pos = pos
      return lines
    end
  end
  lines
end
context "Correlation coeff. (PCC) of Provisioning progress bar calculation" do
# Drive ParseProvisionLogs over a canned anaconda log: replay the example log
# into a temp file a few (virtual) seconds at a time, recording the parser's
# reported progress after every step into node['progress_table'], then attach
# the computed statistics. Returns the (mutated) node.
def provision_parser_wrapper(node)
  uids = [node['uid']]
  nodes = [node]
  time_delta = 5.0/24/60/60   # 5 seconds expressed as a fraction of a day
  log_delay = 6*time_delta    # start the clock slightly behind the first entry
  deploy_parser = Astute::LogParser::ParseProvisionLogs.new
  pattern_spec = deploy_parser.pattern_spec
  date_regexp = '^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}'
  date_format = '%Y-%m-%dT%H:%M:%S'

  Dir.mktmpdir do |dir|
    # Create temp log files and structures.
    pattern_spec['path_prefix'] = "#{dir}/"
    path = "#{pattern_spec['path_prefix']}#{node['ip']}/#{pattern_spec['filename']}"
    Dir.mkdir(File.dirname(File.dirname(path)))
    Dir.mkdir(File.dirname(path))
    node['file'] = File.open(path, 'w')
    src_filename = File.join(File.dirname(__FILE__), "..", "example-logs", node['src_filename'])
    node['src'] = File.open(src_filename)
    # Peek at the first timestamp to seed the virtual clock, then rewind.
    _line, date = get_next_line(node['src'], date_regexp, date_format)
    node['src'].pos = 0
    node['now'] = date - log_delay
    node['progress_table'] ||= []

    # End 'while' cycle if reach EOF at all src files.
    until node['src'].eof?
      # Copy logs line by line from example logfile to tempfile and collect progress for each step.
      # Bug fix: get_next_lines_by_date returns a single String, so the old
      # `lines, date = ...` destructuring silently assigned nil to `date`.
      lines = get_next_lines_by_date(node['src'], node['now'], date_regexp, date_format)
      node['file'].write(lines)
      node['file'].flush
      node['last_lines'] = lines
      # Freeze "now" for the parser so progress is computed at the virtual time.
      DateTime.stubs(:now).returns(node['now'])
      node_progress = deploy_parser.progress_calculate(uids, nodes)[0]
      node['progress_table'] << {:date => node['now'], :progress => node_progress['progress']}
      node['now'] += time_delta
    end

    # Bug fix: the block parameter used to be named `node`, shadowing the
    # method argument (triggers Ruby's "shadowing outer local variable" warning).
    nodes.each do |n|
      n['statistics'] = get_statistics_variables(n['progress_table'])
    end

    # Clear temp files.
    node['file'].close
    File.unlink(node['file'].path)
    Dir.unlink(File.dirname(node['file'].path))
  end
  node
end
# NOTE(review): "greather" is a typo for "greater" in the example name;
# left untouched because spec descriptions are runtime strings (test names).
it "should be greather than 0.96" do
  # Minimal node fixture: the parser needs uid/ip to locate the log file and
  # disk metadata to estimate provisioning size.
  node = {'uid' => '1', 'ip' => '1.0.0.1', 'role' => 'controller', 'src_filename' => 'anaconda.log_',
    'meta' => { 'disks' =>
      [
        {'name' => 'flash drive', 'removable' => true, 'size' => 1000},
        {'name' => 'sda', 'removable'=> false, 'size' => 32*1000*1000*1000},
      ]
    }
  }
  calculated_node = provision_parser_wrapper(node)
  calculated_node['statistics']['pcc'].should > 0.96
end

it "it must be updated at least 5 times" do
  # Otherwise progress bar has no meaning I guess...
  pending('Not yet implemented')
end
end
context "Correlation coeff. (PCC) of Deploying progress bar calculation" do
# Drive ParseDeployLogs for a given cluster type over several nodes at once:
# replay each node's example puppet log line-by-line into per-node temp files,
# query the parser's progress after every replayed line, and record a
# {:date, :progress} entry whenever the replayed line carried a timestamp.
# Attaches statistics to each node and returns the (mutated) nodes array.
def deployment_parser_wrapper(cluster_type, nodes)
  uids = nodes.map{|n| n['uid']}
  deploy_parser = Astute::LogParser::ParseDeployLogs.new(cluster_type)
  pattern_spec = deploy_parser.pattern_spec
  date_regexp = '^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}'
  date_format = '%Y-%m-%dT%H:%M:%S'
  Dir.mktmpdir do |dir|
    # Create temp log files and structures.
    pattern_spec['path_prefix'] = "#{dir}/"
    nodes.each do |node|
      path = "#{pattern_spec['path_prefix']}#{node['ip']}/#{pattern_spec['filename']}"
      Dir.mkdir(File.dirname(path))
      node['file'] = File.open(path, 'w')
      src_filename = File.join(File.dirname(__FILE__), "..", "example-logs", node['src_filename'])
      node['src'] = File.open(src_filename)
      node['progress_table'] ||= []
    end
    # End 'while' cycle if reach EOF at all src files.
    while nodes.index{|n| not n['src'].eof?}
      # Copy logs line by line from example logfile to tempfile and collect progress for each step.
      nodes.each do |node|
        unless node['src'].eof?
          line = node['src'].readline
          node['file'].write(line)
          node['file'].flush
          node['last_line'] = line
        else
          # Exhausted nodes contribute an empty "last line" so the date match
          # below simply finds nothing for them.
          node['last_line'] = ''
        end
      end
      nodes_progress = deploy_parser.progress_calculate(uids, nodes)
      nodes_progress.each do |progress|
        # Map each progress report back to its node by uid.
        node = nodes.at(nodes.index{|n| n['uid'] == progress['uid']})
        date_string = node['last_line'].match(date_regexp)
        if date_string
          date = DateTime.strptime(date_string[0], date_format)
          node['progress_table'] << {:date => date, :progress => progress['progress']}
        end
      end
    end
    nodes.each do |node|
      node['statistics'] = get_statistics_variables(node['progress_table'])
    end
    # Clear temp files.
    nodes.each do |n|
      n['file'].close
      File.unlink(n['file'].path)
      Dir.unlink(File.dirname(n['file'].path))
    end
  end
  return nodes
end
# Each example replays recorded puppet logs for a deployment topology and
# asserts the progress bar correlates with linear time (PCC threshold).
# NOTE(review): "greather" is a typo for "greater" in the example names;
# left untouched because spec descriptions are runtime strings (test names).
it "should be greather than 0.85 for HA deployment" do
  nodes = [
    {'uid' => '1', 'ip' => '1.0.0.1', 'role' => 'controller', 'src_filename' => 'puppet-agent.log.ha.contr.2'},
    {'uid' => '2', 'ip' => '1.0.0.2', 'role' => 'compute', 'src_filename' => 'puppet-agent.log.ha.compute'},
  ]
  calculated_nodes = deployment_parser_wrapper('ha_compute', nodes)
  calculated_nodes.each {|node| node['statistics']['pcc'].should > 0.85}

  # For debug purposes.
  # print "\n"
  # calculated_nodes.each do |node|
  #   print node['statistics'].inspect, "\n", node['statistics']['pcc'], "\n", node['progress_table'][-1][:progress], "\n"
  # end
end

it "should be greather than 0.97 for singlenode deployment" do
  nodes = [
    {'uid' => '1', 'ip' => '1.0.0.1', 'role' => 'controller', 'src_filename' => 'puppet-agent.log.singlenode'},
  ]
  calculated_nodes = deployment_parser_wrapper('singlenode_compute', nodes)
  calculated_nodes.each {|node| node['statistics']['pcc'].should > 0.97}
end

it "should be greather than 0.94 for multinode deployment" do
  nodes = [
    {'uid' => '1', 'ip' => '1.0.0.1', 'role' => 'controller', 'src_filename' => 'puppet-agent.log.multi.contr'},
    {'uid' => '2', 'ip' => '1.0.0.2', 'role' => 'compute', 'src_filename' => 'puppet-agent.log.multi.compute'},
  ]
  calculated_nodes = deployment_parser_wrapper('multinode_compute', nodes)
  calculated_nodes.each {|node| node['statistics']['pcc'].should > 0.94}
end
end
end

View File

@ -0,0 +1,79 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
include Astute
# Unit specs for Astute::MClient, the wrapper around MCollective's rpcclient.
# All RPC traffic is replaced with mocha mocks via SpecHelpers#mock_rpcclient /
# #mock_mc_result; the `.returns(...).then.returns(...)` chains below are
# ORDER-SENSITIVE: each stubbed call consumes the next canned reply.
describe MClient do
  include SpecHelpers

  before(:each) do
    # Minimal context double: MClient only touches task_id and reporter here.
    @ctx = mock('context')
    @ctx.stubs(:task_id)
    @ctx.stubs(:reporter)
  end

  it "should receive method call and process valid result correctly" do
    nodes = [{'uid' => 1}, {'uid' => 2}, {'uid' => 3}]
    rpcclient = mock_rpcclient(nodes)
    mc_valid_result = mock_mc_result

    rpcclient.expects(:echo).with(:msg => 'hello world').once.returns([mc_valid_result]*3)

    mclient = MClient.new(@ctx, "faketest", nodes.map {|x| x['uid']})
    stats = mclient.echo(:msg => 'hello world')
    stats.should eql([mc_valid_result]*3)
  end

  it "should return even bad result if check_result=false" do
    nodes = [{'uid' => 1}, {'uid' => 2}, {'uid' => 3}]
    rpcclient = mock_rpcclient(nodes)
    mc_valid_result = mock_mc_result
    mc_error_result = mock_mc_result({:statuscode => 1, :sender => '2'})

    rpcclient.expects(:echo).with(:msg => 'hello world').once.\
        returns([mc_valid_result, mc_error_result])

    # check_result=false disables both the retry loop and the status check.
    mclient = MClient.new(@ctx, "faketest", nodes.map {|x| x['uid']}, check_result=false)
    stats = mclient.echo(:msg => 'hello world')
    stats.should eql([mc_valid_result, mc_error_result])
  end

  it "should try to retry for non-responded nodes" do
    nodes = [{'uid' => 1}, {'uid' => 2}, {'uid' => 3}]
    # Hand-rolled mock (not mock_rpcclient) so we can expect TWO discover
    # calls: the initial one and the retry narrowed to non-responders.
    rpcclient = mock('rpcclient') do
      stubs(:progress=)
      expects(:discover).with(:nodes => ['1','2','3'])
      expects(:discover).with(:nodes => ['2','3'])
    end
    Astute::MClient.any_instance.stubs(:rpcclient).returns(rpcclient)

    mc_valid_result = mock_mc_result
    mc_valid_result2 = mock_mc_result({:sender => '2'})

    # First call: only node 1 answers; retry: only node 2 answers; node 3 never does.
    rpcclient.stubs(:echo).returns([mc_valid_result]).then.
                           returns([mc_valid_result2]).then

    mclient = MClient.new(@ctx, "faketest", nodes.map {|x| x['uid']})
    mclient.retries = 1
    expect { mclient.echo(:msg => 'hello world') }.to raise_error(/MCollective agents '3' didn't respond./)
  end

  it "should raise error if agent returns statuscode != 0" do
    nodes = [{'uid' => 1}, {'uid' => 2}, {'uid' => 3}]
    rpcclient = mock('rpcclient') do
      stubs(:progress=)
      expects(:discover).with(:nodes => ['1','2','3'])
      expects(:discover).with(:nodes => ['2','3'])
    end
    Astute::MClient.any_instance.stubs(:rpcclient).returns(rpcclient)

    mc_valid_result = mock_mc_result
    mc_failed_result = mock_mc_result({:sender => '2', :statuscode => 1})

    # Node 2 responds but with a failure status; node 3 never responds at all.
    rpcclient.stubs(:echo).returns([mc_valid_result]).then.
                           returns([mc_failed_result]).then

    mclient = MClient.new(@ctx, "faketest", nodes.map {|x| x['uid']})
    mclient.retries = 1
    expect { mclient.echo(:msg => 'hello world') }.to \
        raise_error(/MCollective agents '3' didn't respond. \n.* failed nodes: 2/)
  end
end

View File

@ -0,0 +1,182 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
describe "NailyFact DeploymentEngine" do
context "When deploy is called, " do
before(:each) do
@ctx = mock
@ctx.stubs(:task_id)
@ctx.stubs(:deploy_log_parser).returns(Astute::LogParser::NoParsing.new)
reporter = mock
@ctx.stubs(:reporter).returns(reporter)
reporter.stubs(:report)
@deploy_engine = Astute::DeploymentEngine::NailyFact.new(@ctx)
@data = {"args" =>
{"attributes" =>
{"storage_network_range" => "172.16.0.0/24", "auto_assign_floating_ip" => false,
"mysql" => {"root_password" => "Z2EqsZo5"},
"keystone" => {"admin_token" => "5qKy0i63", "db_password" => "HHQ86Rym", "admin_tenant" => "admin"},
"nova" => {"user_password" => "h8RY8SE7", "db_password" => "Xl9I51Cb"},
"glance" => {"user_password" => "nDlUxuJq", "db_password" => "V050pQAn"},
"rabbit" => {"user" => "nova", "password" => "FLF3txKC"},
"management_network_range" => "192.168.0.0/24",
"public_network_range" => "240.0.1.0/24",
"fixed_network_range" => "10.0.0.0/24",
"floating_network_range" => "240.0.0.0/24"},
"task_uuid" => "19d99029-350a-4c9c-819c-1f294cf9e741",
"nodes" => [{"mac" => "52:54:00:0E:B8:F5", "status" => "provisioning",
"uid" => "devnailgun.mirantis.com", "error_type" => nil,
"fqdn" => "devnailgun.mirantis.com",
"network_data" => [{"gateway" => "192.168.0.1",
"name" => "management", "dev" => "eth0",
"brd" => "192.168.0.255", "netmask" => "255.255.255.0",
"vlan" => 102, "ip" => "192.168.0.2/24"},
{"gateway" => "240.0.1.1",
"name" => "public", "dev" => "eth0",
"brd" => "240.0.1.255", "netmask" => "255.255.255.0",
"vlan" => 101, "ip" => "240.0.1.2/24"},
{"name" => "floating", "dev" => "eth0", "vlan" => 120},
{"name" => "fixed", "dev" => "eth0", "vlan" => 103},
{"name" => "storage", "dev" => "eth0", "vlan" => 104}],
"id" => 1,
"ip" => "10.20.0.200",
"role" => "controller"},
{"mac" => "52:54:00:50:91:DD", "status" => "provisioning",
"uid" => 2, "error_type" => nil,
"fqdn" => "slave-2.mirantis.com",
"network_data" => [{"gateway" => "192.168.0.1",
"name" => "management", "dev" => "eth0",
"brd" => "192.168.0.255", "netmask" => "255.255.255.0",
"vlan" => 102, "ip" => "192.168.0.3/24"},
{"gateway" => "240.0.1.1",
"name" => "public", "dev" => "eth0",
"brd" => "240.0.1.255", "netmask" => "255.255.255.0",
"vlan" => 101, "ip" => "240.0.1.3/24"},
{"name" => "floating", "dev" => "eth0", "vlan" => 120},
{"name" => "fixed", "dev" => "eth0", "vlan" => 103},
{"name" => "storage", "dev" => "eth0", "vlan" => 104}],
"id" => 2,
"ip" => "10.20.0.221",
"role" => "compute"},
{"mac" => "52:54:00:C3:2C:28", "status" => "provisioning",
"uid" => 3, "error_type" => nil,
"fqdn" => "slave-3.mirantis.com",
"network_data" => [{"gateway" => "192.168.0.1",
"name" => "management", "dev" => "eth0",
"brd" => "192.168.0.255", "netmask" => "255.255.255.0",
"vlan" => 102, "ip" => "192.168.0.4/24"},
{"gateway" => "240.0.1.1",
"name" => "public", "dev" => "eth0",
"brd" => "240.0.1.255", "netmask" => "255.255.255.0",
"vlan" => 101, "ip" => "240.0.1.4/24"},
{"name" => "floating", "dev" => "eth0", "vlan" => 120},
{"name" => "fixed", "dev" => "eth0", "vlan" => 103},
{"name" => "storage", "dev" => "eth0", "vlan" => 104}],
"id" => 3,
"ip" => "10.20.0.68",
"role" => "compute"}]},
"method" => "deploy",
"respond_to" => "deploy_resp"}
ha_nodes = @data['args']['nodes'] +
[{"mac" => "52:54:00:0E:88:88", "status" => "provisioned",
"uid" => "4", "error_type" => nil,
"fqdn" => "controller-4.mirantis.com",
"network_data" => [{"gateway" => "192.168.0.1",
"name" => "management", "dev" => "eth0",
"brd" => "192.168.0.255", "netmask" => "255.255.255.0",
"vlan" => 102, "ip" => "192.168.0.5/24"},
{"gateway" => "240.0.1.1",
"name" => "public", "dev" => "eth0",
"brd" => "240.0.1.255", "netmask" => "255.255.255.0",
"vlan" => 101, "ip" => "240.0.1.5/24"},
{"name" => "floating", "dev" => "eth0", "vlan" => 120},
{"name" => "fixed", "dev" => "eth0", "vlan" => 103},
{"name" => "storage", "dev" => "eth0", "vlan" => 104}],
"id" => 4,
"ip" => "10.20.0.205",
"role" => "controller"},
{"mac" => "52:54:00:0E:99:99", "status" => "provisioned",
"uid" => "5", "error_type" => nil,
"fqdn" => "controller-5.mirantis.com",
"network_data" => [{"gateway" => "192.168.0.1",
"name" => "management", "dev" => "eth0",
"brd" => "192.168.0.255", "netmask" => "255.255.255.0",
"vlan" => 102, "ip" => "192.168.0.6/24"},
{"gateway" => "240.0.1.1",
"name" => "public", "dev" => "eth0",
"brd" => "240.0.1.255", "netmask" => "255.255.255.0",
"vlan" => 101, "ip" => "240.0.1.6/24"},
{"name" => "floating", "dev" => "eth0", "vlan" => 120},
{"name" => "fixed", "dev" => "eth0", "vlan" => 103},
{"name" => "storage", "dev" => "eth0", "vlan" => 104}],
"id" => 5,
"ip" => "10.20.0.206",
"role" => "controller"}]
@data_ha = Marshal.load(Marshal.dump(@data))
@data_ha['args']['nodes'] = ha_nodes
@data_ha['args']['attributes']['deployment_mode'] = "ha_compute"
# VIPs are required for HA mode and should be passed from Nailgun (only in HA)
@data_ha['args']['attributes']['management_vip'] = "192.168.0.111"
@data_ha['args']['attributes']['public_vip'] = "240.0.1.111"
end
it "it should call valid method depends on attrs" do
nodes = [{'uid' => 1}]
attrs = {'deployment_mode' => 'ha_compute'}
attrs_modified = attrs.merge({'some' => 'somea'})
@deploy_engine.expects(:attrs_ha_compute).with(nodes, attrs).returns(attrs_modified)
@deploy_engine.expects(:deploy_ha_compute).with(nodes, attrs_modified)
# All implementations of deploy_piece go to subclasses
@deploy_engine.respond_to?(:deploy_piece).should be_true
@deploy_engine.deploy(nodes, attrs)
end
it "it should raise an exception if deployment mode is unsupported" do
nodes = [{'uid' => 1}]
attrs = {'deployment_mode' => 'unknown'}
expect {@deploy_engine.deploy(nodes, attrs)}.to raise_exception(/Method attrs_unknown is not implemented/)
end
it "multinode_compute deploy should not raise any exception" do
@data['args']['attributes']['deployment_mode'] = "multinode_compute"
Astute::Metadata.expects(:publish_facts).times(@data['args']['nodes'].size)
# we got two calls, one for controller, and another for all computes
controller_nodes = @data['args']['nodes'].select{|n| n['role'] == 'controller'}
compute_nodes = @data['args']['nodes'].select{|n| n['role'] == 'compute'}
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, controller_nodes, instance_of(Fixnum), true).once
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, compute_nodes, instance_of(Fixnum), true).once
@deploy_engine.deploy(@data['args']['nodes'], @data['args']['attributes'])
end
it "ha_compute deploy should not raise any exception" do
Astute::Metadata.expects(:publish_facts).at_least_once
controller_nodes = @data_ha['args']['nodes'].select{|n| n['role'] == 'controller'}
compute_nodes = @data_ha['args']['nodes'].select{|n| n['role'] == 'compute'}
controller_nodes.each do |n|
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, [n], 0, false).once
end
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, controller_nodes, 0, false).once
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, [controller_nodes.first], 0, false).once
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, controller_nodes, 0, false).once
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, controller_nodes, 3, true).once
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, compute_nodes, instance_of(Fixnum), true).once
@deploy_engine.deploy(@data_ha['args']['nodes'], @data_ha['args']['attributes'])
end
it "ha_compute deploy should not raise any exception if there are only one controller" do
Astute::Metadata.expects(:publish_facts).at_least_once
Astute::PuppetdDeployer.expects(:deploy).times(5)
ctrl = @data_ha['args']['nodes'].select {|n| n['role'] == 'controller'}[0]
@deploy_engine.deploy([ctrl], @data_ha['args']['attributes'])
end
it "singlenode_compute deploy should not raise any exception" do
@data['args']['attributes']['deployment_mode'] = "singlenode_compute"
@data['args']['nodes'] = [@data['args']['nodes'][0]] # We have only one node in singlenode
Astute::Metadata.expects(:publish_facts).times(@data['args']['nodes'].size)
Astute::PuppetdDeployer.expects(:deploy).with(@ctx, @data['args']['nodes'], instance_of(Fixnum), true).once
@deploy_engine.deploy(@data['args']['nodes'], @data['args']['attributes'])
end
end
end

View File

@ -0,0 +1,75 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
# Tests for Astute::Node — a hash-like wrapper around node data that
# enforces a mandatory, string-normalized and immutable 'uid' key.
describe Astute::Node do
  it "accepts hash for initialization" do
    node = Astute::Node.new('uid' => 'abc', 'info' => 'blabla')
    node.uid.should == 'abc'
    node.info.should == 'blabla'
  end
  it "requires uid" do
    expect{ Astute::Node.new({}) }.to raise_error(TypeError)
  end
  # uid is normalized to a String whatever type was passed in
  it "stringifies uid" do
    node = Astute::Node.new('uid' => :abc)
    node.uid.should == 'abc'
    node = Astute::Node.new('uid' => 123)
    node.uid.should == '123'
  end
  # uid is write-once: reassignment via any accessor raises TypeError
  it "denies uid changes" do
    node = Astute::Node.new('uid' => 1)
    expect{ node.uid = 2 }.to raise_error(TypeError)
    expect{ node['uid'] = 2 }.to raise_error(TypeError)
    expect{ node[:uid] = 2 }.to raise_error(TypeError)
  end
  # String and symbol keys address the same attribute
  it "allows [] accessors" do
    node = Astute::Node.new('uid' => 123, 'info' => 'abc')
    node['info'].should == 'abc'
    node[:info].should == 'abc'
    node['info'] = 'cba'
    node['info'].should == 'cba'
    node[:info] = 'dcb'
    node[:info].should == 'dcb'
  end
  # to_hash must return a copy, never the internal table itself
  it "unwraps to hash" do
    hash = {'uid' => '123', 'info' => 'abc'}
    node = Astute::Node.new(hash)
    node.to_hash.should == hash
    node.to_hash.should_not === node.instance_variable_get(:@table)
  end
end
# Tests for Astute::NodesHash — a uid-keyed collection that normalizes
# every pushed element into an Astute::Node.
describe Astute::NodesHash do
  it "accepts array of hashes or nodes for initialization and allows accessing by uid" do
    nodes = Astute::NodesHash.build([{'uid' => 123, 'info' => 'blabla1'}, Astute::Node.new({'uid' => 'abc', 'info' => 'blabla2'})])
    nodes['123'].info.should == 'blabla1'
    nodes['abc'].info.should == 'blabla2'
    # lookups are indifferent to key type (integer / symbol / string)
    nodes[123].info.should == 'blabla1'
    nodes[:abc].info.should == 'blabla2'
    nodes['123'].uid.should == '123'
    nodes.values.map(&:class).uniq.should == [Astute::Node]
  end
  it "allows easy elements addition and normalizes data" do
    nodes = Astute::NodesHash.new
    nodes << {'uid' => 1} << {'uid' => 2}
    nodes.push({'uid' => 3}, {'uid' => 4}, {'uid' => 5})
    nodes.keys.sort.should == %w(1 2 3 4 5)
    nodes.values.map(&:class).uniq.should == [Astute::Node]
  end
  it "introduces meaningful aliases" do
    nodes = Astute::NodesHash.build([{'uid' => 123, 'info' => 'blabla1'}, Astute::Node.new({'uid' => 'abc', 'info' => 'blabla2'})])
    nodes.uids.should == nodes.keys
    nodes.nodes.should == nodes.values
  end
  # elements may only be added via << / push, never via []=
  it "denies direct accessors" do
    expect{ Astute::NodesHash.new['fake-uid'] = {'bla' => 'bla'} }.to raise_error(NoMethodError)
  end
end

View File

@ -0,0 +1,181 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
# Tests for Astute::Orchestrator — the top-level entry point driving node
# type detection, network verification, node removal and deployment over
# MCollective (mocked here via SpecHelpers' mock_rpcclient/mock_mc_result).
describe Astute::Orchestrator do
  include SpecHelpers
  before(:each) do
    @orchestrator = Astute::Orchestrator.new
    @reporter = mock('reporter')
    @reporter.stub_everything
  end
  it "must be able to return node type" do
    nodes = [{'uid' => '1'}]
    res = {:data => {:node_type => 'target'},
           :sender=>"1"}
    mc_res = mock_mc_result(res)
    mc_timeout = 5
    rpcclient = mock_rpcclient(nodes, mc_timeout)
    rpcclient.expects(:get_type).once.returns([mc_res])
    types = @orchestrator.node_type(@reporter, 'task_uuid', nodes, mc_timeout)
    types.should eql([{"node_type"=>"target", "uid"=>"1"}])
  end
  it "must be able to complete verify_networks" do
    nodes = [{'uid' => '1'}, {'uid' => '2'}]
    networks = [{'id' => 1, 'vlan_id' => 100, 'cidr' => '10.0.0.0/24'},
                {'id' => 2, 'vlan_id' => 101, 'cidr' => '192.168.0.0/24'}]
    # Node 1 sees node 2 on vlan 100 only; node 2 sees both nodes on both
    # vlans — the expected result below reflects exactly this asymmetry.
    res1 = {:data => {:uid=>"1",
                      :neighbours => {"eth0" => {"100" => {"1" => ["eth0"], "2" => ["eth0"]},
                                                 "101" => {"1" => ["eth0"]}
                                                }
                                     }
                     },
            :sender=>"1"
           }
    res2 = {:data => {:uid=>"2",
                      :neighbours => {"eth0" => {"100" => {"1" => ["eth0"], "2" => ["eth0"]},
                                                 "101" => {"1" => ["eth0"], "2" => ["eth0"]}
                                                }
                                     }
                     },
            :sender=>"2"
           }
    valid_res = {:statuscode => 0, :sender => '1'}
    mc_res1 = mock_mc_result(res1)
    mc_res2 = mock_mc_result(res2)
    mc_valid_res = mock_mc_result
    rpcclient = mock_rpcclient(nodes)
    # verify_networks drives three net_probe RPC calls in this order
    rpcclient.expects(:start_frame_listeners).once.returns([mc_valid_res]*2)
    rpcclient.expects(:send_probing_frames).once.returns([mc_valid_res]*2)
    rpcclient.expects(:get_probing_info).once.returns([mc_res1, mc_res2])
    Astute::MClient.any_instance.stubs(:rpcclient).returns(rpcclient)
    res = @orchestrator.verify_networks(@reporter, 'task_uuid', nodes, networks)
    expected = {"nodes" => [{"networks" => [{"iface"=>"eth0", "vlans"=>[100]}], "uid"=>"1"},
                            {"networks"=>[{"iface"=>"eth0", "vlans"=>[100, 101]}], "uid"=>"2"}]}
    res.should eql(expected)
  end
  it "verify_network returns error if nodes list is empty" do
    res = @orchestrator.verify_networks(@reporter, 'task_uuid', [], [])
    res.should eql({'status' => 'error', 'error' => "Nodes list is empty. Nothing to check."})
  end
  # With a single node there is nothing to cross-check, so every requested
  # vlan is reported as passing.
  it "verify_network returns all vlans passed if only one node provided" do
    nodes = [{'uid' => '1'}]
    networks = [{'id' => 1, 'vlan_id' => 100, 'cidr' => '10.0.0.0/24'},
                {'id' => 2, 'vlan_id' => 101, 'cidr' => '192.168.0.0/24'}]
    res = @orchestrator.verify_networks(@reporter, 'task_uuid', nodes, networks)
    expected = {"nodes" => [{"networks" => [{"iface"=>"eth0", "vlans"=>[100,101]}], "uid"=>"1"}]}
    res.should eql(expected)
  end
  it "in remove_nodes, it returns empty list if nodes are not provided" do
    res = @orchestrator.remove_nodes(@reporter, 'task_uuid', [])
    res.should eql({'nodes' => []})
  end
  it "remove_nodes cleans nodes and reboots them" do
    removed_hash = {:sender => '1',
                    :data => {:rebooted => true}}
    error_hash = {:sender => '2',
                  :data => {:rebooted => false, :error_msg => 'Could not reboot'}}
    nodes = [{'uid' => 1}, {'uid' => 2}]
    rpcclient = mock_rpcclient
    mc_removed_res = mock_mc_result(removed_hash)
    mc_error_res = mock_mc_result(error_hash)
    rpcclient.expects(:erase_node).at_least_once.with(:reboot => true).returns([mc_removed_res, mc_error_res])
    res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
    # Node 1 succeeds; node 2's reboot failure surfaces as an error node
    res.should eql({'nodes' => [{'uid' => '1'}], 'status' => 'error',
                    'error_nodes' => [{"uid"=>"2", "error"=>"RPC method 'erase_node' failed "\
                                                            "with message: Could not reboot"}]})
  end
  it "it calls deploy method with valid arguments" do
    nodes = [{'uid' => 1}]
    attrs = {'a' => 'b'}
    # uids are normalized to strings before reaching the engine
    Astute::DeploymentEngine::NailyFact.any_instance.expects(:deploy).
                                        with([{'uid' => '1'}], attrs)
    @orchestrator.deploy(@reporter, 'task_uuid', nodes, attrs)
  end
  it "deploy method raises error if nodes list is empty" do
    expect {@orchestrator.deploy(@reporter, 'task_uuid', [], {})}.
           to raise_error(/Nodes to deploy are not provided!/)
  end
  # Failing nodes are retried MC_RETRIES times before being reported
  it "remove_nodes try to call MCAgent multiple times on error" do
    removed_hash = {:sender => '1',
                    :data => {:rebooted => true}}
    error_hash = {:sender => '2',
                  :data => {:rebooted => false, :error_msg => 'Could not reboot'}}
    nodes = [{'uid' => 1}, {'uid' => 2}]
    rpcclient = mock_rpcclient(nodes)
    mc_removed_res = mock_mc_result(removed_hash)
    mc_error_res = mock_mc_result(error_hash)
    retries = Astute.config[:MC_RETRIES]
    retries.should == 5
    # Each retry re-discovers only the still-failing node ('2')
    rpcclient.expects(:discover).with(:nodes => ['2']).times(retries)
    rpcclient.expects(:erase_node).times(retries + 1).with(:reboot => true).returns([mc_removed_res, mc_error_res]).then.returns([mc_error_res])
    res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
    res.should eql({'nodes' => [{'uid' => '1'}], 'status' => 'error',
                    'error_nodes' => [{"uid"=>"2", "error"=>"RPC method 'erase_node' failed "\
                                                            "with message: Could not reboot"}]})
  end
  # Nodes that never answer RPC are also retried, then reported as such
  it "remove_nodes try to call MCAgent multiple times on no response" do
    removed_hash = {:sender => '2', :data => {:rebooted => true}}
    then_removed_hash = {:sender => '3', :data => {:rebooted => true}}
    nodes = [{'uid' => 1}, {'uid' => 2}, {'uid' => 3}]
    rpcclient = mock_rpcclient(nodes)
    mc_removed_res = mock_mc_result(removed_hash)
    mc_then_removed_res = mock_mc_result(then_removed_hash)
    retries = Astute.config[:MC_RETRIES]
    # First retry targets nodes 1 and 3; after 3 answers, only node 1 is left
    rpcclient.expects(:discover).with(:nodes => %w(1 3)).times(1)
    rpcclient.expects(:discover).with(:nodes => %w(1)).times(retries - 1)
    rpcclient.expects(:erase_node).times(retries + 1).with(:reboot => true).
        returns([mc_removed_res]).then.returns([mc_then_removed_res]).then.returns([])
    res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
    res['nodes'] = res['nodes'].sort_by{|n| n['uid'] }
    res.should eql({'nodes' => [{'uid' => '2'}, {'uid' => '3'}], 'status' => 'error',
                    'error_nodes' => [{'uid'=>'1', 'error'=>'Node not answered by RPC.'}]})
  end
  # Retrying stops early once every node has succeeded
  it "remove_nodes and returns early if retries were successful" do
    removed_hash = {:sender => '1', :data => {:rebooted => true}}
    then_removed_hash = {:sender => '2', :data => {:rebooted => true}}
    nodes = [{'uid' => 1}, {'uid' => 2}]
    rpcclient = mock_rpcclient(nodes)
    mc_removed_res = mock_mc_result(removed_hash)
    mc_then_removed_res = mock_mc_result(then_removed_hash)
    retries = Astute.config[:MC_RETRIES]
    retries.should_not == 2
    rpcclient.expects(:discover).with(:nodes => %w(2)).times(1)
    rpcclient.expects(:erase_node).times(2).with(:reboot => true).
        returns([mc_removed_res]).then.returns([mc_then_removed_res])
    res = @orchestrator.remove_nodes(@reporter, 'task_uuid', nodes)
    res['nodes'] = res['nodes'].sort_by{|n| n['uid'] }
    res.should eql({'nodes' => [{'uid' => '1'}, {'uid' => '2'}]})
  end
  # Pending example — description only, not yet implemented
  it "remove_nodes do not fail if any of nodes failed"
end

View File

@ -0,0 +1,194 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
include Astute
describe "Puppetd" do
include SpecHelpers
context "PuppetdDeployer" do
before :each do
@ctx = mock
@ctx.stubs(:task_id)
@reporter = mock('reporter')
@ctx.stubs(:reporter).returns(ProxyReporter.new(@reporter))
@ctx.stubs(:deploy_log_parser).returns(Astute::LogParser::NoParsing.new)
end
it "reports ready status for node if puppet deploy finished successfully" do
@reporter.expects(:report).with('nodes' => [{'uid' => '1', 'status' => 'ready', 'progress' => 100}])
last_run_result = {:data=>
{:time=>{"last_run"=>1358425701},
:status => "running", :resources => {'failed' => 0},
:running => 1, :idling => 0},
:sender=>"1"}
last_run_result_new = Marshal.load(Marshal.dump(last_run_result))
last_run_result_new[:data][:time]['last_run'] = 1358426000
last_run_result_finished = Marshal.load(Marshal.dump(last_run_result))
last_run_result_finished[:data][:status] = 'stopped'
last_run_result_finished[:data][:time]['last_run'] = 1358427000
nodes = [{'uid' => '1'}]
rpcclient = mock_rpcclient(nodes)
rpcclient_valid_result = mock_mc_result(last_run_result)
rpcclient_new_res = mock_mc_result(last_run_result_new)
rpcclient_finished_res = mock_mc_result(last_run_result_finished)
rpcclient.stubs(:last_run_summary).returns([rpcclient_valid_result]).then.
returns([rpcclient_valid_result]).then.
returns([rpcclient_new_res]).then.
returns([rpcclient_finished_res])
rpcclient.expects(:runonce).at_least_once.returns([rpcclient_valid_result])
Astute::PuppetdDeployer.deploy(@ctx, nodes, retries=0)
end
it "doesn't report ready status for node if change_node_status disabled" do
@reporter.expects(:report).never
last_run_result = {:data=>
{:time=>{"last_run"=>1358425701},
:status => "running", :resources => {'failed' => 0},
:running => 1, :idling => 0},
:sender=>"1"}
last_run_result_new = Marshal.load(Marshal.dump(last_run_result))
last_run_result_new[:data][:time]['last_run'] = 1358426000
last_run_result_finished = Marshal.load(Marshal.dump(last_run_result))
last_run_result_finished[:data][:status] = 'stopped'
last_run_result_finished[:data][:time]['last_run'] = 1358427000
nodes = [{'uid' => '1'}]
rpcclient = mock_rpcclient(nodes)
rpcclient_valid_result = mock_mc_result(last_run_result)
rpcclient_new_res = mock_mc_result(last_run_result_new)
rpcclient_finished_res = mock_mc_result(last_run_result_finished)
rpcclient.stubs(:last_run_summary).returns([rpcclient_valid_result]).then.
returns([rpcclient_valid_result]).then.
returns([rpcclient_new_res]).then.
returns([rpcclient_finished_res])
rpcclient.expects(:runonce).at_least_once.returns([rpcclient_valid_result])
Astute::PuppetdDeployer.deploy(@ctx, nodes, retries=0, change_node_status=false)
end
it "publishes error status for node if puppet failed" do
@reporter.expects(:report).with('nodes' => [{'status' => 'error',
'error_type' => 'deploy', 'uid' => '1'}])
last_run_result = {:statuscode=>0, :data=>
{:changes=>{"total"=>1}, :time=>{"last_run"=>1358425701},
:resources=>{"failed"=>0}, :status => "running",
:running => 1, :idling => 0, :runtime => 100},
:sender=>"1"}
last_run_result_new = Marshal.load(Marshal.dump(last_run_result))
last_run_result_new[:data][:time]['last_run'] = 1358426000
last_run_result_new[:data][:resources]['failed'] = 1
nodes = [{'uid' => '1'}]
last_run_result_finished = Marshal.load(Marshal.dump(last_run_result))
last_run_result_finished[:data][:status] = 'stopped'
last_run_result_finished[:data][:time]['last_run'] = 1358427000
last_run_result_finished[:data][:resources]['failed'] = 1
rpcclient = mock_rpcclient(nodes)
rpcclient_valid_result = mock_mc_result(last_run_result)
rpcclient_new_res = mock_mc_result(last_run_result_new)
rpcclient_finished_res = mock_mc_result(last_run_result_finished)
rpcclient.stubs(:last_run_summary).returns([rpcclient_valid_result]).then.
returns([rpcclient_valid_result]).then.
returns([rpcclient_new_res]).then.
returns([rpcclient_finished_res])
rpcclient.expects(:runonce).at_least_once.returns([rpcclient_valid_result])
MClient.any_instance.stubs(:rpcclient).returns(rpcclient)
Astute::PuppetdDeployer.deploy(@ctx, nodes, retries=0)
end
it "doesn't publish error status for node if change_node_status disabled" do
@reporter.expects(:report).never
last_run_result = {:statuscode=>0, :data=>
{:changes=>{"total"=>1}, :time=>{"last_run"=>1358425701},
:resources=>{"failed"=>0}, :status => "running",
:running => 1, :idling => 0, :runtime => 100},
:sender=>"1"}
last_run_result_new = Marshal.load(Marshal.dump(last_run_result))
last_run_result_new[:data][:time]['last_run'] = 1358426000
last_run_result_new[:data][:resources]['failed'] = 1
nodes = [{'uid' => '1'}]
last_run_result_finished = Marshal.load(Marshal.dump(last_run_result))
last_run_result_finished[:data][:status] = 'stopped'
last_run_result_finished[:data][:time]['last_run'] = 1358427000
last_run_result_finished[:data][:resources]['failed'] = 1
rpcclient = mock_rpcclient(nodes)
rpcclient_valid_result = mock_mc_result(last_run_result)
rpcclient_new_res = mock_mc_result(last_run_result_new)
rpcclient_finished_res = mock_mc_result(last_run_result_finished)
rpcclient.stubs(:last_run_summary).returns([rpcclient_valid_result]).then.
returns([rpcclient_valid_result]).then.
returns([rpcclient_new_res]).then.
returns([rpcclient_finished_res])
rpcclient.expects(:runonce).at_least_once.returns([rpcclient_valid_result])
MClient.any_instance.stubs(:rpcclient).returns(rpcclient)
Astute::PuppetdDeployer.deploy(@ctx, nodes, retries=0, change_node_status=false)
end
it "retries to run puppet if it fails" do
@reporter.expects(:report).with('nodes' => [{'uid' => '1', 'status' => 'ready', 'progress' => 100}])
last_run_result = {:statuscode=>0, :data=>
{:changes=>{"total"=>1}, :time=>{"last_run"=>1358425701},
:resources=>{"failed"=>0}, :status => "running",
:running => 1, :idling => 0, :runtime => 100},
:sender=>"1"}
last_run_failed = Marshal.load(Marshal.dump(last_run_result))
last_run_failed[:data][:time]['last_run'] = 1358426000
last_run_failed[:data][:resources]['failed'] = 1
last_run_failed[:data][:status] = 'stopped'
last_run_fixing = Marshal.load(Marshal.dump(last_run_result))
last_run_fixing[:data][:time]['last_run'] = 1358426000
last_run_fixing[:data][:resources]['failed'] = 1
last_run_fixing[:data][:status] = 'running'
last_run_success = Marshal.load(Marshal.dump(last_run_result))
last_run_success[:data][:time]['last_run'] = 1358428000
last_run_success[:data][:status] = 'stopped'
nodes = [{'uid' => '1'}]
rpcclient = mock_rpcclient(nodes)
rpcclient_valid_result = mock_mc_result(last_run_result)
rpcclient_failed = mock_mc_result(last_run_failed)
rpcclient_fixing = mock_mc_result(last_run_fixing)
rpcclient_succeed = mock_mc_result(last_run_success)
rpcclient.stubs(:last_run_summary).returns([rpcclient_valid_result]).then.
returns([rpcclient_valid_result]).then.
returns([rpcclient_failed]).then.
returns([rpcclient_failed]).then.
returns([rpcclient_fixing]).then.
returns([rpcclient_succeed])
rpcclient.expects(:runonce).at_least_once.returns([rpcclient_valid_result])
MClient.any_instance.stubs(:rpcclient).returns(rpcclient)
Astute::PuppetdDeployer.deploy(@ctx, nodes, retries=1)
end
end
end

View File

@ -0,0 +1,190 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
include Astute
describe "ProxyReporter" do
context "Instance of ProxyReporter class" do
before :each do
@msg = {'nodes' => [{'status' => 'ready', 'uid' => '1'}]}
@msg_pr = {'nodes' => [@msg['nodes'][0],
{'status' => 'deploying', 'uid' => '2',
'progress' => 54}]}
@up_reporter = mock('up_reporter')
@reporter = ProxyReporter.new(@up_reporter)
end
it "reports first-come data" do
@up_reporter.expects(:report).with(@msg)
@reporter.report(@msg)
end
it "does not report the same message" do
@up_reporter.expects(:report).with(@msg).once
5.times { @reporter.report(@msg) }
end
it "reports only updated node" do
updated_node = @msg_pr['nodes'][1]
expected_msg = {'nodes' => [updated_node]}
@up_reporter.expects(:report).with(@msg)
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(@msg)
@reporter.report(@msg_pr)
end
it "reports only if progress value is greater" do
msg1 = {'nodes' => [{'status' => 'deploying', 'uid' => '1', 'progress' => 54},
{'status' => 'deploying', 'uid' => '2', 'progress' => 54}]}
msg2 = Marshal.load(Marshal.dump(msg1))
msg2['nodes'][1]['progress'] = 100
msg2['nodes'][1]['status'] = 'ready'
updated_node = msg2['nodes'][1]
expected_msg = {'nodes' => [updated_node]}
@up_reporter.expects(:report).with(msg1)
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(msg1)
@reporter.report(msg2)
end
it "raises exception if wrong key passed" do
@msg['nodes'][0]['ups'] = 'some_value'
lambda {@reporter.report(@msg)}.should raise_error
end
it "adjusts progress to 100 if passed greater" do
input_msg = {'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => 120}]}
expected_msg = {'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => 100}]}
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(input_msg)
end
it "adjusts progress to 0 if passed less" do
input_msg = {'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => -20}]}
expected_msg = {'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => 0}]}
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(input_msg)
end
it "adjusts progress to 100 if status provisioned and no progress given" do
input_msg = {'nodes' => [{'uid' => 1, 'status' => 'provisioned'}]}
expected_msg = {'nodes' => [{'uid' => 1, 'status' => 'provisioned', 'progress' => 100}]}
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(input_msg)
end
it "adjusts progress to 100 if status ready and no progress given" do
input_msg = {'nodes' => [{'uid' => 1, 'status' => 'ready'}]}
expected_msg = {'nodes' => [{'uid' => 1, 'status' => 'ready', 'progress' => 100}]}
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(input_msg)
end
it "adjusts progress to 100 if status provisioned with progress" do
input_msg = {'nodes' => [{'uid' => 1, 'status' => 'provisioned', 'progress' => 50}]}
expected_msg = {'nodes' => [{'uid' => 1, 'status' => 'provisioned', 'progress' => 100}]}
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(input_msg)
end
it "adjusts progress to 100 if status ready with progress" do
input_msg = {'nodes' => [{'uid' => 1, 'status' => 'ready', 'progress' => 50}]}
expected_msg = {'nodes' => [{'uid' => 1, 'status' => 'ready', 'progress' => 100}]}
@up_reporter.expects(:report).with(expected_msg)
@reporter.report(input_msg)
end
it "does not report if node was in ready, and trying to set is deploying" do
msg1 = {'nodes' => [{'uid' => 1, 'status' => 'ready'}]}
msg2 = {'nodes' => [{'uid' => 2, 'status' => 'ready'}]}
msg3 = {'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => 100}]}
@up_reporter.expects(:report).with(msg1)
@up_reporter.expects(:report).with(msg2)
@up_reporter.expects(:report).never
@reporter.report(msg1)
@reporter.report(msg2)
5.times { @reporter.report(msg3) }
end
it "reports even not all keys provided" do
msg1 = {'nodes' => [{'uid' => 1, 'status' => 'deploying'}]}
msg2 = {'nodes' => [{'uid' => 2, 'status' => 'ready'}]}
@up_reporter.expects(:report).with(msg1)
@up_reporter.expects(:report).with(msg2)
@reporter.report(msg1)
@reporter.report(msg2)
end
it "raises exception if progress provided and no status" do
msg1 = {'nodes' => [{'uid' => 1, 'status' => 'ready'}]}
msg2 = {'nodes' => [{'uid' => 1, 'progress' => 100}]}
@up_reporter.expects(:report).with(msg1)
@up_reporter.expects(:report).never
@reporter.report(msg1)
lambda {@reporter.report(msg2)}.should raise_error
end
it "raises exception if status of node is not supported" do
msg1 = {'nodes' => [{'uid' => 1, 'status' => 'hah'}]}
@up_reporter.expects(:report).never
lambda {@reporter.report(msg1)}.should raise_error
end
it "some other attrs are valid and passed" do
msg1 = {'nodes' => [{'uid' => 1, 'status' => 'deploying'}]}
msg2 = {'status' => 'error', 'error_type' => 'deploy',
'nodes' => [{'uid' => 2, 'status' => 'error', 'message' => 'deploy'}]}
@up_reporter.expects(:report).with(msg1)
@up_reporter.expects(:report).with(msg2)
@reporter.report(msg1)
@reporter.report(msg2)
end
it "reports if status is greater" do
msgs = [{'nodes' => [{'uid' => 1, 'status' => 'provisioned'}]},
{'nodes' => [{'uid' => 1, 'status' => 'provisioning'}]},
{'nodes' => [{'uid' => 1, 'status' => 'ready'}]},
{'nodes' => [{'uid' => 1, 'status' => 'error'}]}]
@up_reporter.expects(:report).with(msgs[0])
@up_reporter.expects(:report).with(msgs[2])
@up_reporter.expects(:report).with(msgs[3])
msgs.each {|msg| @reporter.report(msg)}
end
it "doesn't update progress if it less than previous progress with same status" do
msgs = [{'nodes' => [{'uid' => 1, 'status' => 'provisioning', 'progress' => 50}]},
{'nodes' => [{'uid' => 1, 'status' => 'provisioning', 'progress' => 10}]},
{'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => 50}]},
{'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => 10}]}]
@up_reporter.expects(:report).with(msgs[0])
@up_reporter.expects(:report).with(msgs[2])
@up_reporter.expects(:report).never
msgs.each {|msg| @reporter.report(msg)}
end
it "updates progress if it less than previous progress when changing status" do
msgs = [{'nodes' => [{'uid' => 1, 'status' => 'provisioning', 'progress' => 50}]},
{'nodes' => [{'uid' => 1, 'status' => 'provisioned'}]},
{'nodes' => [{'uid' => 1, 'status' => 'provisioned', 'progress' => 100}]},
{'nodes' => [{'uid' => 1, 'status' => 'deploying', 'progress' => 0}]}]
@up_reporter.expects(:report).with(msgs[0])
@up_reporter.expects(:report).with(msgs[2])
@up_reporter.expects(:report).with(msgs[3])
@up_reporter.expects(:report).never
msgs.each {|msg| @reporter.report(msg)}
end
it "doesn't forget previously reported attributes" do
msgs = [{'nodes' => [{'uid' => 1, 'status' => 'provisioning', 'progress' => 50}]},
{'nodes' => [{'uid' => 1, 'status' => 'provisioning'}]},
{'nodes' => [{'uid' => 1, 'status' => 'provisioning', 'key' => 'value'}]},
{'nodes' => [{'uid' => 1, 'status' => 'provisioning', 'progress' => 0}]},
]
@up_reporter.expects(:report).with(msgs[0])
@up_reporter.expects(:report).with(msgs[2])
@up_reporter.expects(:report).never
msgs.each {|msg| @reporter.report(msg)}
end
end
end

View File

@ -0,0 +1,57 @@
#!/usr/bin/env rspec
require File.join(File.dirname(__FILE__), "..", "spec_helper")
describe "SimplePuppet DeploymentEngine" do
context "When deploy is called, " do
before(:each) do
@ctx = mock
@ctx.stubs(:task_id)
@ctx.stubs(:deploy_log_parser).returns(Astute::LogParser::NoParsing.new)
@reporter = mock('reporter')
@reporter.stub_everything
@ctx.stubs(:reporter).returns(Astute::ProxyReporter.new(@reporter))
@deploy_engine = Astute::DeploymentEngine::SimplePuppet.new(@ctx)
@env = YAML.load_file(File.join(File.dirname(__FILE__), "..", "..", "examples", "no_attrs.yaml"))
end
it "it should call valid method depends on attrs" do
nodes = [{'uid' => 1}]
attrs = {'deployment_mode' => 'ha_compute'}
@deploy_engine.expects(:attrs_ha_compute).never # It is not supported in SimplePuppet
@deploy_engine.expects(:deploy_ha_compute).with(nodes, attrs)
# All implementations of deploy_piece go to subclasses
@deploy_engine.respond_to?(:deploy_piece).should be_true
@deploy_engine.deploy(nodes, attrs)
end
it "it should raise an exception if deployment mode is unsupported" do
nodes = [{'uid' => 1}]
attrs = {'deployment_mode' => 'unknown'}
expect {@deploy_engine.deploy(nodes, attrs)}.to raise_exception(
/Method deploy_unknown is not implemented/)
end
it "multinode_compute deploy should not raise any exception" do
@env['attributes']['deployment_mode'] = "multinode_compute"
Astute::Metadata.expects(:publish_facts).never # It is not supported in SimplePuppet
# we got two calls, one for controller, and another for all computes
Astute::PuppetdDeployer.expects(:deploy).twice
@deploy_engine.deploy(@env['nodes'], @env['attributes'])
end
it "ha_compute deploy should not raise any exception" do
@env['attributes']['deployment_mode'] = "ha_compute"
Astute::Metadata.expects(:publish_facts).never
Astute::PuppetdDeployer.expects(:deploy).times(6)
@deploy_engine.deploy(@env['nodes'], @env['attributes'])
end
it "singlenode_compute deploy should not raise any exception" do
@env['attributes']['deployment_mode'] = "singlenode_compute"
@env['nodes'] = [@env['nodes'][0]] # We have only one node in singlenode
Astute::Metadata.expects(:publish_facts).never
Astute::PuppetdDeployer.expects(:deploy).once # one call for one node
@deploy_engine.deploy(@env['nodes'], @env['attributes'])
end
end
end

15
mcagents/erase_node.ddl Normal file
View File

@ -0,0 +1,15 @@
metadata :name => "Erase node bootloader",
:description => "Erase node bootloader and reboot it.",
:author => "Andrey Danin",
:license => "MIT",
:version => "0.1",
:url => "http://mirantis.com",
:timeout => 40
action "erase_node", :description => "Zeroing of boot device" do
display :always
end
action "reboot_node", :description => "Reboot node" do
display :always
end

107
mcagents/erase_node.rb Normal file
View File

@ -0,0 +1,107 @@
require "json"
require "base64"
require 'fileutils'
module MCollective
  module Agent
    # MCollective agent that erases a node's master boot record and/or
    # reboots it. Used when a node is decommissioned so it falls back to
    # network boot on the next start.
    class Erase_node<RPC::Agent
      action "erase_node" do
        erase_node
      end

      action "reboot_node" do
        reboot
      end

      private

      # Worker for the "erase_node" action.
      # Request data:
      #   :reboot  - reboot the node after erasing
      #   :dry_run - report success without touching the disk or rebooting
      # Reply data: :erased, :rebooted, :status (0 on full success, +1 per
      # failed step, shell-exitcode style) and :error_msg on any failure.
      def erase_node
        request_reboot = request.data[:reboot]
        dry_run = request.data[:dry_run]
        error_msg = []
        reply[:status] = 0 # Shell exitcode behaviour

        # Keep the node from being re-discovered/re-provisioned mid-erase
        prevent_discover unless dry_run

        begin
          boot_device = get_boot_device
          # 512 bytes covers the classic MBR: bootloader + partition table
          erase_data(boot_device, 512) unless dry_run
          reply[:erased] = true
        rescue Exception => e
          # Intentionally broad rescue: any failure must be reported back
          # through the RPC reply instead of aborting the agent run.
          reply[:erased] = false
          reply[:status] += 1
          msg = "MBR can't be erased. Reason: #{e.message};"
          Log.error(msg)
          error_msg << msg
        end

        begin
          reboot if not dry_run and request_reboot
          reply[:rebooted] = true
        rescue Exception => e
          reply[:rebooted] = false
          reply[:status] += 1
          msg = "Can't reboot node. Reason: #{e.message};"
          Log.error(msg)
          # Reuse msg instead of duplicating the literal (was copy-pasted)
          error_msg << msg
        end

        unless error_msg.empty?
          reply[:error_msg] = error_msg.join(' ')
        end
      end

      # Determines the device the node boots from, trying GRUB's
      # device.map first and falling back to the "#boot=..." comment in
      # grub.conf. Raises RuntimeError if neither source yields a device.
      def get_boot_device
        dev_map = '/boot/grub/device.map'
        grub_conf = '/boot/grub/grub.conf'
        # Look boot device at GRUB device.map file
        if File.file?(dev_map) && File.readable?(dev_map)
          File.open(dev_map) do |f|
            f.readlines.each do |l|
              line = l.strip
              unless line.start_with?('#')
                # Format: "(hd0)<whitespace>/dev/sda"; split on spaces/tabs
                # (original regex had a stray literal '|' in the class)
                grub_dev, kernel_dev = line.split(%r{[ \t]+})
                return kernel_dev if grub_dev == '(hd0)'
              end
            end
          end
        end
        # Look boot device at GRUB config autogenerated file
        if File.file?(grub_conf) && File.readable?(grub_conf)
          File.open(grub_conf) do |f|
            f.readlines.each do |l|
              line = l.strip
              if line.start_with?('#boot=')
                # "#boot=/dev/sda" comment left by the grub installer
                grub_dev, kernel_dev = line.split('=')
                return kernel_dev
              end
            end
          end
        end
        # If nothing found
        raise 'Boot device not found.'
      end

      # Schedules an asynchronous reboot (detached child, 5s delay) so the
      # RPC reply can be delivered before the node goes down.
      def reboot
        cmd = "/bin/sleep 5; /sbin/shutdown -r now"
        pid = fork { system(cmd) }
        Process.detach(pid)
      end

      # Reads +length+ bytes from +file+ starting at +offset+.
      # The descriptor is always closed, even if the read raises.
      def get_data(file, length, offset=0)
        fd = open(file)
        begin
          fd.seek(offset)
          fd.sysread(length)
        ensure
          fd.close
        end
      end

      # Overwrites +length+ bytes of +file+ with zero bytes starting at
      # +offset+. NOTE(review): mode 'w' truncates regular files; the
      # expected target here is a block device, which is opened in place.
      def erase_data(file, length, offset=0)
        fd = open(file, 'w')
        begin
          fd.seek(offset)
          fd.syswrite("\000"*length)
        ensure
          fd.close
        end
      end

      # Creates the /var/run/nodiscover flag file so the node is skipped
      # by discovery while it is being erased.
      def prevent_discover
        FileUtils.touch '/var/run/nodiscover'
      end
    end
  end
end

14
mcagents/fake.ddl Normal file
View File

@ -0,0 +1,14 @@
# DDL for the "fake" demo agent (fake.rb): registers agent metadata and
# the single "echo" action with MCollective so clients can discover it.
metadata :name => "Fake Agent",
  :description => "Fake Agent",
  :author => "Mirantis Inc.",
  :license => "Apache License 2.0",
  :version => "0.0.1",
  :url => "http://mirantis.com",
  :timeout => 20

# "echo" mirrors the request message back to the caller (see fake.rb).
action "echo", :description => "Echo request message" do
  output :output,
    :description => "Just request message",
    :display_as => "Echo message"
end

12
mcagents/fake.rb Normal file
View File

@ -0,0 +1,12 @@
module MCollective
  module Agent
    # Trivial demonstration agent: greets the caller, echoing back the
    # :msg string supplied in the request.
    class Fake < RPC::Agent
      action "echo" do
        validate :msg, String
        incoming = request[:msg]
        reply[:msg] = "Hello, it is my reply: #{incoming}"
      end
    end
  end
end

39
mcagents/nailyfact.ddl Normal file
View File

@ -0,0 +1,39 @@
# DDL for the nailyfact agent (nailyfact.rb): reads/writes key=value
# deployment attributes kept in a flat text file on the node.
metadata :name => "Naily Fact Agent",
  :description => "Key/values in a text file",
  :author => "Puppet Master Guy",
  :license => "GPL",
  :version => "Version 1",
  :url => "www.naily.com",
  :timeout => 40

# Fetch a single value by key from the facts file.
action "get", :description => "fetches a value from a file" do
  display :failed

  input :key,
    :prompt => "Key",
    :description => "Key you want from the file",
    :type => :string,
    :validation => '^[a-zA-Z0-9_]+$',
    :optional => false,
    :maxlength => 0

  output :value,
    :description => "Value",
    :display_as => "Value"
end

# Replace the facts file with the supplied JSON-encoded hash.
action "post", :description => "Create new attributes file" do
  display :failed

  input :value,
    :prompt => "Value",
    :description => "Value you want to set in the file",
    :type => :string,
    :validation => '.*',
    :optional => false,
    :maxlength => 0

  output :msg,
    :description => "Status",
    :display_as => "Status"
end

72
mcagents/nailyfact.rb Normal file
View File

@ -0,0 +1,72 @@
require 'json'

module MCollective
  module Agent
    # Agent that stores deployment attributes ("naily facts") as flat
    # "key = value" lines in /etc/naily.facts and serves them on request.
    class Nailyfact < RPC::Agent
      nailyfile = "/etc/naily.facts"

      # Parse +fname+ into a {key => value} hash.
      # Lines must match "key=value" (whitespace around '=' is stripped);
      # anything else is ignored. When the file does not exist an empty
      # one is created and {} returned. Any read error also returns {}.
      def parse_facts(fname)
        begin
          if File.exist?(fname)
            kv_map = {}
            File.readlines(fname).each do |line|
              if line =~ /^(.+)=(.+)$/
                # Fix: use locals instead of @key/@val instance variables —
                # parse scratch state must not leak onto the agent instance
                # between requests.
                key = $1.strip
                val = $2.strip
                kv_map[key] = val
              end
            end
            return kv_map
          else
            # Touch an empty facts file so subsequent reads/writes succeed.
            File.open(fname, 'w') { }
            File.open("/var/log/facter.log", "a") {|f| f.write("#{Time.now} EMPTY facts saved\n")}
            return {}
          end
        rescue
          logger.warn("Could not access naily facts file. There was an error in nailyfacts.rb:parse_facts")
          return {}
        end
      end

      # Write the +facts+ hash to +fname+ as "key = value" lines, creating
      # the parent directory if needed. Outcome (success/failure) is
      # appended to /var/log/facter.log. Returns true on success.
      def write_facts(fname, facts)
        Dir.mkdir(File.dirname(fname)) unless File.exist?(File.dirname(fname))

        begin
          # Block form guarantees the handle is closed even if puts raises.
          File.open(fname, "w+") do |f|
            facts.each do |k, v|
              f.puts("#{k} = #{v}")
            end
          end
          File.open("/var/log/facter.log", "a") {|f| f.write("#{Time.now} facts saved\n")}
          return true
        rescue
          File.open("/var/log/facter.log", "a") {|f| f.write("#{Time.now} facts NOT saved\n")}
          return false
        end
      end

      # Return the value for :key, leaving reply[:value] unset when absent.
      action "get" do
        validate :key, String
        kv_map = parse_facts(nailyfile)
        unless kv_map[request[:key]].nil?
          reply[:value] = kv_map[request[:key]]
        end
      end

      # Replace the facts file with the JSON hash supplied in :value.
      action "post" do
        validate :value, String
        kv_map = JSON.parse(request[:value])
        if write_facts(nailyfile, kv_map)
          reply[:msg] = "Settings Updated!"
        else
          reply.fail! "Could not write file!"
        end
      end
    end
  end
end

27
mcagents/net_probe.ddl Normal file
View File

@ -0,0 +1,27 @@
# DDL for the net_probe agent (net_probe.rb): drives net_probe.py to
# verify L2 connectivity between nodes with VLAN-tagged frames.
metadata :name => "Network Probe Agent",
  :description => "Check network connectivity between nodes.",
  :author => "Andrey Danin",
  :license => "MIT",
  :version => "0.1",
  :url => "http://mirantis.com",
  :timeout => 300

action "start_frame_listeners", :description => "Starts catching packets on interfaces" do
  display :always
end

action "send_probing_frames", :description => "Sends packets with VLAN tags" do
  display :always
end

action "get_probing_info", :description => "Get info about packets catched" do
  display :always
end

action "stop_frame_listeners", :description => "Stop catching packets, dump data to file" do
  display :always
end

# NOTE(review): declared here but not implemented in net_probe.rb —
# calls fall through to the default RPC::Agent handling.
action "echo", :description => "Silly echo" do
  display :always
end

153
mcagents/net_probe.rb Normal file
View File

@ -0,0 +1,153 @@
require "json"
require "tempfile"
require "socket"
require "timeout"

module MCollective
  module Agent
    # Agent driving net_probe.py to verify L2 connectivity between nodes:
    # starts frame listeners, sends VLAN-tagged probing frames and collects
    # the connectivity info the listeners dump to disk.
    class Net_probe < RPC::Agent
      def startup_hook
        # Glob matching the dump files produced by net_probe.py listeners.
        @pattern = "/var/tmp/net-probe-dump*"
      end

      action "start_frame_listeners" do
        start_frame_listeners
      end

      action "send_probing_frames" do
        send_probing_frames
      end

      action "get_probing_info" do
        get_probing_info
      end

      action "stop_frame_listeners" do
        stop_frame_listeners
      end

      private

      # Node uid assigned by nailgun (single line in /etc/nailgun_uid).
      def get_uid
        File.open('/etc/nailgun_uid') do |fo|
          uid = fo.gets.chomp
          return uid
        end
      end

      # Launch net_probe.py in listen mode and wait (up to 120s) for it to
      # report READY over a local TCP socket. Fails the reply when the
      # process cannot be started or never reports readiness.
      def start_frame_listeners
        validate :interfaces, String
        config = {
          "action" => "listen",
          "interfaces" => JSON.parse(request[:interfaces]),
          "dump_file" => "/var/tmp/net-probe-dump",
          "ready_address" => "localhost",
          "ready_port" => 31338,
        }
        # Callers may override any of the defaults via the optional :config.
        if request.data.key?('config')
          config.merge!(JSON.parse(request[:config]))
        end

        # wipe out old stuff before start
        Dir.glob(@pattern).each do |file|
          File.delete file
        end

        # Hand the configuration to net_probe.py through a temp file.
        f = Tempfile.new "net_probe"
        f.write config.to_json
        fpath = f.path
        f.close

        socket = nil
        begin
          socket = Socket.new(Socket::AF_INET, Socket::SOCK_STREAM, 0)
          sockaddr = Socket.pack_sockaddr_in(config['ready_port'], config['ready_address'])
          socket.bind(sockaddr)
          socket.listen(1)
        rescue Exception => e
          reply.fail "Socket error: #{e.to_s}"
        else
          cmd = "net_probe.py -c #{fpath}"
          pid = fork { `#{cmd}` }
          Process.detach(pid)

          # It raises Errno::ESRCH if there is no process, so we check that it runs
          sleep 1
          begin
            Process.kill(0, pid)
          rescue Errno::ESRCH => e
            reply.fail "Failed to run '#{cmd}'"
          else
            begin
              Timeout::timeout(120) do
                client, clientaddr = socket.accept
                status = client.read
                reply.fail "Wrong listener status: '#{status}'" unless status =~ /READY/
                client.close
              end
            rescue Timeout::Error
              reply.fail "Listener did not report status."
            end
          end
        ensure
          # Fix: socket is nil when Socket.new itself raised; calling
          # shutdown unconditionally used to mask the original error
          # with a NoMethodError raised from this ensure block.
          if socket
            socket.shutdown
            socket.close
          end
        end
      end

      # Run net_probe.py in generate mode, feeding the config via stdin.
      def send_probing_frames
        validate :interfaces, String
        config = { "action" => "generate", "uid" => get_uid,
          "interfaces" => JSON.parse(request[:interfaces]) }
        if request.data.key?('config')
          config.merge!(JSON.parse(request[:config]))
        end
        cmd = "net_probe.py -c -"
        status = run(cmd, :stdin => config.to_json, :stdout => :out, :stderr => :error)
        reply.fail "Failed to send probing frames, cmd='#{cmd}' failed, config: #{config.inspect}" if status != 0
      end

      # Stop all listeners, then merge every dump file they produced into
      # one neighbours hash returned to the caller along with our uid.
      def get_probing_info
        stop_frame_listeners
        neighbours = {}
        Dir.glob(@pattern).each do |file|
          p = JSON.load(File.read(file))
          neighbours.merge!(p)
        end
        reply[:neighbours] = neighbours
        reply[:uid] = get_uid
      end

      # Interrupt all listeners (SIGINT to every PID found in the pid dir)
      # and wait until they have dumped their data and exited, removing
      # stale pidfiles along the way.
      def stop_frame_listeners
        piddir = "/var/run/net_probe"
        pidfiles = Dir.glob(File.join(piddir, '*'))
        # Send SIGINT to all PIDs in piddir.
        pidfiles.each do |f|
          begin
            Process.kill("INT", File.basename(f).to_i)
          rescue Errno::ESRCH
            # Unlink pidfile if no such process.
            File.unlink(f)
          end
        end
        # Wait while all processes dump data and exit.
        # NOTE(review): spins forever if a listener ignores SIGINT; the
        # DDL-level action timeout is what bounds this wait in practice.
        while not pidfiles.empty? do
          pidfiles.each do |f|
            begin
              Process.getpgid(File.basename(f).to_i)
            rescue Errno::ESRCH
              begin
                File.unlink(f)
              rescue Errno::ENOENT
              end
            end
          end
          pidfiles = Dir.glob(File.join(piddir, '*'))
        end
      end
    end
  end
end
# vi:tabstop=2:expandtab:ai:filetype=ruby

88
mcagents/puppetd.ddl Normal file
View File

@ -0,0 +1,88 @@
# DDL for the puppetd agent (puppetd.rb): declares the five actions used
# to drive the puppet daemon and the outputs each of them returns.
metadata :name => "puppetd",
  :description => "Run puppet agent, get its status, and enable/disable it",
  :author => "R.I.Pienaar",
  :license => "Apache License 2.0",
  :version => "1.8",
  :url => "https://github.com/puppetlabs/mcollective-plugins",
  :timeout => 40

# Summarise puppet's last_run_summary.yaml.
action "last_run_summary", :description => "Get a summary of the last puppet run" do
  display :always

  output :time,
    :description => "Time per resource type",
    :display_as => "Times"

  output :resources,
    :description => "Overall resource counts",
    :display_as => "Resources"

  output :changes,
    :description => "Number of changes",
    :display_as => "Changes"

  output :events,
    :description => "Number of events",
    :display_as => "Events"

  output :version,
    :description => "Puppet and Catalog versions",
    :display_as => "Versions"
end

action "enable", :description => "Enable puppet agent" do
  output :output,
    :description => "String indicating status",
    :display_as => "Status"
end

action "disable", :description => "Disable puppet agent" do
  output :output,
    :description => "String indicating status",
    :display_as => "Status"
end

action "runonce", :description => "Invoke a single puppet run" do
  #input :forcerun,
  #    :prompt => "Force puppet run",
  #    :description => "Should the puppet run happen immediately?",
  #    :type => :string,
  #    :validation => '^.+$',
  #    :optional => true,
  #    :maxlength => 5

  output :output,
    :description => "Output from puppet agent",
    :display_as => "Output"
end

# Report the daemon state derived from its lock file and pid file.
action "status", :description => "Get puppet agent's status" do
  display :always

  output :status,
    :description => "The status of the puppet agent: disabled, running, idling or stopped",
    :display_as => "Status"

  output :enabled,
    :description => "Whether puppet agent is enabled",
    :display_as => "Enabled"

  output :running,
    :description => "Whether puppet agent is running",
    :display_as => "Running"

  output :idling,
    :description => "Whether puppet agent is idling",
    :display_as => "Idling"

  output :stopped,
    :description => "Whether puppet agent is stopped",
    :display_as => "Stopped"

  output :lastrun,
    :description => "When puppet agent last ran",
    :display_as => "Last Run"

  output :output,
    :description => "String displaying agent status",
    :display_as => "Status"
end

185
mcagents/puppetd.rb Normal file
View File

@ -0,0 +1,185 @@
module MCollective
module Agent
# An agent to manage the Puppet Daemon
#
# Configuration Options:
# puppetd.splaytime - Number of seconds within which to splay; no splay
# by default
# puppetd.statefile - Where to find the state.yaml file; defaults to
# /var/lib/puppet/state/state.yaml
# puppetd.lockfile - Where to find the lock file; defaults to
# /var/lib/puppet/state/puppetdlock
# puppetd.puppetd - Where to find the puppet agent binary; defaults to
# /usr/bin/puppet agent
# puppetd.summary - Where to find the summary file written by Puppet
# 2.6.8 and newer; defaults to
# /var/lib/puppet/state/last_run_summary.yaml
# puppetd.pidfile - Where to find puppet agent's pid file; defaults to
# /var/run/puppet/agent.pid
# Agent wrapping the puppet daemon: summarises the last run, reports
# daemon state, enables/disables the agent through its lock file, and
# triggers one-off runs (USR1 signal to an idling daemon, or a
# background "--onetime" run).
class Puppetd<RPC::Agent
  # Load plugin configuration, falling back to stock puppet paths.
  def startup_hook
    @splaytime = @config.pluginconf["puppetd.splaytime"].to_i || 0
    @lockfile = @config.pluginconf["puppetd.lockfile"] || "/var/lib/puppet/state/puppetdlock"
    @statefile = @config.pluginconf["puppetd.statefile"] || "/var/lib/puppet/state/state.yaml"
    @pidfile = @config.pluginconf["puppet.pidfile"] || "/var/run/puppet/agent.pid"
    @puppetd = @config.pluginconf["puppetd.puppetd"] || "/usr/bin/puppet agent"
    @last_summary = @config.pluginconf["puppet.summary"] || "/var/lib/puppet/state/last_run_summary.yaml"
  end

  action "last_run_summary" do
    last_run_summary
    set_status
  end

  action "enable" do
    enable
  end

  action "disable" do
    disable
  end

  action "runonce" do
    runonce
  end

  action "status" do
    set_status
  end

  private

  # Report resource/time/event/version counts from last_run_summary.yaml.
  # A missing or unreadable summary degrades to defaults that look like a
  # failed run (resources.failed = 1) so callers notice the problem.
  def last_run_summary
    # wrap into begin..rescue: fixes PRD-252
    begin
      summary = YAML.load_file(@last_summary)
    rescue
      summary = {}
    end

    # It should be empty hash, if 'resources' key is not defined, because otherwise merge will fail with TypeError
    summary["resources"] ||= {}
    # Astute relies on last_run, so we must set last_run
    summary["time"] ||= {}
    summary["time"]["last_run"] ||= 0

    # if 'failed' is not provided, it means something is wrong. So default value is 1.
    reply[:resources] = {"failed"=>1, "changed"=>0, "total"=>0, "restarted"=>0, "out_of_sync"=>0}.merge(summary["resources"])

    ["time", "events", "changes", "version"].each do |dat|
      reply[dat.to_sym] = summary[dat]
    end
  end

  # Fill the reply with daemon state flags plus last-run timing derived
  # from the state file's mtime.
  def set_status
    reply[:status] = puppet_daemon_status
    reply[:running] = reply[:status] == 'running' ? 1 : 0
    reply[:enabled] = reply[:status] == 'disabled' ? 0 : 1
    reply[:idling] = reply[:status] == 'idling' ? 1 : 0
    reply[:stopped] = reply[:status] == 'stopped' ? 1 : 0
    reply[:lastrun] = 0
    reply[:lastrun] = File.stat(@statefile).mtime.to_i if File.exists?(@statefile)
    reply[:runtime] = Time.now.to_i - reply[:lastrun]
    reply[:output] = "Currently #{reply[:status]}; last completed run #{reply[:runtime]} seconds ago"
  end

  # Derive daemon state from the lock file and pid file:
  #   empty lock file        -> 'disabled' (operator ran "disable")
  #   lock file + pid file   -> 'running'
  #   pid file only          -> 'idling' (daemonized, waiting for work)
  #   no pid file            -> 'stopped'
  def puppet_daemon_status
    locked = File.exists?(@lockfile)
    disabled = locked && File::Stat.new(@lockfile).zero?
    has_pid = File.exists?(@pidfile)

    return 'disabled' if disabled
    return 'running' if locked && has_pid
    return 'idling' if ! locked && has_pid
    return 'stopped' if ! has_pid
  end

  # Trigger a puppet run appropriate to the current daemon state: refuse
  # when disabled or already running, signal an idling daemon with USR1,
  # or spawn a background onetime run.
  def runonce
    set_status

    case (reply[:status])
    when 'disabled' then # can't run
      reply.fail "Empty Lock file exists; puppet agent is disabled."

    when 'running' then # can't run two simultaniously
      reply.fail "Lock file and PID file exist; puppet agent is running."

    when 'idling' then # signal daemon
      pid = File.read(@pidfile)
      if pid !~ /^\d+$/
        reply.fail "PID file does not contain a PID; got #{pid.inspect}"
      else
        begin
          ::Process.kill(0, Integer(pid)) # check that pid is alive
          # REVISIT: Should we add an extra round of security here, and
          # ensure that the PID file is securely owned, or that the target
          # process looks like Puppet? Otherwise a malicious user could
          # theoretically signal arbitrary processes with this...
          begin
            ::Process.kill("USR1", Integer(pid))
            reply[:output] = "Signalled daemonized puppet agent to run (process #{Integer(pid)}); " + (reply[:output] || '')
          rescue Exception => e
            reply.fail "Failed to signal the puppet agent daemon (process #{pid}): #{e}"
          end
        rescue Errno::ESRCH => e
          # PID is invalid, run puppet onetime as usual
          runonce_background
        end
      end

    when 'stopped' then # just run
      runonce_background

    else
      reply.fail "Unknown puppet agent status: #{reply[:status]}"
    end
  end

  # Spawn "puppet agent --onetime" (optionally splayed) via MCollective's
  # run helper, prepending the invoked command line to the output.
  def runonce_background
    cmd = [@puppetd, "--onetime", "--logdest", 'syslog']

    unless request[:forcerun]
      if @splaytime && @splaytime > 0
        cmd << "--splaylimit" << @splaytime << "--splay"
      end
    end

    cmd = cmd.join(" ")

    output = reply[:output] || ''
    run(cmd, :stdout => :output, :chomp => true)
    reply[:output] = "Called #{cmd}, " + output + (reply[:output] || '')
  end

  # Remove an empty ("disabled") lock file; refuse when a run holds it or
  # when the agent is already enabled (no lock file at all).
  def enable
    if File.exists?(@lockfile)
      stat = File::Stat.new(@lockfile)

      if stat.zero?
        File.unlink(@lockfile)
        reply[:output] = "Lock removed"
      else
        reply[:output] = "Currently running; can't remove lock"
      end
    else
      reply.fail "Already enabled"
    end
  end

  # Create an empty lock file to disable the agent; fail when already
  # disabled or when a run currently holds a non-empty lock file.
  def disable
    if File.exists?(@lockfile)
      stat = File::Stat.new(@lockfile)

      stat.zero? ? reply.fail("Already disabled") : reply.fail("Currently running; can't remove lock")
    else
      begin
        File.open(@lockfile, "w") { |file| }
        reply[:output] = "Lock created"
      rescue Exception => e
        reply.fail "Could not create lock: #{e}"
      end
    end
  end
end
end
end
# vi:tabstop=2:expandtab:ai:filetype=ruby

14
mcagents/systemtype.ddl Normal file
View File

@ -0,0 +1,14 @@
# DDL for the systemtype agent (systemtype.rb): reports the node type
# read from a marker file on the node.
metadata :name => "systemtype",
  :description => "Checks system type out of file",
  :author => "Mirantis Inc",
  :license => "Apache License 2.0",
  :version => "0.0.1",
  :url => 'http://www.mirantis.com/',
  :timeout => 40

action "get_type", :description => "Get the type" do
  display :always

  output :node_type,
    :description => "Type out of file",
    :display_as => "Node type"
end

15
mcagents/systemtype.rb Normal file
View File

@ -0,0 +1,15 @@
module MCollective
  module Agent
    # Agent that reports the node's system type, read verbatim from the
    # marker file written by nailgun during provisioning.
    class Systemtype < RPC::Agent
      type_file = "/etc/nailgun_systemtype"

      action "get_type" do
        begin
          reply[:node_type] = File.read(type_file)
        rescue => e
          # Propagate the read failure (missing/unreadable file) to caller.
          reply.fail! e.to_s
        end
      end
    end
  end
end