Deploy without a puppet master:

* mco agent puppetsync;
* upload modules and manifests using rsync;
* limit how many nodes run concurrently via mco (sketched below);
* tests.
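
A rough, self-contained sketch of the resulting flow. The stub methods stand in for the real generate_ssh_keys / upload_ssh_keys / sync_puppet_manifests added in deployment_engine.rb below, and MAX_NODES_PER_CALL is an assumed value (the real one comes from Astute.config):

MAX_NODES_PER_CALL = 2  # assumed; Astute reads this from Astute.config

def generate_ssh_keys(deployment_id)
  puts "generate ssh keys for deployment #{deployment_id}"
end

def upload_ssh_keys(node_uids, deployment_id)
  puts "upload ssh keys to nodes #{node_uids.join(', ')}"
end

def sync_puppet_manifests(nodes)
  puts "rsync puppet modules/manifests to nodes #{nodes.map { |n| n['uid'] }.join(', ')}"
end

deployment_info = [
  {'uid' => 1, 'deployment_id' => 1},
  {'uid' => 2, 'deployment_id' => 1},
  {'uid' => 3, 'deployment_id' => 1}
]

# Keys are generated once per deployment, then nodes are prepared in
# slices so no more than MAX_NODES_PER_CALL are touched per mco call.
generate_ssh_keys(deployment_info.first['deployment_id'])
deployment_info.uniq { |n| n['uid'] }.each_slice(MAX_NODES_PER_CALL) do |part|
  upload_ssh_keys(part.map { |n| n['uid'] }, part.first['deployment_id'])
  sync_puppet_manifests(part)
end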

Change-Id: I0bfbaae0b623b0b26d6a77b49f0449860cbd5520
Author: Vladimir, 2013-10-23 16:52:39 +04:00
parent 596a9ffbfd, commit 9e8402ba8c
11 changed files with 297 additions and 111 deletions


@ -32,11 +32,23 @@ module Astute
@ctx.deploy_log_parser.deploy_type = deployment_info.first['deployment_mode']
Astute.logger.info "Deployment mode #{@ctx.deploy_log_parser.deploy_type}"
# Generate and upload ssh keys from master node to all cluster nodes.
begin
# Generate ssh keys that will later be uploaded to all cluster nodes
generate_ssh_keys(deployment_info.first['deployment_id'])
# Prevent preparing too many nodes at once
deployment_info.uniq { |n| n['uid'] }.each_slice(Astute.config[:MAX_NODES_PER_CALL]) do |part|
# Upload ssh keys from the master node to all cluster nodes.
# Puppet will later use them so the nodes can connect to each other.
generate_and_upload_ssh_keys(deployment_info.map{ |n| n['uid'] }.uniq,
deployment_info.first['deployment_id']
)
upload_ssh_keys(part.map{ |n| n['uid'] }, part.first['deployment_id'])
# Sync puppet manifests and modules to every node (emulate puppet master)
sync_puppet_manifests(part)
end
rescue => e
Astute.logger.error("Unexpected error #{e.message} traceback #{e.format_backtrace}")
raise e
end
# Sort by priority (the lower the number, the higher the priority)
# and send groups to deploy
@ -90,16 +102,20 @@ module Astute
nodes_array.find { |n| node['uid'] == n['uid'] }
end
# Generate and upload ssh keys from master node to all cluster nodes.
def generate_and_upload_ssh_keys(node_uids, deployment_id)
raise "Deployment_id is missing" unless deployment_id
Astute.config.PUPPET_SSH_KEYS.each do |key_name|
generate_ssh_key(key_name, deployment_id)
upload_ssh_key(node_uids, key_name, deployment_id)
end
# Sync puppet manifests and modules to every node
def sync_puppet_manifests(deployment_info)
sync_mclient = MClient.new(@ctx, "puppetsync", deployment_info.map{ |n| n['uid'] }.uniq)
master_ip = deployment_info.first['master_ip']
# The /puppet/modules/ and /puppet/manifests/ paths on the master node are set up by Fuel;
# see the Fuel source: /deployment/puppet/nailgun/manifests/puppetsync.pp
sync_mclient.rsync(:modules_source => "rsync://#{master_ip}:/puppet/modules/",
:manifests_source => "rsync://#{master_ip}:/puppet/manifests/"
)
end
def generate_ssh_key(key_name, deployment_id, overwrite=false)
def generate_ssh_keys(deployment_id, overwrite=false)
raise "Deployment_id is missing" unless deployment_id
Astute.config.PUPPET_SSH_KEYS.each do |key_name|
dir_path = File.join(KEY_DIR, deployment_id.to_s, key_name)
key_path = File.join(dir_path, key_name)
FileUtils.mkdir_p dir_path
@ -110,8 +126,10 @@ module Astute
result = system("ssh-keygen -b 2048 -t rsa -N '' -f #{key_path}")
raise "Could not generate ssh key!" unless result
end
end
def upload_ssh_key(node_uids, key_name, deployment_id, overwrite=false)
def upload_ssh_keys(node_uids, deployment_id, overwrite=false)
Astute.config.PUPPET_SSH_KEYS.each do |key_name|
upload_mclient = MClient.new(@ctx, "uploadfile", node_uids)
[key_name, key_name + ".pub"].each do |ssh_key|
source_path = File.join(KEY_DIR, deployment_id.to_s, key_name, ssh_key)
@ -128,6 +146,7 @@ module Astute
)
end
end
end
def nodes_status(nodes, status, data_to_merge)
{


@ -96,7 +96,7 @@ module Astute
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>",
'components_list' => [
{'name' => 'Galera', 'weight' => 5, 'patterns' => [
@ -261,7 +261,7 @@ module Astute
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>",
'components_list' => [
{'name' => 'Keystone', 'weight' => 10, 'patterns' => [
@ -314,7 +314,7 @@ module Astute
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>",
'components_list' => [
{'name' => 'Glance', 'weight' => 10, 'patterns' => [
@ -401,7 +401,7 @@ module Astute
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>",
'components_list' => [
{'name' => 'Glance', 'weight' => 10, 'patterns' => [
@ -488,7 +488,7 @@ module Astute
{'type' => 'components-list',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'chunk_size' => 40000,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>",
'components_list' => [
{'name' => 'Keystone', 'weight' => 10, 'patterns' => [
@ -541,7 +541,7 @@ module Astute
'type' => 'count-lines',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'expected_line_number' => 345,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>"
},
@ -549,7 +549,7 @@ module Astute
'type' => 'count-lines',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'expected_line_number' => 345,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>"
},
@ -557,7 +557,7 @@ module Astute
'type' => 'count-lines',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'expected_line_number' => 345,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>"
},
@ -565,7 +565,7 @@ module Astute
'type' => 'count-lines',
'endlog_patterns' => [{'pattern' => /Finished catalog run in [0-9]+\.[0-9]* seconds\n/, 'progress' => 1.0}],
'expected_line_number' => 345,
'filename' => 'puppet-agent.log',
'filename' => 'puppet-apply.log',
'path_format' => "<%= @pattern_spec['path_prefix'] %><%= node['fqdn'] %>/<%= @pattern_spec['filename'] %>"
},
}


@ -27,6 +27,7 @@ module Astute
@nodes = nodes.map { |n| n.to_s } if nodes
@check_result = check_result
@retries = Astute.config.MC_RETRIES
#FIXME: this timeout does not work
@timeout = timeout
initialize_mclient
end
@ -87,7 +88,10 @@ module Astute
failed = @mc_res.select{|x| x.results[:statuscode] != 0 }
if failed.any?
err_msg += "MCollective call failed in agent '#{@agent}', "\
"method '#{method}', failed nodes: #{failed.map{|x| x.results[:sender]}.join(',')} \n"
"method '#{method}', failed nodes: \n"
failed.each do |n|
err_msg += "ID: #{n.results[:sender]} - Reason: #{n.results[:statusmsg]}\n"
end
end
unless err_msg.empty?
Astute.logger.error err_msg
@ -95,7 +99,6 @@ module Astute
end
end
def mc_send(*args)
@mc.send(*args)
rescue => ex


@ -1,5 +1,5 @@
metadata :name => "puppetd",
:description => "Run puppet agent, get its status, and enable/disable it",
:description => "Run puppet, get its status, and enable/disable it",
:author => "R.I.Pienaar",
:license => "Apache License 2.0",
:version => "1.8",
@ -29,13 +29,13 @@ action "last_run_summary", :description => "Get a summary of the last puppet run
:display_as => "Versions"
end
action "enable", :description => "Enable puppet agent" do
action "enable", :description => "Enable puppet" do
output :output,
:description => "String indicating status",
:display_as => "Status"
end
action "disable", :description => "Disable puppet agent" do
action "disable", :description => "Disable puppet" do
output :output,
:description => "String indicating status",
:display_as => "Status"
@ -51,38 +51,38 @@ action "runonce", :description => "Invoke a single puppet run" do
# :maxlength => 5
output :output,
:description => "Output from puppet agent",
:description => "Output from puppet",
:display_as => "Output"
end
action "status", :description => "Get puppet agent's status" do
action "status", :description => "Get puppet status" do
display :always
output :status,
:description => "The status of the puppet agent: disabled, running, idling or stopped",
:description => "The status of the puppet: disabled, running, idling or stopped",
:display_as => "Status"
output :enabled,
:description => "Whether puppet agent is enabled",
:description => "Whether puppet is enabled",
:display_as => "Enabled"
output :running,
:description => "Whether puppet agent is running",
:description => "Whether puppet is running",
:display_as => "Running"
output :idling,
:description => "Whether puppet agent is idling",
:description => "Whether puppet is idling",
:display_as => "Idling"
output :stopped,
:description => "Whether puppet agent is stopped",
:description => "Whether puppet is stopped",
:display_as => "Stopped"
output :lastrun,
:description => "When puppet agent last ran",
:description => "When puppet last ran",
:display_as => "Last Run"
output :output,
:description => "String displaying agent status",
:description => "String displaying puppet status",
:display_as => "Status"
end


@ -24,8 +24,8 @@ module MCollective
# /var/lib/puppet/state/state.yaml
# puppetd.lockfile - Where to find the lock file; defaults to
# /var/lib/puppet/state/puppetdlock
# puppetd.puppetd - Where to find the puppet agent binary; defaults to
# /usr/bin/puppet agent
# puppetd.puppetd - Where to find the puppet binary; defaults to
# /usr/bin/puppet apply
# puppetd.summary - Where to find the summary file written by Puppet
# 2.6.8 and newer; defaults to
# /var/lib/puppet/state/last_run_summary.yaml
@ -34,10 +34,13 @@ module MCollective
class Puppetd<RPC::Agent
def startup_hook
@splaytime = @config.pluginconf["puppetd.splaytime"].to_i || 0
@lockfile = @config.pluginconf["puppetd.lockfile"] || "/var/lib/puppet/state/puppetdlock"
@lockfile = @config.pluginconf["puppetd.lockfile"] || "/tmp/puppetdlock"
@statefile = @config.pluginconf["puppetd.statefile"] || "/var/lib/puppet/state/state.yaml"
@pidfile = @config.pluginconf["puppet.pidfile"] || "/var/run/puppet/agent.pid"
@puppetd = @config.pluginconf["puppetd.puppetd"] || "/usr/bin/puppet agent"
@puppetd = @config.pluginconf["puppetd.puppetd"] || "/usr/sbin/daemonize -a -e /var/log/puppet/puppet.err \
-o /var/log/puppet/puppet.log \
-l #{@lockfile} \
/usr/bin/puppet apply /etc/puppet/manifests/site.pp"
@last_summary = @config.pluginconf["puppet.summary"] || "/var/lib/puppet/state/last_run_summary.yaml"
end
@ -134,31 +137,30 @@ module MCollective
set_status
case (reply[:status])
when 'disabled' then # can't run
reply.fail "Empty Lock file exists; puppet agent is disabled."
reply.fail "Empty Lock file exists; puppet is disabled."
when 'running' then # can't run two simultaneously
reply.fail "Lock file and PID file exist; puppet agent is running."
reply.fail "Lock file and PID file exist; puppet is running."
when 'idling' then # signal daemon
pid = puppet_agent_pid
begin
::Process.kill('USR1', pid)
reply[:output] = "Signalled daemonized puppet agent to run (process #{pid}); " + (reply[:output] || '')
reply[:output] = "Signalled daemonized puppet to run (process #{pid}); " + (reply[:output] || '')
rescue => ex
reply.fail "Failed to signal the puppet agent daemon (process #{pid}): #{ex}"
reply.fail "Failed to signal the puppet daemon (process #{pid}): #{ex}"
end
when 'stopped' then # just run
runonce_background
else
reply.fail "Unknown puppet agent status: #{reply[:status]}"
reply.fail "Unknown puppet status: #{reply[:status]}"
end
end
def runonce_background
cmd = [@puppetd, "--onetime", "--ignorecache", "--logdest", 'syslog', '--trace']
cmd = [@puppetd, "--logdest", 'syslog', '--trace']
unless request[:forcerun]
if @splaytime && @splaytime > 0
cmd << "--splaylimit" << @splaytime << "--splay"

mcagents/puppetsync.ddl (new file, 61 lines)

@ -0,0 +1,61 @@
metadata :name => "puppetsync",
:description => "Downloads latest version of Puppet manifests to managed servers",
:author => "Mirantis Inc",
:license => "Apache License 2.0",
:version => "0.1",
:url => "http://mirantis.com",
:timeout => 300
action "rsync", :description => "Download using rsync" do
display :failed
input :modules_source,
:prompt => "Rsync source URL of modules",
:description => "Where to get modules from. URL with any protocol supported by rsync",
:type => :string,
:validation => :shellsafe,
:optional => false,
:default => 'rsync://10.20.0.2:/puppet/modules/',
:maxlength => 256
input :manifests_source,
:prompt => "Rsync source URL of manifests",
:description => "Where to get manifests from. URL with any protocol supported by rsync",
:type => :string,
:validation => :shellsafe,
:optional => false,
:default => 'rsync://10.20.0.2:/puppet/manifests/',
:maxlength => 256
input :modules_path,
:prompt => "Rsync destination of modules",
:description => "Where should downloaded modules be saved?",
:type => :string,
:validation => :shellsafe,
:optional => false,
:default => '/etc/puppet/modules/',
:maxlength => 256
input :manifests_path,
:prompt => "Rsync destination of manifests",
:description => "Where should downloaded manifests be saved?",
:type => :string,
:validation => :shellsafe,
:optional => false,
:default => '/etc/puppet/manifests/',
:maxlength => 256
input :rsync_options,
:prompt => "Options for rsync command run",
:description => "What options should be pathed to rsync command?",
:type => :string,
:validation => :shellsafe,
:optional => false,
:default => '-c -r --delete',
:maxlength => 256
output :msg,
:description => "Report message",
:display_as => "Message"
end
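
A hedged usage sketch for the new agent: driving it from Ruby through MCollective's simple RPC client. The source URLs are just the DDL defaults above; Astute itself goes through MClient#rsync as shown in deployment_engine.rb:

require 'mcollective'
include MCollective::RPC

mc = rpcclient('puppetsync')
printrpc mc.rsync(:modules_source   => 'rsync://10.20.0.2:/puppet/modules/',
                  :manifests_source => 'rsync://10.20.0.2:/puppet/manifests/')
mc.disconnect

Roughly the same call from the shell: mco rpc puppetsync rsync modules_source=rsync://10.20.0.2:/puppet/modules/ manifests_source=rsync://10.20.0.2:/puppet/manifests/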

mcagents/puppetsync.rb (new file, 74 lines)

@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
module MCollective
module Agent
class Puppetsync < RPC::Agent
action 'rsync' do
['modules', 'manifests'].each do |type|
# Rsync behaves differently depending on the presence or absence of a
# trailing slash, so normalize both source and destination to end with '/'
source = request.data["#{type}_source".to_sym].chomp('/').concat('/')
path = request.data["#{type}_path".to_sym].chomp('/').concat('/')
cmd = "rsync #{request.data[:rsync_options]} #{source} #{path}"
run_and_respond(cmd)
end
reply[:msg] = "Puppet modules and manifests were synced!"
end
private
def run_and_respond(cmd)
stdout, stderr, exit_code = runcommand(cmd)
if exit_code != 0
reply.fail! "Fail to upload folder using command #{cmd}.
Exit code: #{exit_code}, stderr: #{stderr}"
end
end
def runcommand(cmd)
# We cannot use MCollective's Shell class here because
# version 2.3.1 has a bug that returns the wrong exit
# code in some cases; it was fixed in a newer MCollective
# release:
# https://github.com/puppetlabs/marionette-collective
# /commit/10f163550bc6395f1594dacb9f15a86d4a3fde27
# So this is simply the fixed code from Shell#runcommand.
thread = Thread.current
stdout = ''
stderr = ''
status = systemu(cmd, {'stdout' => stdout, 'stderr' => stderr}) do |cid|
begin
while(thread.alive?)
sleep 0.1
end
Process.waitpid(cid) if Process.getpgid(cid)
rescue SystemExit
rescue Errno::ESRCH
rescue Errno::ECHILD
rescue Exception => e
Log.info("Unexpected exception received while waiting for child process: #{e.class}: #{e}")
end
end
[stdout, stderr, status.exitstatus]
end
end
end
end
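
For reference, a minimal, stand-alone sketch of the systemu call pattern that runcommand above relies on (the systemu gem, which MCollective also uses; the command here is only an example):

require 'systemu'

stdout = ''
stderr = ''
status = systemu('rsync --version', 'stdout' => stdout, 'stderr' => stderr)

puts "exit code: #{status.exitstatus}"
puts stdout unless stdout.empty?
warn stderr unless stderr.empty?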


@ -4,7 +4,7 @@ metadata :name => "systemtype",
:license => "Apache License 2.0",
:version => "0.0.1",
:url => 'http://www.mirantis.com/',
:timeout => 40
:timeout => 60
action "get_type", :description => "Get the type" do
display :always


@ -35,17 +35,25 @@ describe Astute::DeploymentEngine do
let(:deployer) { Engine.new(ctx) }
describe '#deploy' do
before(:each) do
deployer.stubs(:generate_ssh_keys)
deployer.stubs(:upload_ssh_keys)
deployer.stubs(:sync_puppet_manifests)
end
it 'should generate and upload ssh keys' do
nodes = [{'uid' => 1, 'deployment_id' => 1}, {'uid' => 2}, {'uid' => 1}]
deployer.stubs(:deploy_piece)
deployer.expects(:generate_and_upload_ssh_keys).with([1,2], nodes.first['deployment_id'])
deployer.expects(:generate_ssh_keys).with(nodes.first['deployment_id'])
deployer.expects(:upload_ssh_keys).with([1,2], nodes.first['deployment_id']).returns()
deployer.expects(:sync_puppet_manifests).with([{'uid' => 1, 'deployment_id' => 1}, {'uid' => 2}])
deployer.deploy(nodes)
end
it 'should deploy nodes in priority order' do
deployer.stubs(:generate_and_upload_ssh_keys)
nodes = [{'uid' => 1, 'priority' => 10}, {'uid' => 2, 'priority' => 0}, {'uid' => 1, 'priority' => 15}]
deployer.expects(:deploy_piece).with([{'uid' => 2, 'priority' => 0}])
@ -56,7 +64,6 @@ describe Astute::DeploymentEngine do
end
it 'nodes with the same priority should be deployed in parallel' do
deployer.stubs(:generate_and_upload_ssh_keys)
nodes = [{'uid' => 1, 'priority' => 10}, {'uid' => 2, 'priority' => 0}, {'uid' => 3, 'priority' => 10}]
deployer.expects(:deploy_piece).with([{'uid' => 2, 'priority' => 0}])
@ -66,7 +73,6 @@ describe Astute::DeploymentEngine do
end
it 'a node with several roles of the same priority should not run in parallel' do
deployer.stubs(:generate_and_upload_ssh_keys)
nodes = [
{'uid' => 1, 'priority' => 10, 'role' => 'compute'},
{'uid' => 2, 'priority' => 0, 'role' => 'primary-controller'},
@ -81,7 +87,6 @@ describe Astute::DeploymentEngine do
end
it 'a node with several roles of the same priority should not run in parallel, but different nodes should' do
deployer.stubs(:generate_and_upload_ssh_keys)
nodes = [
{'uid' => 1, 'priority' => 10, 'role' => 'compute'},
{'uid' => 3, 'priority' => 10, 'role' => 'compute'},
@ -108,7 +113,10 @@ describe Astute::DeploymentEngine do
it 'the number of nodes running in parallel should be limited' do
Astute.config.MAX_NODES_PER_CALL = 1
deployer.stubs(:generate_and_upload_ssh_keys)
deployer.stubs(:generate_ssh_keys)
deployer.stubs(:upload_ssh_keys)
deployer.stubs(:sync_puppet_manifests)
nodes = [
{'uid' => 1, 'priority' => 10, 'role' => 'compute'},
{'uid' => 3, 'priority' => 10, 'role' => 'compute'},
@ -131,17 +139,41 @@ describe Astute::DeploymentEngine do
end
describe '#generate_and_upload_ssh_keys' do
describe '#sync_puppet_manifests' do
before(:each) do
deployer.stubs(:deploy_piece)
deployer.stubs(:generate_ssh_keys)
deployer.stubs(:upload_ssh_keys)
end
let(:nodes) { [{'uid' => 1, 'deployment_id' => 1, 'master_ip' => '10.20.0.2'}, {'uid' => 2}] }
it "should sync puppet modules and manifests mcollective client 'puppetsync'" do
mclient = mock_rpcclient(nodes)
Astute::MClient.any_instance.stubs(:rpcclient).returns(mclient)
Astute::MClient.any_instance.stubs(:log_result).returns(mclient)
Astute::MClient.any_instance.stubs(:check_results_with_retries).returns(mclient)
master_ip = nodes.first['master_ip']
mclient.expects(:rsync).with(:modules_source => "rsync://#{master_ip}:/puppet/modules/",
:manifests_source => "rsync://#{master_ip}:/puppet/manifests/"
)
deployer.deploy(nodes)
end
end
describe '#generation and uploading of ssh keys' do
before(:each) do
Astute.config.PUPPET_SSH_KEYS = ['nova']
deployer.stubs(:deploy_piece)
deployer.stubs(:sync_puppet_manifests)
end
let(:nodes) { [{'uid' => 1, 'deployment_id' => 1}, {'uid' => 2}] }
it 'should use Astute.config to get the ssh key names that need to be generated' do
deployer.expects(:generate_ssh_key).with('nova', nodes.first['deployment_id'])
deployer.expects(:upload_ssh_key).with([1, 2], 'nova', nodes.first['deployment_id'])
deployer.expects(:generate_ssh_keys).with(nodes.first['deployment_id'])
deployer.expects(:upload_ssh_keys).with([1, 2], nodes.first['deployment_id'])
deployer.deploy(nodes)
end
@ -152,7 +184,7 @@ describe Astute::DeploymentEngine do
context 'generation of ssh keys' do
before(:each) do
deployer.stubs(:upload_ssh_key).with([1, 2], 'nova', nodes.first['deployment_id'])
deployer.stubs(:upload_ssh_keys).with([1, 2], nodes.first['deployment_id'])
end
it 'should save files in correct place: KEY_DIR/<name of key>/' do
@ -207,7 +239,7 @@ describe Astute::DeploymentEngine do
context 'upload ssh keys' do
before(:each) do
deployer.stubs(:generate_ssh_key)
deployer.stubs(:generate_ssh_keys)
end
it "should upload ssh keys using mcollective client 'uploadfile'" do


@ -90,6 +90,6 @@ describe MClient do
mclient = MClient.new(@ctx, "faketest", nodes.map {|x| x['uid']})
mclient.retries = 1
expect { mclient.echo(:msg => 'hello world') }.to \
raise_error(/MCollective agents '3' didn't respond. \n.* failed nodes: 2/)
raise_error(/MCollective agents '3' didn't respond./)
end
end


@ -46,13 +46,19 @@ describe "NailyFact DeploymentEngine" do
nodes_with_role(deploy_data, 'cinder')
end
before(:each) do
uniq_nodes_uid = deploy_data.map {|n| n['uid'] }.uniq
deploy_engine.stubs(:generate_ssh_keys).with(deploy_data.first['deployment_id'])
deploy_engine.stubs(:upload_ssh_keys).with(uniq_nodes_uid, deploy_data.first['deployment_id'])
deploy_engine.stubs(:sync_puppet_manifests).with(deploy_data.uniq { |n| n['uid'] })
end
context 'log parsing' do
let(:deploy_data) do
[{'uid' => 1, 'role' => 'controller', 'deployment_mode' => 'unknown', 'deployment_id' => '123'}]
end
it "it should not raise an exception if deployment mode is unknown" do
deploy_engine.stubs(:generate_and_upload_ssh_keys).with([1], deploy_data.first['deployment_id'])
deploy_engine.expects(:upload_facts).times(deploy_data.size)
Astute::PuppetdDeployer.stubs(:deploy).with(ctx, deploy_data, instance_of(Fixnum), true).once
expect {deploy_engine.deploy(deploy_data)}.to_not raise_exception
@ -67,9 +73,6 @@ describe "NailyFact DeploymentEngine" do
it "should not raise any exception" do
deploy_engine.expects(:upload_facts).times(deploy_data.size)
uniq_nodes_uid = deploy_data.map {|n| n['uid'] }.uniq
deploy_engine.expects(:generate_and_upload_ssh_keys).with(uniq_nodes_uid, deploy_data.first['deployment_id'])
# we got two calls, one for controller (high priority), and another for all computes (same low priority)
Astute::PuppetdDeployer.expects(:deploy).with(ctx, controller_nodes, instance_of(Fixnum), true).once
Astute::PuppetdDeployer.expects(:deploy).with(ctx, compute_nodes, instance_of(Fixnum), true).once
@ -94,20 +97,15 @@ describe "NailyFact DeploymentEngine" do
ctx.deploy_log_parser.expects(:prepare).with(compute_nodes).once
ctx.deploy_log_parser.expects(:prepare).with(cinder_nodes).once
uniq_nodes_uid = deploy_data.map {|n| n['uid'] }.uniq
deploy_engine.expects(:generate_and_upload_ssh_keys).with(uniq_nodes_uid, deploy_data.first['deployment_id'])
Astute::PuppetdDeployer.expects(:deploy).times(2)
deploy_engine.deploy(deploy_data)
end
it "should generate and publish facts for every deploy call because node may be deployed several times" do
deploy_engine.expects(:upload_facts).times(node_amount)
ctx.deploy_log_parser.expects(:prepare).with(compute_nodes).once
ctx.deploy_log_parser.expects(:prepare).with(cinder_nodes).once
deploy_engine.expects(:upload_facts).times(node_amount)
uniq_nodes_uid = deploy_data.map {|n| n['uid'] }.uniq
deploy_engine.expects(:generate_and_upload_ssh_keys).with(uniq_nodes_uid, deploy_data.first['deployment_id'])
Astute::PuppetdDeployer.expects(:deploy).times(2)
@ -123,9 +121,6 @@ describe "NailyFact DeploymentEngine" do
it "ha deploy should not raise any exception" do
deploy_engine.expects(:upload_facts).at_least_once
uniq_nodes_uid = deploy_data.map {|n| n['uid'] }.uniq
deploy_engine.expects(:generate_and_upload_ssh_keys).with(uniq_nodes_uid, deploy_data.first['deployment_id'])
primary_controller = deploy_data.find { |n| n['role'] == 'primary-controller' }
Astute::PuppetdDeployer.expects(:deploy).with(ctx, [primary_controller], 2, true).once
@ -137,18 +132,18 @@ describe "NailyFact DeploymentEngine" do
deploy_engine.deploy(deploy_data)
end
context 'exception case' do
let(:deploy_data) do
[Fixtures.ha_deploy.find { |n| n['role'] == 'controller' }]
end
it "ha deploy should not raise any exception if there are only one controller" do
deploy_engine.expects(:upload_facts).at_least_once
Astute::PuppetdDeployer.expects(:deploy).once
ctrl = deploy_data.find { |n| n['role'] == 'controller' }
uniq_nodes_uid = [ctrl].map {|n| n['uid'] }.uniq
deploy_engine.expects(:generate_and_upload_ssh_keys).with(uniq_nodes_uid, deploy_data.first['deployment_id'])
deploy_engine.deploy([ctrl])
deploy_engine.deploy(deploy_data)
end
end
end # 'ha deploy'
end
end