Add refactored merge_yaml_settings module
Replace the merge_yaml_settings resource with the
merge_yaml module. This module introduces a more advanced
version of the merge_yaml_settings resource with
additional options to control the merging behavior.
There are also alternative implementations of the
configuration merging types and providers. They
work in the same way as the "concat" module does
by assembling the resulting data structure from a set of
fragments. This should be much more puppet-master friendly.
These types are not used anywhere yet but may be used later.
Related-Bug: 1614279
Cherry-picked-from: 9eccbbc
Change-Id: Ieaa8fc840f6a91a8d10e0670bd60d07692e2cb7d
This commit is contained in:
parent
4a966e51f9
commit
49d918fc01
|
@ -1,41 +0,0 @@
|
|||
require 'yaml'

Puppet::Type.type(:merge_yaml_settings).provide(:ruby) do
  desc "Support for merging yaml configuration files."

  # Merge the sample and override settings and write the result to the
  # target file. Nothing is written when the merged structure is empty.
  def create
    merged_settings = get_merged_settings
    write_to_file(@resource[:name], merged_settings.to_yaml) unless merged_settings.empty?
  end

  # Remove the target settings file.
  def destroy
    File.unlink(@resource[:name])
  end

  # The resource is in sync when the sample settings are already equal
  # to the merged settings (i.e. the override adds nothing new).
  # @return [true,false]
  def exists?
    get_dict(@resource[:sample_settings]) == get_merged_settings
  end

  # Merge the override settings on top of the sample settings.
  # @return [Hash]
  def get_merged_settings
    sample_settings = get_dict(@resource[:sample_settings])
    override_settings = get_dict(@resource[:override_settings])
    sample_settings.merge(override_settings)
  end

  # Write the content to the given file.
  # @param filename [String] path of the destination file
  # @param content [String] serialized YAML content
  # @raise [Puppet::Error] if the file cannot be written
  def write_to_file(filename, content)
    # BUG FIX: the interpolations below were garbled to "#(unknown)";
    # the spec expects the destination path in the error message.
    debug "writing content #{content} to the file #{filename}"
    begin
      File.open(filename, "w") { |f| f.puts content }
    rescue
      raise Puppet::Error, "merge_yaml_settings: the file #{filename} can not be written!"
    end
  end

  # Coerce the argument to a Hash: pass a Hash through unchanged,
  # otherwise treat it as a path and load the YAML file.
  # Returns an empty hash when the file cannot be loaded.
  # @param obj [Hash,String]
  # @return [Hash]
  def get_dict(obj)
    return obj if obj.is_a?(Hash)
    YAML.load_file(obj) rescue {}
  end

  private :get_merged_settings, :get_dict, :write_to_file

end
|
|
@ -1,19 +0,0 @@
|
|||
Puppet::Type.newtype(:merge_yaml_settings) do

  # BUG FIX: the original wrote "desc = ..." which only assigned a local
  # variable; the DSL method must be called for the description to register.
  desc "Type to merge yaml configuration files"

  ensurable

  newparam(:name) do
    desc "Path for destination settings file"
  end

  newparam(:sample_settings) do
    desc "Path or Hash containing source settings"
  end

  newparam(:override_settings) do
    desc "Path or Hash containing custom settings"
  end

end
|
|
@ -52,11 +52,12 @@ class fuel::bootstrap_cli(
|
|||
|
||||
ensure_packages([$bootstrap_cli_package])
|
||||
|
||||
# Merge the custom settings into the bootstrap CLI configuration file.
# (The diff accidentally left two overlapping resource declarations, the
# first of them unterminated; only the new-style resource is kept.)
merge_yaml_settings { $config_path :
  ensure        => 'present',
  path          => $config_path,
  original_data => $config_path,
  override_data => $custom_settings,
  require       => Package[$bootstrap_cli_package],
}
|
||||
|
||||
if $config_wgetrc {
|
||||
|
|
|
@ -1,155 +0,0 @@
|
|||
require 'spec_helper'
require 'yaml'

provider_class = Puppet::Type.type(:merge_yaml_settings).provider(:ruby)
describe provider_class do

  let(:sample_filepath) { "/test/sample_file_path.yaml" }
  let(:settings_filepath) { "/test/settings_file_path.yaml" }
  let(:dest_filepath) { "/test/dest_file_path.yaml" }

  # The sample (original) settings structure
  let(:sample) {
    {
      :SETTING_A => 'Value1',
      :SETTING_B => 'Value2',
    }
  }

  # The override settings structure
  let(:settings) {
    {
      :SETTING_A => 'new_value1',
      :SETTING_B => 'new_value2',
      :SETTING_C => 'new_value',
    }
  }

  # The expected merge result: overrides win over the sample values
  let(:result) {
    {
      :SETTING_A => 'new_value1',
      :SETTING_B => 'new_value2',
      :SETTING_C => 'new_value'
    }
  }

  before(:each) do
    puppet_debug_override()
  end

  it 'should merge settings if sample is file and settings is hash' do
    resource = Puppet::Type::Merge_yaml_settings.new(
      {
        :name => dest_filepath,
        :sample_settings => sample_filepath,
        :provider => 'ruby',
        :override_settings => settings
      }
    )

    provider = provider_class.new(resource)
    provider.stubs(:get_dict).with(sample_filepath).returns(sample)
    provider.stubs(:get_dict).with(settings).returns(settings)
    provider.stubs(:write_to_file).with(dest_filepath, YAML.dump(result)).once
    provider.create()
  end

  it 'should FAIL if file can not be written' do
    resource = Puppet::Type::Merge_yaml_settings.new(
      {
        :name => dest_filepath,
        :sample_settings => sample_filepath,
        :provider => 'ruby',
        :override_settings => settings
      }
    )

    provider = provider_class.new(resource)
    provider.stubs(:get_dict).with(sample_filepath).returns(sample)
    provider.stubs(:get_dict).with(settings).returns(settings)
    # BUG FIX: "File.class.stubs(:open)" stubbed Class#open, not File.open,
    # so the provider never actually raised. Stub File.open itself.
    File.stubs(:open).raises(IOError)
    expect{ provider.create() }.to raise_error(Puppet::Error, %r{merge_yaml_settings:\s+the\s+file\s+\/test\/dest_file_path.yaml\s+can\s+not\s+be\s+written!})
  end

  it 'should merge settings if both are files' do
    resource = Puppet::Type::Merge_yaml_settings.new(
      {
        :name => dest_filepath,
        :sample_settings => sample_filepath,
        :provider => 'ruby',
        :override_settings => settings_filepath
      }
    )

    provider = provider_class.new(resource)
    provider.stubs(:get_dict).with(sample_filepath).returns(sample)
    provider.stubs(:get_dict).with(settings_filepath).returns(settings)
    provider.stubs(:write_to_file).with(dest_filepath, YAML.dump(result)).once
    provider.create()
  end

  it 'should merge settings if both are hashes' do
    resource = Puppet::Type::Merge_yaml_settings.new(
      {
        :name => dest_filepath,
        :sample_settings => sample,
        :provider => 'ruby',
        :override_settings => settings
      }
    )

    provider = provider_class.new(resource)
    provider.stubs(:write_to_file).with(dest_filepath, YAML.dump(result)).once
    provider.create()
  end

  it 'should use sample settings if other is not present' do
    resource = Puppet::Type::Merge_yaml_settings.new(
      {
        :name => dest_filepath,
        :sample_settings => sample,
        :provider => 'ruby',
      }
    )

    provider = provider_class.new(resource)
    provider.stubs(:write_to_file).with(dest_filepath, YAML.dump(sample)).once
    provider.create()
  end

  it 'should use new settings if other is not present' do
    resource = Puppet::Type::Merge_yaml_settings.new(
      {
        :name => dest_filepath,
        :override_settings => settings,
        :provider => 'ruby',
      }
    )

    provider = provider_class.new(resource)
    provider.stubs(:write_to_file).with(dest_filepath, YAML.dump(settings)).once
    provider.create()
  end

  it 'should not write to file if result is empty' do
    resource = Puppet::Type::Merge_yaml_settings.new(
      {
        :name => dest_filepath,
        :provider => 'ruby',
      }
    )

    provider = provider_class.new(resource)
    provider.stubs(:write_to_file).with(dest_filepath, YAML.dump(sample)).never
    provider.create()
  end
end
|
|
@ -0,0 +1,8 @@
|
|||
spec/fixtures
|
||||
|
||||
Gemfile.lock
|
||||
|
||||
.idea
|
||||
|
||||
*.swp
|
||||
*~
|
|
@ -0,0 +1,10 @@
|
|||
source 'https://rubygems.org'

gem 'puppetlabs_spec_helper'
gem 'rspec-puppet-facts'

# Pin the puppet gem to the requested version when one is exported,
# otherwise take the latest available release.
puppetversion = ENV['PUPPET_GEM_VERSION']
if puppetversion
  gem 'puppet', puppetversion, :require => false
else
  gem 'puppet'
end
|
|
@ -0,0 +1 @@
|
|||
require 'puppetlabs_spec_helper/rake_tasks'
|
|
@ -0,0 +1,110 @@
|
|||
require 'yaml'
require 'json'

Puppet::Type.type(:hash_merge).provide(:ruby) do
  attr_accessor :resource

  # The path to the file
  # @return [String]
  def file_path
    resource[:path]
  end

  # What serializer does the file use?
  # @return [Symbol]
  def file_type
    resource[:type]
  end

  # Check if the file exists
  # @return [true,false]
  def file_exists?
    File.exist? file_path
  end

  # Serialize the data and write it to the file
  # @param data [Hash]
  def write_data_to_file(data)
    content = nil
    if file_type == :yaml
      content = YAML.dump(data)
    elsif file_type == :json
      content = JSON.dump(data)
    end
    write_file content
  end

  # Write the content to the file
  # @param data [String]
  def write_file(data)
    File.open(file_path, 'w') do |file|
      file.puts data
    end
  end

  # Read the file and return its content
  # @return [String,nil]
  def read_file
    return nil unless file_exists?
    begin
      File.read file_path
    # BUG FIX: rescuing Exception swallows SignalException/SystemExit;
    # only runtime errors should be converted into a warning.
    rescue StandardError => exception
      warn "Could not read the file: '#{file_path}': #{exception}"
      return nil
    end
  end

  # Read the file and parse the data
  # @return [Hash,nil]
  def read_data_from_file
    content = read_file
    return nil unless content
    data = nil
    if file_type == :yaml
      begin
        data = YAML.load(content)
      rescue StandardError => exception
        warn "Could not parse the YAML file: '#{file_path}': #{exception}"
        return nil
      end
    elsif file_type == :json
      begin
        data = JSON.parse(content)
      rescue StandardError => exception
        warn "Could not parse the JSON file: '#{file_path}': #{exception}"
        return nil
      end
    end
    data
  end

  #####

  # @return [true,false]
  def exists?
    debug 'Call: exists?'
    file_exists?
  end

  # Write the catalog-provided data to the target file.
  def create
    debug 'Call: create'
    write_data_to_file resource[:data]
  end

  # Remove the target file.
  def destroy
    debug 'Call: destroy'
    File.unlink file_path
  end

  # The current value of the "data" property, read from disk.
  # @return [Hash,nil]
  def data
    debug 'Call: data'
    read_data_from_file
  end

  # Sync the "data" property by rewriting the file.
  # @param data [Hash]
  def data=(data)
    debug "Call: data=(#{data.class}/#{data.object_id})"
    write_data_to_file data
  end

end
|
|
@ -0,0 +1,163 @@
|
|||
require 'yaml'
require_relative '../../yaml_deep_merge'

Puppet::Type.type(:merge_yaml_settings).provide(:ruby) do
  desc 'Support for merging yaml configuration files.'

  attr_reader :resource

  # Create a new target YAML file
  def create
    debug 'Call: create'
    data = merged_data
    return if data.empty?
    write_to_file(resource[:path], data)
  end

  # Convert data structure to a YAML file contents
  # @param [Hash] data
  # @return [String]
  def serialize_data(data)
    # NOTE(review): 'x5c' looks like a mangled '\x5c' escape sequence
    # (0x5c is the backslash character) — verify against upstream.
    data.to_yaml.gsub('x5c', '\\')
  end

  # A hash of options for the deep merge module
  # @return [Hash]
  def deep_merge_options
    {
      :preserve_unmergeables => resource[:preserve_unmergeables],
      :knockout_prefix => resource[:knockout_prefix],
      :overwrite_arrays => resource[:overwrite_arrays],
      :sort_merged_arrays => resource[:sort_merged_arrays],
      :unpack_arrays => resource[:unpack_arrays],
      :merge_hash_arrays => resource[:merge_hash_arrays],
      :extend_existing_arrays => resource[:extend_existing_arrays],
      :merge_debug => resource[:merge_debug],
    }
  end

  # Enable additional debug messages
  # @return [true,false]
  def merge_debug
    deep_merge_options[:merge_debug]
  end

  # Remove the target yaml file
  def destroy
    debug 'Call: destroy'
    # BUG FIX: File.exists? is deprecated (removed in Ruby 3.2)
    File.unlink resource[:path] if File.exist? resource[:path]
  end

  # Check if the sample file contains the correct merged structure
  def exists?
    debug 'Call: exists?'
    return false unless target_yaml_file?
    debug "Exists original: #{original_data.inspect}" if merge_debug
    debug "Exists merged: #{merged_data.inspect}" if merge_debug
    result = original_data == merged_data
    debug "Return: #{result}"
    result
  end

  # Produce the merged data structure by merging
  # the original data with the override data.
  # @return [Hash]
  def merged_data
    debug 'Call: merged_data'
    debug "Merge original: #{original_data.inspect}" if merge_debug
    debug "Merge override: #{override_data.inspect}" if merge_debug
    # deep-copy the original so deep_merge! cannot mutate the cached data
    original_data_clone = Marshal.load Marshal.dump original_data
    YamlDeepMerge.deep_merge! override_data, original_data_clone, deep_merge_options
    debug "Result: #{original_data_clone.inspect}" if merge_debug
    original_data_clone
  end

  # Write the merged data to the specified file name
  # @param [String] file_name
  # @param [Hash] data
  def write_to_file(file_name, data)
    debug "Writing content to the file: '#{file_name}'"
    content = serialize_data data
    begin
      File.open(file_name, 'w') { |f| f.puts content }
    rescue => exception
      fail "The file: '#{file_name}' cannot be written! #{exception}"
    end
  end

  # Read the contents of the YAML file
  # @param [String] file_name
  def read_from_file(file_name)
    debug "Reading content from the file: '#{file_name}'"
    begin
      YAML.load_file(file_name)
    rescue => exception
      warn "The file: '#{file_name}' cannot be read! #{exception}"
      nil
    end
  end

  # The original portion of the YAML file.
  # If the target file if present it will be loaded.
  # If there is no target file, the original_data will be loaded as a file
  # or as a data structure.
  # @return [Hash,Array]
  def original_data
    return @original_data if @original_data
    if target_yaml_file?
      @original_data = read_from_file resource[:path]
      return @original_data if @original_data
    end
    if original_data_file?
      @original_data = read_from_file resource[:original_data]
      return @original_data if @original_data
    end
    unless resource[:original_data].is_a? Hash or resource[:original_data].is_a? Array
      fail "The original_data should be either a path to the YAML file or the data structure! Got: #{resource[:original_data]}"
    end
    @original_data = resource[:original_data]
  end

  # The override portion of the YAML file.
  # If the override_data are provided as a path to a file
  # the file will be loaded.
  # @return [Hash,Array]
  def override_data
    return @override_data if @override_data
    if override_data_file?
      @override_data = read_from_file resource[:override_data]
      return @override_data if @override_data
    end
    unless resource[:override_data].is_a? Hash or resource[:override_data].is_a? Array
      fail "The override_data should be either a path to the YAML file or the data structure! Got: #{resource[:override_data]}"
    end
    @override_data = resource[:override_data]
  end

  # Check if the target YAML file exists
  # @return [true,false]
  def target_yaml_file?
    return false unless resource[:path].is_a? String
    # BUG FIX: File.absolute_path always returns a (truthy) string, so the
    # old guard never failed; the intent is to check the path IS absolute.
    return false unless Puppet::Util.absolute_path? resource[:path]
    File.file? resource[:path]
  end

  # Check if original_data are provided as a file
  # and the file is present
  # @return [true,false]
  def original_data_file?
    return false unless resource[:original_data].is_a? String
    return false unless Puppet::Util.absolute_path? resource[:original_data]
    File.file? resource[:original_data]
  end

  # Check if the override_data are provided as a file
  # and the file is present
  # @return [true,false]
  def override_data_file?
    return false unless resource[:override_data].is_a? String
    return false unless Puppet::Util.absolute_path? resource[:override_data]
    File.file? resource[:override_data]
  end

end
|
|
@ -0,0 +1,39 @@
|
|||
Puppet::Type.newtype(:hash_fragment) do
  newparam(:name) do
    desc 'The name of this hash file fragment.'
    isnamevar
  end

  newparam(:hash_name) do
    desc 'The name of the hash file this fragment belongs.'
    isrequired
  end

  newparam(:priority) do
    desc 'The merge ordering number this fragment.'
    # BUG FIX: /\d+/ was unanchored and accepted any value containing a
    # digit (e.g. "12abc"); the priority must be entirely numeric.
    newvalues /\A\d+\z/
    munge do |value|
      value.to_i
    end
  end

  newparam(:data) do
    desc 'The content passed as a hash.'
    validate do |value|
      fail "The value should be a hash! Got: #{value.inspect}" unless value.is_a? Hash
    end
  end

  newparam(:type) do
    desc 'The type of serialization this hash is using.'
    newvalues :yaml, :json
    defaultto :yaml
  end

  newparam(:content) do
    desc 'The content passed as a serialized hash text.'
    validate do |value|
      fail "Content should be a text value! Got: #{value.inspect}" unless value.is_a? String
    end
  end
end
|
|
@ -0,0 +1,156 @@
|
|||
require 'yaml'
require 'json'
require 'puppet/parameter/boolean'
require 'puppet'
require 'digest/md5'
require_relative '../yaml_deep_merge'

Puppet::Type.newtype(:hash_merge) do
  ensurable do
    defaultvalues
    defaultto :present
  end

  newparam(:path) do
    desc 'The path to this file.'
    isnamevar
  end

  newparam(:hash_name) do
    desc 'Collect the fragments which are having this hash name.'
    isrequired
  end

  newparam(:type) do
    desc 'The type of serialization this hash is using.'
    newvalues :yaml, :json
    defaultto :yaml
  end

  newproperty(:data) do
    desc 'The collector for the merged fragments data'
    validate do |value|
      fail "The data should be a hash! Got: #{value.inspect}" unless value.is_a? Hash
    end

    # Report an md5 checksum instead of dumping the whole hash.
    def is_to_s(value)
      "(md5)#{Digest::MD5.hexdigest value.inspect}"
    end

    def should_to_s(value)
      "(md5)#{Digest::MD5.hexdigest value.inspect}"
    end
  end

  newparam(:knockout_prefix, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, remove elements from a hash if they are prefixed with "--".'
    defaultto true
  end

  newparam(:overwrite_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, overwrite array values instead of merging them.'
    defaultto false
  end

  newparam(:unpack_arrays) do
    desc 'Use this character as an array separator to unpack arrays which have been passed as a string.'
  end

  newparam(:merge_hash_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, merge hashes inside arrays too.'
    defaultto true
  end

  newparam(:extend_existing_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, add single values to an array value instead of overwriting it.'
    defaultto false
  end

  newparam(:preserve_unmergeables, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'Set to true to skip any unmergeable elements from source.'
    defaultto false
  end

  newparam(:merge_debug, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'Set to true to get console output of merge process for debugging.'
    defaultto false
  end

  newparam(:sort_merged_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'Set to true to sort all arrays that are merged together.'
    defaultto false
  end

  # A hash of options for the deep merge module
  # @return [Hash]
  def deep_merge_options
    {
      :preserve_unmergeables => self[:preserve_unmergeables],
      :knockout_prefix => self[:knockout_prefix],
      :overwrite_arrays => self[:overwrite_arrays],
      :sort_merged_arrays => self[:sort_merged_arrays],
      :unpack_arrays => self[:unpack_arrays],
      :merge_hash_arrays => self[:merge_hash_arrays],
      :extend_existing_arrays => self[:extend_existing_arrays],
      :merge_debug => self[:merge_debug],
    }
  end

  # Extract the data structure from a fragment: take its "data" parameter
  # when set, otherwise deserialize its "content" parameter.
  # @return [Hash,nil]
  def extract_data(fragment)
    fragment_data = fragment[:data]
    if not fragment_data and fragment[:content]
      if fragment[:type] == :yaml
        begin
          fragment_data = YAML.load(fragment[:content])
        rescue
          warn "Could not load the YAML content of the fragment: #{fragment[:name]}"
        end
      elsif fragment[:type] == :json
        begin
          fragment_data = JSON.parse(fragment[:content])
        rescue
          warn "Could not load the JSON content of the fragment: #{fragment[:name]}"
        end
      end
    end
    fragment_data
  end

  # Enable additional debug messages
  # @return [true,false]
  def merge_debug
    deep_merge_options[:merge_debug]
  end

  # Find all hash_fragment resources in the catalog with a matching
  # hash_name, ordered by priority and then by name.
  # @return [Array]
  def fragments
    return [] unless self.respond_to?(:catalog) && self.catalog
    fragments = self.catalog.resources.select do |resource|
      resource.type == :hash_fragment and resource[:hash_name] == self[:hash_name]
    end
    fragments.sort_by! do |fragment|
      [fragment[:priority].to_i, fragment[:name]]
    end
    debug "Found fragments: #{fragments.map { |f| f.title }.join ', '}"
    fragments
  end

  # Deep-merge every collected fragment into the "data" property.
  def generate
    data = {}
    fragments.each do |fragment|
      fragment_data = extract_data(fragment)
      unless fragment_data
        warn "Fragment: #{fragment[:name]} has no data!" if merge_debug
        next
      end
      unless fragment_data.is_a? Hash
        # BUG FIX: the message had a stray trailing "}" after the inspect
        warn "Fragment: #{fragment[:name]} data is not a hash! Got: #{fragment_data.inspect}" if merge_debug
        next
      end
      debug "Merging the fragment: #{fragment[:name]}" if merge_debug
      YamlDeepMerge.deep_merge! fragment_data, data, deep_merge_options
    end
    self[:data] = data
    nil
  end

end
|
|
@ -0,0 +1,91 @@
|
|||
require 'puppet/parameter/boolean'

Puppet::Type.newtype(:merge_yaml_settings) do

  desc 'Type to merge YAML configuration files'

  ensurable do
    defaultvalues
    defaultto :present
  end

  newparam(:name) do
    desc 'The name of this merge resource'
    isnamevar
  end

  # The target file path is mandatory and must be absolute.
  newparam(:path) do
    desc 'Path to the target YAML file'
    defaultto do
      fail 'The target "path" should be provided!'
    end
    validate do |value|
      unless Puppet::Util.absolute_path? value
        fail "The target file path should be an absolute path to a YAML file! Got: #{value.inspect}"
      end
    end
  end

  # The source data: either an inline structure or an absolute file path.
  newparam(:original_data) do
    desc 'Path or Hash containing the source settings. It will be used if there is no file created at "path"'
    validate do |value|
      break unless value
      break if value.is_a? Hash
      break if value.is_a? Array
      unless Puppet::Util.absolute_path? value
        fail "The original data should be either a data structure or an absolute path to a YAML file! Got: #{value.inspect}"
      end
    end
  end

  # The override data: either an inline structure or an absolute file path.
  newparam(:override_data) do
    desc 'The override data structure or a path to the YAML file containing it.'
    validate do |value|
      break unless value
      break if value.is_a? Hash
      break if value.is_a? Array
      unless Puppet::Util.absolute_path? value
        fail "The override data should be either a data structure or an absolute path to a YAML file! Got: #{value.inspect}"
      end
    end
  end

  # The remaining parameters map one-to-one to deep-merge options.

  newparam(:knockout_prefix, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, remove elements from a hash if they are prefixed with "--".'
    defaultto true
  end

  newparam(:overwrite_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, overwrite array values instead of merging them.'
    defaultto false
  end

  newparam(:unpack_arrays) do
    desc 'Use this character as an array separator to unpack arrays which have been passed as a string.'
  end

  newparam(:merge_hash_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, merge hashes inside arrays too.'
    defaultto true
  end

  newparam(:extend_existing_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'When merging hashes, add single values to an array value instead of overwriting it.'
    defaultto false
  end

  newparam(:preserve_unmergeables, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'Set to true to skip any unmergeable elements from source.'
    defaultto false
  end

  newparam(:merge_debug, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'Set to true to get console output of merge process for debugging.'
    defaultto false
  end

  newparam(:sort_merged_arrays, :boolean => true, :parent => Puppet::Parameter::Boolean) do
    desc 'Set to true to sort all arrays that are merged together.'
    defaultto false
  end
end
|
|
@ -0,0 +1,240 @@
|
|||
module YamlDeepMerge
|
||||
|
||||
class InvalidParameter < StandardError; end
|
||||
|
||||
DEFAULT_FIELD_KNOCKOUT_PREFIX = '--'
|
||||
|
||||
# Deep Merge core documentation.
|
||||
# deep_merge! method permits merging of arbitrary child elements. The two top level
|
||||
# elements must be hashes. These hashes can contain unlimited (to stack limit) levels
|
||||
# of child elements. These child elements to not have to be of the same types.
|
||||
# Where child elements are of the same type, deep_merge will attempt to merge them together.
|
||||
# Where child elements are not of the same type, deep_merge will skip or optionally overwrite
|
||||
# the destination element with the contents of the source element at that level.
|
||||
# So if you have two hashes like this:
|
||||
# source = {:x => [1,2,3], :y => 2}
|
||||
# dest = {:x => [4,5,'6'], :y => [7,8,9]}
|
||||
# dest.deep_merge!(source)
|
||||
# Results: {:x => [1,2,3,4,5,'6'], :y => 2}
|
||||
# By default, "deep_merge!" will overwrite any unmergeables and merge everything else.
|
||||
# To avoid this, use "deep_merge" (no bang/exclamation mark)
|
||||
#
|
||||
# Options:
|
||||
# Options are specified in the last parameter passed, which should be in hash format:
|
||||
# hash.deep_merge!({:x => [1,2]}, {:knockout_prefix => '--'})
|
||||
# :preserve_unmergeables DEFAULT: false
|
||||
# Set to true to skip any unmergeable elements from source
|
||||
# :knockout_prefix DEFAULT: nil
|
||||
# Set to string value to signify prefix which deletes elements from existing element
|
||||
# :overwrite_arrays DEFAULT: false
|
||||
# Set to true if you want to avoid merging arrays
|
||||
# :sort_merged_arrays DEFAULT: false
|
||||
# Set to true to sort all arrays that are merged together
|
||||
# :unpack_arrays DEFAULT: nil
|
||||
# Set to string value to run "Array::join" then "String::split" against all arrays
|
||||
# :merge_hash_arrays DEFAULT: false
|
||||
# Set to true to merge hashes within arrays
|
||||
# :keep_array_duplicates DEFAULT: false
|
||||
# Set to true to preserve duplicate array entries
|
||||
# :merge_debug DEFAULT: false
|
||||
# Set to true to get console output of merge process for debugging
|
||||
#
|
||||
# Selected Options Details:
|
||||
# :knockout_prefix => The purpose of this is to provide a way to remove elements
|
||||
# from existing Hash by specifying them in a special way in incoming hash
|
||||
# source = {:x => ['--1', '2']}
|
||||
# dest = {:x => ['1', '3']}
|
||||
# dest.ko_deep_merge!(source)
|
||||
# Results: {:x => ['2','3']}
|
||||
# Additionally, if the knockout_prefix is passed alone as a string, it will cause
|
||||
# the entire element to be removed:
|
||||
# source = {:x => '--'}
|
||||
# dest = {:x => [1,2,3]}
|
||||
# dest.ko_deep_merge!(source)
|
||||
# Results: {:x => ""}
|
||||
# :unpack_arrays => The purpose of this is to permit compound elements to be passed
|
||||
# in as strings and to be converted into discrete array elements
|
||||
# irsource = {:x => ['1,2,3', '4']}
|
||||
# dest = {:x => ['5','6','7,8']}
|
||||
# dest.deep_merge!(source, {:unpack_arrays => ','})
|
||||
# Results: {:x => ['1','2','3','4','5','6','7','8'}
|
||||
# Why: If receiving data from an HTML form, this makes it easy for a checkbox
|
||||
# to pass multiple values from within a single HTML element
|
||||
#
|
||||
# :merge_hash_arrays => merge hashes within arrays
|
||||
# source = {:x => [{:y => 1}]}
|
||||
# dest = {:x => [{:z => 2}]}
|
||||
# dest.deep_merge!(source, {:merge_hash_arrays => true})
|
||||
# Results: {:x => [{:y => 1, :z => 2}]}
|
||||
#
|
||||
# :keep_array_duplicates => merges arrays within hashes but keeps duplicate elements
|
||||
# source = {:x => {:y => [1,2,2,2,3]}}
|
||||
# dest = {:x => {:y => [4,5,6]}}
|
||||
# dest.deep_merge!(source, {:keep_array_duplicates => true})
|
||||
# Results: {:x => {:y => [1,2,2,2,3,4,5,6]}}
|
||||
#
|
||||
# There are many tests for this library - and you can learn more about the features
|
||||
# and usages of deep_merge! by just browsing the test examples
|
||||
# Recursively merge +source+ into +dest+ honouring the behaviour options
# documented above (knockout prefixes, array unpacking/overwriting/sorting,
# hash-array merging, etc.).  +dest+ may be mutated; the merged result is
# returned in all cases.
def self.deep_merge!(source, dest, options = {})
  # Behaviour knobs pulled out of the options hash up front.
  debug = options[:merge_debug] || false
  overwrite_unmergeable = !options[:preserve_unmergeables]
  knockout_prefix = options[:knockout_prefix] || nil
  raise InvalidParameter, "knockout_prefix cannot be an empty string in deep_merge!" if knockout_prefix == ""
  raise InvalidParameter, "overwrite_unmergeable must be true if knockout_prefix is specified in deep_merge!" if knockout_prefix && !overwrite_unmergeable
  # if present: we will split and join arrays on this char before merging
  array_split_char = options[:unpack_arrays] || false
  # request that we avoid merging arrays
  overwrite_arrays = options[:overwrite_arrays] || false
  # request that we sort together any arrays when they are merged
  sort_merged_arrays = options[:sort_merged_arrays] || false
  # request that arrays of hashes are merged together
  merge_hash_arrays = options[:merge_hash_arrays] || false
  # request to extend existing arrays, instead of overwriting them
  extend_existing_arrays = options[:extend_existing_arrays] || false
  # request that arrays keep duplicate elements
  keep_array_duplicates = options[:keep_array_duplicates] || false

  di = options[:debug_indent] || ''
  # nothing to merge in
  return dest if source.nil?
  # no destination yet: just adopt the source wholesale
  if overwrite_unmergeable && !dest
    dest = source
    return dest
  end

  puts "#{di}Source class: #{source.class.inspect} :: Dest class: #{dest.class.inspect}" if debug
  case source
  when Hash
    puts "#{di}Hashes: #{source.inspect} :: #{dest.inspect}" if debug
    source.each do |src_key, src_value|
      if dest.kind_of?(Hash)
        puts "#{di} looping: #{src_key.inspect} => #{src_value.inspect} :: #{dest.inspect}" if debug
        if dest[src_key]
          puts "#{di} ==>merging: #{src_key.inspect} => #{src_value.inspect} :: #{dest[src_key].inspect}" if debug
          dest[src_key] = deep_merge!(src_value, dest[src_key], options.merge(:debug_indent => di + ' '))
        else
          # dest[src_key] doesn't exist: create it via a recursive merge into a
          # dup of the source value (dup may be unsupported, e.g. Numeric/nil)
          puts "#{di} ==>merging over: #{src_key.inspect} => #{src_value.inspect}" if debug
          begin
            src_dup = src_value.dup
          rescue TypeError
            src_dup = src_value
          end
          dest[src_key] = deep_merge!(src_value, src_dup, options.merge(:debug_indent => di + ' '))
        end
      elsif dest.kind_of?(Array) && extend_existing_arrays
        dest.push(source)
      elsif overwrite_unmergeable
        # dest isn't a hash: overwrite it completely (if permitted)
        puts "#{di} overwriting dest: #{src_key.inspect} => #{src_value.inspect} -over-> #{dest.inspect}" if debug
        dest = overwrite_unmergeables(source, dest, options)
      end
    end
  when Array
    puts "#{di}Arrays: #{source.inspect} :: #{dest.inspect}" if debug
    if overwrite_arrays
      puts "#{di} overwrite arrays" if debug
      dest = source
    else
      # if we are instructed, join/split any source arrays before processing
      if array_split_char
        puts "#{di} split/join on source: #{source.inspect}" if debug
        source = source.join(array_split_char).split(array_split_char)
        dest = dest.join(array_split_char).split(array_split_char) if dest.kind_of?(Array)
      end
      # a naked knockout_prefix in source means: truncate dest
      if knockout_prefix && source.index(knockout_prefix)
        dest = clear_or_nil(dest)
        source.delete(knockout_prefix)
      end
      if dest.kind_of?(Array)
        if knockout_prefix
          print "#{di} knocking out: " if debug
          # remove knockout-prefixed items from both source and dest
          source.delete_if do |ko_item|
            retval = false
            item = ko_item.respond_to?(:gsub) ? ko_item.gsub(%r{^#{knockout_prefix}}, "") : ko_item
            if item != ko_item
              print "#{ko_item} - " if debug
              dest.delete(item)
              dest.delete(ko_item)
              retval = true
            end
            retval
          end
          puts if debug
        end
        puts "#{di} merging arrays: #{source.inspect} :: #{dest.inspect}" if debug
        source_all_hashes = source.all? { |i| i.kind_of?(Hash) }
        dest_all_hashes = dest.all? { |i| i.kind_of?(Hash) }
        if merge_hash_arrays && source_all_hashes && dest_all_hashes
          # merge hashes in lists pair-wise by index
          list = []
          dest.each_index do |i|
            list[i] = deep_merge!(source[i] || {}, dest[i],
                                  options.merge(:debug_indent => di + ' '))
          end
          list += source[dest.count..-1] if source.count > dest.count
          dest = list
        elsif keep_array_duplicates
          dest = dest.concat(source)
        else
          dest = dest | source
        end
        dest.sort! if sort_merged_arrays
      elsif overwrite_unmergeable
        puts "#{di} overwriting dest: #{source.inspect} -over-> #{dest.inspect}" if debug
        dest = overwrite_unmergeables(source, dest, options)
      end
    end
  else
    # source is neither an array nor a hash: overwrite (or extend) dest
    if dest.kind_of?(Array) && extend_existing_arrays
      dest.push(source)
    else
      puts "#{di}Others: #{source.inspect} :: #{dest.inspect}" if debug
      dest = overwrite_unmergeables(source, dest, options)
    end
  end
  puts "#{di}Returning #{dest.inspect}" if debug
  dest
end # deep_merge!
|
||||
|
||||
# allows deep_merge! to uniformly handle overwriting of unmergeable entities
|
||||
# Uniform handling of "unmergeable" values for deep_merge!: either overwrite
# dest with source, or — when a knockout prefix is in play — strip knocked-out
# entries from source first and blank dest out entirely if one was found.
def self.overwrite_unmergeables(source, dest, options)
  merge_debug = options[:merge_debug] || false
  overwrite_unmergeable = !options[:preserve_unmergeables]
  knockout_prefix = options[:knockout_prefix] || false
  di = options[:debug_indent] || ''
  # caller asked us to preserve unmergeables: leave dest alone
  return dest unless overwrite_unmergeable
  # no knockout handling requested: plain overwrite
  return source unless knockout_prefix

  # strip the knockout marker from source before deciding what to do
  src_tmp =
    if source.kind_of?(String)
      source.gsub(%r{^#{knockout_prefix}}, "")
    elsif source.kind_of?(Array)
      source.delete_if { |ko_item| ko_item.kind_of?(String) && ko_item.match(%r{^#{knockout_prefix}}) }
    else
      source
    end
  if src_tmp == source
    # no knockout_prefix found: just overwrite dest
    puts "#{di}#{src_tmp.inspect} -over-> #{dest.inspect}" if merge_debug
    src_tmp
  else
    # a knockout_prefix was found: delete dest
    puts "#{di}\"\" -over-> #{dest.inspect}" if merge_debug
    ""
  end
end
|
||||
|
||||
# Empty +obj+ in place when it supports #clear (Array/Hash/String/...),
# otherwise return nil.
def self.clear_or_nil(obj)
  unless obj.respond_to?(:clear)
    return nil
  end
  obj.clear
  obj
end
|
||||
|
||||
end # module DeepMerge
|
|
@ -0,0 +1,27 @@
|
|||
require 'puppetlabs_spec_helper/module_spec_helper'
|
||||
|
||||
require 'rspec-puppet-facts'
|
||||
include RspecPuppetFacts
|
||||
|
||||
# Operating systems (and releases) the specs are exercised against via
# rspec-puppet-facts.
def supported_os
  [
    { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7.0'] },
    { 'operatingsystem' => 'Ubuntu', 'operatingsystemrelease' => ['14.04', '16.04'] },
  ]
end
|
||||
|
||||
# Turn on Puppet console debug logging when the SPEC_PUPPET_DEBUG environment
# variable is set; a no-op otherwise.
def puppet_debug_override
  if ENV['SPEC_PUPPET_DEBUG']
    Puppet::Util::Log.level = :debug
    Puppet::Util::Log.newdestination(:console)
  end
end
|
||||
|
||||
# Use rspec's built-in mocking framework for all specs.
RSpec.configure { |config| config.mock_with :rspec }
|
|
@ -0,0 +1,55 @@
|
|||
require 'spec_helper'
|
||||
|
||||
# Unit tests for the ruby provider of the hash_merge type: file read/write
# round-tripping of the :data structure in both YAML and JSON form.
describe Puppet::Type.type(:hash_merge).provider(:ruby) do
  before(:each) { puppet_debug_override }

  let(:resource) do
    Puppet::Type.type(:hash_merge).new(
      :title => '/tmp/test.yaml',
      :data  => {'a' => '1'},
    )
  end

  let(:provider) { resource.provider }

  subject { provider }

  it 'should exist' do
    is_expected.not_to be_nil
  end

  it 'should be able to read YAML file' do
    resource[:type] = 'yaml'
    expect(provider).to receive(:read_file).and_return("---\na: '1'\n")
    expect(provider.read_data_from_file).to eq({'a' => '1'})
  end

  it 'should be able to read JSON file' do
    resource[:type] = 'json'
    expect(provider).to receive(:read_file).and_return('{"a":"1"}')
    expect(provider.read_data_from_file).to eq({'a' => '1'})
  end

  it 'should be able to write YAML file' do
    resource[:type] = 'yaml'
    # NOTE(review): the second accepted form looks like a typo of the YAML
    # dump ("--\n a:'1'\n" vs "---\na: '1'\n") — confirm intent.
    expect(provider).to receive(:write_file) do |yaml|
      yaml == "---\na: '1'\n" or yaml == "--\n a:'1'\n"
    end
    provider.write_data_to_file({'a' => '1'})
  end

  it 'should be able to write JSON file' do
    resource[:type] = 'json'
    expect(provider).to receive(:write_file).with('{"a":"1"}')
    provider.write_data_to_file({'a' => '1'})
  end
end
|
|
@ -0,0 +1,126 @@
|
|||
require 'spec_helper'
|
||||
|
||||
# Unit tests for the ruby provider of the merge_yaml_settings type:
# data retrieval precedence (target file -> original file -> parameter),
# merge semantics, and the exists? idempotency check.
describe Puppet::Type.type(:merge_yaml_settings).provider(:ruby) do
  before(:each) { puppet_debug_override }

  let(:path1) { '/tmp/test1.yaml' }
  let(:path2) { '/tmp/test2.yaml' }
  let(:path3) { '/tmp/test3.yaml' }

  let(:resource) do
    Puppet::Type.type(:merge_yaml_settings).new(
      :title         => 'test',
      :path          => path1,
      :original_data => path2,
      :override_data => path3,
    )
  end

  let(:provider) { resource.provider }

  subject { provider }

  it do
    is_expected.not_to be_nil
  end

  context 'retrieval' do
    it 'can get the original data from the target file' do
      expect(provider).to receive(:target_yaml_file?).and_return(true)
      expect(provider).to receive(:read_from_file).with(path1).and_return({'a' => '1'})
      expect(provider.original_data).to eq('a' => '1')
    end

    it 'can get the original data from the original file' do
      expect(provider).to receive(:target_yaml_file?).and_return(false)
      expect(provider).to receive(:original_data_file?).and_return(true)
      expect(provider).to receive(:read_from_file).with(path2).and_return({'a' => '2'})
      expect(provider.original_data).to eq('a' => '2')
    end

    it 'can get the original data from the parameter' do
      resource[:original_data] = {'a' => '3'}
      expect(provider).to receive(:target_yaml_file?).and_return(false)
      expect(provider).to receive(:original_data_file?).and_return(false)
      expect(provider.original_data).to eq('a' => '3')
    end

    it 'can get the override data from the override file' do
      expect(provider).to receive(:override_data_file?).and_return(true)
      expect(provider).to receive(:read_from_file).with(path3).and_return({'b' => '1'})
      expect(provider.override_data).to eq('b' => '1')
    end

    # FIX: this example previously reused the description of the one above
    # although it exercises the parameter path, not the file path.
    it 'can get the override data from the parameter' do
      resource[:override_data] = {'b' => '2'}
      expect(provider).to receive(:override_data_file?).and_return(false)
      expect(provider.override_data).to eq('b' => '2')
    end
  end

  context 'merge' do
    it 'can merge the original data with override data' do
      expect(provider).to receive(:original_data).and_return('a' => '1', 'c' => '1')
      expect(provider).to receive(:override_data).and_return('b' => '2', 'c' => '2')
      expect(provider.merged_data).to eq('a' => '1', 'b' => '2', 'c' => '2')
    end

    it 'the merge should not modify the original data' do
      resource[:original_data] = {'a' => '1'}
      expect(provider).to receive(:target_yaml_file?).and_return(false)
      expect(provider).to receive(:original_data_file?).and_return(false)
      expect(provider).to receive(:override_data).and_return('b' => '2')
      expect(provider.merged_data).to eq('a' => '1', 'b' => '2')
      expect(provider.original_data).to eq('a' => '1')
    end

    it 'will merge the array values' do
      resource[:sort_merged_arrays] = true
      expect(provider).to receive(:original_data).and_return('a' => ['1']).at_least(:once)
      expect(provider).to receive(:override_data).and_return('a' => ['2']).at_least(:once)
      expect(provider.merged_data).to eq('a' => %w(1 2))
    end

    it 'will replace array values instead of merging them if :overwrite_arrays' do
      resource[:sort_merged_arrays] = true
      resource[:overwrite_arrays] = true
      expect(provider).to receive(:original_data).and_return('a' => ['1']).at_least(:once)
      expect(provider).to receive(:override_data).and_return('a' => ['2']).at_least(:once)
      expect(provider.merged_data).to eq('a' => %w(2))
    end
  end

  context 'transaction' do
    it 'will detect if the merged data is different from the original data' do
      expect(provider).to receive(:target_yaml_file?).and_return(true)
      expect(provider).to receive(:original_data).and_return('a' => '1').at_least(:once)
      expect(provider).to receive(:merged_data).and_return('a' => '2').at_least(:once)
      expect(provider.exists?).to eq false
    end

    it 'will not do anything if the original_data and the merged_data is same' do
      expect(provider).to receive(:target_yaml_file?).and_return(true)
      expect(provider).to receive(:original_data).and_return('a' => '1').at_least(:once)
      expect(provider).to receive(:merged_data).and_return('a' => '1').at_least(:once)
      expect(provider.exists?).to eq true
    end
  end
end
|
|
@ -0,0 +1,26 @@
|
|||
require 'spec_helper'
|
||||
|
||||
# Unit tests for the hash_fragment type: the type exists and every expected
# parameter is declared and documented.
describe Puppet::Type.type(:hash_fragment) do
  subject { Puppet::Type.type(:hash_fragment) }

  before(:each) { puppet_debug_override }

  it 'should exist' do
    is_expected.not_to be_nil
  end

  %w(name hash_name priority data type content).each do |param|
    it "should have a #{param} parameter" do
      expect(subject.validparameter?(param.to_sym)).to be_truthy
    end

    it "should have documentation for its #{param} parameter" do
      expect(subject.paramclass(param.to_sym).doc).to be_a String
    end
  end
end
|
|
@ -0,0 +1,143 @@
|
|||
require 'spec_helper'
|
||||
|
||||
# Unit tests for the hash_merge type: parameter declaration/documentation and
# the concat-style collection of hash_fragment resources from the catalog.
describe Puppet::Type.type(:hash_merge) do
  subject { Puppet::Type.type(:hash_merge) }

  before(:each) { puppet_debug_override }

  it 'should exist' do
    is_expected.not_to be_nil
  end

  %w(path hash_name type knockout_prefix overwrite_arrays unpack_arrays merge_hash_arrays extend_existing_arrays).each do |param|
    it "should have a #{param} parameter" do
      expect(subject.validparameter?(param.to_sym)).to be_truthy
    end

    it "should have documentation for its #{param} parameter" do
      expect(subject.paramclass(param.to_sym).doc).to be_a String
    end
  end

  context 'fragment data collection' do
    let(:fragment) { Puppet::Type.type(:hash_fragment) }

    let(:catalog) { Puppet::Resource::Catalog.new }

    # Plain-data fragments with increasing priority; later ones win on merge.
    let(:test1) do
      fragment.new(
        :name      => 'test1',
        :hash_name => 'test',
        :priority  => 1,
        :data      => { 'a' => '1' },
      )
    end

    let(:test2) do
      fragment.new(
        :name      => 'test2',
        :hash_name => 'test',
        :priority  => 2,
        :data      => { 'a' => '2' },
      )
    end

    # Content-based fragments parsed from JSON and YAML strings.
    let(:test3) do
      fragment.new(
        :name      => 'test3',
        :hash_name => 'test',
        :priority  => 3,
        :type      => :json,
        :content   => '{"c":"3"}',
      )
    end

    let(:test4) do
      fragment.new(
        :name      => 'test4',
        :hash_name => 'test',
        :priority  => 4,
        :type      => :yaml,
        :content   => '
---
d:
- a
- b
- c
',
      )
    end

    let(:test5) do
      fragment.new(
        :name      => 'test5',
        :hash_name => 'test',
        :priority  => 5,
        :type      => :json,
        :content   => '{"a":"3"}',
      )
    end

    let(:merge) do
      subject.new(
        :name      => 'test.yaml',
        :hash_name => 'test',
        :path      => '/tmp/test.yaml',
      )
    end

    before(:each) do
      catalog.add_resource merge
      catalog.add_resource test1
      catalog.add_resource test2
      catalog.add_resource test3
      catalog.add_resource test4
      catalog.add_resource test5
      generate
    end

    # Locate the hash_merge resource in the catalog and trigger fragment
    # collection via its generate hook.
    let(:generate) do
      merge_resource = catalog.resources.find { |r| r.type == :hash_merge }
      merge_resource.generate
      merge_resource
    end

    it 'can collect the data blocks' do
      ral_merge = generate
      expect(ral_merge[:data]).to eq(
        {
          "a" => "3",
          "c" => "3",
          "d" => ["a", "b", "c"],
        }
      )
    end
  end
end
|
||||
|
||||
|
|
@ -0,0 +1,91 @@
|
|||
require 'spec_helper'
|
||||
|
||||
# Unit tests for the merge_yaml_settings type: parameter validation rules
# and the presence of all merge-behaviour parameters.
describe Puppet::Type.type(:merge_yaml_settings) do
  before(:each) { puppet_debug_override }

  subject { Puppet::Type.type(:merge_yaml_settings) }

  it 'should create instance' do
    is_expected.not_to be_nil
  end

  it 'should require path' do
    expect do
      subject.new(
        :title => 'test',
      )
    end.to raise_error Puppet::Error
  end

  it 'should require path as an absolute path' do
    expect do
      subject.new(
        :title => 'test',
        :path  => 'test.yaml',
      )
    end.to raise_error Puppet::Error
  end

  it 'should not accept non-structure values for the original data' do
    expect do
      subject.new(
        :title         => 'test',
        :path          => '/tmp/test.yaml',
        :original_data => :test,
      )
    end.to raise_error Puppet::Error
  end

  it 'should not accept non-absolute file paths for the original data' do
    expect do
      subject.new(
        :title         => 'test',
        :path          => '/tmp/test.yaml',
        :original_data => 'original.yaml',
      )
    end.to raise_error Puppet::Error
  end

  it 'should not accept non-structure values for the override data' do
    expect do
      subject.new(
        :title         => 'test',
        :path          => '/tmp/test.yaml',
        :override_data => :test,
      )
    end.to raise_error Puppet::Error
  end

  it 'should not accept non-absolute file paths for the override data' do
    expect do
      subject.new(
        :title         => 'test',
        :path          => '/tmp/test.yaml',
        # FIX: this example previously set :original_data (copy-paste error),
        # so the override_data validation path was never exercised.
        :override_data => 'override.yaml',
      )
    end.to raise_error Puppet::Error
  end

  %w(knockout_prefix overwrite_arrays unpack_arrays merge_hash_arrays
     extend_existing_arrays preserve_unmergeables merge_debug sort_merged_arrays).each do |parameter|
    it "should have '#{parameter}' parameter" do
      expect(subject.validparameter? parameter.to_sym).to eq true
    end
  end
end
|
|
@ -0,0 +1,33 @@
|
|||
# Acceptance manifest: assemble /tmp/test.yaml from hash_fragment pieces.
# Fragments sharing hash_name 'test' are merged in priority order; 'test5'
# belongs to a different hash_name and must be ignored by the merge below.
hash_fragment { 'test1' :
  hash_name => 'test',
  priority  => '01',
  data      => { 'a' => '1' },
}

hash_fragment { 'test2' :
  hash_name => 'test',
  priority  => '02',
  data      => { 'b' => '1' },
}

hash_fragment { 'test3' :
  hash_name => 'test',
  priority  => '03',
  data      => { 'a' => '2' },
}

hash_fragment { 'test4' :
  hash_name => 'test',
  priority  => '04',
  data      => { 'b' => '2' },
}

hash_fragment { 'test5' :
  hash_name => 'other',
  priority  => '05',
  data      => { 'a' => '3' },
}

# Collect every 'test' fragment and write the merged hash to the target path.
hash_merge { '/tmp/test.yaml' :
  hash_name => 'test',
}
|
|
@ -0,0 +1,6 @@
|
|||
#!/bin/sh

# Run the hash_merge acceptance manifest from this script's directory.
# FIX: quote "$0" inside dirname so the script survives paths with spaces;
# use $(...) instead of legacy backticks.
DIR=$(dirname "$0")
cd "${DIR}" || exit 1

puppet apply -vd --evaltrace --trace --modulepath=../.. test_hash_merge.pp
|
|
@ -0,0 +1,34 @@
|
|||
# Acceptance manifest: chained merge_yaml_settings runs against one target.
# Resource defaults: every resource reads and writes /tmp/test.yaml and
# replaces arrays rather than merging them.
Merge_yaml_settings {
  path             => '/tmp/test.yaml',
  original_data    => '/tmp/test.yaml',
  overwrite_arrays => true,
}

# Seed the file, then layer overrides one after another (ordered by '->').
merge_yaml_settings { 'init' :
  original_data => { 'a' => '1' },
  override_data => { 'b' => '2' },
}

->

merge_yaml_settings { '1' :
  override_data => { 'c' => '3' },
}

->

merge_yaml_settings { '2' :
  override_data => { 'd' => ['1','2'] },
}

->

merge_yaml_settings { '3' :
  override_data => { 'd' => ['3','4'] },
}

->

# Identical to '3': verifies the resource is idempotent on a second run.
merge_yaml_settings { '4' :
  override_data => { 'd' => ['3','4'] },
}
|
|
@ -0,0 +1,6 @@
|
|||
#!/bin/sh

# Run the merge_yaml_settings acceptance manifest from this script's directory.
# FIX: quote "$0" inside dirname so the script survives paths with spaces;
# use $(...) instead of legacy backticks.
DIR=$(dirname "$0")
cd "${DIR}" || exit 1

puppet apply -vd --evaltrace --trace --modulepath=../.. test_yaml_settings.pp
|
Loading…
Reference in New Issue