From 6b6858e8d78708990b82e938adaac29c1bffa066 Mon Sep 17 00:00:00 2001 From: Ghanshyam Mann Date: Sat, 24 Feb 2024 11:31:03 -0800 Subject: [PATCH] Retire TripleO: remove repo content The TripleO project is retiring - https://review.opendev.org/c/openstack/governance/+/905145 This commit removes the content of this project repo. Change-Id: I511202b39dc8fa3416743132a926f2402701632f --- .ansible-lint | 7 - .envrc | 3 - .gitignore | 76 --- .pre-commit-config.yaml | 53 -- LICENSE | 175 ------ MANIFEST.in | 11 - README.rst | 329 +--------- ansible.cfg | 12 - bindep.txt | 1 - docs/doc-requirements.txt | 5 - docs/source/_custom/custom.css | 226 ------- docs/source/_custom/rdo_styling.css | 208 ------ docs/source/_templates/layout.html | 1 - docs/source/conf.py | 148 ----- .../_images/TripleO_Network_Diagram_.jpg | Bin 94564 -> 0 bytes .../copying-customized-files.rst | 109 ---- .../customizing-external-network-vlan.rst | 20 - .../customizing-undercloud-conf.rst | 20 - docs/static/baremetal-overcloud/env-setup.rst | 42 -- .../env-specific-pre-deploy-steps.rst | 15 - .../environment-settings-structure.rst | 98 --- .../baremetal-overcloud/introduction.rst | 20 - .../static/baremetal-overcloud/networking.rst | 37 -- .../baremetal-overcloud/requirements.rst | 22 - .../validate-prior-to-deploy.rst | 9 - .../virtual-vs-baremetal-undercloud.rst | 16 - docs/static/env-setup-virt.rst | 82 --- .../calling-quickstart.rst | 87 --- .../ovb-openstack-cloud/introduction.rst | 14 - ...running-quickstart-instances-openstack.rst | 85 --- galaxy.yml | 45 -- infrared_plugin/main.yml | 75 --- infrared_plugin/plugin.spec | 215 ------- infrared_plugin/roles | 1 - plugins/module_utils/test_utils.py | 59 -- plugins/modules/ara_graphite.py | 190 ------ plugins/modules/ara_influxdb.py | 593 ------------------ plugins/modules/flatten_nested_dict.py | 81 --- plugins/modules/sova.py | 301 --------- requirements.txt | 2 - requirements.yml | 2 - roles/collect_logs/defaults/main.yml | 510 --------------- .../files/collect-container-logs.sh | 121 ---- roles/collect_logs/files/consolidate-avc.pl | 44 -- roles/collect_logs/files/heat-deploy-times.py | 62 -- roles/collect_logs/library | 1 - roles/collect_logs/meta/main.yml | 27 - .../molecule/default/converge.yml | 95 --- .../molecule/default/molecule.yml | 44 -- .../collect_logs/molecule/default/prepare.yml | 17 - .../collect_logs/molecule/default/verify.yml | 8 - .../molecule/infrared/converge.yml | 99 --- .../molecule/infrared/molecule.yml | 47 -- .../collect_logs/molecule/infrared/verify.yml | 84 --- roles/collect_logs/molecule/sova/converge.yml | 12 - roles/collect_logs/molecule/sova/molecule.yml | 15 - roles/collect_logs/molecule/sova/prepare.yml | 105 ---- roles/collect_logs/molecule/sova/verify.yml | 33 - .../scripts/doc_extrapolation.awk | 42 -- roles/collect_logs/tasks/collect.yml | 232 ------- .../collect_logs/tasks/collect/container.yml | 49 -- .../collect_logs/tasks/collect/monitoring.yml | 39 -- roles/collect_logs/tasks/collect/network.yml | 49 -- roles/collect_logs/tasks/collect/system.yml | 39 -- roles/collect_logs/tasks/create-docs.yml | 45 -- roles/collect_logs/tasks/main.yml | 48 -- roles/collect_logs/tasks/publish.yml | 150 ----- roles/collect_logs/tasks/publish_ara.yml | 43 -- .../tasks/publish_ara_graphite.yml | 14 - .../tasks/publish_ara_influxdb.yml | 69 -- .../tasks/sanitize_log_strings.yaml | 14 - roles/collect_logs/tasks/sova.yml | 47 -- .../collect_logs/templates/full_logs.html.j2 | 14 - roles/collect_logs/templates/index.rst.j2 | 22 -
.../collect_logs/templates/odl_extra_logs.j2 | 20 - roles/collect_logs/templates/rsync-filter.j2 | 30 - roles/collect_logs/vars/family-redhat.yml | 7 - .../vars/infrared-collect-exclude-list.yml | 66 -- roles/collect_logs/vars/unsecure.yml | 10 - setup.cfg | 45 -- setup.py | 22 - .../zuul-ansible-role-collect-logs.yaml | 9 - test-requirements.txt | 4 - tests/sanity/ignore-2.10.txt | 4 - tests/sanity/ignore-2.11.txt | 1 - tests/sanity/ignore-2.12.txt | 1 - tests/sanity/ignore-2.13.txt | 1 - tests/sanity/ignore-2.9.txt | 1 - tests/sanity/requirements.txt | 2 - tests/unit/requirements.txt | 3 - tests/unit/test_flatten_nested_dict.py | 76 --- tests/unit/test_sova.py | 68 -- tox.ini | 83 --- zuul.d/layout.yaml | 101 --- zuul.d/playbooks/pre.yml | 25 - 95 files changed, 8 insertions(+), 6306 deletions(-) delete mode 100644 .ansible-lint delete mode 100644 .envrc delete mode 100644 .gitignore delete mode 100644 .pre-commit-config.yaml delete mode 100644 LICENSE delete mode 100644 MANIFEST.in delete mode 100644 ansible.cfg delete mode 100644 bindep.txt delete mode 100644 docs/doc-requirements.txt delete mode 100644 docs/source/_custom/custom.css delete mode 100644 docs/source/_custom/rdo_styling.css delete mode 100644 docs/source/_templates/layout.html delete mode 100644 docs/source/conf.py delete mode 100644 docs/static/baremetal-overcloud/_images/TripleO_Network_Diagram_.jpg delete mode 100644 docs/static/baremetal-overcloud/copying-customized-files.rst delete mode 100644 docs/static/baremetal-overcloud/customizing-external-network-vlan.rst delete mode 100644 docs/static/baremetal-overcloud/customizing-undercloud-conf.rst delete mode 100644 docs/static/baremetal-overcloud/env-setup.rst delete mode 100644 docs/static/baremetal-overcloud/env-specific-pre-deploy-steps.rst delete mode 100644 docs/static/baremetal-overcloud/environment-settings-structure.rst delete mode 100644 docs/static/baremetal-overcloud/introduction.rst delete mode 100644 docs/static/baremetal-overcloud/networking.rst delete mode 100644 docs/static/baremetal-overcloud/requirements.rst delete mode 100644 docs/static/baremetal-overcloud/validate-prior-to-deploy.rst delete mode 100644 docs/static/baremetal-overcloud/virtual-vs-baremetal-undercloud.rst delete mode 100644 docs/static/env-setup-virt.rst delete mode 100644 docs/static/ovb-openstack-cloud/calling-quickstart.rst delete mode 100644 docs/static/ovb-openstack-cloud/introduction.rst delete mode 100644 docs/static/ovb-openstack-cloud/running-quickstart-instances-openstack.rst delete mode 100644 galaxy.yml delete mode 100644 infrared_plugin/main.yml delete mode 100644 infrared_plugin/plugin.spec delete mode 120000 infrared_plugin/roles delete mode 100644 plugins/module_utils/test_utils.py delete mode 100644 plugins/modules/ara_graphite.py delete mode 100644 plugins/modules/ara_influxdb.py delete mode 100644 plugins/modules/flatten_nested_dict.py delete mode 100644 plugins/modules/sova.py delete mode 100644 requirements.txt delete mode 100644 requirements.yml delete mode 100644 roles/collect_logs/defaults/main.yml delete mode 100755 roles/collect_logs/files/collect-container-logs.sh delete mode 100644 roles/collect_logs/files/consolidate-avc.pl delete mode 100644 roles/collect_logs/files/heat-deploy-times.py delete mode 120000 roles/collect_logs/library delete mode 100644 roles/collect_logs/meta/main.yml delete mode 100644 roles/collect_logs/molecule/default/converge.yml delete mode 100644 roles/collect_logs/molecule/default/molecule.yml delete mode 100644 
roles/collect_logs/molecule/default/prepare.yml delete mode 100644 roles/collect_logs/molecule/default/verify.yml delete mode 100644 roles/collect_logs/molecule/infrared/converge.yml delete mode 100644 roles/collect_logs/molecule/infrared/molecule.yml delete mode 100644 roles/collect_logs/molecule/infrared/verify.yml delete mode 100644 roles/collect_logs/molecule/sova/converge.yml delete mode 100644 roles/collect_logs/molecule/sova/molecule.yml delete mode 100644 roles/collect_logs/molecule/sova/prepare.yml delete mode 100644 roles/collect_logs/molecule/sova/verify.yml delete mode 100644 roles/collect_logs/scripts/doc_extrapolation.awk delete mode 100644 roles/collect_logs/tasks/collect.yml delete mode 100644 roles/collect_logs/tasks/collect/container.yml delete mode 100644 roles/collect_logs/tasks/collect/monitoring.yml delete mode 100644 roles/collect_logs/tasks/collect/network.yml delete mode 100644 roles/collect_logs/tasks/collect/system.yml delete mode 100644 roles/collect_logs/tasks/create-docs.yml delete mode 100644 roles/collect_logs/tasks/main.yml delete mode 100644 roles/collect_logs/tasks/publish.yml delete mode 100644 roles/collect_logs/tasks/publish_ara.yml delete mode 100644 roles/collect_logs/tasks/publish_ara_graphite.yml delete mode 100644 roles/collect_logs/tasks/publish_ara_influxdb.yml delete mode 100644 roles/collect_logs/tasks/sanitize_log_strings.yaml delete mode 100644 roles/collect_logs/tasks/sova.yml delete mode 100644 roles/collect_logs/templates/full_logs.html.j2 delete mode 100644 roles/collect_logs/templates/index.rst.j2 delete mode 100644 roles/collect_logs/templates/odl_extra_logs.j2 delete mode 100644 roles/collect_logs/templates/rsync-filter.j2 delete mode 100644 roles/collect_logs/vars/family-redhat.yml delete mode 100644 roles/collect_logs/vars/infrared-collect-exclude-list.yml delete mode 100644 roles/collect_logs/vars/unsecure.yml delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 test-playbooks/zuul-ansible-role-collect-logs.yaml delete mode 100644 test-requirements.txt delete mode 100644 tests/sanity/ignore-2.10.txt delete mode 120000 tests/sanity/ignore-2.11.txt delete mode 120000 tests/sanity/ignore-2.12.txt delete mode 120000 tests/sanity/ignore-2.13.txt delete mode 120000 tests/sanity/ignore-2.9.txt delete mode 100644 tests/sanity/requirements.txt delete mode 100644 tests/unit/requirements.txt delete mode 100644 tests/unit/test_flatten_nested_dict.py delete mode 100644 tests/unit/test_sova.py delete mode 100644 tox.ini delete mode 100644 zuul.d/layout.yaml delete mode 100644 zuul.d/playbooks/pre.yml diff --git a/.ansible-lint b/.ansible-lint deleted file mode 100644 index fd5e9f9..0000000 --- a/.ansible-lint +++ /dev/null @@ -1,7 +0,0 @@ ---- -parseable: true -exclude_paths: - - infrared_plugin/main.yml -skip_list: - # Add skips here only as last resort, like: - - role-name diff --git a/.envrc b/.envrc deleted file mode 100644 index 56f8ba6..0000000 --- a/.envrc +++ /dev/null @@ -1,3 +0,0 @@ -source_up -export ANSIBLE_LIBRARY=./library -export PYTHONPATH=./library:$PYTHONPATH diff --git a/.gitignore b/.gitignore deleted file mode 100644 index f54033f..0000000 --- a/.gitignore +++ /dev/null @@ -1,76 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -sdist/ -var/ -container_registry.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a 
python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec -!infrared_plugin/plugin.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*,cover - -# Translations -*.mo -*.pot - -# Django stuff: -*.log - -# Sphinx documentation -doc/build/ - -# PyBuilder -target/ - -# virtualenv -.venv/ - -# jenkins config -jenkins/config.ini -playbooks/debug.yml - -# Files created by releasenotes build -releasenotes/build - -# Editors -.*.sw[klmnop] - -# ansible retry files -*.retry -ansible_role_collect_logs.egg-info - -# buit collection -*.tar.gz diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index c33331f..0000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,53 +0,0 @@ ---- -repos: - - repo: https://github.com/PyCQA/isort - rev: 5.11.5 - hooks: - - id: isort - - repo: https://github.com/python/black.git - rev: 22.3.0 - hooks: - - id: black - language_version: python3 - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: end-of-file-fixer - - id: trailing-whitespace - - id: mixed-line-ending - - id: check-byte-order-marker - - id: check-executables-have-shebangs - - id: check-merge-conflict - - id: debug-statements - - repo: https://github.com/pycqa/flake8.git - rev: 3.9.2 - hooks: - - id: flake8 - additional_dependencies: - - flake8-absolute-import - - flake8-black>=0.1.1 - language_version: python3 - - repo: https://github.com/ansible/ansible-lint.git - rev: v6.16.2 - hooks: - - id: ansible-lint - always_run: true - # do not add file filters here as ansible-lint does not give reliable - # results when called with individual files. - # https://github.com/ansible/ansible-lint/issues/611 - verbose: true - additional_dependencies: - - ansible-core - - yamllint - - repo: https://github.com/openstack-dev/bashate.git - rev: 2.0.0 - hooks: - - id: bashate - entry: bashate --error . --verbose --ignore=E006,E040 - # Run bashate check for all bash scripts - # Ignores the following rules: - # E006: Line longer than 79 columns (as many scripts use jinja - # templating, this is very difficult) - # E040: Syntax error determined using `bash -n` (as many scripts - # use jinja templating, this will often fail and the syntax - # error will be discovered in execution anyway) diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 67db858..0000000 --- a/LICENSE +++ /dev/null @@ -1,175 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 000493d..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,11 +0,0 @@ -global-exclude __pycache__ -exclude .benchmarks -exclude .eggs -exclude .mypy_cache -exclude .pytest_cache -exclude .quickstart -exclude .tox -exclude infrared_plugin -exclude plugins -exclude test-playbooks -exclude zuul.d diff --git a/README.rst b/README.rst index 3f4da3d..4ee2c5f 100644 --- a/README.rst +++ b/README.rst @@ -1,323 +1,10 @@ -collect_logs -============ +This project is no longer maintained. -Ansible role for aggregating logs from different nodes. +The contents of this repository are still available in the Git +source code management system. To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". -The only supported way to call this role is using its main entry point. Do not -use ``tasks_from`` as this counts as using private interfaces. - -Requirements ------------- - -This role gathers logs and debug information from a target system and -collates them in a designated directory, ``artcl_collect_dir``, on the -localhost. - -Additionally, the role will convert templated bash scripts, created and -used by TripleO-Quickstart during deployment, into rST files. These rST -files are combined with static rST files and fed into Sphinx to create -user-friendly post-build documentation specific to the original -deployment.
- -Finally, the role optionally handles uploading these logs to an rsync -server or to OpenStack Swift object storage. Logs from Swift can be -exposed with -`os-loganalyze `__. - -Role Variables -------------- - -File Collection -~~~~~~~~~~~~~~~ - -- ``artcl_collect_list`` – A list of files and directories to gather - from the target. Directories are collected recursively and need to - end with a '/' to get collected. Should be specified as a YAML list, - e.g.: - -.. code:: yaml - - artcl_collect_list: - - /etc/nova/ - - /home/stack/*.log - - /var/log/ - -- ``artcl_collect_list_append`` – A list of files and directories to be - appended to the default list. This is useful for users who want to - keep the original list and just add more relevant paths. -- ``artcl_exclude_list`` – A list of files and directories to exclude - from collecting. This list is passed to rsync as an exclude filter - and it takes precedence over the collection list. For details see the - 'FILTER RULES' topic in the rsync man page. -- ``artcl_exclude_list_append`` – A list of files and directories to be - appended to the default exclude list. This is useful for users who want to - keep the original list and just add more relevant paths. -- ``artcl_collect_dir`` – A local directory where the logs should be - gathered, without a trailing slash. -- ``collect_log_types`` - A list of which types of logs will be collected, - such as openstack logs, network logs, system logs, etc. - Acceptable values are system, monitoring, network, openstack and container. -- ``artcl_gzip``: Archive files, disabled by default. -- ``artcl_rsync_collect_list`` - If true, an rsync filter file is generated for - ``rsync`` to collect files; if false, ``find`` is used to generate the list - of files to collect for ``rsync``. ``find`` brings some benefits like - searching for files in a certain depth (``artcl_find_maxdepth``) or up to a - certain size (``artcl_find_max_size``). -- ``artcl_find_maxdepth`` - Number of levels of directories below the starting - points, default is 4. Note: this variable is applied only when - ``artcl_rsync_collect_list`` is set to false. -- ``artcl_find_max_size`` - Max size of a file in MBs to be included in find - search, default value is 256. Note: this variable is applied only when - ``artcl_rsync_collect_list`` is set to false. - -- ``artcl_commands_extras`` - A nested dictionary of additional commands to be - run during collection. The first level contains the group type, as defined by - the ``collect_log_types`` list, which determines which groups are collected and - which ones are skipped. - - Defined keys will override implicit ones from the default - ``artcl_commands``, which is not expected to be changed by the user. - - Second-level keys are used to uniquely identify a command and determine the - default output filename, unless one is specified via the ``capture_file`` property. - - ``cmd`` contains the shell command that would be run. -..
code:: yaml - - artcl_commands_extras: - system: - disk-space: - cmd: df - # will save output to /var/log/extras/disk-space.log - mounts: - cmd: mount -a - capture_file: /mounts.txt # <-- custom capture file location - openstack: - key2: - cmd: touch /foo.txt - capture_disable: true # <-- disable implicit std redirection - when: "1 > 2" # <-- optional condition - -Documentation generation related -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -- ``artcl_gen_docs``: false/true – If true, the role will use build - artifacts and Sphinx and produce user-friendly documentation - (default: false) -- ``artcl_docs_source_dir`` – A local directory that serves as the - Sphinx source directory. -- ``artcl_docs_build_dir`` – A local directory that serves as the - Sphinx build output directory. -- ``artcl_create_docs_payload`` – Dictionary of lists that direct what - and how to construct documentation. - - - ``included_deployment_scripts`` – List of templated bash scripts - to be converted to rST files. - - ``included_static_docs`` – List of static rST files that - will be included in the output documentation. - - ``table_of_contents`` – List that defines the order in which rST - files will be laid out in the output documentation. - -- ``artcl_verify_sphinx_build`` – false/true – If true, verify items - defined in ``artcl_create_docs_payload.table_of_contents`` exist in - the Sphinx-generated index.html (default: false) - -.. code:: yaml - - artcl_create_docs_payload: - included_deployment_scripts: - - undercloud-install - - undercloud-post-install - included_static_docs: - - env-setup-virt - table_of_contents: - - env-setup-virt - - undercloud-install - - undercloud-post-install - -Publishing related -~~~~~~~~~~~~~~~~~~ - -- ``artcl_publish``: true/false – If true, the role will attempt to - rsync logs to the target specified by ``artcl_rsync_url``. Uses - ``BUILD_URL``, ``BUILD_TAG`` vars from the environment (set during a - Jenkins job run) and requires the next two variables to be set. -- ``artcl_txt_rename``: false/true – rename text-based files to end in - .txt.gz to make upstream log servers display them in the browser - instead of offering them for download -- ``artcl_publish_timeout``: the maximum seconds the role can spend - uploading the logs, the default is 1800 (30 minutes) -- ``artcl_use_rsync``: false/true – use rsync to upload the logs -- ``artcl_rsync_use_daemon``: false/true – use rsync daemon instead of - ssh to connect -- ``artcl_rsync_url`` – rsync target for uploading the logs. The - localhost needs to have passwordless authentication to the target or - the ``PROVISIONER_KEY`` var specified in the environment. -- ``artcl_use_swift``: false/true – use swift object storage to publish - the logs -- ``artcl_swift_auth_url`` – the OpenStack auth URL for Swift -- ``artcl_swift_username`` – OpenStack username for Swift -- ``artcl_swift_password`` – password for the Swift user -- ``artcl_swift_tenant_name`` – OpenStack tenant (project) name for Swift -- ``artcl_swift_container`` – the name of the Swift container to use, - default is ``logs`` -- ``artcl_swift_delete_after`` – The number of seconds after which - Swift will remove the uploaded objects, the default is 2678400 - seconds = 31 days. -- ``artcl_artifact_url`` – An HTTP URL at which the uploaded logs will - be accessible after upload. -- ``artcl_report_server_key`` - A path to a key for access to the report - server. - - -Ara related -~~~~~~~~~~~ - -- ``ara_enabled``: true/false - If true, the role will generate ara reports.
-- ``ara_overcloud_db_path``: Path to the ara overcloud database (tripleo only). -- ``ara_generate_html``: true/false - Generate ara html. -- ``ara_graphite_prefix``: Ara prefix to be used in graphite. -- ``ara_only_successful_tasks``: true/false - Send to graphite only successful - tasks. -- ``ara_tasks_map``: Dictionary with ara tasks to be mapped to graphite. - -Logs parsing -~~~~~~~~~~~~ -The "sova" module parses logs for known patterns and returns messages that were -found. Patterns are tagged by issue types, like "infra", "code", etc. -Patterns are located in the file sova-patterns.yml in the vars/ directory. - -- ``config`` - patterns loaded from the file -- ``files`` - files and patterns sections match -- ``result`` - path to the file to write the result of parsing -- ``result_file_dir`` - directory to write a file with the patterns in its name - -Example of usage of "sova" module: -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: yaml - - --- - - name: Run sova task - sova: - config: "{{ pattern_config }}" - files: - console: "{{ ansible_user_dir }}/workspace/logs/quickstart_install.log" - errors: "/var/log/errors.txt" - "ironic-conductor": "/var/log/containers/ironic/ironic-conductor.log" - syslog: "/var/log/journal.txt" - logstash: "/var/log/extra/logstash.txt" - result: "{{ ansible_user_dir }}/workspace/logs/failures_file" - result_file_dir: "{{ ansible_user_dir }}/workspace/logs" - - -Example Role Playbook ---------------------- - -.. code:: yaml - - --- - - name: Gather logs - hosts: all:!localhost - roles: - - collect_logs - -** Note: - The tasks that collect data from the nodes are executed with ignore_errors. - For `example: `__ - -Templated Bash to rST Conversion Notes -------------------------------------- - -Templated bash scripts used during deployment are converted to rST files -during the ``create-docs`` portion of the role's call. Shell scripts are -fed into an awk script and output as reStructuredText. The awk script -has several simple rules: - -1. Only lines between ``### ---start_docs`` and ``### ---stop_docs`` - will be parsed. -2. Lines containing ``# nodoc`` will be excluded. -3. Lines containing ``## ::`` indicate subsequent lines should be - formatted as code blocks. -4. Other lines beginning with ``## `` will have the - prepended ``##`` removed. This is how and where general rST - formatting is added. -5. All other lines, including shell comments, will be indented by four - spaces. - - -Enabling sosreport Collection ----------------------------- - -`sosreport `__ is a unified tool for -collecting system logs and other debug information. To enable creation -of sosreport(s) with this role, create a custom config (you can use -centosci-logs.yml as a template) and ensure that -``artcl_collect_sosreport: true`` is set. - - -Sanitizing Log Strings ---------------------- - -Logs can contain sensitive data such as private links and access -passwords. The 'collect' task provides an option to replace -private strings with sanitized strings to protect private data. - -The 'sanitize_log_strings' task makes use of the Ansible 'replace' -module and is enabled by defining a ``sanitize_lines`` -variable as shown in the example below: - -..
code:: yaml - - --- - sanitize_lines: - - dir_path: '/tmp/{{ inventory_hostname }}/etc/repos/' - file_pattern: '*' - orig_string: '^(.*)download(.*)$' - sanitized_string: 'SANITIZED_STR_download' - - dir_path: '/tmp/{{ inventory_hostname }}/home/zuul/' - file_pattern: '*' - orig_string: '^(.*)my_private_host\.com(.*)$' - sanitized_string: 'SANITIZED_STR_host' - - -The task searches for files containing the sensitive strings -(orig_string) within a file path, and then replaces the sensitive -strings in those files with the sanitized_string. - - -Usage with InfraRed -------------------- - -Run the following steps to execute the role with -`infrared `__. - -1. Install infrared and add ansible-role-collect-logs plugin by providing - the url to this repo: - - .. code-block:: - - (infrared)$ ir plugin add https://opendev.org/openstack/ansible-role-collect-logs.git --src-path infrared_plugin - -2. Verify that the plugin is imported by: - - .. code-block:: - - (infrared)$ ir plugin list - -3. Run the plugin: - - .. code-block:: - - (infrared)$ ir ansible-role-collect-logs - -License -------- - -Apache 2.0 - -Author Information ------------------- - -RDO-CI Team +For any further questions, please email +openstack-discuss@lists.openstack.org or join #openstack-dev on +OFTC. diff --git a/ansible.cfg b/ansible.cfg deleted file mode 100644 index a5c73d8..0000000 --- a/ansible.cfg +++ /dev/null @@ -1,12 +0,0 @@ -[defaults] -gathering = smart -retry_files_enabled = False -callbacks_enabled = profile_tasks - -# Attempt to load custom modules whether it's installed system-wide or from a virtual environment -roles_path = roles:$VIRTUAL_ENV/share/ansible/roles:$VIRTUAL_ENV/usr/local/share/ansible/roles:$VIRTUAL_ENV/usr/share/ansible/roles - -# Required by infrared -host_key_checking = False -forks = 500 -timeout = 300 diff --git a/bindep.txt b/bindep.txt deleted file mode 100644 index 679426b..0000000 --- a/bindep.txt +++ /dev/null @@ -1 +0,0 @@ -python3-yaml [platform:ubuntu] diff --git a/docs/doc-requirements.txt b/docs/doc-requirements.txt deleted file mode 100644 index d540661..0000000 --- a/docs/doc-requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -# Doc requirements -sphinx>=1.1.2,!=1.2.0,!=1.3b1 -oslosphinx>=2.2.0 # Apache-2.0 -sphinx_rtd_theme==0.1.7 -ansible-core>=2.11,<2.12 diff --git a/docs/source/_custom/custom.css b/docs/source/_custom/custom.css deleted file mode 100644 index 7aa7f8d..0000000 --- a/docs/source/_custom/custom.css +++ /dev/null @@ -1,226 +0,0 @@ -@import url("css/theme.css"); -@import url("rdo_styling.css"); - -/* CUSTOM CSS OVERRIDES GO HERE */ -/* ============================ */ - - -/* LAYOUT */ -.wy-nav-side { - overflow: visible; -} - -.wy-side-nav-search { - margin-bottom: 0; -} - -.wy-nav-content-wrap { - background: white; -} - -.wy-nav-content { - max-width: 100%; - box-sizing: border-box; -} - -.rst-content .section ol li p.first:last-child { - margin-bottom: 24px; -} - - -/* LOGO */ -.wy-side-nav-search a { - margin-bottom: 5px; -} - -.wy-side-nav-search img { - background: none; - border-radius: 0; - height: 60px; - width: auto; - margin: 0; -} - - -/* TYPOGRAPHY */ -p { - margin-bottom: 16px; -} - -p + ul, p + ol.simple { - margin-top: -12px; -} - -h1, h2, h3, h4, h5, h6, p.rubric { - margin-top: 48px; -} - -h2 { - border-bottom: 1px solid rgba(0, 0, 0, 0.2); -} - - -/* BREADCRUMBS */ - -.wy-breadcrumbs { - font-size: 85%; - color: rgba(0, 0, 0, 0.45); -} - -.wy-breadcrumbs a { - text-decoration: underline; - color: inherit; -} - -.wy-breadcrumbs a:hover, 
-.wy-breadcrumbs a:focus { - color: rgba(0, 0, 0, 0.75); - text-decoration: none; -} - - -/* FOOTER */ - -footer { - font-size: 70%; - margin-top: 48px; -} - -footer p { - font-size: inherit; -} - - -/* NOTES, ADMONITTIONS AND TAGS */ -.admonition { - font-size: 85%; /* match code size */ - background: rgb(240, 240, 240); - color: rgba(0, 0, 0, 0.55); - border: 1px solid rgba(0, 0, 0, 0.1); - padding: 0.5em 1em 0.75em 1em; - margin-bottom: 24px; -} - -.admonition p { - font-size: inherit; -} - -.admonition p.last { - margin-bottom: 0; -} - -.admonition p.first.admonition-title { - display: inline; - background: none; - font-weight: bold; - color: rgba(0, 0, 0, 0.75); -} - -/* notes */ -.rst-content .note { - background: rgb(240, 240, 240); -} - -.note > p.first.admonition-title { - display: inline-block; - background: rgba(0, 0, 0, 0.55); - color: rgba(255, 255, 255, 0.95); -} - -/* optional */ -.rst-content .optional { - background: white; -} - -/* tags */ -.rhel {background: #fee;} -.portal {background-color: #ded;} -.satellite {background-color: #dee;} -.centos {background: #fef;} -.baremetal {background: #eef;} -.virtual {background: #efe;} -.ceph {background: #eff;} - -/* admonition selector */ -#admonition_selector { - color: white; - font-size: 85%; - line-height: 1.4; - background: #2980b9; - border-top: 1px solid rgba(255, 255, 255, 0.4); -} - -.trigger { - display: block; - font-size: 110%; - color: rgba(255, 255, 255, 0.75); - line-height: 2.5; - position: relative; - cursor: pointer; - padding: 0 1.618em; -} - -.trigger:after { - content: ''; - display: block; - font-family: FontAwesome; - font-size: 70%; - position: absolute; - right: 1.618em; - top: 6px; -} - -.trigger:hover { - color: white; -} - -.content { - display: none; - border-top: 1px solid rgba(255, 255, 255, 0.1); - background: rgba(255, 255, 255, 0.1); - padding: 0.5em 1.618em; -} - -.displayed .trigger:after { - content: ''; -} - -#admonition_selector .title { - color: rgba(255, 255, 255, 0.45); -} - -#admonition_selector ul { - margin-bottom: 0.75em; -} - -#admonition_selector ul li { - display: block; -} - -#admonition_selector label { - display: inline; - color: inherit; - text-decoration: underline dotted; -} - - -/* LINKS */ -a.external:after { - font-family: FontAwesome; - content: ''; - visibility: visible; - display: inline-block; - font-size: 70%; - position: relative; - padding-left: 0.5em; - top: -0.5em; -} - - -/* LIST */ -.wy-plain-list-decimal > li > ul, -.rst-content .section ol > li > ul, -.rst-content ol.arabic > li > ul, -article ol > li > ul { - margin-bottom: 24px; -} diff --git a/docs/source/_custom/rdo_styling.css b/docs/source/_custom/rdo_styling.css deleted file mode 100644 index df9be63..0000000 --- a/docs/source/_custom/rdo_styling.css +++ /dev/null @@ -1,208 +0,0 @@ -/* general settings */ -body { - font-family: "Open Sans", Helvetica, Arial, sans-serif; - font-weight: 300; - font-size: 16px; -} - - -/* remove backgrounds */ -.wy-nav-content, -.wy-body-for-nav, -.wy-nav-side, -#admonition_selector { - background: none !important; - color: black !important; -} - - -/* page header */ -.wy-side-nav-search, -.wy-nav-top { - background: rgba(0, 0, 0, 0.05) !important; -} - -.wy-nav-top { - line-height: 40px; - border-bottom: 1px solid rgba(0, 0, 0, 0.1); -} - -.wy-side-nav-search a, -.wy-nav-top a, -.wy-nav-top i { - color: rgb(160, 0, 0) !important; -} - -.wy-nav-top i { - position: relative; - top: 0.1em; -} - -.wy-side-nav-search input[type="text"] { - border-color: rgba(0, 0, 0, 
0.25); -} - - -/* sidebar*/ -.wy-nav-side { - border-right: 1px solid rgba(0, 0, 0, 0.2); -} - - -/* admonition selector */ -#admonition_selector { - border-top: 0 none !important; -} - -.trigger { - color: rgba(0, 0, 0, 0.7) !important; - border-top: 1px solid rgba(0, 0, 0, 0.2); - border-bottom: 1px solid rgba(0, 0, 0, 0.2); - background: rgba(0, 0, 0, 0.05); -} - -.trigger:hover { - color: rgba(0, 0, 0, 0.9) !important; -} - -.content { - border-top: 0 none !important; - border-bottom: 1px solid rgba(0, 0, 0, 0.2) !important; - background: rgba(0, 0, 0, 0.025) !important; -} - -#admonition_selector .title { - color: rgba(0, 0, 0, 0.6) !important; -} - - -/* menu */ -.wy-menu li a, -.wy-menu-vertical li a { - font-size: 100%; - line-height: 1.6; - color: rgb(80, 80, 80); -} - -.wy-menu-vertical li a:hover, -.wy-menu-vertical li a:focus, -.wy-menu-vertical li.current a:hover, -.wy-menu-vertical li.current a:focus { - color: black; - text-decoration: underline; - background: none; -} - -.wy-menu-vertical li.current, -.wy-menu-vertical li.current a { - border: 0 none; - color: rgb(80, 80, 80); - font-weight: inherit; - background: none; -} - -/* level-1 menu item */ -.wy-menu-vertical li.toctree-l1.current > a, -.wy-menu-vertical li.toctree-l1.current > a:hover, -.wy-menu-vertical li.toctree-l1.current > a:focus { - background: rgb(230, 230, 230); -} - -.wy-menu li.toctree-l1 > a:before { - font-family: FontAwesome; - content: ""; - display: inline-block; - position: relative; - padding-right: 0.5em; -} - -/* level-2 menu item */ -.toctree-l2 { - font-size: 90%; - color: inherit; -} - -.wy-menu-vertical .toctree-l2 a { - padding: 0.4045em 0.5em 0.4045em 2.8em !important; -} - -.wy-menu-vertical li.toctree-l2.current > a, -.wy-menu-vertical li.toctree-l2.current > a:hover, -.wy-menu-vertical li.toctree-l2.current > a:focus, -.wy-menu-vertical li.toctree-l2.active > a, -.wy-menu-vertical li.toctree-l2.active > a:hover, -.wy-menu-vertical li.toctree-l2.active > a:focus { - background: rgb(242, 242, 242); -} - -.wy-menu li.toctree-l2 > a:before { - font-family: FontAwesome; - content: ""; - font-size: 30%; - display: inline-block; - position: relative; - bottom: 0.55em; - padding-right: 1.5em; -} - - -/* typography */ -h1 { - color: rgb(160, 0, 0); - font-weight: 300; - margin-top: 36px !important; -} - -h3 { - font-size: 135%; -} - -h2, h3, h4, h5 { - font-weight: 200; -} - - -a, a:visited { - color: #2275b4; - text-decoration: none; -} - -a:hover, a:focus { - color: #1c6094; - text-decoration: underline; -} - -.rst-content .toc-backref { - color: inherit; -} - -strong { - font-weight: 600; -} - - -/* code */ -.codeblock, -pre.literal-block, -.rst-content .literal-block, -.rst-content pre.literal-block, -div[class^="highlight"] { - background: rgba(0, 0, 0, 0.05); - color: black; -} - - -/* notes */ -.admonition { - color: rgba(0, 0, 0, 0.5) !important; - font-weight: 400; -} - -.rst-content .note { - background: none !important; -} - -.note > p.first.admonition-title { - background: rgba(0, 0, 0, 0.5) !important; - color: rgba(255, 255, 255, 0.9) !important; -} diff --git a/docs/source/_templates/layout.html b/docs/source/_templates/layout.html deleted file mode 100644 index 7170ae5..0000000 --- a/docs/source/_templates/layout.html +++ /dev/null @@ -1 +0,0 @@ -{% extends "!layout.html" %} diff --git a/docs/source/conf.py b/docs/source/conf.py deleted file mode 100644 index 579ee37..0000000 --- a/docs/source/conf.py +++ /dev/null @@ -1,148 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under 
the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# flake8: noqa - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -# instack-undercloud documentation build configuration file, created by -# sphinx-quickstart on Wed Feb 25 10:56:57 2015. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. -from __future__ import absolute_import, division, print_function - -import sphinx_rtd_theme - -__metaclass__ = type -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [] - -html_theme = "sphinx_rtd_theme" -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] -html_theme_options = {} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix of source filenames. -source_suffix = ".rst" - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = "TripleO" -copyright = "2016, RDO CI Team" -bug_tracker = "Bugzilla" -bug_tracker_url = "https://bugzilla.redhat.com" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = "3.0.0" -# The full version, including alpha/beta/rc tags. -release = "3.0.0" - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. 
-# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -html_static_path = ["_custom"] -html_style = "custom.css" -html_last_updated_fmt = "%b %d, %Y" - -# Output file base name for HTML help builder. -htmlhelp_basename = "tripleo-documentor" - -html_show_sourcelink = True -html_show_sphinx = True -html_show_copyright = True - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', -} - -rst_prolog = """ -.. |project| replace:: %s -.. |bug_tracker| replace:: %s -.. |bug_tracker_url| replace:: %s -""" % ( - project, - bug_tracker, - bug_tracker_url, -) diff --git a/docs/static/baremetal-overcloud/_images/TripleO_Network_Diagram_.jpg b/docs/static/baremetal-overcloud/_images/TripleO_Network_Diagram_.jpg deleted file mode 100644 index 72d9f3a9ef4acf1a3375b0b1a3b63d17d6c0ac0d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001
zU9QM^J!J-B_!p=r1GuLMF4=*sZ-M(NAv@Xo=ci8KqWt>x$qmG48=#}+H~NeMg*g6k z{p3>>^XDfpri)VYHF#k1rDQw8Eg8P?qa6U$`mN@!I-qI&VHZN9f+^%eA6y}Fu^{^G%_>;RxwnqJu1(g^d8t#7mPt(`p0 z%U|pLZIVhY>yNe%-kcbLd(TO~&H0#Hsk146jLeh4X$wwNV1oZ@z&Y7^oLwcW5<~M6 zUtTA^WRa1X9*7i*;$*`-tPq+Imau{RM#8@y_({awdwxyBr9Od zO;*pJ)c%#H{$Ei0#{vZ|fBeQozb;OZ(>xA@s~ZOYU)EZd#Iqrh8rWa>m^uvOruadLpsE&Z-?&7q8tdqiwABbq5_w71#dwv z_cCCeUtD-7Py&V?^DnclN0de0D|TcbWG&L>QOu(yX=csz&dHPwIOdmS>oZzpMt}d< znmvE~m`?(c$9!^u3;|++P+i*b95Nryb$qhQo*bdq*9#Yhn&RhZ8`| z^uR~U2O%4fgJ47Q$z0@MfyQd$)~>r!C`PJCvrFyk;oMa z1#lbzB<_!z9M|ELQSDdAYw%xQf3Ik2V*dOH#)O#ZfvmSLQVsxDLch^R^7>H2%3> z{($u&S6xt5V5^!z_UKJG=8Rl7S8Jn&Tuv?F(d#oFij%iLiPlYUTx-tByOb3Hk zh+M7WkoCBAyFx7Y;G!6r~)*}Dy+XvdH1{g4*w_4 zYki^%b<0ymT($gr&8*oq_GY9O8sgCc%&pdNoG?!7J>>-%D!$v7>`zfV^H&@e% zJ=wYX33ILL&qSTvQkEdT{rbL2YXLeR$3!P|l_dqq+Tu0937WGoRmss4K3eh6_7@{; z)k-$oJRMPdAMeYBkFub9O3FFgLMUxSv;Lif$8J z!{SQ7R!6ygg9|MYkL0M?7`fk)&Z5bSp2{Cj&N*zTVv_fHzLC>$6}flf@O^IA6qY$P zP1-8dPdFh-_t6qMsi5t6nu^sUC$-c9gSM`GCiy3%5;A)z78?T%ukyMe6>}?dt zCLv{1)4VDw>Nm!X;MLLTd3@W&_d*RduqsC2l#6V`Jx|ZWHj5|x45T-#hX8~ zj;&|~+2h(0;;oHqFpTo5E(YNkk&%fzl!(%in(pmsTb$wS_PiIMM9&UKBZo}fk=v=! z&e$Nkd*B^>#!biVo%lBN;plRS!s+QI(L$XPzI8Cu4V3kS1Z?@FnkvnYXLeE4A2~Xc z=Z#c^YV4V%-rRRK{xDm1JoB=E_fEwL9-uI_lCcl<1(A+=e>mfAT@NwGFn@eIG2pYK zzf(AcK5{S?s3QSeD5)Zsqa}{gOMSG2UZbt;=ylof`0EmGOM{3-G2Mw5{&r=nZPvc+ z=48fE8o5c&vu0s>mYEtthV<|KbIeSmp1X@NJipF~wu+0iDcy_P+9E^w$uy|cV`|Q4 z;P44A%x%CFJ^??dhSxUR%`9_n`=8~37ti3X4&8`0o}eo{Oe+nhf>9kOcsXFGlzNy!8Yq^11r>Lru<)e zK~D27(i*as8L$8ZmC=QF@P!RD2wTM36r-@iK9Pk5i{OnPmeeclT0;#Naf3ZL`Kh=; zO6k~xK<7zft}-#gJ4^tS!AIg1b2ThL_EE_3=rw*F$v9 z>!HuMlSup@_oE#YxX9*jL{%H|Q}{@P8v7CM%8ynG9JICGbn=*&B+~39S)v$pH zvS(GqT%5<-DlaN+o!$32k+^RfXK2p)@dkTO0%@NAj-;?7K^xb#%0Z~cm5nVFH`0v= z(N8666JFH|dl#e6Q?}pE!+xpHinMLhk>=|F@6ZvuOJydEqC-L^QfnEsBz4<33X!V2 z=x|(Un08~#aK9yBy<>y?tqG+8y0FrE*uap>LqpZYt7@rx;ZLfo8Of{*>jobQJ1-2N zsWT*NUtdwM)=+J?W$Llu%_2qkN=iwN>eV=?&uqlsb}?V#dy7f4MoM$b@um_%PnI@> z{;eMM8(vht{#S}393N{`69vh6~eNk-bTzV641|NN-<=mxs-=WvUgpapD`0W?=3 zOCstIF0y&BWLZyQ(?|5guSRr=qr`ZdSrZ;pYM~iWQc@jQ4`|2DeUA{y320D%d)36^ z=s(8qIq&g69|f?c zH+}_~s9wk|Lskr>R?mwUtjW(wWxj&SA?)_!il=s}{Nwi5>)^XbIyb9HE#otirVxDx z%%#w`-!0XBG zh#ZTN-5uc}F;|lhMFicfSs=?%>$tknmAHp>)dy(44+mIjw~dXZa0)`fY!yRk`Zo?e zfOQ30Fe7tc+Ye&pqiuFV^J1+yo@C6?8cRN+uSsN$q-wwaR_;La2gaje#wz@`55ONvCOsT$xadBb_=Hi( zV4f~R=k6Zl=AS13pz2P4TNIV=BsQ;kj6}rg{M=|Y`f-&hw#L#O5=^VClgYg zEhzBELy4uwc)=+Q@rG|6RcFpi=)~9Pm6Az{_uM%D!tO4xWj*H3_sI8XaaqFpITSTD z9Tqh&#XiQ(805{x1~oPZ?-FYz^5v1y!>$Q=+r7N z!4o%NqEijS+A9W2fDg+TI)4uq)It>Bynk0oogp4IIaRaK)b8LWZ>6?gz^61xxzL8& zV$1wBPNN@c@R8>;Kga-ux?Tr4jglJomCTbGVK3b>#afm<7)!RyXl9() zW35yUB@D4Q4d*DF61HgYU+`C;ZtjJs4Y|-RTDzE}4V}zlKuvrUN(rVOP)aSd{?k+bpPkCE%s90wf{BA?^zW)T{T)n=A4PI@&<+e>66 zReVXRhIWwY`sw5sRplWQO(e-oAxc`ioWd(M9oxNnMjF)7P5fZ5*HGEXx`Zj@7llfw za#7E14~8rBN{%`mC>SYb3?+*iVsUc1hvxYal`#B0z>Y(~Vof$zN@%wDr{ zj4l7pq!g|g*%zlWC@vaZ8k?$*|#W^jWRHFV=9ZPzPnVdnC zYaPwE5o(vcG;6$Av{fGAq~;`*$cK0b) zwxJ?Xmpw)v&B&8|2K^;S@J*q9?;1hO`00odjp>;QZ>2OJySA>vhf2((eb(ygAiO4r zGXyvX$RF?pVLLm1b9o+yCUkXXZ6Fex)TkRE6_J2_hkF}QjlDvf zlPA>xIm7~jf8{lwqi@ry@&)@FVkAp z*aaH9;=W}6C|Itt3fT)jz7lHza-qbW1=!rmzvEz=v!m}zJdy6ZHk*c$2Ako;479Ke zq$Qhv29-=*HYIQ2?ikulEbv|;FPiE)T7)3m?paoNmN<`Qgc6tOrPoD zY1uDIqV+KQBk(rlUZdS*@L0mU+oH(b=N{!cQD5pw+^t#}MJINfCY18M8Iy$|sSn#D zkHuOm>^sl1j4$3P7CkqlNqn^{rWa|Qb&XRaQK+EI>QFzO9C%JR%y;%Rb{Z8EZW z$?9m~g19w#U-4+dRBckgQAkPF3VY6u6x3%OwX>0ch2P##&i!DUn?Hp24$NAnNGEVk zD}N?QN$cTUy>w5PXRKdASaV{iehj%u)6sWLDXXVvRd=Pkal7~xDOVrV zvH*!JT%R|(-5;xAUZPcv-*F}mc!}#h)HlkE3%V`l_~Ok(d>Sfk@;IE&K5MIvu*y>u 
zk2SlQcX7hAd$nlM0!vcLMNwP`WXb>Vz$`g;+o^Z8^jM*A-hlE|K%?m;^S5SwnnNw072Wh`p;h3CE2ws}HfM{#lh% zSg*f=Zm;KpEScf#q5Vs5x5pxH2jGaV8|El~c*2^6HwhwWHkENCS;v#?ZzOv(RHA!u z5>W3GHOC@@sb^5GCW3Fp@#1whCyBltw_fd|(@i=P28InTYX+4w&uV0%7oiwzgpYq#mcOPt^P;E)Mrl>h z!O45f#s{((uS2h`f$1j0-o*9h(O-}1Q!D(~^6GuVxs?^}GG!br7n8c#J__6Kr|Ky2eqQ7Q>p;of1w^%$>6}jX$~7JhsiXM z&oFVG^BvjKTlZxjadw(0rVk2`$ZqgI7^o4-oQGN3Oh>CLb~`54h9@z-%DU_HNw!iH znYNwMOGN{9z`TZI(ZFW*DdDyLa|UlsRbWZ$|2)~M2b|A<3} zp?;0>a__r@50{yful3wO-oGt;vyVq!JO~?B-=a|E7L;EWh81hdE32-i(zX9^J>ogd zJDO|P1gOJye;J!K;@&`BiP*_mW#~Pkz+Czr4P|O07U2YTfo5IoSAj9(f`D`6Z=pW_ zdof?{qP4{zGJ>ycRMu+l0{N5Ct_saJ)fof8D;kiCjnZBRYJ@QM<=gq8bYRP%dKr8@an7N9ts8HGGNMn}j+dH~1+NuaKJgJ#! zN<4tW>eS7hn~e={Om5!`+3h~+mj^g%hTQG&hr+~HvZ@`mp>#U|XcrYrR5~;kfH|XA zPs2gHP1eC#u#_W8el9u5$k-CIwWo+?M!T1g_&nGVzw|#-ju8GZ>MOa4Cmv1jrJthfoNL>S{9l8+#1EnLbSeQ zx6fX3fZ0Kmps|_EIuNw2Jk{U4tAfD3{UN=`5|Z7RA!1HjAIWQ(<%L^8f=WBhmt8wj z%2`sO6{?gTEWvB>*qdeLY0NdtK@sbzyaBYh2?r4s2*HNROc@bbxNbUq6Y(wlw<4cE z&sGR020nv&xMNduX5MYMh=LP`HLf{J?V^^4-hXs@STuxBvfIbxN?Qlyp*cKiyr|)S zI>PqZruVL*_=J|Is!WKN(&ur*8|>_I@zDY4;MlkI-exrq(>Af4b2iH^R#awNPVeDR z`WeDkO$R$YE|a?B)=@C-YJ}v&f?ger`l=hr7TqKf!T_>}JhC2?Mx!%E2NN0)?T)BC zDoxQ;X4{-bAsY4W*b_zI&4yD?90`!5<_4VkA?|NSIJM5cD->HC)@MA`E(w|X@~|uK z6xn(Ke+6qsp)~Tni_HDuq0xgm^j6`&8q1(%;-|FI6sJ5##J^wU2U(Oy6Yx z3eq1op*KnB+h>@<3$WgP50b9}t>p4DI-B3~$6GC@R*79HZ|3MA!t*y@4iwysqseyQ zdCh<4z%6WD0@wl00dL>TAXA8418Up-A9xl_YmgAnJ|76Vp0!@kLh~L2HtI z%e4d1$}j#gZgtWqc7OeHPrH zpD(Ks;GC;scu^ObCbj08EaQkXZ?vL&yISXTfoL!Ri&1FMMPQDiw0?xHgpX!uiUW0C zN)U+6jbX+xm?=aOd@n3cl9r*nVQNJaJHLjJF3L&F)WVE4x?;u3{(P6^3zB9ehQKnf zKJ?i4F>?1l|2+*g!#Uhi@ow8o97pcPhxJ3rhB$_JZW`7P?)GtNALnlO-qjp(QO(${ zBNM+>aU!~;S}oqGf>R`1Ek8J_8jy~1XBtSm9-FkS5Ow9Hq3jqZ(@TZsw6G`gc-~tu z0gD|Qv}K4DYia3d$dKgnj4fAi+$xQ?Op1ZwCg*j5zNei_rEEdhpXWPtNr*`MPPwv3 z5FVcf6zO0Mna$K_xm2!@tW{k+M#gS(aw|;PVBVbS9pSK=D)c4(JP{Fb=M2N$!>1jM za5LseiCO@v5V9m;wfx+aey8+{j5hsnRi`n9fWMvl^FSnI+Rc6SQ7eu6Ft0a@(a{i995?CxBDZysw(}1 zo6~MIc4jfbnQ9Tul~Xp|v`5*GG7YuSizfxTs$Gh1pm?L~QD7w_`)-6SvB$7pW?#lq zY7~{0y|9bemvjg(=T1OZ^|WdZdKI-}fc(Djpsz4fvumn1A|TB$*O0zg#juOYaZ0eH zSadM+H4^8o?arL+{+kYS`%SkUM0=B6eFvKSTy%oPc%`|+Wuk8gH~GiJEYDL8ZzS9> zTey_}bb48KURY$-W*Oppl*`i?_qbq)my4zD6I&3rdASEl5rNt67d$1d=}(YIN{~ko zfLc684jm|=f2)Z%;}EaO9z4tTxv;FbBqT*NX};Ev2dUBzX#fPuD+7EMfxJxh&?|kw zlOKprvsWQS1{_aL;;*cMq`*1bSACj{Jy1@ct0uSXBSsiDLU3I3WCC(uUDwJD8Hmhc zJAP$H-l3@c%SpuI_)6kX^9{}2{FYsw zZa~xRB+*AUC*7|7UOtm`9!qI6c&i}&3$IG*NV(!oE{t2EAYB6F)rJl0lb<29C#7{z z8iOjg!t};v@mf>(msJir@2&;r>jNV7=+$CV@`9VHnr}yV1(&c%auD~&=yQ)JKPfls z+YR2BXvmQiq0+%%l$0VF+tI`{QJ{4uEDg|D7b*+U@p&1g%ak`9E@QW^gM#toEhk>J z;bJr`Pp#P+m)WgtEV6~C7*B58U0Du9b2(C9LyK1F%@J9Yy5UeIb$ct!h+}-rQmtip zX-;(2)xYlbxd__6HrHC2jVUnRw(VS_a*g|=IXXeFD@uyI+csKBt6HYzim*sO67m#} zlPJFA#Y@ZgE(t)9vOiU4FnHu~mUn6pZMDZ^AxIg61d~_o2Tk`nL)g4avkfhb7<6jO zio;Q?Dhc$6KYh~Mlk*=yyP4Vz_5+-!eN=j{c314}6(XB7i`7^SRB*hkJQ}e53rL00 zjC{ux$6Um;Gq#a8greW0sS7-{7XWHp8`6O9lWoCdtn(FouA86ci}FL5QpuY_E%?cXPo!{R+~oLTOSQtya@Bej{(fGk;yPFY z{>{NTiNZH|*JH+0m6fMA$JkF5Lg?7XpgGdQwRmz`*j<%`ZbXKA#mbMevxJhQn4{G1 zKa@tsnfCQ6cS_`X_#CMhq;II87{@tM^W5Gs->A$|X^X}1G}v%l;+im}e2Z9U2&XOirO$G;fb$?F1;Y#}yo?=097#&ko9=lWz;AkeXH4nti$H_jx5C7#y&x_ZPqAgLj+K}- zcfch@v3``7FWu$EkZEb`OM{Ya9^>dIdko6!?j6JyIOL!GGun9-dUsQSY!h&iMe&sJ z6XUj#eg1A|ywOGnompb`)u6pJ0?Tr|L8G|q!)%6Z=hSgf$x3s)+Wv?wb~?u?SsDvR zu0CTCxr39me9M%6ORB9Y^^VGS!VRcesTPxHlo(EL-Fc(>mBwns(loaPd3CQ6>5i`6 zm#oPbyHm0xePTd9tY=@PIy%V<9a>g`LO#;72B-kO+hXGCM7O%Dji2=3h+9(Lw%R^I zQBQYwiZgz@y*QeZ?}`^!n!R~pmq;mQ|Hhc+Mr`!{ZdZiB%*qqOexpjrf*)a8Vdc=? 
z-m!5t@f&@NF}nIKG|@?QC6>TsN{RtA%q0l6WaQl4!W!tqE2xBRLZHDOx;U;LyRM>^ zV~Aw{oP;Ru^7S|h|I@nS5k@kJTZ-Omk5PJkJSc@j>Gb!)=`6Q>w;sVPQ|kiEb@5J< zulI1YOA~p%Ok3Q$K~as9LgMAGO@jS$atO|$o2s@ooDwEEmC|JF!6oyw;1LL9A+R3> zT4sy2+gGWFV(x`!NRfR>-oW8z#lgc->%9}A8dyzbIWu=i2kL6|2T0ZQHFGE?;XCp2tlEQWs6kj-wIqCxpyZ?xMX!Ov9pGm(GUF zZD157s+fN^-f{dcZDw-io2QGEw{XNpZ{zo&nW56A9;}o{aRk{i-r`TT&rrH`qx6=2 z^`~yS>#-rp431iMo=-b6WfwRO3TVwmJ|%U$wI1!IIb$;8^~Gtt<_AG^ z!or}x+MIf>vj$cZ)+LV z4{C_k(o>CuFHZaRCG43xjkY*w7r2>pDLiqVhg4ybP1Ay-B{oqj(_5ixoll=KoIG83 z8}`RAvc7L$~+!SjC;smj*tIP!+%AF5Z*54YI&J;T_jJK&-1c z*osTYU`m*p?x9UQ8fm&pT+;0ur-%vXBJ2}=kY0H}8vQ8pQGCW0ku4%xuk7{QG`C~u zvjk8ms!Sedzho`5iU#SyXuS9C9{!xFU$kGQ|Cyq_B$;xIzwQT+1qk>_z>NA&Jfiq;7!I_ykQ zf)(S0Z&3J;`M_ysStPJ!hx76o_w+^|2C7bPQpxh>64O%j%_a&DfRI6VPx4dwYntkY z_H%fJ5@9bHaf~HUQ&0W6rz_1q#=bLh7a`U@Gg8{S55bUkeKxLEG0c3)%>M{sJ^-*lT(J-$YWr?R~{$Ns7S-^@~;}hPBHb*|e zcFtT=nzp01i=YqrjBK+St#qYaCEJ{=m&w;&6l$4k@n14?ofk3tPWmmLM$s!%*Vb27 zg}wHXjyqJw?Pe{!Q^3*fo($4LYURFc01{4kn%z7(Kv}ob0 zkv&HKFnr=_-f9IS8(LPbDd7;$Y=+SLoeQsb=*YhbMB?1yHZyr!RgX&C3vQizasZDC zSUm|3ntW7h-05GG9u=r89w*vm85f>wxX?=}RXLDaET$!aPncNxp1|b6>uNt89I5V@ z6)Q<=#hhbWRZ@uD*xOKT>rARAEQQ3lF9;Y0{ex<(L!E6BuvBHy0rQT{L*TlrOq5Xf zq}k^B{%hFTA>iAk>Hh!>Qy4!aV@QrbE7rYSg0R``o^8YQs=#*~^rfI|Im{a8gA(S{ z(WbVh_X7>cfrMR&@W$huXerQifaDmk%a7Tro2haf0hm{~OGb>1#fPk0*s@We)*7hL zyjLkXeq)9deftH?R@B?B)J`WdPOa5dW4WcV)J>5`u+S5F)_2vlrZj56 z4^ObYsIro91qI02V?Z%+8-YCp7t7ovdJl{28(KR`yi1Q_Kl9u=q~e!tJqrkLyuemq@L5SX8Al)zvy3BwAWn9<&u5$O}IFK(IYk zHoiC(YLl6>R#hKo>^G60XRZ_SYPg&DT6N%zDFHGF11mf=AEq&_5!ZL1K09H=E)-C^ znvC=Sl|c8thk~p}FyKQ&qWHn4JC^*Cs7u{S{pEEipWk$9_j%ZKW}o~_OhKP0b&Nfm zw(#(Njm6YlrtK1@M(g5;elegmuQZcZy_go%A>p~(IU6!nP62aH6Ar04i#+4-lJ=2_ z`h`Pz3oShA)KHM!vry4qiq8XDvnzz}7p?jA)-PIfQ8g;%f6dr(G?lCUqN@aPE{fU=yw^{0;Z*hO-yyD zt)^}!3+yBuaEG06<&|AFsF;jhJTolkaC$NI!W1uNOtLzh0MymkjEV+2d>UuOWA4LE zr$E#j+cG|$ox{R6Jkwj*ov=9SjzZ$@ii8ZpdW;-jbFgfniQ{s6aGNN02S>t=i0|1- z-}9?42wlAu9a~avE%X`K7uyUK2aTD{$e~Gf@ru6&1{i=c-t31{ zU8J9m1MU$eWzk-zlF{&d%Dvt8?(H~hjS$xa6(ge4FWEgq=NB&4?Ks!inLa%>ZSaq; zV)o=Y$+Je_MZ}((?+j-fwF@5c_U}NrT!Lz=_3ZX1;}>UrLA}G6^Fc0RIc$B|S({~5 zE_R)!rrK6o_g6lM_Q5XO=|E`ySTW<&Z?d+c^xw0RqTVbeB!4}|hEg5>O8y=ToeU3C z5Y=@$N2=QbXn30!@tzAcqbs9yY!V`(RdEl!S93^lCHhQm2`gsevj)LYNHFO2?G*=<`#i<7dSxvVw($nd3g^ z6=C~a0G??Eo!N65+Z>9gX?@E3gjNM76CQZa{Er&?gbfM~s1(jl?{QqL?{(J1g4bhLEwpp zt(sTJaQ_@Gioh%$*%MOqaoN$5SYg9Q$|ao|O7b_*TL%(nLpmDyn0rAEr?ENPa}N$? z>L@<)zP0u9q8ba(>!5qveEZ{SBvPb1K~dOPZL(P0*0ykAMO``jnbmaLQ4?e}I{bR% z0T*7k)WKMn=w43lNvy`ONOed`#W2Me6#9zQa50&U9I>u!q|QzCfZ7^`h(T4=w!&td z8_Plj=m}a#u-7P`fM~bB1fuv8O#jDVdM4>fN+(aNEplpg(W`EX2kaW6=FfP%hKk@E zKvuV6Nc|P9-(aaKCeL%&hw{ka2I$s!UMJ;y6ISDrg+uG7o>%4AYx&yLsODitjcv}+PHfBiRBP`)8;k=PdEUl&c z{7J*c%{JqjhGTtbZ9xCW5>gJ4nt~DfQnU)OwelFTRf6R?v@Xm3T2rwldK`Lbn@To6 zlo5gjmC~-8Vms{XY)#wD8-`HD#(?D95A{qMmZl}h;I12g$#pK+1)?59qH(nPiEM{`Wo@UH7uyr3wvJCJfA zaay>LHFFV4QZ{;$K5LABTca}|;&3LZV{-F7HSaibq9J}&Q0uL>(FLE^6<*JZu2v3K z?(vlKj}f9QS9Elel`1y-EqWi^`$z?qQ7P@Ht>LbZ4&t#0Df56UPnVnV6$+5{g)coZ zhpR71kSQevVp6H%e!3fZq|IP&w~Pmogkt$+_tO!X<-c)e)!e2muM{J6OStO`?RD&K zg<|eTtkO;K5%gj0v#R2f>FqD|_3Z8Y0~MXE=cEfkkG)S@K<)~@Ey z^~=dqK~C3<6j+qI9U{wmjj{IGb;Dminy;-a_V6cJPop8evKIwhCMrkRRB1&YwTIK4 zq5C~@JQw2xzjrhZOxJ#MVBV%Mu4vI}QEU{k_Q--nbZ1eK)XE;cc5$>2LaK^ss6~yq zQw`o3v_ev^;J2HBR`XcTlZ_xvIjFWsE#Z%S6n$q$6cb^|D-rU5h7z^E4l$$PotbZN z@gUi#DmVN?*q&>eZIJ!2B_ZI<{A{z+cqw(rrM>|B_KYLH;!Jzwz1enR>lkhSw^>_? 
zkIt4Dim}4eHwx#KrUW7*FM0wOl2LW<2y>!8JtFZB>jl}jD-K^zxu-8(a>=@WyLUaW z6YFMEnS}!ibi?&YlwNttwiu78R*)Sf4pAELCaZsffKsKaiFO>)oT`szDp1)@Gdx=!vAbv-PU2O^4`1l=C7dZb#Z0aAmvJ~ z!Oi78^tCQM%W=`=`gKox?KN?F58>?NeX;esU>y^azTF!0Ht^)E05QJCgF!ahNxB@{ z0RF8l-hIzopX}9ayS$>xF|T4-@z5d$Se(xta*@)qmUdF;$)rdVK7@MBig8kg4CP29Xj zHX=2>d->5cX%0g~M=D8)DEYoHvHdf-6<^RPA+J=z^E9DM@!ZYMH_Q)sBB}(5LmH8a zJ&nOK(AVYTF3g5+P4jZuzUV!-2&%X)+U=P_LK5P;KGglP6jnD@J~n1C-JYB|v}7ai zf0vff7c2V6e>q0sCA+(e;Ue3?ib;fF?iLa>p00a^bdBG#7(Bbob(VlF8sDPwuq-V~ z=b|kh?=l#Fa!*=^>iAr&pUp;nnz*g_-W}pp!Taua%lQi1nWE#u@E?^m*ORDZ|2osig|*8kdH;#ittDvn!5quv@FK_0}{=X?I=+4V`G@h;j^?mU;-+v0e_)m5G|Tr}Rw*UUJnk@+N%zyd~?PgRCWGgiW0j^ylz zvQO=ra(CcfkftnEGJeEYxOl(rm20-7oDilSgCY}iXU1}m?L~6_{0TZm8dlY0PNSg|C4+a@+?+xby9PGT(w{4IookFl3j>ao95AzZo zHVf_Bmn-xVH%Rw3ue$CR=%{ss1tmy5g^^E#ZrO#Af_;8s_pGQk+7Hy2$PXkBi% zbGaT>o!Fl6a*hx16#8O{Ier@R6=cYv>e6Uj;>g+0V`tOu1ls21Tbg!#-Y{kV;pF2k z>q^x--TG_5~1g<6o}AvnJ*p^|4P?--nrx8*=8NAJJc1TWemzkJZhdFi^ZVt{_Wa zmzrFB|IVYK7lp?ls8=WpXTESrO=*3Zcr?#vGt+q<-S^GeS_=z-ST`sS5^ioJ`GS54 za`xZI{PEFiS?c^=Jfe!;TI1$dP>2^|qWm~7^6z+M$O6|$LsG$!YyBQQkQG~T#4LPr zR%2_*E*ij>F5c(ncj#QfyW$^H;IAB`1iH53JC#LeOb~C=UaCej$Mj=#Z7`41VR7a$ zwv@3)`Lxd8YxD);_=i3? z?5VdE*DBnf&vkurvYu%;M>SCD=Tioa{|dD{6I zN)EGYCWq*Ay!*nA=N=^XD2iWt80qVtz#j3e9~>(Z=RKR=WG>JN>M*ZsIjr1Z5A-ft z9b!F!Aurwf3My_-~_i4yan3Ly}QqI-}}zKv-7^Qv(J8bK4g*^hDq{Ia$VBu4hN9*D32?-61e|SY? zHrL*l$BMs(Vj$C?I~G2Lj;ta2z`QH2q-OZiuichWq(t(mJ!`0k7WLvoZ0%h?V(Y>Z zGird%8MTFNo1;J;IGclbo}CKDv}?xe2&4o@dhX34*Dr1~Aa|vH!8iPQ@T6^b+f#lw z`n=!BPsUQN6`wMx_Y=I|9L0`tPn`^{s+q#(w(PSQ<56&?P07jx`S;?g$;D;6~ z#00?Y)%`5|%YCe9IPyeKahfWhXU(ak>zNTuqMf3|k>zK14N=DT7*@L2s47}En6Qs6 zpEP4-{XXv?p?ER0VbK2V}e_dr78B-zE zt+v{vHL0f}x{k@$_Cn#kz}rbta=`T5qib2#x+RX~gYsNxYVo4xzISNM*fV;oeWAQO zC|KE_<0R9!nnOEFFQUX9kB8B;!!3R3J}o(UHC}i7k-yL;Z3#Jvw6{NHJ;+p)*VfsK zVPAeD@j7?KOxut+K{gN9a7T_O7X6WvmLVvR6i&H|GK7@vf*)lH){hm-^gseWOCz!% z>vPrO=dQoRr-<>QT{C=yw}roG2LV&{1l>|pub>ggYp7aAG|d%{{X;r33jmG}7~Smt1&jsIZ2OZA?-*gqRTYa!owW-Xz(lnz z#godq1@KR$EVb`1FV9zdFQM5leI$FP0D0@C97yi*j^HA1Bx~tKY^Mm@+>TX!N1cL! 
z?FM^x%|!hg%5lgRa-s7lpO`^qjlif!Q>OpryzzXV z<(f)wV1s)8+!WeF-gH;bGWlxic`-+O@?A%8TzXBiG{T~^YX?U`X-;KL_vkoC;c$&h zk=cs9f=ZT@iHb7{urSXph(kBOZmYGp=(e>r?(}}B?Ru1Nf``>iAxn`wMmHsEIUD6` zV@1U7me0zt8pdMl1Q<*MCy8qxiCFQskhen{;-RQzcQ+GqxJ2A*z20d(g{|&z2`Kdy z)0}R<4UPyMP4Ss%g}beH)Nhg@9V&6-Ml(0eqt&ObAd0GT|HXc^?}BCXsEO*GCOcV1 zm>zzLY6s(AfpL60o}VnGt+4t;0#ZfBKv_%I<2Xz^uLF7-t08Bi-mVzu^e@u?O!|F^ zD}?yy$C2DS`&A1=c&FxZ|hSR(lJ zFMzIrmV<9BE(e`MPz&FevhJ{PrT>=CCEdqKNR45Z5KM0Nw&8u^L)WfT@8nz5HLeMe z{9>V}3q*eMHUT0pSfqp9K&&#OYCXinG zpSLUH&K1@Q-cLESnM-obDL%_zJ9H)FRKtcigGYxz3OgauHWu4m@(!u9{YKE%_OvF| z?fLp!v`_#Txos1AnDuiUr(O(mMRKu+BxIZHsS68dal1VX=rX42+ma9ES(_QhF|~bq zKVXiU+((~R;?zUv>MQu=0$P)7;tO{5t}ud?G^xnfOhu+d8`HEu%g+6Fh9y7u^AF4Y zK4_fp12C4wOp?pwwo1YsCC{KpR9uW|;CD?92zeRTj9`OiO}sGk3{4^Ga3{XLYF4H} z1W1Ileb%lR+BUZ{x6=`K{DSu<%W1JWr7^gcXPRJ!wG$1!nEuaTyxIai7VqzrblDw% z^RVKB@3AsLw6%{6{AxR%)!u)bCT5bAjh*p!Gqz`N4_@|<`cAJAK@wTMDQ9L;t*yU4 zoO2=hHSQ#?O~qO1&&JH)r8Y7?K>Mjd#(Ea7i79=u$5Y|}7`M}z0-Wz(YieOjfo2I;RocA09JkNz?ehimE z0A4`qH!~kxdFE&rSr~dgpFgK<{v~&UK~WdR-dkQ6_$dW7B+Xawpdxg0?iWg zO|=o(TFk}qsDoRgFJuv72S1<>Oe_9NSw%5VvlN1j9f%2Mw;n6DdtX5`jK0K!h2NOGzb(blRWoQ zNts)@&+B@F>B27qqAy9wu|8l>6q8eRg+Jt%GHk7SNT7dz^@qTx}WIJ;J8m zC)+Bg!^^B29_~E>;@nci|_n{J;5wm=Hc7cL^G7q zmu$S*7Il1NhwpQ-3h?8K`AYTU9YK&U?N7JAshQkl)lf zth-UF2T2QLYxqGg=(FQQveh%g!<)dmvlIrBat-Y*!PM$s#A@o{@!#-=DV=oHRnS1J z0^^FY@cPCUs0{eD?o&Bg9l9)Pasaw7;JPw1J`AZ&J7$I`OV@R_Mwl?Pw#ZmeXV8}* zpO;8WYBrg|Wt@A}>9!z($kfj4$I%^P=o~7#3{kxAe&7Z*U-7q-JtD& ziu22i7@1}_>9n0I(>#RBKm_Yx6chnT)IX^Gf$sV9M7itYHoC&7-`}@*d2PKf{=Fw; zVV!-Xk6wrxBp(Hs%o3Y9cR`x@W~+2qTQsbQ6R)eFY z-tfz6aBiCloAXbt!+h)s=Kf5^kV}!|V(sP5PC-4rYK!m!T9@0ZR85{taG|a?tPffJYdwNG&*FJh-N88bv>;BU2Y?l*Gvo{Q9ouQZM$*p@%!O5fWTJB`{gzeA%KQ9(s z1u=`g%fZV-BT*ZA#@ow;u|*Xp*W6=MaJ>^F!h4&&#)X`ZE)~m1^#c%=MvTwwDyuRm zS-`;HRb)4nt68&gU2k0xjGadE&>5M_X2{x))08VMgF$5*?4j5HvkFI}Ns;durRAx7 zl_{fR$&EA551Ix-pvt64&O;TNI_CZY8iAJ%tTlxLGi^f)u;TOeRB{(1@ALm z9F8mb1+J{BVk@YD1cNG{YU)dkbbHjKG7KV=IZbsD(rMNk&4o8JF84R+z^rf0;*82) zI>@HFJ~Yr{WF{si*Im}mO- zLG7(Z^In{Dl5+scBn|*HY=tZa?o{<8(l|p2KbND9ycU$CE+gZc`kWcAHtTX`xvXcK zO43q?j!WMbX|B@Wjd`NKS2loUQ`>a^kxS7^DO@c<%F5OA8OvAx=ao4%@<}MY#>}GP z2=`^W`AyB#efT6oy1(u+L=txu0$0mNyX6e@xrT~We`7M;> zBh|iQo7B>Iq_{94>y!1_56Abv-Y>6J1YQOQCJ5|Qt)1Lz=F+yzlwOhSc3Of}u-b1$?QCeQ0@dTJ==L;>Co+MEIR;E$%0BeI-TJpV$g-kLr*g&Wv;fKI^@*X%_UUvQ zVtytXL2g~QaM0N}&tl({E;8e4=0p$~;sNB?j;iCMf79Ix!Nc{)6eSqsy8Ufg04;kZeoVk>vbVn_}$;Z-dLGEO(s;=PoWA{vJ51(G{`Gv&2Ux>@ID%E8u2`wgR z9=$USVv7(7aMEuMtuS2kQAW7sasP<70ih*}W6`8?j11NZj}SA(-aw>hu6^;OQfTLI zs*<0(f5;l_v@I!4c(+xD8sM=XAb>=U%t5o18;(;+mYRR9@2!2Jay|*(+$f5dn5COV z9&6#Vr)kPtPv6A4HN~I%v<|D_&1=1e+&k-CbVoba9B#Z@{P>Z@6ch=Dz6+UND~$co z;T9Vb;HX8U?-BGclzE;~c-V4vQu@>q!i7EFd;H11i}R7Hk=aqma8ZcV9PcNY**I1ip55&TGIc0^ ze|8@QxE5rk`8=0nIIPsv>>43KPO52*vvIBkt^rJ zg6m^ z{mad4Pvw78_0oI~5TxS#o=ZJblX>!a`uapLY=ooFE@JumIW7j&de8!dwlFx)Gd=9& z)H0PpkFp12>p&jd9XV%P{lg63^~^Wz%?OG8T0DcmTXm5A!uTp1z`M6M^n!Y8-V4fD zNA2O!-BgFVkfkW@RfXDy`o!vUe4)9vHs6sIA=3cCHZEaC1NQ3`u5q5auR4$zti2?q z9XB``FgefAhU*MNb{12>KFz}NlPOEzvx1c*w_V%}AbdV^ON9KWJ+5L~m}%f{P3YRHg>dc7H(FQsCVs)Od%F?H}NlRU}5zLWsRnJrkJJ+9K#LaaG)9J4Sj&X zG)AS4hmqKvEBZW(i=qGw(U`H_{K?Pj61k7-lH9sA`?;?`3W6sSCO&=(^cfb`sb7f0 zV9I5l5KKP(Ds`>5Td$F_-eTwlQP zt!ia`LI|%VZy4!FRq1938$TyxH;~=2s4eK zb8qdE+>d8j7R#sk4O^wsFX%Bc0XN70!;$cerSR3V-are1en_OcGCjHt8+&VFA5)E# zlOJ+s5Pu^2d*46DQ)lTRyW1#Z#|iHt6JR_|7@O9l-Dl!DB4mYlm`B7D@e znNiv7fhcX=4jqW6_mzfqTGQfus9A}j-60im*lK%y`_>94s}n*EzFE- zplfF{|8LZnC|3ZHy3-W)6rB`{{S42_vpNP`&fr5wTY13S{8wI-Gh%2&D;9C2xjF`B zu_%^qd@nK7lx@St{!FPgzvg-EXdou{OdE(>99KlU6TtG{y{G_H*&oCT>?mTRO!=k# 
zTw1S+r$>@L!N0b%z3-pLUkpHoP^+Lwao7~Lgzo36M*+!@SkX#hIpKG`VSpe^p`f=Y z*5y07WOu(fhM7|LXv|Qd;VMqk@$v*7=DD(J+A!?^4q9U!&N=#KN9$+u|5Md3(i}a3 zEXY-hFGgAFWmv$#PcQJb2{OpYs2cbS&fi2?7bIEF>U^b_HYU5^a1`8f79&8u`2PqK zt>gd+Z^NVfwEb3H(hg?R$-LWpAIJ|vTTbK9_28vVh*PFT*G=f+w_K8Xgi@l9eUaV- z;~p?sIZSx7tG{(M#tIErnUp$rZ0S|WcfF>W>e*+bWJr|QvDmSbn$~Qdq4S17L)*L$ zTCL8?_c{Fuv-dLO*lDI^0PiLKqO2U{AwRPDrO)%MXN5ENvf8mdWhz0b0U>Zb@4kT64gROW&aW3N{-s$$fVJEB-8;gXiw3UHY>utWaf=!&%+s`}RGS^|DmBZmx9^6kH4-5x=UR#H2Lq z@`4bdgu`*L@8iFFg=J8HTa$zre`ecdC>^ef{6KROQ1hrULch-%j4T~@*ojqqwyLX0 zLj86}CF&X5v2ywrbsH6EJ`jo8nh~RBb++NYE!*~ve%onQA>0iCv+n-RRx5V=NV`Kv z?3|o5R{Rzt*AwblbUbtDeg`Vqkhkl!b(MT3;WG+Z6jP8c`)U4_eTn$whVu2Rb%{p3 z3CbBBw@y1UC0stb0HLvmvu+MVS%yu?KNu02ID1Kk2Cz>0iG(ssGJsP|U^b-{@kC!? z(*|+V#o1&sTrfuksF`eYX`o1zHXfii(HA2Vm>6?QTI*UE@a~ws(fZDbuCczwRfQjT zCYn*pId}b}EIB7a18O+Znj0>)`3dp`5iyhn|Byl%@b}oNA;op{(yB&Ycc~p_1DQeR z`yzveS#C@SM!5MLYfQRcCx?{2lZeQz%+eUcLMd{g-k0jY1FLtl!Mr_fV47R6{F z?Rr0YpdW-d5ekF6E#$Zz?jT>B%s%e8Jm8`HoAnXWKX0_S7rPNe=k!KKu~I0)Ik1uFTM6y+PX=o<_kI z3_#Me1IEHU>COx4uHqAN-_VVUQmNwM;*CM;tbK8^F?qr&_vg4dpjqpDTq+^0YEb5S z*{yVPg|78t!T$UQ2fIw3bTmQJP>Ov^rV3Q3nTJ)cR_0K=Yt@NBb4RHW=->Tx10;!OR<EI2(f)O!S(T#PEtwd>KXTTeB5e0Sxr)>&%*1Kgt#Ae4qKWRgvLS2W)X# z+9#vDHWtuK>-T1^q6viwm+Uny-m$N8baG0o&Y#=A%$0IOzRTAaLqq5N+lGl&o*-f9 zyrV2NS<%~Lg*HINt}F4t{f7hTSs|)YZ4eC}cLfyFlvyaeAHoQ4&Fj=+iqV21zd|aIdmXR3us=8G?a2#QNsN3YtkM zV`3=XOAwP`znjRJg0ENKfnBP`ix{pK6VBo=&3@hP;#B^4a5@&>>3|{a#GMM;IFkKK zy2?>a>k5X8Ke;+e2=w&%a&lqHNh(q0FZgKhj1DAcmi(3r>W9G2ze*+fjmT}B4s~mT zw=ag0%k-3fSBJil4Pk%HCXi-~M4%~ueND{$Gt#x{t&mpXeb}l(SMHQ#vTA64}bZX_4OZ&zJXEEtEe{%cst3t_O2&UzQ;0RDLGO9wlVoQCJghn+Fa7x}-JwihbD z7z60z7U#+kuOGhl3cZa4J5}oSp`BD*5p1Xet&&igar0wn>~`9`f8yRJ@v)rcM@ubo z?bb(a7>IsBQ)Lhjrel*H5$;FQ6Iud0xj%*qab?Z(aw{Eb_F2N z{*?(3Wq>$~JcV6g*1kUe2+Zrc+~EEjYlTF28w+|^FICtPC)^Z@-iu;PhEDVSG=u~(ZZKps62Qpn6H;Im)gZ;0{BW%i~e7(oc1|%8qlj0@ZMDGe~fvPDh?m@_8(!onaOw(7!+ad9h zf8fO`@cJRv9yVMVRYGW82jr@ z&pe0uFsM9hzF(dgUOe(-x?uE^a~Jl`DwcoFHE2tA8gT00e}<_~FaXbeuh4_GIdu_5 zg9#a|>sF98=1AHw+_eB6kPVM{f8m~bah!C&-6?KkR%BZ*XPtV2TYg%)kP^90h6%?I zy1?rKSZVvncXwViDiU|+xL0*UpQ7zB!AVROofdq!+^CY-4nqiE7Zc!t)fJgYlY zPdR^*TotwYUr*cevaia4OV%tw(tbp{9WRI|(fRbPxR*Vq472)02)h@~&O!&8k6Se- z7sk<8mJe)$Tj6!wQ`+|qg9IB#75)y}Ty)lHNv`K=d?NhIG zZQa|sE)JJQE&MN=)J92NfUo51hSzCdIgLyJzP8ImaQhlAspvRdQyD3O$S4hMl+*_q zMHf2lPCY4uhn1w9{iqOlQ9isEoBf#s(?^PhK)1!8cF)7$V{YmM7?W$e!7UDwOWZ9M zTd`^f1DmVvsRGoAKsV>rZ?h}u>s$divq{mHH5}+Kro7rbw{j%l+nRaX8m-N*Q%7xU z-?8rHz5is{H{|)!X%Ham`jZd@+o%O?LuVnU*A?AkS4P9YupP&O3^8egN?w#)WvFDa z$L|x(?u)^m<-!(UY0x<6Fg?}UqUB3M2<0o;b3F7?f!C2uI1XBE7b~+hEKr^9`3t1R z$a3Rrfli$(6rjol=X{5i>32d96`e@LVP! 
zdIz_|$+_enF6psDa_hzATomA*n$Svn72{I`waIZen&%#x_Tl3?l+Lku-jc>U(;3hi z@GVwD$NL>a<7SEF`aSX}+2BzFSM8Y7Jkg&kghufx{Bk5(?_>BXjmbZ(=0U01glxlI z$Kxwh-+MM4O`KJ;Ax%jmh?@Jr7jzs|ZopSEa53UPB*OFURuP83iGt3cCyiXPt0_jz z33yJqVU2L4{4es)`}<}pz|jmzbG=PjmyBbp98b9vJ^$!|5cts9;A4=GXvMw1(!AM1 z4dmSZCpTHGu7ab)<2@XsOmbLMH&0=@hQ-Hf2gg#5&YJrzL=C9s{w`9V=bU{K5kn|O zs+r5S1Qo@AZR+XREFwYTc1g3py)&9&uwQm#vtK3I7^I14c6Je&Q;mp?4xtU&IaYfY zD$BnB<+;Hd$WUJE;G0n#!gA+ucnq(TV(YzY6>Dzx_)~`8qAqm(O532(H+P%SlDYMa z&t+0(Oxjsbg!-bp(E_e>^CbE30n5{6+cMSdO+c_+R>1S}HGWg2*U2&N`7ibm86EyF z850slTzjq~vD@e@%30iO2oFthrIj=|zJ{hdqegj{!r z?;R)!9m*<#OHxRI5z6>>m;aZ}*0v~b35Y|<_qGXWhwN+vW&BSzN;g#>l3rAF*=aHal$?mp&9vlSsg(_nRBdgo1RyEW}Hht9O&|%^uMf1(&{k{)W6sLdWC-a%# zt;Y6Z1H2J|hDad>&Z7)d;Aa0T*uCvwh;428U{^`|`@XR4I$~g8>t4r}7r?8uK|29auK)Y1ab=48dLh=ble9$v1)c`**>wls=co;zE^=vLKsxp^Dy^|q zmlt&rnBhFx-L^ziF5jb%DL_8)m$QD(a8NDdy?_4U%!VW`q%hhBa?oepc`waRNl>;= z-6|Ri5Lq=uUXUH%kFp5bIr}#YpQ6FeCKSVaW6`ZeRh;Jd*=)jX#L~*t$8s0?9@Bcr zLF4I;5fjK$hr{kD8%}-WwJDAuX+-|N7J}v$+$5lq2sx`Z_w*Sxa7%Nu&LG)s;(=J3y!2Uj zE7{x>XHPX4n#0!TbJ>p;%Z)t#0*q!tQ#Q)Hw_~sL2Z>-T9H||l20__;U&+#h9$U8- zZ)>-~wg-c-umTfT8=SbZ65k+CvTSAh{vR+z0Gp@Nd|b*zOaB!9__m+{I)x~X&8CGl z**W;Gaf5VSZ~tleT~0^juSik-X|#?vJ689}aF=P^3~W<9=_JsuCu++|GG=9n*?NOl z462IE4&jawiI3nsV}PDMWU>V9hhPJJDj%6P>YBN^H@;qyb?%4RPMsYbJt;8t&Z?$| ze4i&U!o)oEi=T#)CkFE&27RKcO&(s2%lPI?<=YW^+zjrs!N<6lO_%!g&(w`$?CnG1 z&;Ta>-{*STXKQ@Kt;+9B1u>&Lbu|VjKQcwWcz(Va>IO(LC@Zg3o2xBY;ObD6SW8Bm zedmE_X{taLi!``hsRtzOIrCCAV7Mg{8V_+|8sGCe3{<+V_p^8n zD}4?<{wd_8X9doIF1h?V$nw*g+IUkxUNb5DHXkwxvZh~zrRm=x_Ufm4S)LV ze;&VQ#GmL-P&zkIgCW9+-f9v&ZDGb9zQ(&+Z*{zQ(Mnw~>MJ5}B?*xb|3e)siQJ5oCWt$n$f(Q;*Jo)LPo57&yMfgw1GZ&_10&&^QI)e^=q9HKQfZ{^;|i= zjBo)ReYT-h@T4${sWMTm6N`GV56h6N9RpjT0 z*Vgq;0Eg9mAjC!&FCwhS#*DG5aJ}FDN}>VlYtk7r^GB9vVWWWD9NWFHt{o~Zg@q7- z_;4t*lliQ8`O)dBni8VZ&%_Rn+)0#d2fB%tw&s15%WZU1Akd)_SFjVf3#*F7sj}&7 z5T9lrnW3Zq%ZlQAdKCx4DafXdHB8+51uHhWs^!(n$oPnSKHo4Z+1*aSyL%%w1Y_Ps zPrvl$!&}eke9%erJDg%_ZQ2p6Fn6Z-Z-G;MW!gRpsk5SlI7$cd3tG}m3dQZS+7Wx} ziFcvYH9VTNO+|j;Q&v`; zh||rJ#+efo1o3H~8*iZ|_i_(DvLMv&45_Cd*$c3AKAbJ;+`KCr)$@p%3Vn&ma+Y|C zVc1$THr79MrMgAS()$1?bRhoCtU}KX=GThbXjiBm{PVmGMR7%5d3wnpt)K&p1Hv#O#gIKAB|Eecw$h=qnud3lk& zKoq<iV^il2Y$CAV@!nuBM7ILNic zy6!kr-z#eUcx-}#6aK0KyU39;Nm+sqa>o4qE>(mE`N0FHx!Oz4&IVYDw>3@W!{Fsp zd{c0pvVLlY`dKaz=4w8nvgXR%DApMie6V9k-!An{tOY`P2ac5DUN<{d%)RjC785;E?dWtOoD$4o z9}>%GLT6 znxi)U&}Xkw7RlQTS+cHjh zAKS{ShR1(sYlQ9Lt$TqSiz`yY_J=(3lO%m+F}6t?(ZX}tWKrEs#HheTs>mtC|KjHO zpY4}_i{|-%YrjZ3!G+&1ZW?7Oi&}<>bq@GdQtTb%VpeqiyFGQ{ruNr!9|L46n&`EJ zW}DGNv9PF`ljxw8?q2|5Q-=r1@tq6;CnXU8T^~xLjYK*8(a%SHvogry{5@&l7||Xk z^ChA85|jIYPzL_w|J_R3fL^MhrL&Rb%;m23o$Lxv(kt3=W5gt706J znS|Qq=uZF5p`Uh|yGj~CaVYuY!B&@b`-oi z+0amrH;pA4tAkjR#@+KIh7vWZ%ePE%yNBey>J(OTJtX=>?$hhuMo z9MFm?6ySA640k`PvZltK{$R4meR3u_kgO}^a^gq@*Nn{>Y_ESA<9En^$hp{bZHd4s ziwNybHrrta3+zuiG2K2Mar6t+m?cvDaO>m87Wn!Wb>6y`YWm{`mmH%{ieB(R0nIAl z3Gu*EGDQd0ow4-bo2WNTDZQje6Ot8cwwa3O1@1e7c|_V{E%6N9jVA9}>*A>X0^V9{ z)K#?vk;zSZ=HpoxW>H46$N}xBDA6?yL#RWv&=B<6^Q;b!f`uzM3ghz!-`4ysQuI>e zfa~>SQK6&sOtf2+Di}__Zq4Mr6Yyb{NC!gs(U%TxFJFAGbsm0JDPFTYj# zqj^*`WfA%!UVm>{dcAZeluNFcCTil|V)LNR` zI~GdmHjaQJe35Gj%BrD&m*%%~)|np%&g6&H=8M`}+N|NuZUjO$B3-EfvDUGnij zU)m(Ua&S(tdLH*X?feB$o(Nd%mzBeot#20#-`dsaqOv3vIMZ{y8YTr`!y#%5>(wSR z*9t$0kAGK3+@YK`MP!}1%a>v8%f?=YOp-o!#2bD?*|-nZZ@gFxb*;9gwpsyywUTw|Vmp0E!=b11xCA4W?5u01+#|^~Lm|COG zAJ&S@J}UCYbV_a4o<2mHG)5~6_-Jw}~oS!3Vd_)UihRUXy zP(;|y{(AFBeI;j#<{yG(RQlW|lxZs486W9P{3{XIZLEigQQA1}tE-b#O3heLhMUgZ zmx$XEUy!%EPwECC(&0(50<(-wQs-OSWLFatN{$0@ES?NR799Pa%(H+q=Qwp{?4=&> z&|7>Z4LaHT)v8XL)1i)!v@c2*z`ZGO>`&z4l@~r*H3m<$dkZ?Szd=)YYnFJ$_9Xyi 
z!i8rJGEx1dvYn-zlDTFP5VI9G)R}ak4CXbuFF@KV1(Ni2j-t5~Z-=)tVVa|42R08q z>t|Zq(JF}7F-fK9CsK zQd+>Rr_xVf&{j~1m>@e8H7A<6R5J$)A3LWSB~(OHJF@nTi{h(Z&eRetT?;eF6pxJrTx93i<5_^q-&_ zt^dvwUC8@q+FhqO z23clgoUjwd3gC(I0=Zi;`07w9B!26XmjXiaUP4u$glfeYCma_^USeFlt!6PLL3e1zwAvc9yn%|iuOyOaj2?X6l<6i6ri z4qrY?%*Mv{wgR3^?eyTT>OB?udVh|iDt~~Tv+;DhQ63Z-Z{77>SIKD2p{H_7mcwv_ zY7lOgM$8U1k8qk;fYmWI!5p_Jj_#orwg1W)$r}d_xn}EJwr2u$hy7HxIcb3`rd_Fb z>ODP+HHIr^#5XU9L!3l$E6Xd-vaA7e`?O-1AxNM!$C4>lIsNWseaEX8 z%W}WM%b)-$hZ2Lnol%FR9gR7^-qa(|0U@d5z`6GCcUXbBzs{okBYb!*b`-4X+r1+g zo4+BwXduQ>W2YgFI&T6bCGSYna)~R;m(^7cs_^#u766%3Dnw@C@i>B*v=E^&e%K2) zXSlb+Y7aOaBSIA+k!{kBtI9+u&qYw`E}>ui%WVZ>_sc97ISmbDOFsn$qP=<_sygZA zy1QTIkEZ?9^QkN7PNg|dHV|B7ZJ~PbohK*hSsCpq+0gq=YW5d9a@Ek+O-*&-vO!Ph z7QTVHG8VLrlqkK!i|wH3RXuk6wze=7u4p1dyFm92-v#{}vt?XGfGJbZ^~mFE(a=w6 zqAOP{m6Q6O>4__=7^KI;l)TYEgbTVer2dk2jjRJc}2z^4#QyY^Vca6zZyHT&GL^73vpo# zSLg{=MvYHreSYUp83*>qZ519E@(bLGnzuVU5915qMXc3N-!Ls;9U6$3FjP|7+aujf z_b4tL(<6w>x`R?@tBPFI#vafZ<4b_o7fXag*jZ^D8T9D`zqx5dtUD=7UsM9RSJ-o3 z>9?pvj{?oDzQQ+D-g@&iimh=wKh~7eBNKx7)KMB^2zs-vo?Di5BROqGYoMCD-`^ZT zPG=p^IE%{`6PfhSlR(!h%1F%I;q{~~VNr_zb(Mfp{(SL>V2&1nZAaTNZmG?Xw ze*x+SN^@W4U8;~qX}xjbm?-R-xWInqcgVmNGO`nOnSS37%i3}!o3;WLdr7iEmm3;3 zmIOQpSG-jiNa>1_pGBhiQF1prU9ogh?J zR8~qA{yd%)=uN$xg3>2+)z&9Imw;R!0fF75hqKxzsSO*pU;hHY#R}dmX(%~A%u|fz zFFc1!0RU{xCYWn!aQO++h`3Q{g>Q1?`k`lNgS(HZZi?dDhI@vBNih459 z9FBv*Zi>bj;mMwofzVrY@7CV+faQ}V?6O1gpIRSXV>uP;^-j(tW??h+&7P&|lDX2P z*+?@3b;gzg1(hw>^!zVCW#MC4qxZYw?nE6!MhkFeqkQIv+BJC zSMOMAm{bzgd+$(dqKdJKU+xXTG5=s_g9iM#ni?(`D|T349wUGMd^Y>SDqY%wL#8vhbql@<1!%Wje290qsDw@3h8f6T8+AnTbaDa;ba?>8 z^v3kHerswrxi6uO3v(Jh^H3_&+qb%yG%AUt60vtdm6b6GY;w;tVR@Q!kfzPEcR$Ze z>)(^=caG81UJEwHkcVv|fiLR;JuM5T42>L^o!4y=S)+gtr*L2b`%xxs{ zbXao(SEv(Ez_HEfJqb=L_?P=DJSkJL$NK5MGQ+ay^Ba=(>}5F{N;&(vRB>8Mh7^I1 zq@zFcZ0k$!gQXYEQ-wcj)^{b*+(d&N8;E`fctFViV;CNX^Quxvs>b&Jt`ISOrg(q& z@2Nj+c?SmT|JoJ%PiggklUD1?gyMy3$s!ycft#ie4!0>ww|TJJpO;sct_Su%z{Auj ztM?9ngpmVd|KA$XKXI+Sa!%dTev9O}x>EBf3O5MW2pA`3{;Ns%G9Kfo2j*29V73I7 z8XwMI8;NuYY`uCVEh~D8Zy1bt$y|I7t{E&T&`BL!AbAr+gqQx7=JcS%#d3C~Cg}*W z!;4?=DLsFq(kTH?1s8*Qf!jTuT5*SSo2T*K&n4MpA8FrPd5S-MaQ~(AtKB&+W6H<9 z@(&#d@>*}Hlft4+belQ%UA2x3{;x8Nl}BQQzkuGVIaZ{&BjYcD1W2a$;1ayOW0n!=EDuqFPa1q6NGu@uKpbh-R_DAL#8>g5Q?wX43~Rb~ zAe%cI$^8pZv{6pwWXjA)AY9{7@&zq`J4K6hh7wn8NM036&%7VDi+aYpp&B@SM|Ipc zSFP9uZEI{{TP3!pXaBGq^)y>VswQYw0`QcHg={q%T;~=6UAxW zdxvLhJUmwLsCE49(veYtA@*|*yl14VaX#xAa5uLDN;s_Ry)Vc2eI1L|hUQc`lLw)A zp#0#@q#J<@a}v^gB85)Z>1redK|q#+Tij z$u%U&aSN`n)^L**>%HBRq6QTPNd|YkLNR z9*r9<0)(<3oT6@{Yw-?SU_QFO8_8zE-?T5!c3Y%l*`F5ZQ7_6EgNvjHrKppQ6PrY> z-Zqb*L<3KUM9$^-jwS$(n!hc&p>P8kg?6%_pWjobd+Mwb%)R5F3VloNe(#o=gn3mo zKW_5@U;jE>3e3d3I=Q6p_J_K+K69UU#@wtu#($O?AC%5> zwbFSHE+#bR3ukNm-rgHhQGqG5n4$9|^Y6acnxUYc!R;T-L~agW^Q117SGYP59mWAQ zQcwZCyKGaAUgQ-f1xJ}&Ut(!K*VlXcSzgj=_qUj?icJ7KEu)gxk28(>GwvF!1!B^8 zSvH4lkj}U%Q_`_un(H4+t_bSi<4PAKWjqC@4iFtR-j=IO+J_uVr=PTlzkne6&en_L&mXd-z{Q{Xk~j%b+s1u20&t2%Lq(Nmt`F`N;8kS}E#8rg z3+6V|Y}yTEJd`(ZK)j@3hShcshSDbmRzjr~^>3dGnbq}WIZ(0jI#Gm>KAsObrm4xg ztU2wgiaiq{p9+BMIjd=;{G*dz`vIJf{J`0JX1PQ?m$Nd7zN`+?VJ1W^@f>ZYL&(yL za@%fFXp4w-7cybN@V8*>0YgMGqsm%*+FXp*G`9;*i+ zWDa&#EF~H-pjGiQi~LvtEh5$u*XG~?x9Ea)@mWRt?l&580#-#hB5x-SLpR2w#TH(< zMn8PNYtuc0=jo*DOFP2O*{C)Djh|FmQ%Fqkew30U21d_^&=R$iv(nin*0T;z(n9xy zq@eKpuV3ZEt5s|uOVq;j{O$-nN@#-siZWJ)VxYqQx&ot&OSjY#;)hQ~8-gRzZE=X2 zzw2#H&RlNG5TQ^+pHZxN##;Wd7UM#=l*&R&@FBOh<=)1eSzSD0^J(Jv^_xbP7}#^K zxFe+4rcG*Kuy{*jcqj|A?QYc_4SG}Z=OQV1W?v}N@h!BHyhVHl-{&ivV|>y?+5`y= zow0?1+x^Al)nc1tPzHvZDz%Y%+9%=p4^0F81}QD>Ptk*JrDZks8{RhWsQ)l}nXX-18UqR%rD&Kl z*h@5vuBQqbDJl3z>aX``#$WM{Zgyy$ 
z-cAVORn(Wg{%U3y{BRE7p6(JMuL)0m<03NhLFVm%;DAAU;?UN!ps zw(5)xgfL`Rp~PKn>}?mwbfYx44+U;Bl@bVAdAMp+xz$>|8!p+!Y3>a-J~|@e!+bQy9=92@tFRn#AG6J7 zi#{#yXU@7f_A^v0sR;b=86>g5>S*(Nns$lW^&s9OrRu5zW^7Dhq4j4R%?#gQ_0*7} zJ1vR!90Q@#UDwPZ^VdE-c@sd%2B+GUH%(f$-&YzC?Dh;QE|Xmp2!0BM+DmrqJt1L& za6uqZRP$+1qaezPrVE=n)jP|dzj?kb>RWX3<1u(LaSLlV^T!g+5ps3KE=7`Io)<$+0?T!|M@$R;=32Ag`jG#vgy`NGnu1|~~^18dw zfnDr|Nh0-y@3rpR7%zPel)Alm9GKk_v^s7Ux=YLSZL%q*108jk@_P9V)(S+9%W=pM zYN~+p)`gT`#CYUxP&~;%kuL-t5PI}p$I7PXVlXo^rx#}%I`G4_I!dyucBBqJ5oqQV zi46LhM4k`7Nk{s&_N0!1&;ud03T+h3o1Q%$Z$F(-Mu|mEcCb-yxo$Ynxly{xQ`Oy9 z7*qinzp}uDqrmS<8U;#A*iMumLW0NFWFD1*PKLeE~%42-f zo0M`*UP?L2yASI6<(`GY>8ahBlRh0MWEpJG%Z!y~w$w&EZsgII$^SS!LpF6IKCBR{ zYR7>-Qy^+%!yi&zDQRr`y+{QRduH0$ItFZ5l&+-kYHg7X1+tokZ?QpI3zu{r( zXcFM7o$)}}rK){yqK#{NXIj#UVdvd=GMzaSm)I@daxr5I&;XyXhPK50D=6fjOc-Spy zV;Z5gg04`AW`cKZxZ{@!d78!4VoQ-&UxFQjt+I6XP6Smv{5I*m+PKIEYVCzRgAG;K zP>`J^S`b#(oCMfKD*>3}GTb^A-uUpE#RQW0#q%5p56|_1mwM+KqjvYdyKsK@c_~h_ zl^tfqBNcG(YcEJ}paBCi(t0!P%@?+{MpQ&-&6eDd;+QQ!UKAbJGep$j^Hs|c%DtO3 z4I_HO{nWn308pzbW@iB`SPDu=O-diif4XWO2JfHn$B}#lT0fwDCorBgUYMh6QQw^F zk!NF_Tl4lt5f3(gyyN5&wIVhqCchy#H3%Y`Q1X|R^8{@DAVA2!2gE573{gw+4i{Jw~@4Ggx-e^P(@^9-T} zQ03kuWbb|cXo(&(Js0FMfffmv(`c-!7={{XPZDx6R)&z@wK2ETNpLd7Dq0RZMJYEP zhh(?%*;h$>6~LH01~*R|dOi~v!!>Tzf2%>jEcwFKBp{gU8SRt>7Z`Pb_3s!r1$Tcx2{U=iU9U;c4(V@t9x0^v8Ox#i1gdv`pqwg%YV<|vb4VQ z?`Yit63R%fKg-DTzr*y)lb=`(AY;4#S>>8qI7@TK=;}*Qnfb|t+fTr*p{5IZjWc1@ zJw*n+Ao=Vh;V*LdY#s#jza-2h%@EBRpoD&*;#cfxrFpbTH$5~~@$-uQRvl>Z*x z^1tePr!+wJG2)%%j3nTt{*!mAhsV%a4xkG%`UcH<-~BguR`cug-#@E^N!O(|^K=-3 z#0g(}(C2@YkUG&fO9SWrtd?P?_BhC_Hb6LASdmyHTJ&a={$cXc5P{X{(mJVU3XuVx zhl!aw69e~MDdCTAg?q`C#({}(G(a=?K{dG&n#)xBg0zw}dhfN@eXv6%P_9#~P7L!T z(AInsvD8AaiKs!?r4~x_x8JO|AL_kOA$$*m8B<6Y6YHiFH8n|c(WYyXcy}^>Z)9_( zGyC1iI?9jLPD7YCtRMOR9`JzqnO)5$iwM2eY*KsArn^Hf%k)8|nhb)U~t;*mG$jz!&&9CGx| z=WB8Q7Kc;--1OI+pIV@7V`tcV>) z&!hk|BmmQo(cpU;0n`1ZaeJU3F!v#vU}Qywu;*^-s3CAWfpQ3)2zY;>au7fH$q>0e zjj~z^jK6+3+%HyC!GbR~z?QPt&&_bK^x4>%74e(jNe3wDkw#pxkqSJHknGW1qlTTW@S}I^(8TlZS~x8J zi)ruf6a&4h9FGcM^3ZPO3CpBy;{+S!Wje#F%2HSL?|)+O86vZRy>g04*H0>3e0QMd zyV+=m@&CL8-sl<4lxB05^D*{CZs=##v!egx*pCEP#Wi8zV6?haQv?lsH z+1AHelN#GTD$`YV|0mqzbz1^ z1G%MnMntpeM`QK5ZZ$!{M6N8RqZxvJzVQU~gfyvT)imAt53Dm4(J!kG+S( z#I;%X51_V`_(``{yklA}i$=KG$3?h8<+=6mzq`?G+=eCezldbJC*1A+HBI!js%0AP zXbDqOZ9{G2m$Gu>GrkFmumCx(y|W6EXAgE2K~8gg7;6Gmn=qKJW?|ksnma*@M}~3i z=!hFyI>Jq3%(5DpjkSfyfUu1c& zXZrEm5`eZf&auDALaJvgu(~YX_`-LMQijX0?L+7{XcI+$rwIFaOhLCG9jb9?)_3k6Vy^vz2IWAD?=oj9u*({aO9f#uVua9_X)z`|b#EEki zU<|#Iws$n3DbJa*u2&vRENBPT3(C=Vg?cxDE(qN@1QH*cT05P(*br~RLx6|jNICnl zO4iGDblVTOk-6g%Jw50p!={I@rMCna*mwAn)wPWAlQT+iSR1q-%C?iQV3OI9fv&)x zQF9vxTkjlx1j@m`a6nV7RqIFfp@ofgTGwyHvH52UV;7!4u=!2IaC4c`P0SeUcT1ou zTiTF=zGN>6aR1S(HAs_i1_6PR-?6N>5(-BOgl!s9H8Lxo0r| zGaJZcL8;k<%~nmR(&N!{Q>LO9Z_C!kI83eea-NH|Y7E|$!T^o{eOgxBVu3z(0iedK{pFr(-=%YRH-1 z$&;gHWSz7qS*hHBm7lP>b(2tmOjRB`^bns{s(K{%3k@xg$`)YClltOr&|*;z2IPZU z+mPp4!U80c4$=puHdLk%ei)J?-Yd|7?VxWv`;^8tz6)fASdTd_Uc?S#=f^kI3hM4F zK@VBmdhV3i9Bycz=Z~3F^C-44)%9iqEqf-EXxiqRy@l}*D%M{HHLA%7BFA_&S83Xmq)>LS-SI) z>xAlKQj29*S4n{x)1xpE=L}p4@!GVWh3zIsYq1Iy{IF80$~vtF#PdN^fk^!@(g|OB z)G~#^YGsjpKA1rFtK!wB>8{mB20@FhYnuVQuTvJAz}h412TeDS3Jo5)aL`JVEtl4`91 zXIG(#J)410WY=m>_;&)Gprs%&NH$4+p`rc;jnJ2U%PlbtpS*9Mr={&lW1I=&EjkjJ zoLEKSckrTx7UU`3+;TQNy_DTU3aqfzJxpWoN2$c!$?O3UllAgVmtR2)x!`X# zSC;5l;o8KH8tM*aDdniO#qz|x+}ty!vA_(BM|J`NIDk3G=9bgBC%>pF4L&6 zHTrRXF(=)zJR=;#<3OcGl~{o zZOH|gA&hIO*io-MSe3gcEi{jj;^1Bk-YHj1mW_FstU10s{4p-Ht1MO?>{#=5#x+gq zdB^K6D=P_{nxFftf1sXOjpm`Ykbz^SN>vC(=(w?ZL?hZ#n7 
z-jVG#@T%VKHOsN>A}yh8sTJ%fUSmU$)Y{&4xCaNKNJ)u}Ut2i%ea8{eB(J^GR90DH znN(~ri!wr@dAr>-HLFZ!RoTneVEi$SX7# z^W}_q4AHT}oRPuh35U!_F58JKVpDH^ez8Lm*&KS<@$8H9^jj!oM5{UxJDtiDw~>dd zm%1Cw*Do0?!v6LHG(d0o`j_l4JA3vLp0*pGaPDkjEyn4+Egjtzzio&83*}v-qZ_{I#lX6-33KfC)#2aNI?VcG_4xn#`=3_m&MrSI zT<-3rA1Qjb@aE&3Kh^#n?6c%O(4-d%r7u>G?_Yw5Ej)8K zwRJX0Z{O~%i=JhBYp(AyPlcKrdEY{O?AxKCEP&J-oo{{xf=4fq?0Pz=_|8H& z9EGNXd_qSkok%m6GefS#$zZOye|KOD8lkVXzj_?RuA#fLl4oak34%UvskG#fNw8Dg zlUoWSY2C+U(DHbVbctdPZImP^<4fdk66e3tCR?qa5##txU`_aN^YPAit()$gkKY-H z542Cp6^m8IS`xGcy|9xOGo$gb{&fH4&WJZw6TI14tpJdoU1fQ9Cf(d`dgpxh<+*9g zlQE2q`h*ZZaoSH5@s*|)!?v^4`m9ykgEIk6gX2AA;M^zt;oLRlTWSReYSEWK4^R46 zY!BvAo;M=fU9T-IKHS|n+~mjx2irUh=Lvcd88kOhM^~pbX%?sua6P96(6c$WbdRPi zct8CBQ!tiVn)Z&p0|uSHc(vk0O}OXnWW}*k&g`%)S9qL}<-QO=RBwX9(ylYt1+kQo z<#6}i(Twx&y8+gKxR;B5HjZn)*W)PBkn`1uBWcl{ub4^J{XKr3{A|aaee9H5h#D~u zK22ZN9odynA^0_(vF~+#QAS-jV{PRE14PNOg@uUdzl zmO_!8dz>vbzRdt1IY`Ex$|kqi+9sI<>tl}2Hm(^q4ZB2;x%V-(IOX>&^v>7U*3g(l z97W#>uN%a~izxFN&NuwJKFhfCwe(XO%>-WGeUTUFl;&rh^dFg>Rod_Ib+gt_isP zJ87%<@`gJHIoJbw-?r~lBTZ{(UumCYB)YvKn0vx)01uuW99<-+ZDy!@U6%Z~iZsU1 zKh5k44bWb=CMk{^!|VeoyW1WypzqTu+b0PThrtEq+!Jd%%PrSyw4F(-MMA8l zb93vAgzW_L9I)>;AAUbutp{ISkLPt?>Yk|9m0|p7GQ_Q&T?roXGw>QuO5R7|+pHcnW6d);Swi`c8HE zHsknyL&MPbE~Ns#erAZ|L84p~yb6iwfLR940PHLOR+8Yawqd?WM@s;(r}q!v9AIUn zL3@^cq6PkI`I7RK-j)!pmDAMYxgaKRxa6{--|08#fIBl<4t(}}ZlpefOqQXUCZS(_ znq2$!dKz>U)z83L&q7X__jG=oywu1v;;kl35q<`&l+9Pf%a)>lpHVe+grZ?L?Xc^S&d$Q44nK=Cw&k^ zySH_*fQD)SM^ixp#z$WGBujIDad3&B&)Tnq3utMN1pgXzi~ny2{V#OUSABo_|M{t^ z`-QLR7(NiW{oGOC{rjR&5=Zpg@>Pdboqg^PZn6(1;D(>Jy|I7NL9esF(nV*f(fjd1 zV+)J;&Q%wFMx#B_ zE9S>MJ97U*5g^!*q@@Ci{sXs^u%3K|9lyWBNJ;kbl!{2GNXw9B*uH!Ojg%kV=DbB| z#|V>;ava5zlXWnp5$Sp7JV)|P)0e3})p@flug_{|zVH6YLoP469*4CQPaDIN6*8S2Jq|f&Ild}n}g`i=ZEq>g7FzJkS zJgUGl%~sYf9+Z(La4{2h;V4DTgP6`8ohiAA8>|{}Pr51A8SCKA%WM&sD0te;)bt1D zmzLSP2{vJK?iJlQ;(OzbDn=8HDtS}KGEalmnut_LW~)7HUOZ=fBcil&GwuB?LTno? 
zTa=_xLtq<9Hq%f{FeyN6SpR`TAD&ubYTH(1rr$hZGQln=H?nV$#?Gl+T^T;8V{5D( zNUKhaHwK51dd3wa<`B{@zm^L(*i)n9jxBi+Rcg~`VgosD%x2K<3GHU*M`lA`9%i^`k4 zrzb8k^7WywDq(xU6|>aFAMO?TOj#8>==Zt$ijMh~q@+#?T)TSH3^sG>8MkyOjKZHc zP25&!9^q$a+R&z0oG~oAn#513wn%LiU09rb#F1kG97$7n8IWLK-qW#qNOtdyg3WdCRsY=eI#c6!GizP+B>-{KhyAqy^gctocd)1<4pTdvhatw}L=d@tZ zme{Xf0WGkkhhito>eB6NAY&q>o3AH`F~+A6(+GOTnwt+wx~5E}GAfs2AZz1qVXrw1 zSB%5WqaHM7f?i@~0J_=Cdu7i#X>?0rBII|aKyuEMXW(HxPAmu*g%bdi>d_$e_7UPcyp8q{@$9) zC)eI80nZixzTzm|={ANc87#HNfo9EzpoV{8F?&vMn=f66FO5q=ZIFB(H6P-=K9rLv z?oLvdsUsIIziYkSAPs)&}AtL_<@T92_1Ce*R&{0Wp0fLrRX zRl8Z{4&4`=Anc-(?nuSjANB6Am#hb- zACC`}p@KB-;DcP1bJvkC7-J@<`E%YR__ybh#Y8_^!e&lV7F8T|euqc#GTrh8j%b+c z=MpQGGU6M#i+)5?#~KP0spn2jEvZxt50XFJHqKb8CEps#vkFRB(k^#d9!A#qC@Iz_ zk=DY_tTJ?>92>6cRCL^lbdUwP5NqAK1mSXYP6Mvler8rJlYr&iMdeC<)Da9h-Qbsy zzBnEFsUEeRo$r2R1g{-|lJBRAKGE~9D3Q0IoXS0Gjck*meG&*5TPl+b7#=SvR()xd z%omPZo_Gqa&ba!_H2~MCd#e@CJSpg`=hPsq{*B!1u7@orQM=H57_Wgo5BBa7`Le&9 zVk@KOgheIks9vRi5jLjgAYlf)8 zL7iZ4Jc7g_lIp*y?evc%bZJ#DAd{D%{zl28*204FEJqb*k8PIicfLK=z})dOFsr;0 zQHcO%gf;)tAh(aE^GwL~p`L~DUJ5&3u6$wA_EBOkq zUx)mkHn5TYS3w{UH7GYLGz-Y5{~H(4A2C(q5Hg><}&g!Xzm@tLqYHE{Tw``n_m&BtU0>T^NiwttUjXG**p_x)DI_eFi1XHpeZLH!Hc-Ij|N?B z>E-K&Yf3*V)@jaqchP*iqlR8DH_q}GOIsDU!U6olo=6hOWXWes6(lj(H_rHDmgI3a zIdDS)XRB?z#f$TU5A;m3tVkDCZXsr6NILjN$OIoMRj5M{C?OR7>obug!R8efK&`vidh0^B^^OX;= z3ajYGSbo{0@pMNyD7RRpu*;_ikg~d0 z_-g$2%ez8qZ)b&<2{6cnka`qyF)uPLHBVorf&xaqy7rJUETu#%E=Maeq~(0ZPQbrz zgmBM7KK|^of}b%n)#g+@^JH{@zEDEnQ9*IKbq(Tv1zuj0e8T4!9L1jU=}^#h=v5|x z39j9SlBaQ`1oRuRDt7$wbh~HyJE%RWw58p=!MDQ@dwTD}`%#}p$T~bzFt+N8wbwNf zd6iiVG}I`Qc--1;J4)@QI&JnFPIXF^I+Q{n4(T-ptx~|@#Us;F z+?Z)0p;Ik$S&;j)^kBp3_F;EVWQQ+n{rFAU;#T+sPX0&UaI~>KyD8fsciLpx`1F(H zC&#Hy#HR)`^>*1g(iSm2zRZ{j&+kU<(8Zm@%DivBw^6q&N55JLAl@L-@?Q|rSe_}^PCq>`!^^fmVI`Hqq6%~-*H2}?Hon}&#Dnub&bX>QG*EMOLtdkcVj{S_98GX+E_X)# zxmI>meevFFqKCJQBT>C8cKk}2rP=-@MP&j+zK8#R zzBCl8U?SEBQ3pAn%};~3+<(hj0veycd-1j9RLOBgiv(VbhPollFF|R$eseDyQ<$76 zh)osS5BEf@7N}NNoa(b`jcNdBTznZ=FbvRIrlY-pVhf+7Z|J$;UCG9w4bHl+YWkeN z?;+x+#?w1!`J>=5qrbZoN%+Ef1QJ}(yyhb7(DM7fB}OIBk7Cege17&FC$zJ)YCeM- zp$ophZS~3XJ3%sE99sNq7bLRIQooS2U;MW9?C~XNZ&u~(`|w2zc-hb0NA^gxi0$nk zn-3EKIKMIdXe#^6m$nzZbNYVU?Wq>-ADg#$fd!R^B(hrm@LV$LV&UNS%yvQ9?LRjE zakg8>e)MM@zXst?8-zubOAsv)5Nm#^ZiJVh!ibCE^WrxcsjZ-D?s+$+hTb;qVHH(S zVXsS^qNi_m_ZG1Tzy{71`ln?B|5vT8qGGu^K=PTnS^o69cY(`S5lFU$ioNq&i}stN z%gM@*77uonUzWK9iN&B#h&_O@90CaC&9aI=7W22wd>=~g^+_V5. -A customized ``instackenv.json`` can be copied to the undercloud by overwriting the -``undercloud_instackenv_template`` variable with the path to the customized file. - -Below is an explanation of, and example of, the ``instackenv.json`` file: - -The JSON file describing your Overcloud baremetal nodes, is called -``instackenv.json``. The file should contain a JSON object with the only field -``nodes`` containing list of node descriptions. - -Each node description should contains required fields: - -* ``pm_type`` - driver for Ironic nodes - -* ``pm_addr`` - node BMC IP address (hypervisor address in case of virtual - environment) - -* ``pm_user``, ``pm_password`` - node BMC credentials - -Some fields are optional if you're going to use introspection later: - -* ``mac`` - list of MAC addresses, optional for bare metal - -* ``cpu`` - number of CPU's in system - -* ``arch`` - CPU architecture (common values are ``i386`` and ``x86_64``) - -* ``memory`` - memory size in MiB - -* ``disk`` - hard driver size in GiB - -It is also possible (but optional) to set Ironic node capabilities directly -in the JSON file. 
-It is also possible (but optional) to set Ironic node capabilities directly
-in the JSON file. This can be useful for assigning node profiles or setting
-boot options at registration time:
-
-* ``capabilities`` - Ironic node capabilities. For example::
-
-    "capabilities": "profile:compute,boot_option:local"
-
-For example::
-
-    {
-      "nodes": [
-        {
-          "pm_type":"pxe_ipmitool",
-          "mac":[
-            "fa:16:3e:2a:0e:36"
-          ],
-          "cpu":"2",
-          "memory":"4096",
-          "disk":"40",
-          "arch":"x86_64",
-          "pm_user":"admin",
-          "pm_password":"password",
-          "pm_addr":"10.0.0.8"
-        },
-        {
-          "pm_type":"pxe_ipmitool",
-          "mac":[
-            "fa:16:3e:da:39:c9"
-          ],
-          "cpu":"2",
-          "memory":"4096",
-          "disk":"40",
-          "arch":"x86_64",
-          "pm_user":"admin",
-          "pm_password":"password",
-          "pm_addr":"10.0.0.15"
-        },
-        {
-          "pm_type":"pxe_ipmitool",
-          "mac":[
-            "fa:16:3e:51:9b:68"
-          ],
-          "cpu":"2",
-          "memory":"4096",
-          "disk":"40",
-          "arch":"x86_64",
-          "pm_user":"admin",
-          "pm_password":"password",
-          "pm_addr":"10.0.0.16"
-        }
-      ]
-    }
-
-
-network-environment.yaml
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-Similarly, the ``network-environment.yaml`` file is generated from a template.
-A customized ``network-environment.yaml`` file can be copied to the undercloud by overwriting the
-``network_environment_file`` variable with the path to the customized file.
-
-nic-configs
-^^^^^^^^^^^
-
-By default, the virtual environment deployment uses the standard nic-configs files, and there is no
-ready section to copy custom nic-configs files.
-The ``ansible-role-tripleo-overcloud-prep-config`` repo includes a task that copies the nic-configs
-files if they are defined.
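Taken together, pointing quickstart at a customized set of these files comes down to a few
variables in the environment settings file. A minimal sketch, reusing the variable names from
the sections above and from the OVB settings example later in this patch (the ``custom/``
paths are placeholders):

::

    # hypothetical env_settings.yml fragment; the custom/ paths are placeholders
    undercloud_instackenv_template: "{{ local_working_dir }}/custom/instackenv.json"
    network_environment_file: "{{ local_working_dir }}/custom/network-environment.yaml"
    baremetal_nic_configs: "{{ local_working_dir }}/custom/nic-configs"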
diff --git a/docs/static/baremetal-overcloud/customizing-external-network-vlan.rst b/docs/static/baremetal-overcloud/customizing-external-network-vlan.rst
deleted file mode 100644
index 317b856..0000000
--- a/docs/static/baremetal-overcloud/customizing-external-network-vlan.rst
+++ /dev/null
@@ -1,20 +0,0 @@
-Customizing external network vlan
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If network-isolation is used in the deployment, tripleo-quickstart will, by default,
-add a NIC on the external vlan to the undercloud.
-When working with a baremetal overcloud, the vlan values must be customized with the correct
-system-related values. The default vlan values can be overwritten in a settings file passed
-to tripleo-quickstart as in the following example:
-::
-
-    undercloud_networks:
-      external:
-        address: 10.0.7.13
-        netmask: 255.255.255.192
-        device_type: ovs
-        type: OVSIntPort
-        ovs_bridge: br-ctlplane
-        ovs_options: '"tag=102"'
-        tag: 102
diff --git a/docs/static/baremetal-overcloud/customizing-undercloud-conf.rst b/docs/static/baremetal-overcloud/customizing-undercloud-conf.rst
deleted file mode 100644
index 5610df2..0000000
--- a/docs/static/baremetal-overcloud/customizing-undercloud-conf.rst
+++ /dev/null
@@ -1,20 +0,0 @@
-Customizing undercloud.conf
-===========================
-
-The undercloud.conf file is copied to the undercloud VM using a template where the system values
-are variables.
-The tripleo-quickstart defaults for these variables are suited to a virtual overcloud,
-but can be overwritten by passing custom settings to tripleo-quickstart in a settings file
-(--extra-vars @). For example:
-::
-
-    undercloud_network_cidr: 10.0.5.0/24
-    undercloud_local_ip: 10.0.5.1/24
-    undercloud_network_gateway: 10.0.5.1
-    undercloud_undercloud_public_vip: 10.0.5.2
-    undercloud_undercloud_admin_vip: 10.0.5.3
-    undercloud_local_interface: eth1
-    undercloud_masquerade_network: 10.0.5.0/24
-    undercloud_dhcp_start: 10.0.5.5
-    undercloud_dhcp_end: 10.0.5.24
-    undercloud_inspection_iprange: 10.0.5.100,10.0.5.120
diff --git a/docs/static/baremetal-overcloud/env-setup.rst b/docs/static/baremetal-overcloud/env-setup.rst
deleted file mode 100644
index 8b98180..0000000
--- a/docs/static/baremetal-overcloud/env-setup.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-Install the dependencies
-------------------------
-
-You need some software available on your local system before you can run
-`quickstart.sh`. You can install the necessary dependencies by running:
-
-::
-
-    bash quickstart.sh --install-deps
-
-Setup your virtual environment
-------------------------------
-
-tripleo-quickstart includes steps to set up libvirt on the undercloud host
-machine and to create and set up the undercloud VM.
-
-Deployments on baremetal hardware require steps from third-party repos,
-in addition to the steps in tripleo-quickstart.
-Below is an example of a complete call to quickstart.sh to run a full deploy
-on baremetal overcloud nodes:
-
-::
-
-    # $HW_ENV_DIR is the directory where the baremetal environment-specific
-    # files are stored
-
-    pushd $WORKSPACE/tripleo-quickstart
-    bash quickstart.sh \
-        --ansible-debug \
-        --bootstrap \
-        --working-dir $WORKSPACE/ \
-        --tags all \
-        --no-clone \
-        --teardown all \
-        --requirements quickstart-role-requirements.txt \
-        --requirements $WORKSPACE/$HW_ENV_DIR/requirements_files/$REQUIREMENTS_FILE \
-        --config $WORKSPACE/$HW_ENV_DIR/config_files/$CONFIG_FILE \
-        --extra-vars @$WORKSPACE/$HW_ENV_DIR/env_settings.yml \
-        --playbook $PLAYBOOK \
-        --release $RELEASE \
-        $VIRTHOST
-    popd
diff --git a/docs/static/baremetal-overcloud/env-specific-pre-deploy-steps.rst b/docs/static/baremetal-overcloud/env-specific-pre-deploy-steps.rst
deleted file mode 100644
index 4b16cf0..0000000
--- a/docs/static/baremetal-overcloud/env-specific-pre-deploy-steps.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-Additional steps preparing the environment for deployment
----------------------------------------------------------
-
-Depending on the parameters of the baremetal overcloud environment in use,
-other pre-deployment steps may be needed to ensure that the deployment succeeds.
-
-The third-party preparation repos include a number of these steps. Whether each step is run
-depends on variable values that can be set per environment.
-
-Some examples of additional steps are:
-
-- Adding disk size hints (see the sketch after this list)
-- Adding disk hints per node, supporting all Ironic hints
-- Adjusting MTU values
-- Rerunning introspection on failure
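Of these, the disk-size-hint case is the easiest to make concrete. Below is a sketch of
applying a root-device size hint to a single Ironic node by hand from the undercloud; the
node name and the 60 GiB threshold are placeholders, and whether a role variable or a manual
command applies the hint depends on the environment:

::

    # hypothetical manual equivalent of the disk-size-hint step:
    # pick a root disk of at least 60 GiB for the node "control-0"
    openstack baremetal node set control-0 \
        --property root_device='{"size": ">=60"}'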
diff --git a/docs/static/baremetal-overcloud/environment-settings-structure.rst b/docs/static/baremetal-overcloud/environment-settings-structure.rst
deleted file mode 100644
index 0f29f59..0000000
--- a/docs/static/baremetal-overcloud/environment-settings-structure.rst
+++ /dev/null
@@ -1,98 +0,0 @@
-Settings for hardware environments
-==================================
-
-Throughout the documentation, there are example settings and custom files to
-overwrite the virt defaults in TripleO Quickstart. It is recommended to use an
-organized directory structure to store the settings and files for each hardware
-environment.
-
-Example Directory Structure
----------------------------
-
-Each baremetal environment will need a directory structured as follows:
-
-::
-
-    |-- environment_name
-    |   |-- instackenv.json
-    |   |-- vendor_specific_setup
-    |   |-- <architecture diagram>
-    |   |-- network_configs
-    |   |   |-- <network-isolation-type-1>
-    |   |   |   |-- <network-isolation-type>.yaml
-    |   |   |   |-- env_settings.yml
-    |   |   |   |-- nic_configs
-    |   |   |   |   |-- ceph-storage.yaml
-    |   |   |   |   |-- cinder-storage.yaml
-    |   |   |   |   |-- compute.yaml
-    |   |   |   |   |-- controller.yaml
-    |   |   |   |   |-- swift-storage.yaml
-    |   |   |   |-- config_files
-    |   |   |   |   |-- config.yml
-    |   |   |   |   |-- <additional config files>
-    |   |   |   |-- requirements_files
-    |   |   |   |   |-- requirements1.yml
-    |   |   |   |   |-- requirements2.yml
-    |   |   |-- <network-isolation-type-2>
-    |   |   |   |-- <network-isolation-type>.yaml
-    |   |   |   |-- env_settings.yml
-    |   |   |   |-- nic_configs
-    |   |   |   |   |-- ceph-storage.yaml
-    |   |   |   |   |-- cinder-storage.yaml
-    |   |   |   |   |-- compute.yaml
-    |   |   |   |   |-- controller.yaml
-    |   |   |   |   |-- swift-storage.yaml
-    |   |   |   |-- config_files
-    |   |   |   |   |-- config.yml
-    |   |   |   |   |-- <additional config files>
-    |   |   |   |-- requirements_files
-    |   |   |   |   |-- requirements1.yml
-    |   |   |   |   |-- requirements2.yml
-
-
-Explanation of Directory Contents
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
- - instackenv.json (required)
-
-   The instackenv.json file added at this top-level directory will replace the templated instackenv.json file for virt deployments.
-
- - vendor_specific_setup (optional)
-
-   If any script needs to be run to do environment setup before deployment, such as RAID configuration, it can be included here.
-
- - architecture diagram (optional)
-
-   Although not required, if there is a diagram or document detailing the network architecture, it is useful to include that document or diagram here, as all the settings and network isolation files will be based off of it.
-
- - network_configs (required)
-
-   This directory is used to house the directories divided by network isolation type.
-
- - network-isolation-type (required)
-
-   Even if deploying without network isolation, the files should be included in a 'none' directory.
-   There are example files for the following network isolation types: single-nic-vlans, multiple-nics, bond-with-vlans, public-bond, none [1].
-   The network isolation types 'single_nic_vlans', 'bond_with_vlans', and 'multi-nic' will be deprecated.
-
-   [1] Names are derived from the `tripleo-heat-templates configuration `_
-
- - network-environment.yaml (required, unless deploying with no network isolation)
-
-   This file should be named after the network-isolation type, for example: bond_with_vlans.yaml. This naming convention follows the same pattern used by the default, virt workflow.
-
- - env_settings.yml (required)
-
-   This file stores all environment-specific settings to override default settings in TripleO Quickstart and related repos, for example: the location of the instackenv.json file, and setting 'overcloud_nodes' to empty so that quickstart does not create VMs for overcloud nodes. All settings required for undercloud.conf are included here.
-
- - nic_configs (optional)
-
-   If the default nic-config files are not suitable for a particular hardware environment, specific ones can be added here and copied to the undercloud. Ensure that the network-environment.yaml file points to the correct location for the nic-configs to be used in the deploy.
-
- - config_files (required)
-
-   The deployment details are stored in the config file. Different config files can be created for scaling up nodes, HA, and other deployment combinations.
-
- - requirements_files (required)
-
-   Multiple requirements files can be passed to quickstart.sh to include additional repos (a format sketch follows this list). For example, to include IPMI validation, the requirements files that need to be included are `here `_
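As for the format sketched above: the requirements files follow the same pip-style layout as
``quickstart-role-requirements.txt`` used in the quickstart.sh examples, one repository entry
per line. A hypothetical two-entry file, where the first URL is the standard
tripleo-quickstart-extras repo and the second is a placeholder for any additional role:

::

    # hypothetical requirements file; the second entry is a placeholder
    git+https://opendev.org/openstack/tripleo-quickstart-extras/#egg=tripleo-quickstart-extras
    git+https://example.org/my-extra-role/#egg=my-extra-role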
diff --git a/docs/static/baremetal-overcloud/introduction.rst b/docs/static/baremetal-overcloud/introduction.rst
deleted file mode 100644
index d7e84b5..0000000
--- a/docs/static/baremetal-overcloud/introduction.rst
+++ /dev/null
@@ -1,20 +0,0 @@
-TripleO Quickstart
-==================
-
-TripleO Quickstart is a fast and easy way to set up and configure your virtual environment for TripleO.
-Further documentation can be found at https://github.com/openstack/tripleo-quickstart
-
-A quick way to test that your virthost machine is ready to rock is:
-
-::
-
-    ssh root@$VIRTHOST uname -a
-
-Getting the script
-------------------
-
-You can download the `quickstart.sh` script with `wget`:
-
-::
-
-    wget https://raw.githubusercontent.com/openstack/tripleo-quickstart/master/quickstart.sh
diff --git a/docs/static/baremetal-overcloud/networking.rst b/docs/static/baremetal-overcloud/networking.rst
deleted file mode 100644
index f8c5af8..0000000
--- a/docs/static/baremetal-overcloud/networking.rst
+++ /dev/null
@@ -1,37 +0,0 @@
-Networking
-----------
-
-With a Virtual Environment, tripleo-quickstart sets up the networking as part of the workflow.
-With a baremetal overcloud, the networking arrangement needs to be set up prior to working
-with tripleo-quickstart.
-
-The overcloud nodes will be deployed from the undercloud machine and therefore the
-machines need to have their network settings modified to allow for the
-overcloud nodes to be PXE booted using the undercloud machine.
-As such, the setup requires that:
-
-* All overcloud machines in the setup must support IPMI (a sanity-check sketch
-  follows this section)
-* A management provisioning network is set up for all of the overcloud machines.
-  One NIC from every machine needs to be in the same broadcast domain of the
-  provisioning network. In the tested environment, this required setting up a new
-  VLAN on the switch. Note that you should use the same NIC on each of the
-  overcloud machines (for example: use the second NIC on each overcloud
-  machine). This is because during installation we will need to refer to that NIC
-  using a single name across all overcloud machines, e.g. em2
-* The provisioning network NIC should not be the same NIC that you are using
-  for remote connectivity to the undercloud machine. During the undercloud
-  installation, an openvswitch bridge will be created for Neutron and the
-  provisioning NIC will be bridged to the openvswitch bridge. As such,
-  connectivity would be lost if the provisioning NIC was also used for remote
-  connectivity to the undercloud machine.
-* The overcloud machines can PXE boot off the NIC that is on the private VLAN.
-  In the tested environment, this required disabling network booting in the BIOS
-  for all NICs other than the one we wanted to boot, and then ensuring that the
-  chosen NIC is at the top of the boot order (ahead of the local hard disk drive
-  and CD/DVD drives).
-* For each overcloud machine you have: the MAC address of the NIC that will PXE
-  boot on the provisioning network, and the IPMI information for the machine (i.e.
-  the IP address of the IPMI NIC, and the IPMI username and password)
-
-Refer to the following diagram for more information:
-
-.. image:: _images/TripleO_Network_Diagram_.jpg
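The IPMI requirement above can be sanity-checked from the prospective undercloud host before
any nodes are registered. A minimal sketch, assuming ``ipmitool`` is installed and reusing the
placeholder BMC address and credentials from the ``instackenv.json`` example earlier:

::

    # expect "Chassis Power is on" (or "... off") from every overcloud BMC;
    # the address and credentials here are the placeholders used above
    ipmitool -I lanplus -H 10.0.0.8 -U admin -P password chassis power status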
diff --git a/docs/static/baremetal-overcloud/requirements.rst b/docs/static/baremetal-overcloud/requirements.rst
deleted file mode 100644
index 5c28651..0000000
--- a/docs/static/baremetal-overcloud/requirements.rst
+++ /dev/null
@@ -1,22 +0,0 @@
-Minimum System Requirements
----------------------------
-
-By default, tripleo-quickstart requires 3 machines:
-
-* 1 Undercloud (can be a Virtual Machine)
-* 1 Overcloud Controller
-* 1 Overcloud Compute
-
-Commonly, deployments include HA (3 Overcloud Controllers) and multiple Overcloud Compute nodes.
-
-Each Overcloud machine requires at least:
-
-* 1 quad core CPU
-* 8 GB free memory
-* 60 GB disk space
-
-The undercloud VM or baremetal machine requires:
-
-* 1 quad core CPU
-* 16 GB free memory
-* 80 GB disk space
diff --git a/docs/static/baremetal-overcloud/validate-prior-to-deploy.rst b/docs/static/baremetal-overcloud/validate-prior-to-deploy.rst
deleted file mode 100644
index edb1b2a..0000000
--- a/docs/static/baremetal-overcloud/validate-prior-to-deploy.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-Validating the environment prior to deployment
-----------------------------------------------
-
-In a baremetal overcloud deployment there is a custom environment and many related settings
-and steps. As such, it is worthwhile to validate the environment and custom configuration
-files prior to deployment.
-
-A collection of validation tools is available in the 'clapper' repo.
diff --git a/docs/static/baremetal-overcloud/virtual-vs-baremetal-undercloud.rst b/docs/static/baremetal-overcloud/virtual-vs-baremetal-undercloud.rst
deleted file mode 100644
index ad7afa7..0000000
--- a/docs/static/baremetal-overcloud/virtual-vs-baremetal-undercloud.rst
+++ /dev/null
@@ -1,16 +0,0 @@
-Virtual Undercloud vs. Baremetal Undercloud
--------------------------------------------
-
-When deploying the overcloud on baremetal nodes, there is the option of using an undercloud
-deployed on a baremetal machine, or creating a virtual machine (VM) on that same baremetal
-machine and using the VM to serve as the undercloud.
-
-The advantages of using a VM undercloud are:
-
-* The VM can be rebuilt and reinstalled without reprovisioning the entire baremetal machine
-* The tripleo-quickstart default workflow is written for a Virtual Environment deployment.
-  Using a VM undercloud requires less customization of the default workflow.
-
-.. note:: When using a VM undercloud, but baremetal nodes for the overcloud
-          deployment, the ``overcloud_nodes`` variable in tripleo-quickstart
-          must be overwritten and set to empty (see the sketch below).
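In settings-file terms, the note above amounts to a single empty value; the same line appears
in the OVB settings example later in this patch:

::

    # leave overcloud_nodes empty so quickstart creates no overcloud VMs
    overcloud_nodes: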
- -You will need a baremetal host machine (referred to as ``$VIRTHOST``) with at least -**16G** of RAM, preferably **32G**, and you must be able to ``ssh`` to the -virthost machine as root without a password from the machine running ansible. -Currently, the virthost machine must be running a recent Red Hat-based Linux -distribution (CentOS 7, RHEL 7, Fedora 22 - only CentOS 7 is currently tested), -but we hope to add support for non-Red Hat distributions too. - -Quickstart currently supports the following operating systems: - -* CentOS 7 x86_64 - -TripleO Quickstart ------------------- - -TripleO Quickstart is a fast and easy way to set up and configure your virtual environment for TripleO. -Further documentation can be found at https://github.com/openstack/tripleo-quickstart - -A quick way to test that your virthost machine is ready to rock is:: - - ssh root@$VIRTHOST uname -a - -Getting the script ^^^^^^^^^^^^^^^^^^ - -You can download the `quickstart.sh` script with `wget`:: - - wget https://raw.githubusercontent.com/openstack/tripleo-quickstart/master/quickstart.sh - -Install the dependencies ^^^^^^^^^^^^^^^^^^^^^^^^ - -You need some software available on your local system before you can run -`quickstart.sh`. You can install the necessary dependencies by running:: - - bash quickstart.sh --install-deps - -Set up your virtual environment ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Deploy a virtual environment with the most basic default options by running:: - - bash quickstart.sh $VIRTHOST - -There are many configuration options available in -tripleo-quickstart/config/general_config/ and also in -tripleo-quickstart-extras/config/general_config/. -In the examples below, the ha.yml config is located in the tripleo-quickstart repository -and containers_minimal.yml is located in the tripleo-quickstart-extras repository. -All the configuration files will be installed to your working directory. - -This does require the user to know what the working directory is set to. The variable OPT_WORKDIR -defaults to ~/.quickstart but can be overridden with -w or --working_dir. - -Please review these options and use the appropriate configuration for your deployment. - -Below are some examples:: - - bash quickstart.sh --config=~/.quickstart/config/general_config/ha.yml $VIRTHOST - bash quickstart.sh --config=~/.quickstart/config/general_config/containers_minimal.yml $VIRTHOST diff --git a/docs/static/ovb-openstack-cloud/calling-quickstart.rst b/docs/static/ovb-openstack-cloud/calling-quickstart.rst deleted file mode 100644 index 33db0f6..0000000 --- a/docs/static/ovb-openstack-cloud/calling-quickstart.rst +++ /dev/null @@ -1,87 +0,0 @@ -Install the dependencies for TripleO Quickstart ------------------------------------------------ - -You need some software available on your local system before you can run -`quickstart.sh`. You can install the necessary dependencies by running: - -:: - - bash quickstart.sh --install-deps - -Deploy TripleO using Quickstart on Openstack Instances ------------------------------------------------------- - -Deployments on Openstack instances require steps from third-party repos, -in addition to the steps in TripleO Quickstart.
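The full command below passes several ``--extra-vars`` files (cloud settings, cloud credentials, and the per-environment ``env_settings.yml``). As a toy illustration, assuming nothing beyond Ansible's documented behaviour that later ``-e @file`` arguments override earlier ones for the same top-level keys, the layering works like this::

    def layer_extra_vars(*settings_files):
        """Merge settings dicts left to right; later files win on conflicts."""
        merged = {}
        for settings in settings_files:
            merged.update(settings)
        return merged

    # Hypothetical stand-ins for the three files passed via --extra-vars below.
    cloud_settings = {"undercloud_type": "ovb", "introspect": False}
    cloud_creds = {"os_username": "admin"}
    env_settings = {"introspect": True}  # environment-specific override

    print(layer_extra_vars(cloud_settings, cloud_creds, env_settings))
    # {'undercloud_type': 'ovb', 'introspect': True, 'os_username': 'admin'}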
-Below is an example of a complete call to quickstart.sh to run a full deploy -on Openstack Instances launched via Openstack Virtual Baremetal (OVB/Heat): - -:: - - # $HW_ENV_DIR is the directory where the environment-specific - # files are stored - - pushd $WORKSPACE/tripleo-quickstart - bash quickstart.sh \ - --ansible-debug \ - --bootstrap \ - --working-dir $WORKSPACE/ \ - --tags all \ - --no-clone \ - --requirements quickstart-role-requirements.txt \ - --requirements $WORKSPACE/$HW_ENV_DIR/requirements_files/$REQUIREMENTS_FILE \ - --config $WORKSPACE/$HW_ENV_DIR/config_files/$CONFIG_FILE \ - --extra-vars @$OPENSTACK_CLOUD_SETTINGS_FILE \ - --extra-vars @$OPENSTACK_CLOUD_CREDS_FILE \ - --extra-vars @$WORKSPACE/$HW_ENV_DIR/env_settings.yml \ - --playbook $PLAYBOOK \ - --release $RELEASE \ - localhost - popd - - -Modify the settings ^^^^^^^^^^^^^^^^^^^ - -After the undercloud connectivity has been set up, the undercloud is installed and the -overcloud is deployed following the 'baremetal' workflow, using settings relevant to the -undercloud and baremetal nodes created on the Openstack cloud. - -Below is a list of example settings (overriding defaults) that would be passed to quickstart.sh; a quick sanity-check sketch for these address ranges follows this section: - -:: - - # undercloud.conf - undercloud_network_cidr: 192.0.2.0/24 - undercloud_local_ip: 192.0.2.1/24 - undercloud_network_gateway: 192.0.2.1 - undercloud_undercloud_public_vip: 192.0.2.2 - undercloud_undercloud_admin_vip: 192.0.2.3 - undercloud_local_interface: eth1 - undercloud_masquerade_network: 192.0.2.0/24 - undercloud_dhcp_start: 192.0.2.5 - undercloud_dhcp_end: 192.0.2.24 - undercloud_inspection_iprange: 192.0.2.25,192.0.2.39 - - overcloud_nodes: - undercloud_type: ovb - introspect: true - - # file locations to be copied to the undercloud (for network-isolation deployment) - undercloud_instackenv_template: "{{ local_working_dir }}/instackenv.json" - network_environment_file: "{{ local_working_dir }}/openstack-virtual-baremetal/network-templates/network-environment.yaml" - baremetal_nic_configs: "{{ local_working_dir }}/openstack-virtual-baremetal/network-templates/nic-configs" - - network_isolation: true - - # used for access to external network - external_interface: eth2 - external_interface_ip: 10.0.0.1 - external_interface_netmask: 255.255.255.0 - external_interface_hwaddr: fa:05:04:03:02:01 - - # used for validation - floating_ip_cidr: 10.0.0.0/24 - public_net_pool_start: 10.0.0.50 - public_net_pool_end: 10.0.0.100 - public_net_gateway: 10.0.0.1 diff --git a/docs/static/ovb-openstack-cloud/introduction.rst b/docs/static/ovb-openstack-cloud/introduction.rst deleted file mode 100644 index 465e409..0000000 --- a/docs/static/ovb-openstack-cloud/introduction.rst +++ /dev/null @@ -1,14 +0,0 @@ -TripleO Quickstart -================== - -TripleO Quickstart is a fast and easy way to set up and configure your virtual environment for TripleO. -Further documentation can be found at https://github.com/openstack/tripleo-quickstart.
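The address ranges in the example settings above have to be internally consistent: both the DHCP range and the inspection range must sit inside ``undercloud_network_cidr``, and the two ranges must not overlap. A small sanity-check sketch, using only the standard library and the values copied from the listing above::

    import ipaddress

    cidr = ipaddress.ip_network("192.0.2.0/24")
    dhcp = (ipaddress.ip_address("192.0.2.5"),
            ipaddress.ip_address("192.0.2.24"))
    inspection = (ipaddress.ip_address("192.0.2.25"),
                  ipaddress.ip_address("192.0.2.39"))

    for name, (start, end) in (("dhcp", dhcp), ("inspection", inspection)):
        # Both endpoints of each range must fall inside the undercloud CIDR.
        assert start in cidr and end in cidr, "%s range escapes %s" % (name, cidr)

    # The DHCP pool and the introspection pool must not share addresses.
    assert dhcp[1] < inspection[0] or inspection[1] < dhcp[0], "ranges overlap"
    print("undercloud address ranges look consistent")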
- -Getting the script ------------------- - -You can download the `quickstart.sh` script with `wget`: - -:: - - wget https://raw.githubusercontent.com/openstack/tripleo-quickstart/master/quickstart.sh diff --git a/docs/static/ovb-openstack-cloud/running-quickstart-instances-openstack.rst b/docs/static/ovb-openstack-cloud/running-quickstart-instances-openstack.rst deleted file mode 100644 index 18d7f66..0000000 --- a/docs/static/ovb-openstack-cloud/running-quickstart-instances-openstack.rst +++ /dev/null @@ -1,85 +0,0 @@ -Running TripleO Quickstart on Openstack instances -------------------------------------------------- - -By default, TripleO Quickstart uses libvirt to create virtual machines (VMs) to serve -as undercloud and overcloud nodes for a TripleO deployment. -With some additional steps and modifications, TripleO Quickstart can set up an undercloud and -deploy the overcloud on instances launched on an Openstack cloud rather than on libvirt VMs. - -Beginning assumptions ^^^^^^^^^^^^^^^^^^^^^ - -This document details the workflow for running TripleO Quickstart on Openstack -instances. In particular, the example case is instances created via Heat and -Openstack Virtual Baremetal. - -The following are assumed to have been completed before following this document: - -* An Openstack cloud exists and has been set up - (and configured as described in `Patching the Host Cloud`_ - if the cloud is a pre-Mitaka release). From the Mitaka release on, the cloud should - not require patching. -* The undercloud image under test has been uploaded to Glance in the Openstack cloud. -* A heat stack has been deployed with instances for the undercloud, bmc, and overcloud nodes. -* The nodes.json file has been created (later to be copied to the undercloud as instackenv.json). - -Below is an example `env.yaml` file used to create the heat stack that will support a -tripleo-quickstart undercloud and overcloud deployment with network isolation: - - -.. _Patching the Host Cloud: https://openstack-virtual-baremetal.readthedocs.io/en/latest/host-cloud/patches.html - -:: - - parameters: - os_user: admin - os_password: password - os_tenant: admin - os_auth_url: http://10.10.10.10:5000/v2.0 - - bmc_flavor: m1.medium - bmc_image: 'bmc-base' - bmc_prefix: 'bmc' - - baremetal_flavor: m1.large - baremetal_image: 'ipxe-boot' - baremetal_prefix: 'baremetal' - - key_name: 'key' - private_net: 'private' - node_count: {{ node_count }} - public_net: 'public' - provision_net: 'provision' - - # QuintupleO-specific params ignored by virtual-baremetal.yaml - undercloud_name: 'undercloud' - undercloud_image: '{{ latest_undercloud_image }}' - undercloud_flavor: m1.xlarge - external_net: '{{ external_net }}' - undercloud_user_data: | - #!/bin/sh - sed -i "s/no-port-forwarding.*sleep 10\" //" /root/.ssh/authorized_keys - - #parameter_defaults: - ## Uncomment and customize the following to use an existing floating ip - # undercloud_floating_ip_id: 'uuid of floating ip' - # undercloud_floating_ip: 'address of floating ip' - - resource_registry: - ## Uncomment the following to use an existing floating ip - # OS::OVB::UndercloudFloating: templates/undercloud-floating-existing.yaml - - ## Uncomment the following to use no floating ip - # OS::OVB::UndercloudFloating: templates/undercloud-floating-none.yaml - - ## Uncomment the following to create a private network - OS::OVB::PrivateNetwork: {{ templates_dir }}/private-net-create.yaml - - ## Uncomment to create all networks required for network-isolation.
- ## parameter_defaults should be used to override default parameter values - ## in baremetal-networks-all.yaml - OS::OVB::BaremetalNetworks: {{ templates_dir }}/baremetal-networks-all.yaml - OS::OVB::BaremetalPorts: {{ templates_dir }}/baremetal-ports-all.yaml - - ## Uncomment to deploy a quintupleo environment without an undercloud. - # OS::OVB::UndercloudEnvironment: OS::Heat::None diff --git a/galaxy.yml b/galaxy.yml deleted file mode 100644 index c027102..0000000 --- a/galaxy.yml +++ /dev/null @@ -1,45 +0,0 @@ ---- -name: collect_logs -namespace: tripleo -version: 0.0.1 -authors: - - tripleo -readme: README.rst - -build_ignore: - - "**/.mypy_cache" - - "**/.pytest_cache" - - "*.egg-info" - - .DS_Store - - .ansible - - .benchmarks - - .cache - - .eggs - - .envrc - - .github - - .gitignore - - .gitreview - - .mypy_cache - - .pre-commit-config.yaml - - .pytest_cache - - .quickstart - - .tox - - .vscode - - .yamllint - - ansible.cfg - - bindep.txt - - build - - dist - - docs/source/_build - - infrared_plugin - - module_utils - - modules - - pyproject.toml - - report.html - - roles/collect_logs/library - - scripts - - setup.cfg - - setup.py - - test-playbooks - - tox.ini - - zuul.d diff --git a/infrared_plugin/main.yml b/infrared_plugin/main.yml deleted file mode 100644 index 8cdcfa9..0000000 --- a/infrared_plugin/main.yml +++ /dev/null @@ -1,75 +0,0 @@ ---- -# This file and plugin.spec are required by Infrared project - -# This section collects data from the nodes -- hosts: "{{ other.openstack_nodes }}" - remote_user: "{{ other.remote_user }}" - ignore_errors: true - gather_facts: false - vars: - ansible_python_interpreter: "{{ py_interpreter.get('stdout_lines', ['/usr/libexec/platform-python']) | first | trim }}" - tasks: - - - name: Detect python interpreter - raw: 'command -v python3 python2 /usr/libexec/platform-python' - register: py_interpreter - - - name: Gather facts - setup: - - - name: Remap infrared parameters to role variables - set_fact: - "{{ item.key }}": "{{ item.value }}" - with_dict: "{{ other }}" - - - name: Ansible role collect logs - include_role: - name: collect_logs - -# This section takes care of preparing the collected data for publishing -# and for publishing itself -- hosts: localhost - ignore_errors: true - tasks: - - - name: Remap infrared parameters to role variables - set_fact: - "{{ item.key }}": "{{ item.value }}" - with_dict: "{{ other }}" - - - name: Disable artcl_collect to prepare for publishing - set_fact: - # override artcl_collect to false because in ansible-role-collect-logs - # role collect and publish tasks are complementary - artcl_collect: false - when: artcl_publish|default(false)|bool - - - name: Set path to a report server key - set_fact: - artcl_report_server_key: "-i {{ artcl_report_server_key }}" - when: artcl_report_server_key is defined - - - name: Extract the logs - shell: | - cat *.tar | tar xf - -i - args: - chdir: "{{ artcl_collect_dir }}" - executable: /bin/bash - - - name: delete the tar file after extraction - shell: | - rm -r *.tar - args: - chdir: "{{ artcl_collect_dir }}" - executable: /bin/bash - - - name: Ansible role collect logs - include_role: - name: collect_logs - when: artcl_publish|default(false)|bool - - - name: Delete artifact files from localhost - file: - state: absent - path: "{{ artcl_collect_dir }}" - when: not disable_artifacts_cleanup | default(false) | bool diff --git a/infrared_plugin/plugin.spec b/infrared_plugin/plugin.spec deleted file mode 100644 index 7f30dfa..0000000 --- a/infrared_plugin/plugin.spec 
+++ /dev/null @@ -1,215 +0,0 @@ ---- -# This file and main.yml are required by Infrared project -config: - plugin_type: other - entry_point: main.yml - roles_path: ../roles/ -subparsers: - ansible-role-collect-logs: - description: An Ansible role for aggregating logs from different nodes. - include_groups: ["Ansible options", "Common options"] - groups: - - title: Collecting - options: - openstack_nodes: - type: Value - help: | - OpenStack nodes ansible-role-collect-logs will be executed on. - default: all:!localhost - artcl_report_server_key: - type: Value - help: | - A path to a key for an access to the report server. - artcl_rsync_path: - type: Value - help: | - Specifies a server hostname and a path where the artifacts will - be stored. Example: username@hostname:/path/to/the/dest - artcl_collect_list: - type: ListValue - help: | - A list of files and directories to gather from the target. - Directories are collected recursively and need to end with a “/” - to get collected. Should be specified as a YaML list, e.g.: - infrared ansible-role-collect-logs \ - --artcl_collect_list /etc/nova/,/home/stack/*.log,/var/log/ - artcl_collect_list_append: - type: ListValue - help: | - A list of files and directories to be appended in the default - list. This is useful for users that want to keep the original - list and just add more relevant paths. - artcl_exclude_list: - type: ListValue - help: | - A list of files and directories to exclude from collecting. This - list is passed to rsync as an exclude filter and it takes - precedence over the collection list. For details see the - “FILTER RULES” topic in the rsync man page. - artcl_exclude_list_append: - type: ListValue - help: | - A list of files and directories to be appended in the default - exclude list. This is useful for users that want to keep the - original list and just add more relevant paths. - artcl_commands: - type: NestedDict - help: | - Collect commands executed by the role. Keep the dict sorted. - Example: --artcl_commands ..cmd= - Note: group types to be collected are defined by collect_log_types - Example2: --artcl_commands system.cpuinfo.cmd="cat /proc/cpuinfo" - artcl_commands_extras: - type: NestedDict - help: | - Commands to be executed, combined with artcl_commands. - artcl_find_maxdepth: - type: Value - help: | - Max depth passed to find via -maxdepth arg, it makes effect only - when artcl_rsync_collect_list is set to False. - default: 4 - artcl_find_max_size: - type: Value - help: | - Max file size passed to find via -size arg, it makes effect only - when artcl_rsync_collect_list is set to False. - default: 256 - artcl_rsync_collect_list: - type: Bool - help: | - If true, artcl_collect_list is given to rsync to collect - logs, otherwise it is given to find to create a list of files - to collect for rsync. - default: True - local_working_dir: - type: Value - help: | - Destination on the executor host where the logs will be collected - to. - default: /tmp/collect_logs - artcl_collect_dir: - type: Value - help: | - A directory on the executor host within local_working_dir where - the logs should be gathered, without a trailing slash. - artcl_build_url: - type: Value - help: | - Build URL used for fetching console.log - artcl_gzip: - type: Bool - help: | - When true, gathered files are gzipped one by one - in artcl_collect_dir, when false, a tar.gz file will contain all - the logs. 
- collect_log_types: - type: ListValue - help: | - A list of which type of logs will be collected, such as openstack - logs, network logs, system logs, etc. Acceptable values are - system, monitoring, network, openstack and container. - artcl_collect_sosreport: - type: Bool - help: | - If true, create and collect a sosreport for each host. - - title: Publishing - options: - artcl_publish: - type: Bool - help: | - If true, the role will attempt to rsync logs to the target - specified by artcl_rsync_url. Uses BUILD_URL, BUILD_TAG vars from - the environment (set during a Jenkins job run) and requires the - next to variables to be set. - artcl_txt_rename: - type: Bool - help: | - Rename compressed text based files to end with txt.gz extension. - artcl_readme_path: - type: Value - help: | - Path to a readme file to be copied to base directory, containing - information regarding the logs. - artcl_readme_file: - type: Value - help: | - Name of the readme file - artcl_publish_timeout: - type: Value - help: | - The maximum seconds the role can spend uploading the logs. - artcl_use_rsync: - type: Bool - help: | - If true, the role will use rsync to upload the logs. - artcl_rsync_use_daemon: - type: Bool - help: | - If true, the role will use rsync daemon instead of ssh to - connect. - artcl_rsync_url: - type: Value - help: | - rsync target for uploading the logs. The localhost needs to have - passwordless authentication to the target or the PROVISIONER_KEY - var specificed in the environment. - artcl_use_swift: - type: Bool - help: | - If true, the role will use swift object storage to publish - the logs. - artcl_swift_auth_url: - type: Value - help: | - The OpenStack auth URL for Swift. - artcl_swift_username: - type: Value - help: | - OpenStack username for Swift. - artcl_swift_password: - type: Value - help: | - Password for the Swift user. - artcl_swift_tenant_name: - type: Value - help: | - OpenStack tenant name for Swift. - artcl_swift_container: - type: Value - help: | - The name of the Swift container to use. - artcl_swift_delete_after: - type: Value - help: | - The number of seconds after which Swift will remove the uploaded - objects. - artcl_artifact_url: - type: Value - help: | - An HTTP URL at which the uploaded logs will be accessible after - upload. - influxdb_create_data_file: - type: Bool - help: | - Upload data to the InfluxDB database. - default: False - ara_enabled: - type: Bool - help: | - If true, the role will generate ara reports. - ara_generate_html: - type: Bool - help: | - Whether to generate ara html or not. - default: False - remote_user: - type: Value - help: | - Name of a remote user under which the tasks will be executed. 
- default: stack - disable_artifacts_cleanup: - type: Bool - help: | - Determines whether to keep collected files - default: False diff --git a/infrared_plugin/roles b/infrared_plugin/roles deleted file mode 120000 index d8c4472..0000000 --- a/infrared_plugin/roles +++ /dev/null @@ -1 +0,0 @@ -../roles \ No newline at end of file diff --git a/plugins/module_utils/test_utils.py b/plugins/module_utils/test_utils.py deleted file mode 100644 index 8dec610..0000000 --- a/plugins/module_utils/test_utils.py +++ /dev/null @@ -1,59 +0,0 @@ -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -import json - -from ansible.module_utils import basic -from ansible.module_utils._text import to_bytes - -try: - from unittest.mock import patch -except ImportError: - from mock import patch # old pythons - - -def set_module_args(**args): - if "_ansible_remote_tmp" not in args: - args["_ansible_remote_tmp"] = "/tmp" - if "_ansible_keep_remote_files" not in args: - args["_ansible_keep_remote_files"] = False - - args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) - basic._ANSIBLE_ARGS = to_bytes(args) - - -class AnsibleExitJson(Exception): - pass - - -class AnsibleFailJson(Exception): - pass - - -def exit_json(*args, **kwargs): - if "changed" not in kwargs: - kwargs["changed"] = False - raise AnsibleExitJson(kwargs) - - -def fail_json(*args, **kwargs): - kwargs["failed"] = True - raise AnsibleFailJson(kwargs) - - -class ModuleTestCase: - def setup_method(self): - self.mock_module = patch.multiple( - basic.AnsibleModule, - exit_json=exit_json, - fail_json=fail_json, - ) - self.mock_module.start() - - def teardown_method(self): - self.mock_module.stop() - - -def generate_name(test_case): - return test_case["name"] diff --git a/plugins/modules/ara_graphite.py b/plugins/modules/ara_graphite.py deleted file mode 100644 index 888167a..0000000 --- a/plugins/modules/ara_graphite.py +++ /dev/null @@ -1,190 +0,0 @@ -#!/usr/bin/python -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -__metaclass__ = type -DOCUMENTATION = """ ---- -module: ara_graphite -version_added: "1.0.0" -author: Red Hat (@RedHatOfficial) -short_description: Send ARA stats to graphite -description: > - Python ansible module to send ARA stats to graphite -options: - graphite_host: - description: > - The hostname of the Graphite server with optional port: - graphite.example.com:2004. The default port is 2003 - required: True - type: str - graphite_prefix: - description: - - TBD - type: str - graphite_port: - description: - - TBD - default: 2003 - type: int - ara_mapping: - description: > - Mapping task names to Graphite paths - required: True - type: dict - ara_data: - description: > - List of ARA results: ara result list --all -f json - required: True - type: str - only_successful_tasks: - description: > - Whether to send only successful tasks, ignoring skipped and failed, - by default True. 
- required: False - default: True - type: bool -""" - -EXAMPLES = """ -- name: Get ARA json data - shell: "{{ local_working_dir }}/bin/ara task list --all -f json" - register: ara_data - -- ara_graphite: - graphite_host: 10.2.2.2 - ara_data: "{{ ara_task_output.stdout }}" - ara_mapping: - - "Name of task that deploys overcloud": overcloud.deploy.seconds -""" - -import ast # noqa: E402 -import datetime # noqa: E402 -import socket # noqa: E402 - - -def stamp(x): - """Convert ISO timestamp to Unix timestamp - - :param x: string with timestamp - :return: string with Unix timestamp - """ - return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").strftime("%s") - - -def task_length(x): - """Calculate task length in seconds from "%H:%M:%S" format - - :param x: datetime string - :return: number of seconds spent for task - """ - t = datetime.datetime.strptime(x, "%H:%M:%S") - return datetime.timedelta( - hours=t.hour, minutes=t.minute, seconds=t.second - ).total_seconds() - - -def translate(mapping, json_data, only_ok): - """Create data to send to Graphite server in format: - - GraphitePath Timestamp TaskDuration - GraphitePath is taken from mapping dictionary according to task name. - - :param mapping: dictionary of mapping task names to graphite paths - :param json_data: JSON data with tasks and times - :return: list of graphite data - """ - items = [] - data = ast.literal_eval(json_data) - for task in data: - if not only_ok or (only_ok and task["Status"] in ["changed", "ok"]): - if task["Name"] in mapping: - timestamp, duration = stamp(task["Time Start"]), task_length( - task["Duration"] - ) - items.append([mapping[task["Name"]], duration, timestamp]) - return items - - -def send(data, gr_host, gr_port, prefix): - """Actual sending of data to Graphite server via network - - :param data: list of items to send to Graphite - :param gr_host: Graphite host (with optional port) - :param prefix: prefix to append before Graphite path - :return: True if sent successfully, otherwise False - """ - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.settimeout(3.0) - try: - s.connect((gr_host, gr_port)) - except Exception as exc: - return False, str(exc) - for content in data: - s.send(prefix + " ".join([str(i) for i in content]) + "\n") - s.close() - return True, "" - - -def send_stats(gr_host, gr_port, mapping, json_data, prefix, only_ok): - """Send ARA statistics to Graphite server - - :param gr_host: Graphite host (with optional port) - :param mapping: dictionary of mapping task names to graphite paths - :param json_data: JSON data with tasks and times - :param prefix: prefix to append before Graphite path - :return: JSON ansible result - """ - data2send = translate(mapping, json_data, only_ok) - response, reason = send(data2send, gr_host, gr_port, prefix) - if not response: - return { - "changed": False, - "failed": True, - "graphite_host": gr_host, - "msg": "Can't connect to Graphite: %s" % reason, - } - return { - "changed": True, - "graphite_host": gr_host, - "sent_data": data2send, - } - - -def main(): - from ansible.module_utils.basic import AnsibleModule - - module = AnsibleModule( - argument_spec=dict( - graphite_host=dict(required=True, type="str"), - graphite_port=dict(required=False, type="int", default=2003), - ara_mapping=dict(required=True, type="dict"), - ara_data=dict(required=True, type="str"), - graphite_prefix=dict(required=False, type="str", default=""), - only_successful_tasks=dict(required=False, type="bool", default=True), - ) - ) - result = send_stats( - 
module.params["graphite_host"], - module.params["graphite_port"], - module.params["ara_mapping"], - module.params["ara_data"], - module.params["graphite_prefix"], - module.params["only_successful_tasks"], - ) - module.exit_json(**result) - - -if __name__ == "__main__": - main() diff --git a/plugins/modules/ara_influxdb.py b/plugins/modules/ara_influxdb.py deleted file mode 100644 index 178cbbe..0000000 --- a/plugins/modules/ara_influxdb.py +++ /dev/null @@ -1,593 +0,0 @@ -#!/usr/bin/python -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -__metaclass__ = type -DOCUMENTATION = """ ---- -module: ara_influxdb -version_added: "1.0.0" -author: Red Hat (@RedHatOfficial) -short_description: Send ARA stats to InfluxDB -description: | - Python ansible module to send ARA stats to InfluxDB timeseries database -options: - influxdb_url: - description: | - The URL of HTTP API of InfluxDB server: - for example https://influxdb.example.com - required: True - type: str - influxdb_port: - description: | - The port of HTTP API of InfluxDB server, by default is 8086 - required: True - type: int - influxdb_user: - description: | - User for authentication to InfluxDB server - required: False - type: str - influxdb_password: - description: | - Password for authentication to InfluxDB server - required: False - type: str - influxdb_db: - description: | - Database name in InfluxDB server for sending data to it - required: True - type: str - measurement: - description: | - Name of Influx measurement in database - required: True - type: str - data_file: - description: | - Path to file to save InfluxDB data in it - required: True - type: str - ara_data: - description: | - List of ARA results: ara result list --all -f json - required: True - type: str - only_successful_tasks: - description: | - Whether to send only successful tasks, ignoring skipped and failed, - by default True. - required: True - type: bool - mapped_fields: - description: | - Whether to use configured static map of fields and tasks, - by default True. - required: False - default: True - type: bool - standard_fields: - description: > - Whether to send standard fields of each job, i.e. times, - by default True. - required: False - default: True - type: bool - longest_tasks: - description: > - Whether to to print only longest tasks and how many, - by default 0. 
- required: False - type: int -""" - -EXAMPLES = """ -- name: Get ARA json data - shell: "{{ local_working_dir }}/bin/ara result list --all -f json" - register: ara_data - -- name: Collect and send data to InfluxDB - ara_influxdb: - influxdb_url: https://influxdb.example.com - influxdb_port: 8086 - influxdb_user: db_user - influxdb_password: db_password - influxdb_db: db_name - ara_data: "{{ ara_data.stdout }}" - measurement: test - data_file: /tmp/test_data - only_successful_tasks: true - mapped_fields: false - standard_fields: false - longest_tasks: 15 - when: ara_data.stdout != "[]" -""" - -import ast # noqa pylint: disable=C0413 -import datetime # noqa pylint: disable=C0413 -import json # noqa pylint: disable=C0413 -import os # noqa pylint: disable=C0413 -import re # noqa pylint: disable=C0413 - -SCHEME = "{measure},{tags} {fields} {timestamp}" - -CUSTOM_MAP = { - "undercloud_install": ["undercloud-deploy : Install the undercloud"], - "prepare_images": [ - "overcloud-prep-images : Prepare the overcloud images for deploy" - ], - "images_update": [ - "modify-image : Convert image", - "modify-image : Run script on image", - "modify-image : Close qcow2 image", - ], - "images_build": ["build-images : run the image build script (direct)"], - "containers_prepare": [ - "overcloud-prep-containers : Prepare for the containerized deployment" - ], - "overcloud_deploy": ["overcloud-deploy : Deploy the overcloud"], - "pingtest": ["validate-simple : Validate the overcloud"], - "tempest_run": ["validate-tempest : Execute tempest"], - "undercloud_reinstall": [ - "validate-undercloud : Reinstall the undercloud to check idempotency" - ], - "overcloud_delete": [ - "overcloud-delete : check for delete command to complete or fail" - ], - "overcloud_upgrade": [ - "overcloud-upgrade : Upgrade the overcloud", - "tripleo-upgrade : run docker upgrade converge step", - "tripleo-upgrade : run docker upgrade composable step", - ], - "undercloud_upgrade": ["tripleo-upgrade : upgrade undercloud"], -} - - -class InfluxStandardTags(object): - - """InfluxStandardTags contains: - - calculation of standard job describing parameters as: - * release - * nodepool provider cloud - * zuul pipeline name - * toci_jobtype - and rendering them in tags template - - """ - - def branch(self): - return os.environ.get("STABLE_RELEASE") or "master" - - def cloud(self): - return os.environ.get("NODEPOOL_PROVIDER", "null") - - def pipeline(self): - if os.environ.get("ZUUL_PIPELINE"): - if "check" in os.environ["ZUUL_PIPELINE"]: - return "check" - elif "gate" in os.environ["ZUUL_PIPELINE"]: - return "gate" - elif "periodic" in os.environ["ZUUL_PIPELINE"]: - return "periodic" - return "null" - - def toci_jobtype(self): - return os.environ.get("TOCI_JOBTYPE", "null") - - def render(self): - return ("branch=%s," "cloud=%s," "pipeline=%s," "toci_jobtype=%s") % ( - self.branch(), - self.cloud(), - self.pipeline(), - self.toci_jobtype(), - ) - - -class InfluxStandardFields(object): - """InfluxStandardFields contains: - - calculation of time of job steps as: - * whole job duration - * testing environment preparement - * quickstart files and environment preparement - * zuul host preparement - and rendering them in template - - """ - - def job_duration(self): - if os.environ.get("START_JOB_TIME"): - return int(datetime.datetime.utcnow().strftime("%s")) - int( - os.environ.get("START_JOB_TIME") - ) - return 0 - - def logs_size(self): - # not implemented - return 0 - - def timestamp(self): - return datetime.datetime.utcnow().strftime("%s") - - def 
testenv_prepare(self): - return os.environ.get("STATS_TESTENV", 0) - - def quickstart_prepare(self): - return os.environ.get("STATS_OOOQ", 0) - - def zuul_host_prepare(self): - - if os.environ.get("DEVSTACK_GATE_TIMEOUT") and os.environ.get( # noqa: W504 - "REMAINING_TIME" - ): - return ( - int(os.environ["DEVSTACK_GATE_TIMEOUT"]) - - int(os.environ["REMAINING_TIME"]) - ) * 60 - return 0 - - def render(self): - return ( - "job_duration=%d," - "logs_size=%d," - "testenv_prepare=%s," - "quickstart_prepare=%s," - "zuul_host_prepare=%d," - ) % ( - self.job_duration(), - self.logs_size(), - self.testenv_prepare(), - self.quickstart_prepare(), - self.zuul_host_prepare(), - ) - - -class InfluxConfiguredFields(object): - """InfluxConfiguredFields contains calculation: - - * whole job duration - * testing environment preparement - * quickstart files and environment preparement - * zuul host preparement - and rendering them in template - """ - - def __init__(self, match_map, json_data, only_ok=True): - """Set up data for configured field - - :param match_map {dict} -- Map of tasks from ansible playbook to - names of data fields in influxDB. - :param json_data: {dict} -- JSON data generated by ARA - :param only_ok=True: {bool} -- to count only passed tasks - """ - self.map = match_map - self.only_ok = only_ok - self.data = json_data - - def task_maps(self): - times_dict = tasks_times_dict(self.data, self.only_ok) - tasks = {} - for i in self.map: - tasks[i] = sum([int(times_dict.get(k, 0)) for k in self.map[i]]) - return tasks - - def render(self): - tasks = self.task_maps() - result = "" - for task, timest in tasks.items(): - result += "%s=%d," % (task, timest) - return result - - -class InfluxLongestFields(object): - """InfluxLongestFields runs calculation of: - - tasks that took the longest time. - The tasks could be from undercloud or overcloud playbooks. 
- - """ - - def __init__(self, json_data, only_ok=True, top=15): - """Constructor for InfluxLongestFields - - :param json_data: {dict} -- JSON data generated by ARA - :param only_ok=True: {bool} -- to count only passed tasks - :param top=15: {int} -- how many tasks to send to DB - """ - self.top = top - self.only_ok = only_ok - self.data = json_data - - def collect_tasks(self): - tasks_dict = tasks_times_dict(self.data, self.only_ok) - return sorted( - [[k, v] for k, v in tasks_dict.items()], key=lambda x: x[1], reverse=True - )[: self.top] - - def translate_names(self, names): - for i in names: - i[0] = re.sub( - r"[^0-9A-z\-_]+", "", i[0].replace(":", "__").replace(" ", "_") - ) - i[1] = int(i[1]) - return names - - def render(self): - result = "" - for i in self.translate_names(self.collect_tasks()): - result += "{0}={1},".format(*i) - return result - - -class SovaFields(object): - """SovaFields provides Sova calculated failure reasons.""" - - def __init__(self, sova_file): - """Constructor for SovaFields - - :param sova_file: {str} -- path to 'failures_file' of Sova - """ - self.sova_file = sova_file - - def parse_sova_file(self): - if not os.path.exists(self.sova_file): - return "" - with open(self.sova_file) as f: - text = f.readlines() - reason = text[0] - reason_tag = text[1].split("Reason: ")[1] - return reason.strip(), reason_tag.strip() - - def render(self): - scheme = 'sova_reason="%s",sova_tag="%s",' - res = self.parse_sova_file() - if not res: - return scheme % ("", "") - return scheme % (res[0], res[1]) - - -def tasks_times_dict(tasks, only_ok=True): - times_dict = {} - for task in tasks: - if not only_ok or task["Status"] in ["changed", "ok"]: - name = task["Name"] - if name in times_dict: - times_dict[name].append(task["Duration"]) - else: - times_dict[name] = [task["Duration"]] - # because of some tasks are executed multiple times we need to count - # all of them and make summary of all durations - for i in times_dict: - times_dict[i] = sum([task_length(t) for t in times_dict[i]]) - return times_dict - - -def task_length(x): - """Calculate task length in seconds from "%H:%M:%S" format - - Arguments: - x {string} -- a timestamp - - Returns: - int -- total seconds for the task - """ - - t = datetime.datetime.strptime(x, "%H:%M:%S") - return datetime.timedelta( - hours=t.hour, minutes=t.minute, seconds=t.second - ).total_seconds() - - -def translate( - measure, - json_data, - only_ok, - mapped_fields=True, - standard_fields=True, - longest_tasks=0, - data_file=None, -): - """Create data to send to InfluxDB server in format SCHEME - - Fields keys are taken from ARA data according to task names. 
- - :param measure: name of InfluxDB measurement - :param json_data: JSON data with tasks and times - :param: only_ok: boolean, where to count only successful tasks - :return: full InfluxDB scheme - """ - data = ast.literal_eval(json_data) - data = json.loads(data) - tags = InfluxStandardTags() - std_fields = InfluxStandardFields() - map_fields = InfluxConfiguredFields( - match_map=CUSTOM_MAP, json_data=data, only_ok=only_ok - ) - longest_fields = InfluxLongestFields( - json_data=data, top=longest_tasks, only_ok=only_ok - ) - fields = "" - if standard_fields: - fields += std_fields.render() - if mapped_fields: - fields += map_fields.render() - if longest_tasks: - fields += longest_fields.render() - if data_file: - sova_fields = SovaFields( - os.path.join(os.path.dirname(data_file), "failures_file") - ) - fields += sova_fields.render() - fields = fields.rstrip(",") - result = SCHEME.format( - measure=measure, - tags=tags.render(), - fields=fields, - timestamp=std_fields.timestamp(), - ) - - return result - - -def create_file_with_data(data, path): - """Create a file with InfluxDB data to send - - :param data: data to write - :param path: path of the file - :return: - """ - with open(path, "a") as f: - f.write(data + "\n") - - -def send(file_path, in_url, in_port, in_user, in_pass, in_db): - """Actual sending of data to InfluxDB server via network - - :param file_path: path to file with data to send - :param in_url: InfluxDB URL - :param in_port: InfluxDB port - :param in_user: InfluxDB user - :param in_pass: InfluxDB password - :param in_db: InfluxDB database name - :return: True if sent successfully, otherwise False - """ - import requests # noqa pylint: disable=C0413 - from requests.auth import HTTPBasicAuth # noqa pylint: disable=C0413 - - url = in_url.rstrip("/") - if in_port != 80: - url += ":%d" % in_port - url += "/write" - params = {"db": in_db, "precision": "s"} - if in_user: - if not in_pass: - if os.environ.get("INFLUXDB_PASSWORD"): - with open(os.environ["INFLUXDB_PASSWORD"]) as f: - in_pass = f.read().strip() - else: - return False, "InfluxDB password was not provided!" - auth = HTTPBasicAuth(in_user, in_pass) - else: - auth = None - with open(file_path, "rb") as payload: - req = requests.post(url, params=params, data=payload, auth=auth, verify=False) - if not req or req.status_code != 204: - return False, "HTTP: %s\nResponse: %s" % (req.status_code, req.content) - return True, "" - - -def send_stats( - in_url, - in_port, - in_user, - in_pass, - in_db, - json_data, - measure, - data_file, - only_ok, - mapped_fields=True, - standard_fields=True, - longest_tasks=0, -): - """Send ARA statistics to InfluxDB server - - :param in_url: InfluxDB URL - :param in_port: InfluxDB port - :param in_user: InfluxDB user - :param in_pass: InfluxDB password - :param in_db: InfluxDB database name - :param json_data: JSON data with tasks and times from ARA - :param measure: InfluxDB measurement name - :param data_file: path to file with data to send - :param: only_ok: boolean, where to count only successful tasks - :param: mapped_fields: if to use configured map of fields and tasks - :param: standard_fields: if to send standard fields of each job, i.e. 
times - :param: longest_tasks: if to print only longest tasks and how many - :return: JSON ansible result - """ - data2send = translate( - measure, - json_data, - only_ok, - mapped_fields, - standard_fields, - longest_tasks, - data_file, - ) - create_file_with_data(data2send, data_file) - if in_url: - response, reason = send(data_file, in_url, in_port, in_user, in_pass, in_db) - if not response: - return { - "changed": False, - "failed": True, - "influxdb_url": in_url, - "msg": reason, - } - return { - "changed": True, - "influxdb_url": in_url, - "sent_data": data2send, - } - else: - return { - "changed": True, - "data_file": data_file, - "sent_data": data2send, - } - - -def main(): - module = AnsibleModule( # noqa - argument_spec=dict( - influxdb_url=dict(required=True, type="str"), - influxdb_port=dict(required=True, type="int"), - influxdb_user=dict(required=False, type="str", default=None), - influxdb_password=dict( - required=False, type="str", default=None, no_log=True - ), - influxdb_db=dict(required=True, type="str"), - ara_data=dict(required=True, type="str"), - measurement=dict(required=True, type="str"), - data_file=dict(required=True, type="str"), - only_successful_tasks=dict(required=True, type="bool"), - mapped_fields=dict(default=True, type="bool"), - standard_fields=dict(default=True, type="bool"), - longest_tasks=dict(default=0, type="int"), - ) - ) - result = send_stats( - module.params["influxdb_url"], - module.params["influxdb_port"], - module.params["influxdb_user"], - module.params["influxdb_password"], - module.params["influxdb_db"], - module.params["ara_data"], - module.params["measurement"], - module.params["data_file"], - module.params["only_successful_tasks"], - module.params["mapped_fields"], - module.params["standard_fields"], - module.params["longest_tasks"], - ) - module.exit_json(**result) - - -# pylint: disable=W0621,W0622,W0614,W0401,C0413 -from ansible.module_utils.basic import * # noqa - -if __name__ == "__main__": - main() diff --git a/plugins/modules/flatten_nested_dict.py b/plugins/modules/flatten_nested_dict.py deleted file mode 100644 index 85bdd2d..0000000 --- a/plugins/modules/flatten_nested_dict.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - - -ANSIBLE_METADATA = { - "metadata_version": "0.1", - "status": ["preview"], - "supported_by": "community", -} - -DOCUMENTATION = """ -module: flatten_nested_dict -author: Red Hat (@RedHatOfficial) -version_added: '2.7.0' -short_description: Flattens a nested dictionary into a list -notes: [] -description: - - Flattens the commands nested dictionary into a list of commands. 
-options: - data: - description: - - Nested dictionary - required: True - type: dict -""" -EXAMPLES = """ -- name: Determine commands to run - flatten_nested_dict: - data: - system: - cmd: df -""" -RETURN = """ -data: - description: Commands to be executed - returned: success - type: list - sample: - - 'cmd': 'df' - 'capture_file': '/var/log/extra/df.txt' - 'name': 'df' - 'group': 'system' -""" - -from ansible.module_utils.basic import AnsibleModule # noqa: E402 - - -def main(): - result = {"data": [], "changed": False} - module = AnsibleModule(argument_spec={"data": {"type": "dict", "required": True}}) - try: - - for group, commands in module.params["data"].items(): - for cmd_name, cmd_dict in commands.items(): - cmd_dict["name"] = cmd_name - cmd_dict["group"] = group - result["data"].append(cmd_dict) - - except Exception as e: - module.fail_json(msg=str(e)) - - module.exit_json(**result) - - -if __name__ == "__main__": - main() diff --git a/plugins/modules/sova.py b/plugins/modules/sova.py deleted file mode 100644 index 541b886..0000000 --- a/plugins/modules/sova.py +++ /dev/null @@ -1,301 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - - -ANSIBLE_METADATA = { - "metadata_version": "0.1", - "status": ["preview"], - "supported_by": "community", -} - -DOCUMENTATION = """ -module: sova -author: Sagi Shnaidman (@sshnaidm) -version_added: '2.7.0' -short_description: Parse CI jobs files for known failures -notes: [] -description: - - Parse CI job files and find there known patterns of failures -requirements: - - "Better to use with 'regex' module installed" -options: - files: - description: - - Dictionary of patterns file name and file location. - Patterns are divided by sections in config file, match each section - to the file path on the host, It will search these patterns from this - section in the given file. - required: True - type: dict - result: - description: - - Path to file where to write result message. - type: path - result_file_dir: - description: - - Directory where to create a file with result message and name - of file. For example for pattern 'Overcloud failed on host' will be - created file Overcloud_failed_on_host.log in this directory. - It helps to know what is the reason without opening actually the file. 
- type: path - config: - description: config - type: dict -""" -EXAMPLES = """ -- name: Run sova task - sova: - files: - console: /var/log/job-output.txt.gz - errors: /var/log/errors.txt.gz - "ironic-conductor": /var/log/ironic-conductor.log.txt.gz - syslog: /var/log/journal.txt.gz - logstash: /var/log/logstash.txt.gz - bmc: /var/log/bmc-console.log - result: /home/zuul/result_file - result_file_dir: /home/zuul/workspace/logs/ -""" -RETURN = """ -processed_files: - description: - - Files which have been processed by module - returned: if changed - type: list - sample: [ - "/tmp/var/log/job-output.txt.gz", - "/tmp/var/log/errors.txt.gz", - "/tmp/var/log/ironic-conductor.log.txt.gz" - ] -message: - description: - - Text with all messages about failures - returned: if changed - type: list - sample: 'Overcloud stack: FAILED.' -tags: - description: - - Tags of patterns which were found in files - returned: if changed - type: list - sample: ["info"] -file_name_written: - description: - - Path of file which written with message as filename - returned: if changed - type: str - sample: '/var/log/_Overcloud_stack__FAILED.log' -file_written: - description: - - Path of file where written result message and reason. - returned: if changed - type: str - sample: '/var/log/result_file' -""" - -import gzip # noqa: E402 -import logging # noqa: E402 -import os # noqa: E402 -from copy import deepcopy # noqa: E402 - -from ansible.module_utils.basic import AnsibleModule # noqa: E402 - -try: - import regex as regex_module -except ImportError: - import re as regex_module - - -__metaclass__ = type -logging.basicConfig( - format=( - "%(asctime)s - %(name)s - %(levelname)s - " - "%(module)s.%(funcName)s:%(lineno)d - %(message)s" - ) -) -log = logging.getLogger("parser") -log.setLevel(logging.ERROR) - - -class Pattern(object): - def __init__(self, data): - self.data = data - self.load_yaml() - self.setup_regexes() - self.setup_patterns() - - def load_yaml(self): - import yaml - - if isinstance(self.data, dict): - self.config = self.data - else: - self.config = yaml.safe_load(self.data) - - def setup_regexes(self): - self.regexes = {} - if self.config: - for regexp in self.config.get("regexes", []): - flags = [] - if regexp.get("multiline"): - flags.append(regex_module.MULTILINE) - self.regexes[regexp.get("name")] = regex_module.compile( - r"{0}".format(regexp.get("regex")), *flags - ) - - def setup_patterns(self): - self._patterns = self.config.get("patterns", {}) - if self._patterns: - for key in self._patterns: - for p in self._patterns[key]: - if p["pattern"] in self.regexes: - p["pattern"] = self.regexes[p["pattern"]] - if p["logstash"] in self.regexes: - p["logstash"] = self.regexes[p["logstash"]] - - @property - def patterns(self): - return self._patterns - - -def line_match(pat, line, exclude=None): - if isinstance(pat, str): - return pat in line - found = pat.search(line) - if not found: - return False - if found.groups(): - if exclude: - if any(i in found.group(1) for i in exclude): - return False - return found.group(1) - return True - - -def parse(text_file, patterns): - ids = [] - msgs = [] - if text_file.split(".")[-1] == "gz": - open_func = gzip.open - else: - open_func = open - with open_func(text_file, "rt") as finput: - text = finput.read() - for p in patterns: - line_matched = line_match(p["pattern"], text, exclude=p.get("exclude")) - if line_matched: - log.debug("Found pattern %s in file %s", repr(p), text_file) - ids.append(p["id"]) - msgs.append(p["msg"].format(line_matched)) - return 
list(set(ids)), list(set(msgs)) - - -def format_msg_filename(text): - for s in ( - " ", - ":", - ".", - "/", - ",", - "'", - ): - text = text.replace(s, "_") - text = text[:100] - return "_" + text.rstrip("_") + ".log" - - -def main(): - - module = AnsibleModule( - argument_spec=dict( - config=dict(type="dict", default={}), - files=dict(type="dict", required=True), - result=dict(type="path"), - result_file_dir=dict(type="path"), - ) - ) - if not module.params["files"]: - module.fail_json(msg="Files for logs parsing have to be provided!") - existing_files = [] - for pattern_file in module.params["files"]: - file_ = module.params["files"][pattern_file] - if os.path.exists(file_): - existing_files.append(file_) - if not existing_files: - results = {"processed_files": [], "changed": False} - module.exit_json(**results) - dict_patterns = deepcopy(module.params["config"]) - - pattern = Pattern(dict_patterns) - PATTERNS = pattern.patterns - for name in module.params["files"]: - if name not in PATTERNS: - module.fail_json( - msg="File name %s wasn't found in [%s]" - % (name, ", ".join(list(PATTERNS.keys()))) - ) - - messages, tags = [], [] - for name, file_ in module.params["files"].items(): - if module.params["files"][name] not in existing_files: - continue - ids, msgs = parse(file_, PATTERNS[name]) - found = [i for i in PATTERNS[name] if i["id"] in ids] - msg_tags = [i["tag"] for i in found if i.get("tag")] - messages += msgs - tags += msg_tags - messages = list(set(messages)) - tags = list(set(tags)) - if "infra" in tags: - reason = "infra" - elif "code" in tags: - reason = "code" - else: - reason = "unknown" - text = " ".join(messages) or "No failure reason found" - file_name = format_msg_filename(text) - result = {"changed": True, "processed_files": existing_files} - result.update({"message": text}) - result.update({"tags": tags}) - if module.params["result"] and messages: - try: - with open(module.params["result"], "w") as f: - f.write(text + "\n") - f.write("Reason: " + reason + "\n") - result.update({"file_written": module.params["result"]}) - except Exception as e: - module.fail_json( - msg="Can't write result to file %s: %s" - % (module.params["result"], str(e)) - ) - if module.params["result_file_dir"]: - log_file = os.path.join(module.params["result_file_dir"], file_name) - try: - with open(log_file, "w") as f: - f.write(text + "\n") - f.write("Reason: " + reason + "\n") - result.update({"file_name_written": log_file}) - except Exception as e: - module.fail_json( - msg="Can't write result to file %s: %s" % (log_file, str(e)) - ) - module.exit_json(**result) - - -if __name__ == "__main__": - main() diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 655bb21..0000000 --- a/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -pbr>=1.6 -ansible-core>=2.11,<2.12 diff --git a/requirements.yml b/requirements.yml deleted file mode 100644 index e30393e..0000000 --- a/requirements.yml +++ /dev/null @@ -1,2 +0,0 @@ -collections: - - name: ansible.posix # needed by synchronize diff --git a/roles/collect_logs/defaults/main.yml b/roles/collect_logs/defaults/main.yml deleted file mode 100644 index fd81789..0000000 --- a/roles/collect_logs/defaults/main.yml +++ /dev/null @@ -1,510 +0,0 @@ ---- -# formally in tq common and tqe extras-common -# zuul does not allow use of lookup env plugin (security), so we cannot use them -# defaults but we can load non-zuul values from vars. 
-local_working_dir: "{{ zuul_work_dir | default('~') }}/.quickstart" -artcl_collect_dir: "{{ local_working_dir }}/collected_files" -working_dir: "/home/{{ undercloud_user }}" -undercloud_user: stack - - -artcl_build_tag: "{{ zuul.build | default('') }}" # overriden by vars/unsecure.yml - -artcl_collect: true -artcl_collect_list: - - /var/lib/container-config-scripts/ - - /var/lib/heat-config/ - - /var/lib/kolla/config_files - - /var/lib/mistral/ - - /var/lib/nova/instances/*/console.log - - /var/lib/oooq-images/*/*.log - - /var/lib/oooq-images/*/*.sh - - /var/lib/pacemaker/cib/cib* - - /var/lib/pacemaker/pengine/pe-input* - - /var/log/atop* - - /var/log/dmesg.txt - - /var/log/host_info.txt - - /var/log/journal.txt - - /var/log/postci.txt - - /var/log/secure - - /var/log/bootstrap-subnodes.log - - /var/log/unbound.log - - /var/log/{{ ansible_pkg_mgr }}.log - - /var/log/cloud-init*.log - - /var/log/aodh/ - - /var/log/audit/ - - /var/log/barbican/ - - /var/log/ceilometer/ - - /var/log/ceph/ - - /var/log/cinder/ - - /var/log/cloudkitty/ - - /var/log/cluster/ - - /var/log/config-data/ - - /var/log/congress/ - - /var/log/containers/ - - /var/log/deployed-server-enable-ssh-admin.log - - /var/log/deployed-server-os-collect-config.log - - /var/log/designate/ - - /var/log/dmesg/ - - /var/log/extra/ - - /var/log/ec2api/ - - /var/log/glance/ - - /var/log/gnocchi/ - - /var/log/heat/ - - /var/log/heat-launcher/ - - /var/log/horizon/ - - /var/log/httpd/ - - /var/log/ironic/ - - /var/log/ironic-inspector/ - - /var/log/libvirt/ - - /var/log/keystone/ - - /var/log/manila/ - - /var/log/mariadb/ - - /var/log/mistral/ - - /var/log/monasca/ - - /var/log/murano/ - - /var/log/neutron/ - - /var/log/nova/ - - /var/log/novajoin/ - - /var/log/octavia/ - - /var/log/openvswitch/ - - /var/log/ovn/ - - /var/log/pacemaker/ - - /var/log/panko/ - - /var/log/qdr/ - - /var/log/rabbitmq/ - - /var/log/redis/ - - /var/log/sahara/ - - /var/log/sensu/ - - /var/log/swift/ - - /var/log/tacker/ - - /var/log/tempest/ - - /var/log/trove/ - - /var/log/tripleo-container-image-prepare.*.log - - /var/log/vitrage/ - - /var/log/watcher/ - - /var/log/zaqar/ - - /var/tmp/sosreport* - - /etc/ - - /home/*/undercloud-ansible-* - - /home/*/.instack/install-undercloud.log - - /home/*/*rc - - /home/*/*rc.v3 - - /home/*/*.log - - /home/*/*.json - - /home/*/*.conf - - /home/*/*.yml - - /home/*/*.yaml - - /home/*/*.sh - - /home/*/*.rst - - /home/*/*.pem - - /home/*/network-environment.yaml - - /home/*/skip_file - - /home/*/*.subunit - - /home/*/tempest/*.xml - - /home/*/tempest/*.html - - /home/*/tempest/*.log - - /home/*/tempest/etc/*.conf - - /home/*/tempest/*.subunit - - /home/*/tempest/*.json - - /home/*/tripleo-heat-installer-templates/ - - /home/*/local_tht/ - - /home/*/gating_repo.tar.gz - - /home/*/browbeat/ - - /usr/share/openstack-tripleo-heat-templates/ - - /home/*/overcloud-deploy - - /home/*/tripleo-heat-templates/ - - /home/*/.ssh/config - - /tmp/tripleoclient* - # The next 2 items are temporary until config-download is executed - # from a Mistral workflow (WIP in Queens) - - /home/*/inventory - - /home/*/inventories - - /home/*/tripleo-config-download/ -artcl_exclude_list: - - /etc/udev/hwdb.bin - - /etc/puppet/modules - - /etc/project-config - - /etc/services - - /etc/selinux/targeted - - /etc/pki/ca-trust/extracted - - /etc/alternatives - - /var/log/journal - - overlay* - - root - - console*primary.log - - anaconda* - -# if true, a rsync filter file is generated for rsync to collect files, -# if false, find is used to generate list of 
files to collect for rsync. -artcl_rsync_collect_list: true -artcl_find_maxdepth: 4 -# size in MBs -artcl_find_max_size: 256 - -# os specific values loaded from tasks/main.yaml -artcl_collect_pkg_list: [] - -# In upstream logs the compression is handled -# by the storage servers themselves and this -# can be false. In other storage servers -# the role must compress files. -artcl_gzip: false - -## publishing related vars -artcl_publish: false -artcl_env: default -artcl_readme_path: "{{ working_dir }}/src/opendev.org/openstack/tripleo-ci/docs/tripleo-quickstart-logs.html" -artcl_readme_file: "{{ artcl_collect_dir }}/README.html" -artcl_txt_rename: false -# give up log upload after 30 minutes -artcl_publish_timeout: 1800 -artcl_artifact_url: "file://{{ local_working_dir }}" -artcl_full_artifact_url: "{{ artcl_artifact_url }}/{{ artcl_build_tag }}/" -artcl_use_rsync: false -artcl_rsync_use_daemon: false -artcl_container_collect_timeout: 1800 # 30 mins - -artcl_use_swift: false -# clean up the logs after 31 days -artcl_swift_delete_after: 2678400 -artcl_swift_container: logs -artcl_use_zuul_swift_upload: false -artcl_zuul_swift_upload_path: /usr/local/bin - -artcl_collect_sosreport: false -artcl_sosreport_options: "--batch" - -# User defined commands to be executed, combined with default ones. -artcl_commands_extras: {} - -# Used to determine which ignore_errors strategy to use. Defaults to true -# but for testing purposes we may want to make it false, to avoid false -# positives. -artcl_ignore_errors: true - -# Implicit commands executed by the role. Keep the dict sorted. -artcl_commands: - system: - cpuinfo: - cmd: | - cat /proc/cpuinfo - echo "" - grep -s -H '' /sys/module/{kvm_intel,kvm_amd}/parameters/nested - capture_file: /var/log/extra/cpuinfo.txt - dmesg: - cmd: dmesg - meminfo: - cmd: cat /proc/meminfo - capture_file: /var/log/extra/meminfo.txt - pcs: - cmd: | - if type pcs &>/dev/null; then - echo "+ pcs status" - pcs status - echo "+ pcs config" - pcs config - echo "+ pcs cluster cib" - pcs cluster cib - fi - pcs_cpu_throttle: - cmd: | - if type pcs &>/dev/null; then - echo "+ high CPU throttling events" - grep throttle_check_thresholds /var/log/pacemaker/pacemaker.log - fi - chrony: - cmd: | - echo "+ chrony tracking" - chronyc tracking - echo "+ chrony sources" - chronyc sources -a -v - echo "+ chrony source stats" - chronyc sourcestats -a -v - echo "+ chrony activity" - chronyc activity - ipa: - cmd: | - if type ipa &>/dev/null; then - echo "+ ipa env" - ipa env - echo "+ ipa config-show" - ipa config-show --all - echo "+ ipa dnsconfig-show" - ipa dnsconfig-show --all - fi - swaps: - cmd: cat /proc/swaps - capture_file: /var/log/extra/swaps.txt - vmstat: - cmd: vmstat -s - ps: - cmd: ps axfo %mem,size,rss,vsz,pid,args - rpm-list: - cmd: rpm -qa | sort -f - package-list-installed: - cmd: "{{ ansible_pkg_mgr }} list installed" - repolist: - cmd: "{{ ansible_pkg_mgr }} repolist -v" - dnf-module-list: - cmd: "{{ ansible_pkg_mgr }} module list" - when: ansible_distribution_major_version|int >= 8 - dnf-module-list-enabled: - cmd: "{{ ansible_pkg_mgr }} module list --enabled" - when: ansible_distribution_major_version|int >= 8 - record_available_packages: - # the timeout is like a fail-safe from collect_logs point of view, - # we encountered an issue when repolist query took several minutes - # which lead to timeouts and unfinished log collections - cmd: | - timeout 120 repoquery -a --qf "%{ui_from_repo} %{name}" | sort - capture_file: /var/log/extra/all_available_packages.txt - 
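-    # Each entry in this dict follows the same shape: the mapping key is the
-    # command name, "cmd" is the shell snippet to run, and the optional
-    # "capture_file" overrides the default capture path of <name>.txt under
-    # /var/log/extra (see the "Run artcl_commands" task in tasks/collect.yml).
-    # Per-entry "when" and "capture_disable" keys are honored as well.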
selinux: - cmd: | - /usr/sbin/sestatus -v - /usr/sbin/sestatus -b - installed_crons: - cmd: | - for user in $(cut -f1 -d':' /etc/passwd); do - echo $user; crontab -u $user -l | grep -v '^$\|^\s*\#\|^\s*PATH' - done - import-delorean: - # used by OSP Release Engineering to import into internal builds - cmd: > - repoquery --disablerepo='*' --enablerepo='delorean' - -a --qf '%{sourcerpm}'|sort -u|sed 's/.src.rpm//g' - import-delorean-deps: - # used by OSP Release Engineering to import into internal builds - cmd: > - repoquery --disablerepo='*' --enablerepo='delorean-*-deps' - -a --qf '%{sourcerpm}'|sort -u|sed 's/.src.rpm//g' - failed_services: - cmd: > - systemctl -t service --failed --no-legend | awk '{print $1}' - | xargs -r -n1 journalctl -u - lsof: - cmd: > - lsof -P -n &> /var/log/extra/lsof.txt - pstree: - cmd: pstree -p - sysctl: - cmd: sysctl -a - haproxy-stats: - cmd: > - pgrep haproxy && \ - test -S /var/lib/haproxy/stats && \ - echo 'show info;show stat;show table' | socat /var/lib/haproxy/stats stdio || \ - echo "No HAProxy or no socket on host" - lsmod: - cmd: lsmod - lspci: - cmd: lspci - pip: - cmd: "{{ ansible_python.executable }} -m pip list" - lvm: - cmd: | - vgs - pvs - lvs - disk: - cmd: | - df -h - shell_variables: - cmd: | - set - services: - cmd: | - systemctl list-units --full --all - systemctl status "*" - selinux_denials: - cmd: > - grep -i denied /var/log/audit/audit* - selinux_consolidated_avc: - cmd: > - /usr/bin/perl /usr/local/bin/consolidate-avc.pl /var/log/extra/selinux_denials.txt - selinux_denials_detail: - cmd: > - sealert -a /var/log/extra/selinux_consolidated_avc.txt - seqfaults: - cmd: > - grep -v ansible-command /var/log/messages | grep segfault - oom-killers.txt: - cmd: | - grep -v ansible-command /var/log/messages | grep oom-killer - delorean-logs: - cmd: > - if [[ -e /home/{{ undercloud_user }}/DLRN/data/repos ]]; then - rm -rf /tmp/delorean_logs && mkdir /tmp/delorean_logs; - find /home/{{ undercloud_user }}/DLRN/data/repos/ -name '*.log' -exec cp --parents \{\} /tmp/delorean_logs/ \; ; - find /home/{{ undercloud_user }}/DLRN/ -name 'projects.ini' -exec cp \{\} /tmp/delorean_logs/ \; ; - find /tmp/delorean_logs -name '*.log' -exec gzip \{\} \; ; - find /tmp/delorean_logs -name '*.log.gz' -exec sh -c 'x="{}"; mv "$x" "${x%.log.gz}.log.txt.gz"' \; ; - rm -rf {{ artcl_collect_dir }}/delorean_logs && mkdir {{ artcl_collect_dir }}/delorean_logs; - mv /tmp/delorean_logs/home/{{ undercloud_user }}/DLRN/data/repos/* {{ artcl_collect_dir }}/delorean_logs/; - mv /tmp/delorean_logs/projects.ini {{ artcl_collect_dir }}/delorean_logs/; - fi - capture_disable: true - journal: - cmd: journalctl --since=-4h --lines=100000 - journal_errors: - cmd: journalctl --since=-4h -p err --output=short-iso - rabbitmq: - cmd: | - if type pcs &>/dev/null; then - echo "+ rabbitmq cookie" - podman exec rabbitmq-bundle-podman-0 rabbitmqctl eval 'erlang:get_cookie().' - echo "+ rabbitmq report" - podman exec rabbitmq-bundle-podman-0 rabbitmqctl report - fi - slow_requests: - cmd: > - echo "+ slow req_ids"; - find /var/log/containers -type f -name '*.log' - -not -path '*/stdouts/*' -not -path '*httpd*' - -exec grep -HE '(time:\s?|held |waited )([3-9][0-9]\.|[0-9]{3,}\.)' {} \;; - echo "+ slow haproxy api calls"; - grep -E '([0-9]+\/){3,}[3-9][0-9]{4,}/?' 
/var/log/containers/haproxy/haproxy.log - monitoring: {} - network: - ovn: - cmd: | - if type ovs-vsctl &>/dev/null; then - function pod_exec() { - timeout -k 10 5 sudo podman exec ovn_controller $@ - } - function sbctl() { - SBDB=$(sudo ovs-vsctl get open . external_ids:ovn-remote | sed -e 's/\"//g'); - pod_exec ovn-sbctl --db=$SBDB $1 - } - function nbctl() { - NBDB=$(sudo ovs-vsctl get open . external_ids:ovn-remote | sed -e 's/\"//g' | sed -e 's/6642/6641/g'); - pod_exec ovn-nbctl --db=$NBDB $1 - } - echo "Output of ovs-vsctl get open . external_ids" - pod_exec ovs-vsctl get open . external_ids - echo "\nOutput of ovn-sbctl show" - sbctl show - echo "\nOutput of ovn-nbctl show" - nbctl show - echo "\nOutput of ovn-sbctl lflow-list" - sbctl lflow-list - fi - openstack: - baremetal_list: - cmd: | - if [[ -e {{ working_dir }}/stackrc ]]; then - source {{ working_dir }}/stackrc - openstack baremetal node list --long - fi - nova_list: - cmd: | - if [[ -e {{ working_dir }}/stackrc ]]; then - source {{ working_dir }}/stackrc - openstack server list --long - fi - openstack-status: - cmd: | - if type openstack-status &> /dev/null; then - . ~/keystonerc_admin - openstack-status - fi - when: "'controller' in inventory_hostname" - container: {} - -# Doc generation specific vars -artcl_gen_docs: false -artcl_create_docs_payload: - included_deployment_scripts: [] - included_static_docs: [] - table_of_contents: [] -artcl_docs_source_dir: "{{ local_working_dir }}/share/ansible/roles/collect-logs/docs/source" -artcl_docs_build_dir: "{{ artcl_collect_dir }}/docs/build" -artcl_verify_sphinx_build: false -artcl_logstash_files: - - /home/*/container_image_build.log - - /home/*/deployed_server_prepare.txt - - /home/*/docker_journalctl.log - - /home/*/failed_deployment_list.log - - /home/*/hostname.sh.log - - /home/*/install_built_repo.log - - /home/*/install_packages.sh.log - - /home/*/install-undercloud.log - - /home/*/ironic-python-agent.log - - /home/*/nova_actions_check.log - - /home/*/overcloud_create_ssl_cert.log - - /home/*/overcloud_custom_tht_script.log - - /home/*/overcloud_delete.log - - /home/*/overcloud_deploy.log - - /home/*/overcloud_deploy_post.log - - /home/*/overcloud_failed_prepare_resources.log - - /home/*/overcloud-full.log - - /home/*/build-err.log - - /home/*/overcloud_image_build.log - - /home/*/overcloud_image_upload.log - - /home/*/overcloud_import_nodes.log - - /home/*/overcloud_introspect.log - - /home/*/overcloud_prep_containers.log - - /home/*/overcloud_prep_images.log - - /home/*/overcloud_prep_network.log - - /home/*/overcloud_validate.log - - /home/*/pkg_mgr_mirror_error.log - - /home/*/pkg_mgr_mirror.log - - /home/*/repo_setup.log - - /home/*/repo_setup.sh.*.log - - /home/*/standalone_deploy.log - - /home/*/tempest.log - - /home/*/undercloud_custom_tht_script.log - - /home/*/undercloud_install.log - - /home/*/undercloud_reinstall.log - - /home/*/*update*.log - - /home/*/*upgrade*.log - - /home/*/upgrade-undercloud-repo.sh.log - - /home/*/validate-overcloud-ipmi-connection.log - - /home/*/vxlan_networking.sh.log - - /home/*/workload_launch.log - - /var/log/bootstrap-subnodes.log - - /var/log/ipaserver-install.log - - /var/log/tripleo-container-image-prepare.log - - /var/log/extra/journal_errors.txt - - /var/log/extra/pcs_cpu_throttle.txt - - /var/log/ceph/cephadm.log - - /var/log/extra/errors.txt - -# ara_graphite_server: graphite.tripleo.org -# if ara_enabled is false, no ara tasks will be executed -ara_enabled: true -ara_overcloud_db_path: 
"/var/lib/mistral/overcloud/ara_overcloud.sqlite" -ara_generate_html: true -ara_only_successful_tasks: true -ara_tasks_map: - "overcloud-deploy : Deploy the overcloud": overcloud.deploy.seconds - "undercloud-deploy : Install the undercloud": undercloud.install.seconds - "build-images : run the image build script (direct)": overcloud.images.seconds - "overcloud-prep-images : Prepare the overcloud images for deploy": prepare_images.seconds - "validate-simple : Validate the overcloud": overcloud.ping_test.seconds - "validate-tempest : Execute tempest": overcloud.tempest.seconds - -collect_log_types: - - system - - monitoring - - network - - openstack - - container - -# This set sova to use the specified json file instead of downloading from -# internet. Right now it is used by molecule, only set this if you do not want -# to use the official sova-config file. -# sova_config_file: "/path/to/sova/json/file - -# InfluxDB module settings -influxdb_only_successful_tasks: true -influxdb_measurement: test -# influxdb_url: -influxdb_port: 8086 -influxdb_user: -influxdb_password: -influxdb_dbname: testdb -influxdb_data_file_path: "{{ local_working_dir }}/influxdb_data" -influxdb_create_data_file: true -odl_extra_log_dir: /var/log/extra/odl -odl_extra_info_log: "{{ odl_extra_log_dir }}/odl_info.log" diff --git a/roles/collect_logs/files/collect-container-logs.sh b/roles/collect_logs/files/collect-container-logs.sh deleted file mode 100755 index 93e1047..0000000 --- a/roles/collect_logs/files/collect-container-logs.sh +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env bash -set -x - -get_engine() { - if ! command -v docker &>/dev/null ; then echo "podman"; exit; fi - if ! command -v podman &>/dev/null ; then echo "docker"; exit; fi - if ! systemctl is-active docker &>/dev/null ; then echo "podman"; exit; fi - if [[ -z $(docker ps --all -q) ]]; then - echo "podman"; - exit; - fi - if [[ -z $(podman ps --all -q) ]]; then - echo "docker"; exit; - fi - echo 'podman' -} - -container_cp() { - ${engine} cp "${1}:${2}" "$3" -}; - -engine=$(get_engine) -echo "${engine} was detected." -BASE_CONTAINER_EXTRA=/var/log/extra/${engine}; -mkdir -p "$BASE_CONTAINER_EXTRA"; -ALL_FILE=$BASE_CONTAINER_EXTRA/${engine}_allinfo.log; - -CONTAINER_INFO_CMDS=( - "${engine} ps --all" - "${engine} images" - "${engine} version" - "${engine} info" - "${engine} volume ls" - "${engine} network ls" -); - -for cmd in "${CONTAINER_INFO_CMDS[@]}"; do - { - echo "+ $cmd" - $cmd - echo "" - echo "" - } >> "$ALL_FILE" -done; - -# Get only failed containers, in a dedicated file -${engine} ps -a | grep -vE ' (IMAGE|Exited \(0\)|Up) ' &>> /var/log/extra/failed_containers.log; - -# Get inspect infos for all containers even the ones not running. -for cont in $(${engine} ps -a | awk '{print $NF}' | grep -v NAMES); do - INFO_DIR=$BASE_CONTAINER_EXTRA/containers/${cont}; - mkdir -p "$INFO_DIR"; - ( - ${engine} inspect "$cont"; - ) &> "$INFO_DIR/${engine}_info.log"; -done; - -# Get other infos for running containers -for cont in $(${engine} ps | awk '{print $NF}' | grep -v NAMES); do - INFO_DIR=$BASE_CONTAINER_EXTRA/containers/${cont}; - mkdir -p "$INFO_DIR"; - ( - if [ "${engine}" = 'docker' ]; then - ${engine} top "$cont" auxw; - # NOTE(cjeanner): `podman top` does not support `ps` options. 
- elif [ "${engine}" = 'podman' ]; then - ${engine} top "$cont"; - fi - ${engine} exec "$cont" vmstat -s - ${engine} exec "$cont" ps axfo %mem,size,rss,vsz,pid,args - ${engine} exec -u root "$cont" bash -c "\$(command -v dnf || command -v yum) list installed"; - ) &>> "$INFO_DIR/${engine}_info.log"; - - container_cp "$cont" /var/lib/kolla/config_files/config.json "$INFO_DIR/config.json"; - - # Capture rpms updated from more recent repos - update_repos="gating delorean-current" - if ls /etc/yum.repos.d/*-component.repo 1> /dev/null 2>&1; then - component_name=$(cat /etc/yum.repos.d/*-component.repo | grep "name=" | sed "s/name=//g") - update_repos="${update_repos} ${component_name}" - fi - echo "*** ${cont} rpm update info ***" >> "$BASE_CONTAINER_EXTRA/container_updates_info.log" - for repo in $update_repos; do - grep "@${repo}" "$INFO_DIR/${engine}_info.log" >> "$BASE_CONTAINER_EXTRA/container_updates_info.log" - done; - - # NOTE(flaper87): This should go away. Services should be - # using a `logs` volume - # NOTE(mandre) Do not copy logs if the containers is bind mounting /var/log directory - if ! ${engine} inspect "$cont" | jq .[0].Mounts[].Source | grep -x '"/var/log[/]*"' >/dev/null 2>&1; then - container_cp "$cont" /var/log "$INFO_DIR/log"; - BIND_DESTS=$(${engine} inspect "$cont" | jq .[0].Mounts[].Destination -r) - for path in $(echo "$BIND_DESTS" | grep "^/var/log" | sed -e "s#^/var/log/##g"); do - rm -rf "$INFO_DIR/log/$path" - echo "Omitting $INFO_DIR/log/$path in $cont because it is mounted from the host" - done - fi; - - # Delete symlinks because they break log collection and are generally - # not useful - find "$INFO_DIR" -type l -delete; -done; - -# NOTE(cjeanner) previous loop cannot have the "-a" flag because of the -# "exec" calls. So we just loop a second time, over ALL containers, -# in order to get all the logs we can. For instance, the previous loop -# would not allow to know why a container is "Exited (1)", preventing -# efficient debugging. -for cont in $(${engine} ps -a | awk '{print $NF}' | grep -v NAMES); do - INFO_DIR=$BASE_CONTAINER_EXTRA/containers/${cont}; - mkdir -p "$INFO_DIR"; - ${engine} logs "$cont" &> "$INFO_DIR/stdout.log"; -done; - -# NOTE(flaper87) Copy contents from the logs volume. We can expect this -# volume to exist in a containerized environment. -# NOTE(cjeanner): Rather test the eXistenZ of the volume, as podman does not -# have such thing -if [ -d /var/lib/docker/volumes/logs/_data ]; then - cp -r /var/lib/docker/volumes/logs/_data "$BASE_CONTAINER_EXTRA/logs"; -fi diff --git a/roles/collect_logs/files/consolidate-avc.pl b/roles/collect_logs/files/consolidate-avc.pl deleted file mode 100644 index 9849d05..0000000 --- a/roles/collect_logs/files/consolidate-avc.pl +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2021 Red Hat Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
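For readers who do not speak Perl: the script below deduplicates AVC denial
lines by their (action, scontext, tcontext, tclass) tuple and prints only the
first occurrence of each. A rough Python equivalent of the same idea (a sketch,
not part of the repo; the regex is copied from the Perl below):

import re
import sys

AVC = re.compile(
    r"type=AVC.* denied \{([\w\s]+)\}.* scontext=([\w:]+)(:[,c0-9]+)?"
    r" tcontext=([\w:,]+) tclass=(\w+) permissive=[01]"
)

def consolidate(lines):
    seen = set()
    for line in lines:
        m = AVC.search(line)
        if m is None:
            continue
        # Same dedup key as the Perl "matcher" string below.
        key = (m.group(1), m.group(2), m.group(4), m.group(5))
        if key not in seen:
            seen.add(key)
            yield line

if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "/var/log/audit/audit.log"
    with open(path) as fh:
        sys.stdout.writelines(consolidate(fh))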
-
-# Usage:
-# Will use /var/log/audit/audit.log as source:
-# ./consolidate-avc.pl
-# Will use another input:
-# ./consolidate-avc.pl /var/log/extras/denials.txt
-
-use strict;
-use warnings;
-use List::Util qw'first';
-
-my $logfile = shift // '/var/log/audit/audit.log';
-
-open(AUDIT_LOG, $logfile) or die("Could not open file '${logfile}'.");
-
-my @denials = ();
-while( my $line = <AUDIT_LOG> ) {
-    my @matched = $line =~ m{type=AVC.* denied \{([\w\s]+)\}.* scontext=([\w:]+)(:[,c0-9]+)? tcontext=([\w:,]+) tclass=([\w]+) permissive=[01]};
-    if (@matched) {
-        my $action = $matched[0];
-        my $scontext = $matched[1];
-        my $tcontext = $matched[3];
-        my $tclass = $matched[4];
-        my $matcher = "${action}_${scontext}_${tcontext}_${tclass}";
-        if (!first {$matcher eq $_} @denials) {
-            push(@denials, $matcher);
-            print $line;
-        }
-    }
-}
-close(AUDIT_LOG);
diff --git a/roles/collect_logs/files/heat-deploy-times.py b/roles/collect_logs/files/heat-deploy-times.py
deleted file mode 100644
index c0ea799..0000000
--- a/roles/collect_logs/files/heat-deploy-times.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 Red Hat Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# Usage: openstack stack event list -f json overcloud | \
-#        heat-deploy-times.py [list of resource names]
-# If no resource names are provided, all of the resources will be output.
-from __future__ import absolute_import, division, print_function
-
-import json
-import sys
-import time
-
-__metaclass__ = type
-
-
-def process_events(all_events, events):
-    times = {}
-    for event in all_events:
-        name = event["resource_name"]
-        status = event["resource_status"]
-        # Older clients return timestamps in the first format, newer ones
-        # append a Z. This way we can handle both formats.
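-        # For illustration (hypothetical values), both variants parse with
-        # the two branches below:
-        #   time.strptime("2016-01-01T00:00:00",  "%Y-%m-%dT%H:%M:%S")
-        #   time.strptime("2016-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ")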
-        try:
-            strptime = time.strptime(event["event_time"], "%Y-%m-%dT%H:%M:%S")
-        except ValueError:
-            strptime = time.strptime(event["event_time"], "%Y-%m-%dT%H:%M:%SZ")
-        etime = time.mktime(strptime)
-        if name in events:
-            if status == "CREATE_IN_PROGRESS":
-                times[name] = {"start": etime, "elapsed": None}
-            elif status == "CREATE_COMPLETE":
-                times[name]["elapsed"] = etime - times[name]["start"]
-    for name, data in sorted(
-        times.items(), key=lambda x: x[1]["elapsed"], reverse=True
-    ):
-        elapsed = "Still in progress"
-        if times[name]["elapsed"] is not None:
-            elapsed = times[name]["elapsed"]
-        print("%s %s" % (name, elapsed))
-
-
-if __name__ == "__main__":
-    stdin = sys.stdin.read()
-    all_events = json.loads(stdin)
-    events = sys.argv[1:]
-    if not events:
-        events = set()
-        for event in all_events:
-            events.add(event["resource_name"])
-    process_events(all_events, events)
diff --git a/roles/collect_logs/library b/roles/collect_logs/library
deleted file mode 120000
index eab9f86..0000000
--- a/roles/collect_logs/library
+++ /dev/null
@@ -1 +0,0 @@
-../../plugins/modules
\ No newline at end of file
diff --git a/roles/collect_logs/meta/main.yml b/roles/collect_logs/meta/main.yml
deleted file mode 100644
index 018a086..0000000
--- a/roles/collect_logs/meta/main.yml
+++ /dev/null
@@ -1,27 +0,0 @@
----
-galaxy_info:
-  author: OpenStack
-  description: An Ansible role for aggregating logs from different nodes
-  company: Red Hat
-  license: Apache 2.0
-  min_ansible_version: 2.5
-
-  platforms:
-    - name: EL
-      versions:
-        - 7
-    - name: Fedora
-      versions:
-        - 28
-
-  galaxy_tags:
-    - docker
-    - buildah
-    - container
-    - openstack
-    - tripleo
-    - packaging
-    - system
-
-dependencies: []
diff --git a/roles/collect_logs/molecule/default/converge.yml b/roles/collect_logs/molecule/default/converge.yml
deleted file mode 100644
index f42892b..0000000
--- a/roles/collect_logs/molecule/default/converge.yml
+++ /dev/null
@@ -1,95 +0,0 @@
----
-# vars are defined in molecule.yml to avoid repeating them for each play
-- name: Create collection dir play
-  hosts: localhost
-  tasks:
-
-    - name: Create temp collection folder
-      file:
-        path: "{{ local_working_dir }}"
-        state: directory
-        mode: 0755
-
-    # Ensure we do not have leftovers from previous runs there; not needed in production.
-    - name: Remove collected_files folder
-      file:
-        path: "{{ local_working_dir }}/collected_files"
-        state: absent
-
-- name: "Converge collect play"
-  hosts: all
-  strategy: free
-  vars:
-    expected:
-      - 'cmd': 'cat /proc/cpuinfo'
-        'capture_file': '/var/log/extra/cpuinfo.txt'
-        'name': 'cpuinfo'
-        'group': 'system'
-      - 'cmd': 'cat /proc/meminfo'
-        'capture_file': '/var/log/extra/meminfo.txt'
-        'name': 'meminfo'
-        'group': 'system'
-      - 'cmd': 'cat /proc/swaps'
-        'capture_file': '/var/log/extra/swaps.txt'
-        'name': 'swaps'
-        'group': 'system'
-  tasks:
-
-    # The brief call uses a very short artcl_commands override, enough to
-    # validate that the combining of the commands works. Later we import the
-    # role with its default artcl_commands in order to test those commands, too.
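The "Verify expected combined commands" assert further below checks the output
of the role's flatten_nested_dict module against "expected" above. A minimal
sketch of the transformation being verified, assuming the module simply walks
the two-level dict (group -> name -> spec) and emits one flat entry per
command:

def flatten(commands):
    # {"system": {"cpuinfo": {"cmd": ...}}} ->
    # [{"cmd": ..., "name": "cpuinfo", "group": "system"}, ...]
    return [
        dict(spec, name=name, group=group)
        for group, cmds in commands.items()
        for name, spec in cmds.items()
    ]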
- - name: "Include collect_logs :: collect (brief)" - vars: - artcl_collect: true - artcl_commands: - system: - cpuinfo: - cmd: cat /proc/cpuinfo - capture_file: /var/log/extra/cpuinfo.txt - meminfo: - cmd: cat /proc/meminfo - capture_file: /var/log/extra/meminfo.txt - swaps: - cmd: cat /proc/swaps - capture_file: /var/log/extra/swaps.txt - include_role: - name: collect_logs - - - name: Verify expected combined commands - assert: - that: artcl_commands_flatten['data'] == expected - fail_msg: | - artcl_commands_flatten had unexpected value {{ artcl_commands_flatten }} - success_msg: artcl_commands_flatten had correct value - - - name: Verify that expected files where collected and they are not empty - delegate_to: localhost - stat: - path: "{{ local_working_dir }}/collected_files/{{ inventory_hostname }}{{ item.capture_file }}" - register: st - failed_when: not st.stat.exists or st.stat.size == 0 - loop: "{{ expected }}" - - - name: "Include ansible-role-collect-logs :: collect (full)" - vars: - artcl_collect: true - include_role: - name: collect_logs - -- name: "Converge publish play" - hosts: localhost - tasks: - - name: "Include ansible-role-collect-logs :: publish" - vars: - # disabling collect here is key for testing because collection needs - # sudo on targeted hosts, which is not available on molecule, zuul and - # some development environments. - artcl_collect: false - artcl_publish: true - include_role: - name: collect_logs - - - name: Display stats - debug: - msg: | - Collected files should be under {{ local_working_dir }}/collected_files diff --git a/roles/collect_logs/molecule/default/molecule.yml b/roles/collect_logs/molecule/default/molecule.yml deleted file mode 100644 index ac63fe0..0000000 --- a/roles/collect_logs/molecule/default/molecule.yml +++ /dev/null @@ -1,44 +0,0 @@ ---- -driver: - name: podman -log: true -platforms: - # - name: centos7 - # image: quay.io/pycontribs/centos:centos7 - # pre_build_image: true - - name: centos8 - # image below is based on official quay.io/centos/centos:stream8 but - # it has python preinstalled on it. 
- image: quay.io/pycontribs/centos:stream8 - pre_build_image: true - # - name: debian - # image: quay.io/pycontribs/python:3.8-slim-buster - # pre_build_image: true -provisioner: - name: ansible - config_options: - defaults: - interpreter_python: auto - forks: 50 - stdout_callback: yaml - timeout: 30 - inventory: - group_vars: - all: - local_working_dir: "{{ lookup('env', 'TOX_ENV_DIR') or '~/.cache' }}/log" - artcl_ignore_errors: false -verifier: - name: ansible -scenario: - # custom because "idempotence" is not yet supported by the role - test_sequence: - - dependency - - cleanup - - destroy - - syntax - - create - - prepare - - converge - - verify - - cleanup - - destroy diff --git a/roles/collect_logs/molecule/default/prepare.yml b/roles/collect_logs/molecule/default/prepare.yml deleted file mode 100644 index 8f71770..0000000 --- a/roles/collect_logs/molecule/default/prepare.yml +++ /dev/null @@ -1,17 +0,0 @@ ---- -- name: Mock some logs - hosts: all - become: true - tasks: - - - name: Generate bootstrap-subnodes.log - copy: - content: | - 2000-00-00T00:00:00.000Z | this is sparta - dest: /var/log/bootstrap-subnodes.log - mode: 0644 - - - name: Remove /var/log/extra/logstash.txt - file: - path: /var/log/extra/logstash.txt - state: absent diff --git a/roles/collect_logs/molecule/default/verify.yml b/roles/collect_logs/molecule/default/verify.yml deleted file mode 100644 index 00c174f..0000000 --- a/roles/collect_logs/molecule/default/verify.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -- name: Validate "/var/log/extra/logstash.txt - hosts: all - tasks: - - - name: Check logstash.txt contains expected data - command: grep -q "this is sparta" /var/log/extra/logstash.txt - changed_when: false diff --git a/roles/collect_logs/molecule/infrared/converge.yml b/roles/collect_logs/molecule/infrared/converge.yml deleted file mode 100644 index 4806769..0000000 --- a/roles/collect_logs/molecule/infrared/converge.yml +++ /dev/null @@ -1,99 +0,0 @@ ---- -- name: Converge - hosts: all - tasks: - - - name: "Download Infrared" - git: - repo: "https://github.com/redhat-openstack/infrared.git" - version: "master" - dest: "{{ infrared_location }}" - update: true - - - name: "Create Infrared venv" - pip: - name: - - pbr - - pip - - setuptools - virtualenv: "{{ infrared_venv }}" - - - name: "Install Infrared" - # this task is always changed, the problem is on pip module side: - # https://github.com/ansible/ansible/issues/28952 - pip: - name: "." 
- virtualenv: "{{ infrared_venv }}" - chdir: "{{ infrared_location }}" - - - name: "Create infrared_plugin dir" - file: - path: "{{ infrared_location }}/infrared_plugin" - state: directory - mode: 0755 - - - name: "Copy ansible-role-collect-logs to test host" - synchronize: - src: "{{ playbook_dir }}/../../../../" - dest: "{{ ansible_env.HOME }}/artcl-src" - rsync_opts: - - "--exclude=.tox" - - - name: "Install ansible-role-collect-logs plugin" - shell: | - export PATH=$PATH:/usr/local/sbin:/usr/sbin - source {{ infrared_venv }}/bin/activate - ir plugin add {{ ansible_env.HOME }}/artcl-src --src-path infrared_plugin - args: - executable: /bin/bash - register: plugin_install_output - changed_when: true - - - name: "Debug: output from plugin installation task main playbook" - debug: - msg: "{{ plugin_install_output }}" - - - name: "Create an empty dummy file" - file: - path: "{{ item }}" - state: touch - mode: 0644 - with_items: - - "/tmp/dummy.log" - - "/tmp/append.log" - - "/tmp/exclude.log" - - "/tmp/exclude_append.log" - - "/tmp/config.conf" - - "/tmp/just_file" - changed_when: false - - - name: "Create a dummy file of 1MB" - command: dd if=/dev/urandom of=/tmp/1MB_dummy.log bs=1MB count=1 - args: - creates: "/tmp/1MB_dummy.log" - changed_when: false - - - name: "Run infrared ansible-role-collect-logs" - become: false - shell: | - export PATH=$PATH:/usr/local/sbin:/usr/sbin - source {{ infrared_venv }}/bin/activate - ir ansible-role-collect-logs --openstack_nodes localhost \ - --collect_log_types "testing" \ - --artcl_collect_dir {{ infrared_location }}/collected_files_test \ - --artcl_collect_list /tmp/*.log,/tmp/just_file \ - --artcl_collect_list_append /tmp/config.conf \ - --artcl_exclude_list /tmp/exclude.log \ - --artcl_exclude_list_append /tmp/exclude_append.log \ - --artcl_gzip true \ - --artcl_rsync_collect_list false \ - --local_working_dir "{{ infrared_location }}" \ - --disable_artifacts_cleanup true - args: - executable: /bin/bash - register: output_collection - changed_when: true - - - name: "Debug collection output" - debug: - msg: "{{ output_collection }}" diff --git a/roles/collect_logs/molecule/infrared/molecule.yml b/roles/collect_logs/molecule/infrared/molecule.yml deleted file mode 100644 index 9b05f43..0000000 --- a/roles/collect_logs/molecule/infrared/molecule.yml +++ /dev/null @@ -1,47 +0,0 @@ ---- -driver: - name: podman -log: true -platforms: - # - name: centos7 - # image: quay.io/pycontribs/centos7 - # pre_build_image: true - - name: centos8 - # image below is based on official quay.io/centos/centos:stream8 but - # it has python preinstalled on it. 
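The converge play above drives the find-based collection path
(--artcl_rsync_collect_list false), where the role selects everything matching
the collect globs, drops the exclude globs, and caps file size with
artcl_find_max_size. A loose Python approximation of that selection (the glob
semantics are an assumption, simplified from the actual find invocation in
tasks/collect.yml; 256 mirrors the artcl_find_max_size default):

import glob
import os

def select_files(collect_globs, exclude_globs, max_size_mb=256):
    # Expand excludes first, then keep regular files under the size cap.
    excluded = {p for g in exclude_globs for p in glob.glob(g)}
    picked = []
    for g in collect_globs:
        for path in glob.glob(g):
            if path in excluded or not os.path.isfile(path):
                continue
            if os.path.getsize(path) < max_size_mb * 1024 * 1024:
                picked.append(path)
    return picked

# e.g. select_files(["/tmp/*.log", "/tmp/just_file"], ["/tmp/exclude.log"])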
- image: quay.io/pycontribs/centos:stream8 - pre_build_image: true - # - name: debian - # image: quay.io/pycontribs/3.8-slim-buster - # pre_build_image: true -provisioner: - name: ansible - config_options: - defaults: - interpreter_python: auto - forks: 50 - stdout_callback: yaml - timeout: 30 - inventory: - group_vars: - all: - local_working_dir: "{{ lookup('env', 'TOX_ENV_DIR') or '~/.cache' }}/log" - artcl_gzip: true - artcl_min_size: 500000 # ~0.5mb - infrared_location: "~/ir" - infrared_venv: "{{ infrared_location }}/.infrared" -verifier: - name: ansible -scenario: - test_sequence: - - dependency - - cleanup - - destroy - - syntax - - create - - converge - - verify - - cleanup - - destroy -markers: - - xfail # https://projects.engineering.redhat.com/browse/RHOSINFRA-4174 diff --git a/roles/collect_logs/molecule/infrared/verify.yml b/roles/collect_logs/molecule/infrared/verify.yml deleted file mode 100644 index 1cfc2c9..0000000 --- a/roles/collect_logs/molecule/infrared/verify.yml +++ /dev/null @@ -1,84 +0,0 @@ ---- -- name: Converge - hosts: all - tasks: - - - name: "List available plugins" - shell: | - export PATH=$PATH:/usr/local/sbin:/usr/sbin - source {{ infrared_venv }}/bin/activate - ir plugin list - args: - executable: /bin/bash - register: plugin_output - changed_when: false - - - name: "Verify playbook list plugins output" - debug: - msg: "{{ plugin_output }}" - - - name: "Check if ansible-role-collect-logs is present" - fail: - msg: "ansible-role-collect-logs not installed" - when: "'ansible-role-collect-logs' not in plugin_output.stdout" - - - name: Get directory with collected log files - stat: - path: "{{ infrared_location }}/collected_files_test" - register: - collected_files_dir - - - name: Ensure directory with collected log files is created - assert: - that: - - collected_files_dir.stat.exists - - - name: Get the stats of collected files - stat: - path: "{{ item }}" - register: collected_files_stats - loop: - - "{{ infrared_location }}/collected_files_test/localhost/tmp/dummy.log.gz" - - "{{ infrared_location }}/collected_files_test/localhost/tmp/1MB_dummy.log.gz" - - "{{ infrared_location }}/collected_files_test/localhost/tmp/just_file.gz" - - "{{ infrared_location }}/collected_files_test/localhost/tmp/config.conf.gz" - - - name: Ensure all files were collected - assert: - that: - - item.stat.exists - loop: "{{ collected_files_stats.results }}" - - - name: Get the stats of excluded files - stat: - path: "{{ item }}" - register: excluded_files_stats - loop: - - "{{ infrared_location }}/collected_files_test/localhost/tmp/exclude.log.gz" - - "{{ infrared_location }}/collected_files_test/localhost/tmp/exclude_append.log.gz" - - - name: Ensure excluded files were not collected - assert: - that: - - not item.stat.exists - loop: "{{ excluded_files_stats.results }}" - - - name: Get tar files with logs - find: paths={{ infrared_location }}/collected_files_test/ patterns='*.tar' - register: tar_files - - - name: Ensure .tar files with logs are deleted - assert: - that: - - tar_files.matched == 0 - - - name: Get file generated by find - stat: - path: /tmp/localhost-rsync-list - register: - rsync_list - - - name: Ensure that find is used to generate list of files for rsync - assert: - that: - - rsync_list.stat.exists diff --git a/roles/collect_logs/molecule/sova/converge.yml b/roles/collect_logs/molecule/sova/converge.yml deleted file mode 100644 index e3eac2d..0000000 --- a/roles/collect_logs/molecule/sova/converge.yml +++ /dev/null @@ -1,12 +0,0 @@ ---- -- name: Converge - 
hosts: all - tasks: - - name: Include collect_logs - vars: - sova_config_file: "{{ ansible_user_dir }}/workspace/logs/sova_config.json" - include_role: - name: collect_logs - tasks_from: sova.yml - tags: - - molecule-idempotence-notest diff --git a/roles/collect_logs/molecule/sova/molecule.yml b/roles/collect_logs/molecule/sova/molecule.yml deleted file mode 100644 index 9d05618..0000000 --- a/roles/collect_logs/molecule/sova/molecule.yml +++ /dev/null @@ -1,15 +0,0 @@ ---- -driver: - name: delegated - options: - managed: false - ansible_connection_options: - ansible_connection: local - -log: true -platforms: - - name: instance -provisioner: - name: ansible -verifier: - name: ansible diff --git a/roles/collect_logs/molecule/sova/prepare.yml b/roles/collect_logs/molecule/sova/prepare.yml deleted file mode 100644 index cd572cb..0000000 --- a/roles/collect_logs/molecule/sova/prepare.yml +++ /dev/null @@ -1,105 +0,0 @@ ---- -- name: Prepare - hosts: all - tasks: - - - name: Prepare the console file directory - file: - path: '{{ ansible_user_dir }}/workspace/logs/' - state: directory - mode: 0755 - - - name: Create a sample console file - copy: - content: | - No valid host was found. There are not enough hosts - dest: '{{ ansible_user_dir }}/workspace/logs/quickstart_install.log' - mode: 0644 - - - name: Create a sample sova config file - copy: - content: | - { - "patterns": { - "bmc": [ - { - "id": "Introspection_failed_cannot_get_IP_address", - "logstash": "", - "msg": "Introspection failed, cannot get IP address", - "pattern": "Introspection_failed_cannot_get_IP_address", - "tag": "infra" - } - ], - "console": [ - { - "id": "Not_enough_hosts", - "logstash": "", - "msg": "No valid host was found.", - "pattern": "Not_enough_hosts", - "tag": "info" - } - ], - "errors": [ - { - "id": "Buildah_pull_image_failed", - "logstash": "", - "msg": "Buildah pull image failed", - "pattern": "Buildah_pull_image_failed", - "tag": "info" - } - ], - "ironic-conductor": [ - { - "id": "Ironic_deployment_timeout", - "logstash": "", - "msg": "Ironic deployment timeout.", - "pattern": "Ironic_deployment_timeout", - "tag": "info" - } - ], - "logstash": [ - { - "id": "Ping_timeout_when_deploying_OC", - "logstash": "", - "msg": "Ping timeout when deploying OC.", - "pattern": "Ping_timeout_when_deploying_OC", - "tag": "infra" - } - ], - "registry_log": [ - { - "id": "Invalid_checksum_format", - "logstash": "", - "msg": "Invalid checksum format.", - "pattern": "Invalid_checksum_format", - "tag": "infra" - } - ], - "selinux": [ - { - "id": "selinux_denials_found", - "logstash": "", - "msg": "selinux denials found", - "pattern": "selinux_denials_found", - "tag": "code" - } - ], - "syslog": [ - { - "id": "service_FAIL", - "logstash": "", - "msg": "service FAIL", - "pattern": "service_FAIL", - "tag": "command_exe" - } - ] - }, - "regexes": [ - { - "name": "Not_enough_hosts", - "regex": "No\\ valid\\ host\\ was\\ found\\.\\ There\\ are\\ not\\ enough\\ hosts" - } - ] - } - dest: '{{ ansible_user_dir }}/workspace/logs/sova_config.json' - mode: 0644 diff --git a/roles/collect_logs/molecule/sova/verify.yml b/roles/collect_logs/molecule/sova/verify.yml deleted file mode 100644 index ecb05b3..0000000 --- a/roles/collect_logs/molecule/sova/verify.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -- hosts: all - tasks: - - - name: Ls {{ ansible_user_dir }}/workspace/logs/ # remove before merge - command: ls -la {{ ansible_user_dir }}/workspace/logs/ - changed_when: false - register: test_output - - - name: debug test # remove before merge - 
      debug: var=test_output
-
-    - name: Ensure all files exist
-      stat:
-        path: "{{ item }}"
-      register: failures_file
-      loop:
-        - "{{ ansible_user_dir }}/workspace/logs/failures_file"
-      # TODO: Revert back after bug #1947133 is fixed
-      loop_control:
-        label: '{{ item.split("/")[-1] }}'
-
-    - name: Ensure all files exist - test
-      assert:
-        that:
-          - item.stat.exists
-      loop: "{{ failures_file.results }}"
-
-    - name: Check if we have strings in failures_file
-      command: grep 'No valid host was found' {{ ansible_user_dir }}/workspace/logs/failures_file
-      changed_when: false
-
-    # TODO: Revert back after bug #1947133 is fixed
diff --git a/roles/collect_logs/scripts/doc_extrapolation.awk b/roles/collect_logs/scripts/doc_extrapolation.awk
deleted file mode 100644
index b1d91ec..0000000
--- a/roles/collect_logs/scripts/doc_extrapolation.awk
+++ /dev/null
@@ -1,42 +0,0 @@
-# AWK script used to parse shell scripts, created during TripleO deployments,
-# and convert them into rST files for digestion by Sphinx.
-#
-# General notes:
-#
-# - Only blocks between `### --start_docs` and `### --stop_docs` will be
-#   parsed
-# - Lines containing `#nodocs` will be excluded from rST output
-# - Lines containing `## ::` indicate subsequent lines should be formatted
-#   as code blocks
-# - Other lines beginning with `## ` will have the prepended
-#   `## ` removed. (This is how you would add general rST formatting)
-# - All other lines (including shell comments) will be indented by four spaces
-
-/^### --start_docs/ {
-    for (;;) {
-        if ((getline line) <= 0)
-            unexpected_eof()
-        if (line ~ /^### --stop_docs/)
-            break
-        if (match(line, ".* #nodocs$"))
-            continue
-        if (substr(line, 1, 5) == "## ::") {
-            line = "\n::\n"
-        } else if (substr(line, 1, 3) == "## ") {
-            line = substr(line, 4)
-        } else if (line != "") {
-            line = "    "line
-        }
-        print line > "/dev/stdout"
-    }
-}
-
-function unexpected_eof() {
-    printf("%s:%d: unexpected EOF or error\n", FILENAME, FNR) > "/dev/stderr"
-    exit 1
-}
-
-END {
-    if (curfile)
-        close(curfile)
-}
diff --git a/roles/collect_logs/tasks/collect.yml b/roles/collect_logs/tasks/collect.yml
deleted file mode 100644
index 8f9c2ed..0000000
--- a/roles/collect_logs/tasks/collect.yml
+++ /dev/null
@@ -1,232 +0,0 @@
----
-- become: true
-  ignore_errors: true
-  block:
-    - name: Ensure required rpms for logging are installed
-      package:
-        state: present
-        name: "{{ artcl_collect_pkg_list }}"
-
-    - name: Prepare directory with extra logs
-      file:
-        dest: /var/log/extra
-        state: directory
-        mode: 0755
-
-    - name: Create ODL extra logs script
-      template:
-        src: "odl_extra_logs.j2"
-        dest: "/tmp/odl_extra_logs.sh"
-        mode: 0644
-
-    - name: Determine commands to run
-      run_once: true
-      vars:
-        combined_cmds: "{{ artcl_commands | combine(artcl_commands_extras, recursive=True) }}"
-      # combines the default dictionary with the user-defined one and
-      # keeps only commands from groups mentioned in collect_log_types
-      flatten_nested_dict:
-        data: "{{ combined_cmds | dict2items | selectattr('key', 'in', collect_log_types) | list | items2dict }}"
-      register: artcl_commands_flatten
-
-    - name: install setools
-      ansible.builtin.package:
-        name:
-          - setools
-          - setroubleshoot
-        state: present
-
-    - name: install custom consolidation script
-      ansible.builtin.copy:
-        dest: /usr/local/bin/consolidate-avc.pl
-        src: consolidate-avc.pl
-        mode: 0555
-
-    - name: Run artcl_commands
-      # noqa 305
-      # noqa 102 :: No Jinja2 in when
-      vars:
-        capture_file: "{{ item.capture_file | default( item.name + '.txt') }}"
-      shell:
-        # redirection of output to log
file, see https://ops.tips/gists/redirect-all-outputs-of-a-bash-script-to-a-file/ - cmd: | - {% if not item.capture_disable | default(False) %} - exec >{% if not capture_file.startswith('/') %}/var/log/extra/{% endif %}{{ capture_file }} 2>&1 - {% endif %} - {# do not put anything after the command #} - {{ item.cmd }} - warn: false - args: - chdir: /var/log/extra - executable: /bin/bash - changed_when: false - when: item.when | default(true) - loop: "{{ artcl_commands_flatten.data }}" - loop_control: - label: "{{ item.name }}" - -# Change the collect_log_types if you don't want to collect -# some specific logs -- import_tasks: collect/container.yml - when: "'container' in collect_log_types" - -- import_tasks: collect/system.yml - when: "'system' in collect_log_types" - -- import_tasks: collect/network.yml - when: "'network' in collect_log_types" - -- import_tasks: collect/monitoring.yml - when: "'monitoring' in collect_log_types" - -- name: Set default collect list - set_fact: - collect_list: "{{ artcl_collect_list }} + {{ artcl_collect_list_append|default([]) }}" - -- name: Override collect list - set_fact: - collect_list: "{{ artcl_collect_override[inventory_hostname] }}" - when: - - artcl_collect_override is defined - - artcl_collect_override[inventory_hostname] is defined - -- name: Set default exclude list - set_fact: - artcl_exclude_list: "{{ artcl_exclude_list|default([]) }} + {{ artcl_exclude_list_append|default([]) }}" - -- name: Create temp directory before gathering logs - file: - dest: "/tmp/{{ inventory_hostname }}" - state: directory - mode: 0755 - -- name: Create rsync filter file - template: - src: "rsync-filter.j2" - dest: "/tmp/{{ inventory_hostname }}-rsync-filter" - mode: 0644 - when: artcl_rsync_collect_list|bool - -# This task needs to be finished before generating find list of files -# to collect (Create find list file task) otherwise not all the container -# log files may be found and thus not collected later -- name: Wait for container logs collection if not finished yet - become: true - async_status: - jid: "{{ container_collection.ansible_job_id }}" - register: container_collection_result - until: container_collection_result.finished - delay: 10 - retries: "{{ ((artcl_container_collect_timeout|int) / 10)|int }}" - when: "'container' in collect_log_types" - -- name: Find and move logfiles generic case (typically without compression) - when: not (artcl_gzip | bool) or ( sanitize_lines is defined and sanitize_lines|length ) or ( artcl_rsync_collect_list|bool ) - block: - - name: Create find list file - become: true - shell: > - find {{ collect_list|join(' ') }} - -maxdepth {{ artcl_find_maxdepth }} - -type f \ - -size -{{ artcl_find_max_size }}M - {% if artcl_exclude_list is defined %} - -not -path {{ artcl_exclude_list|map('quote')|join(' -not -path ') }} - {% endif %} - -print0 > /tmp/{{ inventory_hostname }}-rsync-list - failed_when: false - when: not artcl_rsync_collect_list|bool - - - name: Gather the logs to /tmp - become: true - shell: > - set -o pipefail && - rsync --quiet --recursive --copy-links --prune-empty-dirs --ignore-errors - {% if artcl_rsync_collect_list|bool %} - --filter '. 
/tmp/{{ inventory_hostname }}-rsync-filter' - {% else %} - --from0 --files-from=/tmp/{{ inventory_hostname }}-rsync-list - {% endif %} - / /tmp/{{ inventory_hostname }}; - find /tmp/{{ inventory_hostname }} -type d -print0 | xargs -0 chmod 755; - find /tmp/{{ inventory_hostname }} -type f -print0 | xargs -0 chmod 644; - find /tmp/{{ inventory_hostname }} -not -type f -not -type d -delete; - {# chown can fail with: chown: invalid spec: '0:' #} - chown -R {{ ansible_user | default(ansible_effective_user_id) }}: /tmp/{{ inventory_hostname }} || true; - args: - executable: /bin/bash - changed_when: true - - # See README section 'Sanitizing Log Strings' - - name: Sanitize logs to remove sensitive details - include_tasks: sanitize_log_strings.yaml - loop: "{{ sanitize_lines }}" - loop_control: - loop_var: outer_item - when: sanitize_lines is defined and sanitize_lines|length - - # it makes sense to compress the logs prior - # to sending them over the wire to the - # node where they are collected by infra. - # Regardless of the file size. - - name: Compress the collected files if configured - when: artcl_gzip | bool - shell: gzip -r ./{{ inventory_hostname }} - args: - chdir: /tmp - warn: false - changed_when: true - tags: - - skip_ansible_lint - -- name: Create gz compressed log files to the /tmp (special case) - when: - - artcl_gzip | bool - - not ( sanitize_lines is defined and sanitize_lines|length ) - - not ( artcl_rsync_collect_list|bool ) - block: - - name: On the fly compress copy - become: true - shell: > - find {{ collect_list|join(' ') }} - -maxdepth {{ artcl_find_maxdepth }} - -type f \ - -size -{{ artcl_find_max_size }}M - {% if artcl_exclude_list is defined %} - -not -path {{ artcl_exclude_list|map('quote')|join(' -not -path ') }} - {% endif %} - -print0 | - xargs -0 -P 8 -I ITER sh -c 'mkdir -p "/tmp/{{ inventory_hostname }}$(dirname ITER)"; gzip -c "ITER" > "/tmp/{{ inventory_hostname }}/ITER.gz"' - failed_when: false - -- name: Create tar archive of logs for faster copying # noqa: command-instead-of-module - shell: - cmd: tar cf {{ inventory_hostname }}.tar {{ inventory_hostname }}; - chdir: /tmp - changed_when: true - -- name: Fetch log archive (tar) - fetch: - src: "/tmp/{{ inventory_hostname }}.tar" - dest: "{{ artcl_collect_dir }}/{{ inventory_hostname }}.tar" - flat: true - validate_checksum: false - -- name: Delete temporary log directory after collection - file: - path: "/tmp/{{ inventory_hostname }}" - state: absent - ignore_errors: true # noqa ignore-errors - -- name: Extract the logs archive - unarchive: - src: "{{ artcl_collect_dir }}/{{ inventory_hostname }}.tar" - dest: "{{ artcl_collect_dir }}" - remote_src: true - delegate_to: localhost - -- name: Remove logs archive - file: - path: "{{ artcl_collect_dir }}/{{ inventory_hostname }}.tar" - state: absent - delegate_to: localhost diff --git a/roles/collect_logs/tasks/collect/container.yml b/roles/collect_logs/tasks/collect/container.yml deleted file mode 100644 index ad424bf..0000000 --- a/roles/collect_logs/tasks/collect/container.yml +++ /dev/null @@ -1,49 +0,0 @@ ---- -- become: true - ignore_errors: true - block: - - name: check if ODL is enabled via docker - shell: docker ps | grep opendaylight_api - register: odl_container_enabled - - - name: check if ODL is enabled via podman - shell: podman ps | grep opendaylight_api - register: odl_container_enabled - when: odl_container_enabled.rc != 0 - - - - name: check if ODL is enabled via rpm # noqa: command-instead-of-module - shell: rpm -qa | grep opendaylight - 
      register: odl_rpm_enabled
-
-    - name: Create ODL log directory
-      file:
-        dest: "{{ odl_extra_log_dir }}"
-        state: directory
-        mode: 0755
-      when: (odl_rpm_enabled.rc == 0) or (odl_container_enabled.rc == 0)
-
-    - name: Collect OVS outputs for ODL
-      shell: "bash /tmp/odl_extra_logs.sh"  # noqa 305
-      when: (odl_rpm_enabled.rc == 0) or (odl_container_enabled.rc == 0)
-
-    - name: Collect ODL info and logs (RPM deployment)
-      shell: >
-        cp /opt/opendaylight/data/log/* /var/log/extra/odl/;
-        journalctl -u opendaylight > /var/log/extra/odl/odl_journal.log
-      when: odl_rpm_enabled.rc == 0
-
-    - name: Copy collection logs script for containers
-      copy:
-        src: collect-container-logs.sh
-        dest: /tmp/collect-container-logs.sh
-        mode: 0755
-
-    - name: Run container logs collection with timeout
-      command: >-
-        timeout --preserve-status -s 15 -k {{ (artcl_container_collect_timeout|int + 30)|string }}
-        {{ artcl_container_collect_timeout|string }} bash -x /tmp/collect-container-logs.sh
-      changed_when: true
-      async: "{{ artcl_container_collect_timeout }}"
-      poll: 0
-      register: container_collection
diff --git a/roles/collect_logs/tasks/collect/monitoring.yml b/roles/collect_logs/tasks/collect/monitoring.yml
deleted file mode 100644
index 6847516..0000000
--- a/roles/collect_logs/tasks/collect/monitoring.yml
+++ /dev/null
@@ -1,39 +0,0 @@
----
-- become: true
-  ignore_errors: true
-  block:
-    - name: check for dstat log file
-      stat: path=/var/log/extra/dstat-csv.log
-      register: dstat_logfile
-
-    - name: kill dstat
-      shell: "pkill dstat"  # noqa 305
-      become: true
-      when: dstat_logfile.stat.exists
-
-    - name: Get dstat_graph tool
-      git:
-        repo: "https://github.com/Dabz/dstat_graph.git"
-        dest: "/tmp/dstat_graph"
-        version: master
-      when: dstat_logfile.stat.exists
-
-    - name: Generate HTML dstat graphs if it exists
-      shell: "/tmp/dstat_graph/generate_page.sh /var/log/extra/dstat-csv.log > /var/log/extra/dstat.html"
-      when: dstat_logfile.stat.exists
-      args:
-        chdir: "/tmp/dstat_graph"
-
-    - name: Generate human-readable SAR logs
-      shell: "[[ -f /usr/lib64/sa/sa2 ]] && /usr/lib64/sa/sa2 -A"
-
-    - name: Ensure sos package is installed when collecting sosreport(s)
-      package:
-        name: sos
-        state: present
-      when: artcl_collect_sosreport|bool
-
-    - name: Collect sosreport
-      command: >
-        sosreport {{ artcl_sosreport_options }}
-      when: artcl_collect_sosreport|bool
diff --git a/roles/collect_logs/tasks/collect/network.yml b/roles/collect_logs/tasks/collect/network.yml
deleted file mode 100644
index b703f55..0000000
--- a/roles/collect_logs/tasks/collect/network.yml
+++ /dev/null
@@ -1,49 +0,0 @@
----
-- become: true
-  ignore_errors: true
-  block:
-    - name: netstat -laputen
-      shell: "netstat -laputen &> /var/log/extra/netstat.txt"
-
-    - name: Collect network status info
-      shell: >
-        echo "netstat" > /var/log/extra/network.txt;
-        netstat -i &>> /var/log/extra/network.txt;
-        for ipv in 4 6; do
-          echo "### IPv${ipv} addresses" >> /var/log/extra/network.txt;
-          ip -${ipv} a &>> /var/log/extra/network.txt;
-          echo "### IPv${ipv} routing" >> /var/log/extra/network.txt;
-          ip -${ipv} r &>> /var/log/extra/network.txt;
-          if [[ ! $(command -v nft) ]]; then
-            echo "### IPTables (IPv${ipv})" &>> /var/log/extra/network.txt;
-            test $ipv -eq 4 && iptables-save &>> /var/log/extra/network.txt;
-            test $ipv -eq 6 && ip6tables-save &>> /var/log/extra/network.txt;
-            echo "### IPTables Stats (IPv${ipv})" &>> /var/log/extra/network.txt;
-            test $ipv -eq 4 && iptables -vnL &>> /var/log/extra/network.txt;
-            test $ipv -eq 6 && ip6tables -vnL &>> /var/log/extra/network.txt;
-          fi
-        done;
-        command -v nft && nft list ruleset &>/var/log/extra/nftables.txt;
-        journalctl -p warning -t kernel -o short -g DROPPING --no-pager &> /var/log/extra/dropped-packets.txt;
-        (for NS in $(ip netns list | cut -f 1 -d " "); do
-          for ipv in 4 6; do
-            echo "==== $NS (${ipv})====";
-            echo "### IPv${ipv} addresses";
-            ip netns exec $NS ip -${ipv} a;
-            echo "### IPv${ipv} routing";
-            ip netns exec $NS ip -${ipv} r;
-            echo "### IPTables (IPv${ipv})";
-            test $ipv -eq 4 && ip netns exec $NS iptables-save;
-            test $ipv -eq 6 && ip netns exec $NS ip6tables-save;
-          done
-          PIDS="$(ip netns pids $NS)";
-          [[ ! -z "$PIDS" ]] && ps --no-headers -f --pids "$PIDS";
-          echo "";
-        done) &>> /var/log/extra/network-netns;
-        (for NB in $(ovs-vsctl show | grep Bridge | awk '{print $2}'); do
-          echo "==== Bridge name - $NB ====";
-          ovs-ofctl show $NB;
-          ovs-ofctl dump-flows $NB;
-          echo "";
-        done;
-        ovsdb-client dump) &> /var/log/extra/network-bridges;
diff --git a/roles/collect_logs/tasks/collect/system.yml b/roles/collect_logs/tasks/collect/system.yml
deleted file mode 100644
index 45d18e8..0000000
--- a/roles/collect_logs/tasks/collect/system.yml
+++ /dev/null
@@ -1,39 +0,0 @@
----
-- become: true
-  ignore_errors: "{{ artcl_ignore_errors }}"
-  block:
-
-    - name: Collect errors and rename if more than 10 MB
-      shell: >
-        grep -rE '^[-0-9]+ [0-9:\.]+ [0-9 ]*ERROR ' /var/log/ |
-        sed "s/\(.*\)\(20[0-9][0-9]-[0-9][0-9]-[0-9][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9]\.[0-9]\+\)\(.*\)/\2 ERROR \1\3/g" > /tmp/errors.txt;
-        if (( $(stat -c "%s" /tmp/errors.txt) > 10485760 )); then
-          ERR_NAME=big-errors.txt;
-        else
-          ERR_NAME=errors.txt;
-        fi;
-        mv /tmp/errors.txt /var/log/extra/${ERR_NAME}
-
-    # The logstash.txt file is expected to follow a strict (console-like) format:
-    #   TIMESTAMP_ISO8601 | message
-    # If the timestamp is missing on a line, the previous value will be used.
-
-    # https://opendev.org/openstack/logstash-filters/src/branch/master/filters/openstack-filters.conf#L6-L20
-    # https://github.com/logstash-plugins/logstash-patterns-core/blob/master/patterns/grok-patterns#L71
-    # https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString
-
-    # Valid examples:
-    #   2011-10-05T14:48:00.000Z | foo
-    #   2011-10-05T14:48:00Z | foo
-    #   2011-10-05 14:48:00 | foo
-
-    - name: Create an index file for logstash
-      # This removes the leading timestamp and regenerates it in a known valid
-      # format, but we should improve the code to keep the original when valid.
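A minimal Python rendering of what the sed expression in this task does to
each line (assuming the same "strip any leading timestamp-like run, then
prepend a fresh ISO8601 stamp" behavior as the shell pipeline below):

import re
from datetime import datetime, timezone

LEADING_TS = re.compile(r"^[0-9\s.:TZ|-]+ ")

def normalize(line):
    # The task's suffix is ansible_date_time.iso8601_micro + " | ".
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
    return "%s | %s" % (stamp, LEADING_TS.sub("", line, count=1))

# normalize("2011-10-05 14:48:00 | foo") -> "<now> | foo"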
- vars: - suffix: "{{ ansible_date_time.iso8601_micro }} | " - shell: > - find {{ artcl_logstash_files | default([]) | join(" ") }} 2>/dev/null | - xargs -r sed - -E "s/^[0-9[:space:].:TZ|-]+ //g; s/^/{{ suffix }}/" - >> /var/log/extra/logstash.txt diff --git a/roles/collect_logs/tasks/create-docs.yml b/roles/collect_logs/tasks/create-docs.yml deleted file mode 100644 index bb6b53e..0000000 --- a/roles/collect_logs/tasks/create-docs.yml +++ /dev/null @@ -1,45 +0,0 @@ ---- - -- name: Ensure required python packages are installed - pip: - requirements: "{{ local_working_dir }}/share/ansible/roles/collect-logs/docs/doc-requirements.txt" - executable: "{{ local_working_dir }}/bin/pip" - -- name: Generate rST docs from scripts and move to Sphinx src dir - shell: > - awk -f "{{ local_working_dir }}/share/ansible/roles/collect-logs/scripts/doc_extrapolation.awk" \ - "{{ artcl_collect_dir }}/undercloud/home/{{ undercloud_user }}/{{ item }}.sh" > \ - "{{ artcl_docs_source_dir }}/{{ item }}.rst" - with_items: "{{ artcl_create_docs_payload.included_deployment_scripts }}" - ignore_errors: true # noqa: ignore-errors - changed_when: true - -- name: Fetch static rST docs to include in output docs - shell: > - cp "{{ artcl_docs_source_dir }}/../static/{{ item }}.rst" "{{ artcl_docs_source_dir }}" - with_items: "{{ artcl_create_docs_payload.included_static_docs }}" - ignore_errors: true # noqa: ignore-errors - changed_when: true - -- name: Generate fresh index.rst for Sphinx - template: - src: index.rst.j2 - dest: "{{ artcl_docs_source_dir }}/index.rst" - force: true - mode: 0644 - -- name: Ensure docs dir exists - file: - path: "{{ artcl_collect_dir }}/docs" - state: directory - mode: 0755 - -- name: Build docs with Sphinx - shell: > - set -o pipefail && - sphinx-build -b html "{{ artcl_docs_source_dir }}" "{{ artcl_docs_build_dir }}" - 2>&1 {{ timestamper_cmd }} > {{ artcl_collect_dir }}/docs/sphinx_build.log - args: - executable: /bin/bash - ignore_errors: true # noqa: ignore-errors - changed_when: true diff --git a/roles/collect_logs/tasks/main.yml b/roles/collect_logs/tasks/main.yml deleted file mode 100644 index d93a7ca..0000000 --- a/roles/collect_logs/tasks/main.yml +++ /dev/null @@ -1,48 +0,0 @@ ---- -- name: gather facts used by role - setup: - gather_subset: "!min,pkg_mgr,python" - when: "['pkg_mgr', 'python'] | difference(ansible_facts.keys()|list)" - -- name: Load unsecure.yml defaults when outside zuul - when: zuul is not defined - include_vars: unsecure.yml - -- name: Load operating system specific variables - include_vars: "{{ item }}" - failed_when: false - # pattern: v3 - loop: - - "family-{{ ansible_os_family | lower }}.yml" - - "family-{{ ansible_os_family | lower }}-{{ ansible_distribution_major_version | lower }}.yml" - - "{{ ansible_distribution | lower | replace(' ', '-') }}.yml" - - "{{ ansible_distribution | lower | replace(' ', '-') }}-{{ ansible_distribution_major_version | lower }}.yml" - - "{{ ansible_distribution | lower | replace(' ', '-') }}-{{ ansible_distribution_version.split('.')[0:2] | join('-') | lower }}.yml" - tags: - - always - -- name: Collect logs - include: collect.yml - when: artcl_collect|bool - -- name: Generate docs - include: create-docs.yml - when: - - artcl_gen_docs|bool - - not artcl_collect|bool - -- name: Publish logs - include: publish.yml - when: - - artcl_publish|bool - - not artcl_collect|bool - -- name: Verify Sphinx build - shell: | # noqa 305 - grep -q "{{ item }}" "{{ artcl_collect_dir }}/docs/build/index.html" - with_items: "{{ 
artcl_create_docs_payload.table_of_contents }}" - changed_when: false - when: - - artcl_gen_docs|bool - - artcl_verify_sphinx_build|bool - - not artcl_collect|bool diff --git a/roles/collect_logs/tasks/publish.yml b/roles/collect_logs/tasks/publish.yml deleted file mode 100644 index e408975..0000000 --- a/roles/collect_logs/tasks/publish.yml +++ /dev/null @@ -1,150 +0,0 @@ ---- -# collection dir could be either a dir or a link -# file module cannot be used here, because it changes link to dir -# when called with state: directory -- name: Ensure the collection directory exists - shell: | - if [[ ! -d "{{ artcl_collect_dir }}" && ! -h "{{ artcl_collect_dir }}" ]]; then - mkdir -p "{{ artcl_collect_dir }}" - fi - changed_when: true - -- name: Fetch console log - shell: > - set -o pipefail && - curl -k "{{ artcl_build_url }}/timestamps/?time=yyyy-MM-dd%20HH:mm:ss.SSS%20|&appendLog&locale=en_GB" - > {{ artcl_collect_dir }}/console.log - args: - executable: /bin/bash - when: - - artcl_build_url is defined - - artcl_build_url|length > 0 - -- include: sova.yml - ignore_errors: true - -- import_tasks: publish_ara.yml - when: ara_enabled|bool - ignore_errors: true - -- name: fetch stackviz results to the root of the collect_dir - shell: > - if [ -d {{ artcl_collect_dir }}/undercloud/var/log/extra/stackviz/data ]; then - cp -r {{ artcl_collect_dir }}/undercloud/var/log/extra/stackviz {{ artcl_collect_dir }}; - gunzip -fr {{ artcl_collect_dir }}/stackviz; - fi; - changed_when: true - -- name: fetch stackviz results to the root of the collect_dir for os_tempest - shell: > - if [ -d {{ artcl_collect_dir }}/undercloud/var/log/tempest/stackviz/data ]; then - cp -r {{ artcl_collect_dir }}/undercloud/var/log/tempest/stackviz {{ artcl_collect_dir }}; - gunzip -fr {{ artcl_collect_dir }}/stackviz; - fi; - when: use_os_tempest is defined - changed_when: true - -- name: tempest results to the root of the collect_dir - shell: > - cp {{ artcl_collect_dir }}/undercloud/home/stack/tempest/tempest.{xml,html}{,.gz} {{ artcl_collect_dir }} || true; - gunzip {{ artcl_collect_dir }}/tempest.{xml,html}.gz || true; - changed_when: true - -- name: Copy and unzip testrepository.subunit file to the root of {{ artcl_collect_dir }} for os_tempest - shell: > - cp {{ artcl_collect_dir }}/undercloud/var/log/tempest/testrepository.subunit* {{ artcl_collect_dir }} || true; - gunzip {{ artcl_collect_dir }}/testrepository.subunit.gz || true; - changed_when: true - -- name: Fetch .sh and .log files from local working directory on localhost - shell: > - cp {{ item }} {{ artcl_collect_dir }}/ - with_items: - - "{{ local_working_dir }}/*.sh" - - "{{ local_working_dir }}/*.log" - ignore_errors: true # noqa: ignore-errors - changed_when: true - -# the doc footer for logging has been removed. -# copy the log readme into the base directory. 
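The "Rename compressed text based files" task a little further below does, in
awk, roughly what this Python sketch shows (simplified; the awk expression
also renames bare .gz files under /var/log and /etc):

import os
import re

TEXTLIKE_GZ = re.compile(r"\.(conf|ini|json|sh|log|yaml|yml|repo|cfg|j2|py)\.gz$")

def rename_text_gz(root):
    # foo.log.gz -> foo.log.txt.gz, so log servers serve it as text
    for dirpath, _, files in os.walk(root):
        for name in files:
            if TEXTLIKE_GZ.search(name):
                old = os.path.join(dirpath, name)
                os.rename(old, old[:-3] + ".txt.gz")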
-- name: copy in the logs README.html
-  shell: >
-    if [ -f {{ artcl_readme_path }} ]; then
-      cp {{ artcl_readme_path }} {{ artcl_readme_file }};
-    fi;
-  ignore_errors: true # noqa: ignore-errors
-  changed_when: true
-
-- name: Rename compressed text based files to end with txt.gz extension
-  shell: >
-    set -o pipefail &&
-    find {{ artcl_collect_dir }}/ -type f |
-    awk 'function rename(orig)
-    { new=orig; sub(/\.gz$/, ".txt.gz", new); system("mv " orig " " new) }
-    /\.(conf|ini|json|sh|log|yaml|yml|repo|cfg|j2|py)\.gz$/ { rename($0) }
-    /(\/var\/log\/|\/etc\/)[^ \/\.]+\.gz$/ { rename($0) }';
-  args:
-    executable: /bin/bash
-  when: artcl_txt_rename|bool
-
-- name: Create the zuul-based reproducer script if we are running on zuul
-  include_role:
-    name: create-zuul-based-reproducer
-  when: zuul is defined
-
-# This is where upload starts, no local change after this line
-# All tags after this line are marked with "notest" to allow testing (molecule) in absence of access to log servers.
-- name: upload to the artifact server using pubkey auth # noqa: command-instead-of-module
-  command: >
-    rsync -av
-    --quiet -e "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {{ artcl_report_server_key|default('') }}"
-    {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ artcl_build_tag }}
-  async: "{{ artcl_publish_timeout }}"
-  poll: 15
-  retries: 5
-  delay: 60
-  when: artcl_use_rsync|bool and not artcl_rsync_use_daemon|bool
-  tags: notest
-
-- name: upload to the artifact server using password auth # noqa: command-instead-of-module
-  environment:
-    RSYNC_PASSWORD: "{{ artcl_rsync_password | default(omit) }}"
-  command: rsync -av --quiet {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ artcl_build_tag }}
-  async: "{{ artcl_publish_timeout }}"
-  poll: 15
-  retries: 5
-  delay: 60
-  when: artcl_use_rsync|bool and artcl_rsync_use_daemon|bool
-  tags: notest
-
-- name: upload to swift based artifact server
-  shell: swift upload --quiet --header "X-Delete-After:{{ artcl_swift_delete_after }}" {{ artcl_swift_container }}/{{ artcl_build_tag }} *
-  args:
-    chdir: "{{ artcl_collect_dir }}"
-  changed_when: true
-  environment:
-    OS_AUTH_URL: "{{ artcl_swift_auth_url }}"
-    OS_USERNAME: "{{ artcl_swift_username }}"
-    OS_PASSWORD: "{{ artcl_swift_password }}"
-    OS_TENANT_NAME: "{{ artcl_swift_tenant_name }}"
-  async: "{{ artcl_publish_timeout }}"
-  poll: 15
-  when: artcl_use_swift|bool
-  tags: notest
-
-- name: use zuul_swift_upload.py to publish the files
-  shell: >
-    "{{ artcl_zuul_swift_upload_path }}/zuul_swift_upload.py --name {{ artcl_swift_container }}
-    --delete-after {{ artcl_swift_delete_after }} {{ artcl_collect_dir }}"
-  async: "{{ artcl_publish_timeout }}"
-  poll: 15
-  when: artcl_use_zuul_swift_upload|bool
-  tags: notest
-
-- name: create the artifact location redirect file
-  template:
-    src: full_logs.html.j2
-    dest: "{{ artcl_collect_dir }}/full_logs.html"
-    mode: 0644
-  when: artcl_env != 'tripleo-ci'
-  tags: notest
diff --git a/roles/collect_logs/tasks/publish_ara.yml b/roles/collect_logs/tasks/publish_ara.yml
deleted file mode 100644
index b3b8e1a..0000000
--- a/roles/collect_logs/tasks/publish_ara.yml
+++ /dev/null
@@ -1,43 +0,0 @@
----
-- when: ara_generate_html|bool
-  block:
-    - name: Generate and retrieve the ARA static playbook report
-      shell: >
-        {{ local_working_dir }}/bin/ara generate html {{ local_working_dir }}/ara_oooq;
-        {{ local_working_dir }}/bin/ara task list --all -f json > {{ artcl_collect_dir }}/ara.json;
-        cp -r {{ local_working_dir }}/ara_oooq {{ artcl_collect_dir }}/;
-
-    - name: Generate and retrieve the ARA static playbook report for undercloud
-      shell: >
-        {{ local_working_dir }}/bin/ara generate html {{ local_working_dir }}/ara_oooq_uc;
-        {{ local_working_dir }}/bin/ara task list --all -f json > {{ artcl_collect_dir }}/ara.oooq.uc.json;
-        cp -r {{ local_working_dir }}/ara_oooq_uc {{ artcl_collect_dir }}/;
-      environment:
-        ARA_DATABASE: 'sqlite:///{{ working_dir }}/ara_db.sql'
-
-    - name: Generate and retrieve the ARA static playbook report for OC deploy
-      become: true
-      shell: >
-        {{ local_working_dir }}/bin/ara generate html {{ local_working_dir }}/ara_oooq_oc;
-        {{ local_working_dir }}/bin/ara task list --all -f json > {{ artcl_collect_dir }}/ara.oooq.oc.json;
-        cp -r {{ local_working_dir }}/ara_oooq_oc {{ artcl_collect_dir }}/;
-      ignore_errors: true # noqa: ignore-errors
-      environment:
-        ARA_DATABASE: 'sqlite:///{{ ara_overcloud_db_path }}'
-
-- name: Copy ara files to ara-report directories # noqa: deprecated-command-syntax
-  shell: |
-    mkdir -p {{ artcl_collect_dir }}/{{ item.dir }}/ara-report;
-    cp {{ item.file }} {{ artcl_collect_dir }}/{{ item.dir }}/ara-report/ansible.sqlite;
-  loop:
-    - dir: ara_oooq
-      file: "{{ local_working_dir }}/ara.sqlite"
-    - dir: ara_oooq_overcloud
-      file: "{{ ara_overcloud_db_path }}"
-  when: not ara_generate_html|bool
-
-- import_tasks: publish_ara_graphite.yml
-  when: ara_graphite_server is defined
-
-- import_tasks: publish_ara_influxdb.yml
-  when: influxdb_url is defined or influxdb_create_data_file|bool
diff --git a/roles/collect_logs/tasks/publish_ara_graphite.yml b/roles/collect_logs/tasks/publish_ara_graphite.yml
deleted file mode 100644
index 9527b54..0000000
--- a/roles/collect_logs/tasks/publish_ara_graphite.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-- name: Get ARA json data
-  shell: | # noqa 305
-    {{ local_working_dir }}/bin/ara result list --all -f json
-  register: ara_data
-  changed_when: false
-
-- name: Send to graphite
-  ara_graphite:
-    graphite_host: "{{ ara_graphite_server }}"
-    ara_mapping: "{{ ara_tasks_map }}"
-    ara_data: "{{ ara_data.stdout|to_json }}"
-    graphite_prefix: "{{ ara_graphite_prefix | default('') }}"
-    only_successful_tasks: "{{ ara_only_successful_tasks }}"
diff --git a/roles/collect_logs/tasks/publish_ara_influxdb.yml b/roles/collect_logs/tasks/publish_ara_influxdb.yml
deleted file mode 100644
index 073b12a..0000000
--- a/roles/collect_logs/tasks/publish_ara_influxdb.yml
+++ /dev/null
@@ -1,69 +0,0 @@
----
-- name: Get ARA json data
-  shell: | # noqa 305
-    {{ local_working_dir }}/bin/ara result list --all -f json
-  register: ara_data
-  no_log: true
-  changed_when: false
-
-- name: Collect and send data to InfluxDB
-  ara_influxdb:
-    influxdb_url: "{{ influxdb_url|default('') }}"
-    influxdb_port: "{{ influxdb_port }}"
-    influxdb_user: "{{ influxdb_user }}"
-    influxdb_password: "{{ influxdb_password }}"
-    influxdb_db: "{{ influxdb_dbname }}"
-    ara_data: "{{ ara_data.stdout|to_json }}"
-    measurement: "{{ influxdb_measurement }}"
-    data_file: "{{ influxdb_data_file_path }}"
-    only_successful_tasks: "{{ influxdb_only_successful_tasks }}"
-  no_log: true
-
-- name: Get ARA json data for undercloud
-  become: true
-  shell: "{{ local_working_dir }}/bin/ara result list --all -f json" # noqa 305
-  register: ara_root_data
-  no_log: true
-  changed_when: false
-
-- name: Collect and send data to InfluxDB
-  ara_influxdb:
-    influxdb_url: "{{ influxdb_url|default('') }}"
-    influxdb_port: "{{ influxdb_port }}"
-    influxdb_user: "{{ influxdb_user }}"
-    influxdb_password: "{{ influxdb_password }}"
-    influxdb_db: "{{ influxdb_dbname }}"
-    ara_data: "{{ ara_root_data.stdout|to_json }}"
-    measurement: "undercloud"
-    data_file: "{{ influxdb_data_file_path }}"
-    only_successful_tasks: "{{ influxdb_only_successful_tasks }}"
-    mapped_fields: false
-    standard_fields: false
-    longest_tasks: 15
-  when: ara_root_data.stdout != "[]"
-  no_log: true
-
-- name: Get ARA json data for overcloud
-  shell: "{{ local_working_dir }}/bin/ara result list --all -f json" # noqa 305
-  register: ara_oc_data
-  environment:
-    ARA_DATABASE: 'sqlite:///{{ ara_overcloud_db_path }}'
-  no_log: true
-  changed_when: false
-
-- name: Collect and send data to InfluxDB
-  ara_influxdb:
-    influxdb_url: "{{ influxdb_url|default('') }}"
-    influxdb_port: "{{ influxdb_port }}"
-    influxdb_user: "{{ influxdb_user }}"
-    influxdb_password: "{{ influxdb_password }}"
-    influxdb_db: "{{ influxdb_dbname }}"
-    ara_data: "{{ ara_oc_data.stdout|to_json }}"
-    measurement: "overcloud"
-    data_file: "{{ influxdb_data_file_path }}"
-    only_successful_tasks: "{{ influxdb_only_successful_tasks }}"
-    mapped_fields: false
-    standard_fields: false
-    longest_tasks: 15
-  when: ara_oc_data.stdout != "[]"
-  no_log: true
diff --git a/roles/collect_logs/tasks/sanitize_log_strings.yaml b/roles/collect_logs/tasks/sanitize_log_strings.yaml
deleted file mode 100644
index 1d57d26..0000000
--- a/roles/collect_logs/tasks/sanitize_log_strings.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-# See README section 'Sanitizing Log Strings'
-- name: Sanitize logs to remove senstive details
-  find:
-    paths: "{{ outer_item.dir_path }}"
-    patterns: "{{ outer_item.file_pattern }}"
-  register: files_with_pattern
-
-- name: Replace orig_string with sanitized_string in the files
-  replace:
-    path: "{{ item.path }}"
-    regexp: "{{ outer_item.orig_string }}"
-    replace: "{{ outer_item.sanitized_string }}"
-  with_items: "{{ files_with_pattern.files }}"
diff --git a/roles/collect_logs/tasks/sova.yml b/roles/collect_logs/tasks/sova.yml
deleted file mode 100644
index a51708c..0000000
--- a/roles/collect_logs/tasks/sova.yml
+++ /dev/null
@@ -1,47 +0,0 @@
----
-- block:
-    - name: Load sova patterns from URL
-      uri:
-        url: https://opendev.org/openstack/tripleo-ci-health-queries/raw/branch/master/output/sova-pattern-generated.json
-        method: GET
-        return_content: true
-        status_code: 200
-        body_format: json
-        retries: 3 # to avoid accidental failures due to networking or rate limiting
-        delay: 60
-      register: pattern_config
-
-    - name: Set sova_config from URL content
-      set_fact:
-        sova_config: "{{ pattern_config.json }}"
-  when: sova_config_file is not defined
-
-- block:
-    - name: Load sova patterns from local file
-      command: cat "{{ sova_config_file }}"
-      register: sova_config_file_output
-
-    - name: Set sova_config from local file
-      set_fact:
-        sova_config: "{{ sova_config_file_output.stdout | from_json }}"
-  when: sova_config_file is defined
-
-- name: Run sova task
-  sova:
-    config: "{{ sova_config }}"
-    files:
-      console: "{{ ansible_user_dir }}/workspace/logs/quickstart_install.log"
-      errors: "/var/log/errors.txt"
-      "ironic-conductor": "/var/log/containers/ironic/ironic-conductor.log"
-      syslog: "/var/log/journal.txt"
-      logstash: "/var/log/extra/logstash.txt"
-      registry_log: "/var/log/extra/podman/containers/docker_registry/stdout.log"
-    result: "{{ ansible_user_dir }}/workspace/logs/failures_file"
-    result_file_dir: "{{ ansible_user_dir }}/workspace/logs"
-
-- name: Run sova task (denials)
-  sova:
-    config: "{{ sova_config }}"
-    files:
-      selinux: "/var/log/extra/denials.txt"
-    result: "{{ ansible_user_dir }}/workspace/logs/selinux_denials.log"
diff --git a/roles/collect_logs/templates/full_logs.html.j2 b/roles/collect_logs/templates/full_logs.html.j2
deleted file mode 100644
index 0e4ced3..0000000
--- a/roles/collect_logs/templates/full_logs.html.j2
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
-
-
-
-  Redirection to logs
-
-
-
-  If you are not redirected automatically, follow the link to the logs.
-
-
-
diff --git a/roles/collect_logs/templates/index.rst.j2 b/roles/collect_logs/templates/index.rst.j2
deleted file mode 100644
index efec820..0000000
--- a/roles/collect_logs/templates/index.rst.j2
+++ /dev/null
@@ -1,22 +0,0 @@
-Welcome to collect-logs Documentation:
-===================================
-
-.. note:: This documentation was generated by the collect-logs_ role. If you
-   find any problems, please note the TripleO-Quickstart call, if available,
-   that was used to deploy the environment and create a bug on Launchpad for
-   tripleo-quickstart_.
-
-.. _collect-logs: https://github.com/openstack/tripleo-quickstart-extras/tree/master/roles/collect-logs#documentation-generation-related
-.. _tripleo-quickstart: https://bugs.launchpad.net/tripleo-quickstart/+filebug
-
---------
-Contents
---------
-
-.. toctree::
-   :maxdepth: 2
-   :numbered:
-
-{% for doc in artcl_create_docs_payload.table_of_contents %}
-   {{ doc }}
-{% endfor %}
diff --git a/roles/collect_logs/templates/odl_extra_logs.j2 b/roles/collect_logs/templates/odl_extra_logs.j2
deleted file mode 100644
index e11e51f..0000000
--- a/roles/collect_logs/templates/odl_extra_logs.j2
+++ /dev/null
@@ -1,20 +0,0 @@
-echo "+ ip -o link" > {{ odl_extra_info_log }};
-ip -o link &>> {{ odl_extra_info_log }};
-echo "+ ip -o addr" >> {{ odl_extra_info_log }};
-ip -o addr &>> {{ odl_extra_info_log }};
-echo "+ arp -an" >> {{ odl_extra_info_log }};
-arp -an &>> {{ odl_extra_info_log }};
-echo "+ ip netns list" >> {{ odl_extra_info_log }};
-ip netns list &>> {{ odl_extra_info_log }};
-echo "+ ovs-ofctl -OOpenFlow13 show br-int" >> {{ odl_extra_info_log }};
-ovs-ofctl -OOpenFlow13 show br-int &>> {{ odl_extra_info_log }};
-echo "+ ovs-ofctl -OOpenFlow13 dump-flows br-int" >> {{ odl_extra_info_log }};
-ovs-ofctl -OOpenFlow13 dump-flows br-int &>> {{ odl_extra_info_log }};
-echo "+ ovs-ofctl -OOpenFlow13 dump-groups br-int" >> {{ odl_extra_info_log }};
-ovs-ofctl -OOpenFlow13 dump-groups br-int &>> {{ odl_extra_info_log }};
-echo "+ ovs-ofctl -OOpenFlow13 dump-group-stats br-int" >> {{ odl_extra_info_log }};
-ovs-ofctl -OOpenFlow13 dump-group-stats br-int &>> {{ odl_extra_info_log }};
-echo "+ ovs-vsctl list Open_vSwitch" >> {{ odl_extra_info_log }};
-ovs-vsctl list Open_vSwitch &>> {{ odl_extra_info_log }};
-echo "+ ovs-vsctl show" >> {{ odl_extra_info_log }};
-ovs-vsctl show &>> {{ odl_extra_info_log }};
diff --git a/roles/collect_logs/templates/rsync-filter.j2 b/roles/collect_logs/templates/rsync-filter.j2
deleted file mode 100644
index 2579f05..0000000
--- a/roles/collect_logs/templates/rsync-filter.j2
+++ /dev/null
@@ -1,30 +0,0 @@
-# Exclude these paths to speed up the filtering
-# These need to be removed/made more specific if we decide to collect
-# anything under these paths
-- /dev
-- /proc
-- /run
-- /sys
-
-# Exclude paths
-{% for exclude_path in artcl_exclude_list|default([]) %}
-- {{ exclude_path }}
-{% endfor %}
-
-# Include all subdirectories and log files in the check
-# See "INCLUDE/EXCLUDE PATTERN RULES" section about --recursive
-# in the rsync man page
-+ */
-+ *.log
-
-# Include paths
-{% for include_path in collect_list|default([]) %}
-{% if include_path|list|last == "/" %}
-+ {{ include_path }}**
-{% else %}
-+ {{ include_path }}
-{% endif %}
-{% endfor %}
-
-# Exclude everything else
-- *
diff --git a/roles/collect_logs/vars/family-redhat.yml b/roles/collect_logs/vars/family-redhat.yml
deleted file mode 100644
index 4d2f143..0000000
--- a/roles/collect_logs/vars/family-redhat.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-# list of packages to be installed for collection
-artcl_collect_pkg_list:
-  - gzip
-  - rsync
-  - socat
-  - tar
diff --git a/roles/collect_logs/vars/infrared-collect-exclude-list.yml b/roles/collect_logs/vars/infrared-collect-exclude-list.yml
deleted file mode 100644
index 5b3c833..0000000
--- a/roles/collect_logs/vars/infrared-collect-exclude-list.yml
+++ /dev/null
@@ -1,66 +0,0 @@
----
-artcl_collect_list:
-  - /etc/
-  - /etc/neutron
-  - /etc/tempest/saved_state.json
-  - /etc/tempest/tempest.conf
-  - /etc/tempest/tempest.conf.sample
-  - /etc/tempest/*.xml
-  - /home/*/*/black_list_*
-  - /home/*/composable_roles/network/nic-configs/
-  - /home/*/composable_roles/*/*.yaml
-  - /home/*/composable_roles/*.yaml
-  - /home/*/*.conf
-  - /home/*/deploy-overcloudrc
-  - /home/*/.instack/install-undercloud.log
-  - /home/*/inventory/group_vars/*.yml
-  - /home/*/*.json
-  - /home/*/*.log
-  - /home/*/openshift_deploy_logs/*.log
-  - /home/*/ovb
-  - /home/*/overcloud_deploy.sh
-  - /home/*/overcloudrc*
-  - /home/*/overcloudrc*
-  - /home/*/robot/
-  - /home/*/*.sh
-  - /home/*/shiftstackrc*
-  - /home/*/stackrc
-  - /home/*/tempest*/etc/*.conf
-  - /home/*/tempest*/*.log
-  - /home/*/tempest*/saved_state.json
-  - /home/*/tempest*/*.xml
-  - /home/*/.tripleo
-  - /home/*/undercloud-install-*.tar.bzip2
-  - /home/*/virt
-  - /home/*/*/white_list_*
-  - /home/*/*.yaml
-  - /home/*/*.yml
-  - /root/
-  - /usr/share/ceph-osd-run.sh
-  - /usr/share/openstack-tripleo-heat-templates
-  - /var/lib/config-data/
-  - /var/lib/config-data/puppet-generated/
-  - /var/lib/config-data/puppet-generated/neutron/etc/neutron/plugins/
-  - /var/lib/docker-puppet
-  - /var/lib/heat-config
-  - /var/lib/mistral/
-  - /var/lib/openvswitch/ovn/*.db
-  - /var/lib/tripleo-config
-  - /var/log/
-  - /var/log/containers/opendaylight
-  - /var/log/extra/containers/
-  - /var/log/extra/podman/containers
-  - /var/run/heat-config
-  - /var/tmp/packstack
-
-artcl_exclude_list:
-  - '.*'
-  - /etc/pki/*
-  - /etc/selinux/targeted/*
-  - /root/*.initrd*
-  - /root/*.tar*
-  - /root/*.vmlinuz*
-  - /root/*.qcow*
-  - /udev/hwdb.bin
-  - /var/lib/config-data/*/etc/puppet/modules
-  - /var/log/journal/*
diff --git a/roles/collect_logs/vars/unsecure.yml b/roles/collect_logs/vars/unsecure.yml
deleted file mode 100644
index a6aec87..0000000
--- a/roles/collect_logs/vars/unsecure.yml
+++ /dev/null
@@ -1,10 +0,0 @@
----
-# these vars are defaults overrides which are loaded only when the role is
-# running in an unsecured environment (outside zuul).
-# * any call env lookup plugin under zuul would generate a runtime exception
-ara_graphite_prefix: "tripleo.{{ lookup('env', 'STABLE_RELEASE')|default('master', true) }}.{{ lookup('env', 'TOCI_JOBTYPE') }}."
-artcl_build_url: "{{ lookup('env', 'BUILD_URL') }}"
-artcl_build_tag: "{{ lookup('env', 'BUILD_TAG') }}"
-artcl_rsync_password: "{{ lookup('env', 'RSYNC_PASSWORD') }}"
-influxdb_data_file_path: "{{ lookup('env', 'LOCAL_WORKING_DIR')|default('/tmp', true) }}/influxdb_data"
-local_working_dir: "{{ zuul_work_dir | default(lookup('env', 'HOME')) }}/.quickstart"
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 7e5076d..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,45 +0,0 @@
-[metadata]
-name = ansible-role-collect-logs
-summary = ansible-role-collect-logs - An Ansible role for aggregating logs from different nodes.
-description_file =
-    README.rst
-author = TripleO Team
-author_email = openstack-discuss@lists.openstack.org
-home_page = https://opendev.org/openstack/ansible-role-collect-logs
-classifier =
-    License :: OSI Approved :: Apache Software License
-    Development Status :: 4 - Beta
-    Intended Audience :: Developers
-    Intended Audience :: System Administrators
-    Intended Audience :: Information Technology
-    Topic :: Utilities
-
-[global]
-setup_hooks =
-    pbr.hooks.setup_hook
-
-[files]
-# Allows us to install the role using pip so Ansible can find it.
-data_files =
-    share/ansible/roles/collect-logs = roles/collect_logs/*
-    share/ansible/roles/collect-logs/library = plugins/modules/*
-    share/ansible/roles/collect-logs/module_utils = plugins/module_utils/*
-    share/ansible/roles/collect-logs/docs = docs/*
-
-[wheel]
-universal = 1
-
-[pbr]
-skip_authors = True
-skip_changelog = True
-
-[flake8]
-# black compatible settings
-# https://black.readthedocs.io/en/stable/the_black_code_style.html
-max_line_length = 88
-extend_ignore = E203,E501,W503,BLK100
-show_source = True
-builtins = _
-
-[isort]
-profile=black
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 8407074..0000000
--- a/setup.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright Red Hat, Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import absolute_import, division, print_function
-
-import setuptools
-
-__metaclass__ = type
-
-
-setuptools.setup(setup_requires=["pbr"], pbr=True, py_modules=[])
diff --git a/test-playbooks/zuul-ansible-role-collect-logs.yaml b/test-playbooks/zuul-ansible-role-collect-logs.yaml
deleted file mode 100644
index 30c12cb..0000000
--- a/test-playbooks/zuul-ansible-role-collect-logs.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-- hosts: all
-  tasks:
-    - name: include ansible-role-collect-logs role
-      vars:
-        artcl_collect: true
-        artcl_publish: true
-      include_role:
-        name: collect_logs
diff --git a/test-requirements.txt b/test-requirements.txt
deleted file mode 100644
index 6260227..0000000
--- a/test-requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-pre-commit>=1.20.0 # MIT
-pytest
-pytest-mock
-pyyaml
diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt
deleted file mode 100644
index 83ff2bf..0000000
--- a/tests/sanity/ignore-2.10.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-plugins/modules/ara_graphite.py validate-modules:missing-gplv3-license
-plugins/modules/ara_influxdb.py validate-modules:missing-gplv3-license
-plugins/modules/flatten_nested_dict.py validate-modules:missing-gplv3-license
-plugins/modules/sova.py validate-modules:missing-gplv3-license
diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt
deleted file mode 120000
index 0c39ad8..0000000
--- a/tests/sanity/ignore-2.11.txt
+++ /dev/null
@@ -1 +0,0 @@
-ignore-2.10.txt
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt
deleted file mode 120000
index 0c39ad8..0000000
--- a/tests/sanity/ignore-2.12.txt
+++ /dev/null
@@ -1 +0,0 @@
-ignore-2.10.txt
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt
deleted file mode 120000
index 0c39ad8..0000000
--- a/tests/sanity/ignore-2.13.txt
+++ /dev/null
@@ -1 +0,0 @@
-ignore-2.10.txt
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt
deleted file mode 120000
index 0c39ad8..0000000
--- a/tests/sanity/ignore-2.9.txt
+++ /dev/null
@@ -1 +0,0 @@
-ignore-2.10.txt
\ No newline at end of file
diff --git a/tests/sanity/requirements.txt b/tests/sanity/requirements.txt
deleted file mode 100644
index 1c6d8b4..0000000
--- a/tests/sanity/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pyyaml
-requests
diff --git a/tests/unit/requirements.txt b/tests/unit/requirements.txt
deleted file mode 100644
index bfd196b..0000000
--- a/tests/unit/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-pytest
-pytest-mock
-mock; python_version < '3.3'
diff --git a/tests/unit/test_flatten_nested_dict.py b/tests/unit/test_flatten_nested_dict.py
deleted file mode 100644
index 6623f07..0000000
--- a/tests/unit/test_flatten_nested_dict.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import pytest
-import yaml
-
-try:
-    # ansible-test style imports
-    from ansible_collections.tripleo.collect_logs.plugins.module_utils.test_utils import (
-        AnsibleExitJson,
-        AnsibleFailJson,
-        ModuleTestCase,
-        set_module_args,
-    )
-    from ansible_collections.tripleo.collect_logs.plugins.modules import (
-        flatten_nested_dict,
-    )
-except ImportError:
-    # avoid collection errors running: pytest --collect-only
-    import os
-    import sys
-
-    plugins_path = os.path.join(os.path.dirname(__file__), "../../plugins/")
-    plugins_path = os.path.realpath(plugins_path)
-    sys.path.append("%s/%s" % (plugins_path, "module_utils"))
-    sys.path.append("%s/%s" % (plugins_path, "modules"))
-    import flatten_nested_dict
-    from test_utils import (
-        AnsibleExitJson,
-        AnsibleFailJson,
-        ModuleTestCase,
-        set_module_args,
-    )
-
-
-__metaclass__ = type
-SAMPLE_INPUT_1 = """
-data:
-  system:
-    cpuinfo:
-      cmd: cat /proc/cpuinfo
-      capture_file: /var/log/extra/cpuinfo.txt
-"""
-
-SAMPLE_OUTPUT_1 = """
-data:
-  - cmd: cat /proc/cpuinfo
-    capture_file: /var/log/extra/cpuinfo.txt
-    name: cpuinfo
-    group: system
-"""
-
-
-class TestFlattenNestedDict(ModuleTestCase):
-    def test_invalid_args(self):
-        set_module_args(
-            data="invalid",
-        )
-        with pytest.raises(AnsibleFailJson) as context:
-            flatten_nested_dict.main()
-        assert context.value.args[0]["failed"] is True
-        assert "msg" in context.value.args[0]
-
-    def test_empty(self):
-        set_module_args(
-            data={},
-        )
-        with pytest.raises(AnsibleExitJson) as context:
-            flatten_nested_dict.main()
-        assert context.value.args[0] == {"data": [], "changed": False}
-
-    def test_one(self):
-        set_module_args(data=yaml.safe_load(SAMPLE_INPUT_1)["data"])
-        with pytest.raises(AnsibleExitJson) as context:
-            flatten_nested_dict.main()
-        assert context.value.args[0]["changed"] is False
-        assert context.value.args[0]["data"] == yaml.safe_load(SAMPLE_OUTPUT_1)["data"]
diff --git a/tests/unit/test_sova.py b/tests/unit/test_sova.py
deleted file mode 100644
index f4f5678..0000000
--- a/tests/unit/test_sova.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import pytest
-
-try:
-    # ansible-test style imports
-    from ansible_collections.tripleo.collect_logs.plugins.module_utils.test_utils import (
-        AnsibleExitJson,
-        AnsibleFailJson,
-        ModuleTestCase,
-        set_module_args,
-    )
-    from ansible_collections.tripleo.collect_logs.plugins.modules import sova
-except ImportError:
-    # avoid collection errors running: pytest --collect-only
-    import os
-    import sys
-
-    plugins_path = os.path.join(os.path.dirname(__file__), "../../plugins/")
-    plugins_path = os.path.realpath(plugins_path)
-    sys.path.append("%s/%s" % (plugins_path, "module_utils"))
-    sys.path.append("%s/%s" % (plugins_path, "modules"))
-    import sova
-    from test_utils import (
-        AnsibleExitJson,
-        AnsibleFailJson,
-        ModuleTestCase,
-        set_module_args,
-    )
-
-__metaclass__ = type
-
-
-class TestFlattenNestedDict(ModuleTestCase):
-    def test_invalid_args(self):
-        set_module_args(
-            data="invalid",
-        )
-        with pytest.raises(AnsibleFailJson) as context:
-            sova.main()
-        assert context.value.args[0]["failed"] is True
-        assert "msg" in context.value.args[0]
-
-    def test_min(self):
-        set_module_args(
-            # just a file that exists on almost any platform
-            config={
-                "regexes": [{"regex": "127.0.0.1", "name": "hosts"}],
-                "patterns": {
-                    "console": [
-                        {
-                            "id": 1,
-                            "logstash": "",
-                            "msg": "Overcloud stack installation: SUCCESS.",
-                            "pattern": "Stack overcloud CREATE_COMPLETE",
-                            "tag": "info",
-                        }
-                    ]
-                },
-            },
-            files={"console": "/etc/hosts"},
-        )
-        with pytest.raises(AnsibleExitJson) as context:
-            sova.main()
-        assert context.value.args[0]["changed"] is True
-        assert context.value.args[0]["processed_files"] == ["/etc/hosts"]
-        assert "message" in context.value.args[0]
-        assert context.value.args[0]["tags"] == []
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index eac6359..0000000
--- a/tox.ini
+++ /dev/null
@@ -1,83 +0,0 @@
-[tox]
-# for tty option
-minversion = 3.4.0
-envlist = docs, linters, molecule
-skipsdist = True
-requires =
-    tox-ansible >= 1.5.3
-    tox<4
-
-[testenv]
-usedevelop = True
-# Do not use openstack constraint files here, this repository is used for CI
-# and should not be subject to these.
-passenv =
-    ANSIBLE_*
-    CURL_CA_BUNDLE
-    PODMAN_*
-    DOCKER_*
-    HOME
-    REQUESTS_CA_BUNDLE
-    SSH_AUTH_SOCK
-    SSL_CERT_FILE
-    TERM
-setenv = VIRTUAL_ENV={envdir}
-deps = -r{toxinidir}/test-requirements.txt
-whitelist_externals = bash
-
-[testenv:bindep]
-basepython = python3
-# Do not install any requirements. We want this to be fast and work even if
-# system dependencies are missing, since it's used to tell you what system
-# dependencies are missing! This also means that bindep must be installed
-# separately, outside of the requirements files.
-deps = bindep
-commands = bindep test
-
-[testenv:linters]
-basepython = python3
-setenv =
-    ANSIBLE_LIBRARY=./library
-commands =
-    # check only modified files:
-    python -m pre_commit run -a
-
-[testenv:releasenotes]
-basepython = python3
-whitelist_externals = bash
-commands = bash -c ci-scripts/releasenotes_tox.sh
-
-[testenv:venv]
-basepython = python3
-commands = {posargs}
-
-[testenv:molecule]
-setenv =
-    ANSIBLE_FORCE_COLOR=1
-    ANSIBLE_CALLBACK_WHITELIST=profile_tasks
-    ANSIBLE_SHOW_CUSTOM_STATS=1
-    PYTHONPATH={env:PYTHONPATH:}:library
-    # failsafe mechanism to avoid accidental disablement of tests
-    # 2/3 molecule scenarios are expected to pass
-    PYTEST_REQPASS=2
-deps =
-    ansible-core>=2.11,<2.12
-    molecule[test,podman]>=3.3.2,<3.4 # MIT
-    pytest-molecule>=1.4.0
-    pytest-plus # provides support for PYTEST_REQPASS
-commands =
-    ansible-galaxy collection install --force containers.podman:=1.8.2 community.general:=4.0.2
-    python -m pytest --color=yes --html={envlogdir}/reports.html --self-contained-html {tty:-s} -k molecule {posargs}
-
-[testenv:ansible]
-description = Used as base for all tox-ansible environments
-
-# https://github.com/ansible-community/tox-ansible/issues/96
-[testenv:sanity]
-usedevelop = False
-deps =
-    ansible-core>=2.11,<2.14
-
-[testenv:units]
-usedevelop = False
-deps =
diff --git a/zuul.d/layout.yaml b/zuul.d/layout.yaml
deleted file mode 100644
index 3c64a30..0000000
--- a/zuul.d/layout.yaml
+++ /dev/null
@@ -1,101 +0,0 @@
----
-- job:
-    name: tox-ansible-test-sanity
-    description: Runs ansible-test sanity (tox -e sanity)
-    parent: tox
-    vars:
-      tox_envlist: sanity # dynamic tox env added by tox-ansible
-      # we want to run sanity only on py3.10 instead of implicit 2.6-3.9 range
-      tox_extra_args: -- --python 3.10
-
-- job:
-    name: tox-ansible-test-units
-    description: Runs ansible-test sanity (tox -e sanity)
-    parent: tox
-    vars:
-      tox_envlist: units # dynamic tox env added by tox-ansible
-      # we want to run sanity only on py3.10 instead of implicit 2.6-3.9 range
-      tox_extra_args: -- --python 3.10
-
-- job:
-    name: zuul-ansible-role-collect-logs
-    description: Validate that zuul can use that role.
-    parent: base
-    run: test-playbooks/zuul-ansible-role-collect-logs.yaml
-    roles:
-      - zuul: opendev.org/openstack/ansible-role-collect-logs
-    irrelevant-files:
-      - ^vars/sova-patterns.yml$
-
-- job:
-    description: Molecule job
-    name: tripleo-logs-tox-molecule
-    parent: tripleo-tox-molecule
-    pre-run:
-      - zuul.d/playbooks/pre.yml
-    timeout: 3600
-
-- project:
-    vars:
-      fail_logs_collection: true
-    queue: tripleo
-    check:
-      jobs: &jobs
-        - openstack-tox-linters
-        - tripleo-logs-tox-molecule
-        - tox-ansible-test-sanity
-        - tox-ansible-test-units
-        - zuul-ansible-role-collect-logs
-    experimental:
-      jobs:
-        # Limit the number of jobs executed while still assuring a relevant
-        # level of coverage. If specific tasks are to be tested we should
-        # consider implementing functional tests for them, especially as
-        # running full integration does not effectively validates that the
-        # outcome was produced (they still rely on manual verification).
-        - tripleo-ci-centos-8-scenario001-standalone:
-            irrelevant-files: &irrelevant_sa
-              # do not put requirements.txt here, as it can have a huge impact
-              - ^.*\.md$
-              - ^.*\.rst$
-              - ^.ansible-lint$
-              - ^.pre-commit-config.yaml$
-              - ^.yamllint$
-              - ^Puppetfile.*$
-              - ^doc/.*$
-              - ^etc/.*$
-              - ^lower-constraints.txt$
-              - ^metadata.json$
-              - ^releasenotes/.*$
-              - ^spec/.*$
-              - ^test-requirements.txt$
-              - ^vars/sova-patterns.yml$
-              - ^zuul.d/tox\.yaml$
-              - tox.ini
-            vars:
-              consumer_job: false
-              build_container_images: true
-              tags:
-                - standalone
-        - tripleo-ci-centos-8-containers-multinode:
-            irrelevant-files: *irrelevant_sa
-            vars:
-              consumer_job: false
-              build_container_imags: true
-              tags:
-                - undercloud-setup
-                - undercloud-scripts
-                - undercloud-install
-                - undercloud-post-install
-                - tripleo-validations
-                - overcloud-scripts
-                - overcloud-prep-config
-                - overcloud-prep-containers
-                - overcloud-deploy
-                - overcloud-post-deploy
-                - overcloud-validate
-    gate:
-      jobs: *jobs
-    post:
-      jobs:
-        - publish-openstack-python-branch-tarball
diff --git a/zuul.d/playbooks/pre.yml b/zuul.d/playbooks/pre.yml
deleted file mode 100644
index eea6858..0000000
--- a/zuul.d/playbooks/pre.yml
+++ /dev/null
@@ -1,25 +0,0 @@
----
-- hosts: all
-  tasks:
-
-    - name: Remove docker from molecule
-      become: true
-      package:
-        name:
-          - docker-ce
-          - docker-ce-cli
-          - containerd.io
-        state: absent
-
-    - name: Set containers module to 3.0
-      become: true
-      shell: |
-        dnf module disable container-tools:rhel8 -y
-        dnf module enable container-tools:3.0 -y
-        dnf clean metadata
-      tags:
-        - skip_ansible_lint
-
-    - name: Install podman
-      include_role:
-        name: ensure-podman