Retire TripleO: remove repo content

The TripleO project is retiring
- https://review.opendev.org/c/openstack/governance/+/905145

This commit removes the content of this project's repo.

Change-Id: I3732addd9d6544f232885cd5d731104e9f804406
Ghanshyam Mann 2024-02-24 11:34:25 -08:00
parent f273ccbb96
commit fa15022851
198 changed files with 8 additions and 8607 deletions

@@ -1,39 +0,0 @@
exclude_paths:
  - releasenotes/
  - roles/fetch_validations/
parseable: true
quiet: false
rulesdir:
  - .ansible-lint_rules/
# Mock modules or roles in order to pass ansible-playbook --syntax-check
mock_modules: []
mock_roles:
  - fetch-devstack-log-dir
  - fetch-output
  - fetch-python-sdist-output
  - fetch-subunit-output
  - ensure-if-python
  - ensure-pip
  - ensure-tox
  - ensure-virtualenv
skip_list:
  # Lines should be no longer than 120 chars.
  - '204'
  # Using command rather than module, for cases where
  # we need to use curl or rsync.
  - '303'
  # shell tasks use a pipeline without pipefail;
  # this requires refactoring, skip for now.
  - '306'
  # Tasks that run when changed should likely be handlers;
  # this requires refactoring, skip for now.
  - '503'
  # meta/main.yml should contain relevant info
  - '701'
  # Tags must contain lowercase letters and digits only
  - '702'
  # meta/main.yml default values should be changed
  - '703'
verbosity: 1
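
For context, ansible-lint rule '306' (skipped above) fires on shell tasks
that pipe without failing early; a task that would satisfy it looks roughly
like the following hypothetical sketch (not part of this repo)::

    - name: Count chrony synchronisation messages
      shell: |
        set -o pipefail
        journalctl -u chronyd | grep -c 'System clock'
      register: sync_count
      changed_when: false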

@@ -1,37 +0,0 @@
# Molecule managed
# Copyright 2021 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
{% if item.registry is defined %}
FROM {{ item.registry.url }}/{{ item.image }}
{% else %}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install sudo python*-devel python*-dnf bash epel-release {{ item.pkg_extras | default('') }} && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl python-setuptools bash {{ item.pkg_extras | default('') }} && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml {{ item.pkg_extras | default('') }} && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates {{ item.pkg_extras | default('') }}; \
elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates {{ item.pkg_extras | default('') }} && xbps-remove -O; fi
{% for pkg in item.easy_install | default([]) %}
# install pip for centos where there is no python-pip rpm in default repos
RUN easy_install {{ pkg }}
{% endfor %}
CMD ["sh", "-c", "while true; do sleep 10000; done"]

@@ -1,70 +0,0 @@
---
# validations-common uses a shared molecule configuration file to avoid
# repetition. That configuration file is located at the repository level
# ({REPO}/.config/molecule/config.yml) and defines all the default values for
# all the molecule.yml files across all the roles. By default, the role-addition
# process will produce an empty molecule.yml inheriting this config.yml file.
#
# Any key defined in the role molecule.yml file will override values from this
# config.yml file.
#
# IMPORTANT: if you want to override the default values set here in this file,
# you will have to redefine them completely in your molecule.yml (at the role
# level) and add your extra configuration!
#
# For instance, if you need to add an extra package in your CentOS 8 Stream
# container, you will have to add the entire "platforms" key into your
# molecule.yml file and add your package name in the pkg_extras key.
#
# No merge will happen between your molecule.yml and this config.yml
# files. That's why you will have to redefine them completely.
driver:
  name: podman
log: true
platforms:
  - name: centos
    hostname: centos
    image: centos/centos:stream8
    registry:
      url: quay.io
    dockerfile: ../../../../../.config/molecule/Dockerfile
    pkg_extras: python*-setuptools python*-pyyaml
    volumes:
      - /etc/ci/mirror_info.sh:/etc/ci/mirror_info.sh:ro
    privileged: true
    environment: &env
      http_proxy: "{{ lookup('env', 'http_proxy') }}"
      https_proxy: "{{ lookup('env', 'https_proxy') }}"
    ulimits: &ulimit
      - host
provisioner:
  name: ansible
  inventory:
    hosts:
      all:
        hosts:
          centos:
            ansible_python_interpreter: /usr/bin/python3
  log: true
  options:
    vvv: true
  env:
    ANSIBLE_STDOUT_CALLBACK: yaml
    ANSIBLE_ROLES_PATH: "${ANSIBLE_ROLES_PATH}:${HOME}/zuul-jobs/roles"
    ANSIBLE_LIBRARY: "${ANSIBLE_LIBRARY:-/usr/share/ansible/plugins/modules}"
scenario:
  test_sequence:
    - destroy
    - create
    - prepare
    - converge
    - verify
    - destroy
verifier:
  name: ansible
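
Because no merge happens between a role's molecule.yml and this config.yml, a
role that needs one extra package has to restate the entire platforms key; a
minimal sketch, with the added package name purely illustrative::

    platforms:
      - name: centos
        hostname: centos
        image: centos/centos:stream8
        registry:
          url: quay.io
        dockerfile: ../../../../../.config/molecule/Dockerfile
        pkg_extras: python*-setuptools python*-pyyaml iproute
        volumes:
          - /etc/ci/mirror_info.sh:/etc/ci/mirror_info.sh:ro
        privileged: true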

@@ -1,7 +0,0 @@
[run]
branch = True
source = validations_common
omit = validations_common/tests/*
[report]
ignore_errors = True

@@ -1,41 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.155.1/containers/docker-existing-dockerfile
{
    "name": "Existing Dockerfile",

    // Sets the run context to one level up instead of the .devcontainer folder.
    "context": "..",

    // Set *default* container specific settings.json values on container create.
    "settings": {
        "terminal.integrated.shell.linux": null,
    },

    // Add the IDs of extensions you want installed when the container is created.
    "extensions": [
        "ms-python.python"
    ],

    "dockerFile": "../Dockerfile",

    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],

    // Uncomment the next line to run commands after the container is created - for example installing curl.
    // "postCreateCommand": "apt-get update && apt-get install -y curl",

    // Uncomment when using a ptrace-based debugger like C++, Go, and Rust
    // "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ],

    // Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
    // "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],

    // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
    // "remoteUser": "vscode"

    // Required for an empty mount arg, since we manually add it in the runArgs
    "workspaceMount": "",
    "runArgs": [
        "--volume=${localWorkspaceFolder}:/workspaces/${localWorkspaceFolderBasename}:Z"
    ]
}

@@ -1,55 +0,0 @@
# Docker image doesn't need any files that git doesn't track.
# Therefore the .dockerignore largely follows the structure of .gitignore.
# C extensions
*.so
# Packages
*.egg*
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64
# Installer logs
pip-log.txt
# Unit test / coverage reports
cover/
.coverage*
!.coveragerc
.tox
nosetests.xml
.testrepository
.venv
.stestr/*
# Translations
*.mo
# Mr Developer
.mr.developer.cfg
.project
.pydevproject
# Complexity
output/*.html
output/*/index.html
# Sphinx
doc/build
doc/source/reference/api/
# pbr generates these
AUTHORS
ChangeLog
# Editors
*~
.*.swp
.*sw?
# Files created by releasenotes build
releasenotes/build
# Ansible specific
hosts
*.retry
# Vagrantfiles, since we are using docker
Vagrantfile.*

.gitignore
@@ -1,67 +0,0 @@
*.py[cod]
# C extensions
*.so
# Packages
*.egg*
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64
# Installer logs
pip-log.txt
# Unit test / coverage reports
cover/
.coverage*
!.coveragerc
.tox
nosetests.xml
.testrepository
.venv
.stestr/*
# Translations
*.mo
# Mr Developer
.mr.developer.cfg
.project
.pydevproject
# Complexity
output/*.html
output/*/index.html
# Sphinx
doc/build
doc/source/reference/api/
# pbr generates these
AUTHORS
ChangeLog
# Editors
*~
.*.swp
.*sw?
# Files created by releasenotes build
releasenotes/build
# Ansible specific
hosts
*.retry
# JSON files created by molecule tests
*/roles/*/molecule/*/*Z.json

@@ -1,54 +0,0 @@
---
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.4.0
    hooks:
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: mixed-line-ending
      - id: check-byte-order-marker
      - id: check-executables-have-shebangs
      - id: check-merge-conflict
      - id: check-symlinks
      - id: debug-statements
      - id: check-yaml
        files: .*\.(yaml|yml)$
  - repo: https://github.com/pycqa/flake8
    rev: 3.9.1
    hooks:
      - id: flake8
        additional_dependencies: [flake8-typing-imports==1.12.0]
        entry: flake8 --ignore=E24,E121,E122,E123,E124,E126,E226,E265,E305,E402,F401,F405,E501,E704,F403,F841,W503,W605
  - repo: https://github.com/adrienverge/yamllint.git
    rev: v1.30.0
    hooks:
      - id: yamllint
        files: \.(yaml|yml)$
        types: [file, yaml]
        entry: yamllint --strict -f parsable
  - repo: https://github.com/ansible-community/ansible-lint
    rev: v5.3.2
    hooks:
      - id: ansible-lint
        always_run: true
        pass_filenames: false
        additional_dependencies:
          - 'ansible-core<2.12'
        verbose: true
        entry: ansible-lint --force-color -p -v
  - repo: https://github.com/openstack-dev/bashate.git
    rev: 2.0.0
    hooks:
      - id: bashate
        entry: bashate --error . --verbose --ignore=E006,E040
        # Run bashate check for all bash scripts
        # Ignores the following rules:
        # E006: Line longer than 79 columns (as many scripts use jinja
        #       templating, this is very difficult)
        # E040: Syntax error determined using `bash -n` (as many scripts
        #       use jinja templating, this will often fail and the syntax
        #       error will be discovered in execution anyway)

@@ -1,6 +0,0 @@
---
packages:
  all:
    - name: "python-yaml"
    - name: "python-PyYAML"
    - name: "PyYAML"

@@ -1,3 +0,0 @@
[DEFAULT]
test_path=${TEST_PATH:-./validations_common/tests}
top_dir=./

@@ -1,14 +0,0 @@
---
extends: default

rules:
  line-length:
    # matches hardcoded 160 value from ansible-lint
    max: 160
  indentation:
    spaces: consistent
    indent-sequences: true
    check-multi-line-strings: false

ignore: |
  releasenotes/notes/*.yaml

@@ -1,224 +0,0 @@
---
- job:
    name: validations-common-functional
    parent: devstack
    run: playbooks/validations-common-functional.yaml
    post-run: playbooks/post.yaml
    timeout: 7200
    required-projects:
      - openstack/validations-libs
      - openstack/validations-common
      - name: openstack/openstacksdk
        override-checkout: master
      - name: opendev.org/openstack/devstack
        override-checkout: master
    vars:
      devstack_localrc:
        USE_PYTHON3: true
      run_validation: true
    irrelevant-files:
      - ^.*\.rst$
      - ^doc/.*$
      - ^releasenotes/.*$

- job:
    name: validations-common-reqcheck
    nodeset: centos-9-stream
    parent: base
    run: playbooks/reqcheck.yaml
    timeout: 1600
    voting: true
    required-projects:
      - openstack/validations-common
    files:
      - ^requirements.txt$

- project:
    templates:
      - check-requirements
      - validations-common-molecule-jobs
    check:
      jobs:
        - validations-common-reqcheck
        - openstack-tox-linters
        - openstack-tox-cover
        - openstack-tox-py38
        - openstack-tox-py39
        - validations-common-functional
        - openstack-tox-docs: &tripleo-docs
            files:
              - ^doc/.*
              - ^README.rst
              - ^validations_common/.*
              - ^CONTRIBUTING.rst
    gate:
      jobs:
        - openstack-tox-linters
        - openstack-tox-py38
        - openstack-tox-py39
        - validations-common-functional
        - openstack-tox-docs: *tripleo-docs
    promote:
      jobs:
        - promote-openstack-tox-docs: *tripleo-docs

- project-template:
    check:
      jobs:
        - validations-common-centos-8-molecule-advanced_format_512e_support
        - validations-common-centos-8-molecule-check_latest_packages_version
        - validations-common-centos-8-molecule-dns
        - validations-common-centos-8-molecule-ntp
        - validations-common-centos-8-molecule-service_status
        - validations-common-centos-8-molecule-check_cpu
        - validations-common-centos-8-molecule-check_disk_space
        - validations-common-centos-8-molecule-check_ram
        - validations-common-centos-8-molecule-check_selinux_mode
        - validations-common-centos-8-molecule-validate_selinux
        - validations-common-centos-8-molecule-xfs_check_ftype
        - validations-common-centos-8-molecule-check_rsyslog
    gate:
      jobs:
        - validations-common-centos-8-molecule-check_latest_packages_version
        - validations-common-centos-8-molecule-dns
        - validations-common-centos-8-molecule-check_cpu
        - validations-common-centos-8-molecule-check_disk_space
        - validations-common-centos-8-molecule-check_ram
        - validations-common-centos-8-molecule-validate_selinux
    name: validations-common-molecule-jobs

- job:
    files:
      - ^validations_common/roles/check_selinux_mode/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-check_selinux_mode
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: check_selinux_mode
    voting: false

- job:
    files:
      - ^validations_common/roles/ntp/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-ntp
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: ntp
    voting: false

- job:
    files:
      - ^validations_common/roles/service_status/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-service_status
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: service_status
    voting: false

- job:
    files:
      - ^validations_common/roles/advanced_format_512e_support/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-advanced_format_512e_support
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: advanced_format_512e_support
    voting: false

- job:
    files:
      - ^validations_common/roles/dns/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-dns
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: dns

- job:
    files:
      - ^validations_common/roles/check_cpu/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-check_cpu
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: check_cpu

- job:
    files:
      - ^validations_common/roles/check_ram/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-check_ram
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: check_ram

- job:
    files:
      - ^validations_common/roles/check_disk_space/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-check_disk_space
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: check_disk_space

- job:
    files:
      - ^validations_common/roles/xfs_check_ftype/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-xfs_check_ftype
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: xfs_check_ftype
    voting: false

- job:
    files:
      - ^validations_common/roles/check_latest_packages_version/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-check_latest_packages_version
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: check_latest_packages_version

- job:
    files:
      - ^validations_common/roles/validate_selinux/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-validate_selinux
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: validate_selinux

- job:
    files:
      - ^validations_common/roles/check_rsyslog/.*
      - ^tests/prepare-test-host.yml
      - ^playbooks/molecule/pre.yml
      - ^playbooks/molecule/run.yml
      - ^molecule-requirements.txt
    name: validations-common-centos-8-molecule-check_rsyslog
    parent: validations-common-centos-8-base
    vars:
      validations_common_role_name: check_rsyslog
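
Every per-role job above repeats the same shape: a files filter scoped to the
role, the shared validations-common-centos-8-base parent, and a
validations_common_role_name variable. Wiring up a hypothetical new role named
check_example would follow the same pattern (sketch only)::

    - job:
        files:
          - ^validations_common/roles/check_example/.*
          - ^tests/prepare-test-host.yml
          - ^playbooks/molecule/pre.yml
          - ^playbooks/molecule/run.yml
          - ^molecule-requirements.txt
        name: validations-common-centos-8-molecule-check_example
        parent: validations-common-centos-8-base
        vars:
          validations_common_role_name: check_example
        voting: false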

@@ -1,87 +0,0 @@
If you're interested in contributing to the validations-common project,
the following will help get you started:
https://docs.openstack.org/infra/manual/developers.html
If you already have a good understanding of how the system works and your
OpenStack accounts are set up, you can skip to the development workflow
section of this documentation to learn how changes to OpenStack should be
submitted for review via the Gerrit tool:
https://docs.openstack.org/infra/manual/developers.html#development-workflow
Pull requests submitted through GitHub will be ignored.
Validations are meant to verify the functionality of TripleO systems.
Therefore, special care should be given to testing your code before submitting a review.
The information below covers the project-specific information you'll
need to get started with the validation framework.
Branches and version management
===============================
The Validation Framework project uses semantic versioning and derives the names of stable
branches from the released minor versions. The latest minor version released is the only
exception, as it is derived from the `master` branch.
Therefore, all code used by version 1.n.* of the project resides in the `stable/1.n` branch,
and when version 1.(n+1) is released, a new branch `stable/1.(n+1)` will be created.
By default, stable branches receive only bug fixes; feature backports are decided on a
case-by-case basis after all the necessary discussions and procedures have taken place.
Documentation
=============
For a description of the provided validations, modules and tools, please refer to the `upstream documentation <https://docs.openstack.org/validations-common/latest/>`_.
Separate documentation is provided for the runtime, `validations-libs <https://docs.openstack.org/validations-libs/latest/>`_.
For general information on contributing to OpenStack, please check out the
`contributor guide <https://docs.openstack.org/contributors/>`_ to get started.
It covers all the basics that are common to all OpenStack projects: the accounts
you need, the basics of interacting with our Gerrit review system, how we
communicate as a community, etc.
Communication
=============
* IRC channel ``#validation-framework`` at `Libera`_ (for all subject matters)
* IRC channel ``#tripleo`` at `OFTC`_ (OpenStack and TripleO discussions)
* Mailing list (prefix subjects with ``[tripleo][validations]`` for faster responses)
http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-discuss
.. _Libera: https://libera.chat/
.. _OFTC: https://www.oftc.net/
Contacting the Core Team
========================
Please refer to the `TripleO Core Team
<https://review.opendev.org/#/admin/groups/190,members>`_ contacts.
Bug Tracking
=============
We track our tasks in `Launchpad <https://bugs.launchpad.net/tripleo/+bugs?field.tag=validations>`_ and in
`StoryBoard <https://storyboard.openstack.org/#!/project_group/76>`_
Reporting a Bug
===============
You found an issue and want to make sure we are aware of it? You can do so on
`Launchpad <https://bugs.launchpad.net/tripleo/+filebug>`__. Please add the
``validations`` tag to your bug.
More info about Launchpad usage can be found on the `OpenStack docs page
<https://docs.openstack.org/contributors/common/task-tracking.html#launchpad>`_.
Getting Your Patch Merged
=========================
All changes proposed to TripleO require two ``Code-Review +2`` votes from
TripleO core reviewers before one of the core reviewers can approve a patch by
giving it a ``Workflow +1`` vote.
Project Team Lead Duties
========================
All common PTL duties are enumerated in the `PTL guide
<https://docs.openstack.org/project-team-guide/ptl.html>`_.
The Release Process for TripleO is documented in `Release Management
<https://docs.openstack.org/tripleo-docs/latest/developer/release.html>`_.
Documentation for the TripleO project can be found `here <https://docs.openstack.org/tripleo-docs/latest/index.html>`_.

@@ -1,28 +0,0 @@
FROM redhat/ubi9:latest
LABEL name="Validations common development container file"
LABEL version="1.1"
LABEL description="Provides environment for development of new validations."
RUN dnf install -y git python3-pip gcc python3-devel jq
# Copy contents of the local validations-common repo with all of our changes
COPY . /root/validations-common
# validations-libs repo is cloned
RUN git clone https://opendev.org/openstack/validations-libs /root/validations-libs
# Install wheel, validations-libs, validations-common, pytest and all dependencies
RUN python3 -m pip install wheel &&\
    python3 -m pip install /root/validations-common &&\
    python3 -m pip install -r /root/validations-common/test-requirements.txt &&\
    python3 -m pip install pytest &&\
    python3 -m pip install /root/validations-libs
# Set up the default directory structure for both ansible
# and the VF
RUN ln -s /usr/local/share/ansible /usr/share/ansible &&\
    mkdir -p /var/log/validations
# A simplified ansible inventory is created, containing only localhost
# and defining the connection as local.
RUN mkdir -p /etc/ansible && \
    echo "localhost ansible_connection=local" > /etc/ansible/hosts

LICENSE
@@ -1,175 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

@@ -1,6 +0,0 @@
include AUTHORS
include ChangeLog
exclude .gitignore
exclude .gitreview
global-exclude *.pyc

@@ -1,114 +1,10 @@
==================
Validations-common
==================
This project is no longer maintained.
.. image:: https://governance.openstack.org/tc/badges/validations-common.svg
:target: https://governance.openstack.org/tc/reference/tags/index.html
The contents of this repository are still available in the Git
source code management system. To see the contents of this
repository before it reached its end of life, please check out the
previous commit with "git checkout HEAD^1".
.. Change things from this point on
A collection of Ansible roles and playbooks to detect and report potential
issues during deployments.
The validations will help detect issues early in the deployment process and
prevent field engineers from wasting time on misconfiguration or hardware
issues in their environments.
* Free software: Apache_license_
* Documentation: https://docs.openstack.org/validations-common/latest/
* Release notes: https://docs.openstack.org/releasenotes/validations-commons/
* Source: https://opendev.org/openstack/validations-common
* Bugs - Upstream: https://bugs.launchpad.net/tripleo/+bugs?field.tag=validations
* Bugs - Downstream: https://bugzilla.redhat.com/buglist.cgi?component=validations-common&product=Red%20Hat%20OpenStack
Installation
============
Recommended process
-------------------
There are several different ways to install validations-common.
However, it is **recommended** to both install and use
the package inside a Python virtual environment.
At the command line using `pip`.

.. code-block:: console

   $ python3 -m pip install validations-common

Or, if you have virtualenvwrapper_ installed.

.. code-block:: console

   $ mkvirtualenv validations-common
   $ python3 -m pip install validations-common
Installation with package manager
---------------------------------
Alternatively it is possible to install validations-common using a package
manager, such as `yum`.

.. code-block:: console

   $ yum install validations-common

or the more modern `dnf`.

.. code-block:: console

   $ dnf install validations-common
Usage
=====
Once the validations-common project has been installed,
navigate to the chosen share path, usually `/usr/share/ansible`,
to access the installed roles, playbooks, and libraries.
While validations-common can be run by itself,
it nonetheless depends on Ansible and validations-libs.
Therefore it isn't recommended to use validations-common on its own.
.. _virtualenvwrapper: https://pypi.org/project/virtualenvwrapper/
.. _Apache_license: http://www.apache.org/licenses/LICENSE-2.0
Validations Callbacks
=====================
http_json callback
------------------
The callback `http_json` sends Validations logs and information to an HTTP
server in JSON format, so they can be captured and analysed with external
log-parsing tools (such as Fluentd).
This callback inherits from `validation_json`; the logging format remains
the same as that of the other loggers the Validation Framework uses
by default.
To enable this callback, you need to add it to the callback whitelist.
Then you need to export your http server url and port::

   export HTTP_JSON_SERVER=http://localhost
   export HTTP_JSON_PORT=8989

The callback will post JSON logs to the URL provided.

This repository has a simple HTTP server for testing purposes under::

   tools/http_server.py

The default host and port are localhost and 8989; feel free to adjust those
values to your needs.
Running the molecule tests
==========================
In order to run the molecule tests on your local machine, please follow
the instructions documented in the `TripleO Validations` `Developers Guide`_.
.. _Developers Guide: https://docs.openstack.org/tripleo-validations/latest/contributing/developer_guide.html#local-testing-of-new-roles
For any further questions, please email
openstack-discuss@lists.openstack.org or join #openstack-dev on
OFTC.

@@ -1,6 +0,0 @@
---
collections:
  - containers.podman
  - community.general
  - community.crypto
  - ansible.posix

@@ -1,24 +0,0 @@
export VALIDATIONS_COMMON_WORKPATH="$(dirname $(readlink -f ${BASH_SOURCE[0]}))"
export ANSIBLE_STDOUT_CALLBACK=debug
export ANSIBLE_LIBRARY="${VALIDATIONS_COMMON_WORKPATH}/validations_common/library"
# TODO(gchamoul): uncomment once we host lookup plugins
#export ANSIBLE_LOOKUP_PLUGINS="${VALIDATIONS_COMMON_WORKPATH}/validations_common/lookup_plugins"
export ANSIBLE_ROLES_PATH="${VALIDATIONS_COMMON_WORKPATH}/validations_common/roles"
export ANSIBLE_INVENTORY="${VALIDATIONS_COMMON_WORKPATH}/tests/hosts.ini"
export ANSIBLE_RETRY_FILES_ENABLED="0"
export ANSIBLE_LOAD_CALLBACK_PLUGINS="1"
export ANSIBLE_HOST_KEY_CHECKING=False
function unset-ansible-test-env {
    for i in $(env | grep ANSIBLE_ | awk -F'=' '{print $1}'); do
        unset ${i}
    done
    unset VALIDATIONS_COMMON_WORKPATH
    echo -e "Ansible test environment deactivated.\n"
    unset -f unset-ansible-test-env
}
echo -e "Ansible test environment is now active"
echo -e "Run 'unset-ansible-test-env' to deactivate.\n"

@@ -1,47 +0,0 @@
# This file facilitates OpenStack-CI package installation
# before the execution of any tests.
#
# See the following for details:
# - https://docs.openstack.org/infra/bindep/
# - https://opendev.org/opendev/bindep/
#
# Even if the role does not make use of this facility, it
# is better to have this file empty, otherwise OpenStack-CI
# will fall back to installing its default packages which
# will potentially be detrimental to the tests executed.
# The gcc compiler
gcc
# Base requirements for RPM distros
gcc-c++ [platform:rpm]
git [platform:rpm]
libffi-devel [platform:rpm]
openssl-devel [platform:rpm]
podman [platform:rpm]
python3-devel [platform:rpm !platform:rhel-7 !platform:centos-7]
PyYAML [platform:rpm !platform:rhel-8 !platform:centos-8 !platform:fedora]
python3-pyyaml [platform:rpm !platform:rhel-7 !platform:centos-7]
python3-dnf [platform:rpm !platform:rhel-7 !platform:centos-7]
# RH Mechanisms
python-rhsm-certificates [platform:redhat]
# SELinux cent7
libselinux-python3 [platform:rpm !platform:rhel-8 !platform:centos-8]
libsemanage-python3 [platform:redhat !platform:rhel-8 !platform:centos-8]
# SELinux cent8
python3-libselinux [platform:rpm !platform:rhel-7 !platform:centos-7]
python3-libsemanage [platform:redhat !platform:rhel-7 !platform:centos-7]
# Required for compressing collected log files in CI
gzip
# Required to build language docs
gettext [doc test]
graphviz [doc test]
# librsvg2 is needed for sphinxcontrib-svg2pdfconverter in docs builds.
librsvg2-tools [doc platform:rpm]
librsvg2-bin [doc platform:dpkg]
# PDF Docs package dependencies
tex-gyre [platform:dpkg doc]

@@ -1,10 +0,0 @@
# this is required for the docs build jobs
sphinx>=2.0.0,!=2.1.0 # BSD License
openstackdocstheme>=2.2.2 # Apache-2.0
reno>=3.1.0 # Apache-2.0
sphinxcontrib-apidoc>=0.2.0 # BSD License
sphinxcontrib-svg2pdfconverter>=1.1.1 # BSD License
doc8>=0.8.0 # Apache-2.0
bashate>=0.6.0 # Apache-2.0
ruamel.yaml>=0.15.5 # MIT
six>=1.11.0 # MIT

@@ -1,402 +0,0 @@
# Copyright 2021 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import imp
import os

from docutils import core
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.parsers import rst
from docutils.writers.html4css1 import Writer

from sphinx import addnodes

import yaml

from ruamel.yaml import YAML as RYAML

try:
    import io
    StringIO = io.StringIO
except ImportError:
    import StringIO


class DocYaml(RYAML):
    def _license_filter(self, data):
        """This will filter out our boilerplate license heading in return data.

        The filter is used to allow documentation we're creating in variable
        files to be rendered more beautifully.
        """
        lines = list()
        mark = True
        for line in data.splitlines():
            if '# Copyright' in line:
                mark = False
            if mark:
                lines.append(line)
            if '# under the License' in line:
                mark = True
        return '\n'.join(lines)

    def dump(self, data, stream=None, **kw):
        if not stream:
            stream = StringIO()
        try:
            RYAML.dump(self, data, stream, **kw)
            return self._license_filter(stream.getvalue().strip())
        finally:
            stream.close()


DOCYAML = DocYaml()
DOCYAML.default_flow_style = False


class AnsibleAutoPluginDirective(Directive):
    directive_name = "ansibleautoplugin"
    has_content = True
    option_spec = {
        'module': rst.directives.unchanged,
        'role': rst.directives.unchanged,
        'documentation': rst.directives.unchanged,
        'examples': rst.directives.unchanged
    }

    @staticmethod
    def _render_html(source):
        return core.publish_parts(
            source=source,
            writer=Writer(),
            writer_name='html',
            settings_overrides={'no_system_messages': True}
        )

    def make_node(self, title, contents, content_type=None):
        section = self._section_block(title=title)
        if not content_type:
            # Doc section
            for content in contents['docs']:
                for paragraph in content.split('\n'):
                    retnode = nodes.paragraph()
                    retnode.append(self._raw_html_block(data=paragraph))
                    section.append(retnode)

            # Options Section
            options_list = nodes.field_list()
            options_section = self._section_block(title='Options')
            for key, value in contents['options'].items():
                options_list.append(
                    self._raw_fields(
                        data=value['description'],
                        field_name=key
                    )
                )
            else:
                options_section.append(options_list)
                section.append(options_section)

            # Authors Section
            authors_list = nodes.field_list()
            authors_list.append(
                self._raw_fields(
                    data=contents['author']
                )
            )
            authors_section = self._section_block(title='Authors')
            authors_section.append(authors_list)
            section.append(authors_section)
        elif content_type == 'yaml':
            for content in contents:
                section.append(
                    self._literal_block(
                        data=content,
                        dump_data=False
                    )
                )
        return section

    @staticmethod
    def load_module(filename):
        return imp.load_source('__ansible_module__', filename)

    @staticmethod
    def build_documentation(module):
        docs = DOCYAML.load(module.DOCUMENTATION)
        doc_data = dict()
        doc_data['docs'] = docs['description']
        doc_data['author'] = docs.get('author', list())
        doc_data['options'] = docs.get('options', dict())
        return doc_data

    @staticmethod
    def build_examples(module):
        examples = DOCYAML.load(module.EXAMPLES)
        return_examples = list()
        for example in examples:
            return_examples.append(DOCYAML.dump([example]))
        return return_examples

    def _raw_html_block(self, data):
        html = self._render_html(source=data)
        return nodes.raw('', html['body'], format='html')

    def _raw_fields(self, data, field_name=''):
        body = nodes.field_body()
        if isinstance(data, list):
            for item in data:
                body.append(self._raw_html_block(data=item))
        else:
            body.append(self._raw_html_block(data=data))

        field = nodes.field()
        field.append(nodes.field_name(text=field_name))
        field.append(body)
        return field

    @staticmethod
    def _literal_block(data, language='yaml', dump_data=True):
        if dump_data:
            literal = nodes.literal_block(
                text=DOCYAML.dump(data)
            )
        else:
            literal = nodes.literal_block(text=data)
        literal['language'] = 'yaml'
        return literal

    @staticmethod
    def _section_block(title, text=None):
        section = nodes.section(
            title,
            nodes.title(text=title),
            ids=[nodes.make_id('-'.join(title))],
        )
        if text:
            section_body = nodes.field_body()
            section_body.append(nodes.paragraph(text=text))
            section.append(section_body)
        return section

    def _yaml_section(self, to_yaml_data, section_title, section_text=None):
        yaml_section = self._section_block(
            title=section_title,
            text=section_text
        )
        yaml_section.append(self._literal_block(data=to_yaml_data))
        return yaml_section

    def _run_role(self, role):
        section = self._section_block(
            title="Role Documentation",
            text="Welcome to the '{}' role documentation.".format(
                os.path.basename(role)
            ),
        )

        molecule_defaults = None
        abspath_role = os.path.dirname(os.path.abspath(role))
        molecule_shared_file = os.path.join(
            os.path.dirname(abspath_role), "../.config/molecule/config.yml"
        )
        if os.path.exists(molecule_shared_file):
            with open(molecule_shared_file) as msf:
                molecule_defaults = DOCYAML.load(msf.read())

        defaults_file = os.path.join(role, "defaults", "main.yml")
        if os.path.exists(defaults_file):
            with open(defaults_file) as f:
                role_defaults = DOCYAML.load(f.read())
            section.append(
                self._yaml_section(
                    to_yaml_data=role_defaults,
                    section_title="Role Defaults",
                    section_text="This section highlights all of the defaults"
                                 " and variables set within the '{}'"
                                 " role.".format(os.path.basename(role)),
                )
            )

        vars_path = os.path.join(role, "vars")
        if os.path.exists(vars_path):
            for v_file in os.listdir(vars_path):
                vars_file = os.path.join(vars_path, v_file)
                with open(vars_file) as f:
                    vars_values = DOCYAML.load(f.read())
                section.append(
                    self._yaml_section(
                        to_yaml_data=vars_values,
                        section_title="Role Variables: {}".format(v_file),
                    )
                )

        test_list = nodes.field_list()
        test_section = self._section_block(
            title="Molecule Scenarios",
            text="Molecule is being used to test the '{}' role. The"
                 " following section highlights the drivers in service"
                 " and provides an example playbook showing how the role"
                 " is leveraged.".format(os.path.basename(role)),
        )

        molecule_path = os.path.join(role, "molecule")
        if os.path.exists(molecule_path):
            for test in os.listdir(molecule_path):
                molecule_section = self._section_block(
                    title="Scenario: {}".format(test)
                )
                molecule_file = os.path.join(molecule_path, test, "molecule.yml")
                if not os.path.exists(molecule_file):
                    continue

                with open(molecule_file) as f:
                    molecule_conf = DOCYAML.load(f.read())

                # if molecule.yml file from the scenarios, we get the
                # information from the molecule shared configuration file.
                if not molecule_conf:
                    molecule_conf = molecule_defaults

                # Now that we use a shared molecule configuration file, the
                # molecule.yml file in the role scenarios could be empty or
                # contains only overriding keys.
                driver_data = molecule_conf.get('driver',
                                                molecule_defaults.get('driver'))
                if driver_data:
                    molecule_section.append(
                        nodes.field_name(text="Driver: {}".format(driver_data["name"]))
                    )

                    options = driver_data.get("options")
                    if options:
                        molecule_section.append(
                            self._yaml_section(
                                to_yaml_data=options, section_title="Molecule Options"
                            )
                        )

                platforms_data = molecule_conf.get('platforms',
                                                   molecule_defaults.get('platforms'))
                if platforms_data:
                    molecule_section.append(
                        self._yaml_section(
                            to_yaml_data=platforms_data,
                            section_title="Molecule Platform(s)",
                        )
                    )

                default_playbook = [molecule_path, test, "converge.yml"]
                provisioner_data = molecule_conf.get('provisioner',
                                                     molecule_defaults.get('provisioner'))
                if provisioner_data:
                    inventory = provisioner_data.get('inventory')
                    if inventory:
                        molecule_section.append(
                            self._yaml_section(
                                to_yaml_data=inventory,
                                section_title="Molecule Inventory",
                            )
                        )
                    try:
                        converge = provisioner_data['playbooks']['converge']
                        default_playbook = default_playbook[:-1] + [converge]
                    except KeyError:
                        pass

                molecule_playbook_path = os.path.join(*default_playbook)
                with open(molecule_playbook_path) as f:
                    molecule_playbook = DOCYAML.load(f.read())
                molecule_section.append(
                    self._yaml_section(
                        to_yaml_data=molecule_playbook,
                        section_title="Example {} playbook".format(test),
                    )
                )
                test_list.append(molecule_section)
            else:
                test_section.append(test_list)
                section.append(test_section)

        self.run_returns.append(section)

        # Document any libraries nested within the role
        library_path = os.path.join(role, "library")
        if os.path.exists(library_path):
            self.options['documentation'] = True
            self.options['examples'] = True
            for lib in os.listdir(library_path):
                if lib.endswith(".py"):
                    self._run_module(
                        module=self.load_module(
                            filename=os.path.join(library_path, lib)
                        ),
                        module_title="Embedded module: {}".format(lib),
                        example_title="Examples for embedded module",
                    )

    def _run_module(self, module, module_title="Module Documentation",
                    example_title="Example Tasks"):
        if self.options.get('documentation'):
            docs = self.build_documentation(module=module)
            self.run_returns.append(
                self.make_node(
                    title=module_title,
                    contents=docs
                )
            )

        if self.options.get('examples'):
            examples = self.build_examples(module=module)
            self.run_returns.append(
                self.make_node(
                    title=example_title,
                    contents=examples,
                    content_type='yaml'
                )
            )

    def run(self):
        self.run_returns = list()

        if self.options.get('module'):
            module = self.load_module(filename=self.options['module'])
            self._run_module(module=module)

        if self.options.get('role'):
            self._run_role(role=self.options['role'])

        return self.run_returns


def setup(app):
    classes = [
        AnsibleAutoPluginDirective,
    ]
    for directive_class in classes:
        app.add_directive(directive_class.directive_name, directive_class)

    return {'version': '0.2'}

@@ -1,114 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from unittest import mock
# Add the project
sys.path.insert(0, os.path.abspath('../..'))
# Add the extensions
sys.path.insert(0, os.path.join(os.path.abspath('.'), '_exts'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinxcontrib.apidoc',
    'sphinxcontrib.rsvgconverter',
    'sphinx.ext.viewcode',
    'sphinx.ext.autodoc',
    'openstackdocstheme',
    'ansible-autodoc'
]
# sphinxcontrib.apidoc options
apidoc_module_dir = '../../validations_common'
apidoc_output_dir = 'reference/api'
apidoc_excluded_paths = []
apidoc_separate_modules = True
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/validations-common'
openstackdocs_use_storyboard = True
openstackdocs_pdf_link = True
openstackdocs_bug_project = 'tripleo'
openstackdocs_bug_tag = 'documentation'
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
autodoc_mock_imports = ['validations_libs', 'oslotest', 'ansible']
# Mocking autodoc deps manually
sys.modules['ansible.module_utils.basic'] = mock.Mock()
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = '2021, OpenStack Foundation'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['validations_common.']
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of glob-style patterns that should be excluded when looking for
# source files. They are matched against the source file names relative to the
# source directory, using slashes as directory separators on all platforms.
exclude_patterns = ['']
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'validations-commondoc'
html_theme = 'openstackdocs'
latex_use_xindy = False
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    (
        'index',
        'doc-validations-common.tex',
        'Validations Framework Client Documentation',
        'OpenStack LLC',
        'manual'
    ),
]
# Allow deeper levels of nesting for \begin...\end stanzas
latex_elements = {'maxlistdepth': 10, 'extraclassoptions': ',openany,oneside'}

@@ -1,6 +0,0 @@
==================================
Contributing to validations-common
==================================
.. include:: ../../CONTRIBUTING.rst

@@ -1,25 +0,0 @@
.. validations-common documentation master file, created by
   sphinx-quickstart on Tue Jul 9 22:26:36 2013.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.
Welcome to validations-common documentation!
============================================
Contents:
.. toctree::
   :maxdepth: 2

   readme
   usage
   contributing
   roles
   modules
   reference/index
Indices and tables
==================
* :ref:`genindex`
* :ref:`search`

@@ -1,9 +0,0 @@
Documented modules in Validations-Common
========================================
Contents:
.. toctree::
   :glob:

   modules/*

@@ -1,14 +0,0 @@
========================
Module - advanced_format
========================
This module provides for the following ansible plugin:
* advanced_format
.. ansibleautoplugin::
   :module: validations_common/library/advanced_format.py
   :documentation: true
   :examples: true
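
A task using the plugin might look as follows; the `drive` parameter name is
an assumption based on the module's purpose, so verify it against the module
source before relying on it::

    - name: Detect whether the drive uses Advanced Format
      advanced_format:
        drive: vda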

@@ -1,29 +0,0 @@
==================
http_json callback
==================
The callback http_json sends Validations logs and information to an HTTP
server in JSON format, so they can be captured and analysed with external
log-parsing tools (such as Fluentd).
This callback inherits from validation_json; the logging format remains the
same as that of the other loggers the Validation Framework uses by default.
To enable this callback, you need to add it to the callback whitelist.
Then you need to export your http server url and port.

.. code-block:: console

   export HTTP_JSON_SERVER=http://localhost
   export HTTP_JSON_PORT=8989

The callback will post JSON logs to the URL provided.
This repository has a simple HTTP server for testing purposes under:

.. code-block:: console

   tools/http_server.py

The default host and port are localhost and 8989; feel free to adjust those
values to your needs.

@@ -1,14 +0,0 @@
=============================
Module - check_package_update
=============================
This module provides for the following ansible plugin:
* check_package_update
.. ansibleautoplugin::
   :module: validations_common/library/check_package_update.py
   :documentation: true
   :examples: true
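
A sketch of a task using the plugin; both parameter names are assumptions
drawn from the module's name and typical package-manager usage, not verified
against the removed source::

    - name: Check for available updates for a package
      check_package_update:
        package: coreutils
        pkg_manager: "{{ ansible_pkg_mgr }}"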

@@ -1,14 +0,0 @@
==============
Module - hiera
==============
This module provides for the following ansible plugin:
* hiera
.. ansibleautoplugin::
   :module: validations_common/library/hiera.py
   :documentation: true
   :examples: true
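
A sketch of a task using the plugin; the `name` parameter is an assumption
modelled on hiera key lookups, so check the module source before use::

    - name: Look up a hiera key
      hiera:
        name: "tripleo::ntp::servers"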

@@ -1,14 +0,0 @@
====================
Module - reportentry
====================
This module provides for the following ansible plugin:
* reportentry
.. ansibleautoplugin::
   :module: validations_common/library/reportentry.py
   :documentation: true
   :examples: true
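
A sketch of a task using the plugin; the parameter names are assumptions
modelled on the module's purpose of recording a validation result::

    - name: Report a failed validation
      reportentry:
        report_status: "ERROR"
        report_reason: "DNS resolution failed"
        report_recommendations:
          - "Check the contents of /etc/resolv.conf"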

@@ -1,14 +0,0 @@
=============================
Module - validations_read_ini
=============================
This module provides for the following ansible plugin:
* validations_read_ini
.. ansibleautoplugin::
   :module: validations_common/library/validations_read_ini.py
   :documentation: true
   :examples: true
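
A sketch of a task using the plugin; the parameter names are assumptions
based on how INI lookups are typically expressed, so confirm them against the
module source::

    - name: Read a setting from an ini file
      validations_read_ini:
        path: /etc/example/example.conf
        section: DEFAULT
        key: debug
      register: debug_setting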

@@ -1,14 +0,0 @@
=============
Module - warn
=============
This module provides for the following ansible plugin:
* warn
.. ansibleautoplugin::
   :module: validations_common/library/warn.py
   :documentation: true
   :examples: true
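
A sketch of a task using the plugin; the `msg` parameter is an assumption
mirroring the built-in debug and fail modules::

    - name: Emit a validation warning
      warn:
        msg: "Only 1 CPU core detected, at least 8 are recommended."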

@@ -1 +0,0 @@
.. include:: ../../README.rst

@@ -1,8 +0,0 @@
============================================
Full Validations-common Python API Reference
============================================
.. toctree::
   :maxdepth: 1

   api/modules

@@ -1,9 +0,0 @@
Documented roles in Validations-Common
======================================
Contents:
.. toctree::
   :glob:

   roles/*

@@ -1,51 +0,0 @@
============================
check_disk_space_pre_upgrade
============================
--------------
About The Role
--------------
An Ansible role to check that the partitioning schema on the host(s) has enough
free space to perform an upgrade.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml

   - hosts: localhost
     gather_facts: false
     vars:
       volumes:
         - { mount: /home, min_size: 25 }
         - { mount: /, min_size: 50 }
     roles:
       - check_disk_space
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
   :role: validations_common/roles/check_disk_space

@@ -1,47 +0,0 @@
================
check_rsyslog
================
--------------
About The Role
--------------
An Ansible role which detects whether the rsyslog package is present and its
service running on the target host, and fails if it doesn't find them.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml

   - hosts: localhost
     gather_facts: true
     roles:
       - check_rsyslog
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/check_rsyslog

View File

@ -1,46 +0,0 @@
============================
advanced_format_512e_support
============================
--------------
About The Role
--------------
An Ansible role to detect whether the node disks use Advanced Format.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
roles:
- advanced_format_512e_support
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/advanced_format_512e_support

View File

@ -1,48 +0,0 @@
=========
check_cpu
=========
--------------
About The Role
--------------
An Ansible role to check whether the host(s) meet the CPU core requirements.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
vars:
minimal_cpu_count: 42
roles:
- check_cpu
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/check_cpu

View File

@ -1,51 +0,0 @@
================
check_disk_space
================
--------------
About The Role
--------------
An Ansible role to check that the partitioning scheme on the host(s) has enough
free space.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
vars:
volumes:
- { mount: /home, min_size: 25 }
- { mount: /, min_size: 50 }
roles:
- check_disk_space
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/check_disk_space

View File

@ -1,50 +0,0 @@
=============================
check_latest_packages_version
=============================
--------------
About The Role
--------------
An Ansible role to check whether the latest version of each package in a given list is installed.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
vars:
packages_list:
- coreutils
- wget
roles:
- check_latest_packages_version
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/check_latest_packages_version

View File

@ -1,48 +0,0 @@
=========
check_ram
=========
--------------
About The Role
--------------
An Ansible role to check whether the host(s) meet the RAM requirements.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
vars:
minimal_ram_gb: 42
roles:
- check_ram
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/check_ram

View File

@ -1,46 +0,0 @@
==================
check_selinux_mode
==================
--------------
About The Role
--------------
An Ansible role to check SELinux status on the host(s).
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
roles:
- check_selinux_mode
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:Security**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/check_selinux_mode

View File

@ -1,48 +0,0 @@
===
dns
===
--------------
About The Role
--------------
An Ansible role to verify that DNS resolution works.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
vars:
server_to_lookup: example.com
roles:
- dns
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/dns

View File

@ -1,46 +0,0 @@
=====
no_op
=====
--------------
About The Role
--------------
A no-op Ansible role for testing that the validations framework runs.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
roles:
- no_op
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/no_op

View File

@ -1,46 +0,0 @@
===
ntp
===
--------------
About The Role
--------------
An Ansible role to check whether the host(s) have their clocks synchronized.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
roles:
- ntp
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/ntp

View File

@ -1,46 +0,0 @@
==============
service_status
==============
--------------
About The Role
--------------
An Ansible role to detect the status of services on the target host(s).
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
roles:
- service_status
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/service_status

View File

@ -1,58 +0,0 @@
================
validate_selinux
================
--------------
About The Role
--------------
An Ansible role to ensure we don't have any SELinux denials on the host(s).
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
vars:
validate_selinux_working_dir: /tmp
validate_selinux_audit_source: /var/log/audit/audit.log
validate_selinux_skip_list_dest: "{{ validate_selinux_working_dir }}/denials-skip-list.txt"
validate_selinux_filtered_denials_dest: "{{ validate_selinux_working_dir }}/denials-filtered.log"
validate_selinux_strict: false
validate_selinux_filter: "None"
validate_selinux_skip_list:
- entry: 'tcontext=system_u:system_r:init_t'
comment: 'This one is a real-life entry'
- entry: 'tcontext=system_u:system_r:system_dbusd_t'
comment: 'This one is another real-life entry'
roles:
- validate_selinux
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/validate_selinux

View File

@ -1,47 +0,0 @@
===============
xfs_check_ftype
===============
--------------
About The Role
--------------
An Ansible role to check whether at least one XFS volume with ftype=0 exists on
any deployed server.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
roles:
- xfs_check_ftype
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/xfs_check_ftype

View File

@ -1,47 +0,0 @@
================
verify-package
================
--------------
About The Role
--------------
An Ansible role that runs `rpm --verify` on Red Hat OS family hosts and
returns the status.
Requirements
============
No requirements.
Dependencies
============
No dependencies.
Example Playbook
================
.. code-block:: yaml
- hosts: localhost
gather_facts: false
roles:
- verify-package
License
=======
Apache
Author Information
==================
**Red Hat TripleO DFG:DF Squad:VF**
----------------
Full Description
----------------
.. ansibleautoplugin::
:role: validations_common/roles/verify-package

View File

@ -1,14 +0,0 @@
=====
Usage
=====
Once the validations-common project has been installed,
navigate to the chosen share path, usually `/usr/share/ansible`,
to access the installed roles, playbooks, and libraries.
While validations-common can be run by itself,
it depends on Ansible and validations-libs,
so using validations-common on its own is not recommended.
The validations included with validations-common are intended as demonstrations
capable of running on most setups; they are not meant for production environments.
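As a quick illustration of that layout, here is a minimal sketch, assuming the
`validation` CLI from validations-libs is on the PATH and the default share path
is used; `check-ram` is one of the demonstration validations shipped here:
.. code-block:: console
validation list --validation-dir /usr/share/ansible/validation-playbooks
validation run --validation check-ram --validation-dir /usr/share/ansible/validation-playbooks --inventory localhost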

View File

@ -1,12 +0,0 @@
# this is required for the molecule jobs
ansible-core<2.12 # GPLv3+
ansi2html # LGPLv3+
jinja2>=2.8.0 # BSD-3-Clause
mock # BSD
molecule>=3.3.1,<4 # MIT
molecule-podman # MIT
pytest # MIT
pytest-cov # MIT
pytest-html # Mozilla Public License 2.0
pytest-xdist # MIT
selinux # MIT

View File

@ -1,101 +0,0 @@
---
- hosts: all
pre_tasks:
- name: Set project path fact
set_fact:
validations_common_project_path: "{{ ansible_user_dir }}/{{ zuul.projects['opendev.org/openstack/validations-common'].src_dir }}"
- name: Ensure output dirs
file:
path: "{{ ansible_user_dir }}/zuul-output/logs"
state: directory
- name: Ensure pip is available
include_role:
name: ensure-pip
- name: Ensure virtualenv is available
include_role:
name: ensure-virtualenv
- name: Setup bindep
pip:
name: "bindep"
virtualenv: "{{ ansible_user_dir }}/test-python"
virtualenv_command: "{{ ensure_pip_virtualenv_command }}"
virtualenv_site_packages: true
- name: Set containers module to 3.0
become: true
shell: |
dnf module disable container-tools:rhel8 -y
dnf module enable container-tools:3.0 -y
dnf clean metadata
- name: Run bindep
shell: |-
. {{ ansible_user_dir }}/test-python/bin/activate
{{ validations_common_project_path }}/scripts/bindep-install
become: true
changed_when: false
- name: Ensure a recent version of pip is installed in virtualenv
pip:
name: "pip>=19.1.1"
virtualenv: "{{ ansible_user_dir }}/test-python"
virtualenv_command: "{{ ensure_pip_virtualenv_command }}"
- name: Setup test-python
pip:
requirements: "{{ validations_common_project_path }}/molecule-requirements.txt"
virtualenv: "{{ ansible_user_dir }}/test-python"
virtualenv_command: "{{ ensure_pip_virtualenv_command }}"
virtualenv_site_packages: true
- name: Set up collections
command: "{{ ansible_user_dir }}/test-python/bin/ansible-galaxy install -fr {{ validations_common_project_path }}/ansible-collections-requirements.yml"
- name: Display test-python virtualenv package versions
shell: |-
. {{ ansible_user_dir }}/test-python/bin/activate
pip freeze
- name: Basic ci setup
become: true
block:
- name: Ensure ci directories
file:
path: "/etc/ci"
state: "directory"
- name: Ensure ci mirror file
file:
path: "/etc/ci/mirror_info.sh"
state: "touch"
- name: Set an appropriate fs.file-max
sysctl:
name: fs.file-max
value: 2048000
sysctl_set: true
state: present
reload: true
- name: Set container_manage_cgroup boolean
seboolean:
name: container_manage_cgroup
state: true
persistent: true
failed_when: false
- name: Create limits file for containers
copy:
content: |
* soft nofile 102400
* hard nofile 204800
* soft nproc 2048
* hard nproc 4096
dest: /etc/security/limits.d/containers.conf
- name: Reset ssh connection
meta: reset_connection

View File

@ -1,14 +0,0 @@
---
- hosts: all
tasks:
- name: set basic zuul fact
set_fact:
zuul:
projects:
"opendev.org/openstack/validations-common":
src_dir: "{{ tripleo_src }}"
ansible_connection: ssh
- import_playbook: pre.yml
- import_playbook: run.yml

View File

@ -1,39 +0,0 @@
---
- hosts: all
environment:
ANSIBLE_LOG_PATH: "{{ ansible_user_dir }}/zuul-output/logs/ansible-execution.log"
pre_tasks:
- name: Set project path fact
set_fact:
validations_common_project_path: "{{ ansible_user_dir }}/{{ zuul.projects['opendev.org/openstack/validations-common'].src_dir }}"
- name: Set roles path fact
set_fact:
validations_common_roles_paths:
- "{{ validations_common_project_path }}/validations_common/roles"
- "/usr/share/ansible/roles"
- name: Set library path fact
set_fact:
validations_common_library_paths:
- "{{ validations_common_project_path }}/validations_common/library"
- "/usr/share/ansible/library"
tasks:
- name: Run role test job
shell: |-
. {{ ansible_user_dir }}/test-python/bin/activate
. {{ validations_common_project_path }}/ansible-test-env.rc
pytest --color=yes \
--html={{ ansible_user_dir }}/zuul-output/logs/reports.html \
--self-contained-html \
--ansible-args='{{ tripleo_job_ansible_args | default("") }}' \
{{ validations_common_project_path }}/tests/test_molecule.py
args:
chdir:
"{{ validations_common_project_path }}/validations_common/roles/{{ validations_common_role_name }}"
executable: /bin/bash
environment:
ANSIBLE_ROLES_PATH: "{{ validations_common_roles_paths | join(':') }}"
ANSIBLE_LIBRARY: "{{ validations_common_library_paths | join(':') }}"

View File

@ -1,10 +0,0 @@
---
- hosts: all
vars:
tox_envlist: functional
roles:
- fetch-subunit-output
- fetch-devstack-log-dir
- fetch-output
- fetch_validations
- fetch-python-sdist-output

View File

@ -1,44 +0,0 @@
---
- hosts: all
name: validations-common-reqcheck
vars:
req_check_override: "{{ ansible_user_dir }}/{{ zuul.project.src_dir }}/.reqcheck_override.yaml"
tasks:
- name: Install rdopkg
changed_when: true
shell:
cmd: |
set -e
# Need to inherit system-site-packages for python-yum
python3 -m venv --system-site-packages {{ ansible_user_dir }}/.venv
source {{ ansible_user_dir }}/.venv/bin/activate
git clone https://github.com/softwarefactory-project/rdopkg.git
cd rdopkg
pip install .
args:
chdir: "{{ ansible_user_dir }}"
- name: Get distgit project info
changed_when: true
shell:
cmd: |
set -e
source {{ ansible_user_dir }}/.venv/bin/activate
rdopkg findpkg {{ zuul.project.name }} | sed -n "/^distgit/ s/distgit. \(.*\)/\1/p"
register: distgit
args:
chdir: "{{ ansible_user_dir }}"
- name: Clone distgit and reqcheck {{ zuul.project.name }} with rdopkg
changed_when: true
shell:
cmd: |
set -e
source {{ ansible_user_dir }}/.venv/bin/activate
git clone {{ distgit.stdout }}
cd validations-common-distgit
git remote add upstream {{ ansible_user_dir }}/{{ zuul.project.src_dir }}
git fetch upstream
rdopkg reqcheck --strict --override {{ req_check_override }}
args:
chdir: "{{ ansible_user_dir }}"

View File

@ -1,14 +0,0 @@
---
- hosts: tempest
name: validations-common-functional
roles:
- ensure-tox
- ensure-pip
- ensure-virtualenv
- role: ensure-if-python
vars:
zuul_work_dir: "src/opendev.org/openstack/validations-libs"
- role: ensure-if-python
vars:
zuul_work_dir: "src/opendev.org/openstack/validations-common"
- role: validations

View File

@ -1,5 +0,0 @@
---
upgrade:
- |
Python 3.6 & 3.7 support has been dropped. The minimum version of Python now
supported is Python 3.8.

View File

@ -1,6 +0,0 @@
========================
1.6 Series Release Notes
========================
.. release-notes::
:branch: stable/1.6

View File

@ -1,321 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# flake8: noqa
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'openstackdocstheme',
'reno.sphinxext',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
authors = 'Validations Framework Developers'
project = 'validations-common Release Notes'
copyright = '2020, ' + authors
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
# todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'validations-common v1.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'validations-commonReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'validations-commonReleaseNotes.tex',
'validations-common Release Notes Documentation',
authors, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'validations-commonreleasenotes',
'validations-common Release Notes Documentation',
[authors], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'validations-commonReleaseNotes',
'validations-common Release Notes Documentation',
authors, 'validations-commonReleaseNotes',
'A collection of Ansible playbooks to detect and report potential issues during TripleO deployments.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
# openstackdocstheme options
repository_name = 'openstack/validations-common'
bug_project = 'tripleo'
bug_tag = 'documentation'

View File

@ -1,19 +0,0 @@
==============================================
Welcome to validations-common's Release Notes!
==============================================
Contents
========
.. toctree::
:maxdepth: 2
unreleased
1.6
ussuri
Indices and tables
==================
* :ref:`genindex`
* :ref:`search`

View File

@ -1,5 +0,0 @@
==============================
Current Series Release Notes
==============================
.. release-notes::

View File

@ -1,6 +0,0 @@
===========================
Ussuri Series Release Notes
===========================
.. release-notes::
:branch: stable/ussuri

View File

@ -1,7 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr>=3.1.1 # Apache-2.0
PyYAML>=3.13 # MIT
PrettyTable>=0.7.2 # BSD-3-Clause

View File

@ -1,5 +0,0 @@
---
src_dir: "{{ zuul.project.src_dir }}"
output_dir: "{{ ansible_user_dir }}"
log_path: "{{ zuul.executor.log_root }}"
artifacts_dir: "{{ zuul.executor.work_root }}/artifacts"

View File

@ -1,51 +0,0 @@
---
- name: Ensure local output dirs
delegate_to: localhost
file:
path: "{{ vf_output_dir }}"
state: directory
with_items:
- "{{ log_path }}/validations-logs"
- "{{ artifacts_dir }}"
loop_control:
loop_var: vf_output_dir
# TODO(jpodivin):
# This is a temporary construct to bridge the period while the new
# log path handling is being tested but not yet merged
- name: Discover new log dir
stat:
path: "{{ output_dir }}/validations"
register: new_log_dir
- name: Set log dir
set_fact:
available_log_dir: "{{'validations' if new_log_dir.stat.exists else 'artifacts'}}"
# End of the temporary construct
- name: Collect logs and artifacts
synchronize:
dest: "{{ log_path }}/validations-logs/"
mode: pull
src: "{{ output_dir }}/{{ available_log_dir }}/"
verify_host: true
owner: false
group: false
- name: Find validations data
find:
paths: "{{ output_dir }}"
patterns: "*.json,*.log,*.yaml"
register: validation_json
- name: Collect Validation logs
synchronize:
dest: "{{ log_path }}/validations-logs/"
mode: pull
src: "{{ logs.path }}"
verify_host: true
owner: false
group: false
loop: "{{ validation_json.files }}"
loop_control:
loop_var: logs

View File

@ -1,27 +0,0 @@
---
user_dir: "{{ ansible_env.HOME | default('') }}"
val_working_dir: "{{ ansible_env.HOME | default('') }}"
zuul_work_virtualenv: "{{ user_dir }}/.venv"
vf_log_dir: "{{ ansible_user_dir }}/logs"
ansible_dir: ""
inventory: ""
inventory_list:
- '{{ user_dir }}/tripleo-deploy/overcloud/tripleo-ansible-inventory.yaml'
- '{{ user_dir }}/tripleo-deploy/undercloud/tripleo-ansible-inventory.yaml'
- '{{ user_dir }}/tripleo-deploy/tripleo-ansible-inventory.yaml'
validation_component: "{{ job.component|default('validation') }}"
# Both commands can be used:
# "openstack tripleo validator" or "validation".
# The default is "validation" because it is the first entry available on
# a system for running validations.
# The openstack command requires installing python-tripleoclient and its
# dependencies first. Override this value as needed in the roles/vars
# calls.
cli_command: "validation"
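# A hypothetical override, e.g. in a vars file or on the command line,
# switching to the tripleoclient wrapper named in the comment above:
# cli_command: "openstack tripleo validator"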
run_validation: true
execute_full_vf_catalogue: "{{ job.execute_full_vf_catalogue|default(false)|bool }}"
vf_catalogue_overrides: "{{ ansible_user_dir }}/catalog_vars_override.yaml"
files_to_run_dest: "{{ ansible_user_dir }}"
files_test_failure: false

View File

@ -1,14 +0,0 @@
---
# LP#1959864
volumes:
- {mount: /var/lib/docker, min_size: 10}
- {mount: /var/lib/config-data, min_size: 3}
- {mount: /var/log, min_size: 3}
- {mount: /usr, min_size: 5}
- {mount: /var, min_size: 8}
- {mount: /, min_size: 8}
minimal_ram_gb: 2
# LP#1987506
minimal_cpu_count: 4

View File

@ -1,16 +0,0 @@
---
# Execute entire Validations Framework catalogue as provided
# by the validations-common and tripleo-validations packages.
# This task is intended to be used only by the RDO CI component pipeline.
# Executing it anywhere else might lead to unforeseen consequences.
- name: Run validations catalogue
shell:
cmd: >-
{{ validation_command }} run --validation {{ validation_catalogue.full_list | join(",") }}
{{ validation_dir_arg }} {{ ansible_dir_arg }}
--inventory {{ inventory }}
--output-log validation_catalogue_execution.log
{{ validation_catalogue.extra_args }}
{{ validation_catalogue.extra_env_args }}
--extra-vars-file={{ vf_catalogue_overrides }}
executable: /bin/bash

View File

@ -1,118 +0,0 @@
---
- name: Run validations from the YAML file
vars:
file_wrong_path: foo/bar.yaml
junitxml_path: /home/stack/logs
junitxml_wrong_path: /foo/bar
wrong_path_string: must be properly formatted
wrong_directory_string: No such file or directory
junitxml_missing_string: junitxml output disabled
validation_failed_string: have failed
validation_not_found_string: Following validations were not found
no_validation_run_string: No validation has been run
expected_result: 1
block:
- debug:
msg: "{{ item }}"
loop: "{{ files }}"
- name: Execute the file command
block:
- name: Passed validation test
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} file {{ files[0] }}
executable: /bin/bash
- set_fact:
files_test_failure: true
when: "{{ run_results.rc }} == 1"
- name: Failed validation test
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} file {{ files[3] }}
executable: /bin/bash
- set_fact:
files_test_failure: true
when: run_results.rc != expected_result
- name: Run with no executed validation
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} file {{ files[1] }}
executable: /bin/bash
- set_fact:
files_test_failure: true
when:
- run_results.rc != expected_result
- name: Run with non-existent validation
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} file {{ files[2] }}
executable: /bin/bash
- set_fact:
files_test_failure: true
when:
- validation_not_found_string not in run_results.stdout
- validation_not_found_string not in run_results.stderr
- name: Execute the file command with wrong path
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} file {{ file_wrong_path }}
executable: /bin/bash
- set_fact:
files_test_failure: true
when:
- wrong_path_string not in run_results.stdout
- wrong_path_string not in run_results.stderr
- name: Execute the file command with junitxml output
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} file {{ files[0] }} --junitxml {{ junitxml_path }}
executable: /bin/bash
- set_fact:
files_test_failure: true
when:
- wrong_directory_string in run_results.stdout
- junitxml_missing_string not in run_results.stdout
- wrong_directory_string in run_results.stderr
- junitxml_missing_string not in run_results.stderr
- "{{ run_results.rc }} == 1"
- name: Execute the file command with wrong Junitxml path
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} file {{ files[0] }} --junitxml {{ junitxml_wrong_path }}
executable: /bin/bash
- set_fact:
files_test_failure: true
when:
- wrong_directory_string in run_results.stdout
- junitxml_missing_string not in run_results.stdout
- wrong_directory_string in run_results.stderr
- junitxml_missing_string not in run_results.stderr
- "{{ run_results.rc }} == 1"

View File

@ -1,36 +0,0 @@
---
- name: List Validations - all - to file
shell:
cmd: "{{ validation_command }} list {{ validation_dir_arg }} -f json > {{ val_working_dir }}/list.log 2>&1"
executable: /bin/bash
when: val_format == "json"
- name: List Validations - all - to stdout - {{ val_format }}
shell:
cmd: "{{ validation_command }} list {{ validation_dir_arg }} -f {{ val_format }}"
executable: /bin/bash
# Metadata dependent list output
- name: List Validations - group - to stdout - {{ val_format }}
shell:
cmd: "{{ validation_command }} list {{ validation_dir_arg }} --group {{ val_group }} -f {{ val_format }}"
executable: /bin/bash
loop: "{{ validation_metadata.group }}"
loop_control:
loop_var: val_group
- name: " List Validations - category - to stdout - {{ val_format }} "
shell:
cmd: "{{ validation_command }} list {{ validation_dir_arg }} --category {{ val_category }} -f {{ val_format }}"
executable: /bin/bash
loop: "{{ validation_metadata.category }}"
loop_control:
loop_var: val_category
- name: "List Validations - product - to stdout - {{ val_format }}"
shell:
cmd: "{{ validation_command }} list {{ validation_dir_arg }} --product {{ val_product }} -f {{ val_format }}"
executable: /bin/bash
loop: "{{ validation_metadata.product }}"
loop_control:
loop_var: val_product

View File

@ -1,52 +0,0 @@
---
# The subcommand used is 'show history' but it is implemented
# as a subclass of Lister and it formats the results accordingly.
# Both tests use a regex to keep only lines starting with a UUID[1].
# As every validation run has a UUID assigned and the 'value' output format
# places its hexadecimal form in the first column, it is possible to use it to
# match only records about validation runs, and remove the rest.
# [1]https://datatracker.ietf.org/doc/html/rfc4122#section-4.1
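# A hypothetical example of a matching 'value' line, with the UUID in the
# first column (the remaining columns carry the validation name, status, etc.):
# 5c2b37a1-8f4e-4d6a-9b1c-0e7f2a3b4c5d check-ram PASSED ...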
- name: List all history
register: list_all_history_output
shell:
cmd: >-
{{ validation_command }} {{ history_command }} -f value 2>&1
| grep "^[[:alnum:]]\{8\}-[[:alnum:]]\{4\}-[[:alnum:]]\{4\}-[[:alnum:]]\{4\}-[[:alnum:]]\{12\}"
| tee {{ val_working_dir }}/full_validation_history.log
executable: /bin/bash
- name: List truncated history
register: list_truncated_history_output
shell:
cmd: >-
{{ validation_command }} {{ history_command }} --limit 1 -f value 2>&1
| grep "^[[:alnum:]]\{8\}-[[:alnum:]]\{4\}-[[:alnum:]]\{4\}-[[:alnum:]]\{4\}-[[:alnum:]]\{12\}"
| tee {{ val_working_dir }}/truncated_validation_history.log
executable: /bin/bash
- name: Verify history output
block:
# To ensure that we are getting the right number of validation runs,
# we query the relevant item of the 'validations_list' for the number of 'extra_args' entries.
# As all validations defined in the 'validations_list' have 'extra_args' defined for both the normal
# and the false-positive run, we can use the number of 'extra_args' keys as an indication
# of the validation runs that were supposed to occur.
# Please note that this assertion will not hold if the format of the default_vars changes substantially.
- name: Verify full history output
fail:
msg: >
The history output length {{ list_all_history_output.stdout_lines | length }}
doesn't match the number of expected validations runs {{ expected_history_length }}.
when: (list_all_history_output.stdout_lines | length) != (expected_history_length | int)
vars:
expected_history_length: "{{ validations_list[validation_component] | string | regex_findall('extra_args') | length }}"
- name: Verify truncated history output
fail:
msg: >
The number of history items displayed is {{ list_truncated_history_output.stdout_lines | length }}
but it should be 1.
when: (list_truncated_history_output.stdout_lines | length) != 1
when:
- run_validation|default(false)|bool
- validation_component | length > 0

View File

@ -1,230 +0,0 @@
---
- name: Remove user local log dir to ensure clean env
become: true
file:
path: "{{ ansible_user_dir }}/validations"
state: absent
- name: Recreate user local log dir
become: true
file:
path: "{{ ansible_user_dir }}/validations"
state: directory
mode: "0755"
owner: "{{ ansible_user }}"
group: "{{ ansible_user }}"
recurse: true
- name: Ensure validations Log dir exists
become: true
file:
path: "{{ vf_log_dir }}"
state: directory
mode: "0755"
owner: "{{ ansible_user }}"
group: "{{ ansible_user }}"
recurse: true
- name: check if virtualenv is used
register: is_virtualenv
stat:
path: "{{ zuul_work_virtualenv }}"
- name: Set command if virtualenv exists
set_fact:
validation_command: "source {{ zuul_work_virtualenv }}/bin/activate; {{ cli_command }}"
when:
- is_virtualenv.stat.exists
- validation_command is not defined
- name: Set Validation directory if virtualenv exists
set_fact:
validation_dir: "{{ zuul_work_virtualenv }}/share/ansible/validation-playbooks"
when: is_virtualenv.stat.exists
- name: Set Validation directory argument if virtualenv exists
set_fact:
validation_dir_arg: "--validation-dir {{ validation_dir }}"
when: is_virtualenv.stat.exists
- name: Set Ansible base directory path if virtualenv exists
set_fact:
ansible_dir: "{{ zuul_work_virtualenv }}/share/ansible/"
when: is_virtualenv.stat.exists
- name: Set Ansible base directory argument if virtualenv exists
set_fact:
ansible_dir_arg: "--ansible-base-dir {{ ansible_dir }}"
when: is_virtualenv.stat.exists
- name: Set command without virtualenv
set_fact:
validation_command: "{{ cli_command }}"
when:
- not is_virtualenv.stat.exists
- validation_command is not defined
- name: Set validation dir without virtualenv
set_fact:
validation_dir: "/usr/share/ansible/validation-playbooks"
when: not is_virtualenv.stat.exists
- name: Set validation dir argument without virtualenv
set_fact:
validation_dir_arg: "--validation-dir {{ validation_dir }}"
when: not is_virtualenv.stat.exists
- name: Set Ansible base directory path without virtualenv
set_fact:
ansible_dir: "/usr/share/ansible/"
when: not is_virtualenv.stat.exists
- name: Set Ansible base directory argument without virtualenv
set_fact:
ansible_dir_arg: "--ansible-base-dir {{ ansible_dir }}"
when: not is_virtualenv.stat.exists
- name: Set a valid inventory
block:
- name: Stat all possible inventory location
register: stat_results
stat:
path: '{{ inv_path }}'
loop: '{{ inventory_list }}'
loop_control:
loop_var: inv_path
- name: Set inventory path or fallback to default localhost
set_fact:
inventory_path: '{{ stat_result.inv_path }}'
when:
- stat_result.stat.exists
loop: '{{ stat_results.results }}'
loop_control:
loop_var: stat_result
- name: Set inventory variable
set_fact:
inventory: '{{ inventory_path|default("localhost") }}'
when: inventory == ""
- name: Run positive validation tests
include_tasks: run.yaml
vars:
name: "{{ item }}"
expected_rc: 0
when:
- run_validation|default(false)|bool
- validation_component | length > 0
with_dict: "{{ validations_list[validation_component] }}"
- name: Fail if something went wrong
fail:
msg: "One or more Validations has failed, check the log results for more information."
when: result_failed | default(False) | bool
- name: Run negative validation tests
include_tasks: run.yaml
vars:
name: "{{ item }}"
expected_rc: 1
negative: true
when:
- run_validation|default(false)|bool
- validation_component | length > 0
with_dict: "{{ validations_list[validation_component] }}"
- name: Fail if something went wrong
fail:
msg: "One or more Validations has failed, check the log results for more information."
when: result_failed | default(False) | bool
- name: List validations
include_tasks: list.yaml
vars:
val_format: "{{ tested_format }}"
loop: "{{ validation_list_formats }}"
loop_control:
loop_var: tested_format
- name: Show validation run results
include_tasks: show_results.yaml
vars:
name: "{{ item }}"
when:
- run_validation|default(false)|bool
- validation_component | length > 0
with_dict: "{{ validations_list[validation_component] }}"
- name: Show validation
include_tasks: show_validation_info.yaml
vars:
name: "{{ item }}"
when:
- run_validation|default(false)|bool
- validation_component | length > 0
with_dict: "{{ validations_list[validation_component] }}"
- name: List history
include_tasks: list_validation_history.yaml
vars:
history_command: "{{'show history' if validation_command == 'openstack tripleo validator' else 'history list'}}"
- name: Run validations with extra vars file
include_tasks: run_extra_vars_file.yaml
vars:
name: "{{ item }}"
extra_vars_uuid: "{{ 'extra vars for tests' | to_uuid }}"
when:
- run_validation|default(false)|bool
- validation_component | length > 0
with_dict: "{{ validations_list[validation_component] }}"
- name: Import variable overrides
become: true
copy:
src: files/catalog_vars_override.yaml
dest: "{{ vf_catalogue_overrides }}"
mode: "0644"
owner: "{{ ansible_user }}"
group: "{{ ansible_user }}"
- name: Execute entire validations catalogue
include_tasks: execute_full_catalogue.yaml
when: execute_full_vf_catalogue
- debug:
msg: "{{ item }}"
loop: "{{ test_arguments_run_from_file }}"
- name: Check if the File command is present
register: subcommand_list
shell:
cmd: >-
{{ validation_command }} --help
executable: /bin/bash
- name: Execute the file command tests
block:
- name: Copy files to run
template:
src: './templates/file-template.j2'
dest: rendered_file_{{ ansible_loop.index }}.yaml
mode: "0644"
owner: "{{ ansible_user }}"
group: "{{ ansible_user }}"
loop: "{{ test_arguments_run_from_file }}"
loop_control:
extended: true
register: rendered_files
- name: Run validations from the File
include_tasks: file.yaml
vars:
files: "{{ rendered_files.results | map(attribute='dest') | list }}"
when: "'Include and exclude validations by' in subcommand_list.stdout"
- name: Fail if something went wrong
fail:
msg: "One or more file runs have failed, check the log results for more information."
when:
- files_test_failure | default(False) | bool
- "'file' in subcommand_list.stdout"

View File

@ -1,29 +0,0 @@
---
- name: Run validations
block:
- name: Set fact for extra args.
set_fact:
execution_extra_args: "{{ name.value.negative_results.extra_args | default('') }}"
when:
- "'negative_results' in name.value "
- negative | default(False) | bool
- name: Execute Validations {{ name.key }}
ignore_errors: true
register: run_results
shell:
cmd: >-
{{ validation_command }} run --validation {{ name.key }}
{{ validation_dir_arg }} {{ ansible_dir_arg }}
--inventory {{ inventory }}
--output-log validation_{{ name.key }}_positive.log
{{ execution_extra_args | default(name.value.extra_args) }}
{{ name.value.extra_env_args }}
executable: /bin/bash
when: (negative | default(False) | bool and 'negative_results' in name.value) or
(not negative | default(False) | bool)
- name: set fact for run_results
ignore_errors: true
set_fact: result_failed=true
when: run_results is defined and run_results.rc is defined and run_results.rc != expected_rc

View File

@ -1,41 +0,0 @@
---
- name: Create extra vars file
shell:
cmd: "echo -e 'minimal_cpu_count: 2\nminimal_ram_gb: 2\n' > {{ extra_vars_uuid }}extra_vars.yaml"
executable: /bin/bash
- name: Run validations with extra vars file
shell:
cmd: >-
{{ validation_command }} run --validation {{ name.key }}
{{ validation_dir_arg }} {{ ansible_dir_arg }}
--inventory {{ inventory }}
--output-log validation_{{ name.key }}_extra_vars_file.log
--extra-vars-file {{ extra_vars_uuid }}extra_vars.yaml
{{ name.value.extra_env_args }}
executable: /bin/bash
- name: Get Run results
block:
- name: Get run results
register: result
shell:
cmd: "cat validation_{{ name.key }}_extra_vars_file.log"
executable: /bin/bash
- name: Get json data
set_fact:
jsondata: "{{ result.stdout | from_json }}"
- name: Get Validations Status
set_fact:
status: "{{ jsondata | json_query(jsonres) }}"
vars:
jsonres: 'results[*].Status'
- fail:
msg: "Validation failed with {{ validation_status }}: some of the validations has failed. {{ status }}"
when: validation_status != "PASSED"
loop: "{{ status }}"
loop_control:
loop_var: validation_status

View File

@ -1,21 +0,0 @@
---
- name: Get run UUID
block:
- name: Get run results file
register: result
shell:
cmd: "cat validation_{{ name.key }}_positive.log"
executable: /bin/bash
- name: Get uuid from log
set_fact:
validation_run_uuids: "{{ result.stdout | from_json | json_query(uuid_selector) }}"
vars:
uuid_selector: 'results[*].UUID'
- name: Show Validation run results
shell:
cmd: "{{ validation_command }} history get {{ run_uuid }} --full > {{ val_working_dir }}/show_run.log 2>&1"
executable: /bin/bash
loop: "{{ validation_run_uuids }}"
loop_control:
loop_var: run_uuid

View File

@ -1,33 +0,0 @@
---
- name: Show Validation - correct id
register: show_output
shell:
cmd: "{{ validation_command }} show {{ validation_dir_arg }} {{ name.key }} -f json > {{ val_working_dir }}/show.log 2>&1"
executable: /bin/bash
# Simulating a typo in the validation name
- name: Show Validation - incorrect id
block:
- name: Show validations - incorrect id
register: show_output_incorrect
shell:
cmd: "{{ validation_command }} show {{ validation_dir_arg }} chuck-cpu -f json 2>&1 | tee {{val_working_dir}}/show_typo.log"
executable: /bin/bash
ignore_errors: true
- fail:
msg: "The expected error message was not displayed."
when: "'Validation chuck-cpu not found' not in show_output_incorrect.stdout"
- name: Show Validation parameters
shell:
cmd: >-
{{ validation_command }} show parameter
--validation {{ name.key }} {{ validation_dir_arg }}
--download {{ name.key }}_params.{{ format_type }}
--format-output {{ format_type }}
executable: /bin/bash
loop:
- json
- yaml
loop_control:
loop_var: format_type

View File

@ -1,64 +0,0 @@
---
- name: Execute validation commands
shell:
cmd: "{{ validation_command }}"
executable: /bin/bash
- name: set fact for Validation action
set_fact: v_action="{{ action }}"
- name: Get Run results - Positive - these are supposed to pass
block:
- name: Get run results
register: result
shell:
cmd: "cat {{ val_output }}"
executable: /bin/bash
- name: Get json data
set_fact:
jsondata: "{{ result.stdout | from_json }}"
- name: Get Validations Status
set_fact:
status: "{{ jsondata | json_query(jsonres) }}"
vars:
jsonres: 'results[*].Status'
- fail:
msg: "Validation failed with {{ validation_status }}: some of the validations have failed. {{ status }}"
when: validation_status != "PASSED"
loop: "{{ status }}"
loop_control:
loop_var: validation_status
when: v_action == 'run'
- name: Get Run results - Negative - these are supposed to fail
# This task should fail with return code != 0
# The validation is supplied with parameters that make it impossible to pass.
block:
- name: Get run results
register: result
shell:
cmd: "cat {{ val_output }}"
executable: /bin/bash
- name: Get json data
set_fact:
jsondata: "{{ result.stdout | from_json }}"
- name: Get Validations Status
set_fact:
status: "{{ jsondata | json_query(jsonres) }}"
vars:
jsonres: 'results[*].Status'
- fail:
msg: "Validation passed with {{ validation_status }} when it shouldn't have: some of the validations have passed. {{ status }}"
when: validation_status == "PASSED"
loop: "{{ status }}"
loop_control:
loop_var: validation_status
when:
- v_action == 'run'
- "'negative_results' in name.value"

View File

@ -1,34 +0,0 @@
---
include_validation: {{ item.validation }}
include_group: {{ item.validation_group }}
include_category: {{ item.validation_category }}
include_product: {{ item.validation_product }}
exclude_validation: {{ item.exclude_validation }}
exclude_group: {{ item.exclude_validation_group }}
exclude_category: {{ item.exclude_validation_category }}
exclude_product: {{ item.exclude_validation_product }}
config: {{ validation_config | default('') }}
inventory: {{ inventory }}
validation-log-dir: {{ vf_log_dir }}
output-log: {{ vf_log_dir }}/run-from-file.log
# Checks whether the following variables are defined; variables that have
# no value in the vars/main.yaml file are skipped.
{% if item.extra_env_vars | default('') %}
extra-env-vars: {{ item.extra_env_vars }}
{% endif %}
{% if item.extra_vars | default('') %}
extra-vars: {{ item.extra_vars }}
{% endif %}
{% if item.limit_hosts | default('') %}
limit: {{ item.limit_hosts.limit | default('') }}
{% endif %}
# Checks if the zuul virtualenv exists; if not, the default path is used instead.
{% if is_virtualenv.stat.exists %}
validation-dir: {{ zuul_work_virtualenv }}/share/ansible/validation-playbooks
ansible-base-dir: {{ zuul_work_virtualenv }}/share/ansible
python-interpreter: {{ zuul_work_virtualenv }}/bin/python3
{% else %}
validation-dir: /usr/share/ansible/validation-playbooks
ansible-base-dir: /usr/share/ansible
python-interpreter: /usr/bin/python3
{% endif %}

View File

@ -1,267 +0,0 @@
---
# extra_env_vars variable should be formed as:
# extra_env_vars: "--extra-env-vars key=value --extra-env-vars key2=value2"
validations_list:
compute:
- nova-status:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
clients:
- openstack-endpoints:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
network:
- undercloud-neutron-sanity-check:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
security:
- tls-everywhere-prep:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- tls-everywhere-pre-deployment:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- tls-everywhere-post-deployment:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
validation:
- check-cpu:
negative_results:
extra_args: "--extra-vars minimal_cpu_count=20000"
extra_args: "--extra-vars minimal_cpu_count=2"
extra_env_args: "--extra-env-vars ANSIBLE_STDOUT_CALLBACK=default {{ extra_env_vars | default('') }}"
- 512e:
extra_args: ""
extra_env_args: ""
- check-ram:
negative_results:
extra_args: "--extra-vars minimal_ram_gb=2000000"
extra_args: "--extra-vars minimal_ram_gb=2"
extra_env_args: "--extra-env-vars ANSIBLE_STDOUT_CALLBACK=default {{ extra_env_vars | default('') }}"
tripleo:
- undercloud-process-count:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- tripleo-latest-packages-version:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- check-for-dangling-images:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
baremetal:
- collect-flavors-and-verify-profiles:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- default-node-count:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- dhcp-introspection:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- dhcp-provisioning:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- ironic-boot-configuration:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- node-disks:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
- node-health:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
validations_group:
compute:
- compute
network:
- network
validation_metadata:
group:
- no-op
- prep
category:
- storage
- container
product:
- tripleo
validation_list_formats:
- csv
- json
- table
- value
- yaml
# Entire catalogue of validations to be executed in OVB environment
# extra_env_args and extra_args are shared for the execution
# Included validations will be sorted alphabetically.
validation_catalogue:
extra_args: ""
extra_env_args: "{{ extra_env_vars | default('') }}"
full_list:
- 512e
- ceph-dependencies-installed
- check-cpu
- check-disk-space
- check-disk-space-pre-upgrade
- check-for-dangling-images
- check-ftype
- check-network-gateway
- check-ntp-reachability
- check-ram
- check-rsyslog
- check-selinux-mode
- compute-tsx
- container-status
- controller-token
- controller-ulimits
- ctlplane-ip-range
- dhcp-introspection
- dhcp-provisioning
- dns
- fips-enabled
- healthcheck-service-status
- image-serve
- mysql-open-files-limit
- neutron-sanity-check
- no-op
- nova-event-callback
- nova-svirt
- ntp
- openshift-nw-requirements
- openstack-endpoints
- overcloud-service-status
- rabbitmq-limits
- repos
- service-status
- tripleo-latest-packages-version
- undercloud-disabled-services
- undercloud-disk-space
- undercloud-disk-space-pre-upgrade
- undercloud-ipa-server-check
- undercloud-neutron-sanity-check
- undercloud-process-count
- validate-selinux
# The following validations are disabled for the purposes of catalogue testing
# - ceilometerdb-size # Not present on the OVB deployment
# - system-encoding # Not present on the OVB deployment
# - ceph-ansible-installed # Not present on the OVB deployment
# - haproxy # Not present on the OVB deployment
# - no-op-firewall-nova-driver # Not present on the OVB deployment
# - containerized-undercloud-docker # Not present on the OVB deployment
# Hostname prerequisites are not met
# - ceph-health
# - ovs-dpdk-pmd-cpus-check
# General validation superseded by 'tripleo-latest-packages-version'
# - check-latest-packages-version
# Failing due to bugs
# - ceph-pg
# - check-uc-hostname
# - collect-flavors-and-verify-profiles
# - default-node-count
# - ironic-boot-configuration
# - network-environment
# - nova-status
# - node-health
# - node-disks
# - stack-health
# - switch-vlans
# - undercloud-heat-purge-deleted
# Real issues detected by validations but not relevant
# - check-rhsm-version
# - check-undercloud-conf
# - openshift-hw-requirements
# - pacemaker-status
# - stonith-exists
# - tls-everywhere-post-deployment
# - tls-everywhere-pre-deployment
# - tls-everywhere-prep
# - undercloud-debug
# - undercloud-service-status
#
# List of dictionaries for testing 4 different versions of the file-to-run
# passed to the CLI 'file' command.
# Each dictionary combines different options for including and excluding
# validations/groups/categories/products.
test_arguments_run_from_file:
# 1st valid file
# expected rc is 0
- validation:
- check-cpu
validation_group: []
validation_category: []
validation_product: []
exclude_validation:
exclude_validation_group: []
exclude_validation_category: []
exclude_validation_product:
- tripleo
validation-dir:
extra_vars:
minimal_cpu_count: 2
# 2nd valid file with 1 non-existent validation
# networking group should be run, expected rc is 1 due to the failing
# validations
- validation:
- check-cpu
- i-dont-exist
validation_group: []
validation_category:
- compute
- networking
validation_product: []
exclude_validation:
- fips-enabled
exclude_validation_group:
- prep
exclude_validation_category:
- compute
exclude_validation_product:
- rabbitmq
- tripleo
limit_hosts:
limit: [undercloud-0]
# 3rd valid file testing the Invalid operation: no validation to run
# expected rc is 1
- validation:
- i-dont-exist
validation_group: []
validation_category: []
validation_product: []
exclude_validation:
- fips-enabled
exclude_validation_group:
- prep
exclude_validation_category:
- compute
exclude_validation_product:
- rabbitmq
- tripleo
# 4th valid file, testing the proper inclusion and exclusion
# only networking group should run (except the dns and the ntp validations)
# expected rc is 1 due to the failed check-cpu validation
- validation:
- check-cpu
validation_group:
validation_category:
- networking
validation_product:
- tripleo
exclude_validation:
- fips-enabled
- dns
- ntp
exclude_validation_group:
exclude_validation_category:
exclude_validation_product:
- rabbitmq
- tripleo
extra_vars:
minimal_cpu_count: 8000

View File

@ -1,45 +0,0 @@
#!/usr/bin/env bash
# Copyright 2019 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
## Shell Opts ----------------------------------------------------------------
set -o pipefail
set -xeu
## Vars ----------------------------------------------------------------------
export BINDEP_FILE="${BINDEP_FILE:-$(dirname $(readlink -f ${BASH_SOURCE[0]}))/../bindep.txt}"
## Main ----------------------------------------------------------------------
# Source distribution information
source /etc/os-release || source /usr/lib/os-release
RHT_PKG_MGR=$(command -v dnf || command -v yum)
# NOTE(cloudnull): Get a list of packages to install with bindep. If packages
# need to be installed, bindep exits with an exit code of 1.
BINDEP_PKGS=$(bindep -b -f "${BINDEP_FILE}" test || true)
if [[ ${#BINDEP_PKGS} -gt 0 ]]; then
case "${ID,,}" in
amzn|rhel|centos|fedora)
sudo "${RHT_PKG_MGR}" install -y ${BINDEP_PKGS}
;;
esac
fi


@@ -1,89 +0,0 @@
#!/usr/bin/env bash
# Copyright 2019 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
## Functions -----------------------------------------------------------------
function usage {
echo "Usage: ROLE_NAME=ROLE_NAME ${0##*/} or ${0##*/} ROLE_NAME"
}
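# Example invocations (the role name is illustrative):
#   ROLE_NAME=check_cpu scripts/run-local-test
#   scripts/run-local-test check_cpu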
## Vars ----------------------------------------------------------------------
export PROJECT_DIR="$(dirname $(readlink -f ${BASH_SOURCE[0]}))/../"
if [ "${ROLE_NAME}x" = "x" -a "${1}x" = "x" ]; then
usage;
exit 2
fi
export ROLE_NAME="${ROLE_NAME:-$1}"
export TRIPLEO_JOB_ANSIBLE_ARGS=${TRIPLEO_JOB_ANSIBLE_ARGS:-""}
## Shell Opts ----------------------------------------------------------------
set -o pipefail
set -xeu
## Main ----------------------------------------------------------------------
# Source distribution information
source /etc/os-release || source /usr/lib/os-release
RHT_PKG_MGR=$(command -v dnf || command -v yum)
# Install the one requirement we need to run any local test
case "${ID,,}" in
amzn|rhel|centos|fedora)
sudo "${RHT_PKG_MGR}" install -y python3 python*-virtualenv
;;
esac
# Ensure the required ci file is present
sudo mkdir -p /etc/ci
sudo touch /etc/ci/mirror_info.sh
# Get Python Executable
PYTHON_EXEC=$(command -v python3 || command -v python)
# Create a virtual env
"${PYTHON_EXEC}" -m virtualenv --system-site-packages "${HOME}/test-python"
# Activate a virtual env
PS1="[\u@\h \W]\$" source "${HOME}/test-python/bin/activate"
# Run bindep
"${HOME}/test-python/bin/pip" install "pip>=19.1.1" setuptools bindep --upgrade
"${PROJECT_DIR}/scripts/bindep-install"
# Install local requirements
if [[ -d "${HOME}/.cache/pip/wheels" ]]; then
rm -rf "${HOME}/.cache/pip/wheels"
fi
"${HOME}/test-python/bin/pip" install \
-r "${PROJECT_DIR}/requirements.txt" \
-r "${PROJECT_DIR}/test-requirements.txt" \
-r "${PROJECT_DIR}/molecule-requirements.txt"
# Run local test
source "${PROJECT_DIR}/ansible-test-env.rc"
export ANSIBLE_ROLES_PATH="${ANSIBLE_ROLES_PATH}:${HOME}/zuul-jobs/roles"
ansible-galaxy install -fr "${PROJECT_DIR}/ansible-collections-requirements.yml"
ansible-playbook -i "${PROJECT_DIR}/tests/hosts.ini" \
-e "tripleo_src=$(realpath --relative-to="${HOME}" "${PROJECT_DIR}")" \
-e "validations_common_role_name=${ROLE_NAME}" \
-e "tripleo_job_ansible_args='${TRIPLEO_JOB_ANSIBLE_ARGS}'" \
-e "ansible_user=${USER}" \
-e "ansible_user_dir=${HOME}" \
"${PROJECT_DIR}/tests/prepare-test-host.yml" \
"${PROJECT_DIR}/playbooks/molecule/run-local.yml" \
-v


@@ -1,35 +0,0 @@
[metadata]
name = validations-common
summary = Common Ansible libraries and plugins for the validations framework
long_description = file: README.rst
long_description_content_type = text/x-rst
author = OpenStack
author_email = openstack-discuss@lists.openstack.org
home_page = https://opendev.org/openstack/validations-common
classifier =
Development Status :: 5 - Production/Stable
Environment :: OpenStack
Framework :: Ansible
Intended Audience :: Information Technology
Intended Audience :: System Administrators
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
[options]
python_requires = >=3.8
[files]
packages =
validations_common
data_files =
share/ansible/roles = roles/*
share/ansible/roles = validations_common/roles/*
share/ansible/library = validations_common/library/*
share/ansible/validation-playbooks = validations_common/playbooks/*


@@ -1,21 +0,0 @@
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)


@@ -1,13 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
ansible-core<2.12 # GPLv3+
coverage!=4.4,>=4.0 # Apache-2.0
oslotest>=3.2.0 # Apache-2.0
stestr>=3.0.1 # Apache-2.0
testscenarios>=0.4 # Apache-2.0/BSD
testtools>=2.2.0 # MIT
reno>=2.5.0 # Apache-2.0
netaddr>=0.7.18 # BSD
pre-commit>=2.3.0 # MIT


@@ -1,18 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def pytest_addoption(parser):
parser.addoption('--scenario', help='scenario setting')
parser.addoption(
'--ansible-args', help='ansible args passed into test runner.')


@@ -1 +0,0 @@
test ansible_connection=local ansible_host=localhost


@@ -1,66 +0,0 @@
---
# Copyright 2019 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
- name: pre prepare
hosts: all
gather_facts: false
tasks:
- name: set basic user fact
fail:
msg: >-
The variable `ansible_user` is undefined. Set this option and try again.
On the CLI this can be defined with "-e ansible_user=${USER}"
when:
- ansible_user is undefined
- name: set basic home fact
fail:
msg: >-
The variable `ansible_user_dir` is undefined. Set this option and try again.
On the CLI this can be defined with "-e ansible_user_dir=${HOME}"
when:
- ansible_user_dir is undefined
- name: Ensure the user has a .ssh directory
file:
path: "{{ ansible_user_dir }}/.ssh"
state: directory
owner: "{{ ansible_user }}"
group: "{{ ansible_user }}"
mode: "0700"
- name: Create ssh key pair
openssh_keypair:
path: "{{ ansible_user_dir }}/.ssh/id_rsa"
size: 2048
- name: Slurp pub key
slurp:
src: "{{ ansible_user_dir ~ '/.ssh/id_rsa.pub' }}"
register: pub_key
- name: Ensure the user can ssh to localhost
authorized_key:
user: "{{ ansible_user }}"
key: "{{ pub_key['content'] | b64decode }}"
- name: Get the zuul/zuul-jobs repo
git:
repo: https://opendev.org/zuul/zuul-jobs
dest: "{{ ansible_user_dir }}/zuul-jobs"
version: master
force: true


@@ -1,41 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
def test_molecule(pytestconfig):
cmd = ['python', '-m', 'molecule']
scenario = pytestconfig.getoption("scenario")
ansible_args = pytestconfig.getoption("ansible_args")
if ansible_args:
cmd.append('converge')
if scenario:
cmd.extend(['--scenario-name', scenario])
cmd.append('--')
cmd.extend(ansible_args.split())
else:
cmd.append('test')
if scenario:
cmd.extend(['--scenario-name', scenario])
else:
cmd.append('--all')
try:
assert subprocess.call(cmd) == 0
finally:
if ansible_args:
cmd = ['python', '-m', 'molecule', 'destroy']
if scenario:
cmd.extend(['--scenario-name', scenario])
subprocess.call(cmd)
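# Example (illustrative): the options registered in conftest.py above map to
# an invocation like `pytest --scenario default --ansible-args "-vv"`, which
# runs `python -m molecule converge --scenario-name default -- -vv` and
# destroys the scenario afterwards.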


@@ -1,27 +0,0 @@
#!/usr/bin/env bash
rm -rf releasenotes/build
sphinx-build -a -E -W \
-d releasenotes/build/doctrees \
-b html \
releasenotes/source releasenotes/build/html
BUILD_RESULT=$?
UNCOMMITTED_NOTES=$(git status --porcelain | \
awk '$1 == "M" && $2 ~ /releasenotes\/notes/ {print $2}')
if [ "${UNCOMMITTED_NOTES}" ]; then
cat <<EOF
REMINDER: The following changes to release notes have not been committed:
${UNCOMMITTED_NOTES}
While that may be intentional, keep in mind that release notes are built from
committed changes, not the working directory.
EOF
fi
exit ${BUILD_RESULT}


@@ -1,119 +0,0 @@
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import os
import sys
def exit_usage():
print('Usage: {} <directory>'.format(sys.argv[0]))
sys.exit(1)
def validate_library_file(file_path, quiet):
with open(file_path) as f:
file_content = f.read()
if 'DOCUMENTATION = ' not in file_content \
or 'EXAMPLES = ' not in file_content:
if quiet < 3:
print('Missing ansible documentation in {}'.format(file_path))
return 1
return 0
def validate_callback_file(file_path, quiet):
required_attributes = [
'CALLBACK_VERSION',
'CALLBACK_NAME']
with open(file_path) as file:
file_content = file.read()
if any([attr not in file_content for attr in required_attributes]):
if quiet < 3:
print(
'Missing required callback plugin attributes in {}'.format(file_path))
return 1
return 0
def validate_file(file_path, quiet):
if os.path.split(file_path)[0].endswith('library'):
return validate_library_file(file_path, quiet)
elif os.path.split(file_path)[0].endswith('callback_plugins'):
return validate_callback_file(file_path, quiet)
else:
raise ValueError('Unexpected path {}'.format(file_path))
def parse_args():
p = argparse.ArgumentParser()
p.add_argument('--quiet', '-q',
action='count',
default=0,
help='output warnings and errors (-q) or only errors (-qq)')
p.add_argument('path_args',
nargs='*',
default=['.'])
return p.parse_args()
def main():
args = parse_args()
path_args = args.path_args
quiet = args.quiet
exit_val = 0
scanned_subdirs = ['callback_plugins', 'library']
failed_files = []
for base_path in path_args:
scanned_paths = [
os.path.join(
base_path,
'validations_common',
path) for path in scanned_subdirs]
if os.path.isdir(base_path):
for subdir, dirs, files in os.walk(base_path):
if '.tox' in dirs:
dirs.remove('.tox')
if '.git' in dirs:
dirs.remove('.git')
if subdir in scanned_paths:
for f in files:
if f.endswith('.py') and f != '__init__.py':
file_path = os.path.join(subdir, f)
if quiet < 1:
print('Validating {}'.format(file_path))
failed = validate_file(file_path, quiet)
if failed:
failed_files.append(file_path)
exit_val |= failed
else:
print('Unexpected argument {}'.format(base_path))
exit_usage()
if failed_files:
print('Validation failed on:')
for f in failed_files:
print(f)
else:
print('Validation successful!')
sys.exit(exit_val)
if __name__ == '__main__':
main()
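# Example usage (this is how the linters tox env below invokes it):
#   python tools/validate-files.py .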

tox.ini

@@ -1,181 +0,0 @@
[tox]
minversion = 4.0.0
envlist = linters,docs,py3
skipsdist = True
# Automatic envs (pyXX) will only use the python version appropriate to that
# env and ignore basepython inherited from [testenv] if we set
# ignore_basepython_conflict.
ignore_basepython_conflict = True
[testenv]
basepython = python3.10
usedevelop = True
passenv =
TERM
setenv =
ANSIBLE_CALLBACK_PLUGINS={toxinidir}/validations_common/callback_plugins
ANSIBLE_LOOKUP_PLUGINS={toxinidir}/validations_common/lookup_plugins
ANSIBLE_LIBRARY={toxinidir}/validations_common/library
ANSIBLE_ROLES_PATH={toxinidir}/validations_common/roles
ANSIBLE_NOCOWS=1
ANSIBLE_RETRY_FILES_ENABLED=0
ANSIBLE_STDOUT_CALLBACK=debug
ANSIBLE_LOG_PATH={envlogdir}/ansible-execution.log
# pip: Avoid 2020-01-01 warnings: https://github.com/pypa/pip/issues/6207
# paramiko CryptographyDeprecationWarning: https://github.com/ansible/ansible/issues/52598
PYTHONWARNINGS=ignore:DEPRECATION::pip._internal.cli.base_command,ignore::UserWarning
VIRTUAL_ENV={envdir}
LANG=en_US.UTF-8
LANGUAGE=en_US:en
LC_ALL=en_US.UTF-8
HOME={envdir}
DISABLE_PIP_VERSION_CHECK=1
commands =
ansible-galaxy install -fr {toxinidir}/ansible-collections-requirements.yml
stestr run --slowest --color {posargs}
deps =
-r {toxinidir}/requirements.txt
-r {toxinidir}/test-requirements.txt
-r {toxinidir}/doc/requirements.txt
allowlist_externals =
bash
ansible-galaxy
[testenv:py27]
envtmpdir={envdir}/tmp
skip_install = true
commands_pre =
cp -r {toxinidir}/validations_common {envtmpdir}/validations_common
touch {envtmpdir}/validations_common/library/__init__.py
find {envtmpdir}/validations_common/tests/ -type f -exec sed -i -e 's/from unittest import mock/import mock/g' \{\} ;
find {envtmpdir}/validations_common/tests/ -type f -exec sed -i -e 's/from unittest.mock/from mock/g' \{\} ;
find {envtmpdir} -type f -name 'setup.cfg' -exec sed -i -e 's/python-requires = >=3.6/python-requires = >=2.7/g' \{\} ;
commands =
python2.7 -m unittest discover -s {envtmpdir}/validations_common/tests
deps =
-c {env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/train}
-r {toxinidir}/requirements.txt
-r {toxinidir}/test-requirements.txt
allowlist_externals =
bash
cp
find
sed
touch
[testenv:bindep]
# Do not install any requirements. We want this to be fast and work even if
# system dependencies are missing, since it's used to tell you what system
# dependencies are missing! This also means that bindep must be installed
# separately, outside of the requirements files.
deps = bindep
commands = bindep test
[testenv:debug]
commands = oslo_debug_helper {posargs}
[testenv:pep8]
envdir = {toxworkdir}/linters
commands =
python -m pre_commit run flake8 -a
[testenv:ansible-lint]
envdir = {toxworkdir}/linters
deps =
{[testenv:linters]deps}
commands =
bash -c "ANSIBLE_ROLES_PATH='{toxinidir}/validations_common/roles'"
bash -c "ANSIBLE_LIBRARY='{toxinidir}/validations_common/library'"
python -m pre_commit run ansible-lint -a
[testenv:yamllint]
envdir = {toxworkdir}/linters
deps = {[testenv:linters]deps}
commands =
python -m pre_commit run yamllint -a
[testenv:bashate]
envdir = {toxworkdir}/linters
deps = {[testenv:linters]deps}
commands =
python -m pre_commit run bashate -a
[testenv:whitespace]
envdir = {toxworkdir}/linters
deps = {[testenv:linters]deps}
commands =
python -m pre_commit run trailing-whitespace -a
[testenv:shebangs]
envdir = {toxworkdir}/linters
deps = {[testenv:linters]deps}
commands =
python -m pre_commit run check-executables-have-shebangs -a
[testenv:end-of-file]
envdir = {toxworkdir}/linters
deps = {[testenv:linters]deps}
commands =
python -m pre_commit run end-of-file-fixer -a
[testenv:linters]
commands_pre =
ansible-galaxy install -fr {toxinidir}/ansible-collections-requirements.yml
deps =
-c {env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-r {toxinidir}/requirements.txt
-r {toxinidir}/test-requirements.txt
commands =
python '{toxinidir}/tools/validate-files.py' .
{[testenv:ansible-lint]commands}
{[testenv:yamllint]commands}
{[testenv:bashate]commands}
{[testenv:whitespace]commands}
{[testenv:shebangs]commands}
{[testenv:end-of-file]commands}
{[testenv:pep8]commands}
[testenv:releasenotes]
deps = -r {toxinidir}/doc/requirements.txt
commands =
sphinx-build -a -E -W -d releasenotes/build/doctrees --keep-going -b html releasenotes/source releasenotes/build/html
[testenv:venv]
commands = {posargs}
passenv = *
[testenv:cover]
setenv =
PYTHON=coverage run --parallel-mode --branch
HOME={envdir}
commands =
coverage erase
stestr run --color {posargs}
coverage combine
coverage html -d cover
coverage xml -o cover/coverage.xml
coverage report --show-missing
[testenv:docs]
deps =
-c {env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-r {toxinidir}/doc/requirements.txt
commands=
sphinx-build -a -E -W -d doc/build/doctrees --keep-going -b html doc/source doc/build/html -T
doc8 doc
[testenv:pdf-docs]
allowlist_externals = make
description =
Build PDF documentation.
envdir = {toxworkdir}/docs
deps = {[testenv:docs]deps}
commands =
sphinx-build -b latex doc/source doc/build/pdf
make -C doc/build/pdf
[doc8]
# Settings for doc8:
extensions = .rst
ignore = D001
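For reference: with the configuration above, `tox -e linters` chains the pre-commit based lint environments defined earlier in the file, and `tox -e docs` builds the Sphinx documentation and runs doc8 over it.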


@@ -1,22 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Uses `oslotest`_.
.. _oslotest: https://opendev.org/openstack/oslotest
"""
import pbr.version
__version__ = pbr.version.VersionInfo('validations-common')


@@ -1,2 +0,0 @@
"""Provides several ansible modules for I/O needs of validations.
"""


@@ -1,96 +0,0 @@
# Copyright 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from os import path
from yaml import safe_load as yaml_safe_load
from ansible.module_utils.basic import AnsibleModule
DOCUMENTATION = '''
---
module: advanced_format
short_description: Check for advanced disk format
description:
- Check whether a drive uses advanced format
options:
drive:
required: true
description:
- drive name
type: str
author: "Martin Andre (@mandre)"
'''
EXAMPLES = '''
- hosts: webservers
tasks:
- name: Detect whether the drive uses Advanced Format
advanced_format: drive=vda
'''
def read_int(module, file_path):
'''Read a file and convert its value to int.
Fail the Ansible module otherwise.
'''
try:
with open(file_path) as f:
file_contents = f.read()
return int(file_contents)
except IOError:
module.fail_json(msg="Cannot open '%s'" % file_path)
except ValueError:
module.fail_json(msg="The '%s' file doesn't contain an integer value" %
file_path)
def main():
module = AnsibleModule(
argument_spec=yaml_safe_load(DOCUMENTATION)['options']
)
drive = module.params.get('drive')
queue_path = path.join('/sys/class/block', drive, 'queue')
physical_block_size_path = path.join(queue_path, 'physical_block_size')
logical_block_size_path = path.join(queue_path, 'logical_block_size')
physical_block_size = read_int(module, physical_block_size_path)
logical_block_size = read_int(module, logical_block_size_path)
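    # NOTE: a 512n drive reports 512/512 here, while a 512e Advanced Format
    # drive typically reports a 4096 byte physical vs 512 byte logical size.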
if physical_block_size == logical_block_size:
module.exit_json(
changed=False,
msg="The disk %s probably doesn't use Advance Format." % drive,
)
else:
module.exit_json(
# NOTE(shadower): we're marking this as `changed`, to make it
# visually stand out when running via Ansible directly instead of
# using the API.
#
# The API & UI is planned to look for the `warnings` field and
# display it differently.
changed=True,
warnings=["Physical and logical block sizes of drive %s differ "
"(%s vs. %s). This can mean the disk uses Advance "
"Format." %
(drive, physical_block_size, logical_block_size)],
)
if __name__ == '__main__':
main()


@@ -1,331 +0,0 @@
# Copyright 2017 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Check for available updates for a given package.
Module queries and parses the output of at least two separate
external binaries, in order to obtain information about the
supported package manager and the installed and available packages.
As such it has many points of failure.
Information about supported package managers,
such as the commands to use while working with them
and the expected stderr output we can encounter while querying repos,
is stored as a nested dictionary, SUPPORTED_PKG_MGRS,
with the names of the supported package managers as the first-level
keys and the aforementioned information on the second level,
as lists of strings, with self-explanatory keys.
Formally speaking it is a tree of a sort.
But so is the entire Python namespace.
"""
import collections
import subprocess
from ansible.module_utils.basic import AnsibleModule
from yaml import safe_load as yaml_safe_load
DOCUMENTATION = '''
---
module: check_package_update
short_description: Check for available updates for given packages
description:
- Check for available updates for given packages
options:
packages_list:
required: true
description:
- The names of the packages you want to check
type: list
pkg_mgr:
required: false
description:
- Supported Package Manager, DNF or YUM
type: str
author:
- Florian Fuchs
- Jiri Podivin (@jpodivin)
'''
EXAMPLES = '''
- hosts: webservers
tasks:
- name: Get available updates for packages
check_package_update:
packages_list:
- coreutils
- wget
pkg_mgr: "{{ ansible_pkg_mgr }}"
'''
SUPPORTED_PKG_MGRS = {
'dnf': {
'query_installed': [
'rpm', '-qa', '--qf',
'%{NAME}|%{VERSION}|%{RELEASE}|%{ARCH}\n'
],
'query_available': [
'dnf', '-q', 'list', '--available'
],
'allowed_errors': [
'',
'Error: No matching Packages to list\n'
]
},
'yum': {
'query_installed': [
'rpm', '-qa', '--qf',
'%{NAME}|%{VERSION}|%{RELEASE}|%{ARCH}\n'
],
'query_available': [
'yum', '-q', 'list', 'available'
],
'allowed_errors': [
'',
'Error: No matching Packages to list\n'
]
},
}
PackageDetails = collections.namedtuple(
'PackageDetails',
['name', 'version', 'release', 'arch'])
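# e.g. the rpm query line "bash|5.1.8|6.el9|x86_64" (illustrative values)
# would become PackageDetails('bash', '5.1.8', '6.el9', 'x86_64')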
def get_package_details(pkg_details_string):
"""Returns PackageDetails namedtuple from given string.
Raises ValueError if the number of '|' separated
fields is < 4.
:return: package details
:rtype: collections.namedtuple
"""
split_output = pkg_details_string.split('|')
try:
pkg_details = PackageDetails(
split_output[0],
split_output[1],
split_output[2],
split_output[3],
)
except IndexError:
raise ValueError(
(
"Package description '{}' doesn't contain fields"
" required for processing."
).format(pkg_details_string)
)
return pkg_details
def _allowed_pkg_manager_stderr(stderr, allowed_errors):
"""Returns False if the error message isn't in the
allowed_errors list.
This function factors out a large, and possibly expanding,
condition so it doesn't cause too much confusion.
"""
    return stderr in allowed_errors
def _command(command):
"""
Return result of a subprocess call.
Doesn't set timeout for the call, so the process can hang.
Potentially for a very long time.
:return: stdout and stderr from Popen.communicate()
:rtype: tuple
"""
process = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
return process.communicate()
def _get_pkg_manager(module):
"""Return name of available package manager.
Queries binaries using `command -v`, in order defined by
the `SUPPORTED_PKG_MGRS`.
:returns: string
"""
for possible_pkg_mgr in SUPPORTED_PKG_MGRS:
stdout, stderr = _command(['command', '-v', possible_pkg_mgr])
if stdout != '' and stderr == '':
return possible_pkg_mgr
module.fail_json(
msg=(
"None of the supported package managers '{}' seems to be "
"available on this system."
).format(' '.join(SUPPORTED_PKG_MGRS))
)
def _get_new_pkg_info(available_stdout):
"""Return package information as dictionary. With package names
as keys and detailed information as list of strings.
"""
available_stdout = available_stdout.split('\n')[1:]
available_stdout = [line.rstrip().split() for line in available_stdout]
new_pkgs_info = {}
for line in available_stdout:
if len(line) != 0:
new_pkgs_info[line[0]] = PackageDetails(
line[0],
line[1].split('-')[0],
line[1].split('-')[1],
line[0].split('.')[1])
return new_pkgs_info
def _get_installed_pkgs(installed_stdout, packages, module):
"""Return dictionary of installed packages.
Package names form the keys and the output of the get_package_details
function forms the values of the dictionary.
"""
installed = {}
installed_stdout = installed_stdout.split('\n')[:-1]
for package in installed_stdout:
if package != '':
package = get_package_details(package)
if package.name in packages:
installed[package.name + '.' + package.arch] = package
packages.remove(package.name)
# Once we find all the requested packages we don't need to continue the search
if len(packages) == 0:
break
# Even a single missing package is a reason for failure.
if len(packages) > 0:
msg = "Following packages are not installed {}".format(packages)
module.fail_json(
msg=msg
)
return
return installed
def check_update(module, packages_list, pkg_mgr):
"""Check if the packages in the 'packages_list are up to date.
Queries binaries, defined the in relevant SUPPORTED_PKG_MGRS entry,
to obtain information about present and available packages.
:param module: ansible module providing fail_json and exit_json
methods
:type module: AnsibleModule
:param packages_list: list of packages to be checked
:type packages_list: list
:param pkg_mgr: Package manager to check for update availability
:type pkg_mgr: string
:return: None
:rtype: None
"""
if len(packages_list) == 0:
module.fail_json(
msg="No packages given to check.")
return
if pkg_mgr is None:
pkg_mgr = _get_pkg_manager(module=module)
if pkg_mgr not in SUPPORTED_PKG_MGRS:
module.fail_json(
msg='Package manager "{}" is not supported.'.format(pkg_mgr))
return
pkg_mgr = SUPPORTED_PKG_MGRS[pkg_mgr]
installed_stdout, installed_stderr = _command(pkg_mgr['query_installed'])
# Fail the module if for some reason we can't lookup the current package.
if installed_stderr != '':
module.fail_json(msg=installed_stderr)
return
if not installed_stdout:
module.fail_json(
msg='No output returned for the query: {}'.format(
' '.join(pkg_mgr['query_installed'])
))
return
installed = _get_installed_pkgs(installed_stdout, packages_list, module)
# Extend with one argument per package; a single space-joined string
# would be passed to the package manager as one package name.
pkg_mgr['query_available'].extend(installed)
available_stdout, available_stderr = _command(pkg_mgr['query_available'])
# We need to check that the stderr consists only of the expected strings.
# This can get complicated if the CLI on the pkg manager side changes.
if not _allowed_pkg_manager_stderr(available_stderr, pkg_mgr['allowed_errors']):
module.fail_json(msg=available_stderr)
return
if available_stdout:
new_pkgs_info = _get_new_pkg_info(available_stdout)
else:
new_pkgs_info = {}
results = []
for installed_pkg in installed:
results.append(
{
'name': installed_pkg,
'current_version': installed[installed_pkg].version,
'current_release': installed[installed_pkg].release,
'new_version': None,
'new_release': None
}
)
if installed_pkg in new_pkgs_info:
results[-1]['new_version'] = new_pkgs_info[installed_pkg][1]
results[-1]['new_release'] = new_pkgs_info[installed_pkg][2]
module.exit_json(
changed=False,
outdated_pkgs=results
)
def main():
module = AnsibleModule(
argument_spec=yaml_safe_load(DOCUMENTATION)['options']
)
check_update(
module,
packages_list=module.params.get('packages_list'),
pkg_mgr=module.params.get('pkg_mgr', None))
if __name__ == '__main__':
main()

Some files were not shown because too many files have changed in this diff.