From e59f07864ef303f12a1d74075ee809bbd5ee76dd Mon Sep 17 00:00:00 2001 From: Gauvain Pocentek Date: Fri, 19 Jan 2018 11:10:40 +0100 Subject: [PATCH] Retire the project Change-Id: Ic92e304924561a1aafdadab08803d282938aa17a --- .coveragerc | 7 - .gitignore | 51 - .gitreview | 4 - .mailmap | 3 - .testr.conf | 4 - CONTRIBUTING.rst | 17 - HACKING.rst | 5 - LICENSE | 175 ---- LICENSE-2.0.txt | 202 ---- MANIFEST.in | 6 - README.md | 0 README.rst | 21 +- babel.cfg | 1 - cerberus/__init__.py | 23 - cerberus/api/__init__.py | 27 - cerberus/api/app.py | 112 --- cerberus/api/auth.py | 62 -- cerberus/api/config.py | 23 - cerberus/api/hooks.py | 153 --- cerberus/api/middleware/__init__.py | 20 - cerberus/api/middleware/auth_token.py | 62 -- cerberus/api/root.py | 140 --- cerberus/api/v1/__init__.py | 0 cerberus/api/v1/controllers/__init__.py | 32 - cerberus/api/v1/controllers/base.py | 34 - cerberus/api/v1/controllers/plugins.py | 166 ---- .../api/v1/controllers/security_alarms.py | 124 --- .../api/v1/controllers/security_reports.py | 144 --- cerberus/api/v1/controllers/tasks.py | 236 ----- cerberus/api/v1/datamodels/__init__.py | 0 cerberus/api/v1/datamodels/base.py | 26 - cerberus/api/v1/datamodels/plugin.py | 87 -- cerberus/api/v1/datamodels/security_alarm.py | 94 -- cerberus/api/v1/datamodels/security_report.py | 113 --- cerberus/api/v1/datamodels/task.py | 76 -- cerberus/client/__init__.py | 15 - cerberus/client/keystone_client.py | 65 -- cerberus/client/neutron_client.py | 109 -- cerberus/client/nova_client.py | 109 -- cerberus/cmd/__init__.py | 15 - cerberus/cmd/agent.py | 42 - cerberus/cmd/api.py | 45 - cerberus/cmd/db_create.py | 40 - cerberus/cmd/dbsync.py | 110 --- cerberus/common/__init__.py | 15 - cerberus/common/cerberus_impl_rabbit.py | 147 --- cerberus/common/config.py | 26 - cerberus/common/context.py | 64 -- cerberus/common/errors.py | 124 --- cerberus/common/exception.py | 161 --- cerberus/common/json_encoders.py | 26 - cerberus/common/loopingcall.py | 66 -- cerberus/common/policy.py | 67 -- cerberus/common/safe_utils.py | 70 -- cerberus/common/serialize.py | 110 --- cerberus/common/service.py | 32 - cerberus/common/threadgroup.py | 60 -- cerberus/common/utils.py | 176 ---- cerberus/db/__init__.py | 17 - cerberus/db/api.py | 137 --- cerberus/db/migration.py | 55 -- cerberus/db/sqlalchemy/__init__.py | 15 - cerberus/db/sqlalchemy/alembic.ini | 54 - cerberus/db/sqlalchemy/alembic/README | 16 - cerberus/db/sqlalchemy/alembic/env.py | 54 - cerberus/db/sqlalchemy/alembic/script.py.mako | 22 - .../2dd6320a2745_initial_migration.py | 116 --- .../versions/4426f811d4d9_text_to_varchar.py | 332 ------- ...56a9ae3b_alter_security_report_add_uuid.py | 50 - cerberus/db/sqlalchemy/api.py | 400 -------- cerberus/db/sqlalchemy/migration.py | 90 -- cerberus/db/sqlalchemy/models.py | 188 ---- cerberus/manager.py | 592 ----------- cerberus/notifications.py | 93 -- cerberus/openstack/__init__.py | 0 cerberus/openstack/common/__init__.py | 17 - cerberus/openstack/common/_i18n.py | 40 - .../openstack/common/apiclient/__init__.py | 0 cerberus/openstack/common/apiclient/auth.py | 221 ----- cerberus/openstack/common/apiclient/base.py | 500 ---------- cerberus/openstack/common/apiclient/client.py | 358 ------- .../openstack/common/apiclient/exceptions.py | 459 --------- .../openstack/common/apiclient/fake_client.py | 173 ---- cerberus/openstack/common/cliutils.py | 309 ------ cerberus/openstack/common/config/__init__.py | 0 cerberus/openstack/common/config/generator.py | 307 ------ 
cerberus/openstack/common/context.py | 111 --- cerberus/openstack/common/db/__init__.py | 0 cerberus/openstack/common/db/api.py | 162 --- cerberus/openstack/common/db/exception.py | 56 -- cerberus/openstack/common/db/options.py | 171 ---- .../common/db/sqlalchemy/__init__.py | 0 .../common/db/sqlalchemy/migration.py | 278 ------ .../db/sqlalchemy/migration_cli/__init__.py | 0 .../sqlalchemy/migration_cli/ext_alembic.py | 78 -- .../db/sqlalchemy/migration_cli/ext_base.py | 79 -- .../sqlalchemy/migration_cli/ext_migrate.py | 69 -- .../db/sqlalchemy/migration_cli/manager.py | 71 -- .../openstack/common/db/sqlalchemy/models.py | 119 --- .../common/db/sqlalchemy/provision.py | 157 --- .../openstack/common/db/sqlalchemy/session.py | 933 ------------------ .../common/db/sqlalchemy/test_base.py | 153 --- .../common/db/sqlalchemy/test_migrations.py | 269 ----- .../openstack/common/db/sqlalchemy/utils.py | 647 ------------ .../openstack/common/eventlet_backdoor.py | 146 --- cerberus/openstack/common/excutils.py | 113 --- cerberus/openstack/common/fileutils.py | 135 --- cerberus/openstack/common/fixture/__init__.py | 0 cerberus/openstack/common/fixture/config.py | 85 -- .../openstack/common/fixture/lockutils.py | 51 - cerberus/openstack/common/fixture/logging.py | 34 - .../openstack/common/fixture/mockpatch.py | 62 -- .../openstack/common/fixture/moxstubout.py | 43 - cerberus/openstack/common/gettextutils.py | 448 --------- cerberus/openstack/common/importutils.py | 73 -- cerberus/openstack/common/jsonutils.py | 186 ---- cerberus/openstack/common/local.py | 45 - cerberus/openstack/common/lockutils.py | 377 ------- cerberus/openstack/common/log.py | 713 ------------- cerberus/openstack/common/log_handler.py | 30 - cerberus/openstack/common/loopingcall.py | 145 --- cerberus/openstack/common/network_utils.py | 108 -- cerberus/openstack/common/periodic_task.py | 183 ---- cerberus/openstack/common/policy.py | 897 ----------------- cerberus/openstack/common/processutils.py | 272 ----- cerberus/openstack/common/service.py | 504 ---------- cerberus/openstack/common/sslutils.py | 98 -- cerberus/openstack/common/strutils.py | 322 ------ cerberus/openstack/common/systemd.py | 104 -- cerberus/openstack/common/test.py | 99 -- cerberus/openstack/common/threadgroup.py | 147 --- cerberus/openstack/common/timeutils.py | 210 ---- cerberus/openstack/common/uuidutils.py | 37 - cerberus/openstack/common/versionutils.py | 148 --- cerberus/plugins/__init__.py | 15 - cerberus/plugins/base.py | 156 --- cerberus/plugins/extension.py | 55 -- cerberus/plugins/task_plugin.py | 61 -- cerberus/plugins/test_plugin.py | 169 ---- cerberus/service.py | 141 --- cerberus/tests/__init__.py | 0 cerberus/tests/functional/__init__.py | 0 cerberus/tests/functional/api/__init__.py | 0 cerberus/tests/functional/api/v1/__init__.py | 0 cerberus/tests/functional/api/v1/test_api.py | 285 ------ cerberus/tests/functional/base.py | 122 --- .../tests/functional/test_notifications.py | 131 --- cerberus/tests/unit/__init__.py | 0 cerberus/tests/unit/api/__init__.py | 0 cerberus/tests/unit/api/base.py | 208 ---- cerberus/tests/unit/api/utils.py | 66 -- cerberus/tests/unit/api/v1/__init__.py | 15 - cerberus/tests/unit/api/v1/test_plugins.py | 115 --- .../tests/unit/api/v1/test_security_alarms.py | 86 -- .../unit/api/v1/test_security_reports.py | 91 -- cerberus/tests/unit/api/v1/test_tasks.py | 198 ---- cerberus/tests/unit/base.py | 67 -- cerberus/tests/unit/client/__init__.py | 1 - .../tests/unit/client/test_keystone_client.py | 58 -- 
.../tests/unit/client/test_neutron_client.py | 211 ---- .../tests/unit/client/test_nova_client.py | 129 --- cerberus/tests/unit/config_fixture.py | 35 - cerberus/tests/unit/db/__init__.py | 0 cerberus/tests/unit/db/test_db_api.py | 58 -- cerberus/tests/unit/db/utils.py | 184 ---- cerberus/tests/unit/fake_policy.py | 22 - cerberus/tests/unit/policy_fixture.py | 46 - cerberus/tests/unit/test_cerberus_manager.py | 617 ------------ cerberus/tests/unit/test_notifications.py | 70 -- cerberus/tests/unit/test_utils.py | 98 -- cerberus/utils.py | 163 --- cerberus/version.py | 19 - contrib/devstack/README.rst | 0 contrib/devstack/extras.d/50-cerberus.sh | 39 - contrib/devstack/lib/cerberus | 231 ----- devstack/README.rst | 15 - devstack/plugin.sh | 223 ----- devstack/settings | 40 - doc/.gitignore | 1 - doc/Makefile | 177 ---- doc/source/arch.rst | 123 --- doc/source/cerberus-arch.png | Bin 69241 -> 0 bytes doc/source/conf.py | 267 ----- doc/source/contributing.rst | 4 - doc/source/development_plugin.rst | 57 -- doc/source/index.rst | 62 -- doc/source/installation.rst | 124 --- doc/source/readme.rst | 1 - doc/source/usage.rst | 7 - doc/source/webapi/root.rst | 16 - doc/source/webapi/v1.rst | 52 - etc/cerberus/cerberus.conf.sample | 746 -------------- etc/cerberus/policy.json | 4 - functionaltests/README.rst | 15 - openstack-common.conf | 36 - requirements.txt | 23 - setup.cfg | 60 -- setup.py | 22 - test-requirements.txt | 24 - tools/config/check_uptodate.sh | 25 - tools/config/generate_sample.sh | 119 --- tools/config/oslo.config.generator.rc | 2 - tools/install_venv_common.py | 172 ---- tools/pretty_tox.sh | 6 - tools/subunit-trace.py | 307 ------ tox.ini | 50 - 206 files changed, 8 insertions(+), 24461 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .gitignore delete mode 100644 .gitreview delete mode 100644 .mailmap delete mode 100644 .testr.conf delete mode 100644 CONTRIBUTING.rst delete mode 100644 HACKING.rst delete mode 100644 LICENSE delete mode 100644 LICENSE-2.0.txt delete mode 100644 MANIFEST.in delete mode 100644 README.md delete mode 100644 babel.cfg delete mode 100644 cerberus/__init__.py delete mode 100644 cerberus/api/__init__.py delete mode 100644 cerberus/api/app.py delete mode 100644 cerberus/api/auth.py delete mode 100644 cerberus/api/config.py delete mode 100644 cerberus/api/hooks.py delete mode 100644 cerberus/api/middleware/__init__.py delete mode 100644 cerberus/api/middleware/auth_token.py delete mode 100644 cerberus/api/root.py delete mode 100644 cerberus/api/v1/__init__.py delete mode 100644 cerberus/api/v1/controllers/__init__.py delete mode 100644 cerberus/api/v1/controllers/base.py delete mode 100644 cerberus/api/v1/controllers/plugins.py delete mode 100644 cerberus/api/v1/controllers/security_alarms.py delete mode 100644 cerberus/api/v1/controllers/security_reports.py delete mode 100644 cerberus/api/v1/controllers/tasks.py delete mode 100644 cerberus/api/v1/datamodels/__init__.py delete mode 100644 cerberus/api/v1/datamodels/base.py delete mode 100644 cerberus/api/v1/datamodels/plugin.py delete mode 100644 cerberus/api/v1/datamodels/security_alarm.py delete mode 100644 cerberus/api/v1/datamodels/security_report.py delete mode 100644 cerberus/api/v1/datamodels/task.py delete mode 100644 cerberus/client/__init__.py delete mode 100644 cerberus/client/keystone_client.py delete mode 100644 cerberus/client/neutron_client.py delete mode 100644 cerberus/client/nova_client.py delete mode 100644 cerberus/cmd/__init__.py delete mode 100644 cerberus/cmd/agent.py 
delete mode 100644 cerberus/cmd/api.py delete mode 100644 cerberus/cmd/db_create.py delete mode 100644 cerberus/cmd/dbsync.py delete mode 100644 cerberus/common/__init__.py delete mode 100644 cerberus/common/cerberus_impl_rabbit.py delete mode 100644 cerberus/common/config.py delete mode 100644 cerberus/common/context.py delete mode 100644 cerberus/common/errors.py delete mode 100644 cerberus/common/exception.py delete mode 100644 cerberus/common/json_encoders.py delete mode 100644 cerberus/common/loopingcall.py delete mode 100644 cerberus/common/policy.py delete mode 100644 cerberus/common/safe_utils.py delete mode 100644 cerberus/common/serialize.py delete mode 100644 cerberus/common/service.py delete mode 100644 cerberus/common/threadgroup.py delete mode 100644 cerberus/common/utils.py delete mode 100644 cerberus/db/__init__.py delete mode 100644 cerberus/db/api.py delete mode 100644 cerberus/db/migration.py delete mode 100644 cerberus/db/sqlalchemy/__init__.py delete mode 100644 cerberus/db/sqlalchemy/alembic.ini delete mode 100644 cerberus/db/sqlalchemy/alembic/README delete mode 100644 cerberus/db/sqlalchemy/alembic/env.py delete mode 100644 cerberus/db/sqlalchemy/alembic/script.py.mako delete mode 100644 cerberus/db/sqlalchemy/alembic/versions/2dd6320a2745_initial_migration.py delete mode 100644 cerberus/db/sqlalchemy/alembic/versions/4426f811d4d9_text_to_varchar.py delete mode 100644 cerberus/db/sqlalchemy/alembic/versions/479e56a9ae3b_alter_security_report_add_uuid.py delete mode 100644 cerberus/db/sqlalchemy/api.py delete mode 100644 cerberus/db/sqlalchemy/migration.py delete mode 100644 cerberus/db/sqlalchemy/models.py delete mode 100644 cerberus/manager.py delete mode 100644 cerberus/notifications.py delete mode 100644 cerberus/openstack/__init__.py delete mode 100644 cerberus/openstack/common/__init__.py delete mode 100644 cerberus/openstack/common/_i18n.py delete mode 100644 cerberus/openstack/common/apiclient/__init__.py delete mode 100644 cerberus/openstack/common/apiclient/auth.py delete mode 100644 cerberus/openstack/common/apiclient/base.py delete mode 100644 cerberus/openstack/common/apiclient/client.py delete mode 100644 cerberus/openstack/common/apiclient/exceptions.py delete mode 100644 cerberus/openstack/common/apiclient/fake_client.py delete mode 100644 cerberus/openstack/common/cliutils.py delete mode 100644 cerberus/openstack/common/config/__init__.py delete mode 100644 cerberus/openstack/common/config/generator.py delete mode 100644 cerberus/openstack/common/context.py delete mode 100644 cerberus/openstack/common/db/__init__.py delete mode 100644 cerberus/openstack/common/db/api.py delete mode 100644 cerberus/openstack/common/db/exception.py delete mode 100644 cerberus/openstack/common/db/options.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/__init__.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/migration.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/migration_cli/__init__.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_alembic.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_base.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_migrate.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/migration_cli/manager.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/models.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/provision.py delete mode 100644 
cerberus/openstack/common/db/sqlalchemy/session.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/test_base.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/test_migrations.py delete mode 100644 cerberus/openstack/common/db/sqlalchemy/utils.py delete mode 100644 cerberus/openstack/common/eventlet_backdoor.py delete mode 100644 cerberus/openstack/common/excutils.py delete mode 100644 cerberus/openstack/common/fileutils.py delete mode 100644 cerberus/openstack/common/fixture/__init__.py delete mode 100644 cerberus/openstack/common/fixture/config.py delete mode 100644 cerberus/openstack/common/fixture/lockutils.py delete mode 100644 cerberus/openstack/common/fixture/logging.py delete mode 100644 cerberus/openstack/common/fixture/mockpatch.py delete mode 100644 cerberus/openstack/common/fixture/moxstubout.py delete mode 100644 cerberus/openstack/common/gettextutils.py delete mode 100644 cerberus/openstack/common/importutils.py delete mode 100644 cerberus/openstack/common/jsonutils.py delete mode 100644 cerberus/openstack/common/local.py delete mode 100644 cerberus/openstack/common/lockutils.py delete mode 100644 cerberus/openstack/common/log.py delete mode 100644 cerberus/openstack/common/log_handler.py delete mode 100644 cerberus/openstack/common/loopingcall.py delete mode 100644 cerberus/openstack/common/network_utils.py delete mode 100644 cerberus/openstack/common/periodic_task.py delete mode 100644 cerberus/openstack/common/policy.py delete mode 100644 cerberus/openstack/common/processutils.py delete mode 100644 cerberus/openstack/common/service.py delete mode 100644 cerberus/openstack/common/sslutils.py delete mode 100644 cerberus/openstack/common/strutils.py delete mode 100644 cerberus/openstack/common/systemd.py delete mode 100644 cerberus/openstack/common/test.py delete mode 100644 cerberus/openstack/common/threadgroup.py delete mode 100644 cerberus/openstack/common/timeutils.py delete mode 100644 cerberus/openstack/common/uuidutils.py delete mode 100644 cerberus/openstack/common/versionutils.py delete mode 100644 cerberus/plugins/__init__.py delete mode 100644 cerberus/plugins/base.py delete mode 100644 cerberus/plugins/extension.py delete mode 100644 cerberus/plugins/task_plugin.py delete mode 100644 cerberus/plugins/test_plugin.py delete mode 100644 cerberus/service.py delete mode 100644 cerberus/tests/__init__.py delete mode 100644 cerberus/tests/functional/__init__.py delete mode 100644 cerberus/tests/functional/api/__init__.py delete mode 100644 cerberus/tests/functional/api/v1/__init__.py delete mode 100644 cerberus/tests/functional/api/v1/test_api.py delete mode 100644 cerberus/tests/functional/base.py delete mode 100644 cerberus/tests/functional/test_notifications.py delete mode 100644 cerberus/tests/unit/__init__.py delete mode 100644 cerberus/tests/unit/api/__init__.py delete mode 100644 cerberus/tests/unit/api/base.py delete mode 100644 cerberus/tests/unit/api/utils.py delete mode 100644 cerberus/tests/unit/api/v1/__init__.py delete mode 100644 cerberus/tests/unit/api/v1/test_plugins.py delete mode 100644 cerberus/tests/unit/api/v1/test_security_alarms.py delete mode 100644 cerberus/tests/unit/api/v1/test_security_reports.py delete mode 100644 cerberus/tests/unit/api/v1/test_tasks.py delete mode 100644 cerberus/tests/unit/base.py delete mode 100644 cerberus/tests/unit/client/__init__.py delete mode 100644 cerberus/tests/unit/client/test_keystone_client.py delete mode 100644 cerberus/tests/unit/client/test_neutron_client.py delete mode 100644 
cerberus/tests/unit/client/test_nova_client.py delete mode 100644 cerberus/tests/unit/config_fixture.py delete mode 100644 cerberus/tests/unit/db/__init__.py delete mode 100644 cerberus/tests/unit/db/test_db_api.py delete mode 100644 cerberus/tests/unit/db/utils.py delete mode 100644 cerberus/tests/unit/fake_policy.py delete mode 100644 cerberus/tests/unit/policy_fixture.py delete mode 100644 cerberus/tests/unit/test_cerberus_manager.py delete mode 100644 cerberus/tests/unit/test_notifications.py delete mode 100644 cerberus/tests/unit/test_utils.py delete mode 100644 cerberus/utils.py delete mode 100644 cerberus/version.py delete mode 100644 contrib/devstack/README.rst delete mode 100644 contrib/devstack/extras.d/50-cerberus.sh delete mode 100644 contrib/devstack/lib/cerberus delete mode 100644 devstack/README.rst delete mode 100755 devstack/plugin.sh delete mode 100644 devstack/settings delete mode 100644 doc/.gitignore delete mode 100644 doc/Makefile delete mode 100644 doc/source/arch.rst delete mode 100755 doc/source/cerberus-arch.png delete mode 100755 doc/source/conf.py delete mode 100644 doc/source/contributing.rst delete mode 100644 doc/source/development_plugin.rst delete mode 100644 doc/source/index.rst delete mode 100644 doc/source/installation.rst delete mode 100644 doc/source/readme.rst delete mode 100644 doc/source/usage.rst delete mode 100644 doc/source/webapi/root.rst delete mode 100644 doc/source/webapi/v1.rst delete mode 100644 etc/cerberus/cerberus.conf.sample delete mode 100644 etc/cerberus/policy.json delete mode 100644 functionaltests/README.rst delete mode 100644 openstack-common.conf delete mode 100644 requirements.txt delete mode 100644 setup.cfg delete mode 100755 setup.py delete mode 100644 test-requirements.txt delete mode 100755 tools/config/check_uptodate.sh delete mode 100755 tools/config/generate_sample.sh delete mode 100644 tools/config/oslo.config.generator.rc delete mode 100644 tools/install_venv_common.py delete mode 100644 tools/pretty_tox.sh delete mode 100755 tools/subunit-trace.py delete mode 100644 tox.ini diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index f77157f..0000000 --- a/.coveragerc +++ /dev/null @@ -1,7 +0,0 @@ -[run] -branch = True -source = cerberus -omit = cerberus/tests/*,cerberus/openstack/* - -[report] -ignore_errors = True diff --git a/.gitignore b/.gitignore deleted file mode 100644 index c1435a7..0000000 --- a/.gitignore +++ /dev/null @@ -1,51 +0,0 @@ -*.py[cod] - -# C extensions -*.so - -# Packages -*.egg -*.egg-info -dist -build -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.tox -nosetests.xml -.testrepository - -# Translations -*.mo - -# Mr Developer -.mr.developer.cfg -.project -.pydevproject - -# Complexity -output/*.html -output/*/index.html - -# Sphinx -doc/build - -# pbr generates these -AUTHORS -ChangeLog - -# Editors -*~ -.*.swp -.*sw? 
-.idea diff --git a/.gitreview b/.gitreview deleted file mode 100644 index 2192f10..0000000 --- a/.gitreview +++ /dev/null @@ -1,4 +0,0 @@ -[gerrit] -host=review.openstack.org -port=29418 -project=openstack/cerberus.git diff --git a/.mailmap b/.mailmap deleted file mode 100644 index cc92f17..0000000 --- a/.mailmap +++ /dev/null @@ -1,3 +0,0 @@ -# Format is: -# -# \ No newline at end of file diff --git a/.testr.conf b/.testr.conf deleted file mode 100644 index 8abf838..0000000 --- a/.testr.conf +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} OS_TEST_TIMEOUT=60 ${PYTHON:-python} -m subunit.run discover -t ./ ${TESTS_DIR:-./cerberus/tests/unit/} $LISTOPT $IDOPTION -test_id_option=--load-list $IDFILE -test_list_option=--list \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100644 index 7053760..0000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,17 +0,0 @@ -If you would like to contribute to the development of OpenStack, -you must follow the steps in the "If you're a developer, start here" -section of this page: - - http://wiki.openstack.org/HowToContribute - -Once those steps have been completed, changes to OpenStack -should be submitted for review via the Gerrit tool, following -the workflow documented at: - - http://wiki.openstack.org/GerritWorkflow - -Pull requests submitted through GitHub will be ignored. - -Bugs should be filed on Launchpad, not GitHub: - - https://bugs.launchpad.net/cerberus \ No newline at end of file diff --git a/HACKING.rst b/HACKING.rst deleted file mode 100644 index e9c9a92..0000000 --- a/HACKING.rst +++ /dev/null @@ -1,5 +0,0 @@ -=========================== -cerberus Style Commandments -=========================== - -Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/ diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 67db858..0000000 --- a/LICENSE +++ /dev/null @@ -1,175 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. diff --git a/LICENSE-2.0.txt b/LICENSE-2.0.txt deleted file mode 100644 index d645695..0000000 --- a/LICENSE-2.0.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 90f8a7a..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,6 +0,0 @@ -include AUTHORS -include ChangeLog -exclude .gitignore -exclude .gitreview - -global-exclude *.pyc \ No newline at end of file diff --git a/README.md b/README.md deleted file mode 100644 index e69de29..0000000 diff --git a/README.rst b/README.rst index 955a66d..d98af48 100644 --- a/README.rst +++ b/README.rst @@ -1,15 +1,10 @@ -=============================== -cerberus -=============================== +This project is no longer maintained. -Cerberus security component +The contents of this repository are still available in the Git +source code management system. To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". 
-* Free software: Apache license -* Documentation: http://docs.openstack.org/developer/cerberus -* Source: http://git.openstack.org/cgit/openstack/cerberus -* Bugs: http://bugs.launchpad.net/replace with the name of the project on launchpad - -Features --------- - -* TODO \ No newline at end of file +For any further questions, please email +openstack-dev@lists.openstack.org or join #openstack-dev on +Freenode. diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index efceab8..0000000 --- a/babel.cfg +++ /dev/null @@ -1 +0,0 @@ -[python: **.py] diff --git a/cerberus/__init__.py b/cerberus/__init__.py deleted file mode 100644 index 38059ca..0000000 --- a/cerberus/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import eventlet - -eventlet.monkey_patch() - -import pbr.version - -__version__ = pbr.version.VersionInfo( - 'cerberus').version_string() diff --git a/cerberus/api/__init__.py b/cerberus/api/__init__.py deleted file mode 100644 index 675d11a..0000000 --- a/cerberus/api/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from oslo.config import cfg - -from cerberus.openstack.common.gettextutils import _ # noqa - -keystone_opts = [ - cfg.StrOpt('auth_strategy', default='keystone', - help=_('The strategy to use for authentication.')) -] - -CONF = cfg.CONF -CONF.register_opts(keystone_opts) diff --git a/cerberus/api/app.py b/cerberus/api/app.py deleted file mode 100644 index b6be241..0000000 --- a/cerberus/api/app.py +++ /dev/null @@ -1,112 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import pecan -from wsgiref import simple_server - -from oslo.config import cfg - -from cerberus.api import auth -from cerberus.api import config as api_config -from cerberus.api import hooks -from cerberus.openstack.common import log as logging - -LOG = logging.getLogger(__name__) - -auth_opts = [ - cfg.StrOpt('api_paste_config', - default="api_paste.ini", - help="Configuration file for WSGI definition of API." - ), -] - -api_opts = [ - cfg.StrOpt('host_ip', - default="0.0.0.0", - help="Host serving the API." - ), - cfg.IntOpt('port', - default=8300, - help="Host port serving the API." - ), -] - -CONF = cfg.CONF -CONF.register_opts(auth_opts) -CONF.register_opts(api_opts, group='api') - - -def get_pecan_config(): - # Set up the pecan configuration - filename = api_config.__file__.replace('.pyc', '.py') - return pecan.configuration.conf_from_file(filename) - - -def setup_app(pecan_config=None, extra_hooks=None): - - if not pecan_config: - pecan_config = get_pecan_config() - - app_hooks = [hooks.ConfigHook(), - hooks.DBHook(), - hooks.ContextHook(pecan_config.app.acl_public_routes), - hooks.NoExceptionTracebackHook()] - - if pecan_config.app.enable_acl: - app_hooks.append(hooks.AuthorizationHook( - pecan_config.app.member_routes)) - - pecan.configuration.set_config(dict(pecan_config), overwrite=True) - - app = pecan.make_app( - pecan_config.app.root, - static_root=pecan_config.app.static_root, - template_path=pecan_config.app.template_path, - debug=CONF.debug, - force_canonical=getattr(pecan_config.app, 'force_canonical', True), - hooks=app_hooks, - guess_content_type_from_ext=False - ) - - if pecan_config.app.enable_acl: - strategy = auth.strategy(CONF.auth_strategy) - return strategy.install(app, - cfg.CONF, - pecan_config.app.acl_public_routes) - - return app - - -def build_server(): - # Create the WSGI server and start it - host = CONF.api.host_ip - port = CONF.api.port - - server_cls = simple_server.WSGIServer - handler_cls = simple_server.WSGIRequestHandler - - pecan_config = get_pecan_config() - pecan_config.app.enable_acl = (CONF.auth_strategy == 'keystone') - - app = setup_app(pecan_config=pecan_config) - - srv = simple_server.make_server( - host, - port, - app, - server_cls, - handler_cls) - - return srv diff --git a/cerberus/api/auth.py b/cerberus/api/auth.py deleted file mode 100644 index 47d5a74..0000000 --- a/cerberus/api/auth.py +++ /dev/null @@ -1,62 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from cerberus.api.middleware import auth_token - -from cerberus.openstack.common import log - -STRATEGIES = {} - -LOG = log.getLogger(__name__) - - -OPT_GROUP_NAME = 'keystone_authtoken' - - -class KeystoneAuth(object): - - @classmethod - def _register_opts(cls, conf): - """Register keystoneclient middleware options.""" - - if OPT_GROUP_NAME not in conf: - conf.register_opts(auth_token.opts, group=OPT_GROUP_NAME) - auth_token.CONF = conf - - @classmethod - def install(cls, app, conf, public_routes): - """Install Auth check on application.""" - LOG.debug(u'Installing Keystone\'s auth protocol') - cls._register_opts(conf) - conf = dict(conf.get(OPT_GROUP_NAME)) - return auth_token.AuthTokenMiddleware(app, - conf=conf, - public_api_routes=public_routes) - - -STRATEGIES['keystone'] = KeystoneAuth - - -def strategy(strategy): - """Returns the Auth Strategy. - - :param strategy: String representing - the strategy to use - """ - try: - return STRATEGIES[strategy] - except KeyError: - raise RuntimeError diff --git a/cerberus/api/config.py b/cerberus/api/config.py deleted file mode 100644 index 0e2a213..0000000 --- a/cerberus/api/config.py +++ /dev/null @@ -1,23 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Pecan Application Configurations -app = { - 'root': 'cerberus.api.root.RootController', - 'modules': ['cerberus.api'], - 'static_root': '%(confdir)s/public', - 'template_path': '%(confdir)s/templates', - 'debug': True, - 'enable_acl': False, - 'acl_public_routes': ['/', '/v1'], - 'member_routes': ['/v1/security_reports', ] -} diff --git a/cerberus/api/hooks.py b/cerberus/api/hooks.py deleted file mode 100644 index be6e02a..0000000 --- a/cerberus/api/hooks.py +++ /dev/null @@ -1,153 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from oslo.config import cfg -from pecan import hooks -from webob import exc - -from cerberus.common import context -from cerberus.common import policy -from cerberus.db import api as dbapi - - -class ConfigHook(hooks.PecanHook): - """Attach the config object to the request so controllers can get to it.""" - - def before(self, state): - state.request.cfg = cfg.CONF - - -class DBHook(hooks.PecanHook): - """Attach the dbapi object to the request so controllers can get to it.""" - - def before(self, state): - state.request.dbapi = dbapi.get_instance() - - -class ContextHook(hooks.PecanHook): - """Configures a request context and attaches it to the request. 
- - The following HTTP request headers are used: - - X-User-Id or X-User: - Used for context.user_id. - - X-Tenant-Id or X-Tenant: - Used for context.tenant. - - X-Auth-Token: - Used for context.auth_token. - - X-Roles: - Used for setting context.is_admin flag to either True or False. - The flag is set to True, if X-Roles contains either an administrator - or admin substring. Otherwise it is set to False. - - """ - def __init__(self, public_api_routes): - self.public_api_routes = public_api_routes - super(ContextHook, self).__init__() - - def before(self, state): - user_id = state.request.headers.get('X-User-Id') - user_id = state.request.headers.get('X-User', user_id) - tenant_id = state.request.headers.get('X-Tenant-Id') - tenant = state.request.headers.get('X-Tenant', tenant_id) - domain_id = state.request.headers.get('X-User-Domain-Id') - domain_name = state.request.headers.get('X-User-Domain-Name') - auth_token = state.request.headers.get('X-Auth-Token') - roles = state.request.headers.get('X-Roles', '').split(',') - creds = {'roles': roles} - - is_public_api = state.request.environ.get('is_public_api', False) - is_admin = policy.enforce('context_is_admin', - state.request.headers, - creds) - - state.request.context = context.RequestContext( - auth_token=auth_token, - user=user_id, - tenant_id=tenant_id, - tenant=tenant, - domain_id=domain_id, - domain_name=domain_name, - is_admin=is_admin, - is_public_api=is_public_api, - roles=roles) - - -class AuthorizationHook(hooks.PecanHook): - """Verify that the user has admin rights. - - Checks whether the request context is an admin context and - rejects the request if the api is not public. - - """ - def __init__(self, member_routes): - self.member_routes = member_routes - super(AuthorizationHook, self).__init__() - - def is_path_in_routes(self, path): - for p in self.member_routes: - if path.startswith(p): - return True - return False - - def before(self, state): - ctx = state.request.context - - if not ctx.is_admin and not ctx.is_public_api and \ - not self.is_path_in_routes(state.request.path): - raise exc.HTTPForbidden() - - -class NoExceptionTracebackHook(hooks.PecanHook): - """Workaround rpc.common: deserialize_remote_exception. - - deserialize_remote_exception builds rpc exception traceback into error - message which is then sent to the client. Such behavior is a security - concern so this hook is aimed to cut-off traceback from the error message. - - """ - # NOTE(max_lobur): 'after' hook used instead of 'on_error' because - # 'on_error' never fired for wsme+pecan pair. wsme @wsexpose decorator - # catches and handles all the errors, so 'on_error' dedicated for unhandled - # exceptions never fired. - def after(self, state): - # Omit empty body. Some errors may not have body at this level yet. - if not state.response.body: - return - - # Do nothing if there is no error. - if 200 <= state.response.status_int < 400: - return - - json_body = state.response.json - # Do not remove traceback when server in debug mode (except 'Server' - # errors when 'debuginfo' will be used for traces). - if cfg.CONF.debug and json_body.get('faultcode') != 'Server': - return - - faultsting = json_body.get('faultstring') - traceback_marker = 'Traceback (most recent call last):' - if faultsting and (traceback_marker in faultsting): - # Cut-off traceback. - faultsting = faultsting.split(traceback_marker, 1)[0] - # Remove trailing newlines and spaces if any. - json_body['faultstring'] = faultsting.rstrip() - # Replace the whole json. 
Cannot change original one because it's - # generated on the fly. - state.response.json = json_body diff --git a/cerberus/api/middleware/__init__.py b/cerberus/api/middleware/__init__.py deleted file mode 100644 index d15c2ea..0000000 --- a/cerberus/api/middleware/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -from cerberus.api.middleware import auth_token - -AuthTokenMiddleware = auth_token.AuthTokenMiddleware diff --git a/cerberus/api/middleware/auth_token.py b/cerberus/api/middleware/auth_token.py deleted file mode 100644 index 690b1f3..0000000 --- a/cerberus/api/middleware/auth_token.py +++ /dev/null @@ -1,62 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import re - -from keystoneclient.middleware import auth_token - -from cerberus.common import exception -from cerberus.common import safe_utils -from cerberus.openstack.common.gettextutils import _ # noqa -from cerberus.openstack.common import log - -LOG = log.getLogger(__name__) - - -class AuthTokenMiddleware(auth_token.AuthProtocol): - """A wrapper on Keystone auth_token middleware. - - Does not perform verification of authentication tokens - for public routes in the API. - - """ - def __init__(self, app, conf, public_api_routes=[]): - route_pattern_tpl = '%s(\.json|\.xml)?$' - - try: - self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl) - for route_tpl in public_api_routes] - except re.error as e: - msg = _('Cannot compile public API routes: %s') % e - - LOG.error(msg) - raise exception.ConfigInvalid(error_msg=msg) - - super(AuthTokenMiddleware, self).__init__(app, conf) - - def __call__(self, env, start_response): - path = safe_utils.safe_rstrip(env.get('PATH_INFO'), '/') - - # Whether the API call is being performed against the public API is - # needed by some other components, so it is saved to the WSGI - # environment.
- env['is_public_api'] = any(map(lambda pattern: re.match(pattern, path), - self.public_api_routes)) - - if env['is_public_api']: - return self.app(env, start_response) - - return super(AuthTokenMiddleware, self).__call__(env, start_response) diff --git a/cerberus/api/root.py b/cerberus/api/root.py deleted file mode 100644 index ffb362c..0000000 --- a/cerberus/api/root.py +++ /dev/null @@ -1,140 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import pecan -from pecan import rest -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from cerberus.api.v1 import controllers as v1_api -from cerberus.openstack.common import log as logging - -LOG = logging.getLogger(__name__) - -VERSION_STATUS = wtypes.Enum(wtypes.text, 'EXPERIMENTAL', 'STABLE') - - -class APILink(wtypes.Base): - """API link description. - - """ - - type = wtypes.text - """Type of link.""" - - rel = wtypes.text - """Relationship with this link.""" - - href = wtypes.text - """URL of the link.""" - - @classmethod - def sample(cls): - version = 'v1' - sample = cls( - rel='self', - type='text/html', - href='http://127.0.0.1:8888/{id}'.format( - id=version)) - return sample - - -class APIMediaType(wtypes.Base): - """Media type description. - - """ - - base = wtypes.text - """Base type of this media type.""" - - type = wtypes.text - """Type of this media type.""" - - @classmethod - def sample(cls): - sample = cls( - base='application/json', - type='application/vnd.openstack.sticks-v1+json') - return sample - - -class APIVersion(wtypes.Base): - """API Version description. - - """ - - id = wtypes.text - """ID of the version.""" - - status = VERSION_STATUS - """Status of the version.""" - - updated = wtypes.text - "Last update in iso8601 format." - - links = [APILink] - """List of links to API resources.""" - - media_types = [APIMediaType] - """Types accepted by this API.""" - - @classmethod - def sample(cls): - version = 'v1' - updated = '2014-08-11T16:00:00Z' - links = [APILink.sample()] - media_types = [APIMediaType.sample()] - sample = cls(id=version, - status='STABLE', - updated=updated, - links=links, - media_types=media_types) - return sample - - -class RootController(rest.RestController): - """Root REST Controller exposing versions of the API. 
- - """ - - v1 = v1_api.V1Controller() - - @wsme_pecan.wsexpose([APIVersion]) - def get(self): - """Return the version list - - """ - # TODO(sheeprine): Maybe we should store all the API version - # informations in every API modules - ver1 = APIVersion( - id='v1', - status='EXPERIMENTAL', - updated='2015-03-09T16:00:00Z', - links=[ - APILink( - rel='self', - href='{scheme}://{host}/v1'.format( - scheme=pecan.request.scheme, - host=pecan.request.host, - ) - ) - ], - media_types=[] - ) - - versions = [] - versions.append(ver1) - - return versions diff --git a/cerberus/api/v1/__init__.py b/cerberus/api/v1/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/api/v1/controllers/__init__.py b/cerberus/api/v1/controllers/__init__.py deleted file mode 100644 index fab4430..0000000 --- a/cerberus/api/v1/controllers/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from pecan import rest - -from cerberus.api.v1.controllers import plugins as plugins_api -from cerberus.api.v1.controllers import security_alarms as \ - security_alarms_api -from cerberus.api.v1.controllers import security_reports as \ - security_reports_api -from cerberus.api.v1.controllers import tasks as tasks_api - - -class V1Controller(rest.RestController): - """API version 1 controller. """ - plugins = plugins_api.PluginsController() - security_alarms = security_alarms_api.SecurityAlarmsController() - security_reports = security_reports_api.SecurityReportsController() - tasks = tasks_api.TasksController() diff --git a/cerberus/api/v1/controllers/base.py b/cerberus/api/v1/controllers/base.py deleted file mode 100644 index fd34e24..0000000 --- a/cerberus/api/v1/controllers/base.py +++ /dev/null @@ -1,34 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from pecan import rest - -from oslo.config import cfg -from oslo import messaging - -from cerberus.openstack.common import log - - -LOG = log.getLogger(__name__) - - -class BaseController(rest.RestController): - - def __init__(self): - super(BaseController, self).__init__() - transport = messaging.get_transport(cfg.CONF) - target = messaging.Target(topic='test_rpc', server='server1') - self.client = messaging.RPCClient(transport, target) diff --git a/cerberus/api/v1/controllers/plugins.py b/cerberus/api/v1/controllers/plugins.py deleted file mode 100644 index 576946f..0000000 --- a/cerberus/api/v1/controllers/plugins.py +++ /dev/null @@ -1,166 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# - - -import json -import pecan -from webob import exc -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from oslo import messaging - -from cerberus.api.v1.controllers import base -from cerberus.api.v1.datamodels import plugin as plugin_models -from cerberus.common import errors -from cerberus import db -from cerberus.db.sqlalchemy import models -from cerberus.openstack.common import log - - -LOG = log.getLogger(__name__) - -_ENFORCER = None - - -class PluginsController(base.BaseController): - - def list_plugins(self): - """ List all the plugins installed on system """ - - # Get information about plugins loaded by Cerberus - try: - plugins = self._plugins() - except messaging.RemoteError as e: - LOG.exception(e) - raise - try: - # Get information about plugins stored in db - db_plugins_info = db.plugins_info_get() - except Exception as e: - LOG.exception(e) - raise - plugins_info = {} - for plugin_info in db_plugins_info: - plugins_info[plugin_info.name] = models.\ - PluginInfoJsonSerializer().serialize(plugin_info) - - for key in plugins: - if key in plugins_info: - if isinstance(plugins_info[key], dict) and isinstance( - plugins[key], dict): - plugins_info[key].update(plugins[key]) - - pluginResources = [] - - for k, v in plugins_info.items(): - pluginResources.append( - plugin_models.PluginResource(v)) - - return plugin_models.PluginResourceCollection(plugins=pluginResources) - - def _plugins(self): - """ Get a dict of plugins loaded by Cerberus Manager """ - ctx = pecan.request.context.to_dict() - try: - plugins = self.client.call(ctx, 'get_plugins') - except messaging.RemoteError as e: - LOG.exception(e) - raise - plugins_ = {} - for plugin in plugins: - plugin_ = json.loads(plugin) - plugins_[plugin_['name']] = plugin_ - return plugins_ - - @wsme_pecan.wsexpose(plugin_models.PluginResourceCollection) - def get_all(self): - """ Get a list of plugins loaded by Cerberus manager - - :return: a list of plugins loaded by Cerberus manager - :raises: - HTTPServiceUnavailable: an error occurred in Cerberus Manager or - the service is unavailable - HTTPNotFound: any other error - """ - - # Get information about plugins loaded by Cerberus - try: - plugins = self.list_plugins() - except messaging.RemoteError: - raise 
exc.HTTPServiceUnavailable() - except Exception as e: - LOG.exception(e) - raise exc.HTTPNotFound() - return plugins - - def get_plugin(self, uuid): - """ Get information about plugin loaded by Cerberus""" - try: - plugin = self._plugin(uuid) - except messaging.RemoteError: - raise - except errors.PluginNotFound: - raise - try: - # Get information about plugin stored in db - db_plugin_info = db.plugin_info_get_from_uuid(uuid) - plugin_info = models.PluginInfoJsonSerializer().\ - serialize(db_plugin_info) - - plugin_info.update(plugin) - except Exception as e: - LOG.exception(e) - raise - return plugin_models.PluginResource(plugin_info) - - def _plugin(self, uuid): - """ Get a specific plugin thanks to its identifier """ - ctx = pecan.request.context.to_dict() - try: - plugin = self.client.call(ctx, 'get_plugin_from_uuid', uuid=uuid) - except messaging.RemoteError as e: - LOG.exception(e) - raise - - if plugin is None: - LOG.exception('Plugin %s not found.' % uuid) - raise errors.PluginNotFound(uuid) - return json.loads(plugin) - - @wsme_pecan.wsexpose(plugin_models.PluginResource, - wtypes.text) - def get_one(self, uuid): - """ Get details of a specific plugin whose identifier is uuid - - :param uuid: the identifier of the plugin - :return: details of a specific plugin - :raises: - HTTPServiceUnavailable: an error occurred in Cerberus Manager or - the service is unavailable - HTTPNotFound: Plugin is not found. Also any other error - """ - try: - plugin = self.get_plugin(uuid) - except messaging.RemoteError: - raise exc.HTTPServiceUnavailable() - except errors.PluginNotFound: - raise exc.HTTPNotFound() - except Exception as e: - LOG.exception(e) - raise exc.HTTPNotFound() - return plugin diff --git a/cerberus/api/v1/controllers/security_alarms.py b/cerberus/api/v1/controllers/security_alarms.py deleted file mode 100644 index 4ee79ce..0000000 --- a/cerberus/api/v1/controllers/security_alarms.py +++ /dev/null @@ -1,124 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import pecan -from webob import exc -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from cerberus.api.v1.controllers import base -from cerberus.api.v1.datamodels import security_alarm as alarm_models -from cerberus.common import errors -from cerberus import db -from cerberus.db.sqlalchemy import models -from cerberus.openstack.common import log - -LOG = log.getLogger(__name__) - - -class SecurityAlarmsController(base.BaseController): - - @pecan.expose() - def _lookup(self, alarm_id, *remainder): - return SecurityAlarmController(alarm_id), remainder - - def list_security_alarms(self): - """ List all the security alarms of all projects or just one. 
""" - try: - security_alarms = db.security_alarm_get_all() - except Exception as e: - LOG.exception(e) - raise errors.DbError( - "Security alarms could not be retrieved" - ) - return security_alarms - - @wsme_pecan.wsexpose(alarm_models.SecurityAlarmResourceCollection) - def get_all(self): - """ Get stored security alarms. - - :return: list of security alarms - :raises: - HTTPNotFound: Any database error - """ - try: - security_alarms = self.list_security_alarms() - except errors.DbError: - raise exc.HTTPNotFound() - - alarms_resource = [] - # todo(eglamn3) : no need to serialize here - for security_alarm in security_alarms: - alarms_resource.append( - alarm_models.SecurityAlarmResource( - models.SecurityAlarmJsonSerializer(). - serialize(security_alarm))) - - return alarm_models.SecurityAlarmResourceCollection( - security_alarms=alarms_resource) - - -class SecurityAlarmController(base.BaseController): - - _custom_actions = { - 'tickets': ['PUT'] - } - - def __init__(self, alarm_id): - super(SecurityAlarmController, self).__init__() - pecan.request.context['alarm_id'] = alarm_id - self._uuid = alarm_id - - def get_security_alarm(self, alarm_id): - try: - security_alarm = db.security_alarm_get(alarm_id) - except Exception as e: - LOG.exception(e) - raise errors.DbError( - "Security alarm %s could not be retrieved" % alarm_id - ) - return security_alarm - - @wsme_pecan.wsexpose(alarm_models.SecurityAlarmResource, - wtypes.text) - def get(self): - """Get security alarm in db - - :return: a security alarm - :raises: - HTTPNotFound: Alarm not found or any database error - """ - try: - security_alarm = self.get_security_alarm(self._uuid) - except errors.DbError: - raise exc.HTTPNotFound() - s_alarm = models.SecurityAlarmJsonSerializer().\ - serialize(security_alarm) - - return alarm_models.SecurityAlarmResource(initial_data=s_alarm) - - @pecan.expose("json") - def tickets(self, ticket_id): - """Modify the ticket id associated to a security alarm in db. - - :param ticket_id: the ticket_id to store in db. - :raises: - HTTPNotFound: Alarm not found or any database error - """ - try: - db.security_alarm_update_ticket_id(self._uuid, ticket_id) - except Exception: - raise exc.HTTPNotFound() diff --git a/cerberus/api/v1/controllers/security_reports.py b/cerberus/api/v1/controllers/security_reports.py deleted file mode 100644 index e12cede..0000000 --- a/cerberus/api/v1/controllers/security_reports.py +++ /dev/null @@ -1,144 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import pecan -from webob import exc -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from cerberus.api.v1.controllers import base -from cerberus.api.v1.datamodels import security_report as report_models -from cerberus.common import errors -from cerberus import db -from cerberus.db.sqlalchemy import models -from cerberus.openstack.common import log - -LOG = log.getLogger(__name__) - - -class SecurityReportsController(base.BaseController): - - @pecan.expose() - def _lookup(self, report_id, *remainder): - return SecurityReportController(report_id), remainder - - def list_security_reports(self, project_id=None): - """ List all the security reports of all projects or just one. """ - try: - security_reports = db.security_report_get_all( - project_id=project_id) - except Exception as e: - LOG.exception(e) - raise errors.DbError( - "Security reports could not be retrieved" - ) - return security_reports - - @wsme_pecan.wsexpose(report_models.SecurityReportResourceCollection) - def get_all(self): - """ Get stored security reports. - - :return: list of security reports - :raises: - HTTPNotFound: Any database error - """ - ctx = pecan.request.context - try: - if ctx.is_admin: - security_reports = self.list_security_reports() - else: - security_reports = self.list_security_reports(ctx.tenant_id) - except errors.DbError: - raise exc.HTTPNotFound() - - reports_resource = [] - # todo(eglamn3) : no need to serialize here - for security_report in security_reports: - reports_resource.append( - report_models.SecurityReportResource( - models.SecurityReportJsonSerializer(). - serialize(security_report))) - - return report_models.SecurityReportResourceCollection( - security_reports=reports_resource) - - -class SecurityReportController(base.BaseController): - - _custom_actions = { - 'tickets': ['PUT'] - } - - def __init__(self, uuid): - super(SecurityReportController, self).__init__() - pecan.request.context['uuid'] = uuid - self._id = uuid - - def get_security_report(self, uuid): - try: - security_report = db.security_report_get(uuid) - except Exception as e: - LOG.exception(e) - raise errors.DbError( - "Security report %s could not be retrieved" % uuid - ) - return security_report - - @wsme_pecan.wsexpose(report_models.SecurityReportResource, - wtypes.text) - def get(self): - """Get security report in db. - - :return: a security report - :raises: - HTTPNotFound: Report not found or any database error - """ - try: - security_report = self.get_security_report(self._id) - except errors.DbError: - raise exc.HTTPNotFound() - if security_report is None: - raise exc.HTTPNotFound() - s_report = models.SecurityReportJsonSerializer().\ - serialize(security_report) - - return report_models.SecurityReportResource(initial_data=s_report) - - @pecan.expose("json") - def tickets(self, ticket_id): - """Modify the ticket id associated to a security report in db. - - :param ticket_id: the ticket_id to store in db. - :raises: - HTTPNotFound: Report not found or any database error - """ - try: - db.security_report_update_ticket_id(self._id, ticket_id) - except Exception: - raise exc.HTTPNotFound() - - @wsme_pecan.wsexpose() - def delete(self): - """Delete the security report stored in db. 
- - :raises: - HTTPNotFound: Report not found or any database error - """ - try: - db.security_report_delete(self._id) - except Exception as e: - LOG.exception(e) - raise exc.HTTPNotFound() diff --git a/cerberus/api/v1/controllers/tasks.py b/cerberus/api/v1/controllers/tasks.py deleted file mode 100644 index 94ecbc4..0000000 --- a/cerberus/api/v1/controllers/tasks.py +++ /dev/null @@ -1,236 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import json -import pecan -from webob import exc -import wsme -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from oslo.messaging import rpc - -from cerberus.api.v1.controllers import base -from cerberus.api.v1.datamodels import task as task_models -from cerberus.openstack.common import log - - -LOG = log.getLogger(__name__) - - -action_kind = ["stop", "start", "force_delete"] -action_kind_enum = wtypes.Enum(str, *action_kind) - - -class ActionController(base.BaseController): - _custom_actions = { - 'stop': ['POST'], - 'force_delete': ['POST'], - 'start': ['POST'], - } - - @wsme_pecan.wsexpose(None, wtypes.text) - def stop(self, task_id): - """Stop task - - :raises: - HTTPBadRequest: task not found or impossible to stop it - """ - try: - self.stop_task(task_id) - except rpc.RemoteError: - raise exc.HTTPBadRequest( - explanation="Task can not be stopped") - - @wsme_pecan.wsexpose(None, wtypes.text) - def force_delete(self, task_id): - """Force delete task - - :raises: - HTTPNotFound: task is not found - """ - try: - self.force_delete_task(task_id) - except rpc.RemoteError as e: - raise exc.HTTPNotFound(explanation=e.value) - - @wsme_pecan.wsexpose(None, wtypes.text) - def start(self, task_id): - """Start task - - :raises: - HTTPBadRequest: task not found or impossible to start it - """ - try: - self.start_task(task_id) - except rpc.RemoteError as e: - raise exc.HTTPBadRequest(explanation=e.value) - - def stop_task(self, task_id): - ctx = pecan.request.context.to_dict() - try: - self.client.call(ctx, 'stop_task', task_id=task_id) - except rpc.RemoteError as e: - LOG.exception(e) - raise - - def force_delete_task(self, task_id): - ctx = pecan.request.context.to_dict() - try: - self.client.call(ctx, - 'force_delete_recurrent_task', - task_id=task_id) - except rpc.RemoteError as e: - LOG.exception(e) - raise - - def start_task(self, task_id): - ctx = pecan.request.context.to_dict() - try: - self.client.call(ctx, - 'start_recurrent_task', - task_id=task_id) - except rpc.RemoteError as e: - LOG.exception(e) - raise - - -class TasksController(base.BaseController): - - action = ActionController() - - def __init__(self): - super(TasksController, self).__init__() - - def list_tasks(self): - ctx = pecan.request.context.to_dict() - try: - tasks = self.client.call(ctx, 'get_tasks') - except rpc.RemoteError as e: - LOG.exception(e) - raise - tasks_resource = [] - for task in tasks: - tasks_resource.append( - task_models.TaskResource(json.loads(task))) - - return 
task_models.TaskResourceCollection(tasks=tasks_resource) - - @wsme_pecan.wsexpose(task_models.TaskResourceCollection) - def get_all(self): - """ List tasks handled by Cerberus Manager. - - :return: list of tasks - :raises: - HTTPServiceUnavailable: an error occurred in Cerberus Manager or - the service is unavailable - """ - try: - tasks = self.list_tasks() - except rpc.RemoteError: - raise exc.HTTPServiceUnavailable() - return tasks - - def get_task(self, task_id): - ctx = pecan.request.context.to_dict() - try: - task = self.client.call(ctx, 'get_task', task_id=task_id) - except rpc.RemoteError as e: - LOG.exception(e) - raise - return json.loads(task) - - @wsme_pecan.wsexpose(task_models.TaskResource, - wtypes.text) - def get(self, task_id): - """ Get details of a task - - :return: task details - :raises: - HTTPNotFound: task is not found - """ - try: - task = self.get_task(task_id) - except rpc.RemoteError: - raise exc.HTTPNotFound() - except Exception as e: - LOG.exception(e) - raise exc.HTTPNotFound() - return task_models.TaskResource(initial_data=task) - - def create_task(self, task): - ctx = pecan.request.context.to_dict() - try: - if task.period is wsme.Unset: - task.period = None - task.id = self.client.call( - ctx, - 'create_task', - plugin_id=task.plugin_id, - method_=task.method, - task_period=task.period, - task_name=task.name, - task_type=task.type, - persistent=task.persistent - ) - except rpc.RemoteError as e: - LOG.exception(e) - raise - - return task - - @wsme_pecan.wsexpose(task_models.TaskResource, - body=task_models.TaskResource) - def post(self, task): - """Create a task - - :return: task details - :raises: - HTTPBadRequest - """ - - try: - task = self.create_task(task) - except rpc.RemoteError as e: - LOG.exception(e) - raise exc.HTTPBadRequest(explanation=e.value) - except Exception as e: - LOG.exception(e) - raise exc.HTTPBadRequest() - return task - - @wsme_pecan.wsexpose(None, wtypes.text) - def delete(self, task_id): - """Delete a task - - :raises: - HTTPNotFound: task does not exist - """ - try: - self.delete_task(task_id) - except rpc.RemoteError as e: - raise exc.HTTPNotFound(explanation=e.value) - except Exception as e: - LOG.exception(e) - raise - - def delete_task(self, task_id): - ctx = pecan.request.context.to_dict() - try: - self.client.call(ctx, 'delete_recurrent_task', task_id=task_id) - except rpc.RemoteError as e: - LOG.exception(e) - raise diff --git a/cerberus/api/v1/datamodels/__init__.py b/cerberus/api/v1/datamodels/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/api/v1/datamodels/base.py b/cerberus/api/v1/datamodels/base.py deleted file mode 100644 index ee384af..0000000 --- a/cerberus/api/v1/datamodels/base.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
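
TasksController.post() above takes a TaskResource body and relays it to the Cerberus manager over RPC; in the datamodel (later in this patch) method and plugin_id are mandatory, name/type/persistent have defaults, and period only matters for recurrent tasks. A hypothetical creation request (field values are illustrative; host and port follow the API samples):

import json
import urllib.request

task = {
    'name': 'some_task',          # defaults to "unknown" when omitted
    'type': 'recurrent',          # defaults to "unique"
    'period': 3,                  # only meaningful for recurrent tasks
    'persistent': True,           # defaults to False
    'method': 'scan',             # mandatory; plugin method to run (made-up name)
    'plugin_id': '063d4206-5afc-409c-a4d1-c2a469299d37',  # mandatory
}
req = urllib.request.Request(
    'http://127.0.0.1:8888/v1/tasks',
    data=json.dumps(task).encode('utf-8'),
    headers={'Content-Type': 'application/json'},
    method='POST')
resp = urllib.request.urlopen(req)   # 400 comes back if the RPC call fails
print(json.loads(resp.read()))       # the task echoed back, now with its id
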
-# -import wsme -from wsme import types as wtypes - - -class Base(wtypes.Base): - - def as_dict_from_keys(self, keys): - return dict((k, getattr(self, k)) - for k in keys - if hasattr(self, k) and - getattr(self, k) != wsme.Unset) diff --git a/cerberus/api/v1/datamodels/plugin.py b/cerberus/api/v1/datamodels/plugin.py deleted file mode 100644 index 762c286..0000000 --- a/cerberus/api/v1/datamodels/plugin.py +++ /dev/null @@ -1,87 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -import decimal - -from cerberus.api.v1.datamodels import base -from wsme import types as wtypes - - -class PluginResource(base.Base): - """Type describing a plugin. - - """ - - name = wtypes.text - """Name of the plugin.""" - - id = wtypes.IntegerType() - """Id of the plugin.""" - - uuid = wtypes.text - """Uuid of the plugin.""" - - methods = [wtypes.text] - """Hook methods.""" - - version = wtypes.text - """Version of the plugin.""" - - provider = wtypes.text - """Provider of the plugin.""" - - subscribed_events = [wtypes.text] - """Subscribed events of the plugin.""" - - type = wtypes.text - """Type of the plugin.""" - - tool_name = wtypes.text - """Tool name of the plugin.""" - - description = wtypes.text - """Description of the plugin.""" - - def as_dict(self): - return self.as_dict_from_keys(['name', 'id', 'uuid', 'methods', - 'version', 'provider', - 'subscribed_events', 'type', - 'tool_name', 'description']) - - def __init__(self, initial_data): - super(PluginResource, self).__init__() - for key in initial_data: - setattr(self, key, initial_data[key]) - - @classmethod - def sample(cls): - sample = cls(initial_data={ - 'name': 'some_plugin', - 'version': '2015.1', - 'tool_name': 'some_tool', - 'provider': 'some_provider', - 'type': 'scanner', - 'id': decimal.Decimal(1), - 'uuid': '063d4206-5afc-409c-a4d1-c2a469299d37', - 'methods': ['method_1', 'method_2'], - 'subscribed_events': ['image.update']}) - return sample - - -class PluginResourceCollection(base.Base): - """A list of Plugins.""" - - plugins = [PluginResource] diff --git a/cerberus/api/v1/datamodels/security_alarm.py b/cerberus/api/v1/datamodels/security_alarm.py deleted file mode 100644 index b3832a4..0000000 --- a/cerberus/api/v1/datamodels/security_alarm.py +++ /dev/null @@ -1,94 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
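
Base.as_dict_from_keys() above is what keeps unset wsme attributes out of serialized payloads: a key survives only if the attribute exists and differs from wsme.Unset. The same filtering in plain Python, with a sentinel standing in for wsme.Unset:

UNSET = object()  # stand-in for wsme.Unset

class Resource(object):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def as_dict_from_keys(self, keys):
        # Mirror of Base.as_dict_from_keys: skip missing and unset attributes.
        return dict((k, getattr(self, k))
                    for k in keys
                    if hasattr(self, k) and getattr(self, k) is not UNSET)

r = Resource(name='some_plugin', provider=UNSET)
print(r.as_dict_from_keys(['name', 'provider', 'uuid']))
# {'name': 'some_plugin'} -- 'provider' is unset, 'uuid' is missing
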
-# - -import datetime -import decimal - -from cerberus.api.v1.datamodels import base -from wsme import types as wtypes - - -class SecurityAlarmResource(base.Base): - """ Representation of a security alarm. - """ - - id = wtypes.IntegerType() - """Security alarm id.""" - - plugin_id = wtypes.wsattr(wtypes.text) - """Associated plugin id.""" - - alarm_id = wtypes.wsattr(wtypes.text) - """Associated alarm id.""" - - timestamp = datetime.datetime - """creation date.""" - - status = wtypes.wsattr(wtypes.text) - """Status.""" - - severity = wtypes.wsattr(wtypes.text) - """Severity.""" - - project_id = wtypes.wsattr(wtypes.text) - """Associated project id.""" - - component_id = wtypes.wsattr(wtypes.text) - """Component id.""" - - summary = wtypes.wsattr(wtypes.text) - """Summary.""" - - description = wtypes.wsattr(wtypes.text) - """Description.""" - - ticket_id = wtypes.wsattr(wtypes.text) - """Associated ticket id.""" - - def as_dict(self): - return self.as_dict_from_keys( - ['id', 'plugin_id', 'alarm_id', 'timestamp', - 'status', 'severity', 'component_id', 'project_id', - 'summary', 'description', 'ticket_id'] - ) - - def __init__(self, initial_data=None): - super(SecurityAlarmResource, self).__init__() - if initial_data is not None: - for key in initial_data: - setattr(self, key, initial_data[key]) - - @classmethod - def sample(cls): - sample = cls(initial_data={ - 'id': decimal.Decimal(1), - 'plugin_id': '927c8435-f81f-468a-92cb-ebb08ed0fad2', - 'alarm_id': 'fea4b170-ed46-4a50-8b91-ed1c6876be7d', - 'timestamp': datetime.datetime(2015, 3, 24, 9, 50, 50, 577840), - 'status': 'new', - 'severity': 'critical', - 'project_id': 'e845a1f2004847e4ac14cb1732a2e75f', - 'component_id': '4b75699f7a9649438932bebdbf9711e0', - 'summary': 'Several attempts to log failed', - 'description': 'Apache suffered an attack by brute force.' - ' Thousands of attempts to log failed'}) - return sample - - -class SecurityAlarmResourceCollection(base.Base): - """A list of Security alarms.""" - - security_alarms = [SecurityAlarmResource] diff --git a/cerberus/api/v1/datamodels/security_report.py b/cerberus/api/v1/datamodels/security_report.py deleted file mode 100644 index f07e666..0000000 --- a/cerberus/api/v1/datamodels/security_report.py +++ /dev/null @@ -1,113 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import datetime -import uuid - -from cerberus.api.v1.datamodels import base -from wsme import types as wtypes - - -class SecurityReportResource(base.Base): - """ Representation of a security report. 
- """ - - uuid = wtypes.wsattr(wtypes.text) - """Security report id.""" - - plugin_id = wtypes.wsattr(wtypes.text) - """Associated plugin id.""" - - report_id = wtypes.wsattr(wtypes.text) - """Associated report id provided by plugin.""" - - component_id = wtypes.wsattr(wtypes.text) - """Associated component id.""" - - component_type = wtypes.wsattr(wtypes.text) - """Component type.""" - - component_name = wtypes.wsattr(wtypes.text) - """Component name.""" - - project_id = wtypes.wsattr(wtypes.text) - """Associated project id.""" - - title = wtypes.wsattr(wtypes.text) - """Title of report.""" - - description = wtypes.wsattr(wtypes.text) - """Description.""" - - security_rating = float - """Security rating.""" - - vulnerabilities = wtypes.wsattr(wtypes.text) - """Vulnerabilities.""" - - vulnerabilities_number = wtypes.IntegerType() - """Total of Vulnerabilities.""" - - last_report_date = datetime.datetime - """Last report date.""" - - ticket_id = wtypes.wsattr(wtypes.text, mandatory=True) - """Associated ticket id.""" - - def as_dict(self): - return self.as_dict_from_keys( - ['uuid', 'plugin_id', 'report_id', 'component_id', - 'component_type', 'component_name', 'project_id', - 'title', 'description', 'security_rating', - 'vulnerabilities', 'vulnerabilities_number', - 'last_report_date', 'ticket_id'] - ) - - def __init__(self, initial_data=None): - super(SecurityReportResource, self).__init__() - if initial_data is not None: - for key in initial_data: - setattr(self, key, initial_data[key]) - - @classmethod - def sample(cls): - sample = cls(initial_data={ - 'uuid': str(uuid.uuid4()), - 'security_rating': float(7.4), - 'component_name': 'openstack-server', - 'component_id': 'a1d869a1-6ab0-4f02-9e56-f83034bacfcb', - 'component_type': 'instance', - 'vulnerabilities_number': '2', - 'description': 'security report', - 'title': 'Security report', - 'last_report_date': datetime.datetime(2015, 5, 6, 16, 19, 29), - 'project_id': '510c7f4ed14243f09df371bba2561177', - 'plugin_id': '063d4206-5afc-409c-a4d1-c2a469299d37', - 'report_id': 'fea4b170-ed46-4a50-8b91-ed1c6876be7d', - 'vulnerabilities': '{"443": {"archived": "false", ' - '"protocol": "tcp", "family": "Web Servers", ' - '"iface_id": 329, ' - '"plugin": "1.3.6.1.4.1.25623.1.0.10386",' - '"ip": "192.168.100.3", "id": 443,' - '"output": "Summary": "Remote web server does' - ' not reply with 404 error code"}}'}) - return sample - - -class SecurityReportResourceCollection(base.Base): - """A list of Security reports.""" - - security_reports = [SecurityReportResource] diff --git a/cerberus/api/v1/datamodels/task.py b/cerberus/api/v1/datamodels/task.py deleted file mode 100644 index 24b1772..0000000 --- a/cerberus/api/v1/datamodels/task.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import decimal - -from cerberus.api.v1.datamodels import base -from wsme import types as wtypes - - -class TaskResource(base.Base): - """ Representation of a task. 
- """ - name = wtypes.wsattr(wtypes.text, default="unknown") - """Name of the task.""" - - period = wtypes.IntegerType() - """Period if periodic.""" - - method = wtypes.wsattr(wtypes.text, mandatory=True) - """Hook methods.""" - - state = wtypes.wsattr(wtypes.text) - """Running or not.""" - - id = wtypes.IntegerType() - """Associated task id.""" - - plugin_id = wtypes.wsattr(wtypes.text, mandatory=True) - """Associated plugin id.""" - - type = wtypes.wsattr(wtypes.text, default="unique") - """Type of the task.""" - - persistent = wtypes.wsattr(bool, default=False) - """If task must persist.""" - - def as_dict(self): - return self.as_dict_from_keys(['name', 'period', 'method', 'state', - 'id', 'plugin_id', 'type', - 'persistent']) - - def __init__(self, initial_data=None): - super(TaskResource, self).__init__() - if initial_data is not None: - for key in initial_data: - setattr(self, key, initial_data[key]) - - @classmethod - def sample(cls): - sample = cls(initial_data={ - 'name': 'some_task', - 'period': decimal.Decimal(3), - 'persistent': True, - 'state': 'running', - 'plugin_id': '063d4206-5afc-409c-a4d1-c2a469299d37', - 'type': 'recurrent', - 'id': '4820cea8-e88e-463b-ae1f-6bbde009cc93'}) - return sample - - -class TaskResourceCollection(base.Base): - """A list of Tasks.""" - - tasks = [TaskResource] diff --git a/cerberus/client/__init__.py b/cerberus/client/__init__.py deleted file mode 100644 index 73ca62b..0000000 --- a/cerberus/client/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/cerberus/client/keystone_client.py b/cerberus/client/keystone_client.py deleted file mode 100644 index 013e84c..0000000 --- a/cerberus/client/keystone_client.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import functools - -from keystoneclient.v2_0 import client as keystone_client_v2_0 -from oslo.config import cfg - -from cerberus.openstack.common import log - - -cfg.CONF.import_group('service_credentials', 'cerberus.service') - -LOG = log.getLogger(__name__) - - -def logged(func): - - @functools.wraps(func) - def with_logging(*args, **kwargs): - try: - return func(*args, **kwargs) - except Exception as e: - LOG.exception(e) - raise - - return with_logging - - -class Client(object): - """A client which gets information via python-keystoneclient.""" - - def __init__(self): - """Initialize a keystone client object.""" - conf = cfg.CONF.service_credentials - self.keystone_client_v2_0 = keystone_client_v2_0.Client( - username=conf.os_username, - password=conf.os_password, - tenant_name=conf.os_tenant_name, - auth_url=conf.os_auth_url, - region_name=conf.os_region_name, - ) - - @logged - def user_detail_get(self, user): - """Returns details for a user.""" - return self.keystone_client_v2_0.users.get(user) - - @logged - def roles_for_user(self, user, tenant=None): - """Returns role for a given id.""" - return self.keystone_client_v2_0.roles.roles_for_user(user, tenant) diff --git a/cerberus/client/neutron_client.py b/cerberus/client/neutron_client.py deleted file mode 100644 index cf3cf22..0000000 --- a/cerberus/client/neutron_client.py +++ /dev/null @@ -1,109 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
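
The logged decorator wrapped around each keystone client call above is a small reusable pattern: log the exception with its traceback, then re-raise so the caller still sees the original error; the same decorator reappears in the neutron and nova clients below. A self-contained version:

import functools
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

def logged(func):
    @functools.wraps(func)
    def with_logging(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            LOG.exception(e)  # full traceback goes to the log
            raise             # caller still gets the original exception
    return with_logging

@logged
def user_detail_get(user):
    raise ValueError('keystone is unreachable')  # stand-in failure

try:
    user_detail_get('some-user')
except ValueError:
    pass  # the traceback has already been logged
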
-# - -import functools - -from neutronclient.v2_0 import client as neutron_client -from oslo.config import cfg - -from cerberus.openstack.common import log - - -cfg.CONF.import_group('service_credentials', 'cerberus.service') - -LOG = log.getLogger(__name__) - - -def logged(func): - - @functools.wraps(func) - def with_logging(*args, **kwargs): - try: - return func(*args, **kwargs) - except Exception as e: - LOG.exception(e) - raise - - return with_logging - - -class Client(object): - """A client which gets information via python-neutronclient.""" - - def __init__(self): - """Initialize a neutron client object.""" - conf = cfg.CONF.service_credentials - self.neutronClient = neutron_client.Client( - username=conf.os_username, - password=conf.os_password, - tenant_name=conf.os_tenant_name, - auth_url=conf.os_auth_url, - ) - - @logged - def list_networks(self, tenant_id): - """Returns the list of networks of a given tenant""" - return self.neutronClient.list_networks( - tenant_id=tenant_id).get("networks", None) - - @logged - def list_floatingips(self, tenant_id): - """Returns the list of networks of a given tenant""" - return self.neutronClient.list_floatingips( - tenant_id=tenant_id).get("floatingips", None) - - @logged - def list_associated_floatingips(self, **params): - """Returns the list of associated floating ips of a given tenant""" - floating_ips = self.neutronClient.list_floatingips( - **params).get("floatingips", None) - # A floating IP is an IP address on an external network, which is - # associated with a specific port, and optionally a specific IP - # address, on a private OpenStack Networking network. Therefore a - # floating IP allows access to an instance on a private network from an - # external network. Floating IPs can only be defined on networks for - # which the attribute router:external (by the external network - # extension) has been set to True. - associated_floating_ips = [] - for floating_ip in floating_ips: - if floating_ip.get("port_id") is not None: - associated_floating_ips.append(floating_ip) - return associated_floating_ips - - @logged - def net_ips_get(self, network_id): - """ - Return ip pools used in all subnets of a network - :param network_id: - :return: list of pools - """ - subnets = self.neutronClient.show_network( - network_id)["network"]["subnets"] - ips = [] - for subnet in subnets: - ips.append(self.subnet_ips_get(subnet)) - return ips - - @logged - def get_net_of_subnet(self, subnet_id): - return self.neutronClient.show_subnet( - subnet_id)["subnet"]["network_id"] - - @logged - def subnet_ips_get(self, network_id): - """Returns ip pool of a subnet.""" - return self.neutronClient.show_subnet( - network_id)["subnet"]["allocation_pools"] diff --git a/cerberus/client/nova_client.py b/cerberus/client/nova_client.py deleted file mode 100644 index d7a5750..0000000 --- a/cerberus/client/nova_client.py +++ /dev/null @@ -1,109 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
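
list_associated_floatingips() above hinges on one Neutron detail, spelled out in its long comment: a floating IP is associated exactly when its port_id is set. The same filter over a mocked list_floatingips() response (addresses and IDs are illustrative):

# Shape of neutronclient's list_floatingips() result, reduced to the
# two fields the filter cares about.
response = {'floatingips': [
    {'floating_ip_address': '172.24.4.10', 'port_id': 'a1b2c3'},  # associated
    {'floating_ip_address': '172.24.4.11', 'port_id': None},      # allocated only
]}

associated = [fip for fip in response.get('floatingips', [])
              if fip.get('port_id') is not None]
print([fip['floating_ip_address'] for fip in associated])  # ['172.24.4.10']
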
-# - -import functools - -from novaclient.v3 import client as nova_client -from oslo.config import cfg - -from cerberus.openstack.common import log - - -OPTS = [ - cfg.BoolOpt('nova_http_log_debug', - default=False, - help='Allow novaclient\'s debug log output.'), -] - -SERVICE_OPTS = [ - cfg.StrOpt('nova', - default='compute', - help='Nova service type.'), -] - -cfg.CONF.register_opts(OPTS) -cfg.CONF.register_opts(SERVICE_OPTS, group='service_types') -# cfg.CONF.import_opt('http_timeout', 'cerberus.service') -cfg.CONF.import_group('service_credentials', 'cerberus.service') -LOG = log.getLogger(__name__) - - -def logged(func): - - @functools.wraps(func) - def with_logging(*args, **kwargs): - try: - return func(*args, **kwargs) - except Exception as e: - LOG.exception(e) - raise - - return with_logging - - -class Client(object): - """A client which gets information via python-novaclient.""" - def __init__(self, bypass_url=None, auth_token=None): - """Initialize a nova client object.""" - conf = cfg.CONF.service_credentials - tenant = conf.os_tenant_id or conf.os_tenant_name - self.nova_client = nova_client.Client( - username=conf.os_username, - project_id=tenant, - auth_url=conf.os_auth_url, - password=conf.os_password, - region_name=conf.os_region_name, - endpoint_type=conf.os_endpoint_type, - service_type=cfg.CONF.service_types.nova, - bypass_url=bypass_url, - cacert=conf.os_cacert, - insecure=conf.insecure, - http_log_debug=cfg.CONF.nova_http_log_debug, - no_cache=True) - - @logged - def instance_get_all(self): - """Returns list of all instances.""" - search_opts = {'all_tenants': True} - return self.nova_client.servers.list( - detailed=True, - search_opts=search_opts) - - @logged - def get_instance_details_from_floating_ip(self, ip): - """ - Get instance_id which is associated to the floating ip "ip" - :param ip: the floating ip that should belong to an instance - :return instance_id if ip is found, else None - """ - instances = self.instance_get_all() - - try: - for instance in instances: - # An instance can belong to many networks. An instance can - # have two ips in a network: - # at least a private ip and potentially a floating ip - addresses_in_networks = instance.addresses.values() - for addresses_in_network in addresses_in_networks: - for address_in_network in addresses_in_network: - if ((address_in_network.get('OS-EXT-IPS:type', None) - == 'floating') - and (address_in_network['addr'] == ip)): - return instance - except Exception as e: - LOG.exception(e) - raise - return None diff --git a/cerberus/cmd/__init__.py b/cerberus/cmd/__init__.py deleted file mode 100644 index 73ca62b..0000000 --- a/cerberus/cmd/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
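
get_instance_details_from_floating_ip() above has to walk every address of every server because an instance may hold both a fixed and a floating address on the same network; the match key is the OS-EXT-IPS:type attribute. The same scan over a mocked detailed server list (names and addresses are illustrative):

servers = [  # reduced form of nova's detailed server list
    {'name': 'web-1', 'addresses': {'private': [
        {'addr': '10.0.0.5', 'OS-EXT-IPS:type': 'fixed'},
        {'addr': '192.168.100.3', 'OS-EXT-IPS:type': 'floating'},
    ]}},
]

def instance_from_floating_ip(servers, ip):
    for server in servers:
        for network_addresses in server['addresses'].values():
            for address in network_addresses:
                if (address.get('OS-EXT-IPS:type') == 'floating'
                        and address['addr'] == ip):
                    return server
    return None  # mirrors the client: no match means None

print(instance_from_floating_ip(servers, '192.168.100.3')['name'])  # web-1
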
-# diff --git a/cerberus/cmd/agent.py b/cerberus/cmd/agent.py deleted file mode 100644 index f5bcbdc..0000000 --- a/cerberus/cmd/agent.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import sys - -from oslo.config import cfg - -from cerberus.common import config -from cerberus import manager -from cerberus.openstack.common import log -from cerberus.openstack.common import service - -LOG = log.getLogger(__name__) - - -def main(): - - log.set_defaults(cfg.CONF.default_log_levels) - argv = sys.argv - config.parse_args(argv) - log.setup(cfg.CONF, 'cerberus') - launcher = service.ProcessLauncher() - c_manager = manager.CerberusManager() - launcher.launch_service(c_manager) - launcher.wait() - - -if __name__ == '__main__': - main() diff --git a/cerberus/cmd/api.py b/cerberus/cmd/api.py deleted file mode 100644 index 7b0dc45..0000000 --- a/cerberus/cmd/api.py +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import sys - -from oslo.config import cfg - -from cerberus.api import app -from cerberus.common import config -from cerberus.openstack.common import log - - -CONF = cfg.CONF -CONF.import_opt('auth_strategy', 'cerberus.api') -LOG = log.getLogger(__name__) - - -def main(): - argv = sys.argv - config.parse_args(argv) - log.setup(cfg.CONF, 'cerberus') - server = app.build_server() - log.set_defaults(cfg.CONF.default_log_levels) - - try: - server.serve_forever() - except KeyboardInterrupt: - pass - LOG.info("cerberus-api starting...") - -if __name__ == '__main__': - main() diff --git a/cerberus/cmd/db_create.py b/cerberus/cmd/db_create.py deleted file mode 100644 index 5be9fc8..0000000 --- a/cerberus/cmd/db_create.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import sys - -from oslo.config import cfg -from sqlalchemy import create_engine - -from cerberus.common import config - - -def main(): - argv = sys.argv - config.parse_args(argv) - - engine = create_engine(cfg.CONF.database.connection) - - conn = engine.connect() - try: - conn.execute("CREATE DATABASE cerberus") - except Exception: - pass - - conn.close() - -if __name__ == '__main__': - main() diff --git a/cerberus/cmd/dbsync.py b/cerberus/cmd/dbsync.py deleted file mode 100644 index e8dc0d1..0000000 --- a/cerberus/cmd/dbsync.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Run storage database migration. -""" - -import sys - -from oslo.config import cfg - -from cerberus.db import migration -from cerberus import service - - -CONF = cfg.CONF - - -class DBCommand(object): - - def upgrade(self): - migration.upgrade(CONF.command.revision) - - def downgrade(self): - migration.downgrade(CONF.command.revision) - - def revision(self): - migration.revision(CONF.command.message, CONF.command.autogenerate) - - def stamp(self): - migration.stamp(CONF.command.revision) - - def version(self): - print(migration.version()) - - def create_schema(self): - migration.create_schema() - - -def add_command_parsers(subparsers): - command_object = DBCommand() - - parser = subparsers.add_parser('upgrade', - help="Upgrade the database schema to the latest version. " - "Optionally, use --revision to specify an alembic revision " - "string to upgrade to.") - parser.set_defaults(func=command_object.upgrade) - parser.add_argument('--revision', nargs='?') - - parser = subparsers.add_parser('downgrade', - help="Downgrade the database schema to the oldest revision. " - "While optional, one should generally use --revision to " - "specify the alembic revision string to downgrade to.") - parser.set_defaults(func=command_object.downgrade) - parser.add_argument('--revision', nargs='?') - - parser = subparsers.add_parser('stamp') - parser.add_argument('--revision', nargs='?') - parser.set_defaults(func=command_object.stamp) - - parser = subparsers.add_parser('revision', - help="Create a new alembic revision. 
" - "Use --message to set the message string.") - parser.add_argument('-m', '--message') - parser.add_argument('--autogenerate', action='store_true') - parser.set_defaults(func=command_object.revision) - - parser = subparsers.add_parser('version', - help="Print the current version information and exit.") - parser.set_defaults(func=command_object.version) - - parser = subparsers.add_parser('create_schema', - help="Create the database schema.") - parser.set_defaults(func=command_object.create_schema) - - -command_opt = cfg.SubCommandOpt('command', - title='Command', - help='Available commands', - handler=add_command_parsers) - -CONF.register_cli_opt(command_opt) - - -def main(): - # this is hack to work with previous usage of ironic-dbsync - # pls change it to ironic-dbsync upgrade - valid_commands = set([ - 'upgrade', 'downgrade', 'revision', - 'version', 'stamp', 'create_schema', - ]) - if not set(sys.argv) & valid_commands: - sys.argv.append('upgrade') - - service.prepare_service(sys.argv) - CONF.command.func() diff --git a/cerberus/common/__init__.py b/cerberus/common/__init__.py deleted file mode 100644 index 73ca62b..0000000 --- a/cerberus/common/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/cerberus/common/cerberus_impl_rabbit.py b/cerberus/common/cerberus_impl_rabbit.py deleted file mode 100644 index ad9494b..0000000 --- a/cerberus/common/cerberus_impl_rabbit.py +++ /dev/null @@ -1,147 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import functools -import json -import kombu -import logging - -from oslo.messaging._drivers import amqp as rpc_amqp -from oslo.messaging._drivers import amqpdriver -from oslo.messaging._drivers import common as rpc_common -from oslo.messaging._drivers import impl_rabbit -from oslo.messaging.openstack.common.gettextutils import _ # noqa - - -LOG = logging.getLogger(__name__) - - -def _get_queue_arguments(conf): - """Construct the arguments for declaring a queue. - - If the rabbit_ha_queues option is set, we declare a mirrored queue - as described here: - - http://www.rabbitmq.com/ha.html - - Setting x-ha-policy to all means that the queue will be mirrored - to all nodes in the cluster. 
- """ - return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {} - - -class CerberusRabbitMessage(dict): - - def __init__(self, raw_message): - if isinstance(raw_message.payload, unicode): - message = rpc_common.deserialize_msg( - json.loads(raw_message.payload)) - else: - message = rpc_common.deserialize_msg(raw_message.payload) - super(CerberusRabbitMessage, self).__init__(message) - self._raw_message = raw_message - - def acknowledge(self): - self._raw_message.ack() - - def requeue(self): - self._raw_message.requeue() - - -class CerberusConsumerBase(impl_rabbit.ConsumerBase): - - def _callback_handler(self, message, callback): - """Call callback with deserialized message. - - Messages that are processed and ack'ed. - """ - - try: - callback(CerberusRabbitMessage(message)) - except Exception: - LOG.exception(_("Failed to process message" - " ... skipping it.")) - message.ack() - - -class CerberusTopicConsumer(CerberusConsumerBase): - """Consumer class for 'topic'.""" - - def __init__(self, conf, channel, topic, callback, tag, exchange_name, - name=None, **kwargs): - """Init a 'topic' queue. - - :param channel: the amqp channel to use - :param topic: the topic to listen on - :paramtype topic: str - :param callback: the callback to call when messages are received - :param tag: a unique ID for the consumer on the channel - :param exchange_name: the exchange name to use - :param name: optional queue name, defaults to topic - :paramtype name: str - - Other kombu options may be passed as keyword arguments - """ - # Default options - options = {'durable': conf.amqp_durable_queues, - 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': conf.amqp_auto_delete, - 'exclusive': False} - options.update(kwargs) - exchange = kombu.entity.Exchange(name=exchange_name, - type='topic', - durable=options['durable'], - auto_delete=options['auto_delete']) - super(CerberusTopicConsumer, self).__init__(channel, - callback, - tag, - name=name or topic, - exchange=exchange, - routing_key=topic, - **options) - - -class CerberusConnection(impl_rabbit.Connection): - - def __init__(self, conf, url): - super(CerberusConnection, self).__init__(conf, url) - - def declare_topic_consumer(self, exchange_name, topic, callback=None, - queue_name=None): - """Create a 'topic' consumer.""" - self.declare_consumer(functools.partial(CerberusTopicConsumer, - name=queue_name, - exchange_name=exchange_name, - ), - topic, callback) - - -class CerberusRabbitDriver(amqpdriver.AMQPDriverBase): - - def __init__(self, conf, url, - default_exchange=None, - allowed_remote_exmods=None): - conf.register_opts(impl_rabbit.rabbit_opts) - conf.register_opts(rpc_amqp.amqp_opts) - - connection_pool = rpc_amqp.get_connection_pool(conf, - url, - CerberusConnection) - - super(CerberusRabbitDriver, self).__init__(conf, url, - connection_pool, - default_exchange, - allowed_remote_exmods) diff --git a/cerberus/common/config.py b/cerberus/common/config.py deleted file mode 100644 index d0e790d..0000000 --- a/cerberus/common/config.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -from oslo.config import cfg - -from cerberus import version - - -def parse_args(argv, default_config_files=None): - cfg.CONF(argv[1:], - project='cerberus', - version=version.version_info.release_string(), - default_config_files=default_config_files) diff --git a/cerberus/common/context.py b/cerberus/common/context.py deleted file mode 100644 index c14c422..0000000 --- a/cerberus/common/context.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from cerberus.openstack.common import context - - -class RequestContext(context.RequestContext): - """Extends security contexts from the OpenStack common library.""" - - def __init__(self, auth_token=None, domain_id=None, domain_name=None, - user=None, tenant_id=None, tenant=None, is_admin=False, - is_public_api=False, read_only=False, show_deleted=False, - request_id=None, roles=None): - """Stores several additional request parameters: - - :param domain_id: The ID of the domain. - :param domain_name: The name of the domain. - :param is_public_api: Specifies whether the request should be processed - without authentication. - - """ - self.tenant_id = tenant_id - self.is_public_api = is_public_api - self.domain_id = domain_id - self.domain_name = domain_name - self.roles = roles or [] - - super(RequestContext, self).__init__(auth_token=auth_token, - user=user, tenant=tenant, - is_admin=is_admin, - read_only=read_only, - show_deleted=show_deleted, - request_id=request_id) - - def to_dict(self): - return {'auth_token': self.auth_token, - 'user': self.user, - 'tenant_id': self.tenant_id, - 'tenant': self.tenant, - 'is_admin': self.is_admin, - 'read_only': self.read_only, - 'show_deleted': self.show_deleted, - 'request_id': self.request_id, - 'domain_id': self.domain_id, - 'roles': self.roles, - 'domain_name': self.domain_name, - 'is_public_api': self.is_public_api} - - @classmethod - def from_dict(cls, values): - values.pop('user', None) - values.pop('tenant', None) - return cls(**values) diff --git a/cerberus/common/errors.py b/cerberus/common/errors.py deleted file mode 100644 index 10df02b..0000000 --- a/cerberus/common/errors.py +++ /dev/null @@ -1,124 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
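
A small usage sketch of the context round-trip defined above (assuming the retired cerberus package were still importable). Note that from_dict() deliberately pops the user and tenant keys, so those two attributes come back as None after a round trip:

    from cerberus.common import context

    ctx = context.RequestContext(auth_token='token', user='alice',
                                 tenant_id='proj-1', roles=['admin'],
                                 is_public_api=False)
    payload = ctx.to_dict()          # plain dict, safe to pass over RPC
    restored = context.RequestContext.from_dict(payload)

    assert restored.tenant_id == 'proj-1'
    assert restored.roles == ['admin']
    assert restored.user is None     # popped by from_dict()
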
-# - -from cerberus.openstack.common.gettextutils import _ # noqa - - -class InvalidOperation(Exception): - - def __init__(self, description): - super(InvalidOperation, self).__init__(description) - - -class PluginNotFound(InvalidOperation): - - def __init__(self, uuid): - super(PluginNotFound, self).__init__("Plugin %s does not exist" - % str(uuid)) - - -class TaskPeriodNotInteger(InvalidOperation): - - def __init__(self): - super(TaskPeriodNotInteger, self).__init__( - "The period of the task must be provided as an integer" - ) - - -class TaskNotFound(InvalidOperation): - - def __init__(self, _id): - super(TaskNotFound, self).__init__( - _('Task %s does not exist') % _id - ) - - -class TaskDeletionNotAllowed(InvalidOperation): - def __init__(self, _id): - super(TaskDeletionNotAllowed, self).__init__( - _("Deletion of task %s is not allowed because either it " - "does not exist or it is not recurrent") % _id - ) - - -class TaskStartNotAllowed(InvalidOperation): - def __init__(self, _id): - super(TaskStartNotAllowed, self).__init__( - _("Starting task %s is not allowed because either it " - "does not exist or it is not recurrent") % _id - ) - - -class TaskStartNotPossible(InvalidOperation): - def __init__(self, _id): - super(TaskStartNotPossible, self).__init__( - _("Starting task %s is not possible because it is running") % _id - ) - - -class MethodNotString(InvalidOperation): - - def __init__(self): - super(MethodNotString, self).__init__( - "Method must be provided as a string" - ) - - -class MethodNotCallable(InvalidOperation): - - def __init__(self, method, name): - super(MethodNotCallable, self).__init__( - "Method named %s is not callable by plugin %s" - % (str(method), str(name)) - ) - - -class TaskObjectNotProvided(InvalidOperation): - - def __init__(self): - super(TaskObjectNotProvided, self).__init__( - "Task object not provided in request" - ) - - -class PluginIdNotProvided(InvalidOperation): - - def __init__(self): - super(PluginIdNotProvided, self).__init__( - "Plugin id not provided in request" - ) - - -class MethodNotProvided(InvalidOperation): - - def __init__(self): - super(MethodNotProvided, self).__init__( - "Method not provided in request" - ) - - -class PolicyEnforcementError(Exception): - - def __init__(self): - super(PolicyEnforcementError, self).__init__( - "Policy enforcement error" - ) - - -class DbError(Exception): - - def __init__(self, description): - super(DbError, self).__init__(description) diff --git a/cerberus/common/exception.py b/cerberus/common/exception.py deleted file mode 100644 index d4a8005..0000000 --- a/cerberus/common/exception.py +++ /dev/null @@ -1,161 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""Cerberus base exception handling. - -Includes decorator for re-raising Nova-type exceptions. - -SHOULD include dedicated exception logging. 
- -""" - -import functools -import logging -import sys -import webob.exc - -from oslo.config import cfg - -from cerberus.common import safe_utils -from cerberus.openstack.common import excutils -from cerberus.openstack.common.gettextutils import _ # noqa - - -LOG = logging.getLogger(__name__) - -exc_log_opts = [ - cfg.BoolOpt('fatal_exception_format_errors', - default=False, - help='Make exception message format errors fatal'), -] - -CONF = cfg.CONF -CONF.register_opts(exc_log_opts) - - -class ConvertedException(webob.exc.WSGIHTTPException): - def __init__(self, code=0, title="", explanation=""): - self.code = code - self.title = title - self.explanation = explanation - super(ConvertedException, self).__init__() - - -def _cleanse_dict(original): - """Strip all admin_password, new_pass, rescue_pass keys from a dict.""" - return dict((k, v) for k, v in original.iteritems() if "_pass" not in k) - - -def wrap_exception(notifier=None, get_notifier=None): - """This decorator wraps a method to catch any exceptions that may - get thrown. It logs the exception as well as optionally sending - it to the notification system. - """ - def inner(f): - def wrapped(self, context, *args, **kw): - # Don't store self or context in the payload, it now seems to - # contain confidential information. - try: - return f(self, context, *args, **kw) - except Exception as e: - with excutils.save_and_reraise_exception(): - if notifier or get_notifier: - payload = dict(exception=e) - call_dict = safe_utils.getcallargs(f, context, - *args, **kw) - cleansed = _cleanse_dict(call_dict) - payload.update({'args': cleansed}) - - # If f has multiple decorators, they must use - # functools.wraps to ensure the name is - # propagated. - event_type = f.__name__ - - (notifier or get_notifier()).error(context, - event_type, - payload) - - return functools.wraps(f)(wrapped) - return inner - - -class CerberusException(Exception): - """Base Cerberus Exception - - To correctly use this class, inherit from it and define - a 'msg_fmt' property. That msg_fmt will get printf'd - with the keyword arguments provided to the constructor. 
- - """ - msg_fmt = _("An unknown exception occurred.") - code = 500 - headers = {} - safe = False - - def __init__(self, message=None, **kwargs): - self.kwargs = kwargs - - if 'code' not in self.kwargs: - try: - self.kwargs['code'] = self.code - except AttributeError: - pass - - if not message: - try: - message = self.msg_fmt % kwargs - - except Exception: - exc_info = sys.exc_info() - # kwargs doesn't match a variable in the message - # log the issue and the kwargs - LOG.exception(_('Exception in string format operation')) - for name, value in kwargs.iteritems(): - LOG.error("%s: %s" % (name, value)) # noqa - - if CONF.fatal_exception_format_errors: - raise exc_info[0], exc_info[1], exc_info[2] - else: - # at least get the core message out if something happened - message = self.msg_fmt - - super(CerberusException, self).__init__(message) - - def format_message(self): - # NOTE(mrodden): use the first argument to the python Exception object - # which should be our full NovaException message, (see __init__) - return self.args[0] - - -class DBException(CerberusException): - msg_fmt = _("Database error.") - - -class ReportExists(DBException): - msg_fmt = _("Report %(report_id)s already exists for plugin " - "%(plugin_id)s.") - - -class PluginInfoExists(DBException): - msg_fmt = _("Plugin info %(plugin_id)s already exists.") - - -class AlarmExists(DBException): - msg_fmt = _("Alarm %(alarm_id)s already exists.") - - -class TaskExists(DBException): - msg_fmt = _("Task %(task_id)s already exists.") diff --git a/cerberus/common/json_encoders.py b/cerberus/common/json_encoders.py deleted file mode 100644 index 51826bf..0000000 --- a/cerberus/common/json_encoders.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import datetime -import json - - -class DateTimeEncoder(json.JSONEncoder): - def default(self, obj): - """JSON serializer for objects not serializable by default json code""" - if isinstance(obj, datetime.datetime): - serial = obj.isoformat() - return serial diff --git a/cerberus/common/loopingcall.py b/cerberus/common/loopingcall.py deleted file mode 100644 index 386efb4..0000000 --- a/cerberus/common/loopingcall.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import sys - -from eventlet import event -from eventlet import greenthread - -from cerberus.openstack.common.gettextutils import _LE, _LW # noqa -from cerberus.openstack.common import log as logging -from cerberus.openstack.common import loopingcall -from cerberus.openstack.common import timeutils - - -LOG = logging.getLogger(__name__) - - -class CerberusFixedIntervalLoopingCall(loopingcall.FixedIntervalLoopingCall): - """A fixed interval looping call.""" - - def start(self, interval, initial_delay=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - start = timeutils.utcnow() - self.f(*self.args, **self.kw) - end = timeutils.utcnow() - if not self._running: - break - delay = interval - timeutils.delta_seconds(start, end) - if delay <= 0: - LOG.warn(_LW('task run outlasted interval by %s sec') % - -delay) - greenthread.sleep(delay if delay > 0 else 0) - except loopingcall.LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_LE('in fixed duration looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - self.gt = greenthread.spawn(_inner) - return self.done diff --git a/cerberus/common/policy.py b/cerberus/common/policy.py deleted file mode 100644 index 61a66d7..0000000 --- a/cerberus/common/policy.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) 2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Policy Engine For Cerberus.""" - -from oslo.config import cfg - -from cerberus.openstack.common import policy - -_ENFORCER = None -CONF = cfg.CONF - - -def init_enforcer(policy_file=None, rules=None, - default_rule=None, use_conf=True): - """Synchronously initializes the policy enforcer - - :param policy_file: Custom policy file to use, if none is specified, - `CONF.policy_file` will be used. - :param rules: Default dictionary / Rules to use. It will be - considered just in the first instantiation. - :param default_rule: Default rule to use, CONF.default_rule will - be used if none is specified. - :param use_conf: Whether to load rules from config file. - - """ - global _ENFORCER - - if _ENFORCER: - return - - _ENFORCER = policy.Enforcer(policy_file=policy_file, - rules=rules, - default_rule=default_rule, - use_conf=use_conf) - - -def get_enforcer(): - """Provides access to the single instance of Policy enforcer.""" - - if not _ENFORCER: - init_enforcer() - - return _ENFORCER - - -def enforce(rule, target, creds, do_raise=False, exc=None, *args, **kwargs): - """A shortcut for policy.Enforcer.enforce() - - Checks authorization of a rule against the target and credentials. 
- - """ - enforcer = get_enforcer() - return enforcer.enforce(rule, target, creds, do_raise=do_raise, - exc=exc, *args, **kwargs) diff --git a/cerberus/common/safe_utils.py b/cerberus/common/safe_utils.py deleted file mode 100644 index 1efb582..0000000 --- a/cerberus/common/safe_utils.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""Utilities and helper functions that won't produce circular imports.""" - -import inspect -import six - -from cerberus.openstack.common import log - -LOG = log.getLogger(__name__) - - -def getcallargs(function, *args, **kwargs): - """This is a simplified inspect.getcallargs (2.7+). - - It should be replaced when python >= 2.7 is standard. - """ - keyed_args = {} - argnames, varargs, keywords, defaults = inspect.getargspec(function) - - keyed_args.update(kwargs) - - # NOTE(alaski) the implicit 'self' or 'cls' argument shows up in - # argnames but not in args or kwargs. Uses 'in' rather than '==' because - # some tests use 'self2'. - if 'self' in argnames[0] or 'cls' == argnames[0]: - # The function may not actually be a method or have im_self. - # Typically seen when it's stubbed with mox. - if inspect.ismethod(function) and hasattr(function, 'im_self'): - keyed_args[argnames[0]] = function.im_self - else: - keyed_args[argnames[0]] = None - - remaining_argnames = filter(lambda x: x not in keyed_args, argnames) - keyed_args.update(dict(zip(remaining_argnames, args))) - - if defaults: - num_defaults = len(defaults) - for argname, value in zip(argnames[-num_defaults:], defaults): - if argname not in keyed_args: - keyed_args[argname] = value - - return keyed_args - - -def safe_rstrip(value, chars=None): - """Removes trailing characters from a string if that does not make it empty - :param value: A string value that will be stripped. - :param chars: Characters to remove. - :return: Stripped value. - """ - if not isinstance(value, six.string_types): - LOG.warn(("Failed to remove trailing character. Returning original " - "object. Supplied object is not a string: %s,") % value) - return value - return value.rstrip(chars) or value diff --git a/cerberus/common/serialize.py b/cerberus/common/serialize.py deleted file mode 100644 index ef8075a..0000000 --- a/cerberus/common/serialize.py +++ /dev/null @@ -1,110 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import parser - - -class JsonSerializer(object): - """A serializer that provides methods to serialize and deserialize JSON - dictionaries. - - Note, one of the assumptions this serializer makes is that all objects that - it is used to deserialize have a constructor that can take all of the - attribute arguments. I.e. If you have an object with 3 attributes, the - constructor needs to take those three attributes as keyword arguments. - """ - - __attributes__ = None - """The attributes to be serialized by the seralizer. - The implementor needs to provide these.""" - - __required__ = None - """The attributes that are required when deserializing. - The implementor needs to provide these.""" - - __attribute_serializer__ = None - """The serializer to use for a specified attribute. If an attribute is not - included here, no special serializer will be user. - The implementor needs to provide these.""" - - __object_class__ = None - """The class that the deserializer should generate. - The implementor needs to provide these.""" - - serializers = dict( - date=dict( - serialize=lambda x: x.isoformat(), - deserialize=lambda x: parser.parse(x) - ) - ) - - def deserialize(self, json, **kwargs): - """Deserialize a JSON dictionary and return a populated object. - - This takes the JSON data, and deserializes it appropriately and then - calls the constructor of the object to be created with all of the - attributes. - - Args: - json: The JSON dict with all of the data - **kwargs: Optional values that can be used as defaults if they are - not present in the JSON data - Returns: - The deserialized object. - Raises: - ValueError: If any of the required attributes are not present - """ - d = dict() - for attr in self.__attributes__: - if attr in json: - val = json[attr] - elif attr in self.__required__: - try: - val = kwargs[attr] - except KeyError: - raise ValueError("{} must be set".format(attr)) - - serializer = self.__attribute_serializer__.get(attr) - if serializer: - d[attr] = self.serializers[serializer]['deserialize'](val) - else: - d[attr] = val - - return self.__object_class__(**d) - - def serialize(self, obj): - """Serialize an object to a dictionary. - - Take all of the attributes defined in self.__attributes__ and create - a dictionary containing those values. - - Args: - obj: The object to serialize - Returns: - A dictionary containing all of the serialized data from the object. - """ - d = dict() - for attr in self.__attributes__: - val = getattr(obj, attr) - if val is None: - continue - serializer = self.__attribute_serializer__.get(attr) - if serializer: - d[attr] = self.serializers[serializer]['serialize'](val) - else: - d[attr] = val - - return d diff --git a/cerberus/common/service.py b/cerberus/common/service.py deleted file mode 100644 index 8093dbd..0000000 --- a/cerberus/common/service.py +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
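
One fix and one sketch for the serializer above. serialize.py imports the stdlib parser module, which has no parse() function; the date entry in `serializers` only works with the dateutil parser, so the intended import was presumably `from dateutil import parser`. Assuming that correction, a minimal concrete serializer following the documented __attributes__/__required__/__attribute_serializer__/__object_class__ contract (Alarm and AlarmSerializer are illustrative names):

    import datetime

    from cerberus.common import serialize


    class Alarm(object):
        def __init__(self, alarm_id, timestamp):
            self.alarm_id = alarm_id
            self.timestamp = timestamp


    class AlarmSerializer(serialize.JsonSerializer):
        __attributes__ = ['alarm_id', 'timestamp']
        __required__ = ['alarm_id']
        __attribute_serializer__ = dict(timestamp='date')
        __object_class__ = Alarm


    ser = AlarmSerializer()
    doc = ser.serialize(Alarm('a-1', datetime.datetime(2015, 6, 25)))
    # -> {'alarm_id': 'a-1', 'timestamp': '2015-06-25T00:00:00'}
    alarm = ser.deserialize(doc)  # needs the dateutil import fix above
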
-# - -from cerberus.common import threadgroup -from cerberus.openstack.common import service - - -class CerberusService(service.Service): - - def __init__(self, threads=1000): - super(CerberusService, self).__init__(threads) - self.tg = threadgroup.CerberusThreadGroup(threads) - - -class CerberusServices(service.Services): - - def __init__(self): - super(CerberusServices, self).__init__() - self.tg = threadgroup.CerberusThreadGroup() diff --git a/cerberus/common/threadgroup.py b/cerberus/common/threadgroup.py deleted file mode 100644 index 64a64a0..0000000 --- a/cerberus/common/threadgroup.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from cerberus.common import loopingcall -from cerberus.db.sqlalchemy import api as db_api -from cerberus.openstack.common import threadgroup - - -class CerberusThread(threadgroup.Thread): - def __init__(self, f, thread, group, *args, **kwargs): - super(CerberusThread, self).__init__(thread, group) - self.f = f - self.args = args - self.kw = kwargs - - -class CerberusThreadGroup(threadgroup.ThreadGroup): - - def add_stopped_timer(self, callback, *args, **kwargs): - pulse = loopingcall.CerberusFixedIntervalLoopingCall(callback, - *args, - **kwargs) - self.timers.append(pulse) - return pulse - - def add_timer(self, interval, callback, initial_delay=None, - *args, **kwargs): - pulse = loopingcall.CerberusFixedIntervalLoopingCall(callback, - *args, - **kwargs) - pulse.start(interval=interval, - initial_delay=initial_delay) - self.timers.append(pulse) - return pulse - - def add_thread(self, callback, *args, **kwargs): - gt = self.pool.spawn(callback, *args, **kwargs) - th = CerberusThread(callback, gt, self, *args, **kwargs) - self.threads.append(th) - return th - - def thread_done(self, thread): - self.threads.remove(thread) - try: - db_api.delete_task(thread.kw.get('task_id')) - except Exception: - raise diff --git a/cerberus/common/utils.py b/cerberus/common/utils.py deleted file mode 100644 index f7c8646..0000000 --- a/cerberus/common/utils.py +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# Copyright (c) 2012 NTT DOCOMO, INC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
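
How the pieces above fit together: CerberusService builds a CerberusThreadGroup, whose add_timer() wraps a callback in the CerberusFixedIntervalLoopingCall defined earlier, so periodic plugin tasks tick at a fixed interval with each run's duration subtracted from the next sleep. A usage sketch (the callback and interval are illustrative, and this only actually runs inside an eventlet-based service):

    from cerberus.common import threadgroup


    def poll_reports():
        print('polling security reports')  # illustrative callback body


    tg = threadgroup.CerberusThreadGroup()

    # Fires every 60 seconds after a 5 second initial delay.
    tg.add_timer(60, poll_reports, initial_delay=5)

    # A timer can also be registered stopped and started later:
    pulse = tg.add_stopped_timer(poll_reports)
    pulse.start(interval=60)
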
- -"""Utilities and helper functions.""" - - -import netaddr -import re -import six -import uuid - -from oslo.config import cfg - -from cerberus.common import exception -from cerberus.openstack.common.gettextutils import _ # noqa -from cerberus.openstack.common import log as logging - -CONF = cfg.CONF - -LOG = logging.getLogger(__name__) - - -class LazyPluggable(object): - """A pluggable backend loaded lazily based on some value.""" - - def __init__(self, pivot, config_group=None, **backends): - self.__backends = backends - self.__pivot = pivot - self.__backend = None - self.__config_group = config_group - - def __get_backend(self): - if not self.__backend: - if self.__config_group is None: - backend_name = CONF[self.__pivot] - else: - backend_name = CONF[self.__config_group][self.__pivot] - if backend_name not in self.__backends: - msg = _('Invalid backend: %s') % backend_name - raise exception.CerberusException(msg) - - backend = self.__backends[backend_name] - if isinstance(backend, tuple): - name = backend[0] - fromlist = backend[1] - else: - name = backend - fromlist = backend - - self.__backend = __import__(name, None, None, fromlist) - return self.__backend - - def __getattr__(self, key): - backend = self.__get_backend() - return getattr(backend, key) - - -def is_valid_ipv4(address): - """Verify that address represents a valid IPv4 address.""" - try: - return netaddr.valid_ipv4(address) - except Exception: - return False - - -def is_valid_ipv6(address): - try: - return netaddr.valid_ipv6(address) - except Exception: - return False - - -def is_valid_ipv6_cidr(address): - try: - str(netaddr.IPNetwork(address, version=6).cidr) - return True - except Exception: - return False - - -def get_shortened_ipv6(address): - addr = netaddr.IPAddress(address, version=6) - return str(addr.ipv6()) - - -def get_shortened_ipv6_cidr(address): - net = netaddr.IPNetwork(address, version=6) - return str(net.cidr) - - -def is_valid_cidr(address): - """Check if the provided ipv4 or ipv6 address is a valid CIDR address.""" - try: - # Validate the correct CIDR Address - netaddr.IPNetwork(address) - except netaddr.core.AddrFormatError: - return False - except UnboundLocalError: - # NOTE(MotoKen): work around bug in netaddr 0.7.5 (see detail in - # https://github.com/drkjam/netaddr/issues/2) - return False - - # Prior validation partially verify /xx part - # Verify it here - ip_segment = address.split('/') - - if (len(ip_segment) <= 1 or - ip_segment[1] == ''): - return False - - return True - - -def get_ip_version(network): - """Returns the IP version of a network (IPv4 or IPv6). - - :raises: AddrFormatError if invalid network. - """ - if netaddr.IPNetwork(network).version == 6: - return "IPv6" - elif netaddr.IPNetwork(network).version == 4: - return "IPv4" - - -def convert_to_list_dict(lst, label): - """Convert a value or list into a list of dicts.""" - if not lst: - return None - if not isinstance(lst, list): - lst = [lst] - return [{label: x} for x in lst] - - -def sanitize_hostname(hostname): - """Return a hostname which conforms to RFC-952 and RFC-1123 specs.""" - if isinstance(hostname, six.text_type): - hostname = hostname.encode('latin-1', 'ignore') - - hostname = re.sub('[ _]', '-', hostname) - hostname = re.sub('[^\w.-]+', '', hostname) - hostname = hostname.lower() - hostname = hostname.strip('.-') - - return hostname - - -def generate_uuid(): - return str(uuid.uuid4()) - - -def is_uuid_like(val): - """Returns validation of a value as a UUID. 
- - For our purposes, a UUID is a canonical form string: - aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa - - """ - try: - return str(uuid.UUID(val)) == val - except (TypeError, ValueError, AttributeError): - return False diff --git a/cerberus/db/__init__.py b/cerberus/db/__init__.py deleted file mode 100644 index 2261e4c..0000000 --- a/cerberus/db/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from cerberus.db.api import * # noqa diff --git a/cerberus/db/api.py b/cerberus/db/api.py deleted file mode 100644 index b68cb9f..0000000 --- a/cerberus/db/api.py +++ /dev/null @@ -1,137 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from oslo.config import cfg - -from cerberus.db.sqlalchemy import models -from cerberus.openstack.common.db import api as db_api - - -CONF = cfg.CONF -CONF.import_opt('backend', 'cerberus.openstack.common.db.options', - group='database') - -_BACKEND_MAPPING = {'sqlalchemy': 'cerberus.db.sqlalchemy.api'} -IMPL = db_api.DBAPI(CONF.database.backend, backend_mapping=_BACKEND_MAPPING, - lazy=True) - - -def setup_db(): - engine = get_engine() - models.register_models(engine) - - -def drop_db(): - engine = get_engine() - models.unregister_models(engine) - - -def get_instance(): - """Return a DB API instance.""" - return IMPL - - -def get_engine(): - return IMPL.get_engine() - - -def get_session(): - return IMPL.get_session() - - -def security_report_create(values): - """Create an instance from the values dictionary.""" - return IMPL.security_report_create(values) - - -def security_report_update_last_report_date(uuid, date): - """Create an instance from the values dictionary.""" - return IMPL.security_report_update_last_report_date(uuid, date) - - -def security_report_update_ticket_id(uuid, ticket_id): - """Create an instance from the values dictionary.""" - return IMPL.security_report_update_ticket_id(uuid, ticket_id) - - -def security_report_get_all(project_id=None): - """Get all security reports""" - return IMPL.security_report_get_all(project_id=project_id) - - -def security_report_get(uuid): - """Get security report from its id in database""" - return IMPL.security_report_get(uuid) - - -def security_report_get_from_report_id(report_id): - """Get security report from its report identifier""" - return IMPL.security_report_get_from_report_id(report_id) - - -def security_report_delete(report_id): - """Delete security report from its report identifier""" - 
return IMPL.security_report_delete(report_id) - - -def plugins_info_get(): - """Get information about plugins stored in db""" - return IMPL.plugins_info_get() - - -def plugin_info_get_from_uuid(id): - """ - Get information about plugin stored in db - :param id: the uuid of the plugin - """ - return IMPL.plugin_info_get_from_uuid(id) - - -def plugin_version_update(id, version): - return IMPL.plugin_version_update(id, version) - - -def security_alarm_create(values): - return IMPL.security_alarm_create(values) - - -def security_alarm_get_all(): - return IMPL.security_alarm_get_all() - - -def security_alarm_get(id): - return IMPL.security_alarm_get(id) - - -def security_alarm_update_ticket_id(alarm_id, ticket_id): - """Create an instance from the values dictionary.""" - return IMPL.security_alarm_update_ticket_id(alarm_id, ticket_id) - - -def create_task(values): - return IMPL.create_task(values) - - -def delete_task(id): - IMPL.delete_task(id) - - -def update_state_task(id, running): - IMPL.update_state_task(id, running) - - -def get_all_tasks(): - return IMPL.get_all_tasks() diff --git a/cerberus/db/migration.py b/cerberus/db/migration.py deleted file mode 100644 index fae3117..0000000 --- a/cerberus/db/migration.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""Database setup and migration commands.""" - -from oslo.config import cfg - -from cerberus.common import utils - -CONF = cfg.CONF -CONF.import_opt('backend', - 'cerberus.openstack.common.db.options', - group='database') - -IMPL = utils.LazyPluggable( - pivot='backend', - config_group='database', - sqlalchemy='cerberus.db.sqlalchemy.migration') - - -INIT_VERSION = 0 - - -def upgrade(version=None): - """Migrate the database to `version` or the most recent version.""" - return IMPL.upgrade(version) - - -def downgrade(version=None): - return IMPL.downgrade(version) - - -def version(): - return IMPL.version() - - -def stamp(version): - return IMPL.stamp(version) - - -def revision(message, autogenerate): - return IMPL.revision(message, autogenerate) diff --git a/cerberus/db/sqlalchemy/__init__.py b/cerberus/db/sqlalchemy/__init__.py deleted file mode 100644 index 73ca62b..0000000 --- a/cerberus/db/sqlalchemy/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
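
Both indirection layers above resolve a backend module from CONF.database.backend: cerberus.db.api goes through oslo's DBAPI with a backend_mapping, while cerberus.db.migration uses the LazyPluggable helper from cerberus.common.utils, which imports the target module only on first attribute access. A condensed, standalone sketch of that resolution path, with the config lookup stubbed out:

    class LazyBackend(object):
        """Condensed cerberus.common.utils.LazyPluggable: choose a module
        by config value and import it on first attribute access."""

        def __init__(self, pivot, **backends):
            self._pivot = pivot          # config option naming the backend
            self._backends = backends    # backend name -> module path
            self._backend = None

        def __getattr__(self, key):
            if self._backend is None:
                name = 'sqlalchemy'      # stands in for CONF[self._pivot]
                self._backend = __import__(self._backends[name],
                                           None, None, ['__name__'])
            return getattr(self._backend, key)


    # cerberus.db.migration builds IMPL this way, so migration.upgrade()
    # imports cerberus.db.sqlalchemy.migration only when first called.
    IMPL = LazyBackend('backend', sqlalchemy='json')  # 'json' just for demo
    print(IMPL.dumps({'lazy': True}))                 # resolved on first use
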
-# diff --git a/cerberus/db/sqlalchemy/alembic.ini b/cerberus/db/sqlalchemy/alembic.ini deleted file mode 100644 index a768980..0000000 --- a/cerberus/db/sqlalchemy/alembic.ini +++ /dev/null @@ -1,54 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = %(here)s/alembic - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -#sqlalchemy.url = driver://user:pass@localhost/dbname - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/cerberus/db/sqlalchemy/alembic/README b/cerberus/db/sqlalchemy/alembic/README deleted file mode 100644 index 75d886f..0000000 --- a/cerberus/db/sqlalchemy/alembic/README +++ /dev/null @@ -1,16 +0,0 @@ -Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation - -To create alembic migrations use: -$ cerberus-dbsync revision --message --autogenerate - -Stamp db with most recent migration version, without actually running migrations -$ cerberus-dbsync stamp --revision head - -Upgrade can be performed by: -$ cerberus-dbsync - for backward compatibility -$ cerberus-dbsync upgrade -# cerberus-dbsync upgrade --revision head - -Downgrading db: -$ cerberus-dbsync downgrade -$ cerberus-dbsync downgrade --revision base diff --git a/cerberus/db/sqlalchemy/alembic/env.py b/cerberus/db/sqlalchemy/alembic/env.py deleted file mode 100644 index 887a61b..0000000 --- a/cerberus/db/sqlalchemy/alembic/env.py +++ /dev/null @@ -1,54 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from logging import config as log_config - -from alembic import context - -from cerberus.db.sqlalchemy import api as sqla_api -from cerberus.db.sqlalchemy import models - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -log_config.fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -target_metadata = models.Base.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. 
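
The cerberus-dbsync commands listed in the README above are thin wrappers: dbsync parses the subcommand (defaulting to upgrade for backward compatibility, per cerberus/cmd/dbsync.py earlier in this patch) and cerberus.db.migration forwards to the sqlalchemy backend, which in the usual oslo layout drives alembic's command API against this alembic.ini/env.py pair. Roughly equivalent calls, using the standard alembic API rather than the deleted backend module, so treat the mapping as an assumption:

    import os

    from alembic import command
    from alembic import config as alembic_config

    ini = os.path.join(os.path.dirname(__file__), 'alembic.ini')
    cfg = alembic_config.Config(ini)

    command.upgrade(cfg, 'head')                # cerberus-dbsync upgrade
    command.revision(cfg, message='add table',  # cerberus-dbsync revision -m ...
                     autogenerate=True)
    command.stamp(cfg, 'head')                  # cerberus-dbsync stamp --revision head
    command.downgrade(cfg, 'base')              # cerberus-dbsync downgrade --revision base
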
- - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - engine = sqla_api.get_engine() - with engine.connect() as connection: - context.configure(connection=connection, - target_metadata=target_metadata) - with context.begin_transaction(): - context.run_migrations() - - -run_migrations_online() diff --git a/cerberus/db/sqlalchemy/alembic/script.py.mako b/cerberus/db/sqlalchemy/alembic/script.py.mako deleted file mode 100644 index 9570201..0000000 --- a/cerberus/db/sqlalchemy/alembic/script.py.mako +++ /dev/null @@ -1,22 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/cerberus/db/sqlalchemy/alembic/versions/2dd6320a2745_initial_migration.py b/cerberus/db/sqlalchemy/alembic/versions/2dd6320a2745_initial_migration.py deleted file mode 100644 index 651194f..0000000 --- a/cerberus/db/sqlalchemy/alembic/versions/2dd6320a2745_initial_migration.py +++ /dev/null @@ -1,116 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""initial_migration - -Revision ID: 2dd6320a2745 -Revises: None -Create Date: 2015-06-25 10:45:10.853595 - -""" - -# revision identifiers, used by Alembic. 
-revision = '2dd6320a2745' -down_revision = None - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.create_table( - 'plugin_info', - sa.Column('id', sa.Integer, primary_key=True, nullable=False), - sa.Column('uuid', sa.Text), - sa.Column('name', sa.Text), - sa.Column('version', sa.Text), - sa.Column('provider', sa.Text), - sa.Column('type', sa.Text), - sa.Column('description', sa.Text), - sa.Column('tool_name', sa.Text), - sa.Column('created_at', sa.DateTime), - sa.Column('updated_at', sa.DateTime), - sa.Column('deleted_at', sa.DateTime), - sa.Column('deleted', sa.Integer), - mysql_ENGINE='InnoDB', - mysql_DEFAULT_CHARSET='utf8' - ) - op.create_table( - 'security_report', - sa.Column('id', sa.Integer, primary_key=True, nullable=False), - sa.Column('plugin_id', sa.Text), - sa.Column('report_id', sa.VARCHAR(255), unique=True), - sa.Column('component_id', sa.Text), - sa.Column('component_type', sa.Text), - sa.Column('component_name', sa.Text), - sa.Column('project_id', sa.Text), - sa.Column('ticket_id', sa.Text), - sa.Column('title', sa.Text), - sa.Column('description', sa.Text), - sa.Column('security_rating', sa.Float), - sa.Column('vulnerabilities', sa.Text), - sa.Column('vulnerabilities_number', sa.Integer), - sa.Column('last_report_date', sa.DateTime), - sa.Column('created_at', sa.DateTime), - sa.Column('updated_at', sa.DateTime), - sa.Column('deleted_at', sa.DateTime), - sa.Column('deleted', sa.Integer), - mysql_ENGINE='InnoDB', - mysql_DEFAULT_CHARSET='UTF8' - ) - op.create_table( - 'security_alarm', - sa.Column('id', sa.Integer, primary_key=True, nullable=False), - sa.Column('plugin_id', sa.Text), - sa.Column('alarm_id', sa.VARCHAR(255), unique=True), - sa.Column('component_id', sa.Text), - sa.Column('project_id', sa.Text), - sa.Column('ticket_id', sa.Text), - sa.Column('timestamp', sa.DateTime), - sa.Column('summary', sa.Text), - sa.Column('severity', sa.Text), - sa.Column('status', sa.Text), - sa.Column('description', sa.Text), - sa.Column('created_at', sa.DateTime), - sa.Column('updated_at', sa.DateTime), - sa.Column('deleted_at', sa.DateTime), - sa.Column('deleted', sa.Integer), - mysql_engine='InnoDB', - mysql_charset='utf8' - ) - - op.create_table( - 'task', - sa.Column('id', sa.Integer, primary_key=True, nullable=False), - sa.Column('type', sa.Text), - sa.Column('plugin_id', sa.Text), - sa.Column('uuid', sa.Text), - sa.Column('name', sa.Text), - sa.Column('method', sa.Text), - sa.Column('running', sa.Boolean), - sa.Column('period', sa.Integer), - sa.Column('created_at', sa.DateTime), - sa.Column('updated_at', sa.DateTime), - sa.Column('deleted_at', sa.DateTime), - sa.Column('deleted', sa.Integer), - mysql_engine='InnoDB', - mysql_charset='utf8' - ) - - -def downgrade(): - raise NotImplementedError(('Downgrade from initial migration is' - ' unsupported.')) diff --git a/cerberus/db/sqlalchemy/alembic/versions/4426f811d4d9_text_to_varchar.py b/cerberus/db/sqlalchemy/alembic/versions/4426f811d4d9_text_to_varchar.py deleted file mode 100644 index 25c895e..0000000 --- a/cerberus/db/sqlalchemy/alembic/versions/4426f811d4d9_text_to_varchar.py +++ /dev/null @@ -1,332 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""text_to_varchar - -Revision ID: 4426f811d4d9 -Revises: 2dd6320a2745 -Create Date: 2015-06-25 10:47:00.485303 - -""" - -# revision identifiers, used by Alembic. -revision = '4426f811d4d9' -down_revision = '2dd6320a2745' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - - # In table plugin_info - op.alter_column( - table_name='plugin_info', - column_name='uuid', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='plugin_info', - column_name='name', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='plugin_info', - column_name='version', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='plugin_info', - column_name='provider', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='plugin_info', - column_name='type', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='plugin_info', - column_name='description', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='plugin_info', - column_name='tool_name', - type_=sa.VARCHAR(255) - ) - - # In table security_report, except column vulnerabilities - op.alter_column( - table_name='security_report', - column_name='plugin_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_report', - column_name='component_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_report', - column_name='component_type', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_report', - column_name='component_name', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_report', - column_name='project_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_report', - column_name='ticket_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_report', - column_name='title', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_report', - column_name='description', - type_=sa.VARCHAR(255) - ) - - # In table security_alarm - op.alter_column( - table_name='security_alarm', - column_name='plugin_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_alarm', - column_name='component_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_alarm', - column_name='project_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_alarm', - column_name='ticket_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_alarm', - column_name='summary', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_alarm', - column_name='severity', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_alarm', - column_name='status', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='security_alarm', - column_name='description', - type_=sa.VARCHAR(255) - ) - - # In table task - op.alter_column( - table_name='task', - column_name='type', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='task', - column_name='plugin_id', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='task', - column_name='uuid', - type_=sa.VARCHAR(255) - ) - op.alter_column( - 
table_name='task', - column_name='name', - type_=sa.VARCHAR(255) - ) - op.alter_column( - table_name='task', - column_name='method', - type_=sa.VARCHAR(255) - ) - - -def downgrade(): - # In table plugin_info - op.alter_column( - table_name='plugin_info', - column_name='uuid', - type_=sa.TEXT - ) - op.alter_column( - table_name='plugin_info', - column_name='name', - type_=sa.TEXT - ) - op.alter_column( - table_name='plugin_info', - column_name='version', - type_=sa.TEXT - ) - op.alter_column( - table_name='plugin_info', - column_name='provider', - type_=sa.TEXT - ) - op.alter_column( - table_name='plugin_info', - column_name='type', - type_=sa.TEXT - ) - op.alter_column( - table_name='plugin_info', - column_name='description', - type_=sa.TEXT - ) - op.alter_column( - table_name='plugin_info', - column_name='tool_name', - type_=sa.TEXT - ) - - # In table security_report, except column vulnerabilities (still Text) - # and report_id (already varchar) - op.alter_column( - table_name='security_report', - column_name='plugin_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_report', - column_name='component_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_report', - column_name='component_type', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_report', - column_name='component_name', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_report', - column_name='project_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_report', - column_name='ticket_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_report', - column_name='title', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_report', - column_name='description', - type_=sa.TEXT - ) - - # In table security_alarm, except alarm_id (already varchar) - op.alter_column( - table_name='security_alarm', - column_name='plugin_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_alarm', - column_name='component_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_alarm', - column_name='project_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_alarm', - column_name='ticket_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_alarm', - column_name='summary', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_alarm', - column_name='severity', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_alarm', - column_name='status', - type_=sa.TEXT - ) - op.alter_column( - table_name='security_alarm', - column_name='description', - type_=sa.TEXT - ) - - # In table task - op.alter_column( - table_name='task', - column_name='type', - type_=sa.TEXT - ) - op.alter_column( - table_name='task', - column_name='plugin_id', - type_=sa.TEXT - ) - op.alter_column( - table_name='task', - column_name='uuid', - type_=sa.TEXT - ) - op.alter_column( - table_name='task', - column_name='name', - type_=sa.TEXT - ) - op.alter_column( - table_name='task', - column_name='method', - type_=sa.TEXT - ) diff --git a/cerberus/db/sqlalchemy/alembic/versions/479e56a9ae3b_alter_security_report_add_uuid.py b/cerberus/db/sqlalchemy/alembic/versions/479e56a9ae3b_alter_security_report_add_uuid.py deleted file mode 100644 index e8be9dc..0000000 --- a/cerberus/db/sqlalchemy/alembic/versions/479e56a9ae3b_alter_security_report_add_uuid.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance 
with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -"""alter_security_report_add_uuid - -Revision ID: 479e56a9ae3b -Revises: 4426f811d4d9 -Create Date: 2015-06-25 10:48:06.260041 - -""" - -# revision identifiers, used by Alembic. -revision = '479e56a9ae3b' -down_revision = '4426f811d4d9' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.add_column('security_report', - sa.Column('uuid', sa.VARCHAR(255), unique=True)) - op.drop_constraint('report_id', 'security_report', type_='unique') - op.create_unique_constraint('unique_uuid', - 'security_report', - ['uuid']) - op.create_unique_constraint('unique_report_id_plugin_id', - 'security_report', - ['report_id', 'plugin_id']) - - -def downgrade(): - op.drop_column('security_report', 'uuid') - op.drop_constraint('unique_report_id_plugin_id', - 'security_report', - type_='unique') - op.create_unique_constraint('report_id', 'security_report', ['report_id']) diff --git a/cerberus/db/sqlalchemy/api.py b/cerberus/db/sqlalchemy/api.py deleted file mode 100644 index c02f7ba..0000000 --- a/cerberus/db/sqlalchemy/api.py +++ /dev/null @@ -1,400 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import sys -import threading - -from oslo.config import cfg - -from cerberus.common import exception -from cerberus.db.sqlalchemy import models -from cerberus.openstack.common.db import exception as db_exc -from cerberus.openstack.common.db.sqlalchemy import session as db_session -from cerberus.openstack.common import log - - -CONF = cfg.CONF - -LOG = log.getLogger(__name__) - - -_ENGINE_FACADE = None -_LOCK = threading.Lock() - - -_FACADE = None - - -def _create_facade_lazily(): - global _FACADE - if _FACADE is None: - _FACADE = db_session.EngineFacade( - CONF.database.connection, - **dict(CONF.database.iteritems()) - ) - return _FACADE - - -def get_engine(): - facade = _create_facade_lazily() - return facade.get_engine() - - -def get_session(**kwargs): - facade = _create_facade_lazily() - return facade.get_session(**kwargs) - - -def get_backend(): - """The backend is this module itself.""" - return sys.modules[__name__] - - -def model_query(model, *args, **kwargs): - """Query helper for simpler session usage. 
- :param session: if present, the session to use - """ - session = kwargs.get('session') or get_session() - query = session.query(model, *args) - return query - - -def _security_report_create(values): - try: - security_report_ref = models.SecurityReport() - security_report_ref.update(values) - security_report_ref.save() - except db_exc.DBDuplicateEntry as e: - LOG.exception(e) - raise exception.ReportExists(report_id=values['report_id'], - plugin_id=values['plugin_id']) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - return security_report_ref - - -def security_report_create(values): - return _security_report_create(values) - - -def _security_report_update_last_report_date(uuid, date): - try: - session = get_session() - report = model_query(models.SecurityReport, read_deleted="no", - session=session).filter(models.SecurityReport.uuid - == uuid).first() - report.last_report_date = date - report.save(session) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_report_update_last_report_date(uuid, date): - _security_report_update_last_report_date(uuid, date) - - -def _security_report_update_ticket_id(uuid, ticket_id): - try: - session = get_session() - report = model_query(models.SecurityReport, read_deleted="no", - session=session).filter(models.SecurityReport.uuid - == uuid).first() - report.ticket_id = ticket_id - report.save(session) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_report_update_ticket_id(uuid, ticket_id): - _security_report_update_ticket_id(uuid, ticket_id) - - -def _security_report_get_all(project_id=None): - try: - session = get_session() - if project_id is None: - return model_query(models.SecurityReport, read_deleted="no", - session=session).all() - else: - return model_query(models.SecurityReport, read_deleted="no", - session=session).\ - filter(models.SecurityReport.project_id == project_id).all() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_report_get_all(project_id=None): - return _security_report_get_all(project_id=project_id) - - -def _security_report_get(uuid): - try: - session = get_session() - return model_query( - models.SecurityReport, read_deleted="no", session=session).filter( - models.SecurityReport.uuid == uuid).first() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_report_get(uuid): - return _security_report_get(uuid) - - -def _security_report_get_from_report_id(report_id): - try: - session = get_session() - return model_query( - models.SecurityReport, read_deleted="no", session=session).filter( - models.SecurityReport.report_id == report_id).first() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_report_get_from_report_id(report_id): - return _security_report_get_from_report_id(report_id) - - -def _security_report_delete(uuid): - try: - session = get_session() - report = model_query( - models.SecurityReport, read_deleted="no", - session=session).filter_by(uuid=uuid) - report.delete() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_report_delete(uuid): - return _security_report_delete(uuid) - - -def _plugin_info_create(values): - try: - plugin_info_ref = models.PluginInfo() - plugin_info_ref.update(values) - plugin_info_ref.save() - except db_exc.DBDuplicateEntry: - raise exception.PluginInfoExists(plugin_id=values['id']) - except Exception as e: - 
LOG.exception(e) - raise exception.DBException() - return plugin_info_ref - - -def plugin_info_create(values): - return _plugin_info_create(values) - - -def _plugins_info_get(): - try: - session = get_session() - return model_query(models.PluginInfo, - read_deleted="no", - session=session).all() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def plugins_info_get(): - return _plugins_info_get() - - -def _plugin_info_get(name): - try: - session = get_session() - - return model_query(models.PluginInfo, - read_deleted="no", - session=session).filter(models.PluginInfo.name == - name).first() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def plugin_info_get(name): - return _plugin_info_get(name) - - -def _plugin_info_get_from_uuid(plugin_id): - try: - session = get_session() - return model_query(models.PluginInfo, - read_deleted="no", - session=session).filter(models.PluginInfo.uuid == - plugin_id).first() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def plugin_info_get_from_uuid(plugin_id): - return _plugin_info_get_from_uuid(plugin_id) - - -def _plugin_version_update(plugin_id, version): - try: - session = get_session() - plugin = model_query(models.PluginInfo, read_deleted="no", - session=session).filter(models.PluginInfo.id == - plugin_id).first() - plugin.version = version - plugin.save(session) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def plugin_version_update(plugin_id, version): - _plugin_version_update(plugin_id, version) - - -def _security_alarm_create(values): - try: - security_alarm_ref = models.SecurityAlarm() - security_alarm_ref.update(values) - security_alarm_ref.save() - except db_exc.DBDuplicateEntry as e: - LOG.exception(e) - raise exception.AlarmExists(alarm_id=values['id']) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - return security_alarm_ref - - -def security_alarm_create(values): - return _security_alarm_create(values) - - -def _security_alarm_get_all(): - try: - session = get_session() - return model_query(models.SecurityAlarm, read_deleted="no", - session=session).all() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_alarm_get_all(): - return _security_alarm_get_all() - - -def _security_alarm_get(alarm_id): - try: - session = get_session() - return model_query( - models.SecurityAlarm, read_deleted="no", session=session).filter( - models.SecurityAlarm.alarm_id == alarm_id).first() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_alarm_get(alarm_id): - return _security_alarm_get(alarm_id) - - -def _security_alarm_update_ticket_id(alarm_id, ticket_id): - try: - session = get_session() - alarm = model_query( - models.SecurityAlarm, read_deleted="no", session=session).filter( - models.SecurityAlarm.alarm_id == alarm_id).first() - alarm.ticket_id = ticket_id - - alarm.save(session) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def security_alarm_update_ticket_id(alarm_id, ticket_id): - _security_alarm_update_ticket_id(alarm_id, ticket_id) - - -def _create_task(values): - try: - task_ref = models.Task() - task_ref.update(values) - task_ref.save() - except db_exc.DBDuplicateEntry as e: - LOG.exception(e) - raise exception.TaskExists(task_id=values['uuid']) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - return task_ref - - -def create_task(values): - 
return _create_task(values) - - -def _delete_task(task_id): - try: - session = get_session() - task = model_query(models.Task, read_deleted="no", - session=session).filter_by(uuid=task_id) - task.delete() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def delete_task(task_id): - _delete_task(task_id) - - -def _update_state_task(task_id, running): - try: - session = get_session() - task = model_query(models.Task, read_deleted="no", - session=session).filter_by(uuid=task_id).first() - task.running = running - task.save(session) - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def update_state_task(task_id, running): - _update_state_task(task_id, running) - - -def _get_all_tasks(): - try: - session = get_session() - return model_query(models.Task, read_deleted="no", - session=session).all() - except Exception as e: - LOG.exception(e) - raise exception.DBException() - - -def get_all_tasks(): - return _get_all_tasks() diff --git a/cerberus/db/sqlalchemy/migration.py b/cerberus/db/sqlalchemy/migration.py deleted file mode 100644 index 0caf317..0000000 --- a/cerberus/db/sqlalchemy/migration.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os - -import alembic -from alembic import config as alembic_config -import alembic.migration as alembic_migration - -from cerberus.db.sqlalchemy import api as sqla_api - - -INIT_VERSION = 0 - - -def _alembic_config(): - path = os.path.join(os.path.dirname(__file__), 'alembic.ini') - config = alembic_config.Config(path) - return config - - -def version(config=None): - """Current database version. - - :returns: Database version - :rtype: string - """ - engine = sqla_api.get_engine() - with engine.connect() as conn: - context = alembic_migration.MigrationContext.configure(conn) - return context.get_current_revision() - - -def upgrade(revision, config=None): - """Used for upgrading database. - - :param version: Desired database version - :type version: string - """ - revision = revision or 'head' - config = config or _alembic_config() - - alembic.command.upgrade(config, revision or 'head') - - -def downgrade(revision, config=None): - """Used for downgrading database. - - :param version: Desired database version - :type version: string - """ - revision = revision or 'base' - config = config or _alembic_config() - return alembic.command.downgrade(config, revision) - - -def stamp(revision, config=None): - """Stamps database with provided revision. - Dont run any migrations. - - :param revision: Should match one from repository or head - to stamp - database with most recent revision - :type revision: string - """ - config = config or _alembic_config() - return alembic.command.stamp(config, revision=revision) - - -def revision(message=None, autogenerate=False, config=None): - """Creates template for migration. 
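(An illustrative sketch of driving the alembic wrappers defined above; it assumes CONF.database.connection points at a reachable database.)

from cerberus.db.sqlalchemy import migration

migration.upgrade('head')    # apply every revision, up to 479e56a9ae3b
print(migration.version())   # current revision of the configured database
migration.downgrade('base')  # unwind all migrations
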
- - :param message: Text that will be used for migration title - :type message: string - :param autogenerate: If True - generates diff based on current database - state - :type autogenerate: bool - """ - config = config or _alembic_config() - return alembic.command.revision(config, message=message, - autogenerate=autogenerate) diff --git a/cerberus/db/sqlalchemy/models.py b/cerberus/db/sqlalchemy/models.py deleted file mode 100644 index ab3cf27..0000000 --- a/cerberus/db/sqlalchemy/models.py +++ /dev/null @@ -1,188 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -""" -SQLAlchemy models for cerberus data. -""" - -from sqlalchemy import Boolean, Column, String, Integer, DateTime, Float, Text -from sqlalchemy.ext.declarative import declarative_base - -from oslo.config import cfg - -from cerberus.common import serialize -from cerberus.openstack.common.db.sqlalchemy import models - - -CONF = cfg.CONF - - -class CerberusBase(models.SoftDeleteMixin, - models.TimestampMixin, - models.ModelBase): - - metadata = None - - def save(self, session=None): - from cerberus.db.sqlalchemy import api - - if session is None: - session = api.get_session() - - super(CerberusBase, self).save(session=session) - -Base = declarative_base(cls=CerberusBase) - - -class PluginInfo(Base, CerberusBase): - """Plugin info""" - - __tablename__ = 'plugin_info' - __table_args__ = () - - id = Column(Integer, primary_key=True) - uuid = Column(String(255)) - name = Column(String(255)) - version = Column(String(255)) - provider = Column(String(255)) - type = Column(String(255)) - description = Column(String(255)) - tool_name = Column(String(255)) - - -class PluginInfoJsonSerializer(serialize.JsonSerializer): - """Plugin info serializer""" - - __attributes__ = ['id', 'uuid', 'name', 'version', 'provider', - 'type', 'description', 'tool_name'] - __required__ = ['id'] - __attribute_serializer__ = dict(created_at='date', deleted_at='date', - acknowledged_at='date') - __object_class__ = PluginInfo - - -class SecurityReport(Base, CerberusBase): - """Security Report""" - - __tablename__ = 'security_report' - __table_args__ = () - - id = Column(Integer, primary_key=True) - uuid = Column(String(255), unique=True) - plugin_id = Column(String(255)) - report_id = Column(String(255)) - component_id = Column(String(255)) - component_type = Column(String(255)) - component_name = Column(String(255)) - project_id = Column(String(255)) - title = Column(String(255)) - description = Column(String(255)) - security_rating = Column(Float) - vulnerabilities = Column(Text) - vulnerabilities_number = Column(Integer) - last_report_date = Column(DateTime) - ticket_id = Column(String(255)) - - -class SecurityReportJsonSerializer(serialize.JsonSerializer): - """Security report serializer""" - - __attributes__ = ['id', 'uuid', 'title', 'description', 'plugin_id', - 'report_id', 'component_id', 'component_type', - 'component_name', 'project_id', 'security_rating', - 'vulnerabilities', 'vulnerabilities_number', - 
'last_report_date', 'ticket_id', 'deleted', 'created_at', - 'deleted_at', 'updated_at'] - __required__ = ['uuid', 'title', 'component_id'] - __attribute_serializer__ = dict(created_at='date', deleted_at='date', - acknowledged_at='date') - __object_class__ = SecurityReport - - -class SecurityAlarm(Base, CerberusBase): - """Security alarm coming from Security Information and Event Manager - for example - """ - - __tablename__ = 'security_alarm' - __table_args__ = () - - id = Column(Integer, primary_key=True) - plugin_id = Column(String(255)) - alarm_id = Column(String(255), unique=True) - timestamp = Column(DateTime) - status = Column(String(255)) - severity = Column(String(255)) - project_id = Column(String(255)) - component_id = Column(String(255)) - summary = Column(String(255)) - description = Column(String(255)) - ticket_id = Column(String(255)) - - -class SecurityAlarmJsonSerializer(serialize.JsonSerializer): - """Security report serializer""" - - __attributes__ = ['id', 'plugin_id', 'alarm_id', 'timestamp', 'status', - 'severity', 'project_id', 'component_id', 'summary', - 'description', 'ticket_id', 'deleted', 'created_at', - 'deleted_at', 'updated_at'] - __required__ = ['id', 'title'] - __attribute_serializer__ = dict(created_at='date', deleted_at='date', - acknowledged_at='date') - __object_class__ = SecurityAlarm - - -class Task(Base, CerberusBase): - """Tasks for security purposes (e.g: daily scans...) - """ - __tablename__ = 'task' - __table_args__ = () - - id = Column(Integer, primary_key=True) - name = Column(String(255)) - method = Column(String(255)) - type = Column(String(255)) - period = Column(Integer) - plugin_id = Column(String(255)) - running = Column(Boolean) - uuid = Column(String(255)) - - -class TaskJsonSerializer(serialize.JsonSerializer): - """Security report serializer""" - - __attributes__ = ['id', 'name', 'method', 'type', 'period', - 'plugin_id', 'running', 'uuid', 'deleted', 'created_at', - 'deleted_at', 'updated_at'] - __required__ = ['id', ] - __attribute_serializer__ = dict(created_at='date', deleted_at='date', - acknowledged_at='date') - __object_class__ = Task - - -def register_models(engine): - """Creates database tables for all models with the given engine.""" - models = (PluginInfo, SecurityReport, SecurityAlarm, Task) - for model in models: - model.metadata.create_all(engine) - - -def unregister_models(engine): - """Drops database tables for all models with the given engine.""" - models = (PluginInfo, SecurityReport, SecurityAlarm, Task) - for model in models: - model.metadata.drop_all(engine) diff --git a/cerberus/manager.py b/cerberus/manager.py deleted file mode 100644 index beb5777..0000000 --- a/cerberus/manager.py +++ /dev/null @@ -1,592 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
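(A minimal sketch exercising the model-registration helpers from models.py above; the in-memory SQLite URL is illustrative only.)

from sqlalchemy import create_engine

from cerberus.db.sqlalchemy import models

engine = create_engine('sqlite://')   # throwaway database
models.register_models(engine)        # creates the four cerberus tables
models.unregister_models(engine)      # drops them again
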
-# - -import json -import uuid - -from oslo.config import cfg -from oslo import messaging -from stevedore import extension - -from cerberus.common import errors -from cerberus.common import exception as cerberus_exception -from cerberus.common import service -from cerberus.db.sqlalchemy import api as db_api -from cerberus import notifications -from cerberus.openstack.common import log -from cerberus.openstack.common import loopingcall -from cerberus.openstack.common import threadgroup -from plugins import base - - -OPTS = [ - cfg.StrOpt('notifier_topic', - default='notifications', - help='The topic that Cerberus uses for generating ' - 'notifications') -] - -cfg.CONF.register_opts(OPTS) - -LOG = log.getLogger(__name__) - - -_SECURITY_REPORT = 'security_report' - - -def store_report_and_notify(title, plugin_id, report_id, component_id, - component_name, component_type, project_id, - description, security_rating, vulnerabilities, - vulnerabilities_number, last_report_date): - report_uuid = uuid.uuid4() - report = {'title': title, - 'plugin_id': plugin_id, - 'uuid': str(report_uuid), - 'report_id': report_id, - 'component_id': component_id, - 'component_type': component_type, - 'component_name': component_name, - 'project_id': project_id, - 'description': description, - 'security_rating': security_rating, - 'vulnerabilities': vulnerabilities, - 'vulnerabilities_number': vulnerabilities_number} - try: - db_api.security_report_create(report) - db_api.security_report_update_last_report_date( - report_uuid, last_report_date) - notifications.send_notification('store', 'security_report', report) - except cerberus_exception.DBException: - raise - - -def store_alarm_and_notify(plugin_id, alarm_id, timestamp, status, severity, - component_id, description, summary): - alarm = {'plugin_id': plugin_id, - 'alarm_id': alarm_id, - 'timestamp': timestamp, - 'status': status, - 'severity': severity, - 'component_id': component_id, - 'description': description, - 'summary': summary} - try: - db_api.security_alarm_create(alarm) - notifications.send_notification('store', 'security_alarm', alarm) - except cerberus_exception.DBException: - raise - - -class CerberusManager(service.CerberusService): - - TASK_NAMESPACE = 'cerberus.plugins' - - @classmethod - def _get_cerberus_manager(cls): - return extension.ExtensionManager( - namespace=cls.TASK_NAMESPACE, - invoke_on_load=True, - ) - - def __init__(self): - super(CerberusManager, self).__init__() - self.notifier = None - - def _register_plugin(self, extension): - """Register plugin in database - - :param extension: stevedore extension containing the plugin to register - :return: - """ - - version = extension.entry_point.dist.version - plugin = extension.obj - db_plugin_info = db_api.plugin_info_get(plugin._name) - if db_plugin_info is None: - db_plugin_info = db_api.plugin_info_create({'name': plugin._name, - 'uuid': uuid.uuid4(), - 'version': version, - 'provider': - plugin.PROVIDER, - 'type': plugin.TYPE, - 'description': - plugin.DESCRIPTION, - 'tool_name': - plugin.TOOL_NAME}) - else: - db_api.plugin_version_update(db_plugin_info.id, version) - - plugin._uuid = db_plugin_info.uuid - - def add_stored_tasks(self): - """Add stored tasks when Cerberus starts""" - tasks = db_api.get_all_tasks() - for task in tasks: - kwargs = {} - kwargs['task_name'] = task.name - kwargs['task_type'] = task.type - kwargs['task_period'] = task.period - kwargs['task_id'] = task.uuid - kwargs['running'] = task.running - kwargs['persistent'] = True - self._add_task(task.plugin_id, 
task.method, **kwargs) - - def start(self): - """Start Cerberus Manager""" - - self.rpc_server = None - self.notification_server = None - super(CerberusManager, self).start() - - transport = messaging.get_transport(cfg.CONF) - self.notifier = notifications._get_notifier() - targets = [] - plugins = [] - self.cerberus_manager = self._get_cerberus_manager() - if not list(self.cerberus_manager): - LOG.warning('Failed to load any task handlers for %s', - self.TASK_NAMESPACE) - - for extension in self.cerberus_manager: - handler = extension.obj - LOG.debug('Plugin loaded: ' + extension.name) - LOG.debug(('Event types from %(name)s: %(type)s') - % {'name': extension.name, - 'type': ', '.join(handler._subscribedEvents)}) - - self._register_plugin(extension) - handler.register_manager(self) - targets.extend(handler.get_targets(cfg.CONF)) - plugins.append(handler) - - self.add_stored_tasks() - - if transport: - rpc_target = messaging.Target(topic='test_rpc', server='server1') - self.rpc_server = messaging.get_rpc_server(transport, rpc_target, - [self], - executor='eventlet') - - self.notification_server = messaging.get_notification_listener( - transport, targets, plugins, executor='eventlet') - - LOG.info("RPC Server starting...") - self.rpc_server.start() - self.notification_server.start() - - def _get_unique_task(self, task_id): - """Get a unique task (executed once) by its identifier - - :param task_id: the unique identifier of the task - :return: the task or None if there is not any task with this id - """ - - try: - unique_task = next( - thread for thread in self.tg.threads - if (thread.kw.get('task_id', None) == task_id)) - except StopIteration: - return None - return unique_task - - def _get_recurrent_task(self, task_id): - """Get a recurrent task by its identifier - - :param task_id: the unique identifier of the task - :return: the task or None if there is not any task with this id - """ - try: - recurrent_task = next(timer for timer in self.tg.timers if - (timer.kw.get('task_id', None) == task_id)) - except StopIteration: - return None - return recurrent_task - - def _add_unique_task(self, callback, *args, **kwargs): - """Add a unique task (executed once) without delay - - :param callback: Callable function to call when it's necessary - :param args: list of positional arguments to call the callback with - :param kwargs: dict of keyword arguments to call the callback with - :return: the thread object that is created - """ - return self.tg.add_thread(callback, *args, **kwargs) - - def _add_stopped_reccurent_task(self, callback, period, initial_delay=None, - *args, **kwargs): - """Add a recurrent task (executed periodically) without starting it - - :param callback: Callable function to call when it's necessary - :param period: the time in seconds between two executions of the task - :param initial_delay: the delay in seconds before the first execution - of the task - :param args: list of positional arguments to call the callback with - :param kwargs: dict of keyword arguments to call the callback with - """ - return self.tg.add_stopped_timer(callback, initial_delay, - *args, **kwargs) - - def _add_recurrent_task(self, callback, period, initial_delay=None, *args, - **kwargs): - """Add a recurrent task (executed periodically) - - :param callback: Callable function to call when it's necessary - :param period: the time in seconds between two executions of the task - :param initial_delay: the delay in seconds before the first execution - of the task - :param args: list of positional arguments to call
the callback with - :param kwargs: dict of keyword arguments to call the callback with - """ - return self.tg.add_timer(period, callback, initial_delay, *args, - **kwargs) - - def get_plugins(self, ctx): - '''List plugins loaded by Cerberus manager - - This method is called by the Cerberus-api rpc client - ''' - json_plugins = [] - for extension in self.cerberus_manager: - plugin = extension.obj - res = json.dumps(plugin, cls=base.PluginEncoder) - json_plugins.append(res) - return json_plugins - - def _get_plugin_from_uuid(self, plugin_id): - for extension in self.cerberus_manager: - plugin = extension.obj - if plugin._uuid == plugin_id: - return plugin - return None - - def get_plugin_from_uuid(self, ctx, uuid): - plugin = self._get_plugin_from_uuid(uuid) - if plugin is not None: - return json.dumps(plugin, cls=base.PluginEncoder) - else: - return None - - def _add_task(self, plugin_id, method_, *args, **kwargs): - '''Add a task in the Cerberus manager - - :param plugin_id: the uuid of the plugin to call method onto - :param method_: the method to call back - :param task_type: the type of task to create - :param args: some extra arguments - :param kwargs: some extra keyworded arguments - ''' - kwargs['plugin_id'] = plugin_id - task_type = kwargs.get('task_type', "unique") - plugin = self._get_plugin_from_uuid(plugin_id) - - if plugin is None: - raise errors.PluginNotFound(plugin_id) - - if (task_type.lower() == 'recurrent'): - try: - task_period = int(kwargs.get('task_period', None)) - except (TypeError, ValueError) as e: - LOG.exception(e) - raise errors.TaskPeriodNotInteger() - try: - if kwargs.get('running', True) is True: - task = self._add_recurrent_task(getattr(plugin, method_), - task_period, - *args, - **kwargs) - else: - task = self._add_stopped_reccurent_task( - getattr(plugin, method_), - task_period, - *args, - **kwargs) - except TypeError as e: - LOG.exception(e) - raise errors.MethodNotString() - - except AttributeError as e: - LOG.exception(e) - raise errors.MethodNotCallable(method_, - plugin.__class__.__name__) - else: - try: - task = self._add_unique_task( - getattr(plugin, method_), - *args, - **kwargs) - except TypeError as e: - LOG.exception(e) - raise errors.MethodNotString() - except AttributeError as e: - LOG.exception(e) - raise errors.MethodNotCallable(method_, - plugin.__class__.__name__) - - return task - - def _store_task(self, task, method_): - try: - task_period_ = task.kw.get('task_period', None) - if task_period_ is not None: - task_period = int(task_period_) - else: - task_period = task_period_ - - db_api.create_task({'name': task.kw.get('task_name', - 'Unknown'), - 'method': str(method_), - 'type': task.kw['task_type'], - 'period': task_period, - 'plugin_id': task.kw['plugin_id'], - 'running': True, - 'uuid': task.kw['task_id']}) - - except Exception as e: - LOG.exception(e) - pass - - def create_task(self, ctx, plugin_id, method_, *args, **kwargs): - """Create a task - - This method is called by a rpc client. 
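(A hedged sketch of the calling side of create_task(); the plugin UUID and method name are placeholders, while the topic and server match the rpc_target set up in start() above.)

from oslo.config import cfg
from oslo import messaging

transport = messaging.get_transport(cfg.CONF)
target = messaging.Target(topic='test_rpc', server='server1')
client = messaging.RPCClient(transport, target)
# Schedule a persistent recurrent task, run every hour.
task_id = client.call({}, 'create_task',
                      plugin_id='00000000-0000-0000-0000-000000000000',
                      method_='process', task_type='recurrent',
                      task_period=3600, persistent=True)
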
It adds a task in the manager - and stores it if the task is persistent - - :param ctx: a request context dict supplied by client - :param plugin_id: the uuid of the plugin to call method onto - :param method_: the method to call back - :param args: some extra arguments - :param kwargs: some extra keyworded arguments - """ - task_id = uuid.uuid4() - try: - task = self._add_task(plugin_id, method_, *args, - task_id=str(task_id), **kwargs) - except Exception: - raise - if kwargs.get('persistent', False) is True: - try: - self._store_task(task, method_) - except Exception as e: - LOG.exception(e) - pass - return str(task_id) - - def _stop_recurrent_task(self, task_id): - """Stop the recurrent task but does not remove it from the ThreadGroup. - - The task still exists and could be started. Plus, if the task is - running, wait for the end of its execution - :param task_id: the id of the recurrent task to stop - :return: - :raises: - StopIteration: the task is not found - """ - recurrent_task = self._get_recurrent_task(task_id) - if recurrent_task is None: - raise errors.TaskNotFound(task_id) - recurrent_task.stop() - if recurrent_task.kw.get('persistent', False) is True: - try: - db_api.update_state_task(task_id, False) - except Exception as e: - LOG.exception(e) - raise e - - def _stop_unique_task(self, task_id): - """Stop the task. This task is automatically deleted as it's not - recurrent - """ - unique_task = self._get_unique_task(task_id) - if unique_task is None: - raise errors.TaskNotFound(task_id) - unique_task.stop() - if unique_task.kw.get('persistent', False) is True: - try: - db_api.delete_task(task_id) - except Exception as e: - LOG.exception(e) - raise e - - def _stop_task(self, task_id): - task = self._get_task(task_id) - if isinstance(task, loopingcall.FixedIntervalLoopingCall): - try: - self._stop_recurrent_task(task_id) - except errors.InvalidOperation: - raise - elif isinstance(task, threadgroup.Thread): - try: - self._stop_unique_task(task_id) - except errors.InvalidOperation: - raise - - def stop_task(self, ctx, task_id): - try: - self._stop_task(task_id) - except errors.InvalidOperation: - raise - return task_id - - def _delete_recurrent_task(self, task_id): - """ - Stop the task and delete the recurrent task from the ThreadGroup. - If the task is running, wait for the end of its execution - :param task_id: the identifier of the task to delete - :return: - """ - recurrent_task = self._get_recurrent_task(task_id) - if (recurrent_task is None): - raise errors.TaskDeletionNotAllowed(task_id) - recurrent_task.stop() - try: - self.tg.timers.remove(recurrent_task) - except ValueError: - raise - if recurrent_task.kw.get('persistent', False) is True: - try: - db_api.delete_task(task_id) - except Exception as e: - LOG.exception(e) - raise e - - def delete_recurrent_task(self, ctx, task_id): - ''' - This method is designed to be called by an rpc client. - E.g: Cerberus-api - Stop the task and delete the recurrent task from the ThreadGroup. - If the task is running, wait for the end of its execution - :param ctx: a request context dict supplied by client - :param task_id: the identifier of the task to delete - ''' - try: - self._delete_recurrent_task(task_id) - except errors.InvalidOperation: - raise - return task_id - - def _force_delete_recurrent_task(self, task_id): - """ - Stop the task even if it is running and delete the recurrent task from - the ThreadGroup. 
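(Continuing the RPC sketch above, the lifecycle methods the manager exposes can be driven the same way; 'client' and 'task_id' come from the previous sketch.)

client.call({}, 'stop_task', task_id=task_id)              # pause
client.call({}, 'start_recurrent_task', task_id=task_id)   # resume
client.call({}, 'delete_recurrent_task', task_id=task_id)  # remove
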
- :param task_id: the identifier of the task to force delete - :return: - """ - recurrent_task = self._get_recurrent_task(task_id) - if (recurrent_task is None): - raise errors.TaskDeletionNotAllowed(task_id) - recurrent_task.stop() - recurrent_task.gt.kill() - try: - self.tg.timers.remove(recurrent_task) - except ValueError: - raise - if recurrent_task.kw.get('persistent', False) is True: - try: - db_api.delete_task(task_id) - except Exception as e: - LOG.exception(e) - raise e - - def force_delete_recurrent_task(self, ctx, task_id): - ''' - This method is designed to be called by an rpc client. - E.g: Cerberus-api - Stop the task even if it is running and delete the recurrent task - from the ThreadGroup. - :param ctx: a request context dict supplied by client - :param task_id: the identifier of the task to force delete - ''' - try: - self._force_delete_recurrent_task(task_id) - except errors.InvalidOperation: - raise - return task_id - - def _get_tasks(self): - tasks = [] - for timer in self.tg.timers: - tasks.append(timer) - for thread in self.tg.threads: - tasks.append(thread) - return tasks - - def _get_task(self, task_id): - task = self._get_unique_task(task_id) - task_ = self._get_recurrent_task(task_id) - if (task is None and task_ is None): - raise errors.TaskNotFound(task_id) - return task if task is not None else task_ - - def get_tasks(self, ctx): - tasks_ = [] - tasks = self._get_tasks() - for task in tasks: - if (isinstance(task, loopingcall.FixedIntervalLoopingCall)): - tasks_.append( - json.dumps(task, - cls=base.FixedIntervalLoopingCallEncoder)) - elif (isinstance(task, threadgroup.Thread)): - tasks_.append( - json.dumps(task, - cls=base.ThreadEncoder)) - return tasks_ - - def get_task(self, ctx, task_id): - try: - task = self._get_task(task_id) - except errors.InvalidOperation: - raise - if isinstance(task, loopingcall.FixedIntervalLoopingCall): - return json.dumps(task, - cls=base.FixedIntervalLoopingCallEncoder) - elif isinstance(task, threadgroup.Thread): - return json.dumps(task, - cls=base.ThreadEncoder) - - def _start_recurrent_task(self, task_id): - """ - Start the task - :param task_id: the identifier of the task to start - :return: - """ - recurrent_task = self._get_recurrent_task(task_id) - if (recurrent_task is None): - raise errors.TaskStartNotAllowed(str(task_id)) - period = recurrent_task.kw.get("task_period", None) - if recurrent_task._running is True: - raise errors.TaskStartNotPossible(str(task_id)) - else: - try: - recurrent_task.start(int(period)) - if recurrent_task.kw.get('persistent', False) is True: - db_api.update_state_task(task_id, True) - except Exception as e: - LOG.exception(e) - raise e - - def start_recurrent_task(self, ctx, task_id): - ''' - This method is designed to be called by an rpc client. - E.g: Cerberus-api - Start a recurrent task after it's being stopped - :param ctx: a request context dict supplied by client - :param task_id: the identifier of the task to start - ''' - try: - self._start_recurrent_task(task_id) - except errors.InvalidOperation: - raise - return task_id diff --git a/cerberus/notifications.py b/cerberus/notifications.py deleted file mode 100644 index 09d6d3a..0000000 --- a/cerberus/notifications.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import socket - -from oslo.config import cfg -from oslo import messaging - -from cerberus.openstack.common.gettextutils import _ # noqa -from cerberus.openstack.common import log - - -notifier_opts = [ - cfg.StrOpt('default_publisher_id', - default=None, - help='Default publisher_id for outgoing notifications'), - cfg.StrOpt('notifier_topic', - default='notifications', - help='The topic that Cerberus uses for generating ' - 'notifications') -] - -cfg.CONF.register_opts(notifier_opts) -LOG = log.getLogger(__name__) -_notifier = None - - -def _get_notifier(): - """Return a notifier object. - - If _notifier is None it means that a notifier object has not been set. - If _notifier is False it means that a notifier has previously failed to - construct. - Otherwise it is a constructed Notifier object. - """ - global _notifier - - if _notifier is None: - host = cfg.CONF.default_publisher_id or socket.gethostname() - try: - transport = messaging.get_transport(cfg.CONF) - _notifier = messaging.Notifier(transport, "security.%s" % host, - topic=cfg.CONF.notifier_topic) - except Exception: - LOG.exception("Failed to construct notifier") - _notifier = False - - return _notifier - - -def _reset_notifier(): - global _notifier - _notifier = None - - -def send_notification(operation, resource_type, payload): - """Send notification to inform observers about the affected resource. - - This method doesn't raise an exception when sending the notification fails. - - :param operation: operation being performed (created, updated, or deleted) - :param resource_type: type of resource being operated on - :param resource_id: ID of resource being operated on - """ - context = {} - service = 'security' - event_type = '%(service)s.%(resource_type)s.%(operation)s' % { - 'service': service, - 'resource_type': resource_type, - 'operation': operation} - - notifier = _get_notifier() - if notifier: - try: - LOG.info('Sending %(event_type)s notification...', - {'event_type': event_type}) - notifier.info(context, event_type, payload) - except Exception: - LOG.exception(_( - 'Failed to send %(event_type)s notification'), - {'event_type': event_type}) diff --git a/cerberus/openstack/__init__.py b/cerberus/openstack/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/openstack/common/__init__.py b/cerberus/openstack/common/__init__.py deleted file mode 100644 index d1223ea..0000000 --- a/cerberus/openstack/common/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
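(A producer-side sketch for notifications.py above; the payload keys mirror the report dict built in manager.py, and the values are placeholders.)

from cerberus import notifications

# Emits event_type 'security.security_report.store'; failures are
# logged inside send_notification(), never raised to the caller.
notifications.send_notification('store', 'security_report',
                                {'title': 'Weekly scan',
                                 'project_id': 'some-project'})
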
- -import six - - -six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox')) diff --git a/cerberus/openstack/common/_i18n.py b/cerberus/openstack/common/_i18n.py deleted file mode 100644 index 4a6691f..0000000 --- a/cerberus/openstack/common/_i18n.py +++ /dev/null @@ -1,40 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""oslo.i18n integration module. - -See http://docs.openstack.org/developer/oslo.i18n/usage.html - -""" - -import oslo.i18n - - -# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the -# application name when this module is synced into the separate -# repository. It is OK to have more than one translation function -# using the same domain, since there will still only be one message -# catalog. -_translators = oslo.i18n.TranslatorFactory(domain='oslo') - -# The primary translation function using the well-known name "_" -_ = _translators.primary - -# Translators for log levels. -# -# The abbreviated names are meant to reflect the usual use of a short -# name like '_'. The "L" is for "log" and the other letter comes from -# the level. -_LI = _translators.log_info -_LW = _translators.log_warning -_LE = _translators.log_error -_LC = _translators.log_critical diff --git a/cerberus/openstack/common/apiclient/__init__.py b/cerberus/openstack/common/apiclient/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/openstack/common/apiclient/auth.py b/cerberus/openstack/common/apiclient/auth.py deleted file mode 100644 index 1763818..0000000 --- a/cerberus/openstack/common/apiclient/auth.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# Copyright 2013 Spanish National Research Council. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# E0202: An attribute inherited from %s hide this method -# pylint: disable=E0202 - -import abc -import argparse -import os - -import six -from stevedore import extension - -from cerberus.openstack.common.apiclient import exceptions - - -_discovered_plugins = {} - - -def discover_auth_systems(): - """Discover the available auth-systems. - - This won't take into account the old style auth-systems. - """ - global _discovered_plugins - _discovered_plugins = {} - - def add_plugin(ext): - _discovered_plugins[ext.name] = ext.plugin - - ep_namespace = "cerberus.openstack.common.apiclient.auth" - mgr = extension.ExtensionManager(ep_namespace) - mgr.map(add_plugin) - - -def load_auth_system_opts(parser): - """Load options needed by the available auth-systems into a parser. 
- - This function will try to populate the parser with options from the - available plugins. - """ - group = parser.add_argument_group("Common auth options") - BaseAuthPlugin.add_common_opts(group) - for name, auth_plugin in six.iteritems(_discovered_plugins): - group = parser.add_argument_group( - "Auth-system '%s' options" % name, - conflict_handler="resolve") - auth_plugin.add_opts(group) - - -def load_plugin(auth_system): - try: - plugin_class = _discovered_plugins[auth_system] - except KeyError: - raise exceptions.AuthSystemNotFound(auth_system) - return plugin_class(auth_system=auth_system) - - -def load_plugin_from_args(args): - """Load required plugin and populate it with options. - - Try to guess auth system if it is not specified. Systems are tried in - alphabetical order. - - :type args: argparse.Namespace - :raises: AuthPluginOptionsMissing - """ - auth_system = args.os_auth_system - if auth_system: - plugin = load_plugin(auth_system) - plugin.parse_opts(args) - plugin.sufficient_options() - return plugin - - for plugin_auth_system in sorted(six.iterkeys(_discovered_plugins)): - plugin_class = _discovered_plugins[plugin_auth_system] - plugin = plugin_class() - plugin.parse_opts(args) - try: - plugin.sufficient_options() - except exceptions.AuthPluginOptionsMissing: - continue - return plugin - raise exceptions.AuthPluginOptionsMissing(["auth_system"]) - - -@six.add_metaclass(abc.ABCMeta) -class BaseAuthPlugin(object): - """Base class for authentication plugins. - - An authentication plugin needs to override at least the authenticate - method to be a valid plugin. - """ - - auth_system = None - opt_names = [] - common_opt_names = [ - "auth_system", - "username", - "password", - "tenant_name", - "token", - "auth_url", - ] - - def __init__(self, auth_system=None, **kwargs): - self.auth_system = auth_system or self.auth_system - self.opts = dict((name, kwargs.get(name)) - for name in self.opt_names) - - @staticmethod - def _parser_add_opt(parser, opt): - """Add an option to parser in two variants. - - :param opt: option name (with underscores) - """ - dashed_opt = opt.replace("_", "-") - env_var = "OS_%s" % opt.upper() - arg_default = os.environ.get(env_var, "") - arg_help = "Defaults to env[%s]." % env_var - parser.add_argument( - "--os-%s" % dashed_opt, - metavar="<%s>" % dashed_opt, - default=arg_default, - help=arg_help) - parser.add_argument( - "--os_%s" % opt, - metavar="<%s>" % dashed_opt, - help=argparse.SUPPRESS) - - @classmethod - def add_opts(cls, parser): - """Populate the parser with the options for this plugin. - """ - for opt in cls.opt_names: - # use `BaseAuthPlugin.common_opt_names` since it is never - # changed in child classes - if opt not in BaseAuthPlugin.common_opt_names: - cls._parser_add_opt(parser, opt) - - @classmethod - def add_common_opts(cls, parser): - """Add options that are common for several plugins. - """ - for opt in cls.common_opt_names: - cls._parser_add_opt(parser, opt) - - @staticmethod - def get_opt(opt_name, args): - """Return option name and value. - - :param opt_name: name of the option, e.g., "username" - :param args: parsed arguments - """ - return (opt_name, getattr(args, "os_%s" % opt_name, None)) - - def parse_opts(self, args): - """Parse the actual auth-system options if any. - - This method is expected to populate the attribute `self.opts` with a - dict containing the options and values needed to make authentication. 
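(A sketch combining the helpers in this auth module into a CLI entry point; nothing here is part of the deleted file itself.)

import argparse

from cerberus.openstack.common.apiclient import auth

parser = argparse.ArgumentParser()
auth.discover_auth_systems()        # scan entry points once at startup
auth.load_auth_system_opts(parser)  # adds --os-username, --os-auth-url, ...
args = parser.parse_args()
# Picks the plugin named by --os-auth-system, or tries each in turn;
# raises AuthPluginOptionsMissing when no plugin has enough options.
plugin = auth.load_plugin_from_args(args)
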
- """ - self.opts.update(dict(self.get_opt(opt_name, args) - for opt_name in self.opt_names)) - - def authenticate(self, http_client): - """Authenticate using plugin defined method. - - The method usually analyses `self.opts` and performs - a request to authentication server. - - :param http_client: client object that needs authentication - :type http_client: HTTPClient - :raises: AuthorizationFailure - """ - self.sufficient_options() - self._do_authenticate(http_client) - - @abc.abstractmethod - def _do_authenticate(self, http_client): - """Protected method for authentication. - """ - - def sufficient_options(self): - """Check if all required options are present. - - :raises: AuthPluginOptionsMissing - """ - missing = [opt - for opt in self.opt_names - if not self.opts.get(opt)] - if missing: - raise exceptions.AuthPluginOptionsMissing(missing) - - @abc.abstractmethod - def token_and_endpoint(self, endpoint_type, service_type): - """Return token and endpoint. - - :param service_type: Service type of the endpoint - :type service_type: string - :param endpoint_type: Type of endpoint. - Possible values: public or publicURL, - internal or internalURL, - admin or adminURL - :type endpoint_type: string - :returns: tuple of token and endpoint strings - :raises: EndpointException - """ diff --git a/cerberus/openstack/common/apiclient/base.py b/cerberus/openstack/common/apiclient/base.py deleted file mode 100644 index bd2a48e..0000000 --- a/cerberus/openstack/common/apiclient/base.py +++ /dev/null @@ -1,500 +0,0 @@ -# Copyright 2010 Jacob Kaplan-Moss -# Copyright 2011 OpenStack Foundation -# Copyright 2012 Grid Dynamics -# Copyright 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Base utilities to build API operation managers and objects on top of. -""" - -# E1102: %s is not callable -# pylint: disable=E1102 - -import abc -import copy - -import six -from six.moves.urllib import parse - -from cerberus.openstack.common.apiclient import exceptions -from cerberus.openstack.common import strutils - - -def getid(obj): - """Return id if argument is a Resource. - - Abstracts the common pattern of allowing both an object or an object's ID - (UUID) as a parameter when dealing with relationships. - """ - try: - if obj.uuid: - return obj.uuid - except AttributeError: - pass - try: - return obj.id - except AttributeError: - return obj - - -# TODO(aababilov): call run_hooks() in HookableMixin's child classes -class HookableMixin(object): - """Mixin so classes can register and run hooks.""" - _hooks_map = {} - - @classmethod - def add_hook(cls, hook_type, hook_func): - """Add a new hook of specified type. 
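(Hook registration on HookableMixin is class-level; a small sketch, where '__pre_parse_args__' is one of the hook types listed in Extension.SUPPORTED_HOOKS further down.)

from cerberus.openstack.common.apiclient import base

def log_parse(*args, **kwargs):
    print('about to parse CLI args')

base.HookableMixin.add_hook('__pre_parse_args__', log_parse)
base.HookableMixin.run_hooks('__pre_parse_args__')  # calls log_parse()
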
- - :param cls: class that registers hooks - :param hook_type: hook type, e.g., '__pre_parse_args__' - :param hook_func: hook function - """ - if hook_type not in cls._hooks_map: - cls._hooks_map[hook_type] = [] - - cls._hooks_map[hook_type].append(hook_func) - - @classmethod - def run_hooks(cls, hook_type, *args, **kwargs): - """Run all hooks of specified type. - - :param cls: class that registers hooks - :param hook_type: hook type, e.g., '__pre_parse_args__' - :param **args: args to be passed to every hook function - :param **kwargs: kwargs to be passed to every hook function - """ - hook_funcs = cls._hooks_map.get(hook_type) or [] - for hook_func in hook_funcs: - hook_func(*args, **kwargs) - - -class BaseManager(HookableMixin): - """Basic manager type providing common operations. - - Managers interact with a particular type of API (servers, flavors, images, - etc.) and provide CRUD operations for them. - """ - resource_class = None - - def __init__(self, client): - """Initializes BaseManager with `client`. - - :param client: instance of BaseClient descendant for HTTP requests - """ - super(BaseManager, self).__init__() - self.client = client - - def _list(self, url, response_key, obj_class=None, json=None): - """List the collection. - - :param url: a partial URL, e.g., '/servers' - :param response_key: the key to be looked up in response dictionary, - e.g., 'servers' - :param obj_class: class for constructing the returned objects - (self.resource_class will be used by default) - :param json: data that will be encoded as JSON and passed in POST - request (GET will be sent by default) - """ - if json: - body = self.client.post(url, json=json).json() - else: - body = self.client.get(url).json() - - if obj_class is None: - obj_class = self.resource_class - - data = body[response_key] - # NOTE(ja): keystone returns values as list as {'values': [ ... ]} - # unlike other services which just return the list... - try: - data = data['values'] - except (KeyError, TypeError): - pass - - return [obj_class(self, res, loaded=True) for res in data if res] - - def _get(self, url, response_key): - """Get an object from collection. - - :param url: a partial URL, e.g., '/servers' - :param response_key: the key to be looked up in response dictionary, - e.g., 'server' - """ - body = self.client.get(url).json() - return self.resource_class(self, body[response_key], loaded=True) - - def _head(self, url): - """Retrieve request headers for an object. - - :param url: a partial URL, e.g., '/servers' - """ - resp = self.client.head(url) - return resp.status_code == 204 - - def _post(self, url, json, response_key, return_raw=False): - """Create an object. - - :param url: a partial URL, e.g., '/servers' - :param json: data that will be encoded as JSON and passed in POST - request (GET will be sent by default) - :param response_key: the key to be looked up in response dictionary, - e.g., 'servers' - :param return_raw: flag to force returning raw JSON instead of - Python object of self.resource_class - """ - body = self.client.post(url, json=json).json() - if return_raw: - return body[response_key] - return self.resource_class(self, body[response_key]) - - def _put(self, url, json=None, response_key=None): - """Update an object with PUT method. 
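(The '/servers'/'servers' pairs used as examples in the docstrings above generalize into manager subclasses; ServerManager here is hypothetical.)

from cerberus.openstack.common.apiclient import base

class ServerManager(base.BaseManager):
    resource_class = base.Resource  # a Resource subclass in real code

    def list(self):
        return self._list('/servers', 'servers')

    def get(self, server_id):
        return self._get('/servers/%s' % server_id, 'server')
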
- - :param url: a partial URL, e.g., '/servers' - :param json: data that will be encoded as JSON and passed in POST - request (GET will be sent by default) - :param response_key: the key to be looked up in response dictionary, - e.g., 'servers' - """ - resp = self.client.put(url, json=json) - # PUT requests may not return a body - if resp.content: - body = resp.json() - if response_key is not None: - return self.resource_class(self, body[response_key]) - else: - return self.resource_class(self, body) - - def _patch(self, url, json=None, response_key=None): - """Update an object with PATCH method. - - :param url: a partial URL, e.g., '/servers' - :param json: data that will be encoded as JSON and passed in POST - request (GET will be sent by default) - :param response_key: the key to be looked up in response dictionary, - e.g., 'servers' - """ - body = self.client.patch(url, json=json).json() - if response_key is not None: - return self.resource_class(self, body[response_key]) - else: - return self.resource_class(self, body) - - def _delete(self, url): - """Delete an object. - - :param url: a partial URL, e.g., '/servers/my-server' - """ - return self.client.delete(url) - - -@six.add_metaclass(abc.ABCMeta) -class ManagerWithFind(BaseManager): - """Manager with additional `find()`/`findall()` methods.""" - - @abc.abstractmethod - def list(self): - pass - - def find(self, **kwargs): - """Find a single item with attributes matching ``**kwargs``. - - This isn't very efficient: it loads the entire list then filters on - the Python side. - """ - matches = self.findall(**kwargs) - num_matches = len(matches) - if num_matches == 0: - msg = "No %s matching %s." % (self.resource_class.__name__, kwargs) - raise exceptions.NotFound(msg) - elif num_matches > 1: - raise exceptions.NoUniqueMatch() - else: - return matches[0] - - def findall(self, **kwargs): - """Find all items with attributes matching ``**kwargs``. - - This isn't very efficient: it loads the entire list then filters on - the Python side. - """ - found = [] - searches = kwargs.items() - - for obj in self.list(): - try: - if all(getattr(obj, attr) == value - for (attr, value) in searches): - found.append(obj) - except AttributeError: - continue - - return found - - -class CrudManager(BaseManager): - """Base manager class for manipulating entities. - - Children of this class are expected to define a `collection_key` and `key`. - - - `collection_key`: Usually a plural noun by convention (e.g. `entities`); - used to refer collections in both URL's (e.g. `/v3/entities`) and JSON - objects containing a list of member resources (e.g. `{'entities': [{}, - {}, {}]}`). - - `key`: Usually a singular noun by convention (e.g. `entity`); used to - refer to an individual member of the collection. - - """ - collection_key = None - key = None - - def build_url(self, base_url=None, **kwargs): - """Builds a resource URL for the given kwargs. - - Given an example collection where `collection_key = 'entities'` and - `key = 'entity'`, the following URL's could be generated. - - By default, the URL will represent a collection of entities, e.g.:: - - /entities - - If kwargs contains an `entity_id`, then the URL will represent a - specific member, e.g.:: - - /entities/{entity_id} - - :param base_url: if provided, the generated URL will be appended to it - """ - url = base_url if base_url is not None else '' - - url += '/%s' % self.collection_key - - # do we have a specific entity? 
entity_id = kwargs.get('%s_id' % self.key) - if entity_id is not None: - url += '/%s' % entity_id - - return url - - def _filter_kwargs(self, kwargs): - """Drop null values and handle ids.""" - for key, ref in six.iteritems(kwargs.copy()): - if ref is None: - kwargs.pop(key) - else: - if isinstance(ref, Resource): - kwargs.pop(key) - kwargs['%s_id' % key] = getid(ref) - return kwargs - - def create(self, **kwargs): - kwargs = self._filter_kwargs(kwargs) - return self._post( - self.build_url(**kwargs), - {self.key: kwargs}, - self.key) - - def get(self, **kwargs): - kwargs = self._filter_kwargs(kwargs) - return self._get( - self.build_url(**kwargs), - self.key) - - def head(self, **kwargs): - kwargs = self._filter_kwargs(kwargs) - return self._head(self.build_url(**kwargs)) - - def list(self, base_url=None, **kwargs): - """List the collection. - - :param base_url: if provided, the generated URL will be appended to it - """ - kwargs = self._filter_kwargs(kwargs) - - return self._list( - '%(base_url)s%(query)s' % { - 'base_url': self.build_url(base_url=base_url, **kwargs), - 'query': '?%s' % parse.urlencode(kwargs) if kwargs else '', - }, - self.collection_key) - - def put(self, base_url=None, **kwargs): - """Update an element. - - :param base_url: if provided, the generated URL will be appended to it - """ - kwargs = self._filter_kwargs(kwargs) - - return self._put(self.build_url(base_url=base_url, **kwargs)) - - def update(self, **kwargs): - kwargs = self._filter_kwargs(kwargs) - params = kwargs.copy() - params.pop('%s_id' % self.key) - - return self._patch( - self.build_url(**kwargs), - {self.key: params}, - self.key) - - def delete(self, **kwargs): - kwargs = self._filter_kwargs(kwargs) - - return self._delete( - self.build_url(**kwargs)) - - def find(self, base_url=None, **kwargs): - """Find a single item with attributes matching ``**kwargs``. - - :param base_url: if provided, the generated URL will be appended to it - """ - kwargs = self._filter_kwargs(kwargs) - - rl = self._list( - '%(base_url)s%(query)s' % { - 'base_url': self.build_url(base_url=base_url, **kwargs), - 'query': '?%s' % parse.urlencode(kwargs) if kwargs else '', - }, - self.collection_key) - num = len(rl) - - if num == 0: - msg = "No %s matching %s." % (self.resource_class.__name__, kwargs) - raise exceptions.NotFound(404, msg) - elif num > 1: - raise exceptions.NoUniqueMatch - else: - return rl[0] - - -class Extension(HookableMixin): - """Extension descriptor.""" - - SUPPORTED_HOOKS = ('__pre_parse_args__', '__post_parse_args__') - manager_class = None - - def __init__(self, name, module): - super(Extension, self).__init__() - self.name = name - self.module = module - self._parse_extension_module() - - def _parse_extension_module(self): - self.manager_class = None - for attr_name, attr_value in self.module.__dict__.items(): - if attr_name in self.SUPPORTED_HOOKS: - self.add_hook(attr_name, attr_value) - else: - try: - if issubclass(attr_value, BaseManager): - self.manager_class = attr_value - except TypeError: - pass - - def __repr__(self): - return "<Extension '%s'>" % self.name - - -class Resource(object): - """Base class for OpenStack resources (tenant, user, etc.). - - This is pretty much just a bag for attributes. - """ - - HUMAN_ID = False - NAME_ATTR = 'name' - - def __init__(self, manager, info, loaded=False): - """Populate and bind to a manager.
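(A hypothetical CrudManager subclass, using the 'entities'/'entity' naming that the class docstring above uses as its example.)

from cerberus.openstack.common.apiclient import base

class EntityManager(base.CrudManager):
    resource_class = base.Resource
    collection_key = 'entities'  # URLs: /entities, /entities/{entity_id}
    key = 'entity'               # JSON member: {'entity': {...}}

# manager.get(entity_id='abc') would issue GET /entities/abc and unwrap
# the 'entity' key of the response body.
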
- - :param manager: BaseManager object - :param info: dictionary representing resource attributes - :param loaded: prevent lazy-loading if set to True - """ - self.manager = manager - self._info = info - self._add_details(info) - self._loaded = loaded - - def __repr__(self): - reprkeys = sorted(k - for k in self.__dict__.keys() - if k[0] != '_' and k != 'manager') - info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys) - return "<%s %s>" % (self.__class__.__name__, info) - - @property - def human_id(self): - """Human-readable ID which can be used for bash completion. - """ - if self.NAME_ATTR in self.__dict__ and self.HUMAN_ID: - return strutils.to_slug(getattr(self, self.NAME_ATTR)) - return None - - def _add_details(self, info): - for (k, v) in six.iteritems(info): - try: - setattr(self, k, v) - self._info[k] = v - except AttributeError: - # In this case we already defined the attribute on the class - pass - - def __getattr__(self, k): - if k not in self.__dict__: - #NOTE(bcwaldon): disallow lazy-loading if already loaded once - if not self.is_loaded(): - self.get() - return self.__getattr__(k) - - raise AttributeError(k) - else: - return self.__dict__[k] - - def get(self): - """Support for lazy loading details. - - Some clients, such as novaclient have the option to lazy load the - details, details which can be loaded with this function. - """ - # set_loaded() first ... so if we have to bail, we know we tried. - self.set_loaded(True) - if not hasattr(self.manager, 'get'): - return - - new = self.manager.get(self.id) - if new: - self._add_details(new._info) - - def __eq__(self, other): - if not isinstance(other, Resource): - return NotImplemented - # two resources of different types are not equal - if not isinstance(other, self.__class__): - return False - if hasattr(self, 'id') and hasattr(other, 'id'): - return self.id == other.id - return self._info == other._info - - def is_loaded(self): - return self._loaded - - def set_loaded(self, val): - self._loaded = val - - def to_dict(self): - return copy.deepcopy(self._info) diff --git a/cerberus/openstack/common/apiclient/client.py b/cerberus/openstack/common/apiclient/client.py deleted file mode 100644 index 5bc0c7d..0000000 --- a/cerberus/openstack/common/apiclient/client.py +++ /dev/null @@ -1,358 +0,0 @@ -# Copyright 2010 Jacob Kaplan-Moss -# Copyright 2011 OpenStack Foundation -# Copyright 2011 Piston Cloud Computing, Inc. -# Copyright 2013 Alessio Ababilov -# Copyright 2013 Grid Dynamics -# Copyright 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -OpenStack Client interface. Handles the REST calls and responses. 
-""" - -# E0202: An attribute inherited from %s hide this method -# pylint: disable=E0202 - -import logging -import time - -try: - import simplejson as json -except ImportError: - import json - -import requests - -from cerberus.openstack.common.apiclient import exceptions -from cerberus.openstack.common import importutils - - -_logger = logging.getLogger(__name__) - - -class HTTPClient(object): - """This client handles sending HTTP requests to OpenStack servers. - - Features: - - share authentication information between several clients to different - services (e.g., for compute and image clients); - - reissue authentication request for expired tokens; - - encode/decode JSON bodies; - - raise exceptions on HTTP errors; - - pluggable authentication; - - store authentication information in a keyring; - - store time spent for requests; - - register clients for particular services, so one can use - `http_client.identity` or `http_client.compute`; - - log requests and responses in a format that is easy to copy-and-paste - into terminal and send the same request with curl. - """ - - user_agent = "cerberus.openstack.common.apiclient" - - def __init__(self, - auth_plugin, - region_name=None, - endpoint_type="publicURL", - original_ip=None, - verify=True, - cert=None, - timeout=None, - timings=False, - keyring_saver=None, - debug=False, - user_agent=None, - http=None): - self.auth_plugin = auth_plugin - - self.endpoint_type = endpoint_type - self.region_name = region_name - - self.original_ip = original_ip - self.timeout = timeout - self.verify = verify - self.cert = cert - - self.keyring_saver = keyring_saver - self.debug = debug - self.user_agent = user_agent or self.user_agent - - self.times = [] # [("item", starttime, endtime), ...] - self.timings = timings - - # requests within the same session can reuse TCP connections from pool - self.http = http or requests.Session() - - self.cached_token = None - - def _http_log_req(self, method, url, kwargs): - if not self.debug: - return - - string_parts = [ - "curl -i", - "-X '%s'" % method, - "'%s'" % url, - ] - - for element in kwargs['headers']: - header = "-H '%s: %s'" % (element, kwargs['headers'][element]) - string_parts.append(header) - - _logger.debug("REQ: %s" % " ".join(string_parts)) - if 'data' in kwargs: - _logger.debug("REQ BODY: %s\n" % (kwargs['data'])) - - def _http_log_resp(self, resp): - if not self.debug: - return - _logger.debug( - "RESP: [%s] %s\n", - resp.status_code, - resp.headers) - if resp._content_consumed: - _logger.debug( - "RESP BODY: %s\n", - resp.text) - - def serialize(self, kwargs): - if kwargs.get('json') is not None: - kwargs['headers']['Content-Type'] = 'application/json' - kwargs['data'] = json.dumps(kwargs['json']) - try: - del kwargs['json'] - except KeyError: - pass - - def get_timings(self): - return self.times - - def reset_timings(self): - self.times = [] - - def request(self, method, url, **kwargs): - """Send an http request with the specified characteristics. - - Wrapper around `requests.Session.request` to handle tasks such as - setting headers, JSON encoding/decoding, and error handling. 
- - :param method: method of HTTP request - :param url: URL of HTTP request - :param kwargs: any other parameter that can be passed to - `requests.Session.request` (such as `headers`) or `json` - that will be encoded as JSON and used as `data` argument - """ - kwargs.setdefault("headers", {}) - kwargs["headers"]["User-Agent"] = self.user_agent - if self.original_ip: - kwargs["headers"]["Forwarded"] = "for=%s;by=%s" % ( - self.original_ip, self.user_agent) - if self.timeout is not None: - kwargs.setdefault("timeout", self.timeout) - kwargs.setdefault("verify", self.verify) - if self.cert is not None: - kwargs.setdefault("cert", self.cert) - self.serialize(kwargs) - - self._http_log_req(method, url, kwargs) - if self.timings: - start_time = time.time() - resp = self.http.request(method, url, **kwargs) - if self.timings: - self.times.append(("%s %s" % (method, url), - start_time, time.time())) - self._http_log_resp(resp) - - if resp.status_code >= 400: - _logger.debug( - "Request returned failure status: %s", - resp.status_code) - raise exceptions.from_response(resp, method, url) - - return resp - - @staticmethod - def concat_url(endpoint, url): - """Concatenate endpoint and final URL. - - E.g., "http://keystone/v2.0/" and "/tokens" are concatenated to - "http://keystone/v2.0/tokens". - - :param endpoint: the base URL - :param url: the final URL - """ - return "%s/%s" % (endpoint.rstrip("/"), url.strip("/")) - - def client_request(self, client, method, url, **kwargs): - """Send an http request using `client`'s endpoint and specified `url`. - - If the request was rejected as unauthorized (possibly because the token - is expired), issue one authorization attempt and send the request once - again. - - :param client: instance of BaseClient descendant - :param method: method of HTTP request - :param url: URL of HTTP request - :param kwargs: any other parameter that can be passed to - `HTTPClient.request` - """ - - filter_args = { - "endpoint_type": client.endpoint_type or self.endpoint_type, - "service_type": client.service_type, - } - token, endpoint = (self.cached_token, client.cached_endpoint) - just_authenticated = False - if not (token and endpoint): - try: - token, endpoint = self.auth_plugin.token_and_endpoint( - **filter_args) - except exceptions.EndpointException: - pass - if not (token and endpoint): - self.authenticate() - just_authenticated = True - token, endpoint = self.auth_plugin.token_and_endpoint( - **filter_args) - if not (token and endpoint): - raise exceptions.AuthorizationFailure( - "Cannot find endpoint or token for request") - - old_token_endpoint = (token, endpoint) - kwargs.setdefault("headers", {})["X-Auth-Token"] = token - self.cached_token = token - client.cached_endpoint = endpoint - # Perform the request once. If we get Unauthorized, then it - # might be because the auth token expired, so try to - # re-authenticate and try again. If it still fails, bail.
- try: - return self.request( - method, self.concat_url(endpoint, url), **kwargs) - except exceptions.Unauthorized as unauth_ex: - if just_authenticated: - raise - self.cached_token = None - client.cached_endpoint = None - self.authenticate() - try: - token, endpoint = self.auth_plugin.token_and_endpoint( - **filter_args) - except exceptions.EndpointException: - raise unauth_ex - if (not (token and endpoint) or - old_token_endpoint == (token, endpoint)): - raise unauth_ex - self.cached_token = token - client.cached_endpoint = endpoint - kwargs["headers"]["X-Auth-Token"] = token - return self.request( - method, self.concat_url(endpoint, url), **kwargs) - - def add_client(self, base_client_instance): - """Add a new instance of :class:`BaseClient` descendant. - - `self` will store a reference to `base_client_instance`. - - Example: - - >>> def test_clients(): - ... from keystoneclient.auth import keystone - ... from openstack.common.apiclient import client - ... auth = keystone.KeystoneAuthPlugin( - ... username="user", password="pass", tenant_name="tenant", - ... auth_url="http://auth:5000/v2.0") - ... openstack_client = client.HTTPClient(auth) - ... # create nova client - ... from novaclient.v1_1 import client - ... client.Client(openstack_client) - ... # create keystone client - ... from keystoneclient.v2_0 import client - ... client.Client(openstack_client) - ... # use them - ... openstack_client.identity.tenants.list() - ... openstack_client.compute.servers.list() - """ - service_type = base_client_instance.service_type - if service_type and not hasattr(self, service_type): - setattr(self, service_type, base_client_instance) - - def authenticate(self): - self.auth_plugin.authenticate(self) - # Store the authentication results in the keyring for later requests - if self.keyring_saver: - self.keyring_saver.save(self) - - -class BaseClient(object): - """Top-level object to access the OpenStack API. - - This client uses :class:`HTTPClient` to send requests. :class:`HTTPClient` - will handle a bunch of issues such as authentication. - """ - - service_type = None - endpoint_type = None # "publicURL" will be used - cached_endpoint = None - - def __init__(self, http_client, extensions=None): - self.http_client = http_client - http_client.add_client(self) - - # Add in any extensions... - if extensions: - for extension in extensions: - if extension.manager_class: - setattr(self, extension.name, - extension.manager_class(self)) - - def client_request(self, method, url, **kwargs): - return self.http_client.client_request( - self, method, url, **kwargs) - - def head(self, url, **kwargs): - return self.client_request("HEAD", url, **kwargs) - - def get(self, url, **kwargs): - return self.client_request("GET", url, **kwargs) - - def post(self, url, **kwargs): - return self.client_request("POST", url, **kwargs) - - def put(self, url, **kwargs): - return self.client_request("PUT", url, **kwargs) - - def delete(self, url, **kwargs): - return self.client_request("DELETE", url, **kwargs) - - def patch(self, url, **kwargs): - return self.client_request("PATCH", url, **kwargs) - - @staticmethod - def get_class(api_name, version, version_map): - """Returns the client class for the requested API version - - :param api_name: the name of the API, e.g. 
'compute', 'image', etc - :param version: the requested API version - :param version_map: a dict of client classes keyed by version - :rtype: a client class for the requested API version - """ - try: - client_path = version_map[str(version)] - except (KeyError, ValueError): - msg = "Invalid %s client version '%s'. must be one of: %s" % ( - (api_name, version, ', '.join(version_map.keys()))) - raise exceptions.UnsupportedVersion(msg) - - return importutils.import_class(client_path) diff --git a/cerberus/openstack/common/apiclient/exceptions.py b/cerberus/openstack/common/apiclient/exceptions.py deleted file mode 100644 index ada1344..0000000 --- a/cerberus/openstack/common/apiclient/exceptions.py +++ /dev/null @@ -1,459 +0,0 @@ -# Copyright 2010 Jacob Kaplan-Moss -# Copyright 2011 Nebula, Inc. -# Copyright 2013 Alessio Ababilov -# Copyright 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Exception definitions. -""" - -import inspect -import sys - -import six - - -class ClientException(Exception): - """The base exception class for all exceptions this library raises. - """ - pass - - -class MissingArgs(ClientException): - """Supplied arguments are not sufficient for calling a function.""" - def __init__(self, missing): - self.missing = missing - msg = "Missing argument(s): %s" % ", ".join(missing) - super(MissingArgs, self).__init__(msg) - - -class ValidationError(ClientException): - """Error in validation on API client side.""" - pass - - -class UnsupportedVersion(ClientException): - """User is trying to use an unsupported version of the API.""" - pass - - -class CommandError(ClientException): - """Error in CLI tool.""" - pass - - -class AuthorizationFailure(ClientException): - """Cannot authorize API client.""" - pass - - -class ConnectionRefused(ClientException): - """Cannot connect to API service.""" - pass - - -class AuthPluginOptionsMissing(AuthorizationFailure): - """Auth plugin misses some options.""" - def __init__(self, opt_names): - super(AuthPluginOptionsMissing, self).__init__( - "Authentication failed. 
Missing options: %s" % - ", ".join(opt_names)) - self.opt_names = opt_names - - -class AuthSystemNotFound(AuthorizationFailure): - """User has specified a AuthSystem that is not installed.""" - def __init__(self, auth_system): - super(AuthSystemNotFound, self).__init__( - "AuthSystemNotFound: %s" % repr(auth_system)) - self.auth_system = auth_system - - -class NoUniqueMatch(ClientException): - """Multiple entities found instead of one.""" - pass - - -class EndpointException(ClientException): - """Something is rotten in Service Catalog.""" - pass - - -class EndpointNotFound(EndpointException): - """Could not find requested endpoint in Service Catalog.""" - pass - - -class AmbiguousEndpoints(EndpointException): - """Found more than one matching endpoint in Service Catalog.""" - def __init__(self, endpoints=None): - super(AmbiguousEndpoints, self).__init__( - "AmbiguousEndpoints: %s" % repr(endpoints)) - self.endpoints = endpoints - - -class HttpError(ClientException): - """The base exception class for all HTTP exceptions. - """ - http_status = 0 - message = "HTTP Error" - - def __init__(self, message=None, details=None, - response=None, request_id=None, - url=None, method=None, http_status=None): - self.http_status = http_status or self.http_status - self.message = message or self.message - self.details = details - self.request_id = request_id - self.response = response - self.url = url - self.method = method - formatted_string = "%s (HTTP %s)" % (self.message, self.http_status) - if request_id: - formatted_string += " (Request-ID: %s)" % request_id - super(HttpError, self).__init__(formatted_string) - - -class HTTPRedirection(HttpError): - """HTTP Redirection.""" - message = "HTTP Redirection" - - -class HTTPClientError(HttpError): - """Client-side HTTP error. - - Exception for cases in which the client seems to have erred. - """ - message = "HTTP Client Error" - - -class HttpServerError(HttpError): - """Server-side HTTP error. - - Exception for cases in which the server is aware that it has - erred or is incapable of performing the request. - """ - message = "HTTP Server Error" - - -class MultipleChoices(HTTPRedirection): - """HTTP 300 - Multiple Choices. - - Indicates multiple options for the resource that the client may follow. - """ - - http_status = 300 - message = "Multiple Choices" - - -class BadRequest(HTTPClientError): - """HTTP 400 - Bad Request. - - The request cannot be fulfilled due to bad syntax. - """ - http_status = 400 - message = "Bad Request" - - -class Unauthorized(HTTPClientError): - """HTTP 401 - Unauthorized. - - Similar to 403 Forbidden, but specifically for use when authentication - is required and has failed or has not yet been provided. - """ - http_status = 401 - message = "Unauthorized" - - -class PaymentRequired(HTTPClientError): - """HTTP 402 - Payment Required. - - Reserved for future use. - """ - http_status = 402 - message = "Payment Required" - - -class Forbidden(HTTPClientError): - """HTTP 403 - Forbidden. - - The request was a valid request, but the server is refusing to respond - to it. - """ - http_status = 403 - message = "Forbidden" - - -class NotFound(HTTPClientError): - """HTTP 404 - Not Found. - - The requested resource could not be found but may be available again - in the future. - """ - http_status = 404 - message = "Not Found" - - -class MethodNotAllowed(HTTPClientError): - """HTTP 405 - Method Not Allowed. - - A request was made of a resource using a request method not supported - by that resource. 
- """ - http_status = 405 - message = "Method Not Allowed" - - -class NotAcceptable(HTTPClientError): - """HTTP 406 - Not Acceptable. - - The requested resource is only capable of generating content not - acceptable according to the Accept headers sent in the request. - """ - http_status = 406 - message = "Not Acceptable" - - -class ProxyAuthenticationRequired(HTTPClientError): - """HTTP 407 - Proxy Authentication Required. - - The client must first authenticate itself with the proxy. - """ - http_status = 407 - message = "Proxy Authentication Required" - - -class RequestTimeout(HTTPClientError): - """HTTP 408 - Request Timeout. - - The server timed out waiting for the request. - """ - http_status = 408 - message = "Request Timeout" - - -class Conflict(HTTPClientError): - """HTTP 409 - Conflict. - - Indicates that the request could not be processed because of conflict - in the request, such as an edit conflict. - """ - http_status = 409 - message = "Conflict" - - -class Gone(HTTPClientError): - """HTTP 410 - Gone. - - Indicates that the resource requested is no longer available and will - not be available again. - """ - http_status = 410 - message = "Gone" - - -class LengthRequired(HTTPClientError): - """HTTP 411 - Length Required. - - The request did not specify the length of its content, which is - required by the requested resource. - """ - http_status = 411 - message = "Length Required" - - -class PreconditionFailed(HTTPClientError): - """HTTP 412 - Precondition Failed. - - The server does not meet one of the preconditions that the requester - put on the request. - """ - http_status = 412 - message = "Precondition Failed" - - -class RequestEntityTooLarge(HTTPClientError): - """HTTP 413 - Request Entity Too Large. - - The request is larger than the server is willing or able to process. - """ - http_status = 413 - message = "Request Entity Too Large" - - def __init__(self, *args, **kwargs): - try: - self.retry_after = int(kwargs.pop('retry_after')) - except (KeyError, ValueError): - self.retry_after = 0 - - super(RequestEntityTooLarge, self).__init__(*args, **kwargs) - - -class RequestUriTooLong(HTTPClientError): - """HTTP 414 - Request-URI Too Long. - - The URI provided was too long for the server to process. - """ - http_status = 414 - message = "Request-URI Too Long" - - -class UnsupportedMediaType(HTTPClientError): - """HTTP 415 - Unsupported Media Type. - - The request entity has a media type which the server or resource does - not support. - """ - http_status = 415 - message = "Unsupported Media Type" - - -class RequestedRangeNotSatisfiable(HTTPClientError): - """HTTP 416 - Requested Range Not Satisfiable. - - The client has asked for a portion of the file, but the server cannot - supply that portion. - """ - http_status = 416 - message = "Requested Range Not Satisfiable" - - -class ExpectationFailed(HTTPClientError): - """HTTP 417 - Expectation Failed. - - The server cannot meet the requirements of the Expect request-header field. - """ - http_status = 417 - message = "Expectation Failed" - - -class UnprocessableEntity(HTTPClientError): - """HTTP 422 - Unprocessable Entity. - - The request was well-formed but was unable to be followed due to semantic - errors. - """ - http_status = 422 - message = "Unprocessable Entity" - - -class InternalServerError(HttpServerError): - """HTTP 500 - Internal Server Error. - - A generic error message, given when no more specific message is suitable. 
- """ - http_status = 500 - message = "Internal Server Error" - - -# NotImplemented is a python keyword. -class HttpNotImplemented(HttpServerError): - """HTTP 501 - Not Implemented. - - The server either does not recognize the request method, or it lacks - the ability to fulfill the request. - """ - http_status = 501 - message = "Not Implemented" - - -class BadGateway(HttpServerError): - """HTTP 502 - Bad Gateway. - - The server was acting as a gateway or proxy and received an invalid - response from the upstream server. - """ - http_status = 502 - message = "Bad Gateway" - - -class ServiceUnavailable(HttpServerError): - """HTTP 503 - Service Unavailable. - - The server is currently unavailable. - """ - http_status = 503 - message = "Service Unavailable" - - -class GatewayTimeout(HttpServerError): - """HTTP 504 - Gateway Timeout. - - The server was acting as a gateway or proxy and did not receive a timely - response from the upstream server. - """ - http_status = 504 - message = "Gateway Timeout" - - -class HttpVersionNotSupported(HttpServerError): - """HTTP 505 - HttpVersion Not Supported. - - The server does not support the HTTP protocol version used in the request. - """ - http_status = 505 - message = "HTTP Version Not Supported" - - -# _code_map contains all the classes that have http_status attribute. -_code_map = dict( - (getattr(obj, 'http_status', None), obj) - for name, obj in six.iteritems(vars(sys.modules[__name__])) - if inspect.isclass(obj) and getattr(obj, 'http_status', False) -) - - -def from_response(response, method, url): - """Returns an instance of :class:`HttpError` or subclass based on response. - - :param response: instance of `requests.Response` class - :param method: HTTP method used for request - :param url: URL used for request - """ - kwargs = { - "http_status": response.status_code, - "response": response, - "method": method, - "url": url, - "request_id": response.headers.get("x-compute-request-id"), - } - if "retry-after" in response.headers: - kwargs["retry_after"] = response.headers["retry-after"] - - content_type = response.headers.get("Content-Type", "") - if content_type.startswith("application/json"): - try: - body = response.json() - except ValueError: - pass - else: - if isinstance(body, dict): - error = list(body.values())[0] - kwargs["message"] = error.get("message") - kwargs["details"] = error.get("details") - elif content_type.startswith("text/"): - kwargs["details"] = response.text - - try: - cls = _code_map[response.status_code] - except KeyError: - if 500 <= response.status_code < 600: - cls = HttpServerError - elif 400 <= response.status_code < 500: - cls = HTTPClientError - else: - cls = HttpError - return cls(**kwargs) diff --git a/cerberus/openstack/common/apiclient/fake_client.py b/cerberus/openstack/common/apiclient/fake_client.py deleted file mode 100644 index c1dfdbe..0000000 --- a/cerberus/openstack/common/apiclient/fake_client.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -A fake server that "responds" to API methods with pre-canned responses. - -All of these responses come from the spec, so if for some reason the spec's -wrong the tests might raise AssertionError. I've indicated in comments the -places where actual behavior differs from the spec. -""" - -# W0102: Dangerous default value %s as argument -# pylint: disable=W0102 - -import json - -import requests -import six -from six.moves.urllib import parse - -from cerberus.openstack.common.apiclient import client - - -def assert_has_keys(dct, required=[], optional=[]): - for k in required: - try: - assert k in dct - except AssertionError: - extra_keys = set(dct.keys()).difference(set(required + optional)) - raise AssertionError("found unexpected keys: %s" % - list(extra_keys)) - - -class TestResponse(requests.Response): - """Wrap requests.Response and provide a convenient initialization. - """ - - def __init__(self, data): - super(TestResponse, self).__init__() - self._content_consumed = True - if isinstance(data, dict): - self.status_code = data.get('status_code', 200) - # Fake the text attribute to streamline Response creation - text = data.get('text', "") - if isinstance(text, (dict, list)): - self._content = json.dumps(text) - default_headers = { - "Content-Type": "application/json", - } - else: - self._content = text - default_headers = {} - if six.PY3 and isinstance(self._content, six.string_types): - self._content = self._content.encode('utf-8', 'strict') - self.headers = data.get('headers') or default_headers - else: - self.status_code = data - - def __eq__(self, other): - return (self.status_code == other.status_code and - self.headers == other.headers and - self._content == other._content) - - -class FakeHTTPClient(client.HTTPClient): - - def __init__(self, *args, **kwargs): - self.callstack = [] - self.fixtures = kwargs.pop("fixtures", None) or {} - if not args and "auth_plugin" not in kwargs: - args = (None, ) - super(FakeHTTPClient, self).__init__(*args, **kwargs) - - def assert_called(self, method, url, body=None, pos=-1): - """Assert that an API method was just called. - """ - expected = (method, url) - assert self.callstack, \ - "Expected %s %s but no calls were made." % expected - called = self.callstack[pos][0:2] - - assert expected == called, 'Expected %s %s; got %s %s' % \ - (expected + called) - - if body is not None: - if self.callstack[pos][3] != body: - raise AssertionError('%r != %r' % - (self.callstack[pos][3], body)) - - def assert_called_anytime(self, method, url, body=None): - """Assert that an API method was called anytime in the test. - """ - expected = (method, url) - - assert self.callstack, \ - "Expected %s %s but no calls were made."
% expected - - found = False - entry = None - for entry in self.callstack: - if expected == entry[0:2]: - found = True - break - - assert found, 'Expected %s %s; got %s' % \ - (method, url, self.callstack) - if body is not None: - assert entry[3] == body, "%s != %s" % (entry[3], body) - - self.callstack = [] - - def clear_callstack(self): - self.callstack = [] - - def authenticate(self): - pass - - def client_request(self, client, method, url, **kwargs): - # Check that certain things are called correctly - if method in ["GET", "DELETE"]: - assert "json" not in kwargs - - # Note the call - self.callstack.append( - (method, - url, - kwargs.get("headers") or {}, - kwargs.get("json") or kwargs.get("data"))) - try: - fixture = self.fixtures[url][method] - except KeyError: - pass - else: - return TestResponse({"headers": fixture[0], - "text": fixture[1]}) - - # Call the method - args = parse.parse_qsl(parse.urlparse(url)[4]) - kwargs.update(args) - munged_url = url.rsplit('?', 1)[0] - munged_url = munged_url.strip('/').replace('/', '_').replace('.', '_') - munged_url = munged_url.replace('-', '_') - - callback = "%s_%s" % (method.lower(), munged_url) - - if not hasattr(self, callback): - raise AssertionError('Called unknown API method: %s %s, ' - 'expected fakes method name: %s' % - (method, url, callback)) - - resp = getattr(self, callback)(**kwargs) - if len(resp) == 3: - status, headers, body = resp - else: - status, body = resp - headers = {} - return TestResponse({ - "status_code": status, - "text": body, - "headers": headers, - }) diff --git a/cerberus/openstack/common/cliutils.py b/cerberus/openstack/common/cliutils.py deleted file mode 100644 index a99ea4d..0000000 --- a/cerberus/openstack/common/cliutils.py +++ /dev/null @@ -1,309 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# W0603: Using the global statement -# W0621: Redefining name %s from outer scope -# pylint: disable=W0603,W0621 - -from __future__ import print_function - -import getpass -import inspect -import os -import sys -import textwrap - -import prettytable -import six -from six import moves - -from cerberus.openstack.common.apiclient import exceptions -from cerberus.openstack.common.gettextutils import _ -from cerberus.openstack.common import strutils -from cerberus.openstack.common import uuidutils - - -def validate_args(fn, *args, **kwargs): - """Check that the supplied args are sufficient for calling a function. - - >>> validate_args(lambda a: None) - Traceback (most recent call last): - ... - MissingArgs: Missing argument(s): a - >>> validate_args(lambda a, b, c, d: None, 0, c=1) - Traceback (most recent call last): - ... 
- MissingArgs: Missing argument(s): b, d - - :param fn: the function to check - :param args: the positional arguments supplied - :param kwargs: the keyword arguments supplied - """ - argspec = inspect.getargspec(fn) - - num_defaults = len(argspec.defaults or []) - required_args = argspec.args[:len(argspec.args) - num_defaults] - - def isbound(method): - return getattr(method, 'im_self', None) is not None - - if isbound(fn): - required_args.pop(0) - - missing = [arg for arg in required_args if arg not in kwargs] - missing = missing[len(args):] - if missing: - raise exceptions.MissingArgs(missing) - - -def arg(*args, **kwargs): - """Decorator for CLI args. - - Example: - - >>> @arg("name", help="Name of the new entity") - ... def entity_create(args): - ... pass - """ - def _decorator(func): - add_arg(func, *args, **kwargs) - return func - return _decorator - - -def env(*args, **kwargs): - """Returns the first environment variable set. - - If all are empty, defaults to '' or keyword arg `default`. - """ - for arg in args: - value = os.environ.get(arg) - if value: - return value - return kwargs.get('default', '') - - -def add_arg(func, *args, **kwargs): - """Bind CLI arguments to a shell.py `do_foo` function.""" - - if not hasattr(func, 'arguments'): - func.arguments = [] - - # NOTE(sirp): avoid dups that can occur when the module is shared across - # tests. - if (args, kwargs) not in func.arguments: - # Because of the semantics of decorator composition if we just append - # to the options list positional options will appear to be backwards. - func.arguments.insert(0, (args, kwargs)) - - -def unauthenticated(func): - """Adds 'unauthenticated' attribute to decorated function. - - Usage: - - >>> @unauthenticated - ... def mymethod(f): - ... pass - """ - func.unauthenticated = True - return func - - -def isunauthenticated(func): - """Checks if the function does not require authentication. - - Mark such functions with the `@unauthenticated` decorator. - - :returns: bool - """ - return getattr(func, 'unauthenticated', False) - - -def print_list(objs, fields, formatters=None, sortby_index=0, - mixed_case_fields=None): - """Print a list of objects as a table, one row per object. - - :param objs: iterable of :class:`Resource` - :param fields: attributes that correspond to columns, in order - :param formatters: `dict` of callables for field formatting - :param sortby_index: index of the field for sorting table rows - :param mixed_case_fields: fields corresponding to object attributes that - have mixed case names (e.g., 'serverId') - """ - formatters = formatters or {} - mixed_case_fields = mixed_case_fields or [] - if sortby_index is None: - kwargs = {} - else: - kwargs = {'sortby': fields[sortby_index]} - pt = prettytable.PrettyTable(fields, caching=False) - pt.align = 'l' - - for o in objs: - row = [] - for field in fields: - if field in formatters: - row.append(formatters[field](o)) - else: - if field in mixed_case_fields: - field_name = field.replace(' ', '_') - else: - field_name = field.lower().replace(' ', '_') - data = getattr(o, field_name, '') - row.append(data) - pt.add_row(row) - - print(strutils.safe_encode(pt.get_string(**kwargs))) - - -def print_dict(dct, dict_property="Property", wrap=0): - """Print a `dict` as a table of two columns.
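A minimal sketch of what `print_dict` produces, with invented keys and values (row order follows dict iteration order):

    print_dict({'name': 'cerberus', 'status': 'retired'})
    # +----------+----------+
    # | Property | Value    |
    # +----------+----------+
    # | name     | cerberus |
    # | status   | retired  |
    # +----------+----------+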
- - :param dct: `dict` to print - :param dict_property: name of the first column - :param wrap: wrapping for the second column - """ - pt = prettytable.PrettyTable([dict_property, 'Value'], caching=False) - pt.align = 'l' - for k, v in six.iteritems(dct): - # convert dict to str to check length - if isinstance(v, dict): - v = six.text_type(v) - if wrap > 0: - v = textwrap.fill(six.text_type(v), wrap) - # if value has a newline, add in multiple rows - # e.g. fault with stacktrace - if v and isinstance(v, six.string_types) and r'\n' in v: - lines = v.strip().split(r'\n') - col1 = k - for line in lines: - pt.add_row([col1, line]) - col1 = '' - else: - pt.add_row([k, v]) - print(strutils.safe_encode(pt.get_string())) - - -def get_password(max_password_prompts=3): - """Read password from TTY.""" - verify = strutils.bool_from_string(env("OS_VERIFY_PASSWORD")) - pw = None - if hasattr(sys.stdin, "isatty") and sys.stdin.isatty(): - # Check for Ctrl-D - try: - for __ in moves.range(max_password_prompts): - pw1 = getpass.getpass("OS Password: ") - if verify: - pw2 = getpass.getpass("Please verify: ") - else: - pw2 = pw1 - if pw1 == pw2 and pw1: - pw = pw1 - break - except EOFError: - pass - return pw - - -def find_resource(manager, name_or_id, **find_args): - """Look for resource in a given manager. - - Used as a helper for the _find_* methods. - Example: - - def _find_hypervisor(cs, hypervisor): - #Get a hypervisor by name or ID. - return cliutils.find_resource(cs.hypervisors, hypervisor) - """ - # first try to get entity as integer id - try: - return manager.get(int(name_or_id)) - except (TypeError, ValueError, exceptions.NotFound): - pass - - # now try to get entity as uuid - try: - tmp_id = strutils.safe_encode(name_or_id) - - if uuidutils.is_uuid_like(tmp_id): - return manager.get(tmp_id) - except (TypeError, ValueError, exceptions.NotFound): - pass - - # for str id which is not uuid - if getattr(manager, 'is_alphanum_id_allowed', False): - try: - return manager.get(name_or_id) - except exceptions.NotFound: - pass - - try: - try: - return manager.find(human_id=name_or_id, **find_args) - except exceptions.NotFound: - pass - - # finally try to find entity by name - try: - resource = getattr(manager, 'resource_class', None) - name_attr = resource.NAME_ATTR if resource else 'name' - kwargs = {name_attr: name_or_id} - kwargs.update(find_args) - return manager.find(**kwargs) - except exceptions.NotFound: - msg = _("No %(name)s with a name or " - "ID of '%(name_or_id)s' exists.") % \ - { - "name": manager.resource_class.__name__.lower(), - "name_or_id": name_or_id - } - raise exceptions.CommandError(msg) - except exceptions.NoUniqueMatch: - msg = _("Multiple %(name)s matches found for " - "'%(name_or_id)s', use an ID to be more specific.") % \ - { - "name": manager.resource_class.__name__.lower(), - "name_or_id": name_or_id - } - raise exceptions.CommandError(msg) - - -def service_type(stype): - """Adds 'service_type' attribute to decorated function. - - Usage: - @service_type('volume') - def mymethod(f): - ... 
- """ - def inner(f): - f.service_type = stype - return f - return inner - - -def get_service_type(f): - """Retrieves service type from function.""" - return getattr(f, 'service_type', None) - - -def pretty_choice_list(l): - return ', '.join("'%s'" % i for i in l) - - -def exit(msg=''): - if msg: - print (msg, file=sys.stderr) - sys.exit(1) diff --git a/cerberus/openstack/common/config/__init__.py b/cerberus/openstack/common/config/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/openstack/common/config/generator.py b/cerberus/openstack/common/config/generator.py deleted file mode 100644 index 8808dcf..0000000 --- a/cerberus/openstack/common/config/generator.py +++ /dev/null @@ -1,307 +0,0 @@ -# Copyright 2012 SINA Corporation -# Copyright 2014 Cisco Systems, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# - -"""Extracts OpenStack config option info from module(s).""" - -from __future__ import print_function - -import argparse -import imp -import os -import re -import socket -import sys -import textwrap - -from oslo.config import cfg -import six -import stevedore.named - -from cerberus.openstack.common import gettextutils -from cerberus.openstack.common import importutils - -gettextutils.install('cerberus') - -STROPT = "StrOpt" -BOOLOPT = "BoolOpt" -INTOPT = "IntOpt" -FLOATOPT = "FloatOpt" -LISTOPT = "ListOpt" -DICTOPT = "DictOpt" -MULTISTROPT = "MultiStrOpt" - -OPT_TYPES = { - STROPT: 'string value', - BOOLOPT: 'boolean value', - INTOPT: 'integer value', - FLOATOPT: 'floating point value', - LISTOPT: 'list value', - DICTOPT: 'dict value', - MULTISTROPT: 'multi valued', -} - -OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT, - FLOATOPT, LISTOPT, DICTOPT, - MULTISTROPT])) - -PY_EXT = ".py" -BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), - "../../../../")) -WORDWRAP_WIDTH = 60 - - -def raise_extension_exception(extmanager, ep, err): - raise - - -def generate(argv): - parser = argparse.ArgumentParser( - description='generate sample configuration file', - ) - parser.add_argument('-m', dest='modules', action='append') - parser.add_argument('-l', dest='libraries', action='append') - parser.add_argument('srcfiles', nargs='*') - parsed_args = parser.parse_args(argv) - - mods_by_pkg = dict() - for filepath in parsed_args.srcfiles: - pkg_name = filepath.split(os.sep)[1] - mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]), - os.path.basename(filepath).split('.')[0]]) - mods_by_pkg.setdefault(pkg_name, list()).append(mod_str) - # NOTE(lzyeval): place top level modules before packages - pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT)) - ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names) - pkg_names.extend(ext_names) - - # opts_by_group is a mapping of group name to an options list - # The options list is a list of (module, options) tuples - opts_by_group = {'DEFAULT': []} - - if parsed_args.modules: - for module_name in parsed_args.modules: - module = 
_import_module(module_name) - if module: - for group, opts in _list_opts(module): - opts_by_group.setdefault(group, []).append((module_name, - opts)) - - # Look for entry points defined in libraries (or applications) for - # option discovery, and include their return values in the output. - # - # Each entry point should be a function returning an iterable - # of pairs with the group name (or None for the default group) - # and the list of Opt instances for that group. - if parsed_args.libraries: - loader = stevedore.named.NamedExtensionManager( - 'oslo.config.opts', - names=list(set(parsed_args.libraries)), - invoke_on_load=False, - on_load_failure_callback=raise_extension_exception - ) - for ext in loader: - for group, opts in ext.plugin(): - opt_list = opts_by_group.setdefault(group or 'DEFAULT', []) - opt_list.append((ext.name, opts)) - - for pkg_name in pkg_names: - mods = mods_by_pkg.get(pkg_name) - mods.sort() - for mod_str in mods: - if mod_str.endswith('.__init__'): - mod_str = mod_str[:mod_str.rfind(".")] - - mod_obj = _import_module(mod_str) - if not mod_obj: - raise RuntimeError("Unable to import module %s" % mod_str) - - for group, opts in _list_opts(mod_obj): - opts_by_group.setdefault(group, []).append((mod_str, opts)) - - print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', [])) - for group in sorted(opts_by_group.keys()): - print_group_opts(group, opts_by_group[group]) - - -def _import_module(mod_str): - try: - if mod_str.startswith('bin.'): - imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:])) - return sys.modules[mod_str[4:]] - else: - return importutils.import_module(mod_str) - except Exception as e: - sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e))) - return None - - -def _is_in_group(opt, group): - "Check if opt is in group." - for value in group._opts.values(): - # NOTE(llu): Temporary workaround for bug #1262148, wait until - # newly released oslo.config support '==' operator. - if not(value['opt'] != opt): - return True - return False - - -def _guess_groups(opt, mod_obj): - # is it in the DEFAULT group? - if _is_in_group(opt, cfg.CONF): - return 'DEFAULT' - - # what other groups is it in? - for value in cfg.CONF.values(): - if isinstance(value, cfg.CONF.GroupAttr): - if _is_in_group(opt, value._group): - return value._group.name - - raise RuntimeError( - "Unable to find group for option %s, " - "maybe it's defined twice in the same group?" 
- % opt.name - ) - - -def _list_opts(obj): - def is_opt(o): - return (isinstance(o, cfg.Opt) and - not isinstance(o, cfg.SubCommandOpt)) - - opts = list() - for attr_str in dir(obj): - attr_obj = getattr(obj, attr_str) - if is_opt(attr_obj): - opts.append(attr_obj) - elif (isinstance(attr_obj, list) and - all(map(lambda x: is_opt(x), attr_obj))): - opts.extend(attr_obj) - - ret = {} - for opt in opts: - ret.setdefault(_guess_groups(opt, obj), []).append(opt) - return ret.items() - - -def print_group_opts(group, opts_by_module): - print("[%s]" % group) - print('') - for mod, opts in opts_by_module: - print('#') - print('# Options defined in %s' % mod) - print('#') - print('') - for opt in opts: - _print_opt(opt) - print('') - - -def _get_my_ip(): - try: - csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - csock.connect(('8.8.8.8', 80)) - (addr, port) = csock.getsockname() - csock.close() - return addr - except socket.error: - return None - - -def _sanitize_default(name, value): - """Set up a reasonably sensible default for pybasedir, my_ip and host.""" - if value.startswith(sys.prefix): - # NOTE(jd) Don't use os.path.join, because it is likely to think the - # second part is an absolute pathname and therefore drop the first - # part. - value = os.path.normpath("/usr/" + value[len(sys.prefix):]) - elif value.startswith(BASEDIR): - return value.replace(BASEDIR, '/usr/lib/python/site-packages') - elif BASEDIR in value: - return value.replace(BASEDIR, '') - elif value == _get_my_ip(): - return '10.0.0.1' - elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: - return 'cerberus' - elif value.strip() != value: - return '"%s"' % value - return value - - -def _print_opt(opt): - opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help - if not opt_help: - sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) - opt_help = "" - opt_type = None - try: - opt_type = OPTION_REGEX.search(str(type(opt))).group(0) - except (ValueError, AttributeError) as err: - sys.stderr.write("%s\n" % str(err)) - sys.exit(1) - opt_help = u'%s (%s)' % (opt_help, - OPT_TYPES[opt_type]) - print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))) - if opt.deprecated_opts: - for deprecated_opt in opt.deprecated_opts: - if deprecated_opt.name: - deprecated_group = (deprecated_opt.group if - deprecated_opt.group else "DEFAULT") - print('# Deprecated group/name - [%s]/%s' % - (deprecated_group, - deprecated_opt.name)) - try: - if opt_default is None: - print('#%s=' % opt_name) - elif opt_type == STROPT: - assert(isinstance(opt_default, six.string_types)) - print('#%s=%s' % (opt_name, _sanitize_default(opt_name, - opt_default))) - elif opt_type == BOOLOPT: - assert(isinstance(opt_default, bool)) - print('#%s=%s' % (opt_name, str(opt_default).lower())) - elif opt_type == INTOPT: - assert(isinstance(opt_default, int) and - not isinstance(opt_default, bool)) - print('#%s=%s' % (opt_name, opt_default)) - elif opt_type == FLOATOPT: - assert(isinstance(opt_default, float)) - print('#%s=%s' % (opt_name, opt_default)) - elif opt_type == LISTOPT: - assert(isinstance(opt_default, list)) - print('#%s=%s' % (opt_name, ','.join(opt_default))) - elif opt_type == DICTOPT: - assert(isinstance(opt_default, dict)) - opt_default_strlist = [str(key) + ':' + str(value) - for (key, value) in opt_default.items()] - print('#%s=%s' % (opt_name, ','.join(opt_default_strlist))) - elif opt_type == MULTISTROPT: - assert(isinstance(opt_default, list)) - if not opt_default: - opt_default = [''] - for 
default in opt_default: - print('#%s=%s' % (opt_name, default)) - print('') - except Exception: - sys.stderr.write('Error in option "%s"\n' % opt_name) - sys.exit(1) - - -def main(): - generate(sys.argv[1:]) - -if __name__ == '__main__': - main() diff --git a/cerberus/openstack/common/context.py b/cerberus/openstack/common/context.py deleted file mode 100644 index 09019ee..0000000 --- a/cerberus/openstack/common/context.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Simple class that stores security context information in the web request. - -Projects should subclass this class if they wish to enhance the request -context or provide additional information in their specific WSGI pipeline. -""" - -import itertools -import uuid - - -def generate_request_id(): - return 'req-%s' % str(uuid.uuid4()) - - -class RequestContext(object): - - """Helper class to represent useful information about a request context. - - Stores information about the security context under which the user - accesses the system, as well as additional request information. - """ - - user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}' - - def __init__(self, auth_token=None, user=None, tenant=None, domain=None, - user_domain=None, project_domain=None, is_admin=False, - read_only=False, show_deleted=False, request_id=None, - instance_uuid=None): - self.auth_token = auth_token - self.user = user - self.tenant = tenant - self.domain = domain - self.user_domain = user_domain - self.project_domain = project_domain - self.is_admin = is_admin - self.read_only = read_only - self.show_deleted = show_deleted - self.instance_uuid = instance_uuid - if not request_id: - request_id = generate_request_id() - self.request_id = request_id - - def to_dict(self): - user_idt = ( - self.user_idt_format.format(user=self.user or '-', - tenant=self.tenant or '-', - domain=self.domain or '-', - user_domain=self.user_domain or '-', - p_domain=self.project_domain or '-')) - - return {'user': self.user, - 'tenant': self.tenant, - 'domain': self.domain, - 'user_domain': self.user_domain, - 'project_domain': self.project_domain, - 'is_admin': self.is_admin, - 'read_only': self.read_only, - 'show_deleted': self.show_deleted, - 'auth_token': self.auth_token, - 'request_id': self.request_id, - 'instance_uuid': self.instance_uuid, - 'user_identity': user_idt} - - -def get_admin_context(show_deleted=False): - context = RequestContext(None, - tenant=None, - is_admin=True, - show_deleted=show_deleted) - return context - - -def get_context_from_function_and_args(function, args, kwargs): - """Find an arg of type RequestContext and return it. - - This is useful in a couple of decorators where we don't - know much about the function we're wrapping. 
- """ - - for arg in itertools.chain(kwargs.values(), args): - if isinstance(arg, RequestContext): - return arg - - return None - - -def is_user_context(context): - """Indicates if the request context is a normal user.""" - if not context: - return False - if context.is_admin: - return False - if not context.user_id or not context.project_id: - return False - return True diff --git a/cerberus/openstack/common/db/__init__.py b/cerberus/openstack/common/db/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/openstack/common/db/api.py b/cerberus/openstack/common/db/api.py deleted file mode 100644 index 0025c34..0000000 --- a/cerberus/openstack/common/db/api.py +++ /dev/null @@ -1,162 +0,0 @@ -# Copyright (c) 2013 Rackspace Hosting -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Multiple DB API backend support. - -A DB backend module should implement a method named 'get_backend' which -takes no arguments. The method can return any object that implements DB -API methods. -""" - -import functools -import logging -import threading -import time - -from cerberus.openstack.common.db import exception -from cerberus.openstack.common.gettextutils import _LE -from cerberus.openstack.common import importutils - - -LOG = logging.getLogger(__name__) - - -def safe_for_db_retry(f): - """Enable db-retry for decorated function, if config option enabled.""" - f.__dict__['enable_retry'] = True - return f - - -class wrap_db_retry(object): - """Retry db.api methods, if DBConnectionError() raised - - Retry decorated db.api methods. If we enabled `use_db_reconnect` - in config, this decorator will be applied to all db.api functions, - marked with @safe_for_db_retry decorator. - Decorator catchs DBConnectionError() and retries function in a - loop until it succeeds, or until maximum retries count will be reached. - """ - - def __init__(self, retry_interval, max_retries, inc_retry_interval, - max_retry_interval): - super(wrap_db_retry, self).__init__() - - self.retry_interval = retry_interval - self.max_retries = max_retries - self.inc_retry_interval = inc_retry_interval - self.max_retry_interval = max_retry_interval - - def __call__(self, f): - @functools.wraps(f) - def wrapper(*args, **kwargs): - next_interval = self.retry_interval - remaining = self.max_retries - - while True: - try: - return f(*args, **kwargs) - except exception.DBConnectionError as e: - if remaining == 0: - LOG.exception(_LE('DB exceeded retry limit.')) - raise exception.DBError(e) - if remaining != -1: - remaining -= 1 - LOG.exception(_LE('DB connection error.')) - # NOTE(vsergeyev): We are using patched time module, so - # this effectively yields the execution - # context to another green thread. 
- time.sleep(next_interval) - if self.inc_retry_interval: - next_interval = min( - next_interval * 2, - self.max_retry_interval - ) - return wrapper - - -class DBAPI(object): - def __init__(self, backend_name, backend_mapping=None, lazy=False, - **kwargs): - """Initialize the chosen DB API backend. - - :param backend_name: name of the backend to load - :type backend_name: str - - :param backend_mapping: backend name -> module/class to load mapping - :type backend_mapping: dict - - :param lazy: load the DB backend lazily on the first DB API method call - :type lazy: bool - - Keyword arguments: - - :keyword use_db_reconnect: retry DB transactions on disconnect or not - :type use_db_reconnect: bool - - :keyword retry_interval: seconds between transaction retries - :type retry_interval: int - - :keyword inc_retry_interval: increase retry interval or not - :type inc_retry_interval: bool - - :keyword max_retry_interval: max interval value between retries - :type max_retry_interval: int - - :keyword max_retries: max number of retries before an error is raised - :type max_retries: int - - """ - - self._backend = None - self._backend_name = backend_name - self._backend_mapping = backend_mapping or {} - self._lock = threading.Lock() - - if not lazy: - self._load_backend() - - self.use_db_reconnect = kwargs.get('use_db_reconnect', False) - self.retry_interval = kwargs.get('retry_interval', 1) - self.inc_retry_interval = kwargs.get('inc_retry_interval', True) - self.max_retry_interval = kwargs.get('max_retry_interval', 10) - self.max_retries = kwargs.get('max_retries', 20) - - def _load_backend(self): - with self._lock: - if not self._backend: - # Import the untranslated name if we don't have a mapping - backend_path = self._backend_mapping.get(self._backend_name, - self._backend_name) - backend_mod = importutils.import_module(backend_path) - self._backend = backend_mod.get_backend() - - def __getattr__(self, key): - if not self._backend: - self._load_backend() - - attr = getattr(self._backend, key) - if not hasattr(attr, '__call__'): - return attr - # NOTE(vsergeyev): If `use_db_reconnect` option is set to True, retry - # DB API methods, decorated with @safe_for_db_retry - # on disconnect. - if self.use_db_reconnect and hasattr(attr, 'enable_retry'): - attr = wrap_db_retry( - retry_interval=self.retry_interval, - max_retries=self.max_retries, - inc_retry_interval=self.inc_retry_interval, - max_retry_interval=self.max_retry_interval)(attr) - - return attr diff --git a/cerberus/openstack/common/db/exception.py b/cerberus/openstack/common/db/exception.py deleted file mode 100644 index 1be2db5..0000000 --- a/cerberus/openstack/common/db/exception.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
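Before leaving db/api.py above: a minimal sketch of how a project typically wires up `DBAPI`; the backend path and option values here are illustrative, not taken from this tree:

    from cerberus.openstack.common.db import api as db_api

    _BACKEND_MAPPING = {'sqlalchemy': 'cerberus.db.sqlalchemy.api'}

    IMPL = db_api.DBAPI('sqlalchemy', backend_mapping=_BACKEND_MAPPING,
                        lazy=True, use_db_reconnect=True,
                        retry_interval=1, max_retries=20)
    # With lazy=True the first attribute access imports the backend module
    # and calls its get_backend(); with use_db_reconnect=True, any backend
    # function decorated with @safe_for_db_retry is wrapped in wrap_db_retry.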
- -"""DB related custom exceptions.""" - -import six - -from cerberus.openstack.common.gettextutils import _ - - -class DBError(Exception): - """Wraps an implementation specific exception.""" - def __init__(self, inner_exception=None): - self.inner_exception = inner_exception - super(DBError, self).__init__(six.text_type(inner_exception)) - - -class DBDuplicateEntry(DBError): - """Wraps an implementation specific exception.""" - def __init__(self, columns=[], inner_exception=None): - self.columns = columns - super(DBDuplicateEntry, self).__init__(inner_exception) - - -class DBDeadlock(DBError): - def __init__(self, inner_exception=None): - super(DBDeadlock, self).__init__(inner_exception) - - -class DBInvalidUnicodeParameter(Exception): - message = _("Invalid Parameter: " - "Unicode is not supported by the current database.") - - -class DbMigrationError(DBError): - """Wraps migration specific exception.""" - def __init__(self, message=None): - super(DbMigrationError, self).__init__(message) - - -class DBConnectionError(DBError): - """Wraps connection specific exception.""" - pass diff --git a/cerberus/openstack/common/db/options.py b/cerberus/openstack/common/db/options.py deleted file mode 100644 index 61e4ce1..0000000 --- a/cerberus/openstack/common/db/options.py +++ /dev/null @@ -1,171 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy - -from oslo.config import cfg - - -database_opts = [ - cfg.StrOpt('sqlite_db', - deprecated_group='DEFAULT', - default='cerberus.sqlite', - help='The file name to use with SQLite'), - cfg.BoolOpt('sqlite_synchronous', - deprecated_group='DEFAULT', - default=True, - help='If True, SQLite uses synchronous mode'), - cfg.StrOpt('backend', - default='sqlalchemy', - deprecated_name='db_backend', - deprecated_group='DEFAULT', - help='The backend to use for db'), - cfg.StrOpt('connection', - help='The SQLAlchemy connection string used to connect to the ' - 'database', - secret=True, - deprecated_opts=[cfg.DeprecatedOpt('sql_connection', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_connection', - group='DATABASE'), - cfg.DeprecatedOpt('connection', - group='sql'), ]), - cfg.StrOpt('mysql_sql_mode', - default='TRADITIONAL', - help='The SQL mode to be used for MySQL sessions. ' - 'This option, including the default, overrides any ' - 'server-set SQL mode. To use whatever SQL mode ' - 'is set by the server configuration, ' - 'set this to no value. 
Example: mysql_sql_mode='), - cfg.IntOpt('idle_timeout', - default=3600, - deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_idle_timeout', - group='DATABASE'), - cfg.DeprecatedOpt('idle_timeout', - group='sql')], - help='Timeout before idle sql connections are reaped'), - cfg.IntOpt('min_pool_size', - default=1, - deprecated_opts=[cfg.DeprecatedOpt('sql_min_pool_size', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_min_pool_size', - group='DATABASE')], - help='Minimum number of SQL connections to keep open in a ' - 'pool'), - cfg.IntOpt('max_pool_size', - default=None, - deprecated_opts=[cfg.DeprecatedOpt('sql_max_pool_size', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_max_pool_size', - group='DATABASE')], - help='Maximum number of SQL connections to keep open in a ' - 'pool'), - cfg.IntOpt('max_retries', - default=10, - deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries', - group='DEFAULT'), - cfg.DeprecatedOpt('sql_max_retries', - group='DATABASE')], - help='Maximum db connection retries during startup. ' - '(setting -1 implies an infinite retry count)'), - cfg.IntOpt('retry_interval', - default=10, - deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval', - group='DEFAULT'), - cfg.DeprecatedOpt('reconnect_interval', - group='DATABASE')], - help='Interval between retries of opening a sql connection'), - cfg.IntOpt('max_overflow', - default=None, - deprecated_opts=[cfg.DeprecatedOpt('sql_max_overflow', - group='DEFAULT'), - cfg.DeprecatedOpt('sqlalchemy_max_overflow', - group='DATABASE')], - help='If set, use this value for max_overflow with sqlalchemy'), - cfg.IntOpt('connection_debug', - default=0, - deprecated_opts=[cfg.DeprecatedOpt('sql_connection_debug', - group='DEFAULT')], - help='Verbosity of SQL debugging information. 0=None, ' - '100=Everything'), - cfg.BoolOpt('connection_trace', - default=False, - deprecated_opts=[cfg.DeprecatedOpt('sql_connection_trace', - group='DEFAULT')], - help='Add python stack traces to SQL as comment strings'), - cfg.IntOpt('pool_timeout', - default=None, - deprecated_opts=[cfg.DeprecatedOpt('sqlalchemy_pool_timeout', - group='DATABASE')], - help='If set, use this value for pool_timeout with sqlalchemy'), - cfg.BoolOpt('use_db_reconnect', - default=False, - help='Enable the experimental use of database reconnect ' - 'on connection lost'), - cfg.IntOpt('db_retry_interval', - default=1, - help='seconds between db connection retries'), - cfg.BoolOpt('db_inc_retry_interval', - default=True, - help='Whether to increase interval between db connection ' - 'retries, up to db_max_retry_interval'), - cfg.IntOpt('db_max_retry_interval', - default=10, - help='max seconds between db connection retries, if ' - 'db_inc_retry_interval is enabled'), - cfg.IntOpt('db_max_retries', - default=20, - help='maximum db connection retries before error is raised. 
' - '(setting -1 implies an infinite retry count)'), -] - -CONF = cfg.CONF -CONF.register_opts(database_opts, 'database') - - -def set_defaults(sql_connection, sqlite_db, max_pool_size=None, - max_overflow=None, pool_timeout=None): - """Set defaults for configuration variables.""" - cfg.set_defaults(database_opts, - connection=sql_connection, - sqlite_db=sqlite_db) - # Update the QueuePool defaults - if max_pool_size is not None: - cfg.set_defaults(database_opts, - max_pool_size=max_pool_size) - if max_overflow is not None: - cfg.set_defaults(database_opts, - max_overflow=max_overflow) - if pool_timeout is not None: - cfg.set_defaults(database_opts, - pool_timeout=pool_timeout) - - -def list_opts(): - """Returns a list of oslo.config options available in the library. - - The returned list includes all oslo.config options which may be registered - at runtime by the library. - - Each element of the list is a tuple. The first element is the name of the - group under which the list of elements in the second element will be - registered. A group name of None corresponds to the [DEFAULT] group in - config files. - - The purpose of this is to allow tools like the Oslo sample config file - generator to discover the options exposed to users by this library. - - :returns: a list of (group_name, opts) tuples - """ - return [('database', copy.deepcopy(database_opts))] diff --git a/cerberus/openstack/common/db/sqlalchemy/__init__.py b/cerberus/openstack/common/db/sqlalchemy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/openstack/common/db/sqlalchemy/migration.py b/cerberus/openstack/common/db/sqlalchemy/migration.py deleted file mode 100644 index f728ae2..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/migration.py +++ /dev/null @@ -1,278 +0,0 @@ -# coding: utf-8 -# -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Base on code in migrate/changeset/databases/sqlite.py which is under -# the following license: -# -# The MIT License -# -# Copyright (c) 2009 Evan Rosson, Jan Dittberner, Domen Kožar -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
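All of the options above are registered under the [database] group, so a deployment exercises them from the service configuration file. A minimal sketch, with illustrative values only:

.. code:: ini

    [database]
    connection = mysql://cerberus:secret@localhost/cerberus
    backend = sqlalchemy
    max_retries = -1
    retry_interval = 10
    use_db_reconnect = false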
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -import os -import re - -from migrate.changeset import ansisql -from migrate.changeset.databases import sqlite -from migrate import exceptions as versioning_exceptions -from migrate.versioning import api as versioning_api -from migrate.versioning.repository import Repository -import sqlalchemy -from sqlalchemy.schema import UniqueConstraint - -from cerberus.openstack.common.db import exception -from cerberus.openstack.common.gettextutils import _ - - -def _get_unique_constraints(self, table): - """Retrieve information about existing unique constraints of the table - - This feature is needed for _recreate_table() to work properly. - Unfortunately, it's not available in sqlalchemy 0.7.x/0.8.x. - - """ - - data = table.metadata.bind.execute( - """SELECT sql - FROM sqlite_master - WHERE - type='table' AND - name=:table_name""", - table_name=table.name - ).fetchone()[0] - - UNIQUE_PATTERN = "CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)" - return [ - UniqueConstraint( - *[getattr(table.columns, c.strip(' "')) for c in cols.split(",")], - name=name - ) - for name, cols in re.findall(UNIQUE_PATTERN, data) - ] - - -def _recreate_table(self, table, column=None, delta=None, omit_uniques=None): - """Recreate the table properly - - Unlike the corresponding original method of sqlalchemy-migrate this one - doesn't drop existing unique constraints when creating a new one. - - """ - - table_name = self.preparer.format_table(table) - - # we remove all indexes so as not to have - # problems during copy and re-create - for index in table.indexes: - index.drop() - - # reflect existing unique constraints - for uc in self._get_unique_constraints(table): - table.append_constraint(uc) - # omit given unique constraints when creating a new table if required - table.constraints = set([ - cons for cons in table.constraints - if omit_uniques is None or cons.name not in omit_uniques - ]) - - self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name) - self.execute() - - insertion_string = self._modify_table(table, column, delta) - - table.create(bind=self.connection) - self.append(insertion_string % {'table_name': table_name}) - self.execute() - self.append('DROP TABLE migration_tmp') - self.execute() - - -def _visit_migrate_unique_constraint(self, *p, **k): - """Drop the given unique constraint - - The corresponding original method of sqlalchemy-migrate just - raises NotImplemented error - - """ - - self.recreate_table(p[0].table, omit_uniques=[p[0].name]) - - -def patch_migrate(): - """A workaround for SQLite's inability to alter things - - SQLite abilities to alter tables are very limited (please read - http://www.sqlite.org/lang_altertable.html for more details). - E. g. one can't drop a column or a constraint in SQLite. The - workaround for this is to recreate the original table omitting - the corresponding constraint (or column). - - sqlalchemy-migrate library has recreate_table() method that - implements this workaround, but it does it wrong: - - - information about unique constraints of a table - is not retrieved. 
So if you have a table with one - unique constraint and a migration adding another one - you will end up with a table that has only the - latter unique constraint, and the former will be lost - - - dropping of unique constraints is not supported at all - - The proper way to fix this is to provide a pull-request to - sqlalchemy-migrate, but the project seems to be dead. So we - can go on with monkey-patching of the lib at least for now. - - """ - - # this patch is needed to ensure that recreate_table() doesn't drop - # existing unique constraints of the table when creating a new one - helper_cls = sqlite.SQLiteHelper - helper_cls.recreate_table = _recreate_table - helper_cls._get_unique_constraints = _get_unique_constraints - - # this patch is needed to be able to drop existing unique constraints - constraint_cls = sqlite.SQLiteConstraintDropper - constraint_cls.visit_migrate_unique_constraint = \ - _visit_migrate_unique_constraint - constraint_cls.__bases__ = (ansisql.ANSIColumnDropper, - sqlite.SQLiteConstraintGenerator) - - -def db_sync(engine, abs_path, version=None, init_version=0, sanity_check=True): - """Upgrade or downgrade a database. - - Function runs the upgrade() or downgrade() functions in change scripts. - - :param engine: SQLAlchemy engine instance for a given database - :param abs_path: Absolute path to migrate repository. - :param version: Database will upgrade/downgrade until this version. - If None - database will update to the latest - available version. - :param init_version: Initial database version - :param sanity_check: Require schema sanity checking for all tables - """ - - if version is not None: - try: - version = int(version) - except ValueError: - raise exception.DbMigrationError( - message=_("version should be an integer")) - - current_version = db_version(engine, abs_path, init_version) - repository = _find_migrate_repo(abs_path) - if sanity_check: - _db_schema_sanity_check(engine) - if version is None or version > current_version: - return versioning_api.upgrade(engine, repository, version) - else: - return versioning_api.downgrade(engine, repository, - version) - - -def _db_schema_sanity_check(engine): - """Ensure all database tables were created with required parameters. - - :param engine: SQLAlchemy engine instance for a given database - - """ - - if engine.name == 'mysql': - onlyutf8_sql = ('SELECT TABLE_NAME,TABLE_COLLATION ' - 'from information_schema.TABLES ' - 'where TABLE_SCHEMA=%s and ' - 'TABLE_COLLATION NOT LIKE "%%utf8%%"') - - # NOTE(morganfainberg): exclude the sqlalchemy-migrate and alembic - # versioning tables from the tables we need to verify utf8 status on. - # Non-standard table names are not supported. - EXCLUDED_TABLES = ['migrate_version', 'alembic_version'] - - table_names = [res[0] for res in - engine.execute(onlyutf8_sql, engine.url.database) if - res[0].lower() not in EXCLUDED_TABLES] - - if len(table_names) > 0: - raise ValueError(_('Tables "%s" have non utf8 collation, ' - 'please make sure all tables are CHARSET=utf8' - ) % ','.join(table_names)) - - -def db_version(engine, abs_path, init_version): - """Show the current version of the repository. 
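The rename/copy/drop sequence that _recreate_table() appends is easiest to see on a bare sqlite3 connection. A standalone illustration, with an invented schema, of removing a unique constraint the way the patched helper does:

.. code:: python

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.executescript("""
        CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT,
                        CONSTRAINT uniq_t0name UNIQUE (name));
        INSERT INTO t (name) VALUES ('a');

        -- SQLite cannot DROP a constraint, so rebuild the table without it:
        ALTER TABLE t RENAME TO migration_tmp;
        CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT);
        INSERT INTO t (id, name) SELECT id, name FROM migration_tmp;
        DROP TABLE migration_tmp;
    """)
    conn.execute("INSERT INTO t (name) VALUES ('a')")  # duplicate now accepted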
- - :param engine: SQLAlchemy engine instance for a given database - :param abs_path: Absolute path to migrate repository - :param version: Initial database version - """ - repository = _find_migrate_repo(abs_path) - try: - return versioning_api.db_version(engine, repository) - except versioning_exceptions.DatabaseNotControlledError: - meta = sqlalchemy.MetaData() - meta.reflect(bind=engine) - tables = meta.tables - if len(tables) == 0 or 'alembic_version' in tables: - db_version_control(engine, abs_path, version=init_version) - return versioning_api.db_version(engine, repository) - else: - raise exception.DbMigrationError( - message=_( - "The database is not under version control, but has " - "tables. Please stamp the current version of the schema " - "manually.")) - - -def db_version_control(engine, abs_path, version=None): - """Mark a database as under this repository's version control. - - Once a database is under version control, schema changes should - only be done via change scripts in this repository. - - :param engine: SQLAlchemy engine instance for a given database - :param abs_path: Absolute path to migrate repository - :param version: Initial database version - """ - repository = _find_migrate_repo(abs_path) - versioning_api.version_control(engine, repository, version) - return version - - -def _find_migrate_repo(abs_path): - """Get the project's change script repository - - :param abs_path: Absolute path to migrate repository - """ - if not os.path.exists(abs_path): - raise exception.DbMigrationError("Path %s not found" % abs_path) - return Repository(abs_path) diff --git a/cerberus/openstack/common/db/sqlalchemy/migration_cli/__init__.py b/cerberus/openstack/common/db/sqlalchemy/migration_cli/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_alembic.py b/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_alembic.py deleted file mode 100644 index 039ed47..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_alembic.py +++ /dev/null @@ -1,78 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os - -import alembic -from alembic import config as alembic_config -import alembic.migration as alembic_migration - -from cerberus.openstack.common.db.sqlalchemy.migration_cli import ext_base -from cerberus.openstack.common.db.sqlalchemy import session as db_session - - -class AlembicExtension(ext_base.MigrationExtensionBase): - - order = 2 - - @property - def enabled(self): - return os.path.exists(self.alembic_ini_path) - - def __init__(self, migration_config): - """Extension to provide alembic features. 
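A hedged usage sketch for the migrate helpers above (db_sync, db_version, db_version_control); the repository path and target version are placeholders, and the calls are shown commented out because the module defining them is being removed:

.. code:: python

    from sqlalchemy import create_engine

    engine = create_engine('sqlite:///cerberus.sqlite')

    # db_sync(engine, '/path/to/migrate_repo')        # upgrade to latest
    # db_sync(engine, '/path/to/migrate_repo', 42)    # move to version 42
    # db_version(engine, '/path/to/migrate_repo', 0)  # current version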
- - :param migration_config: Stores specific configuration for migrations - :type migration_config: dict - """ - self.alembic_ini_path = migration_config.get('alembic_ini_path', '') - self.config = alembic_config.Config(self.alembic_ini_path) - # option should be used if script is not in default directory - repo_path = migration_config.get('alembic_repo_path') - if repo_path: - self.config.set_main_option('script_location', repo_path) - self.db_url = migration_config['db_url'] - - def upgrade(self, version): - return alembic.command.upgrade(self.config, version or 'head') - - def downgrade(self, version): - if isinstance(version, int) or version is None or version.isdigit(): - version = 'base' - return alembic.command.downgrade(self.config, version) - - def version(self): - engine = db_session.create_engine(self.db_url) - with engine.connect() as conn: - context = alembic_migration.MigrationContext.configure(conn) - return context.get_current_revision() - - def revision(self, message='', autogenerate=False): - """Creates template for migration. - - :param message: Text that will be used for migration title - :type message: string - :param autogenerate: If True - generates diff based on current database - state - :type autogenerate: bool - """ - return alembic.command.revision(self.config, message=message, - autogenerate=autogenerate) - - def stamp(self, revision): - """Stamps database with provided revision. - - :param revision: Should match one from repository or head - to stamp - database with most recent revision - :type revision: string - """ - return alembic.command.stamp(self.config, revision=revision) diff --git a/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_base.py b/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_base.py deleted file mode 100644 index 271cd0a..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_base.py +++ /dev/null @@ -1,79 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class MigrationExtensionBase(object): - - #used to sort migration in logical order - order = 0 - - @property - def enabled(self): - """Used for availability verification of a plugin. - - :rtype: bool - """ - return False - - @abc.abstractmethod - def upgrade(self, version): - """Used for upgrading database. - - :param version: Desired database version - :type version: string - """ - - @abc.abstractmethod - def downgrade(self, version): - """Used for downgrading database. - - :param version: Desired database version - :type version: string - """ - - @abc.abstractmethod - def version(self): - """Current database version. - - :returns: Databse version - :rtype: string - """ - - def revision(self, *args, **kwargs): - """Used to generate migration script. - - In migration engines that support this feature, it should generate - new migration script. - - Accept arbitrary set of arguments. 
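The migration_config argument consumed above is a plain dict. An example instance, with hypothetical paths and URL but keys taken from the deleted code:

.. code:: python

    migration_config = {
        'alembic_ini_path': '/etc/cerberus/alembic.ini',
        'alembic_repo_path': '/opt/cerberus/alembic',  # optional override
        'db_url': 'mysql://cerberus:secret@localhost/cerberus',
    }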
- """ - raise NotImplementedError() - - def stamp(self, *args, **kwargs): - """Stamps database based on plugin features. - - Accept arbitrary set of arguments. - """ - raise NotImplementedError() - - def __cmp__(self, other): - """Used for definition of plugin order. - - :param other: MigrationExtensionBase instance - :rtype: bool - """ - return self.order > other.order diff --git a/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_migrate.py b/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_migrate.py deleted file mode 100644 index 4758c4f..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/migration_cli/ext_migrate.py +++ /dev/null @@ -1,69 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import os - -from cerberus.openstack.common.db.sqlalchemy import migration -from cerberus.openstack.common.db.sqlalchemy.migration_cli import ext_base -from cerberus.openstack.common.db.sqlalchemy import session as db_session -from cerberus.openstack.common.gettextutils import _LE - - -LOG = logging.getLogger(__name__) - - -class MigrateExtension(ext_base.MigrationExtensionBase): - """Extension to provide sqlalchemy-migrate features. - - :param migration_config: Stores specific configuration for migrations - :type migration_config: dict - """ - - order = 1 - - def __init__(self, migration_config): - self.repository = migration_config.get('migration_repo_path', '') - self.init_version = migration_config.get('init_version', 0) - self.db_url = migration_config['db_url'] - self.engine = db_session.create_engine(self.db_url) - - @property - def enabled(self): - return os.path.exists(self.repository) - - def upgrade(self, version): - version = None if version == 'head' else version - return migration.db_sync( - self.engine, self.repository, version, - init_version=self.init_version) - - def downgrade(self, version): - try: - #version for migrate should be valid int - else skip - if version in ('base', None): - version = self.init_version - version = int(version) - return migration.db_sync( - self.engine, self.repository, version, - init_version=self.init_version) - except ValueError: - LOG.error( - _LE('Migration number for migrate plugin must be valid ' - 'integer or empty, if you want to downgrade ' - 'to initial state') - ) - raise - - def version(self): - return migration.db_version( - self.engine, self.repository, init_version=self.init_version) diff --git a/cerberus/openstack/common/db/sqlalchemy/migration_cli/manager.py b/cerberus/openstack/common/db/sqlalchemy/migration_cli/manager.py deleted file mode 100644 index 1184293..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/migration_cli/manager.py +++ /dev/null @@ -1,71 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from stevedore import enabled - - -MIGRATION_NAMESPACE = 'cerberus.openstack.common.migration' - - -def check_plugin_enabled(ext): - """Used for EnabledExtensionManager""" - return ext.obj.enabled - - -class MigrationManager(object): - - def __init__(self, migration_config): - self._manager = enabled.EnabledExtensionManager( - MIGRATION_NAMESPACE, - check_plugin_enabled, - invoke_kwds={'migration_config': migration_config}, - invoke_on_load=True - ) - if not self._plugins: - raise ValueError('There must be at least one plugin active.') - - @property - def _plugins(self): - return sorted(ext.obj for ext in self._manager.extensions) - - def upgrade(self, revision): - """Upgrade database with all available backends.""" - results = [] - for plugin in self._plugins: - results.append(plugin.upgrade(revision)) - return results - - def downgrade(self, revision): - """Downgrade database with available backends.""" - #downgrading should be performed in reversed order - results = [] - for plugin in reversed(self._plugins): - results.append(plugin.downgrade(revision)) - return results - - def version(self): - """Return last version of db.""" - last = None - for plugin in self._plugins: - version = plugin.version() - if version: - last = version - return last - - def revision(self, message, autogenerate): - """Generate template or autogenerated revision.""" - #revision should be done only by last plugin - return self._plugins[-1].revision(message, autogenerate) - - def stamp(self, revision): - """Create stamp for a given revision.""" - return self._plugins[-1].stamp(revision) diff --git a/cerberus/openstack/common/db/sqlalchemy/models.py b/cerberus/openstack/common/db/sqlalchemy/models.py deleted file mode 100644 index ccc77f6..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/models.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright (c) 2011 X.commerce, a business unit of eBay Inc. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Piston Cloud Computing, Inc. -# Copyright 2012 Cloudscaling Group, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -SQLAlchemy models. 
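stevedore discovers these extensions through setuptools entry points in the cerberus.openstack.common.migration namespace. The patch does not show how they were registered, but a typical setup.cfg stanza for the two plugins deleted above would look like:

.. code:: ini

    [entry_points]
    cerberus.openstack.common.migration =
        alembic = cerberus.openstack.common.db.sqlalchemy.migration_cli.ext_alembic:AlembicExtension
        migrate = cerberus.openstack.common.db.sqlalchemy.migration_cli.ext_migrate:MigrateExtension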
-""" - -import six - -from sqlalchemy import Column, Integer -from sqlalchemy import DateTime -from sqlalchemy.orm import object_mapper - -from cerberus.openstack.common import timeutils - - -class ModelBase(six.Iterator): - """Base class for models.""" - __table_initialized__ = False - - def save(self, session): - """Save this object.""" - - # NOTE(boris-42): This part of code should be look like: - # session.add(self) - # session.flush() - # But there is a bug in sqlalchemy and eventlet that - # raises NoneType exception if there is no running - # transaction and rollback is called. As long as - # sqlalchemy has this bug we have to create transaction - # explicitly. - with session.begin(subtransactions=True): - session.add(self) - session.flush() - - def __setitem__(self, key, value): - setattr(self, key, value) - - def __getitem__(self, key): - return getattr(self, key) - - def get(self, key, default=None): - return getattr(self, key, default) - - @property - def _extra_keys(self): - """Specifies custom fields - - Subclasses can override this property to return a list - of custom fields that should be included in their dict - representation. - - For reference check tests/db/sqlalchemy/test_models.py - """ - return [] - - def __iter__(self): - columns = dict(object_mapper(self).columns).keys() - # NOTE(russellb): Allow models to specify other keys that can be looked - # up, beyond the actual db columns. An example would be the 'name' - # property for an Instance. - columns.extend(self._extra_keys) - self._i = iter(columns) - return self - - # In Python 3, __next__() has replaced next(). - def __next__(self): - n = six.advance_iterator(self._i) - return n, getattr(self, n) - - def next(self): - return self.__next__() - - def update(self, values): - """Make the model object behave like a dict.""" - for k, v in six.iteritems(values): - setattr(self, k, v) - - def iteritems(self): - """Make the model object behave like a dict. - - Includes attributes from joins. - """ - local = dict(self) - joined = dict([(k, v) for k, v in six.iteritems(self.__dict__) - if not k[0] == '_']) - local.update(joined) - return six.iteritems(local) - - -class TimestampMixin(object): - created_at = Column(DateTime, default=lambda: timeutils.utcnow()) - updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow()) - - -class SoftDeleteMixin(object): - deleted_at = Column(DateTime) - deleted = Column(Integer, default=0) - - def soft_delete(self, session): - """Mark this object as deleted.""" - self.deleted = self.id - self.deleted_at = timeutils.utcnow() - self.save(session=session) diff --git a/cerberus/openstack/common/db/sqlalchemy/provision.py b/cerberus/openstack/common/db/sqlalchemy/provision.py deleted file mode 100644 index 4a29e70..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/provision.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright 2013 Mirantis.inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Provision test environment for specific DB backends""" - -import argparse -import logging -import os -import random -import string - -from six import moves -import sqlalchemy - -from cerberus.openstack.common.db import exception as exc - - -LOG = logging.getLogger(__name__) - - -def get_engine(uri): - """Engine creation - - Call the function without arguments to get admin connection. Admin - connection required to create temporary user and database for each - particular test. Otherwise use existing connection to recreate connection - to the temporary database. - """ - return sqlalchemy.create_engine(uri, poolclass=sqlalchemy.pool.NullPool) - - -def _execute_sql(engine, sql, driver): - """Initialize connection, execute sql query and close it.""" - try: - with engine.connect() as conn: - if driver == 'postgresql': - conn.connection.set_isolation_level(0) - for s in sql: - conn.execute(s) - except sqlalchemy.exc.OperationalError: - msg = ('%s does not match database admin ' - 'credentials or database does not exist.') - LOG.exception(msg % engine.url) - raise exc.DBConnectionError(msg % engine.url) - - -def create_database(engine): - """Provide temporary user and database for each particular test.""" - driver = engine.name - - auth = { - 'database': ''.join(random.choice(string.ascii_lowercase) - for i in moves.range(10)), - 'user': engine.url.username, - 'passwd': engine.url.password, - } - - sqls = [ - "drop database if exists %(database)s;", - "create database %(database)s;" - ] - - if driver == 'sqlite': - return 'sqlite:////tmp/%s' % auth['database'] - elif driver in ['mysql', 'postgresql']: - sql_query = map(lambda x: x % auth, sqls) - _execute_sql(engine, sql_query, driver) - else: - raise ValueError('Unsupported RDBMS %s' % driver) - - params = auth.copy() - params['backend'] = driver - return "%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" % params - - -def drop_database(admin_engine, current_uri): - """Drop temporary database and user after each particular test.""" - - engine = get_engine(current_uri) - driver = engine.name - auth = {'database': engine.url.database, 'user': engine.url.username} - - if driver == 'sqlite': - try: - os.remove(auth['database']) - except OSError: - pass - elif driver in ['mysql', 'postgresql']: - sql = "drop database if exists %(database)s;" - _execute_sql(admin_engine, [sql % auth], driver) - else: - raise ValueError('Unsupported RDBMS %s' % driver) - - -def main(): - """Controller to handle commands - - ::create: Create test user and database with random names. - ::drop: Drop user and database created by previous command. - """ - parser = argparse.ArgumentParser( - description='Controller to handle database creation and dropping' - ' commands.', - epilog='Under normal circumstances is not used directly.' 
- ' Used in .testr.conf to automate test database creation' - ' and dropping processes.') - subparsers = parser.add_subparsers( - help='Subcommands to manipulate temporary test databases.') - - create = subparsers.add_parser( - 'create', - help='Create temporary test ' - 'databases and users.') - create.set_defaults(which='create') - create.add_argument( - 'instances_count', - type=int, - help='Number of databases to create.') - - drop = subparsers.add_parser( - 'drop', - help='Drop temporary test databases and users.') - drop.set_defaults(which='drop') - drop.add_argument( - 'instances', - nargs='+', - help='List of databases uri to be dropped.') - - args = parser.parse_args() - - connection_string = os.getenv('OS_TEST_DBAPI_ADMIN_CONNECTION', - 'sqlite://') - engine = get_engine(connection_string) - which = args.which - - if which == "create": - for i in range(int(args.instances_count)): - print(create_database(engine)) - elif which == "drop": - for db in args.instances: - drop_database(engine, db) - - -if __name__ == "__main__": - main() diff --git a/cerberus/openstack/common/db/sqlalchemy/session.py b/cerberus/openstack/common/db/sqlalchemy/session.py deleted file mode 100644 index 7a0324a..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/session.py +++ /dev/null @@ -1,933 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Session Handling for SQLAlchemy backend. - -Recommended ways to use sessions within this framework: - -* Don't use them explicitly; this is like running with ``AUTOCOMMIT=1``. - `model_query()` will implicitly use a session when called without one - supplied. This is the ideal situation because it will allow queries - to be automatically retried if the database connection is interrupted. - - .. note:: Automatic retry will be enabled in a future patch. - - It is generally fine to issue several queries in a row like this. Even though - they may be run in separate transactions and/or separate sessions, each one - will see the data from the prior calls. If needed, undo- or rollback-like - functionality should be handled at a logical level. For an example, look at - the code around quotas and `reservation_rollback()`. - - Examples: - - .. code:: python - - def get_foo(context, foo): - return (model_query(context, models.Foo). - filter_by(foo=foo). - first()) - - def update_foo(context, id, newfoo): - (model_query(context, models.Foo). - filter_by(id=id). - update({'foo': newfoo})) - - def create_foo(context, values): - foo_ref = models.Foo() - foo_ref.update(values) - foo_ref.save() - return foo_ref - - -* Within the scope of a single method, keep all the reads and writes within - the context managed by a single session. In this way, the session's - `__exit__` handler will take care of calling `flush()` and `commit()` for - you. 
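As the epilog above notes, this module is normally driven from .testr.conf rather than by hand. An assumed command-line session (the environment variable name comes from main() above; the URI arguments are placeholders):

.. code:: console

    $ export OS_TEST_DBAPI_ADMIN_CONNECTION=mysql://admin:secret@localhost/
    $ python -m cerberus.openstack.common.db.sqlalchemy.provision create 2
    $ python -m cerberus.openstack.common.db.sqlalchemy.provision drop <uri> <uri>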
If using this approach, you should not explicitly call `flush()` or - `commit()`. Any error within the context of the session will cause the - session to emit a `ROLLBACK`. Database errors like `IntegrityError` will be - raised in `session`'s `__exit__` handler, and any try/except within the - context managed by `session` will not be triggered. And catching other - non-database errors in the session will not trigger the ROLLBACK, so - exception handlers should always be outside the session, unless the - developer wants to do a partial commit on purpose. If the connection is - dropped before this is possible, the database will implicitly roll back the - transaction. - - .. note:: Statements in the session scope will not be automatically retried. - - If you create models within the session, they need to be added, but you - do not need to call `model.save()`: - - .. code:: python - - def create_many_foo(context, foos): - session = sessionmaker() - with session.begin(): - for foo in foos: - foo_ref = models.Foo() - foo_ref.update(foo) - session.add(foo_ref) - - def update_bar(context, foo_id, newbar): - session = sessionmaker() - with session.begin(): - foo_ref = (model_query(context, models.Foo, session). - filter_by(id=foo_id). - first()) - (model_query(context, models.Bar, session). - filter_by(id=foo_ref['bar_id']). - update({'bar': newbar})) - - .. note:: `update_bar` is a trivially simple example of using - ``with session.begin``. Whereas `create_many_foo` is a good example of - when a transaction is needed, it is always best to use as few queries as - possible. - - The two queries in `update_bar` can be better expressed using a single query - which avoids the need for an explicit transaction. It can be expressed like - so: - - .. code:: python - - def update_bar(context, foo_id, newbar): - subq = (model_query(context, models.Foo.id). - filter_by(id=foo_id). - limit(1). - subquery()) - (model_query(context, models.Bar). - filter_by(id=subq.as_scalar()). - update({'bar': newbar})) - - For reference, this emits approximately the following SQL statement: - - .. code:: sql - - UPDATE bar SET bar = ${newbar} - WHERE id=(SELECT bar_id FROM foo WHERE id = ${foo_id} LIMIT 1); - - .. note:: `create_duplicate_foo` is a trivially simple example of catching an - exception while using ``with session.begin``. Here create two duplicate - instances with same primary key, must catch the exception out of context - managed by a single session: - - .. code:: python - - def create_duplicate_foo(context): - foo1 = models.Foo() - foo2 = models.Foo() - foo1.id = foo2.id = 1 - session = sessionmaker() - try: - with session.begin(): - session.add(foo1) - session.add(foo2) - except exception.DBDuplicateEntry as e: - handle_error(e) - -* Passing an active session between methods. Sessions should only be passed - to private methods. The private method must use a subtransaction; otherwise - SQLAlchemy will throw an error when you call `session.begin()` on an existing - transaction. Public methods should not accept a session parameter and should - not be involved in sessions within the caller's scope. - - Note that this incurs more overhead in SQLAlchemy than the above means - due to nesting transactions, and it is not possible to implicitly retry - failed database operations when using this approach. - - This also makes code somewhat more difficult to read and debug, because a - single database transaction spans more than one method. Error handling - becomes less clear in this situation. 
When this is needed for code clarity, - it should be clearly documented. - - .. code:: python - - def myfunc(foo): - session = sessionmaker() - with session.begin(): - # do some database things - bar = _private_func(foo, session) - return bar - - def _private_func(foo, session=None): - if not session: - session = sessionmaker() - with session.begin(subtransaction=True): - # do some other database things - return bar - - -There are some things which it is best to avoid: - -* Don't keep a transaction open any longer than necessary. - - This means that your ``with session.begin()`` block should be as short - as possible, while still containing all the related calls for that - transaction. - -* Avoid ``with_lockmode('UPDATE')`` when possible. - - In MySQL/InnoDB, when a ``SELECT ... FOR UPDATE`` query does not match - any rows, it will take a gap-lock. This is a form of write-lock on the - "gap" where no rows exist, and prevents any other writes to that space. - This can effectively prevent any INSERT into a table by locking the gap - at the end of the index. Similar problems will occur if the SELECT FOR UPDATE - has an overly broad WHERE clause, or doesn't properly use an index. - - One idea proposed at ODS Fall '12 was to use a normal SELECT to test the - number of rows matching a query, and if only one row is returned, - then issue the SELECT FOR UPDATE. - - The better long-term solution is to use - ``INSERT .. ON DUPLICATE KEY UPDATE``. - However, this can not be done until the "deleted" columns are removed and - proper UNIQUE constraints are added to the tables. - - -Enabling soft deletes: - -* To use/enable soft-deletes, the `SoftDeleteMixin` must be added - to your model class. For example: - - .. code:: python - - class NovaBase(models.SoftDeleteMixin, models.ModelBase): - pass - - -Efficient use of soft deletes: - -* There are two possible ways to mark a record as deleted: - `model.soft_delete()` and `query.soft_delete()`. - - The `model.soft_delete()` method works with a single already-fetched entry. - `query.soft_delete()` makes only one db request for all entries that - correspond to the query. - -* In almost all cases you should use `query.soft_delete()`. Some examples: - - .. code:: python - - def soft_delete_bar(): - count = model_query(BarModel).find(some_condition).soft_delete() - if count == 0: - raise Exception("0 entries were soft deleted") - - def complex_soft_delete_with_synchronization_bar(session=None): - if session is None: - session = sessionmaker() - with session.begin(subtransactions=True): - count = (model_query(BarModel). - find(some_condition). - soft_delete(synchronize_session=True)) - # Here synchronize_session is required, because we - # don't know what is going on in outer session. - if count == 0: - raise Exception("0 entries were soft deleted") - -* There is only one situation where `model.soft_delete()` is appropriate: when - you fetch a single record, work with it, and mark it as deleted in the same - transaction. - - .. code:: python - - def soft_delete_bar_model(): - session = sessionmaker() - with session.begin(): - bar_ref = model_query(BarModel).find(some_condition).first() - # Work with bar_ref - bar_ref.soft_delete(session=session) - - However, if you need to work with all entries that correspond to query and - then soft delete them you should use the `query.soft_delete()` method: - - .. code:: python - - def soft_delete_multi_models(): - session = sessionmaker() - with session.begin(): - query = (model_query(BarModel, session=session). 
- find(some_condition)) - model_refs = query.all() - # Work with model_refs - query.soft_delete(synchronize_session=False) - # synchronize_session=False should be set if there is no outer - # session and these entries are not used after this. - - When working with many rows, it is very important to use query.soft_delete, - which issues a single query. Using `model.soft_delete()`, as in the following - example, is very inefficient. - - .. code:: python - - for bar_ref in bar_refs: - bar_ref.soft_delete(session=session) - # This will produce count(bar_refs) db requests. - -""" - -import functools -import logging -import re -import time - -import six -from sqlalchemy import exc as sqla_exc -from sqlalchemy.interfaces import PoolListener -import sqlalchemy.orm -from sqlalchemy.pool import NullPool, StaticPool -from sqlalchemy.sql.expression import literal_column - -from cerberus.openstack.common.db import exception -from cerberus.openstack.common.gettextutils import _LE, _LW -from cerberus.openstack.common import timeutils - - -LOG = logging.getLogger(__name__) - - -class SqliteForeignKeysListener(PoolListener): - """Ensures that the foreign key constraints are enforced in SQLite. - - The foreign key constraints are disabled by default in SQLite, - so the foreign key constraints will be enabled here for every - database connection - """ - def connect(self, dbapi_con, con_record): - dbapi_con.execute('pragma foreign_keys=ON') - - -# note(boris-42): In current versions of DB backends unique constraint -# violation messages follow the structure: -# -# sqlite: -# 1 column - (IntegrityError) column c1 is not unique -# N columns - (IntegrityError) column c1, c2, ..., N are not unique -# -# sqlite since 3.7.16: -# 1 column - (IntegrityError) UNIQUE constraint failed: tbl.k1 -# -# N columns - (IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2 -# -# postgres: -# 1 column - (IntegrityError) duplicate key value violates unique -# constraint "users_c1_key" -# N columns - (IntegrityError) duplicate key value violates unique -# constraint "name_of_our_constraint" -# -# mysql: -# 1 column - (IntegrityError) (1062, "Duplicate entry 'value_of_c1' for key -# 'c1'") -# N columns - (IntegrityError) (1062, "Duplicate entry 'values joined -# with -' for key 'name_of_our_constraint'") -# -# ibm_db_sa: -# N columns - (IntegrityError) SQL0803N One or more values in the INSERT -# statement, UPDATE statement, or foreign key update caused by a -# DELETE statement are not valid because the primary key, unique -# constraint or unique index identified by "2" constrains table -# "NOVA.KEY_PAIRS" from having duplicate values for the index -# key. -_DUP_KEY_RE_DB = { - "sqlite": (re.compile(r"^.*columns?([^)]+)(is|are)\s+not\s+unique$"), - re.compile(r"^.*UNIQUE\s+constraint\s+failed:\s+(.+)$")), - "postgresql": (re.compile(r"^.*duplicate\s+key.*\"([^\"]+)\"\s*\n.*$"),), - "mysql": (re.compile(r"^.*\(1062,.*'([^\']+)'\"\)$"),), - "ibm_db_sa": (re.compile(r"^.*SQL0803N.*$"),), -} - - -def _raise_if_duplicate_entry_error(integrity_error, engine_name): - """Raise exception if two entries are duplicated. - - In this function will be raised DBDuplicateEntry exception if integrity - error wrap unique constraint violation. - """ - - def get_columns_from_uniq_cons_or_name(columns): - # note(vsergeyev): UniqueConstraint name convention: "uniq_t0c10c2" - # where `t` it is table name and columns `c1`, `c2` - # are in UniqueConstraint. 
- uniqbase = "uniq_" - if not columns.startswith(uniqbase): - if engine_name == "postgresql": - return [columns[columns.index("_") + 1:columns.rindex("_")]] - return [columns] - return columns[len(uniqbase):].split("0")[1:] - - if engine_name not in ("ibm_db_sa", "mysql", "sqlite", "postgresql"): - return - - # FIXME(johannes): The usage of the .message attribute has been - # deprecated since Python 2.6. However, the exceptions raised by - # SQLAlchemy can differ when using unicode() and accessing .message. - # An audit across all three supported engines will be necessary to - # ensure there are no regressions. - for pattern in _DUP_KEY_RE_DB[engine_name]: - match = pattern.match(integrity_error.message) - if match: - break - else: - return - - # NOTE(mriedem): The ibm_db_sa integrity error message doesn't provide the - # columns so we have to omit that from the DBDuplicateEntry error. - columns = '' - - if engine_name != 'ibm_db_sa': - columns = match.group(1) - - if engine_name == "sqlite": - columns = [c.split('.')[-1] for c in columns.strip().split(", ")] - else: - columns = get_columns_from_uniq_cons_or_name(columns) - raise exception.DBDuplicateEntry(columns, integrity_error) - - -# NOTE(comstud): In current versions of DB backends, Deadlock violation -# messages follow the structure: -# -# mysql: -# (OperationalError) (1213, 'Deadlock found when trying to get lock; try ' -# 'restarting transaction') -_DEADLOCK_RE_DB = { - "mysql": re.compile(r"^.*\(1213, 'Deadlock.*") -} - - -def _raise_if_deadlock_error(operational_error, engine_name): - """Raise exception on deadlock condition. - - Raise DBDeadlock exception if OperationalError contains a Deadlock - condition. - """ - re = _DEADLOCK_RE_DB.get(engine_name) - if re is None: - return - # FIXME(johannes): The usage of the .message attribute has been - # deprecated since Python 2.6. However, the exceptions raised by - # SQLAlchemy can differ when using unicode() and accessing .message. - # An audit across all three supported engines will be necessary to - # ensure there are no regressions. - m = re.match(operational_error.message) - if not m: - return - raise exception.DBDeadlock(operational_error) - - -def _wrap_db_error(f): - @functools.wraps(f) - def _wrap(self, *args, **kwargs): - try: - assert issubclass( - self.__class__, ( - sqlalchemy.orm.session.Session, SessionTransactionWrapper) - ), ('_wrap_db_error() can only be applied to methods of ' - 'subclasses of sqlalchemy.orm.session.Session or ' - ' SessionTransactionWrapper') - - return f(self, *args, **kwargs) - except UnicodeEncodeError: - raise exception.DBInvalidUnicodeParameter() - except sqla_exc.OperationalError as e: - _raise_if_db_connection_lost(e, self.bind) - _raise_if_deadlock_error(e, self.bind.dialect.name) - # NOTE(comstud): A lot of code is checking for OperationalError - # so let's not wrap it for now. - raise - # note(boris-42): We should catch unique constraint violation and - # wrap it by our own DBDuplicateEntry exception. Unique constraint - # violation is wrapped by IntegrityError. - except sqla_exc.IntegrityError as e: - # note(boris-42): SqlAlchemy doesn't unify errors from different - # DBs so we must do this. Also in some tables (for example - # instance_types) there are more than one unique constraint. This - # means we should get names of columns, which values violate - # unique constraint, from error message. 
- _raise_if_duplicate_entry_error(e, self.bind.dialect.name) - raise exception.DBError(e) - except exception.DBError: - # note(zzzeek) - if _wrap_db_error is applied to nested functions, - # ensure an existing DBError is propagated outwards - raise - except Exception as e: - LOG.exception(_LE('DB exception wrapped.')) - raise exception.DBError(e) - return _wrap - - -def _synchronous_switch_listener(dbapi_conn, connection_rec): - """Switch sqlite connections to non-synchronous mode.""" - dbapi_conn.execute("PRAGMA synchronous = OFF") - - -def _add_regexp_listener(dbapi_con, con_record): - """Add REGEXP function to sqlite connections.""" - - def regexp(expr, item): - reg = re.compile(expr) - return reg.search(six.text_type(item)) is not None - dbapi_con.create_function('regexp', 2, regexp) - - -def _thread_yield(dbapi_con, con_record): - """Ensure other greenthreads get a chance to be executed. - - If we use eventlet.monkey_patch(), eventlet.greenthread.sleep(0) will - execute instead of time.sleep(0). - Force a context switch. With common database backends (eg MySQLdb and - sqlite), there is no implicit yield caused by network I/O since they are - implemented by C libraries that eventlet cannot monkey patch. - """ - time.sleep(0) - - -def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy): - """Ensures that MySQL, PostgreSQL or DB2 connections are alive. - - Borrowed from: - http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f - """ - cursor = dbapi_conn.cursor() - try: - ping_sql = 'select 1' - if engine.name == 'ibm_db_sa': - # DB2 requires a table expression - ping_sql = 'select 1 from (values (1)) AS t1' - cursor.execute(ping_sql) - except Exception as ex: - if engine.dialect.is_disconnect(ex, dbapi_conn, cursor): - msg = _LW('Database server has gone away: %s') % ex - LOG.warning(msg) - - # if the database server has gone away, all connections in the pool - # have become invalid and we can safely close all of them here, - # rather than waste time on checking of every single connection - engine.dispose() - - # this will be handled by SQLAlchemy and will force it to create - # a new connection and retry the original action - raise sqla_exc.DisconnectionError(msg) - else: - raise - - -def _set_session_sql_mode(dbapi_con, connection_rec, sql_mode=None): - """Set the sql_mode session variable. - - MySQL supports several server modes. The default is None, but sessions - may choose to enable server modes like TRADITIONAL, ANSI, - several STRICT_* modes and others. - - Note: passing in '' (empty string) for sql_mode clears - the SQL mode for the session, overriding a potentially set - server default. - """ - - cursor = dbapi_con.cursor() - cursor.execute("SET SESSION sql_mode = %s", [sql_mode]) - - -def _mysql_get_effective_sql_mode(engine): - """Returns the effective SQL mode for connections from the engine pool. - - Returns ``None`` if the mode isn't available, otherwise returns the mode. - - """ - # Get the real effective SQL mode. Even when unset by - # our own config, the server may still be operating in a specific - # SQL mode as set by the server configuration. - # Also note that the checkout listener will be called on execute to - # set the mode if it's registered. 
- row = engine.execute("SHOW VARIABLES LIKE 'sql_mode'").fetchone() - if row is None: - return - return row[1] - - -def _mysql_check_effective_sql_mode(engine): - """Logs a message based on the effective SQL mode for MySQL connections.""" - realmode = _mysql_get_effective_sql_mode(engine) - - if realmode is None: - LOG.warning(_LW('Unable to detect effective SQL mode')) - return - - LOG.debug('MySQL server mode set to %s', realmode) - # 'TRADITIONAL' mode enables several other modes, so - # we need a substring match here - if not ('TRADITIONAL' in realmode.upper() or - 'STRICT_ALL_TABLES' in realmode.upper()): - LOG.warning(_LW("MySQL SQL mode is '%s', " - "consider enabling TRADITIONAL or STRICT_ALL_TABLES"), - realmode) - - -def _mysql_set_mode_callback(engine, sql_mode): - if sql_mode is not None: - mode_callback = functools.partial(_set_session_sql_mode, - sql_mode=sql_mode) - sqlalchemy.event.listen(engine, 'connect', mode_callback) - _mysql_check_effective_sql_mode(engine) - - -def _is_db_connection_error(args): - """Return True if error in connecting to db.""" - # NOTE(adam_g): This is currently MySQL specific and needs to be extended - # to support Postgres and others. - # For the db2, the error code is -30081 since the db2 is still not ready - conn_err_codes = ('2002', '2003', '2006', '2013', '-30081') - for err_code in conn_err_codes: - if args.find(err_code) != -1: - return True - return False - - -def _raise_if_db_connection_lost(error, engine): - # NOTE(vsergeyev): Function is_disconnect(e, connection, cursor) - # requires connection and cursor in incoming parameters, - # but we have no possibility to create connection if DB - # is not available, so in such case reconnect fails. - # But is_disconnect() ignores these parameters, so it - # makes sense to pass to function None as placeholder - # instead of connection and cursor. 
- if engine.dialect.is_disconnect(error, None, None):
- raise exception.DBConnectionError(error)
-
-
-def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
- idle_timeout=3600,
- connection_debug=0, max_pool_size=None, max_overflow=None,
- pool_timeout=None, sqlite_synchronous=True,
- connection_trace=False, max_retries=10, retry_interval=10):
- """Return a new SQLAlchemy engine."""
-
- connection_dict = sqlalchemy.engine.url.make_url(sql_connection)
-
- engine_args = {
- "pool_recycle": idle_timeout,
- 'convert_unicode': True,
- }
-
- logger = logging.getLogger('sqlalchemy.engine')
-
- # Map SQL debug level to Python log level
- if connection_debug >= 100:
- logger.setLevel(logging.DEBUG)
- elif connection_debug >= 50:
- logger.setLevel(logging.INFO)
- else:
- logger.setLevel(logging.WARNING)
-
- if "sqlite" in connection_dict.drivername:
- if sqlite_fk:
- engine_args["listeners"] = [SqliteForeignKeysListener()]
- engine_args["poolclass"] = NullPool
-
- if sql_connection == "sqlite://":
- engine_args["poolclass"] = StaticPool
- engine_args["connect_args"] = {'check_same_thread': False}
- else:
- if max_pool_size is not None:
- engine_args['pool_size'] = max_pool_size
- if max_overflow is not None:
- engine_args['max_overflow'] = max_overflow
- if pool_timeout is not None:
- engine_args['pool_timeout'] = pool_timeout
-
- engine = sqlalchemy.create_engine(sql_connection, **engine_args)
-
- sqlalchemy.event.listen(engine, 'checkin', _thread_yield)
-
- if engine.name in ('ibm_db_sa', 'mysql', 'postgresql'):
- ping_callback = functools.partial(_ping_listener, engine)
- sqlalchemy.event.listen(engine, 'checkout', ping_callback)
- if engine.name == 'mysql':
- if mysql_sql_mode:
- _mysql_set_mode_callback(engine, mysql_sql_mode)
- elif 'sqlite' in connection_dict.drivername:
- if not sqlite_synchronous:
- sqlalchemy.event.listen(engine, 'connect',
- _synchronous_switch_listener)
- sqlalchemy.event.listen(engine, 'connect', _add_regexp_listener)
-
- if connection_trace and engine.dialect.dbapi.__name__ == 'MySQLdb':
- _patch_mysqldb_with_stacktrace_comments()
-
- try:
- engine.connect()
- except sqla_exc.OperationalError as e:
- if not _is_db_connection_error(e.args[0]):
- raise
-
- remaining = max_retries
- if remaining == -1:
- remaining = 'infinite'
- while True:
- msg = _LW('SQL connection failed. %s attempts left.')
- LOG.warning(msg % remaining)
- if remaining != 'infinite':
- remaining -= 1
- time.sleep(retry_interval)
- try:
- engine.connect()
- break
- except sqla_exc.OperationalError as e:
- if (remaining != 'infinite' and remaining == 0) or \
- not _is_db_connection_error(e.args[0]):
- raise
- return engine
-
-
-class Query(sqlalchemy.orm.query.Query):
- """Subclass of sqlalchemy.query with soft_delete() method."""
- def soft_delete(self, synchronize_session='evaluate'):
- return self.update({'deleted': literal_column('id'),
- 'updated_at': literal_column('updated_at'),
- 'deleted_at': timeutils.utcnow()},
- synchronize_session=synchronize_session)
-
-
-class Session(sqlalchemy.orm.session.Session):
- """Custom Session class to avoid SqlAlchemy Session monkey patching."""
- @_wrap_db_error
- def query(self, *args, **kwargs):
- return super(Session, self).query(*args, **kwargs)
-
- @_wrap_db_error
- def flush(self, *args, **kwargs):
- return super(Session, self).flush(*args, **kwargs)
-
- @_wrap_db_error
- def execute(self, *args, **kwargs):
- return super(Session, self).execute(*args, **kwargs)
-
- @_wrap_db_error
- def commit(self, *args, **kwargs):
- return super(Session, self).commit(*args, **kwargs)
-
- def begin(self, **kw):
- trans = super(Session, self).begin(**kw)
- trans.__class__ = SessionTransactionWrapper
- return trans
-
-
-class SessionTransactionWrapper(sqlalchemy.orm.session.SessionTransaction):
- @property
- def bind(self):
- return self.session.bind
-
- @_wrap_db_error
- def commit(self, *args, **kwargs):
- return super(SessionTransactionWrapper, self).commit(*args, **kwargs)
-
- @_wrap_db_error
- def rollback(self, *args, **kwargs):
- return super(SessionTransactionWrapper, self).rollback(*args, **kwargs)
-
-
-def get_maker(engine, autocommit=True, expire_on_commit=False):
- """Return a SQLAlchemy sessionmaker using the given engine."""
- return sqlalchemy.orm.sessionmaker(bind=engine,
- class_=Session,
- autocommit=autocommit,
- expire_on_commit=expire_on_commit,
- query_cls=Query)
-
-
-def _patch_mysqldb_with_stacktrace_comments():
- """Adds current stack trace as a comment in queries.
-
- Patches MySQLdb.cursors.BaseCursor._do_query.
- """
- import MySQLdb.cursors
- import traceback
-
- old_mysql_do_query = MySQLdb.cursors.BaseCursor._do_query
-
- def _do_query(self, q):
- stack = ''
- for filename, line, method, function in traceback.extract_stack():
- # exclude various common things from trace
- if filename.endswith('session.py') and method == '_do_query':
- continue
- if filename.endswith('api.py') and method == 'wrapper':
- continue
- if filename.endswith('utils.py') and method == '_inner':
- continue
- if filename.endswith('exception.py') and method == '_wrap':
- continue
- # db/api is just a wrapper around db/sqlalchemy/api
- if filename.endswith('db/api.py'):
- continue
- # only trace inside cerberus
- index = filename.rfind('cerberus')
- if index == -1:
- continue
- stack += "File:%s:%s Method:%s() Line:%s | " \
- % (filename[index:], line, method, function)
-
- # strip trailing " | " from stack
- if stack:
- stack = stack[:-3]
- qq = "%s /* %s */" % (q, stack)
- else:
- qq = q
- old_mysql_do_query(self, qq)
-
- setattr(MySQLdb.cursors.BaseCursor, '_do_query', _do_query)
-
-
-class EngineFacade(object):
- """A helper class for removing of global engine instances from cerberus.db.
-
- As a library, cerberus.db can't decide where to store/when to create engine
- and sessionmaker instances, so this must be left for a target application.
- - On the other hand, in order to simplify the adoption of cerberus.db changes, - we'll provide a helper class, which creates engine and sessionmaker - on its instantiation and provides get_engine()/get_session() methods - that are compatible with corresponding utility functions that currently - exist in target projects, e.g. in Nova. - - engine/sessionmaker instances will still be global (and they are meant to - be global), but they will be stored in the app context, rather that in the - cerberus.db context. - - Note: using of this helper is completely optional and you are encouraged to - integrate engine/sessionmaker instances into your apps any way you like - (e.g. one might want to bind a session to a request context). Two important - things to remember: - - 1. An Engine instance is effectively a pool of DB connections, so it's - meant to be shared (and it's thread-safe). - 2. A Session instance is not meant to be shared and represents a DB - transactional context (i.e. it's not thread-safe). sessionmaker is - a factory of sessions. - - """ - - def __init__(self, sql_connection, - sqlite_fk=False, autocommit=True, - expire_on_commit=False, **kwargs): - """Initialize engine and sessionmaker instances. - - :param sqlite_fk: enable foreign keys in SQLite - :type sqlite_fk: bool - - :param autocommit: use autocommit mode for created Session instances - :type autocommit: bool - - :param expire_on_commit: expire session objects on commit - :type expire_on_commit: bool - - Keyword arguments: - - :keyword mysql_sql_mode: the SQL mode to be used for MySQL sessions. - (defaults to TRADITIONAL) - :keyword idle_timeout: timeout before idle sql connections are reaped - (defaults to 3600) - :keyword connection_debug: verbosity of SQL debugging information. - 0=None, 100=Everything (defaults to 0) - :keyword max_pool_size: maximum number of SQL connections to keep open - in a pool (defaults to SQLAlchemy settings) - :keyword max_overflow: if set, use this value for max_overflow with - sqlalchemy (defaults to SQLAlchemy settings) - :keyword pool_timeout: if set, use this value for pool_timeout with - sqlalchemy (defaults to SQLAlchemy settings) - :keyword sqlite_synchronous: if True, SQLite uses synchronous mode - (defaults to True) - :keyword connection_trace: add python stack traces to SQL as comment - strings (defaults to False) - :keyword max_retries: maximum db connection retries during startup. - (setting -1 implies an infinite retry count) - (defaults to 10) - :keyword retry_interval: interval between retries of opening a sql - connection (defaults to 10) - - """ - - super(EngineFacade, self).__init__() - - self._engine = create_engine( - sql_connection=sql_connection, - sqlite_fk=sqlite_fk, - mysql_sql_mode=kwargs.get('mysql_sql_mode', 'TRADITIONAL'), - idle_timeout=kwargs.get('idle_timeout', 3600), - connection_debug=kwargs.get('connection_debug', 0), - max_pool_size=kwargs.get('max_pool_size'), - max_overflow=kwargs.get('max_overflow'), - pool_timeout=kwargs.get('pool_timeout'), - sqlite_synchronous=kwargs.get('sqlite_synchronous', True), - connection_trace=kwargs.get('connection_trace', False), - max_retries=kwargs.get('max_retries', 10), - retry_interval=kwargs.get('retry_interval', 10)) - self._session_maker = get_maker( - engine=self._engine, - autocommit=autocommit, - expire_on_commit=expire_on_commit) - - def get_engine(self): - """Get the engine instance (note, that it's shared).""" - - return self._engine - - def get_session(self, **kwargs): - """Get a Session instance. 
- - If passed, keyword arguments values override the ones used when the - sessionmaker instance was created. - - :keyword autocommit: use autocommit mode for created Session instances - :type autocommit: bool - - :keyword expire_on_commit: expire session objects on commit - :type expire_on_commit: bool - - """ - - for arg in kwargs: - if arg not in ('autocommit', 'expire_on_commit'): - del kwargs[arg] - - return self._session_maker(**kwargs) - - @classmethod - def from_config(cls, connection_string, conf, - sqlite_fk=False, autocommit=True, expire_on_commit=False): - """Initialize EngineFacade using oslo.config config instance options. - - :param connection_string: SQLAlchemy connection string - :type connection_string: string - - :param conf: oslo.config config instance - :type conf: oslo.config.cfg.ConfigOpts - - :param sqlite_fk: enable foreign keys in SQLite - :type sqlite_fk: bool - - :param autocommit: use autocommit mode for created Session instances - :type autocommit: bool - - :param expire_on_commit: expire session objects on commit - :type expire_on_commit: bool - - """ - - return cls(sql_connection=connection_string, - sqlite_fk=sqlite_fk, - autocommit=autocommit, - expire_on_commit=expire_on_commit, - **dict(conf.database.items())) diff --git a/cerberus/openstack/common/db/sqlalchemy/test_base.py b/cerberus/openstack/common/db/sqlalchemy/test_base.py deleted file mode 100644 index 199326a..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/test_base.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc -import functools -import os - -import fixtures -import six - -from cerberus.openstack.common.db.sqlalchemy import session -from cerberus.openstack.common.db.sqlalchemy import utils -from cerberus.openstack.common.fixture import lockutils -from cerberus.openstack.common import test - - -class DbFixture(fixtures.Fixture): - """Basic database fixture. - - Allows to run tests on various db backends, such as SQLite, MySQL and - PostgreSQL. By default use sqlite backend. To override default backend - uri set env variable OS_TEST_DBAPI_CONNECTION with database admin - credentials for specific backend. - """ - - def _get_uri(self): - return os.getenv('OS_TEST_DBAPI_CONNECTION', 'sqlite://') - - def __init__(self, test): - super(DbFixture, self).__init__() - - self.test = test - - def setUp(self): - super(DbFixture, self).setUp() - - self.test.engine = session.create_engine(self._get_uri()) - self.test.sessionmaker = session.get_maker(self.test.engine) - self.addCleanup(self.test.engine.dispose) - - -class DbTestCase(test.BaseTestCase): - """Base class for testing of DB code. - - Using `DbFixture`. Intended to be the main database test case to use all - the tests on a given backend with user defined uri. Backend specific - tests should be decorated with `backend_specific` decorator. 
- """ - - FIXTURE = DbFixture - - def setUp(self): - super(DbTestCase, self).setUp() - self.useFixture(self.FIXTURE(self)) - - -ALLOWED_DIALECTS = ['sqlite', 'mysql', 'postgresql'] - - -def backend_specific(*dialects): - """Decorator to skip backend specific tests on inappropriate engines. - - ::dialects: list of dialects names under which the test will be launched. - """ - def wrap(f): - @functools.wraps(f) - def ins_wrap(self): - if not set(dialects).issubset(ALLOWED_DIALECTS): - raise ValueError( - "Please use allowed dialects: %s" % ALLOWED_DIALECTS) - if self.engine.name not in dialects: - msg = ('The test "%s" can be run ' - 'only on %s. Current engine is %s.') - args = (f.__name__, ' '.join(dialects), self.engine.name) - self.skip(msg % args) - else: - return f(self) - return ins_wrap - return wrap - - -@six.add_metaclass(abc.ABCMeta) -class OpportunisticFixture(DbFixture): - """Base fixture to use default CI databases. - - The databases exist in OpenStack CI infrastructure. But for the - correct functioning in local environment the databases must be - created manually. - """ - - DRIVER = abc.abstractproperty(lambda: None) - DBNAME = PASSWORD = USERNAME = 'openstack_citest' - - def _get_uri(self): - return utils.get_connect_string(backend=self.DRIVER, - user=self.USERNAME, - passwd=self.PASSWORD, - database=self.DBNAME) - - -@six.add_metaclass(abc.ABCMeta) -class OpportunisticTestCase(DbTestCase): - """Base test case to use default CI databases. - - The subclasses of the test case are running only when openstack_citest - database is available otherwise a tests will be skipped. - """ - - FIXTURE = abc.abstractproperty(lambda: None) - - def setUp(self): - # TODO(bnemec): Remove this once infra is ready for - # https://review.openstack.org/#/c/74963/ to merge. - self.useFixture(lockutils.LockFixture('opportunistic-db')) - credentials = { - 'backend': self.FIXTURE.DRIVER, - 'user': self.FIXTURE.USERNAME, - 'passwd': self.FIXTURE.PASSWORD, - 'database': self.FIXTURE.DBNAME} - - if self.FIXTURE.DRIVER and not utils.is_backend_avail(**credentials): - msg = '%s backend is not available.' % self.FIXTURE.DRIVER - return self.skip(msg) - - super(OpportunisticTestCase, self).setUp() - - -class MySQLOpportunisticFixture(OpportunisticFixture): - DRIVER = 'mysql' - - -class PostgreSQLOpportunisticFixture(OpportunisticFixture): - DRIVER = 'postgresql' - - -class MySQLOpportunisticTestCase(OpportunisticTestCase): - FIXTURE = MySQLOpportunisticFixture - - -class PostgreSQLOpportunisticTestCase(OpportunisticTestCase): - FIXTURE = PostgreSQLOpportunisticFixture diff --git a/cerberus/openstack/common/db/sqlalchemy/test_migrations.py b/cerberus/openstack/common/db/sqlalchemy/test_migrations.py deleted file mode 100644 index 0fe0f12..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/test_migrations.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright 2010-2011 OpenStack Foundation -# Copyright 2012-2013 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
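For reference, the DbFixture/DbTestCase helpers and the backend_specific decorator removed above were typically consumed as follows. This is a minimal sketch, assuming the default sqlite:// backend (overridable via OS_TEST_DBAPI_CONNECTION); MyDbApiTestCase and its assertions are illustrative names, not code from this tree::

    from cerberus.openstack.common.db.sqlalchemy import test_base


    class MyDbApiTestCase(test_base.DbTestCase):
        # DbFixture populates self.engine and self.sessionmaker in setUp().

        def test_runs_on_any_backend(self):
            self.assertEqual(1, self.engine.execute("SELECT 1").scalar())

        @test_base.backend_specific('mysql', 'postgresql')
        def test_server_side_backends_only(self):
            # Skipped automatically when self.engine.name is not listed.
            self.assertIn(self.engine.name, ('mysql', 'postgresql'))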
- -import functools -import logging -import os -import subprocess - -import lockfile -from six import moves -from six.moves.urllib import parse -import sqlalchemy -import sqlalchemy.exc - -from cerberus.openstack.common.db.sqlalchemy import utils -from cerberus.openstack.common.gettextutils import _LE -from cerberus.openstack.common import test - -LOG = logging.getLogger(__name__) - - -def _have_mysql(user, passwd, database): - present = os.environ.get('TEST_MYSQL_PRESENT') - if present is None: - return utils.is_backend_avail(backend='mysql', - user=user, - passwd=passwd, - database=database) - return present.lower() in ('', 'true') - - -def _have_postgresql(user, passwd, database): - present = os.environ.get('TEST_POSTGRESQL_PRESENT') - if present is None: - return utils.is_backend_avail(backend='postgres', - user=user, - passwd=passwd, - database=database) - return present.lower() in ('', 'true') - - -def _set_db_lock(lock_path=None, lock_prefix=None): - def decorator(f): - @functools.wraps(f) - def wrapper(*args, **kwargs): - try: - path = lock_path or os.environ.get("CERBERUS_LOCK_PATH") - lock = lockfile.FileLock(os.path.join(path, lock_prefix)) - with lock: - LOG.debug('Got lock "%s"' % f.__name__) - return f(*args, **kwargs) - finally: - LOG.debug('Lock released "%s"' % f.__name__) - return wrapper - return decorator - - -class BaseMigrationTestCase(test.BaseTestCase): - """Base class fort testing of migration utils.""" - - def __init__(self, *args, **kwargs): - super(BaseMigrationTestCase, self).__init__(*args, **kwargs) - - self.DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__), - 'test_migrations.conf') - # Test machines can set the TEST_MIGRATIONS_CONF variable - # to override the location of the config file for migration testing - self.CONFIG_FILE_PATH = os.environ.get('TEST_MIGRATIONS_CONF', - self.DEFAULT_CONFIG_FILE) - self.test_databases = {} - self.migration_api = None - - def setUp(self): - super(BaseMigrationTestCase, self).setUp() - - # Load test databases from the config file. Only do this - # once. No need to re-run this on each test... - LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH) - if os.path.exists(self.CONFIG_FILE_PATH): - cp = moves.configparser.RawConfigParser() - try: - cp.read(self.CONFIG_FILE_PATH) - defaults = cp.defaults() - for key, value in defaults.items(): - self.test_databases[key] = value - except moves.configparser.ParsingError as e: - self.fail("Failed to read test_migrations.conf config " - "file. Got error: %s" % e) - else: - self.fail("Failed to find test_migrations.conf config " - "file.") - - self.engines = {} - for key, value in self.test_databases.items(): - self.engines[key] = sqlalchemy.create_engine(value) - - # We start each test case with a completely blank slate. 
- self._reset_databases() - - def tearDown(self): - # We destroy the test data store between each test case, - # and recreate it, which ensures that we have no side-effects - # from the tests - self._reset_databases() - super(BaseMigrationTestCase, self).tearDown() - - def execute_cmd(self, cmd=None): - process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - output = process.communicate()[0] - LOG.debug(output) - self.assertEqual(0, process.returncode, - "Failed to run: %s\n%s" % (cmd, output)) - - def _reset_pg(self, conn_pieces): - (user, - password, - database, - host) = utils.get_db_connection_info(conn_pieces) - os.environ['PGPASSWORD'] = password - os.environ['PGUSER'] = user - # note(boris-42): We must create and drop database, we can't - # drop database which we have connected to, so for such - # operations there is a special database template1. - sqlcmd = ("psql -w -U %(user)s -h %(host)s -c" - " '%(sql)s' -d template1") - - sql = ("drop database if exists %s;") % database - droptable = sqlcmd % {'user': user, 'host': host, 'sql': sql} - self.execute_cmd(droptable) - - sql = ("create database %s;") % database - createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql} - self.execute_cmd(createtable) - - os.unsetenv('PGPASSWORD') - os.unsetenv('PGUSER') - - @_set_db_lock(lock_prefix='migration_tests-') - def _reset_databases(self): - for key, engine in self.engines.items(): - conn_string = self.test_databases[key] - conn_pieces = parse.urlparse(conn_string) - engine.dispose() - if conn_string.startswith('sqlite'): - # We can just delete the SQLite database, which is - # the easiest and cleanest solution - db_path = conn_pieces.path.strip('/') - if os.path.exists(db_path): - os.unlink(db_path) - # No need to recreate the SQLite DB. SQLite will - # create it for us if it's not there... - elif conn_string.startswith('mysql'): - # We can execute the MySQL client to destroy and re-create - # the MYSQL database, which is easier and less error-prone - # than using SQLAlchemy to do this via MetaData...trust me. - (user, password, database, host) = \ - utils.get_db_connection_info(conn_pieces) - sql = ("drop database if exists %(db)s; " - "create database %(db)s;") % {'db': database} - cmd = ("mysql -u \"%(user)s\" -p\"%(password)s\" -h %(host)s " - "-e \"%(sql)s\"") % {'user': user, 'password': password, - 'host': host, 'sql': sql} - self.execute_cmd(cmd) - elif conn_string.startswith('postgresql'): - self._reset_pg(conn_pieces) - - -class WalkVersionsMixin(object): - def _walk_versions(self, engine=None, snake_walk=False, downgrade=True): - # Determine latest version script from the repo, then - # upgrade from 1 through to the latest, with no data - # in the databases. This just checks that the schema itself - # upgrades successfully. - - # Place the database under version control - self.migration_api.version_control(engine, self.REPOSITORY, - self.INIT_VERSION) - self.assertEqual(self.INIT_VERSION, - self.migration_api.db_version(engine, - self.REPOSITORY)) - - LOG.debug('latest version is %s' % self.REPOSITORY.latest) - versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) - - for version in versions: - # upgrade -> downgrade -> upgrade - self._migrate_up(engine, version, with_data=True) - if snake_walk: - downgraded = self._migrate_down( - engine, version - 1, with_data=True) - if downgraded: - self._migrate_up(engine, version) - - if downgrade: - # Now walk it back down to 0 from the latest, testing - # the downgrade paths. 
- for version in reversed(versions): - # downgrade -> upgrade -> downgrade - downgraded = self._migrate_down(engine, version - 1) - - if snake_walk and downgraded: - self._migrate_up(engine, version) - self._migrate_down(engine, version - 1) - - def _migrate_down(self, engine, version, with_data=False): - try: - self.migration_api.downgrade(engine, self.REPOSITORY, version) - except NotImplementedError: - # NOTE(sirp): some migrations, namely release-level - # migrations, don't support a downgrade. - return False - - self.assertEqual( - version, self.migration_api.db_version(engine, self.REPOSITORY)) - - # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target' - # version). So if we have any downgrade checks, they need to be run for - # the previous (higher numbered) migration. - if with_data: - post_downgrade = getattr( - self, "_post_downgrade_%03d" % (version + 1), None) - if post_downgrade: - post_downgrade(engine) - - return True - - def _migrate_up(self, engine, version, with_data=False): - """migrate up to a new version of the db. - - We allow for data insertion and post checks at every - migration version with special _pre_upgrade_### and - _check_### functions in the main test. - """ - # NOTE(sdague): try block is here because it's impossible to debug - # where a failed data migration happens otherwise - try: - if with_data: - data = None - pre_upgrade = getattr( - self, "_pre_upgrade_%03d" % version, None) - if pre_upgrade: - data = pre_upgrade(engine) - - self.migration_api.upgrade(engine, self.REPOSITORY, version) - self.assertEqual(version, - self.migration_api.db_version(engine, - self.REPOSITORY)) - if with_data: - check = getattr(self, "_check_%03d" % version, None) - if check: - check(engine, data) - except Exception: - LOG.error(_LE("Failed to migrate to version %s on engine %s") % - (version, engine)) - raise diff --git a/cerberus/openstack/common/db/sqlalchemy/utils.py b/cerberus/openstack/common/db/sqlalchemy/utils.py deleted file mode 100644 index 6da194e..0000000 --- a/cerberus/openstack/common/db/sqlalchemy/utils.py +++ /dev/null @@ -1,647 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2010-2011 OpenStack Foundation. -# Copyright 2012 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
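The WalkVersionsMixin above expected the consuming project to supply migration_api, REPOSITORY and INIT_VERSION. A minimal sketch of that wiring, using sqlalchemy-migrate as the API module and a hypothetical migrate_repo path::

    from migrate.versioning import api as migration_api
    from migrate.versioning import repository

    from cerberus.openstack.common.db.sqlalchemy import test_migrations


    class TestWalkMigrations(test_migrations.BaseMigrationTestCase,
                             test_migrations.WalkVersionsMixin):

        def setUp(self):
            super(TestWalkMigrations, self).setUp()
            self.migration_api = migration_api
            # Hypothetical path; each project pointed this at its own repo.
            self.REPOSITORY = repository.Repository('cerberus/db/migrate_repo')
            self.INIT_VERSION = 0

        def test_walk_versions(self):
            # Engines come from test_migrations.conf via the base class.
            for engine in self.engines.values():
                self._walk_versions(engine, snake_walk=False, downgrade=True)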
- -import logging -import re - -import sqlalchemy -from sqlalchemy import Boolean -from sqlalchemy import CheckConstraint -from sqlalchemy import Column -from sqlalchemy.engine import reflection -from sqlalchemy.ext.compiler import compiles -from sqlalchemy import func -from sqlalchemy import Index -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy import or_ -from sqlalchemy.sql.expression import literal_column -from sqlalchemy.sql.expression import UpdateBase -from sqlalchemy import String -from sqlalchemy import Table -from sqlalchemy.types import NullType - -from cerberus.openstack.common import context as request_context -from cerberus.openstack.common.db.sqlalchemy import models -from cerberus.openstack.common.gettextutils import _, _LI, _LW -from cerberus.openstack.common import timeutils - - -LOG = logging.getLogger(__name__) - -_DBURL_REGEX = re.compile(r"[^:]+://([^:]+):([^@]+)@.+") - - -def sanitize_db_url(url): - match = _DBURL_REGEX.match(url) - if match: - return '%s****:****%s' % (url[:match.start(1)], url[match.end(2):]) - return url - - -class InvalidSortKey(Exception): - message = _("Sort key supplied was not valid.") - - -# copy from glance/db/sqlalchemy/api.py -def paginate_query(query, model, limit, sort_keys, marker=None, - sort_dir=None, sort_dirs=None): - """Returns a query with sorting / pagination criteria added. - - Pagination works by requiring a unique sort_key, specified by sort_keys. - (If sort_keys is not unique, then we risk looping through values.) - We use the last row in the previous page as the 'marker' for pagination. - So we must return values that follow the passed marker in the order. - With a single-valued sort_key, this would be easy: sort_key > X. - With a compound-values sort_key, (k1, k2, k3) we must do this to repeat - the lexicographical ordering: - (k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3) - - We also have to cope with different sort_directions. - - Typically, the id of the last row is used as the client-facing pagination - marker, then the actual marker object must be fetched from the db and - passed in to us as marker. - - :param query: the query object to which we should add paging/sorting - :param model: the ORM model class - :param limit: maximum number of items to return - :param sort_keys: array of attributes by which results should be sorted - :param marker: the last item of the previous page; we returns the next - results after this value. - :param sort_dir: direction in which results should be sorted (asc, desc) - :param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys - - :rtype: sqlalchemy.orm.query.Query - :return: The query with sorting/pagination added. 
- """ - - if 'id' not in sort_keys: - # TODO(justinsb): If this ever gives a false-positive, check - # the actual primary key, rather than assuming its id - LOG.warning(_LW('Id not in sort_keys; is sort_keys unique?')) - - assert(not (sort_dir and sort_dirs)) - - # Default the sort direction to ascending - if sort_dirs is None and sort_dir is None: - sort_dir = 'asc' - - # Ensure a per-column sort direction - if sort_dirs is None: - sort_dirs = [sort_dir for _sort_key in sort_keys] - - assert(len(sort_dirs) == len(sort_keys)) - - # Add sorting - for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs): - try: - sort_dir_func = { - 'asc': sqlalchemy.asc, - 'desc': sqlalchemy.desc, - }[current_sort_dir] - except KeyError: - raise ValueError(_("Unknown sort direction, " - "must be 'desc' or 'asc'")) - try: - sort_key_attr = getattr(model, current_sort_key) - except AttributeError: - raise InvalidSortKey() - query = query.order_by(sort_dir_func(sort_key_attr)) - - # Add pagination - if marker is not None: - marker_values = [] - for sort_key in sort_keys: - v = getattr(marker, sort_key) - marker_values.append(v) - - # Build up an array of sort criteria as in the docstring - criteria_list = [] - for i in range(len(sort_keys)): - crit_attrs = [] - for j in range(i): - model_attr = getattr(model, sort_keys[j]) - crit_attrs.append((model_attr == marker_values[j])) - - model_attr = getattr(model, sort_keys[i]) - if sort_dirs[i] == 'desc': - crit_attrs.append((model_attr < marker_values[i])) - else: - crit_attrs.append((model_attr > marker_values[i])) - - criteria = sqlalchemy.sql.and_(*crit_attrs) - criteria_list.append(criteria) - - f = sqlalchemy.sql.or_(*criteria_list) - query = query.filter(f) - - if limit is not None: - query = query.limit(limit) - - return query - - -def _read_deleted_filter(query, db_model, read_deleted): - if 'deleted' not in db_model.__table__.columns: - raise ValueError(_("There is no `deleted` column in `%s` table. " - "Project doesn't use soft-deleted feature.") - % db_model.__name__) - - default_deleted_value = db_model.__table__.c.deleted.default.arg - if read_deleted == 'no': - query = query.filter(db_model.deleted == default_deleted_value) - elif read_deleted == 'yes': - pass # omit the filter to include deleted and active - elif read_deleted == 'only': - query = query.filter(db_model.deleted != default_deleted_value) - else: - raise ValueError(_("Unrecognized read_deleted value '%s'") - % read_deleted) - return query - - -def _project_filter(query, db_model, context, project_only): - if project_only and 'project_id' not in db_model.__table__.columns: - raise ValueError(_("There is no `project_id` column in `%s` table.") - % db_model.__name__) - - if request_context.is_user_context(context) and project_only: - if project_only == 'allow_none': - is_none = None - query = query.filter(or_(db_model.project_id == context.project_id, - db_model.project_id == is_none)) - else: - query = query.filter(db_model.project_id == context.project_id) - - return query - - -def model_query(context, model, session, args=None, project_only=False, - read_deleted=None): - """Query helper that accounts for context's `read_deleted` field. - - :param context: context to query under - - :param model: Model to query. Must be a subclass of ModelBase. - :type model: models.ModelBase - - :param session: The session to use. - :type session: sqlalchemy.orm.session.Session - - :param args: Arguments to query. If None - model is used. 
- :type args: tuple - - :param project_only: If present and context is user-type, then restrict - query to match the context's project_id. If set to - 'allow_none', restriction includes project_id = None. - :type project_only: bool - - :param read_deleted: If present, overrides context's read_deleted field. - :type read_deleted: bool - - Usage: - - ..code:: python - - result = (utils.model_query(context, models.Instance, session=session) - .filter_by(uuid=instance_uuid) - .all()) - - query = utils.model_query( - context, Node, - session=session, - args=(func.count(Node.id), func.sum(Node.ram)) - ).filter_by(project_id=project_id) - - """ - - if not read_deleted: - if hasattr(context, 'read_deleted'): - # NOTE(viktors): some projects use `read_deleted` attribute in - # their contexts instead of `show_deleted`. - read_deleted = context.read_deleted - else: - read_deleted = context.show_deleted - - if not issubclass(model, models.ModelBase): - raise TypeError(_("model should be a subclass of ModelBase")) - - query = session.query(model) if not args else session.query(*args) - query = _read_deleted_filter(query, model, read_deleted) - query = _project_filter(query, model, context, project_only) - - return query - - -def get_table(engine, name): - """Returns an sqlalchemy table dynamically from db. - - Needed because the models don't work for us in migrations - as models will be far out of sync with the current data. - """ - metadata = MetaData() - metadata.bind = engine - return Table(name, metadata, autoload=True) - - -class InsertFromSelect(UpdateBase): - """Form the base for `INSERT INTO table (SELECT ... )` statement.""" - def __init__(self, table, select): - self.table = table - self.select = select - - -@compiles(InsertFromSelect) -def visit_insert_from_select(element, compiler, **kw): - """Form the `INSERT INTO table (SELECT ... )` statement.""" - return "INSERT INTO %s %s" % ( - compiler.process(element.table, asfrom=True), - compiler.process(element.select)) - - -class ColumnError(Exception): - """Error raised when no column or an invalid column is found.""" - - -def _get_not_supported_column(col_name_col_instance, column_name): - try: - column = col_name_col_instance[column_name] - except KeyError: - msg = _("Please specify column %s in col_name_col_instance " - "param. It is required because column has unsupported " - "type by sqlite).") - raise ColumnError(msg % column_name) - - if not isinstance(column, Column): - msg = _("col_name_col_instance param has wrong type of " - "column instance for column %s It should be instance " - "of sqlalchemy.Column.") - raise ColumnError(msg % column_name) - return column - - -def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns, - **col_name_col_instance): - """Drop unique constraint from table. - - DEPRECATED: this function is deprecated and will be removed from cerberus.db - in a few releases. Please use UniqueConstraint.drop() method directly for - sqlalchemy-migrate migration scripts. - - This method drops UC from table and works for mysql, postgresql and sqlite. - In mysql and postgresql we are able to use "alter table" construction. - Sqlalchemy doesn't support some sqlite column types and replaces their - type with NullType in metadata. We process these columns and replace - NullType with the correct column type. - - :param migrate_engine: sqlalchemy engine - :param table_name: name of table that contains uniq constraint. - :param uc_name: name of uniq constraint that will be dropped. 
- :param columns: columns that are in uniq constraint. - :param col_name_col_instance: contains pair column_name=column_instance. - column_instance is instance of Column. These params - are required only for columns that have unsupported - types by sqlite. For example BigInteger. - """ - - from migrate.changeset import UniqueConstraint - - meta = MetaData() - meta.bind = migrate_engine - t = Table(table_name, meta, autoload=True) - - if migrate_engine.name == "sqlite": - override_cols = [ - _get_not_supported_column(col_name_col_instance, col.name) - for col in t.columns - if isinstance(col.type, NullType) - ] - for col in override_cols: - t.columns.replace(col) - - uc = UniqueConstraint(*columns, table=t, name=uc_name) - uc.drop() - - -def drop_old_duplicate_entries_from_table(migrate_engine, table_name, - use_soft_delete, *uc_column_names): - """Drop all old rows having the same values for columns in uc_columns. - - This method drop (or mark ad `deleted` if use_soft_delete is True) old - duplicate rows form table with name `table_name`. - - :param migrate_engine: Sqlalchemy engine - :param table_name: Table with duplicates - :param use_soft_delete: If True - values will be marked as `deleted`, - if False - values will be removed from table - :param uc_column_names: Unique constraint columns - """ - meta = MetaData() - meta.bind = migrate_engine - - table = Table(table_name, meta, autoload=True) - columns_for_group_by = [table.c[name] for name in uc_column_names] - - columns_for_select = [func.max(table.c.id)] - columns_for_select.extend(columns_for_group_by) - - duplicated_rows_select = sqlalchemy.sql.select( - columns_for_select, group_by=columns_for_group_by, - having=func.count(table.c.id) > 1) - - for row in migrate_engine.execute(duplicated_rows_select): - # NOTE(boris-42): Do not remove row that has the biggest ID. 
- delete_condition = table.c.id != row[0]
- is_none = None # workaround for pyflakes
- delete_condition &= table.c.deleted_at == is_none
- for name in uc_column_names:
- delete_condition &= table.c[name] == row[name]
-
- rows_to_delete_select = sqlalchemy.sql.select(
- [table.c.id]).where(delete_condition)
- for row in migrate_engine.execute(rows_to_delete_select).fetchall():
- LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: "
- "%(table)s") % dict(id=row[0], table=table_name))
-
- if use_soft_delete:
- delete_statement = table.update().\
- where(delete_condition).\
- values({
- 'deleted': literal_column('id'),
- 'updated_at': literal_column('updated_at'),
- 'deleted_at': timeutils.utcnow()
- })
- else:
- delete_statement = table.delete().where(delete_condition)
- migrate_engine.execute(delete_statement)
-
-
-def _get_default_deleted_value(table):
- if isinstance(table.c.id.type, Integer):
- return 0
- if isinstance(table.c.id.type, String):
- return ""
- raise ColumnError(_("Unsupported id columns type"))
-
-
-def _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes):
- table = get_table(migrate_engine, table_name)
-
- insp = reflection.Inspector.from_engine(migrate_engine)
- real_indexes = insp.get_indexes(table_name)
- existing_index_names = dict(
- [(index['name'], index['column_names']) for index in real_indexes])
-
- # NOTE(boris-42): Restore indexes on `deleted` column
- for index in indexes:
- if 'deleted' not in index['column_names']:
- continue
- name = index['name']
- if name in existing_index_names:
- column_names = [table.c[c] for c in existing_index_names[name]]
- old_index = Index(name, *column_names, unique=index["unique"])
- old_index.drop(migrate_engine)
-
- column_names = [table.c[c] for c in index['column_names']]
- new_index = Index(index["name"], *column_names, unique=index["unique"])
- new_index.create(migrate_engine)
-
-
-def change_deleted_column_type_to_boolean(migrate_engine, table_name,
- **col_name_col_instance):
- if migrate_engine.name == "sqlite":
- return _change_deleted_column_type_to_boolean_sqlite(
- migrate_engine, table_name, **col_name_col_instance)
- insp = reflection.Inspector.from_engine(migrate_engine)
- indexes = insp.get_indexes(table_name)
-
- table = get_table(migrate_engine, table_name)
-
- old_deleted = Column('old_deleted', Boolean, default=False)
- old_deleted.create(table, populate_default=False)
-
- table.update().\
- where(table.c.deleted == table.c.id).\
- values(old_deleted=True).\
- execute()
-
- table.c.deleted.drop()
- table.c.old_deleted.alter(name="deleted")
-
- _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes)
-
-
-def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name,
- **col_name_col_instance):
- insp = reflection.Inspector.from_engine(migrate_engine)
- table = get_table(migrate_engine, table_name)
-
- columns = []
- for column in table.columns:
- column_copy = None
- if column.name != "deleted":
- if isinstance(column.type, NullType):
- column_copy = _get_not_supported_column(col_name_col_instance,
- column.name)
- else:
- column_copy = column.copy()
- else:
- column_copy = Column('deleted', Boolean, default=0)
- columns.append(column_copy)
-
- constraints = [constraint.copy() for constraint in table.constraints]
-
- meta = table.metadata
- new_table = Table(table_name + "__tmp__", meta,
- *(columns + constraints))
- new_table.create()
-
- indexes = []
- for index in insp.get_indexes(table_name):
- column_names = [new_table.c[c] for c in index['column_names']]
- indexes.append(Index(index["name"], *column_names,
- unique=index["unique"]))
-
- c_select = []
- for c in table.c:
- if c.name != "deleted":
- c_select.append(c)
- else:
- c_select.append(table.c.deleted == table.c.id)
-
- ins = InsertFromSelect(new_table, sqlalchemy.sql.select(c_select))
- migrate_engine.execute(ins)
-
- table.drop()
- [index.create(migrate_engine) for index in indexes]
-
- new_table.rename(table_name)
- new_table.update().\
- where(new_table.c.deleted == new_table.c.id).\
- values(deleted=True).\
- execute()
-
-
-def change_deleted_column_type_to_id_type(migrate_engine, table_name,
- **col_name_col_instance):
- if migrate_engine.name == "sqlite":
- return _change_deleted_column_type_to_id_type_sqlite(
- migrate_engine, table_name, **col_name_col_instance)
- insp = reflection.Inspector.from_engine(migrate_engine)
- indexes = insp.get_indexes(table_name)
-
- table = get_table(migrate_engine, table_name)
-
- new_deleted = Column('new_deleted', table.c.id.type,
- default=_get_default_deleted_value(table))
- new_deleted.create(table, populate_default=True)
-
- deleted = True # workaround for pyflakes
- table.update().\
- where(table.c.deleted == deleted).\
- values(new_deleted=table.c.id).\
- execute()
- table.c.deleted.drop()
- table.c.new_deleted.alter(name="deleted")
-
- _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes)
-
-
-def _change_deleted_column_type_to_id_type_sqlite(migrate_engine, table_name,
- **col_name_col_instance):
- # NOTE(boris-42): sqlaclhemy-migrate can't drop column with check
- # constraints in sqlite DB and our `deleted` column has
- # 2 check constraints. So there is only one way to remove
- # these constraints:
- # 1) Create new table with the same columns, constraints
- # and indexes. (except deleted column).
- # 2) Copy all data from old to new table.
- # 3) Drop old table.
- # 4) Rename new table to old table name.
- insp = reflection.Inspector.from_engine(migrate_engine)
- meta = MetaData(bind=migrate_engine)
- table = Table(table_name, meta, autoload=True)
- default_deleted_value = _get_default_deleted_value(table)
-
- columns = []
- for column in table.columns:
- column_copy = None
- if column.name != "deleted":
- if isinstance(column.type, NullType):
- column_copy = _get_not_supported_column(col_name_col_instance,
- column.name)
- else:
- column_copy = column.copy()
- else:
- column_copy = Column('deleted', table.c.id.type,
- default=default_deleted_value)
- columns.append(column_copy)
-
- def is_deleted_column_constraint(constraint):
- # NOTE(boris-42): There is no other way to check is CheckConstraint
- # associated with deleted column.
- if not isinstance(constraint, CheckConstraint): - return False - sqltext = str(constraint.sqltext) - return (sqltext.endswith("deleted in (0, 1)") or - sqltext.endswith("deleted IN (:deleted_1, :deleted_2)")) - - constraints = [] - for constraint in table.constraints: - if not is_deleted_column_constraint(constraint): - constraints.append(constraint.copy()) - - new_table = Table(table_name + "__tmp__", meta, - *(columns + constraints)) - new_table.create() - - indexes = [] - for index in insp.get_indexes(table_name): - column_names = [new_table.c[c] for c in index['column_names']] - indexes.append(Index(index["name"], *column_names, - unique=index["unique"])) - - ins = InsertFromSelect(new_table, table.select()) - migrate_engine.execute(ins) - - table.drop() - [index.create(migrate_engine) for index in indexes] - - new_table.rename(table_name) - deleted = True # workaround for pyflakes - new_table.update().\ - where(new_table.c.deleted == deleted).\ - values(deleted=new_table.c.id).\ - execute() - - # NOTE(boris-42): Fix value of deleted column: False -> "" or 0. - deleted = False # workaround for pyflakes - new_table.update().\ - where(new_table.c.deleted == deleted).\ - values(deleted=default_deleted_value).\ - execute() - - -def get_connect_string(backend, database, user=None, passwd=None): - """Get database connection - - Try to get a connection with a very specific set of values, if we get - these then we'll run the tests, otherwise they are skipped - """ - args = {'backend': backend, - 'user': user, - 'passwd': passwd, - 'database': database} - if backend == 'sqlite': - template = '%(backend)s:///%(database)s' - else: - template = "%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" - return template % args - - -def is_backend_avail(backend, database, user=None, passwd=None): - try: - connect_uri = get_connect_string(backend=backend, - database=database, - user=user, - passwd=passwd) - engine = sqlalchemy.create_engine(connect_uri) - connection = engine.connect() - except Exception: - # intentionally catch all to handle exceptions even if we don't - # have any backend code loaded. - return False - else: - connection.close() - engine.dispose() - return True - - -def get_db_connection_info(conn_pieces): - database = conn_pieces.path.strip('/') - loc_pieces = conn_pieces.netloc.split('@') - host = loc_pieces[1] - - auth_pieces = loc_pieces[0].split(':') - user = auth_pieces[0] - password = "" - if len(auth_pieces) > 1: - password = auth_pieces[1].strip() - - return (user, password, database, host) diff --git a/cerberus/openstack/common/eventlet_backdoor.py b/cerberus/openstack/common/eventlet_backdoor.py deleted file mode 100644 index 95b443b..0000000 --- a/cerberus/openstack/common/eventlet_backdoor.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright (c) 2012 OpenStack Foundation. -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
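The keyset pagination contract described in the paginate_query docstring above looked like this from the caller's side. A sketch only — session, models.Instance and marker_id are stand-ins for whatever the calling project used, not names from this tree::

    from cerberus.openstack.common.db.sqlalchemy import utils

    marker = None
    if marker_id is not None:
        # The client-facing marker is an id; fetch the actual row first.
        marker = session.query(models.Instance).get(marker_id)

    # 'id' is included in sort_keys so the ordering is unique.
    query = utils.paginate_query(session.query(models.Instance),
                                 models.Instance,
                                 limit=20,
                                 sort_keys=['created_at', 'id'],
                                 marker=marker,
                                 sort_dir='desc')
    page = query.all()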
- -from __future__ import print_function - -import errno -import gc -import os -import pprint -import socket -import sys -import traceback - -import eventlet -import eventlet.backdoor -import greenlet -from oslo.config import cfg - -from cerberus.openstack.common.gettextutils import _LI -from cerberus.openstack.common import log as logging - -help_for_backdoor_port = ( - "Acceptable values are 0, , and :, where 0 results " - "in listening on a random tcp port number; results in listening " - "on the specified port number (and not enabling backdoor if that port " - "is in use); and : results in listening on the smallest " - "unused port number within the specified range of port numbers. The " - "chosen port is displayed in the service's log file.") -eventlet_backdoor_opts = [ - cfg.StrOpt('backdoor_port', - default=None, - help="Enable eventlet backdoor. %s" % help_for_backdoor_port) -] - -CONF = cfg.CONF -CONF.register_opts(eventlet_backdoor_opts) -LOG = logging.getLogger(__name__) - - -class EventletBackdoorConfigValueError(Exception): - def __init__(self, port_range, help_msg, ex): - msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. ' - '%(help)s' % - {'range': port_range, 'ex': ex, 'help': help_msg}) - super(EventletBackdoorConfigValueError, self).__init__(msg) - self.port_range = port_range - - -def _dont_use_this(): - print("Don't use this, just disconnect instead") - - -def _find_objects(t): - return [o for o in gc.get_objects() if isinstance(o, t)] - - -def _print_greenthreads(): - for i, gt in enumerate(_find_objects(greenlet.greenlet)): - print(i, gt) - traceback.print_stack(gt.gr_frame) - print() - - -def _print_nativethreads(): - for threadId, stack in sys._current_frames().items(): - print(threadId) - traceback.print_stack(stack) - print() - - -def _parse_port_range(port_range): - if ':' not in port_range: - start, end = port_range, port_range - else: - start, end = port_range.split(':', 1) - try: - start, end = int(start), int(end) - if end < start: - raise ValueError - return start, end - except ValueError as ex: - raise EventletBackdoorConfigValueError(port_range, ex, - help_for_backdoor_port) - - -def _listen(host, start_port, end_port, listen_func): - try_port = start_port - while True: - try: - return listen_func((host, try_port)) - except socket.error as exc: - if (exc.errno != errno.EADDRINUSE or - try_port >= end_port): - raise - try_port += 1 - - -def initialize_if_enabled(): - backdoor_locals = { - 'exit': _dont_use_this, # So we don't exit the entire process - 'quit': _dont_use_this, # So we don't exit the entire process - 'fo': _find_objects, - 'pgt': _print_greenthreads, - 'pnt': _print_nativethreads, - } - - if CONF.backdoor_port is None: - return None - - start_port, end_port = _parse_port_range(str(CONF.backdoor_port)) - - # NOTE(johannes): The standard sys.displayhook will print the value of - # the last expression and set it to __builtin__._, which overwrites - # the __builtin__._ that gettext sets. Let's switch to using pprint - # since it won't interact poorly with gettext, and it's easier to - # read the output too. - def displayhook(val): - if val is not None: - pprint.pprint(val) - sys.displayhook = displayhook - - sock = _listen('localhost', start_port, end_port, eventlet.listen) - - # In the case of backdoor port being zero, a port number is assigned by - # listen(). In any case, pull the port number out here. 
- port = sock.getsockname()[1] - LOG.info( - _LI('Eventlet backdoor listening on %(port)s for process %(pid)d') % - {'port': port, 'pid': os.getpid()} - ) - eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock, - locals=backdoor_locals) - return port diff --git a/cerberus/openstack/common/excutils.py b/cerberus/openstack/common/excutils.py deleted file mode 100644 index 01f8b8e..0000000 --- a/cerberus/openstack/common/excutils.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# Copyright 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Exception related utilities. -""" - -import logging -import sys -import time -import traceback - -import six - -from cerberus.openstack.common.gettextutils import _LE - - -class save_and_reraise_exception(object): - """Save current exception, run some code and then re-raise. - - In some cases the exception context can be cleared, resulting in None - being attempted to be re-raised after an exception handler is run. This - can happen when eventlet switches greenthreads or when running an - exception handler, code raises and catches an exception. In both - cases the exception context will be cleared. - - To work around this, we save the exception state, run handler code, and - then re-raise the original exception. If another exception occurs, the - saved exception is logged and the new exception is re-raised. - - In some cases the caller may not want to re-raise the exception, and - for those circumstances this context provides a reraise flag that - can be used to suppress the exception. For example:: - - except Exception: - with save_and_reraise_exception() as ctxt: - decide_if_need_reraise() - if not should_be_reraised: - ctxt.reraise = False - - If another exception occurs and reraise flag is False, - the saved exception will not be logged. 
- - If the caller wants to raise new exception during exception handling - he/she sets reraise to False initially with an ability to set it back to - True if needed:: - - except Exception: - with save_and_reraise_exception(reraise=False) as ctxt: - [if statements to determine whether to raise a new exception] - # Not raising a new exception, so reraise - ctxt.reraise = True - """ - def __init__(self, reraise=True): - self.reraise = reraise - - def __enter__(self): - self.type_, self.value, self.tb, = sys.exc_info() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None: - if self.reraise: - logging.error(_LE('Original exception being dropped: %s'), - traceback.format_exception(self.type_, - self.value, - self.tb)) - return False - if self.reraise: - six.reraise(self.type_, self.value, self.tb) - - -def forever_retry_uncaught_exceptions(infunc): - def inner_func(*args, **kwargs): - last_log_time = 0 - last_exc_message = None - exc_count = 0 - while True: - try: - return infunc(*args, **kwargs) - except Exception as exc: - this_exc_message = six.u(str(exc)) - if this_exc_message == last_exc_message: - exc_count += 1 - else: - exc_count = 1 - # Do not log any more frequently than once a minute unless - # the exception message changes - cur_time = int(time.time()) - if (cur_time - last_log_time > 60 or - this_exc_message != last_exc_message): - logging.exception( - _LE('Unexpected exception occurred %d time(s)... ' - 'retrying.') % exc_count) - last_log_time = cur_time - last_exc_message = this_exc_message - exc_count = 0 - # This should be a very rare event. In case it isn't, do - # a sleep. - time.sleep(1) - return inner_func diff --git a/cerberus/openstack/common/fileutils.py b/cerberus/openstack/common/fileutils.py deleted file mode 100644 index 2b804a2..0000000 --- a/cerberus/openstack/common/fileutils.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import contextlib -import errno -import os -import tempfile - -from cerberus.openstack.common import excutils -from cerberus.openstack.common import log as logging - -LOG = logging.getLogger(__name__) - -_FILE_CACHE = {} - - -def ensure_tree(path): - """Create a directory (and any ancestor directories required) - - :param path: Directory to create - """ - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST: - if not os.path.isdir(path): - raise - else: - raise - - -def read_cached_file(filename, force_reload=False): - """Read from a file if it has been modified. - - :param force_reload: Whether to reload the file. - :returns: A tuple with a boolean specifying if the data is fresh - or not. 
- """ - global _FILE_CACHE - - if force_reload and filename in _FILE_CACHE: - del _FILE_CACHE[filename] - - reloaded = False - mtime = os.path.getmtime(filename) - cache_info = _FILE_CACHE.setdefault(filename, {}) - - if not cache_info or mtime > cache_info.get('mtime', 0): - LOG.debug("Reloading cached file %s" % filename) - with open(filename) as fap: - cache_info['data'] = fap.read() - cache_info['mtime'] = mtime - reloaded = True - return (reloaded, cache_info['data']) - - -def delete_if_exists(path, remove=os.unlink): - """Delete a file, but ignore file not found error. - - :param path: File to delete - :param remove: Optional function to remove passed path - """ - - try: - remove(path) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - -@contextlib.contextmanager -def remove_path_on_error(path, remove=delete_if_exists): - """Protect code that wants to operate on PATH atomically. - Any exception will cause PATH to be removed. - - :param path: File to work with - :param remove: Optional function to remove passed path - """ - - try: - yield - except Exception: - with excutils.save_and_reraise_exception(): - remove(path) - - -def file_open(*args, **kwargs): - """Open file - - see built-in file() documentation for more details - - Note: The reason this is kept in a separate module is to easily - be able to provide a stub module that doesn't alter system - state at all (for unit tests) - """ - return file(*args, **kwargs) - - -def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): - """Create temporary file or use existing file. - - This util is needed for creating temporary file with - specified content, suffix and prefix. If path is not None, - it will be used for writing content. If the path doesn't - exist it'll be created. - - :param content: content for temporary file. - :param path: same as parameter 'dir' for mkstemp - :param suffix: same as parameter 'suffix' for mkstemp - :param prefix: same as parameter 'prefix' for mkstemp - - For example: it can be used in database tests for creating - configuration files. - """ - if path: - ensure_tree(path) - - (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix) - try: - os.write(fd, content) - finally: - os.close(fd) - return path diff --git a/cerberus/openstack/common/fixture/__init__.py b/cerberus/openstack/common/fixture/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/openstack/common/fixture/config.py b/cerberus/openstack/common/fixture/config.py deleted file mode 100644 index 9489b85..0000000 --- a/cerberus/openstack/common/fixture/config.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# Copyright 2013 Mirantis, Inc. -# Copyright 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import fixtures -from oslo.config import cfg -import six - - -class Config(fixtures.Fixture): - """Allows overriding configuration settings for the test. - - `conf` will be reset on cleanup. 
- - """ - - def __init__(self, conf=cfg.CONF): - self.conf = conf - - def setUp(self): - super(Config, self).setUp() - # NOTE(morganfainberg): unregister must be added to cleanup before - # reset is because cleanup works in reverse order of registered items, - # and a reset must occur before unregistering options can occur. - self.addCleanup(self._unregister_config_opts) - self.addCleanup(self.conf.reset) - self._registered_config_opts = {} - - def config(self, **kw): - """Override configuration values. - - The keyword arguments are the names of configuration options to - override and their values. - - If a `group` argument is supplied, the overrides are applied to - the specified configuration option group, otherwise the overrides - are applied to the ``default`` group. - - """ - - group = kw.pop('group', None) - for k, v in six.iteritems(kw): - self.conf.set_override(k, v, group) - - def _unregister_config_opts(self): - for group in self._registered_config_opts: - self.conf.unregister_opts(self._registered_config_opts[group], - group=group) - - def register_opt(self, opt, group=None): - """Register a single option for the test run. - - Options registered in this manner will automatically be unregistered - during cleanup. - - If a `group` argument is supplied, it will register the new option - to that group, otherwise the option is registered to the ``default`` - group. - """ - self.conf.register_opt(opt, group=group) - self._registered_config_opts.setdefault(group, set()).add(opt) - - def register_opts(self, opts, group=None): - """Register multiple options for the test run. - - This works in the same manner as register_opt() but takes a list of - options as the first argument. All arguments will be registered to the - same group if the ``group`` argument is supplied, otherwise all options - will be registered to the ``default`` group. - """ - for opt in opts: - self.register_opt(opt, group=group) diff --git a/cerberus/openstack/common/fixture/lockutils.py b/cerberus/openstack/common/fixture/lockutils.py deleted file mode 100644 index 2ecd0dc..0000000 --- a/cerberus/openstack/common/fixture/lockutils.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import fixtures - -from cerberus.openstack.common import lockutils - - -class LockFixture(fixtures.Fixture): - """External locking fixture. - - This fixture is basically an alternative to the synchronized decorator with - the external flag so that tearDowns and addCleanups will be included in - the lock context for locking between tests. The fixture is recommended to - be the first line in a test method, like so:: - - def test_method(self): - self.useFixture(LockFixture) - ... - - or the first line in setUp if all the test methods in the class are - required to be serialized. Something like:: - - class TestCase(testtools.testcase): - def setUp(self): - self.useFixture(LockFixture) - super(TestCase, self).setUp() - ... 
- - This is because addCleanups are put on a LIFO queue that gets run after the - test method exits (either by completing or raising an exception). - """ - def __init__(self, name, lock_file_prefix=None): - self.mgr = lockutils.lock(name, lock_file_prefix, True) - - def setUp(self): - super(LockFixture, self).setUp() - self.addCleanup(self.mgr.__exit__, None, None, None) - self.lock = self.mgr.__enter__() diff --git a/cerberus/openstack/common/fixture/logging.py b/cerberus/openstack/common/fixture/logging.py deleted file mode 100644 index 3823a03..0000000 --- a/cerberus/openstack/common/fixture/logging.py +++ /dev/null @@ -1,34 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import fixtures - - -def get_logging_handle_error_fixture(): - """Returns a fixture to make logging raise formatting exceptions. - - Usage: - self.useFixture(logging.get_logging_handle_error_fixture()) - """ - return fixtures.MonkeyPatch('logging.Handler.handleError', - _handleError) - - -def _handleError(self, record): - """Monkey patch for logging.Handler.handleError. - - The default handleError just logs the error to stderr but we want - the option of actually raising an exception. - """ - raise diff --git a/cerberus/openstack/common/fixture/mockpatch.py b/cerberus/openstack/common/fixture/mockpatch.py deleted file mode 100644 index f6316ef..0000000 --- a/cerberus/openstack/common/fixture/mockpatch.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -############################################################################## -############################################################################## -## -## DO NOT MODIFY THIS FILE -## -## This file is being graduated to the cerberustest library. Please make all -## changes there, and only backport critical fixes here.
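A sketch of LockFixture as the docstring above recommends; the lock name is hypothetical, and an inter-process lock backend (CERBERUS_LOCK_PATH or posix_ipc) is assumed to be available::

    import testtools

    from cerberus.openstack.common.fixture import lockutils


    class SerializedTests(testtools.TestCase):
        def setUp(self):
            # Take the lock before any other cleanup is registered, so that
            # all later cleanups run while the lock is still held (LIFO).
            self.useFixture(lockutils.LockFixture('external-db'))
            super(SerializedTests, self).setUp()

        def test_only_one_worker_at_a_time(self):
            pass  # body runs under the external 'external-db' lock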
- dhellmann -## -############################################################################## -############################################################################## - -import fixtures -import mock - - -class PatchObject(fixtures.Fixture): - """Deal with code around mock.""" - - def __init__(self, obj, attr, new=mock.DEFAULT, **kwargs): - self.obj = obj - self.attr = attr - self.kwargs = kwargs - self.new = new - - def setUp(self): - super(PatchObject, self).setUp() - _p = mock.patch.object(self.obj, self.attr, self.new, **self.kwargs) - self.mock = _p.start() - self.addCleanup(_p.stop) - - -class Patch(fixtures.Fixture): - - """Deal with code around mock.patch.""" - - def __init__(self, obj, new=mock.DEFAULT, **kwargs): - self.obj = obj - self.kwargs = kwargs - self.new = new - - def setUp(self): - super(Patch, self).setUp() - _p = mock.patch(self.obj, self.new, **self.kwargs) - self.mock = _p.start() - self.addCleanup(_p.stop) diff --git a/cerberus/openstack/common/fixture/moxstubout.py b/cerberus/openstack/common/fixture/moxstubout.py deleted file mode 100644 index 15b35bd..0000000 --- a/cerberus/openstack/common/fixture/moxstubout.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -############################################################################## -############################################################################## -## -## DO NOT MODIFY THIS FILE -## -## This file is being graduated to the cerberustest library. Please make all -## changes there, and only backport critical fixes here. - dhellmann -## -############################################################################## -############################################################################## - -import fixtures -from six.moves import mox - - -class MoxStubout(fixtures.Fixture): - """Deal with code around mox and stubout as a fixture.""" - - def setUp(self): - super(MoxStubout, self).setUp() - # emulate some of the mox stuff, we can't use the metaclass - # because it screws with our generators - self.mox = mox.Mox() - self.stubs = self.mox.stubs - self.addCleanup(self.mox.UnsetStubs) - self.addCleanup(self.mox.VerifyAll) diff --git a/cerberus/openstack/common/gettextutils.py b/cerberus/openstack/common/gettextutils.py deleted file mode 100644 index a69ed04..0000000 --- a/cerberus/openstack/common/gettextutils.py +++ /dev/null @@ -1,448 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# Copyright 2013 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
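A sketch of the Patch fixture above in a testtools test; patching time.time is just an example target::

    import time

    import testtools

    from cerberus.openstack.common.fixture import mockpatch


    class TestClock(testtools.TestCase):
        def test_frozen_time(self):
            # The patch is reverted automatically via addCleanup when the
            # test finishes.
            fake = self.useFixture(
                mockpatch.Patch('time.time', return_value=1400000000.0))
            self.assertEqual(1400000000.0, time.time())
            fake.mock.assert_called_once_with()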
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -gettext for openstack-common modules. - -Usual usage in an openstack.common module: - - from cerberus.openstack.common.gettextutils import _ -""" - -import copy -import functools -import gettext -import locale -from logging import handlers -import os - -from babel import localedata -import six - -_localedir = os.environ.get('cerberus'.upper() + '_LOCALEDIR') -_t = gettext.translation('cerberus', localedir=_localedir, fallback=True) - -# We use separate translation catalogs for each log level, so set up a -# mapping between the log level name and the translator. The domain -# for the log level is project_name + "-log-" + log_level so messages -# for each level end up in their own catalog. -_t_log_levels = dict( - (level, gettext.translation('cerberus' + '-log-' + level, - localedir=_localedir, - fallback=True)) - for level in ['info', 'warning', 'error', 'critical'] -) - -_AVAILABLE_LANGUAGES = {} -USE_LAZY = False - - -def enable_lazy(): - """Convenience function for configuring _() to use lazy gettext - - Call this at the start of execution to enable the gettextutils._ - function to use lazy gettext functionality. This is useful if - your project is importing _ directly instead of using the - gettextutils.install() way of importing the _ function. - """ - global USE_LAZY - USE_LAZY = True - - -def _(msg): - if USE_LAZY: - return Message(msg, domain='cerberus') - else: - if six.PY3: - return _t.gettext(msg) - return _t.ugettext(msg) - - -def _log_translation(msg, level): - """Build a single translation of a log message - """ - if USE_LAZY: - return Message(msg, domain='cerberus' + '-log-' + level) - else: - translator = _t_log_levels[level] - if six.PY3: - return translator.gettext(msg) - return translator.ugettext(msg) - -# Translators for log levels. -# -# The abbreviated names are meant to reflect the usual use of a short -# name like '_'. The "L" is for "log" and the other letter comes from -# the level. -_LI = functools.partial(_log_translation, level='info') -_LW = functools.partial(_log_translation, level='warning') -_LE = functools.partial(_log_translation, level='error') -_LC = functools.partial(_log_translation, level='critical') - - -def install(domain, lazy=False): - """Install a _() function using the given translation domain. - - Given a translation domain, install a _() function using gettext's - install() function. - - The main difference from gettext.install() is that we allow - overriding the default localedir (e.g. /usr/share/locale) using - a translation-domain-specific environment variable (e.g. - NOVA_LOCALEDIR). - - :param domain: the translation domain - :param lazy: indicates whether or not to install the lazy _() function. - The lazy _() introduces a way to do deferred translation - of messages by installing a _ that builds Message objects, - instead of strings, which can then be lazily translated into - any available locale. - """ - if lazy: - # NOTE(mrodden): Lazy gettext functionality. - # - # The following introduces a deferred way to do translations on - # messages in OpenStack. 
We override the standard _() function - # and % (format string) operation to build Message objects that can - # later be translated when we have more information. - def _lazy_gettext(msg): - """Create and return a Message object. - - Lazy gettext function for a given domain, it is a factory method - for a project/module to get a lazy gettext function for its own - translation domain (i.e. nova, glance, cinder, etc.) - - Message encapsulates a string so that we can translate - it later when needed. - """ - return Message(msg, domain=domain) - - from six import moves - moves.builtins.__dict__['_'] = _lazy_gettext - else: - localedir = '%s_LOCALEDIR' % domain.upper() - if six.PY3: - gettext.install(domain, - localedir=os.environ.get(localedir)) - else: - gettext.install(domain, - localedir=os.environ.get(localedir), - unicode=True) - - -class Message(six.text_type): - """A Message object is a unicode object that can be translated. - - Translation of Message is done explicitly using the translate() method. - For all non-translation intents and purposes, a Message is simply unicode, - and can be treated as such. - """ - - def __new__(cls, msgid, msgtext=None, params=None, - domain='cerberus', *args): - """Create a new Message object. - - In order for translation to work gettext requires a message ID, this - msgid will be used as the base unicode text. It is also possible - for the msgid and the base unicode text to be different by passing - the msgtext parameter. - """ - # If the base msgtext is not given, we use the default translation - # of the msgid (which is in English) just in case the system locale is - # not English, so that the base text will be in that locale by default. - if not msgtext: - msgtext = Message._translate_msgid(msgid, domain) - # We want to initialize the parent unicode with the actual object that - # would have been plain unicode if 'Message' was not enabled. - msg = super(Message, cls).__new__(cls, msgtext) - msg.msgid = msgid - msg.domain = domain - msg.params = params - return msg - - def translate(self, desired_locale=None): - """Translate this message to the desired locale. - - :param desired_locale: The desired locale to translate the message to, - if no locale is provided the message will be - translated to the system's default locale. - - :returns: the translated message in unicode - """ - - translated_message = Message._translate_msgid(self.msgid, - self.domain, - desired_locale) - if self.params is None: - # No need for more translation - return translated_message - - # This Message object may have been formatted with one or more - # Message objects as substitution arguments, given either as a single - # argument, part of a tuple, or as one or more values in a dictionary. 
- # When translating this Message we need to translate those Messages too - translated_params = _translate_args(self.params, desired_locale) - - translated_message = translated_message % translated_params - - return translated_message - - @staticmethod - def _translate_msgid(msgid, domain, desired_locale=None): - if not desired_locale: - system_locale = locale.getdefaultlocale() - # If the system locale is not available to the runtime use English - if not system_locale[0]: - desired_locale = 'en_US' - else: - desired_locale = system_locale[0] - - locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') - lang = gettext.translation(domain, - localedir=locale_dir, - languages=[desired_locale], - fallback=True) - if six.PY3: - translator = lang.gettext - else: - translator = lang.ugettext - - translated_message = translator(msgid) - return translated_message - - def __mod__(self, other): - # When we mod a Message we want the actual operation to be performed - # by the parent class (i.e. unicode()), the only thing we do here is - # save the original msgid and the parameters in case of a translation - params = self._sanitize_mod_params(other) - unicode_mod = super(Message, self).__mod__(params) - modded = Message(self.msgid, - msgtext=unicode_mod, - params=params, - domain=self.domain) - return modded - - def _sanitize_mod_params(self, other): - """Sanitize the object being modded with this Message. - - - Add support for modding 'None' so translation supports it - - Trim the modded object, which can be a large dictionary, to only - those keys that would actually be used in a translation - - Snapshot the object being modded, in case the message is - translated, it will be used as it was when the Message was created - """ - if other is None: - params = (other,) - elif isinstance(other, dict): - # Merge the dictionaries - # Copy each item in case one does not support deep copy. - params = {} - if isinstance(self.params, dict): - for key, val in self.params.items(): - params[key] = self._copy_param(val) - for key, val in other.items(): - params[key] = self._copy_param(val) - else: - params = self._copy_param(other) - return params - - def _copy_param(self, param): - try: - return copy.deepcopy(param) - except Exception: - # Fallback to casting to unicode this will handle the - # python code-like objects that can't be deep-copied - return six.text_type(param) - - def __add__(self, other): - msg = _('Message objects do not support addition.') - raise TypeError(msg) - - def __radd__(self, other): - return self.__add__(other) - - def __str__(self): - # NOTE(luisg): Logging in python 2.6 tries to str() log records, - # and it expects specifically a UnicodeError in order to proceed. - msg = _('Message objects do not support str() because they may ' - 'contain non-ascii characters. ' - 'Please use unicode() or translate() instead.') - raise UnicodeError(msg) - - -def get_available_languages(domain): - """Lists the available languages for the given translation domain. 
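A sketch of the lazy-translation flow the Message class implements; it assumes no translation catalogs are installed, so gettext falls back to the original English text::

    from cerberus.openstack.common import gettextutils

    gettextutils.enable_lazy()

    # With lazy mode enabled, _() returns a Message rather than a string.
    msg = gettextutils._('Instance %(id)s not found')

    # __mod__ snapshots the parameters instead of expanding them away,
    # so the same message can be rendered per locale later.
    msg = msg % {'id': 'abc-123'}

    print(msg.translate('es'))  # falls back to English without a catalog
    print(msg.translate())      # system default locale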
- - :param domain: the domain to get languages for - """ - if domain in _AVAILABLE_LANGUAGES: - return copy.copy(_AVAILABLE_LANGUAGES[domain]) - - localedir = '%s_LOCALEDIR' % domain.upper() - find = lambda x: gettext.find(domain, - localedir=os.environ.get(localedir), - languages=[x]) - - # NOTE(mrodden): en_US should always be available (and first in case - # order matters) since our in-line message strings are en_US - language_list = ['en_US'] - # NOTE(luisg): Babel <1.0 used a function called list(), which was - # renamed to locale_identifiers() in >=1.0, the requirements master list - # requires >=0.9.6, uncapped, so defensively work with both. We can remove - # this check when the master list updates to >=1.0, and update all projects - list_identifiers = (getattr(localedata, 'list', None) or - getattr(localedata, 'locale_identifiers')) - locale_identifiers = list_identifiers() - - for i in locale_identifiers: - if find(i) is not None: - language_list.append(i) - - # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported - # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they - # are perfectly legitimate locales: - # https://github.com/mitsuhiko/babel/issues/37 - # In Babel 1.3 they fixed the bug and they support these locales, but - # they are still not explicitly "listed" by locale_identifiers(). - # That is why we add the locales here explicitly if necessary so that - # they are listed as supported. - aliases = {'zh': 'zh_CN', - 'zh_Hant_HK': 'zh_HK', - 'zh_Hant': 'zh_TW', - 'fil': 'tl_PH'} - for (locale, alias) in six.iteritems(aliases): - if locale in language_list and alias not in language_list: - language_list.append(alias) - - _AVAILABLE_LANGUAGES[domain] = language_list - return copy.copy(language_list) - - -def translate(obj, desired_locale=None): - """Gets the translated unicode representation of the given object. - - If the object is not translatable it is returned as-is. - If the locale is None the object is translated to the system locale. - - :param obj: the object to translate - :param desired_locale: the locale to translate the message to, if None the - default system locale will be used - :returns: the translated object in unicode, or the original object if - it could not be translated - """ - message = obj - if not isinstance(message, Message): - # If the object to translate is not already translatable, - # let's first get its unicode representation - message = six.text_type(obj) - if isinstance(message, Message): - # Even after unicoding() we still need to check if we are - # running with translatable unicode before translating - return message.translate(desired_locale) - return obj - - -def _translate_args(args, desired_locale=None): - """Translates all the translatable elements of the given arguments object. - - This method is used for translating the translatable values in method - arguments which include values of tuples or dictionaries. - If the object is not a tuple or a dictionary the object itself is - translated if it is translatable. - - If the locale is None the object is translated to the system locale. 
- - :param args: the args to translate - :param desired_locale: the locale to translate the args to, if None the - default system locale will be used - :returns: a new args object with the translated contents of the original - """ - if isinstance(args, tuple): - return tuple(translate(v, desired_locale) for v in args) - if isinstance(args, dict): - translated_dict = {} - for (k, v) in six.iteritems(args): - translated_v = translate(v, desired_locale) - translated_dict[k] = translated_v - return translated_dict - return translate(args, desired_locale) - - -class TranslationHandler(handlers.MemoryHandler): - """Handler that translates records before logging them. - - The TranslationHandler takes a locale and a target logging.Handler object - to forward LogRecord objects to after translating them. This handler - depends on Message objects being logged, instead of regular strings. - - The handler can be configured declaratively in the logging.conf as follows: - - [handlers] - keys = translatedlog, translator - - [handler_translatedlog] - class = handlers.WatchedFileHandler - args = ('/var/log/api-localized.log',) - formatter = context - - [handler_translator] - class = openstack.common.log.TranslationHandler - target = translatedlog - args = ('zh_CN',) - - If the specified locale is not available in the system, the handler will - log in the default locale. - """ - - def __init__(self, locale=None, target=None): - """Initialize a TranslationHandler - - :param locale: locale to use for translating messages - :param target: logging.Handler object to forward - LogRecord objects to after translation - """ - # NOTE(luisg): In order to allow this handler to be a wrapper for - # other handlers, such as a FileHandler, and still be able to - # configure it using logging.conf, this handler has to extend - # MemoryHandler because only the MemoryHandlers' logging.conf - # parsing is implemented such that it accepts a target handler. - handlers.MemoryHandler.__init__(self, capacity=0, target=target) - self.locale = locale - - def setFormatter(self, fmt): - self.target.setFormatter(fmt) - - def emit(self, record): - # We save the message from the original record to restore it - # after translation, so other handlers are not affected by this - original_msg = record.msg - original_args = record.args - - try: - self._translate_and_log_record(record) - finally: - record.msg = original_msg - record.args = original_args - - def _translate_and_log_record(self, record): - record.msg = translate(record.msg, self.locale) - - # In addition to translating the message, we also need to translate - # arguments that were passed to the log method that were not part - # of the main message e.g., log.info(_('Some message %s'), this_one)) - record.args = _translate_args(record.args, self.locale) - - self.target.emit(record) diff --git a/cerberus/openstack/common/importutils.py b/cerberus/openstack/common/importutils.py deleted file mode 100644 index af78d95..0000000 --- a/cerberus/openstack/common/importutils.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Import related utilities and helper functions. -""" - -import sys -import traceback - - -def import_class(import_str): - """Returns a class from a string including module and class.""" - mod_str, _sep, class_str = import_str.rpartition('.') - try: - __import__(mod_str) - return getattr(sys.modules[mod_str], class_str) - except (ValueError, AttributeError): - raise ImportError('Class %s cannot be found (%s)' % - (class_str, - traceback.format_exception(*sys.exc_info()))) - - -def import_object(import_str, *args, **kwargs): - """Import a class and return an instance of it.""" - return import_class(import_str)(*args, **kwargs) - - -def import_object_ns(name_space, import_str, *args, **kwargs): - """Tries to import object from default namespace. - - Imports a class and return an instance of it, first by trying - to find the class in a default namespace, then failing back to - a full path if not found in the default namespace. - """ - import_value = "%s.%s" % (name_space, import_str) - try: - return import_class(import_value)(*args, **kwargs) - except ImportError: - return import_class(import_str)(*args, **kwargs) - - -def import_module(import_str): - """Import a module.""" - __import__(import_str) - return sys.modules[import_str] - - -def import_versioned_module(version, submodule=None): - module = 'cerberus.v%s' % version - if submodule: - module = '.'.join((module, submodule)) - return import_module(module) - - -def try_import(import_str, default=None): - """Try to import a module and if it fails return default.""" - try: - return import_module(import_str) - except ImportError: - return default diff --git a/cerberus/openstack/common/jsonutils.py b/cerberus/openstack/common/jsonutils.py deleted file mode 100644 index fa7073b..0000000 --- a/cerberus/openstack/common/jsonutils.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -''' -JSON related utilities. - -This module provides a few things: - - 1) A handy function for getting an object down to something that can be - JSON serialized. See to_primitive(). - - 2) Wrappers around loads() and dumps(). The dumps() wrapper will - automatically use to_primitive() for you if needed. - - 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson - is available. 
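A usage sketch for the importutils helpers above; datetime.datetime stands in for any dotted import path::

    from cerberus.openstack.common import importutils

    # Load a class from a dotted string, then instantiate it.
    dt_cls = importutils.import_class('datetime.datetime')
    stamp = dt_cls(2018, 1, 19)

    # One-step equivalent.
    stamp = importutils.import_object('datetime.datetime', 2018, 1, 19)

    # Optional-dependency pattern: returns None instead of raising.
    netaddr = importutils.try_import('netaddr')
    if netaddr is None:
        print('netaddr unavailable; skipping address validation')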
-''' - - -import codecs -import datetime -import functools -import inspect -import itertools -import sys - -if sys.version_info < (2, 7): - # On Python <= 2.6, json module is not C boosted, so try to use - # simplejson module if available - try: - import simplejson as json - except ImportError: - import json -else: - import json - -import six -import six.moves.xmlrpc_client as xmlrpclib - -from cerberus.openstack.common import gettextutils -from cerberus.openstack.common import importutils -from cerberus.openstack.common import strutils -from cerberus.openstack.common import timeutils - -netaddr = importutils.try_import("netaddr") - -_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, - inspect.isfunction, inspect.isgeneratorfunction, - inspect.isgenerator, inspect.istraceback, inspect.isframe, - inspect.iscode, inspect.isbuiltin, inspect.isroutine, - inspect.isabstract] - -_simple_types = (six.string_types + six.integer_types - + (type(None), bool, float)) - - -def to_primitive(value, convert_instances=False, convert_datetime=True, - level=0, max_depth=3): - """Convert a complex object into primitives. - - Handy for JSON serialization. We can optionally handle instances, - but since this is a recursive function, we could have cyclical - data structures. - - To handle cyclical data structures we could track the actual objects - visited in a set, but not all objects are hashable. Instead we just - track the depth of the object inspections and don't go too deep. - - Therefore, convert_instances=True is lossy ... be aware. - - """ - # handle obvious types first - order of basic types determined by running - # full tests on nova project, resulting in the following counts: - # 572754 - # 460353 - # 379632 - # 274610 - # 199918 - # 114200 - # 51817 - # 26164 - # 6491 - # 283 - # 19 - if isinstance(value, _simple_types): - return value - - if isinstance(value, datetime.datetime): - if convert_datetime: - return timeutils.strtime(value) - else: - return value - - # value of itertools.count doesn't get caught by nasty_type_tests - # and results in infinite loop when list(value) is called. - if type(value) == itertools.count: - return six.text_type(value) - - # FIXME(vish): Workaround for LP bug 852095. Without this workaround, - # tests that raise an exception in a mocked method that - # has a @wrap_exception with a notifier will fail. If - # we up the dependency to 0.5.4 (when it is released) we - # can remove this workaround. - if getattr(value, '__module__', None) == 'mox': - return 'mock' - - if level > max_depth: - return '?' - - # The try block may not be necessary after the class check above, - # but just in case ... 
- try: - recursive = functools.partial(to_primitive, - convert_instances=convert_instances, - convert_datetime=convert_datetime, - level=level, - max_depth=max_depth) - if isinstance(value, dict): - return dict((k, recursive(v)) for k, v in six.iteritems(value)) - elif isinstance(value, (list, tuple)): - return [recursive(lv) for lv in value] - - # It's not clear why xmlrpclib created their own DateTime type, but - # for our purposes, make it a datetime type which is explicitly - # handled - if isinstance(value, xmlrpclib.DateTime): - value = datetime.datetime(*tuple(value.timetuple())[:6]) - - if convert_datetime and isinstance(value, datetime.datetime): - return timeutils.strtime(value) - elif isinstance(value, gettextutils.Message): - return value.data - elif hasattr(value, 'iteritems'): - return recursive(dict(value.iteritems()), level=level + 1) - elif hasattr(value, '__iter__'): - return recursive(list(value)) - elif convert_instances and hasattr(value, '__dict__'): - # Likely an instance of something. Watch for cycles. - # Ignore class member vars. - return recursive(value.__dict__, level=level + 1) - elif netaddr and isinstance(value, netaddr.IPAddress): - return six.text_type(value) - else: - if any(test(value) for test in _nasty_type_tests): - return six.text_type(value) - return value - except TypeError: - # Class objects are tricky since they may define something like - # __iter__ defined but it isn't callable as list(). - return six.text_type(value) - - -def dumps(value, default=to_primitive, **kwargs): - return json.dumps(value, default=default, **kwargs) - - -def loads(s, encoding='utf-8'): - return json.loads(strutils.safe_decode(s, encoding)) - - -def load(fp, encoding='utf-8'): - return json.load(codecs.getreader(encoding)(fp)) - - -try: - import anyjson -except ImportError: - pass -else: - anyjson._modules.append((__name__, 'dumps', TypeError, - 'loads', ValueError, 'load')) - anyjson.force_implementation(__name__) diff --git a/cerberus/openstack/common/local.py b/cerberus/openstack/common/local.py deleted file mode 100644 index 0819d5b..0000000 --- a/cerberus/openstack/common/local.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Local storage of variables using weak references""" - -import threading -import weakref - - -class WeakLocal(threading.local): - def __getattribute__(self, attr): - rval = super(WeakLocal, self).__getattribute__(attr) - if rval: - # NOTE(mikal): this bit is confusing. What is stored is a weak - # reference, not the value itself. We therefore need to lookup - # the weak reference and return the inner value here. 
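A sketch of to_primitive()/dumps() on a structure that plain json.dumps would reject; the payload is invented for illustration::

    import datetime

    from cerberus.openstack.common import jsonutils

    payload = {
        'id': 42,
        'created_at': datetime.datetime(2018, 1, 19, 11, 10, 40),
        'tags': ('alpha', 'beta'),
    }

    # dumps() routes unserializable objects through to_primitive(), so
    # the datetime becomes a string instead of raising TypeError.
    body = jsonutils.dumps(payload)
    restored = jsonutils.loads(body)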
- rval = rval() - return rval - - def __setattr__(self, attr, value): - value = weakref.ref(value) - return super(WeakLocal, self).__setattr__(attr, value) - - -# NOTE(mikal): the name "store" should be deprecated in the future -store = WeakLocal() - -# A "weak" store uses weak references and allows an object to fall out of scope -# when it falls out of scope in the code that uses the thread local storage. A -# "strong" store will hold a reference to the object so that it never falls out -# of scope. -weak_store = WeakLocal() -strong_store = threading.local() diff --git a/cerberus/openstack/common/lockutils.py b/cerberus/openstack/common/lockutils.py deleted file mode 100644 index 3a54542..0000000 --- a/cerberus/openstack/common/lockutils.py +++ /dev/null @@ -1,377 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import contextlib -import errno -import fcntl -import functools -import os -import shutil -import subprocess -import sys -import tempfile -import threading -import time -import weakref - -from oslo.config import cfg - -from cerberus.openstack.common import fileutils -from cerberus.openstack.common.gettextutils import _, _LE, _LI -from cerberus.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -util_opts = [ - cfg.BoolOpt('disable_process_locking', default=False, - help='Whether to disable inter-process locks'), - cfg.StrOpt('lock_path', - default=os.environ.get("CERBERUS_LOCK_PATH"), - help=('Directory to use for lock files.')) -] - - -CONF = cfg.CONF -CONF.register_opts(util_opts) - - -def set_defaults(lock_path): - cfg.set_defaults(util_opts, lock_path=lock_path) - - -class _FileLock(object): - """Lock implementation which allows multiple locks, working around - issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does - not require any cleanup. Since the lock is always held on a file - descriptor rather than outside of the process, the lock gets dropped - automatically if the process crashes, even if __exit__ is not executed. - - There are no guarantees regarding usage by multiple green threads in a - single process here. This lock works only between processes. Exclusive - access between local threads should be achieved using the semaphores - in the @synchronized decorator. - - Note these locks are released when the descriptor is closed, so it's not - safe to close the file descriptor while another green thread holds the - lock. Just opening and closing the lock file can break synchronisation, - so lock files must be accessed only using this abstraction. 
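A sketch of the difference between the weak and strong stores above; RequestContext is a hypothetical stand-in, and the prompt collection relies on CPython's reference counting::

    from cerberus.openstack.common import local


    class RequestContext(object):
        pass

    ctx = RequestContext()
    local.store.context = ctx           # held only via a weak reference
    assert local.store.context is ctx   # dereferenced transparently

    del ctx                             # last strong reference goes away
    assert local.store.context is None  # the weak store let the object die

    local.strong_store.context = RequestContext()  # kept alive until replaced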
- """ - - def __init__(self, name): - self.lockfile = None - self.fname = name - - def acquire(self): - basedir = os.path.dirname(self.fname) - - if not os.path.exists(basedir): - fileutils.ensure_tree(basedir) - LOG.info(_LI('Created lock path: %s'), basedir) - - self.lockfile = open(self.fname, 'w') - - while True: - try: - # Using non-blocking locks since green threads are not - # patched to deal with blocking locking calls. - # Also upon reading the MSDN docs for locking(), it seems - # to have a laughable 10 attempts "blocking" mechanism. - self.trylock() - LOG.debug('Got file lock "%s"', self.fname) - return True - except IOError as e: - if e.errno in (errno.EACCES, errno.EAGAIN): - # external locks synchronise things like iptables - # updates - give it some time to prevent busy spinning - time.sleep(0.01) - else: - raise threading.ThreadError(_("Unable to acquire lock on" - " `%(filename)s` due to" - " %(exception)s") % - { - 'filename': self.fname, - 'exception': e, - }) - - def __enter__(self): - self.acquire() - return self - - def release(self): - try: - self.unlock() - self.lockfile.close() - LOG.debug('Released file lock "%s"', self.fname) - except IOError: - LOG.exception(_LE("Could not release the acquired lock `%s`"), - self.fname) - - def __exit__(self, exc_type, exc_val, exc_tb): - self.release() - - def exists(self): - return os.path.exists(self.fname) - - def trylock(self): - raise NotImplementedError() - - def unlock(self): - raise NotImplementedError() - - -class _WindowsLock(_FileLock): - def trylock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) - - def unlock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) - - -class _FcntlLock(_FileLock): - def trylock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) - - def unlock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_UN) - - -class _PosixLock(object): - def __init__(self, name): - # Hash the name because it's not valid to have POSIX semaphore - # names with things like / in them. Then use base64 to encode - # the digest() instead taking the hexdigest() because the - # result is shorter and most systems can't have shm sempahore - # names longer than 31 characters. 
- h = hashlib.sha1() - h.update(name.encode('ascii')) - self.name = str((b'/' + base64.urlsafe_b64encode( - h.digest())).decode('ascii')) - - def acquire(self, timeout=None): - self.semaphore = posix_ipc.Semaphore(self.name, - flags=posix_ipc.O_CREAT, - initial_value=1) - self.semaphore.acquire(timeout) - return self - - def __enter__(self): - self.acquire() - return self - - def release(self): - self.semaphore.release() - self.semaphore.close() - - def __exit__(self, exc_type, exc_val, exc_tb): - self.release() - - def exists(self): - try: - semaphore = posix_ipc.Semaphore(self.name) - except posix_ipc.ExistentialError: - return False - else: - semaphore.close() - return True - - -if os.name == 'nt': - import msvcrt - InterProcessLock = _WindowsLock - FileLock = _WindowsLock -else: - import base64 - import hashlib - import posix_ipc - InterProcessLock = _PosixLock - FileLock = _FcntlLock - -_semaphores = weakref.WeakValueDictionary() -_semaphores_lock = threading.Lock() - - -def _get_lock_path(name, lock_file_prefix, lock_path=None): - # NOTE(mikal): the lock name cannot contain directory - # separators - name = name.replace(os.sep, '_') - if lock_file_prefix: - sep = '' if lock_file_prefix.endswith('-') else '-' - name = '%s%s%s' % (lock_file_prefix, sep, name) - - local_lock_path = lock_path or CONF.lock_path - - if not local_lock_path: - # NOTE(bnemec): Create a fake lock path for posix locks so we don't - # unnecessarily raise the RequiredOptError below. - if InterProcessLock is not _PosixLock: - raise cfg.RequiredOptError('lock_path') - local_lock_path = 'posixlock:/' - - return os.path.join(local_lock_path, name) - - -def external_lock(name, lock_file_prefix=None, lock_path=None): - LOG.debug('Attempting to grab external lock "%(lock)s"', - {'lock': name}) - - lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path) - - # NOTE(bnemec): If an explicit lock_path was passed to us then it - # means the caller is relying on file-based locking behavior, so - # we can't use posix locks for those calls. - if lock_path: - return FileLock(lock_file_path) - return InterProcessLock(lock_file_path) - - -def remove_external_lock_file(name, lock_file_prefix=None): - """Remove an external lock file when it's not used anymore. - This will be helpful when we have a lot of lock files. - """ - with internal_lock(name): - lock_file_path = _get_lock_path(name, lock_file_prefix) - try: - os.remove(lock_file_path) - except OSError: - LOG.info(_LI('Failed to remove file %(file)s'), - {'file': lock_file_path}) - - -def internal_lock(name): - with _semaphores_lock: - try: - sem = _semaphores[name] - except KeyError: - sem = threading.Semaphore() - _semaphores[name] = sem - - LOG.debug('Got semaphore "%(lock)s"', {'lock': name}) - return sem - - -@contextlib.contextmanager -def lock(name, lock_file_prefix=None, external=False, lock_path=None): - """Context based lock - - This function yields a `threading.Semaphore` instance (if we don't use - eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is - True, in which case, it'll yield an InterProcessLock instance. - - :param lock_file_prefix: The lock_file_prefix argument is used to provide - lock files on disk with a meaningful prefix. - - :param external: The external keyword argument denotes whether this lock - should work across multiple processes. This means that if two different - workers both run a method decorated with @synchronized('mylock', - external=True), only one of them will execute at a time.
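The _PosixLock naming comment above, replayed as a standalone stdlib sketch::

    import base64
    import hashlib

    def posix_sem_name(name):
        # '/' and friends are not valid inside a POSIX semaphore name, and
        # many platforms cap the length, so hash the lock name and
        # base64-encode the digest instead of using the longer hexdigest.
        h = hashlib.sha1()
        h.update(name.encode('ascii'))
        return (b'/' + base64.urlsafe_b64encode(h.digest())).decode('ascii')

    print(posix_sem_name('cerberus-mylock'))  # '/' + 28 url-safe characters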
- """ - int_lock = internal_lock(name) - with int_lock: - if external and not CONF.disable_process_locking: - ext_lock = external_lock(name, lock_file_prefix, lock_path) - with ext_lock: - yield ext_lock - else: - yield int_lock - - -def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): - """Synchronization decorator. - - Decorating a method like so:: - - @synchronized('mylock') - def foo(self, *args): - ... - - ensures that only one thread will execute the foo method at a time. - - Different methods can share the same lock:: - - @synchronized('mylock') - def foo(self, *args): - ... - - @synchronized('mylock') - def bar(self, *args): - ... - - This way only one of either foo or bar can be executing at a time. - """ - - def wrap(f): - @functools.wraps(f) - def inner(*args, **kwargs): - try: - with lock(name, lock_file_prefix, external, lock_path): - LOG.debug('Got semaphore / lock "%(function)s"', - {'function': f.__name__}) - return f(*args, **kwargs) - finally: - LOG.debug('Semaphore / lock released "%(function)s"', - {'function': f.__name__}) - return inner - return wrap - - -def synchronized_with_prefix(lock_file_prefix): - """Partial object generator for the synchronization decorator. - - Redefine @synchronized in each project like so:: - - (in nova/utils.py) - from nova.openstack.common import lockutils - - synchronized = lockutils.synchronized_with_prefix('nova-') - - - (in nova/foo.py) - from nova import utils - - @utils.synchronized('mylock') - def bar(self, *args): - ... - - The lock_file_prefix argument is used to provide lock files on disk with a - meaningful prefix. - """ - - return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) - - -def main(argv): - """Create a dir for locks and pass it to command from arguments - - If you run this: - python -m openstack.common.lockutils python setup.py testr - - a temporary directory will be created for all your locks and passed to all - your tests in an environment variable. The temporary dir will be deleted - afterwards and the return value will be preserved. - """ - - lock_dir = tempfile.mkdtemp() - os.environ["CERBERUS_LOCK_PATH"] = lock_dir - try: - ret_val = subprocess.call(argv[1:]) - finally: - shutil.rmtree(lock_dir, ignore_errors=True) - return ret_val - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/cerberus/openstack/common/log.py b/cerberus/openstack/common/log.py deleted file mode 100644 index 8cef7af..0000000 --- a/cerberus/openstack/common/log.py +++ /dev/null @@ -1,713 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""OpenStack logging handler. - -This module adds to logging functionality by adding the option to specify -a context object when calling the various log methods. If the context object -is not specified, default formatting is used. 
Additionally, an instance uuid -may be passed as part of the log message, which is intended to make it easier -for admins to find messages related to a specific instance. - -It also allows setting of formatting information through conf. - -""" - -import inspect -import itertools -import logging -import logging.config -import logging.handlers -import os -import re -import sys -import traceback - -from oslo.config import cfg -import six -from six import moves - -from cerberus.openstack.common.gettextutils import _ -from cerberus.openstack.common import importutils -from cerberus.openstack.common import jsonutils -from cerberus.openstack.common import local - - -_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" - -_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password'] - -# NOTE(ldbragst): Let's build a list of regex objects using the list of -# _SANITIZE_KEYS we already have. This way, we only have to add the new key -# to the list of _SANITIZE_KEYS and we can generate regular expressions -# for XML and JSON automatically. -_SANITIZE_PATTERNS = [] -_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', - r'(<%(key)s>).*?()', - r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', - r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] - -for key in _SANITIZE_KEYS: - for pattern in _FORMAT_PATTERNS: - reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) - _SANITIZE_PATTERNS.append(reg_ex) - - -common_cli_opts = [ - cfg.BoolOpt('debug', - short='d', - default=False, - help='Print debugging output (set logging level to ' - 'DEBUG instead of default WARNING level).'), - cfg.BoolOpt('verbose', - short='v', - default=False, - help='Print more verbose output (set logging level to ' - 'INFO instead of default WARNING level).'), -] - -logging_cli_opts = [ - cfg.StrOpt('log-config-append', - metavar='PATH', - deprecated_name='log-config', - help='The name of logging configuration file. It does not ' - 'disable existing loggers, but just appends specified ' - 'logging configuration to any other existing logging ' - 'options. Please see the Python logging module ' - 'documentation for details on logging configuration ' - 'files.'), - cfg.StrOpt('log-format', - default=None, - metavar='FORMAT', - help='DEPRECATED. ' - 'A logging.Formatter log message format string which may ' - 'use any of the available logging.LogRecord attributes. ' - 'This option is deprecated. Please use ' - 'logging_context_format_string and ' - 'logging_default_format_string instead.'), - cfg.StrOpt('log-date-format', - default=_DEFAULT_LOG_DATE_FORMAT, - metavar='DATE_FORMAT', - help='Format string for %%(asctime)s in log records. ' - 'Default: %(default)s'), - cfg.StrOpt('log-file', - metavar='PATH', - deprecated_name='logfile', - help='(Optional) Name of log file to output to. ' - 'If no default is set, logging will go to stdout.'), - cfg.StrOpt('log-dir', - deprecated_name='logdir', - help='(Optional) The base directory used for relative ' - '--log-file paths'), - cfg.BoolOpt('use-syslog', - default=False, - help='Use syslog for logging. ' - 'Existing syslog format is DEPRECATED during I, ' - 'and then will be changed in J to honor RFC5424'), - cfg.BoolOpt('use-syslog-rfc-format', - # TODO(bogdando) remove or use True after existing - # syslog format deprecation in J - default=False, - help='(Optional) Use syslog rfc5424 format for logging. ' - 'If enabled, will add APP-NAME (RFC5424) before the ' - 'MSG part of the syslog message. 
The old format ' - 'without APP-NAME is deprecated in I, ' - 'and will be removed in J.'), - cfg.StrOpt('syslog-log-facility', - default='LOG_USER', - help='Syslog facility to receive log lines') -] - -generic_log_opts = [ - cfg.BoolOpt('use_stderr', - default=True, - help='Log output to standard error') -] - -log_opts = [ - cfg.StrOpt('logging_context_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [%(request_id)s %(user_identity)s] ' - '%(instance)s%(message)s', - help='Format string to use for log messages with context'), - cfg.StrOpt('logging_default_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [-] %(instance)s%(message)s', - help='Format string to use for log messages without context'), - cfg.StrOpt('logging_debug_format_suffix', - default='%(funcName)s %(pathname)s:%(lineno)d', - help='Data to append to log format when level is DEBUG'), - cfg.StrOpt('logging_exception_prefix', - default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' - '%(instance)s', - help='Prefix each line of exception output with this format'), - cfg.ListOpt('default_log_levels', - default=[ - 'amqp=WARN', - 'amqplib=WARN', - 'boto=WARN', - 'qpid=WARN', - 'sqlalchemy=WARN', - 'suds=INFO', - 'oslo.messaging=INFO', - 'iso8601=WARN', - 'requests.packages.urllib3.connectionpool=WARN' - ], - help='List of logger=LEVEL pairs'), - cfg.BoolOpt('publish_errors', - default=False, - help='Publish error events'), - cfg.BoolOpt('fatal_deprecations', - default=False, - help='Make deprecations fatal'), - - # NOTE(mikal): there are two options here because sometimes we are handed - # a full instance (and could include more information), and other times we - # are just handed a UUID for the instance. - cfg.StrOpt('instance_format', - default='[instance: %(uuid)s] ', - help='If an instance is passed with the log message, format ' - 'it like this'), - cfg.StrOpt('instance_uuid_format', - default='[instance: %(uuid)s] ', - help='If an instance UUID is passed with the log message, ' - 'format it like this'), -] - -CONF = cfg.CONF -CONF.register_cli_opts(common_cli_opts) -CONF.register_cli_opts(logging_cli_opts) -CONF.register_opts(generic_log_opts) -CONF.register_opts(log_opts) - -# our new audit level -# NOTE(jkoelker) Since we synthesized an audit level, make the logging -# module aware of it so it acts like other levels. -logging.AUDIT = logging.INFO + 1 -logging.addLevelName(logging.AUDIT, 'AUDIT') - - -try: - NullHandler = logging.NullHandler -except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 - class NullHandler(logging.Handler): - def handle(self, record): - pass - - def emit(self, record): - pass - - def createLock(self): - self.lock = None - - -def _dictify_context(context): - if context is None: - return None - if not isinstance(context, dict) and getattr(context, 'to_dict', None): - context = context.to_dict() - return context - - -def _get_binary_name(): - return os.path.basename(inspect.stack()[-1][1]) - - -def _get_log_file_path(binary=None): - logfile = CONF.log_file - logdir = CONF.log_dir - - if logfile and not logdir: - return logfile - - if logfile and logdir: - return os.path.join(logdir, logfile) - - if logdir: - binary = binary or _get_binary_name() - return '%s.log' % (os.path.join(logdir, binary),) - - return None - - -def mask_password(message, secret="***"): - """Replace password with 'secret' in message. - - :param message: The string which includes security information. 
- :param secret: value with which to replace passwords. - :returns: The unicode value of message with the password fields masked. - - For example: - - >>> mask_password("'adminPass' : 'aaaaa'") - "'adminPass' : '***'" - >>> mask_password("'admin_pass' : 'aaaaa'") - "'admin_pass' : '***'" - >>> mask_password('"password" : "aaaaa"') - '"password" : "***"' - >>> mask_password("'original_password' : 'aaaaa'") - "'original_password' : '***'" - >>> mask_password("u'original_password' : u'aaaaa'") - "u'original_password' : u'***'" - """ - message = six.text_type(message) - - # NOTE(ldbragst): Check to see if anything in message contains any key - # specified in _SANITIZE_KEYS, if not then just return the message since - # we don't have to mask any passwords. - if not any(key in message for key in _SANITIZE_KEYS): - return message - - secret = r'\g<1>' + secret + r'\g<2>' - for pattern in _SANITIZE_PATTERNS: - message = re.sub(pattern, secret, message) - return message - - -class BaseLoggerAdapter(logging.LoggerAdapter): - - def audit(self, msg, *args, **kwargs): - self.log(logging.AUDIT, msg, *args, **kwargs) - - -class LazyAdapter(BaseLoggerAdapter): - def __init__(self, name='unknown', version='unknown'): - self._logger = None - self.extra = {} - self.name = name - self.version = version - - @property - def logger(self): - if not self._logger: - self._logger = getLogger(self.name, self.version) - return self._logger - - -class ContextAdapter(BaseLoggerAdapter): - warn = logging.LoggerAdapter.warning - - def __init__(self, logger, project_name, version_string): - self.logger = logger - self.project = project_name - self.version = version_string - self._deprecated_messages_sent = dict() - - @property - def handlers(self): - return self.logger.handlers - - def deprecated(self, msg, *args, **kwargs): - """Call this method when a deprecated feature is used. - - If the system is configured for fatal deprecations then the message - is logged at the 'critical' level and :class:`DeprecatedConfig` will - be raised. - - Otherwise, the message will be logged (once) at the 'warn' level. - - :raises: :class:`DeprecatedConfig` if the system is configured for - fatal deprecations. - - """ - stdmsg = _("Deprecated: %s") % msg - if CONF.fatal_deprecations: - self.critical(stdmsg, *args, **kwargs) - raise DeprecatedConfig(msg=stdmsg) - - # Using a list because a tuple with dict can't be stored in a set. - sent_args = self._deprecated_messages_sent.setdefault(msg, list()) - - if args in sent_args: - # Already logged this message, so don't log it again. 
- return - - sent_args.append(args) - self.warn(stdmsg, *args, **kwargs) - - def process(self, msg, kwargs): - # NOTE(mrodden): catch any Message/other object and - # coerce to unicode before they can get - # to the python logging and possibly - # cause string encoding trouble - if not isinstance(msg, six.string_types): - msg = six.text_type(msg) - - if 'extra' not in kwargs: - kwargs['extra'] = {} - extra = kwargs['extra'] - - context = kwargs.pop('context', None) - if not context: - context = getattr(local.store, 'context', None) - if context: - extra.update(_dictify_context(context)) - - instance = kwargs.pop('instance', None) - instance_uuid = (extra.get('instance_uuid') or - kwargs.pop('instance_uuid', None)) - instance_extra = '' - if instance: - instance_extra = CONF.instance_format % instance - elif instance_uuid: - instance_extra = (CONF.instance_uuid_format - % {'uuid': instance_uuid}) - extra['instance'] = instance_extra - - extra.setdefault('user_identity', kwargs.pop('user_identity', None)) - - extra['project'] = self.project - extra['version'] = self.version - extra['extra'] = extra.copy() - return msg, kwargs - - -class JSONFormatter(logging.Formatter): - def __init__(self, fmt=None, datefmt=None): - # NOTE(jkoelker) we ignore the fmt argument, but its still there - # since logging.config.fileConfig passes it. - self.datefmt = datefmt - - def formatException(self, ei, strip_newlines=True): - lines = traceback.format_exception(*ei) - if strip_newlines: - lines = [moves.filter( - lambda x: x, - line.rstrip().splitlines()) for line in lines] - lines = list(itertools.chain(*lines)) - return lines - - def format(self, record): - message = {'message': record.getMessage(), - 'asctime': self.formatTime(record, self.datefmt), - 'name': record.name, - 'msg': record.msg, - 'args': record.args, - 'levelname': record.levelname, - 'levelno': record.levelno, - 'pathname': record.pathname, - 'filename': record.filename, - 'module': record.module, - 'lineno': record.lineno, - 'funcname': record.funcName, - 'created': record.created, - 'msecs': record.msecs, - 'relative_created': record.relativeCreated, - 'thread': record.thread, - 'thread_name': record.threadName, - 'process_name': record.processName, - 'process': record.process, - 'traceback': None} - - if hasattr(record, 'extra'): - message['extra'] = record.extra - - if record.exc_info: - message['traceback'] = self.formatException(record.exc_info) - - return jsonutils.dumps(message) - - -def _create_logging_excepthook(product_name): - def logging_excepthook(exc_type, value, tb): - extra = {} - if CONF.verbose or CONF.debug: - extra['exc_info'] = (exc_type, value, tb) - getLogger(product_name).critical( - "".join(traceback.format_exception_only(exc_type, value)), - **extra) - return logging_excepthook - - -class LogConfigError(Exception): - - message = _('Error loading logging config %(log_config)s: %(err_msg)s') - - def __init__(self, log_config, err_msg): - self.log_config = log_config - self.err_msg = err_msg - - def __str__(self): - return self.message % dict(log_config=self.log_config, - err_msg=self.err_msg) - - -def _load_log_config(log_config_append): - try: - logging.config.fileConfig(log_config_append, - disable_existing_loggers=False) - except moves.configparser.Error as exc: - raise LogConfigError(log_config_append, str(exc)) - - -def setup(product_name, version='unknown'): - """Setup logging.""" - if CONF.log_config_append: - _load_log_config(CONF.log_config_append) - else: - _setup_logging_from_conf(product_name, version) - 
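JSONFormatter above serializes each record to a JSON object instead of applying a format string. A reduced stdlib-only version of the same idea, keeping a subset of the fields and dropping the jsonutils/six dependencies:

    import json
    import logging

    class JSONFormatter(logging.Formatter):
        def format(self, record):
            payload = {'message': record.getMessage(),
                       'name': record.name,
                       'levelname': record.levelname,
                       'pathname': record.pathname,
                       'lineno': record.lineno,
                       'traceback': None}
            if record.exc_info:
                payload['traceback'] = self.formatException(record.exc_info)
            return json.dumps(payload)

    handler = logging.StreamHandler()
    handler.setFormatter(JSONFormatter())
    logging.getLogger('demo').addHandler(handler)
    logging.getLogger('demo').error('boom')   # emits one JSON line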
sys.excepthook = _create_logging_excepthook(product_name) - - -def set_defaults(logging_context_format_string): - cfg.set_defaults(log_opts, - logging_context_format_string= - logging_context_format_string) - - -def _find_facility_from_conf(): - facility_names = logging.handlers.SysLogHandler.facility_names - facility = getattr(logging.handlers.SysLogHandler, - CONF.syslog_log_facility, - None) - - if facility is None and CONF.syslog_log_facility in facility_names: - facility = facility_names.get(CONF.syslog_log_facility) - - if facility is None: - valid_facilities = facility_names.keys() - consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', - 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', - 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', - 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', - 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] - valid_facilities.extend(consts) - raise TypeError(_('syslog facility must be one of: %s') % - ', '.join("'%s'" % fac - for fac in valid_facilities)) - - return facility - - -class RFCSysLogHandler(logging.handlers.SysLogHandler): - def __init__(self, *args, **kwargs): - self.binary_name = _get_binary_name() - super(RFCSysLogHandler, self).__init__(*args, **kwargs) - - def format(self, record): - msg = super(RFCSysLogHandler, self).format(record) - msg = self.binary_name + ' ' + msg - return msg - - -def _setup_logging_from_conf(project, version): - log_root = getLogger(None).logger - for handler in log_root.handlers: - log_root.removeHandler(handler) - - if CONF.use_syslog: - facility = _find_facility_from_conf() - # TODO(bogdando) use the format provided by RFCSysLogHandler - # after existing syslog format deprecation in J - if CONF.use_syslog_rfc_format: - syslog = RFCSysLogHandler(address='/dev/log', - facility=facility) - else: - syslog = logging.handlers.SysLogHandler(address='/dev/log', - facility=facility) - log_root.addHandler(syslog) - - logpath = _get_log_file_path() - if logpath: - filelog = logging.handlers.WatchedFileHandler(logpath) - log_root.addHandler(filelog) - - if CONF.use_stderr: - streamlog = ColorHandler() - log_root.addHandler(streamlog) - - elif not logpath: - # pass sys.stdout as a positional argument - # python2.6 calls the argument strm, in 2.7 it's stream - streamlog = logging.StreamHandler(sys.stdout) - log_root.addHandler(streamlog) - - if CONF.publish_errors: - handler = importutils.import_object( - "cerberus.openstack.common.log_handler.PublishErrorsHandler", - logging.ERROR) - log_root.addHandler(handler) - - datefmt = CONF.log_date_format - for handler in log_root.handlers: - # NOTE(alaski): CONF.log_format overrides everything currently. This - # should be deprecated in favor of context aware formatting. 
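_setup_logging_from_conf (continued below) resets the root logger's handlers and then attaches syslog, file, and/or stream handlers according to configuration. The same wiring with only the stdlib; the argument names here mirror the conf options above but are plain function parameters:

    import logging
    import logging.handlers
    import sys

    def setup_root_logging(logpath=None, use_stderr=True, use_syslog=False,
                           debug=False, verbose=False):
        log_root = logging.getLogger()
        for handler in list(log_root.handlers):
            log_root.removeHandler(handler)
        if use_syslog:
            log_root.addHandler(
                logging.handlers.SysLogHandler(address='/dev/log'))
        if logpath:
            log_root.addHandler(logging.handlers.WatchedFileHandler(logpath))
        if use_stderr:
            log_root.addHandler(logging.StreamHandler(sys.stderr))
        elif not logpath:
            log_root.addHandler(logging.StreamHandler(sys.stdout))
        if debug:
            log_root.setLevel(logging.DEBUG)
        elif verbose:
            log_root.setLevel(logging.INFO)
        else:
            log_root.setLevel(logging.WARNING)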
- if CONF.log_format: - handler.setFormatter(logging.Formatter(fmt=CONF.log_format, - datefmt=datefmt)) - log_root.info('Deprecated: log_format is now deprecated and will ' - 'be removed in the next release') - else: - handler.setFormatter(ContextFormatter(project=project, - version=version, - datefmt=datefmt)) - - if CONF.debug: - log_root.setLevel(logging.DEBUG) - elif CONF.verbose: - log_root.setLevel(logging.INFO) - else: - log_root.setLevel(logging.WARNING) - - for pair in CONF.default_log_levels: - mod, _sep, level_name = pair.partition('=') - level = logging.getLevelName(level_name) - logger = logging.getLogger(mod) - logger.setLevel(level) - -_loggers = {} - - -def getLogger(name='unknown', version='unknown'): - if name not in _loggers: - _loggers[name] = ContextAdapter(logging.getLogger(name), - name, - version) - return _loggers[name] - - -def getLazyLogger(name='unknown', version='unknown'): - """Returns lazy logger. - - Creates a pass-through logger that does not create the real logger - until it is really needed and delegates all calls to the real logger - once it is created. - """ - return LazyAdapter(name, version) - - -class WritableLogger(object): - """A thin wrapper that responds to `write` and logs.""" - - def __init__(self, logger, level=logging.INFO): - self.logger = logger - self.level = level - - def write(self, msg): - self.logger.log(self.level, msg.rstrip()) - - -class ContextFormatter(logging.Formatter): - """A context.RequestContext aware formatter configured through flags. - - The flags used to set format strings are: logging_context_format_string - and logging_default_format_string. You can also specify - logging_debug_format_suffix to append extra formatting if the log level is - debug. - - For information about what variables are available for the formatter see: - http://docs.python.org/library/logging.html#formatter - - If available, uses the context value stored in TLS - local.store.context - - """ - - def __init__(self, *args, **kwargs): - """Initialize ContextFormatter instance - - Takes additional keyword arguments which can be used in the message - format string. 
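The loop that closes _setup_logging_from_conf applies default_log_levels pairs such as 'sqlalchemy=WARN' to individual loggers. The same parsing extracted into a standalone helper:

    import logging

    DEFAULT_LOG_LEVELS = ['amqp=WARN', 'sqlalchemy=WARN', 'iso8601=WARN']

    def set_default_log_levels(pairs=DEFAULT_LOG_LEVELS):
        for pair in pairs:
            mod, _sep, level_name = pair.partition('=')
            # getLevelName maps 'WARN' back to the integer logging.WARN.
            logging.getLogger(mod).setLevel(logging.getLevelName(level_name))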
- - :keyword project: project name - :type project: string - :keyword version: project version - :type version: string - - """ - - self.project = kwargs.pop('project', 'unknown') - self.version = kwargs.pop('version', 'unknown') - - logging.Formatter.__init__(self, *args, **kwargs) - - def format(self, record): - """Uses contextstring if request_id is set, otherwise default.""" - - # store project info - record.project = self.project - record.version = self.version - - # store request info - context = getattr(local.store, 'context', None) - if context: - d = _dictify_context(context) - for k, v in d.items(): - setattr(record, k, v) - - # NOTE(sdague): default the fancier formatting params - # to an empty string so we don't throw an exception if - # they get used - for key in ('instance', 'color', 'user_identity'): - if key not in record.__dict__: - record.__dict__[key] = '' - - if record.__dict__.get('request_id'): - self._fmt = CONF.logging_context_format_string - else: - self._fmt = CONF.logging_default_format_string - - if (record.levelno == logging.DEBUG and - CONF.logging_debug_format_suffix): - self._fmt += " " + CONF.logging_debug_format_suffix - - # Cache this on the record, Logger will respect our formatted copy - if record.exc_info: - record.exc_text = self.formatException(record.exc_info, record) - return logging.Formatter.format(self, record) - - def formatException(self, exc_info, record=None): - """Format exception output with CONF.logging_exception_prefix.""" - if not record: - return logging.Formatter.formatException(self, exc_info) - - stringbuffer = moves.StringIO() - traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], - None, stringbuffer) - lines = stringbuffer.getvalue().split('\n') - stringbuffer.close() - - if CONF.logging_exception_prefix.find('%(asctime)') != -1: - record.asctime = self.formatTime(record, self.datefmt) - - formatted_lines = [] - for line in lines: - pl = CONF.logging_exception_prefix % record.__dict__ - fl = '%s%s' % (pl, line) - formatted_lines.append(fl) - return '\n'.join(formatted_lines) - - -class ColorHandler(logging.StreamHandler): - LEVEL_COLORS = { - logging.DEBUG: '\033[00;32m', # GREEN - logging.INFO: '\033[00;36m', # CYAN - logging.AUDIT: '\033[01;36m', # BOLD CYAN - logging.WARN: '\033[01;33m', # BOLD YELLOW - logging.ERROR: '\033[01;31m', # BOLD RED - logging.CRITICAL: '\033[01;31m', # BOLD RED - } - - def format(self, record): - record.color = self.LEVEL_COLORS[record.levelno] - return logging.StreamHandler.format(self, record) - - -class DeprecatedConfig(Exception): - message = _("Fatal call to deprecated config: %(msg)s") - - def __init__(self, msg): - super(Exception, self).__init__(self.message % dict(msg=msg)) diff --git a/cerberus/openstack/common/log_handler.py b/cerberus/openstack/common/log_handler.py deleted file mode 100644 index 836eab3..0000000 --- a/cerberus/openstack/common/log_handler.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
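ColorHandler above injects an ANSI escape into the record as %(color)s for the format string to pick up. A self-contained variant that instead wraps the fully formatted line, and adds the reset code that the original leaves to the format string:

    import logging

    class ColorHandler(logging.StreamHandler):
        LEVEL_COLORS = {logging.DEBUG: '\033[00;32m',     # green
                        logging.INFO: '\033[00;36m',      # cyan
                        logging.WARNING: '\033[01;33m',   # bold yellow
                        logging.ERROR: '\033[01;31m',     # bold red
                        logging.CRITICAL: '\033[01;31m'}  # bold red
        RESET = '\033[00m'

        def format(self, record):
            line = logging.StreamHandler.format(self, record)
            color = self.LEVEL_COLORS.get(record.levelno, '')
            return color + line + self.RESET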
- -import logging - -from oslo.config import cfg - -from cerberus.openstack.common import notifier - - -class PublishErrorsHandler(logging.Handler): - def emit(self, record): - if ('cerberus.openstack.common.notifier.log_notifier' in - cfg.CONF.notification_driver): - return - notifier.api.notify(None, 'error.publisher', - 'error_notification', - notifier.api.ERROR, - dict(error=record.getMessage())) diff --git a/cerberus/openstack/common/loopingcall.py b/cerberus/openstack/common/loopingcall.py deleted file mode 100644 index ce4fac3..0000000 --- a/cerberus/openstack/common/loopingcall.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sys - -from eventlet import event -from eventlet import greenthread - -from cerberus.openstack.common.gettextutils import _LE, _LW -from cerberus.openstack.common import log as logging -from cerberus.openstack.common import timeutils - -LOG = logging.getLogger(__name__) - - -class LoopingCallDone(Exception): - """Exception to break out and stop a LoopingCall. - - The poll-function passed to LoopingCall can raise this exception to - break out of the loop normally. This is somewhat analogous to - StopIteration. 
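PublishErrorsHandler above turns ERROR records into notifier events, skipping itself when the log notifier driver is active to avoid a feedback loop. Decoupled from the notifier API, the pattern reduces to a handler that forwards each record to an arbitrary callback (a sketch, not the original interface):

    import logging

    class PublishErrorsHandler(logging.Handler):
        def __init__(self, publish, level=logging.ERROR):
            logging.Handler.__init__(self, level)
            self.publish = publish

        def emit(self, record):
            # The original sends dict(error=...) through
            # notifier.api.notify(); here any callable will do.
            self.publish({'event_type': 'error_notification',
                          'error': record.getMessage()})

    def publish_to_stdout(event):
        print(event)

    logging.getLogger().addHandler(PublishErrorsHandler(publish_to_stdout))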
- - An optional return-value can be included as the argument to the exception; - this return-value will be returned by LoopingCall.wait() - - """ - - def __init__(self, retvalue=True): - """:param retvalue: Value that LoopingCall.wait() should return.""" - self.retvalue = retvalue - - -class LoopingCallBase(object): - def __init__(self, f=None, *args, **kw): - self.args = args - self.kw = kw - self.f = f - self._running = False - self.done = None - - def stop(self): - self._running = False - - def wait(self): - return self.done.wait() - - -class FixedIntervalLoopingCall(LoopingCallBase): - """A fixed interval looping call.""" - - def start(self, interval, initial_delay=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - start = timeutils.utcnow() - self.f(*self.args, **self.kw) - end = timeutils.utcnow() - if not self._running: - break - delay = interval - timeutils.delta_seconds(start, end) - if delay <= 0: - LOG.warn(_LW('task run outlasted interval by %s sec') % - -delay) - greenthread.sleep(delay if delay > 0 else 0) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_LE('in fixed duration looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn_n(_inner) - return self.done - - -# TODO(mikal): this class name is deprecated in Havana and should be removed -# in the I release -LoopingCall = FixedIntervalLoopingCall - - -class DynamicLoopingCall(LoopingCallBase): - """A looping call which sleeps until the next known event. - - The function called should return how long to sleep for before being - called again. - """ - - def start(self, initial_delay=None, periodic_interval_max=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - idle = self.f(*self.args, **self.kw) - if not self._running: - break - - if periodic_interval_max is not None: - idle = min(idle, periodic_interval_max) - LOG.debug('Dynamic looping call sleeping for %.02f ' - 'seconds', idle) - greenthread.sleep(idle) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_LE('in dynamic looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn(_inner) - return self.done diff --git a/cerberus/openstack/common/network_utils.py b/cerberus/openstack/common/network_utils.py deleted file mode 100644 index fa812b2..0000000 --- a/cerberus/openstack/common/network_utils.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2012 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Network-related utilities and helper functions. 
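FixedIntervalLoopingCall above runs a function on a fixed cadence inside an eventlet green thread, subtracting the call's own runtime from the sleep. The same cadence logic with plain threads, for readers without eventlet; LoopingCallDone keeps its role as the clean-exit signal:

    import threading
    import time

    class LoopingCallDone(Exception):
        """Raised by the task to stop the loop cleanly."""

    def fixed_interval_call(func, interval, initial_delay=None):
        def _inner():
            if initial_delay:
                time.sleep(initial_delay)
            while True:
                start = time.time()
                try:
                    func()
                except LoopingCallDone:
                    return
                # Subtract the task's own runtime; a negative delay means
                # the task outlasted the interval (the original logs a
                # warning in that case).
                delay = interval - (time.time() - start)
                time.sleep(delay if delay > 0 else 0)
        worker = threading.Thread(target=_inner)
        worker.daemon = True
        worker.start()
        return worker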
-""" - -# TODO(jd) Use six.moves once -# https://bitbucket.org/gutworth/six/pull-request/28 -# is merged -try: - import urllib.parse - SplitResult = urllib.parse.SplitResult -except ImportError: - import urlparse - SplitResult = urlparse.SplitResult - -from six.moves.urllib import parse - - -def parse_host_port(address, default_port=None): - """Interpret a string as a host:port pair. - - An IPv6 address MUST be escaped if accompanied by a port, - because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334 - means both [2001:db8:85a3::8a2e:370:7334] and - [2001:db8:85a3::8a2e:370]:7334. - - >>> parse_host_port('server01:80') - ('server01', 80) - >>> parse_host_port('server01') - ('server01', None) - >>> parse_host_port('server01', default_port=1234) - ('server01', 1234) - >>> parse_host_port('[::1]:80') - ('::1', 80) - >>> parse_host_port('[::1]') - ('::1', None) - >>> parse_host_port('[::1]', default_port=1234) - ('::1', 1234) - >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234) - ('2001:db8:85a3::8a2e:370:7334', 1234) - - """ - if address[0] == '[': - # Escaped ipv6 - _host, _port = address[1:].split(']') - host = _host - if ':' in _port: - port = _port.split(':')[1] - else: - port = default_port - else: - if address.count(':') == 1: - host, port = address.split(':') - else: - # 0 means ipv4, >1 means ipv6. - # We prohibit unescaped ipv6 addresses with port. - host = address - port = default_port - - return (host, None if port is None else int(port)) - - -class ModifiedSplitResult(SplitResult): - """Split results class for urlsplit.""" - - # NOTE(dims): The functions below are needed for Python 2.6.x. - # We can remove these when we drop support for 2.6.x. - @property - def hostname(self): - netloc = self.netloc.split('@', 1)[-1] - host, port = parse_host_port(netloc) - return host - - @property - def port(self): - netloc = self.netloc.split('@', 1)[-1] - host, port = parse_host_port(netloc) - return port - - -def urlsplit(url, scheme='', allow_fragments=True): - """Parse a URL using urlparse.urlsplit(), splitting query and fragments. - This function papers over Python issue9374 when needed. - - The parameters are the same as urlparse.urlsplit. - """ - scheme, netloc, path, query, fragment = parse.urlsplit( - url, scheme, allow_fragments) - if allow_fragments and '#' in path: - path, fragment = path.split('#', 1) - if '?' in path: - path, query = path.split('?', 1) - return ModifiedSplitResult(scheme, netloc, - path, query, fragment) diff --git a/cerberus/openstack/common/periodic_task.py b/cerberus/openstack/common/periodic_task.py deleted file mode 100644 index 5311a40..0000000 --- a/cerberus/openstack/common/periodic_task.py +++ /dev/null @@ -1,183 +0,0 @@ -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import time - -from oslo.config import cfg -import six - -from cerberus.openstack.common.gettextutils import _, _LE, _LI -from cerberus.openstack.common import log as logging - - -periodic_opts = [ - cfg.BoolOpt('run_external_periodic_tasks', - default=True, - help=('Some periodic tasks can be run in a separate process. ' - 'Should we run them here?')), -] - -CONF = cfg.CONF -CONF.register_opts(periodic_opts) - -LOG = logging.getLogger(__name__) - -DEFAULT_INTERVAL = 60.0 - - -class InvalidPeriodicTaskArg(Exception): - message = _("Unexpected argument for periodic task creation: %(arg)s.") - - -def periodic_task(*args, **kwargs): - """Decorator to indicate that a method is a periodic task. - - This decorator can be used in two ways: - - 1. Without arguments '@periodic_task', this will be run on every cycle - of the periodic scheduler. - - 2. With arguments: - @periodic_task(spacing=N [, run_immediately=[True|False]]) - this will be run on approximately every N seconds. If this number is - negative the periodic task will be disabled. If the run_immediately - argument is provided and has a value of 'True', the first run of the - task will be shortly after task scheduler starts. If - run_immediately is omitted or set to 'False', the first time the - task runs will be approximately N seconds after the task scheduler - starts. - """ - def decorator(f): - # Test for old style invocation - if 'ticks_between_runs' in kwargs: - raise InvalidPeriodicTaskArg(arg='ticks_between_runs') - - # Control if run at all - f._periodic_task = True - f._periodic_external_ok = kwargs.pop('external_process_ok', False) - if f._periodic_external_ok and not CONF.run_external_periodic_tasks: - f._periodic_enabled = False - else: - f._periodic_enabled = kwargs.pop('enabled', True) - - # Control frequency - f._periodic_spacing = kwargs.pop('spacing', 0) - f._periodic_immediate = kwargs.pop('run_immediately', False) - if f._periodic_immediate: - f._periodic_last_run = None - else: - f._periodic_last_run = time.time() - return f - - # NOTE(sirp): The `if` is necessary to allow the decorator to be used with - # and without parents. - # - # In the 'with-parents' case (with kwargs present), this function needs to - # return a decorator function since the interpreter will invoke it like: - # - # periodic_task(*args, **kwargs)(f) - # - # In the 'without-parents' case, the original function will be passed - # in as the first argument, like: - # - # periodic_task(f) - if kwargs: - return decorator - else: - return decorator(args[0]) - - -class _PeriodicTasksMeta(type): - def __init__(cls, names, bases, dict_): - """Metaclass that allows us to collect decorated periodic tasks.""" - super(_PeriodicTasksMeta, cls).__init__(names, bases, dict_) - - # NOTE(sirp): if the attribute is not present then we must be the base - # class, so, go ahead an initialize it. If the attribute is present, - # then we're a subclass so make a copy of it so we don't step on our - # parent's toes. 
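The periodic_task decorator only tags the function; the _PeriodicTasksMeta metaclass below does the collection. A sketch of the tagging half, covering just the parenthesized @periodic_task(spacing=N) form (the original also supports the bare @periodic_task form):

    import time

    def periodic_task(spacing=0, run_immediately=False, enabled=True):
        def decorator(f):
            f._periodic_task = True
            f._periodic_enabled = enabled
            f._periodic_spacing = spacing
            # None makes the scheduler treat the task as immediately due.
            f._periodic_last_run = None if run_immediately else time.time()
            return f
        return decorator

    class Tasks(object):
        @periodic_task(spacing=60)
        def poll_alarms(self):
            pass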
- try: - cls._periodic_tasks = cls._periodic_tasks[:] - except AttributeError: - cls._periodic_tasks = [] - - try: - cls._periodic_spacing = cls._periodic_spacing.copy() - except AttributeError: - cls._periodic_spacing = {} - - for value in cls.__dict__.values(): - if getattr(value, '_periodic_task', False): - task = value - name = task.__name__ - - if task._periodic_spacing < 0: - LOG.info(_LI('Skipping periodic task %(task)s because ' - 'its interval is negative'), - {'task': name}) - continue - if not task._periodic_enabled: - LOG.info(_LI('Skipping periodic task %(task)s because ' - 'it is disabled'), - {'task': name}) - continue - - # A periodic spacing of zero indicates that this task should - # be run every pass - if task._periodic_spacing == 0: - task._periodic_spacing = None - - cls._periodic_tasks.append((name, task)) - cls._periodic_spacing[name] = task._periodic_spacing - - -@six.add_metaclass(_PeriodicTasksMeta) -class PeriodicTasks(object): - def __init__(self): - super(PeriodicTasks, self).__init__() - self._periodic_last_run = {} - for name, task in self._periodic_tasks: - self._periodic_last_run[name] = task._periodic_last_run - - def run_periodic_tasks(self, context, raise_on_error=False): - """Tasks to be run at a periodic interval.""" - idle_for = DEFAULT_INTERVAL - for task_name, task in self._periodic_tasks: - full_task_name = '.'.join([self.__class__.__name__, task_name]) - - spacing = self._periodic_spacing[task_name] - last_run = self._periodic_last_run[task_name] - - # If a periodic task is _nearly_ due, then we'll run it early - if spacing is not None: - idle_for = min(idle_for, spacing) - if last_run is not None: - delta = last_run + spacing - time.time() - if delta > 0.2: - idle_for = min(idle_for, delta) - continue - - LOG.debug("Running periodic task %(full_task_name)s", - {"full_task_name": full_task_name}) - self._periodic_last_run[task_name] = time.time() - - try: - task(self, context) - except Exception as e: - if raise_on_error: - raise - LOG.exception(_LE("Error during %(full_task_name)s: %(e)s"), - {"full_task_name": full_task_name, "e": e}) - time.sleep(0) - - return idle_for diff --git a/cerberus/openstack/common/policy.py b/cerberus/openstack/common/policy.py deleted file mode 100644 index d6d1747..0000000 --- a/cerberus/openstack/common/policy.py +++ /dev/null @@ -1,897 +0,0 @@ -# Copyright (c) 2012 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Common Policy Engine Implementation - -Policies can be expressed in one of two forms: A list of lists, or a -string written in the new policy language. - -In the list-of-lists representation, each check inside the innermost -list is combined as with an "and" conjunction--for that check to pass, -all the specified checks must pass. These innermost lists are then -combined as with an "or" conjunction. This is the original way of -expressing policies, but there now exists a new way: the policy -language. 
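run_periodic_tasks above returns how long the caller may idle before the next pass, running any task that is due or within 0.2 s of due. The scheduling arithmetic in isolation, as a standalone sketch over plain dicts:

    import time

    DEFAULT_INTERVAL = 60.0

    def run_periodic_tasks(tasks, last_run, spacing):
        """One scheduler pass.

        tasks:    {name: callable}
        last_run: {name: timestamp or None}  (None = run immediately)
        spacing:  {name: seconds or None}    (None = run every pass)
        """
        idle_for = DEFAULT_INTERVAL
        for name, task in tasks.items():
            if spacing[name] is not None:
                idle_for = min(idle_for, spacing[name])
                if last_run[name] is not None:
                    delta = last_run[name] + spacing[name] - time.time()
                    if delta > 0.2:          # not yet (nearly) due
                        idle_for = min(idle_for, delta)
                        continue
            last_run[name] = time.time()
            task()
        return idle_for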
- -In the policy language, each check is specified the same way as in the -list-of-lists representation: a simple "a:b" pair that is matched to -the correct code to perform that check. However, conjunction -operators are available, allowing for more expressiveness in crafting -policies. - -As an example, take the following rule, expressed in the list-of-lists -representation:: - - [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]] - -In the policy language, this becomes:: - - role:admin or (project_id:%(project_id)s and role:projectadmin) - -The policy language also has the "not" operator, allowing a richer -policy rule:: - - project_id:%(project_id)s and not role:dunce - -It is possible to perform policy checks on the following user -attributes (obtained through the token): user_id, domain_id or -project_id:: - - domain_id: - -Attributes sent along with API calls can be used by the policy engine -(on the right side of the expression), by using the following syntax:: - - :user.id - -Contextual attributes of objects identified by their IDs are loaded -from the database. They are also available to the policy engine and -can be checked through the `target` keyword:: - - :target.role.name - -All these attributes (related to users, API calls, and context) can be -checked against each other or against constants, be it literals (True, -) or strings. - -Finally, two special policy checks should be mentioned; the policy -check "@" will always accept an access, and the policy check "!" will -always reject an access. (Note that if a rule is either the empty -list ("[]") or the empty string, this is equivalent to the "@" policy -check.) Of these, the "!" policy check is probably the most useful, -as it allows particular rules to be explicitly disabled. -""" - -import abc -import ast -import re - -from oslo.config import cfg -import six -import six.moves.urllib.parse as urlparse -import six.moves.urllib.request as urlrequest - -from cerberus.openstack.common import fileutils -from cerberus.openstack.common.gettextutils import _, _LE -from cerberus.openstack.common import jsonutils -from cerberus.openstack.common import log as logging - - -policy_opts = [ - cfg.StrOpt('policy_file', - default='policy.json', - help=_('JSON file containing policy')), - cfg.StrOpt('policy_default_rule', - default='default', - help=_('Rule enforced when requested rule is not found')), -] - -CONF = cfg.CONF -CONF.register_opts(policy_opts) - -LOG = logging.getLogger(__name__) - -_checks = {} - - -class PolicyNotAuthorized(Exception): - - def __init__(self, rule): - msg = _("Policy doesn't allow %s to be performed.") % rule - super(PolicyNotAuthorized, self).__init__(msg) - - -class Rules(dict): - """A store for rules. 
Handles the default_rule setting directly.""" - - @classmethod - def load_json(cls, data, default_rule=None): - """Allow loading of JSON rule data.""" - - # Suck in the JSON data and parse the rules - rules = dict((k, parse_rule(v)) for k, v in - jsonutils.loads(data).items()) - - return cls(rules, default_rule) - - def __init__(self, rules=None, default_rule=None): - """Initialize the Rules store.""" - - super(Rules, self).__init__(rules or {}) - self.default_rule = default_rule - - def __missing__(self, key): - """Implements the default rule handling.""" - - if isinstance(self.default_rule, dict): - raise KeyError(key) - - # If the default rule isn't actually defined, do something - # reasonably intelligent - if not self.default_rule: - raise KeyError(key) - - if isinstance(self.default_rule, BaseCheck): - return self.default_rule - - # We need to check this or we can get infinite recursion - if self.default_rule not in self: - raise KeyError(key) - - elif isinstance(self.default_rule, six.string_types): - return self[self.default_rule] - - def __str__(self): - """Dumps a string representation of the rules.""" - - # Start by building the canonical strings for the rules - out_rules = {} - for key, value in self.items(): - # Use empty string for singleton TrueCheck instances - if isinstance(value, TrueCheck): - out_rules[key] = '' - else: - out_rules[key] = str(value) - - # Dump a pretty-printed JSON representation - return jsonutils.dumps(out_rules, indent=4) - - -class Enforcer(object): - """Responsible for loading and enforcing rules. - - :param policy_file: Custom policy file to use, if none is - specified, `CONF.policy_file` will be - used. - :param rules: Default dictionary / Rules to use. It will be - considered just in the first instantiation. If - `load_rules(True)`, `clear()` or `set_rules(True)` - is called this will be overwritten. - :param default_rule: Default rule to use, CONF.default_rule will - be used if none is specified. - :param use_conf: Whether to load rules from cache or config file. - """ - - def __init__(self, policy_file=None, rules=None, - default_rule=None, use_conf=True): - self.rules = Rules(rules, default_rule) - self.default_rule = default_rule or CONF.policy_default_rule - - self.policy_path = None - self.policy_file = policy_file or CONF.policy_file - self.use_conf = use_conf - - def set_rules(self, rules, overwrite=True, use_conf=False): - """Create a new Rules object based on the provided dict of rules. - - :param rules: New rules to use. It should be an instance of dict. - :param overwrite: Whether to overwrite current rules or update them - with the new rules. - :param use_conf: Whether to reload rules from cache or config file. - """ - - if not isinstance(rules, dict): - raise TypeError(_("Rules must be an instance of dict or Rules, " - "got %s instead") % type(rules)) - self.use_conf = use_conf - if overwrite: - self.rules = Rules(rules, self.default_rule) - else: - self.rules.update(rules) - - def clear(self): - """Clears Enforcer rules, policy's cache and policy's path.""" - self.set_rules({}) - self.default_rule = None - self.policy_path = None - - def load_rules(self, force_reload=False): - """Loads policy_path's rules. - - Policy file is cached and will be reloaded if modified. - - :param force_reload: Whether to overwrite current rules. 
- """ - - if force_reload: - self.use_conf = force_reload - - if self.use_conf: - if not self.policy_path: - self.policy_path = self._get_policy_path() - - reloaded, data = fileutils.read_cached_file( - self.policy_path, force_reload=force_reload) - if reloaded or not self.rules: - rules = Rules.load_json(data, self.default_rule) - self.set_rules(rules) - LOG.debug("Rules successfully reloaded") - - def _get_policy_path(self): - """Locate the policy json data file. - - :param policy_file: Custom policy file to locate. - - :returns: The policy path - - :raises: ConfigFilesNotFoundError if the file couldn't - be located. - """ - policy_file = CONF.find_file(self.policy_file) - - if policy_file: - return policy_file - - raise cfg.ConfigFilesNotFoundError((self.policy_file,)) - - def enforce(self, rule, target, creds, do_raise=False, - exc=None, *args, **kwargs): - """Checks authorization of a rule against the target and credentials. - - :param rule: A string or BaseCheck instance specifying the rule - to evaluate. - :param target: As much information about the object being operated - on as possible, as a dictionary. - :param creds: As much information about the user performing the - action as possible, as a dictionary. - :param do_raise: Whether to raise an exception or not if check - fails. - :param exc: Class of the exception to raise if the check fails. - Any remaining arguments passed to check() (both - positional and keyword arguments) will be passed to - the exception class. If not specified, PolicyNotAuthorized - will be used. - - :return: Returns False if the policy does not allow the action and - exc is not provided; otherwise, returns a value that - evaluates to True. Note: for rules using the "case" - expression, this True value will be the specified string - from the expression. - """ - - # NOTE(flaper87): Not logging target or creds to avoid - # potential security issues. - LOG.debug("Rule %s will be now enforced" % rule) - - self.load_rules() - - # Allow the rule to be a Check tree - if isinstance(rule, BaseCheck): - result = rule(target, creds, self) - elif not self.rules: - # No rules to reference means we're going to fail closed - result = False - else: - try: - # Evaluate the rule - result = self.rules[rule](target, creds, self) - except KeyError: - LOG.debug("Rule [%s] doesn't exist" % rule) - # If the rule doesn't exist, fail closed - result = False - - # If it is False, raise the exception if requested - if do_raise and not result: - if exc: - raise exc(*args, **kwargs) - - raise PolicyNotAuthorized(rule) - - return result - - -@six.add_metaclass(abc.ABCMeta) -class BaseCheck(object): - """Abstract base class for Check classes.""" - - @abc.abstractmethod - def __str__(self): - """String representation of the Check tree rooted at this node.""" - - pass - - @abc.abstractmethod - def __call__(self, target, cred, enforcer): - """Triggers if instance of the class is called. - - Performs the check. Returns False to reject the access or a - true value (not necessary True) to accept the access. - """ - - pass - - -class FalseCheck(BaseCheck): - """A policy check that always returns False (disallow).""" - - def __str__(self): - """Return a string representation of this check.""" - - return "!" 
- - def __call__(self, target, cred, enforcer): - """Check the policy.""" - - return False - - -class TrueCheck(BaseCheck): - """A policy check that always returns True (allow).""" - - def __str__(self): - """Return a string representation of this check.""" - - return "@" - - def __call__(self, target, cred, enforcer): - """Check the policy.""" - - return True - - -class Check(BaseCheck): - """A base class to allow for user-defined policy checks.""" - - def __init__(self, kind, match): - """Initiates Check instance. - - :param kind: The kind of the check, i.e., the field before the - ':'. - :param match: The match of the check, i.e., the field after - the ':'. - """ - - self.kind = kind - self.match = match - - def __str__(self): - """Return a string representation of this check.""" - - return "%s:%s" % (self.kind, self.match) - - -class NotCheck(BaseCheck): - """Implements the "not" logical operator. - - A policy check that inverts the result of another policy check. - """ - - def __init__(self, rule): - """Initialize the 'not' check. - - :param rule: The rule to negate. Must be a Check. - """ - - self.rule = rule - - def __str__(self): - """Return a string representation of this check.""" - - return "not %s" % self.rule - - def __call__(self, target, cred, enforcer): - """Check the policy. - - Returns the logical inverse of the wrapped check. - """ - - return not self.rule(target, cred, enforcer) - - -class AndCheck(BaseCheck): - """Implements the "and" logical operator. - - A policy check that requires that a list of other checks all return True. - """ - - def __init__(self, rules): - """Initialize the 'and' check. - - :param rules: A list of rules that will be tested. - """ - - self.rules = rules - - def __str__(self): - """Return a string representation of this check.""" - - return "(%s)" % ' and '.join(str(r) for r in self.rules) - - def __call__(self, target, cred, enforcer): - """Check the policy. - - Requires that all rules accept in order to return True. - """ - - for rule in self.rules: - if not rule(target, cred, enforcer): - return False - - return True - - def add_check(self, rule): - """Adds rule to be tested. - - Allows addition of another rule to the list of rules that will - be tested. Returns the AndCheck object for convenience. - """ - - self.rules.append(rule) - return self - - -class OrCheck(BaseCheck): - """Implements the "or" operator. - - A policy check that requires that at least one of a list of other - checks returns True. - """ - - def __init__(self, rules): - """Initialize the 'or' check. - - :param rules: A list of rules that will be tested. - """ - - self.rules = rules - - def __str__(self): - """Return a string representation of this check.""" - - return "(%s)" % ' or '.join(str(r) for r in self.rules) - - def __call__(self, target, cred, enforcer): - """Check the policy. - - Requires that at least one rule accept in order to return True. - """ - - for rule in self.rules: - if rule(target, cred, enforcer): - return True - return False - - def add_check(self, rule): - """Adds rule to be tested. - - Allows addition of another rule to the list of rules that will - be tested. Returns the OrCheck object for convenience. 
- """ - - self.rules.append(rule) - return self - - -def _parse_check(rule): - """Parse a single base check rule into an appropriate Check object.""" - - # Handle the special checks - if rule == '!': - return FalseCheck() - elif rule == '@': - return TrueCheck() - - try: - kind, match = rule.split(':', 1) - except Exception: - LOG.exception(_LE("Failed to understand rule %s") % rule) - # If the rule is invalid, we'll fail closed - return FalseCheck() - - # Find what implements the check - if kind in _checks: - return _checks[kind](kind, match) - elif None in _checks: - return _checks[None](kind, match) - else: - LOG.error(_LE("No handler for matches of kind %s") % kind) - return FalseCheck() - - -def _parse_list_rule(rule): - """Translates the old list-of-lists syntax into a tree of Check objects. - - Provided for backwards compatibility. - """ - - # Empty rule defaults to True - if not rule: - return TrueCheck() - - # Outer list is joined by "or"; inner list by "and" - or_list = [] - for inner_rule in rule: - # Elide empty inner lists - if not inner_rule: - continue - - # Handle bare strings - if isinstance(inner_rule, six.string_types): - inner_rule = [inner_rule] - - # Parse the inner rules into Check objects - and_list = [_parse_check(r) for r in inner_rule] - - # Append the appropriate check to the or_list - if len(and_list) == 1: - or_list.append(and_list[0]) - else: - or_list.append(AndCheck(and_list)) - - # If we have only one check, omit the "or" - if not or_list: - return FalseCheck() - elif len(or_list) == 1: - return or_list[0] - - return OrCheck(or_list) - - -# Used for tokenizing the policy language -_tokenize_re = re.compile(r'\s+') - - -def _parse_tokenize(rule): - """Tokenizer for the policy language. - - Most of the single-character tokens are specified in the - _tokenize_re; however, parentheses need to be handled specially, - because they can appear inside a check string. Thankfully, those - parentheses that appear inside a check string can never occur at - the very beginning or end ("%(variable)s" is the correct syntax). - """ - - for tok in _tokenize_re.split(rule): - # Skip empty tokens - if not tok or tok.isspace(): - continue - - # Handle leading parens on the token - clean = tok.lstrip('(') - for i in range(len(tok) - len(clean)): - yield '(', '(' - - # If it was only parentheses, continue - if not clean: - continue - else: - tok = clean - - # Handle trailing parens on the token - clean = tok.rstrip(')') - trail = len(tok) - len(clean) - - # Yield the cleaned token - lowered = clean.lower() - if lowered in ('and', 'or', 'not'): - # Special tokens - yield lowered, clean - elif clean: - # Not a special token, but not composed solely of ')' - if len(tok) >= 2 and ((tok[0], tok[-1]) in - [('"', '"'), ("'", "'")]): - # It's a quoted string - yield 'string', tok[1:-1] - else: - yield 'check', _parse_check(clean) - - # Yield the trailing parens - for i in range(trail): - yield ')', ')' - - -class ParseStateMeta(type): - """Metaclass for the ParseState class. - - Facilitates identifying reduction methods. - """ - - def __new__(mcs, name, bases, cls_dict): - """Create the class. - - Injects the 'reducers' list, a list of tuples matching token sequences - to the names of the corresponding reduction methods. 
- """ - - reducers = [] - - for key, value in cls_dict.items(): - if not hasattr(value, 'reducers'): - continue - for reduction in value.reducers: - reducers.append((reduction, key)) - - cls_dict['reducers'] = reducers - - return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict) - - -def reducer(*tokens): - """Decorator for reduction methods. - - Arguments are a sequence of tokens, in order, which should trigger running - this reduction method. - """ - - def decorator(func): - # Make sure we have a list of reducer sequences - if not hasattr(func, 'reducers'): - func.reducers = [] - - # Add the tokens to the list of reducer sequences - func.reducers.append(list(tokens)) - - return func - - return decorator - - -@six.add_metaclass(ParseStateMeta) -class ParseState(object): - """Implement the core of parsing the policy language. - - Uses a greedy reduction algorithm to reduce a sequence of tokens into - a single terminal, the value of which will be the root of the Check tree. - - Note: error reporting is rather lacking. The best we can get with - this parser formulation is an overall "parse failed" error. - Fortunately, the policy language is simple enough that this - shouldn't be that big a problem. - """ - - def __init__(self): - """Initialize the ParseState.""" - - self.tokens = [] - self.values = [] - - def reduce(self): - """Perform a greedy reduction of the token stream. - - If a reducer method matches, it will be executed, then the - reduce() method will be called recursively to search for any more - possible reductions. - """ - - for reduction, methname in self.reducers: - if (len(self.tokens) >= len(reduction) and - self.tokens[-len(reduction):] == reduction): - # Get the reduction method - meth = getattr(self, methname) - - # Reduce the token stream - results = meth(*self.values[-len(reduction):]) - - # Update the tokens and values - self.tokens[-len(reduction):] = [r[0] for r in results] - self.values[-len(reduction):] = [r[1] for r in results] - - # Check for any more reductions - return self.reduce() - - def shift(self, tok, value): - """Adds one more token to the state. Calls reduce().""" - - self.tokens.append(tok) - self.values.append(value) - - # Do a greedy reduce... - self.reduce() - - @property - def result(self): - """Obtain the final result of the parse. - - Raises ValueError if the parse failed to reduce to a single result. - """ - - if len(self.values) != 1: - raise ValueError("Could not parse rule") - return self.values[0] - - @reducer('(', 'check', ')') - @reducer('(', 'and_expr', ')') - @reducer('(', 'or_expr', ')') - def _wrap_check(self, _p1, check, _p2): - """Turn parenthesized expressions into a 'check' token.""" - - return [('check', check)] - - @reducer('check', 'and', 'check') - def _make_and_expr(self, check1, _and, check2): - """Create an 'and_expr'. - - Join two checks by the 'and' operator. - """ - - return [('and_expr', AndCheck([check1, check2]))] - - @reducer('and_expr', 'and', 'check') - def _extend_and_expr(self, and_expr, _and, check): - """Extend an 'and_expr' by adding one more check.""" - - return [('and_expr', and_expr.add_check(check))] - - @reducer('check', 'or', 'check') - def _make_or_expr(self, check1, _or, check2): - """Create an 'or_expr'. - - Join two checks by the 'or' operator. 
- """ - - return [('or_expr', OrCheck([check1, check2]))] - - @reducer('or_expr', 'or', 'check') - def _extend_or_expr(self, or_expr, _or, check): - """Extend an 'or_expr' by adding one more check.""" - - return [('or_expr', or_expr.add_check(check))] - - @reducer('not', 'check') - def _make_not_expr(self, _not, check): - """Invert the result of another check.""" - - return [('check', NotCheck(check))] - - -def _parse_text_rule(rule): - """Parses policy to the tree. - - Translates a policy written in the policy language into a tree of - Check objects. - """ - - # Empty rule means always accept - if not rule: - return TrueCheck() - - # Parse the token stream - state = ParseState() - for tok, value in _parse_tokenize(rule): - state.shift(tok, value) - - try: - return state.result - except ValueError: - # Couldn't parse the rule - LOG.exception(_LE("Failed to understand rule %r") % rule) - - # Fail closed - return FalseCheck() - - -def parse_rule(rule): - """Parses a policy rule into a tree of Check objects.""" - - # If the rule is a string, it's in the policy language - if isinstance(rule, six.string_types): - return _parse_text_rule(rule) - return _parse_list_rule(rule) - - -def register(name, func=None): - """Register a function or Check class as a policy check. - - :param name: Gives the name of the check type, e.g., 'rule', - 'role', etc. If name is None, a default check type - will be registered. - :param func: If given, provides the function or class to register. - If not given, returns a function taking one argument - to specify the function or class to register, - allowing use as a decorator. - """ - - # Perform the actual decoration by registering the function or - # class. Returns the function or class for compliance with the - # decorator interface. - def decorator(func): - _checks[name] = func - return func - - # If the function or class is given, do the registration - if func: - return decorator(func) - - return decorator - - -@register("rule") -class RuleCheck(Check): - def __call__(self, target, creds, enforcer): - """Recursively checks credentials based on the defined rules.""" - - try: - return enforcer.rules[self.match](target, creds, enforcer) - except KeyError: - # We don't have any matching rule; fail closed - return False - - -@register("role") -class RoleCheck(Check): - def __call__(self, target, creds, enforcer): - """Check that there is a matching role in the cred dict.""" - - return self.match.lower() in [x.lower() for x in creds['roles']] - - -@register('http') -class HttpCheck(Check): - def __call__(self, target, creds, enforcer): - """Check http: rules by calling to a remote server. - - This example implementation simply verifies that the response - is exactly 'True'. - """ - - url = ('http:' + self.match) % target - data = {'target': jsonutils.dumps(target), - 'credentials': jsonutils.dumps(creds)} - post_data = urlparse.urlencode(data) - f = urlrequest.urlopen(url, post_data) - return f.read() == "True" - - -@register(None) -class GenericCheck(Check): - def __call__(self, target, creds, enforcer): - """Check an individual match. 
- - Matches look like: - - tenant:%(tenant_id)s - role:compute:admin - True:%(user.enabled)s - 'Member':%(role.name)s - """ - - # TODO(termie): do dict inspection via dot syntax - try: - match = self.match % target - except KeyError: - # While doing GenericCheck if key not - # present in Target return false - return False - - try: - # Try to interpret self.kind as a literal - leftval = ast.literal_eval(self.kind) - except ValueError: - try: - leftval = creds[self.kind] - except KeyError: - return False - return match == six.text_type(leftval) diff --git a/cerberus/openstack/common/processutils.py b/cerberus/openstack/common/processutils.py deleted file mode 100644 index de617bc..0000000 --- a/cerberus/openstack/common/processutils.py +++ /dev/null @@ -1,272 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. -""" - -import errno -import logging -import os -import random -import shlex -import signal - -from eventlet.green import subprocess -from eventlet import greenthread -import six - -from cerberus.openstack.common.gettextutils import _ # noqa -from cerberus.openstack.common import strutils - - -LOG = logging.getLogger(__name__) - - -class InvalidArgumentError(Exception): - def __init__(self, message=None): - super(InvalidArgumentError, self).__init__(message) - - -class UnknownArgumentError(Exception): - def __init__(self, message=None): - super(UnknownArgumentError, self).__init__(message) - - -class ProcessExecutionError(Exception): - def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None, - description=None): - self.exit_code = exit_code - self.stderr = stderr - self.stdout = stdout - self.cmd = cmd - self.description = description - - if description is None: - description = _("Unexpected error while running command.") - if exit_code is None: - exit_code = '-' - message = _('%(description)s\n' - 'Command: %(cmd)s\n' - 'Exit code: %(exit_code)s\n' - 'Stdout: %(stdout)r\n' - 'Stderr: %(stderr)r') % {'description': description, - 'cmd': cmd, - 'exit_code': exit_code, - 'stdout': stdout, - 'stderr': stderr} - super(ProcessExecutionError, self).__init__(message) - - -class NoRootWrapSpecified(Exception): - def __init__(self, message=None): - super(NoRootWrapSpecified, self).__init__(message) - - -def _subprocess_setup(): - # Python installs a SIGPIPE handler by default. This is usually not what - # non-Python subprocesses expect. - signal.signal(signal.SIGPIPE, signal.SIG_DFL) - - -def execute(*cmd, **kwargs): - """Helper method to shell out and execute a command through subprocess. - - Allows optional retry. - - :param cmd: Passed to subprocess.Popen. - :type cmd: string - :param process_input: Send to opened process. - :type process_input: string - :param check_exit_code: Single bool, int, or list of allowed exit - codes. Defaults to [0]. Raise - :class:`ProcessExecutionError` unless - program exits with one of these code. 
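GenericCheck first substitutes its match against the target, then compares the result with either a literal or a creds entry. Two hypothetical calls, assuming the class above were importable (the third argument, the enforcer, is unused by this check):

    check = GenericCheck('tenant', '%(tenant_id)s')
    check({'tenant_id': 'abc123'}, {'tenant': 'abc123'}, None)   # True
    check({'tenant_id': 'abc123'}, {'tenant': 'other'}, None)    # False

    # With a literal on the left, as in True:%(user.enabled)s:
    GenericCheck('True', '%(user.enabled)s')({'user.enabled': 'True'},
                                             {}, None)           # True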
- :type check_exit_code: boolean, int, or [int] - :param delay_on_retry: True | False. Defaults to True. If set to True, - wait a short amount of time before retrying. - :type delay_on_retry: boolean - :param attempts: How many times to retry cmd. - :type attempts: int - :param run_as_root: True | False. Defaults to False. If set to True, - the command is prefixed by the command specified - in the root_helper kwarg. - :type run_as_root: boolean - :param root_helper: command to prefix to commands called with - run_as_root=True - :type root_helper: string - :param shell: whether or not there should be a shell used to - execute this command. Defaults to false. - :type shell: boolean - :param loglevel: log level for execute commands. - :type loglevel: int. (Should be logging.DEBUG or logging.INFO) - :returns: (stdout, stderr) from process execution - :raises: :class:`UnknownArgumentError` on - receiving unknown arguments - :raises: :class:`ProcessExecutionError` - """ - - process_input = kwargs.pop('process_input', None) - check_exit_code = kwargs.pop('check_exit_code', [0]) - ignore_exit_code = False - delay_on_retry = kwargs.pop('delay_on_retry', True) - attempts = kwargs.pop('attempts', 1) - run_as_root = kwargs.pop('run_as_root', False) - root_helper = kwargs.pop('root_helper', '') - shell = kwargs.pop('shell', False) - loglevel = kwargs.pop('loglevel', logging.DEBUG) - - if isinstance(check_exit_code, bool): - ignore_exit_code = not check_exit_code - check_exit_code = [0] - elif isinstance(check_exit_code, int): - check_exit_code = [check_exit_code] - - if kwargs: - raise UnknownArgumentError(_('Got unknown keyword args ' - 'to utils.execute: %r') % kwargs) - - if run_as_root and hasattr(os, 'geteuid') and os.geteuid() != 0: - if not root_helper: - raise NoRootWrapSpecified( - message=_('Command requested root, but did not ' - 'specify a root helper.')) - cmd = shlex.split(root_helper) + list(cmd) - - cmd = map(str, cmd) - sanitized_cmd = strutils.mask_password(' '.join(cmd)) - - while attempts > 0: - attempts -= 1 - try: - LOG.log(loglevel, _('Running cmd (subprocess): %s'), sanitized_cmd) - _PIPE = subprocess.PIPE # pylint: disable=E1101 - - if os.name == 'nt': - preexec_fn = None - close_fds = False - else: - preexec_fn = _subprocess_setup - close_fds = True - - obj = subprocess.Popen(cmd, - stdin=_PIPE, - stdout=_PIPE, - stderr=_PIPE, - close_fds=close_fds, - preexec_fn=preexec_fn, - shell=shell) - result = None - for _i in six.moves.range(20): - # NOTE(russellb) 20 is an arbitrary number of retries to - # prevent any chance of looping forever here. - try: - if process_input is not None: - result = obj.communicate(process_input) - else: - result = obj.communicate() - except OSError as e: - if e.errno in (errno.EAGAIN, errno.EINTR): - continue - raise - break - obj.stdin.close() # pylint: disable=E1101 - _returncode = obj.returncode # pylint: disable=E1101 - LOG.log(loglevel, _('Result was %s') % _returncode) - if not ignore_exit_code and _returncode not in check_exit_code: - (stdout, stderr) = result - sanitized_stdout = strutils.mask_password(stdout) - sanitized_stderr = strutils.mask_password(stderr) - raise ProcessExecutionError(exit_code=_returncode, - stdout=sanitized_stdout, - stderr=sanitized_stderr, - cmd=sanitized_cmd) - return result - except ProcessExecutionError: - if not attempts: - raise - else: - LOG.log(loglevel, _('%r failed. 
Retrying.'), sanitized_cmd) - if delay_on_retry: - greenthread.sleep(random.randint(20, 200) / 100.0) - finally: - # NOTE(termie): this appears to be necessary to let the subprocess - # call clean something up in between calls, without - # it two execute calls in a row hangs the second one - greenthread.sleep(0) - - -def trycmd(*args, **kwargs): - """A wrapper around execute() to more easily handle warnings and errors. - - Returns an (out, err) tuple of strings containing the output of - the command's stdout and stderr. If 'err' is not empty then the - command can be considered to have failed. - - :discard_warnings True | False. Defaults to False. If set to True, - then for succeeding commands, stderr is cleared - - """ - discard_warnings = kwargs.pop('discard_warnings', False) - - try: - out, err = execute(*args, **kwargs) - failed = False - except ProcessExecutionError as exn: - out, err = '', str(exn) - failed = True - - if not failed and discard_warnings and err: - # Handle commands that output to stderr but otherwise succeed - err = '' - - return out, err - - -def ssh_execute(ssh, cmd, process_input=None, - addl_env=None, check_exit_code=True): - sanitized_cmd = strutils.mask_password(cmd) - LOG.debug('Running cmd (SSH): %s', sanitized_cmd) - if addl_env: - raise InvalidArgumentError(_('Environment not supported over SSH')) - - if process_input: - # This is (probably) fixable if we need it... - raise InvalidArgumentError(_('process_input not supported over SSH')) - - stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd) - channel = stdout_stream.channel - - # NOTE(justinsb): This seems suspicious... - # ...other SSH clients have buffering issues with this approach - stdout = stdout_stream.read() - sanitized_stdout = strutils.mask_password(stdout) - stderr = stderr_stream.read() - sanitized_stderr = strutils.mask_password(stderr) - - stdin_stream.close() - - exit_status = channel.recv_exit_status() - - # exit_status == -1 if no exit code was returned - if exit_status != -1: - LOG.debug('Result was %s' % exit_status) - if check_exit_code and exit_status != 0: - raise ProcessExecutionError(exit_code=exit_status, - stdout=sanitized_stdout, - stderr=sanitized_stderr, - cmd=sanitized_cmd) - - return (sanitized_stdout, sanitized_stderr) diff --git a/cerberus/openstack/common/service.py b/cerberus/openstack/common/service.py deleted file mode 100644 index 6b5aefc..0000000 --- a/cerberus/openstack/common/service.py +++ /dev/null @@ -1,504 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Generic Node base class for all workers that run on hosts.""" - -import errno -import logging as std_logging -import os -import random -import signal -import sys -import time - -try: - # Importing just the symbol here because the io module does not - # exist in Python 2.6. 
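execute above wraps eventlet's green subprocess with password masking, root-helper handling, retries, and exit-code checking. The retry/exit-code core alone, sketched with the stdlib subprocess module and none of those extras:

    import subprocess
    import time

    def execute(cmd, attempts=1, check_exit_code=(0,), delay_on_retry=True):
        while attempts > 0:
            attempts -= 1
            proc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            if proc.returncode in check_exit_code:
                return stdout, stderr
            if not attempts:
                raise RuntimeError('%r failed with exit code %s'
                                   % (cmd, proc.returncode))
            if delay_on_retry:
                time.sleep(0.5)

    out, err = execute(['echo', 'hello'])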
- from io import UnsupportedOperation # noqa -except ImportError: - # Python 2.6 - UnsupportedOperation = None - -import eventlet -from eventlet import event -from oslo.config import cfg - -from cerberus.openstack.common import eventlet_backdoor -from cerberus.openstack.common.gettextutils import _LE, _LI, _LW -from cerberus.openstack.common import importutils -from cerberus.openstack.common import log as logging -from cerberus.openstack.common import systemd -from cerberus.openstack.common import threadgroup - - -rpc = importutils.try_import('cerberus.openstack.common.rpc') -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -def _sighup_supported(): - return hasattr(signal, 'SIGHUP') - - -def _is_daemon(): - # The process group for a foreground process will match the - # process group of the controlling terminal. If those values do - # not match, or ioctl() fails on the stdout file handle, we assume - # the process is running in the background as a daemon. - # http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics - try: - is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno()) - except OSError as err: - if err.errno == errno.ENOTTY: - # Assume we are a daemon because there is no terminal. - is_daemon = True - else: - raise - except UnsupportedOperation: - # Could not get the fileno for stdout, so we must be a daemon. - is_daemon = True - return is_daemon - - -def _is_sighup_and_daemon(signo): - if not (_sighup_supported() and signo == signal.SIGHUP): - # Avoid checking if we are a daemon, because the signal isn't - # SIGHUP. - return False - return _is_daemon() - - -def _signo_to_signame(signo): - signals = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT'} - if _sighup_supported(): - signals[signal.SIGHUP] = 'SIGHUP' - return signals[signo] - - -def _set_signals_handler(handler): - signal.signal(signal.SIGTERM, handler) - signal.signal(signal.SIGINT, handler) - if _sighup_supported(): - signal.signal(signal.SIGHUP, handler) - - -class Launcher(object): - """Launch one or more services and wait for them to complete.""" - - def __init__(self): - """Initialize the service launcher. - - :returns: None - - """ - self.services = Services() - self.backdoor_port = eventlet_backdoor.initialize_if_enabled() - - def launch_service(self, service): - """Load and start the given service. - - :param service: The service you would like to start. - :returns: None - - """ - service.backdoor_port = self.backdoor_port - self.services.add(service) - - def stop(self): - """Stop all services which are currently running. - - :returns: None - - """ - self.services.stop() - - def wait(self): - """Waits until all services have been stopped, and then returns. - - :returns: None - - """ - self.services.wait() - - def restart(self): - """Reload config files and restart service. 
- - :returns: None - - """ - cfg.CONF.reload_config_files() - self.services.restart() - - -class SignalExit(SystemExit): - def __init__(self, signo, exccode=1): - super(SignalExit, self).__init__(exccode) - self.signo = signo - - -class ServiceLauncher(Launcher): - def _handle_signal(self, signo, frame): - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - raise SignalExit(signo) - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _wait_for_exit_or_signal(self, ready_callback=None): - status = None - signo = 0 - - LOG.debug('Full set of CONF:') - CONF.log_opt_values(LOG, std_logging.DEBUG) - - try: - if ready_callback: - ready_callback() - super(ServiceLauncher, self).wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_LI('Caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - finally: - self.stop() - if rpc: - try: - rpc.cleanup() - except Exception: - # We're shutting down, so it doesn't matter at this point. - LOG.exception(_LE('Exception during rpc cleanup.')) - - return status, signo - - def wait(self, ready_callback=None): - systemd.notify_once() - while True: - self.handle_signal() - status, signo = self._wait_for_exit_or_signal(ready_callback) - if not _is_sighup_and_daemon(signo): - return status - self.restart() - - -class ServiceWrapper(object): - def __init__(self, service, workers): - self.service = service - self.workers = workers - self.children = set() - self.forktimes = [] - - -class ProcessLauncher(object): - def __init__(self, wait_interval=0.01): - """Constructor. - - :param wait_interval: The interval to sleep for between checks - of child process exit. - """ - self.children = {} - self.sigcaught = None - self.running = True - self.wait_interval = wait_interval - rfd, self.writepipe = os.pipe() - self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r') - self.handle_signal() - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _handle_signal(self, signo, frame): - self.sigcaught = signo - self.running = False - - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - - def _pipe_watcher(self): - # This will block until the write end is closed when the parent - # dies unexpectedly - self.readpipe.read() - - LOG.info(_LI('Parent process has died unexpectedly, exiting')) - - sys.exit(1) - - def _child_process_handle_signal(self): - # Setup child signal handlers differently - def _sigterm(*args): - signal.signal(signal.SIGTERM, signal.SIG_DFL) - raise SignalExit(signal.SIGTERM) - - def _sighup(*args): - signal.signal(signal.SIGHUP, signal.SIG_DFL) - raise SignalExit(signal.SIGHUP) - - signal.signal(signal.SIGTERM, _sigterm) - if _sighup_supported(): - signal.signal(signal.SIGHUP, _sighup) - # Block SIGINT and let the parent send us a SIGTERM - signal.signal(signal.SIGINT, signal.SIG_IGN) - - def _child_wait_for_exit_or_signal(self, launcher): - status = 0 - signo = 0 - - # NOTE(johannes): All exceptions are caught to ensure this - # doesn't fallback into the loop spawning children. It would - # be bad for a child to spawn more children. 
- try: - launcher.wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_LI('Caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - except BaseException: - LOG.exception(_LE('Unhandled exception')) - status = 2 - finally: - launcher.stop() - - return status, signo - - def _child_process(self, service): - self._child_process_handle_signal() - - # Reopen the eventlet hub to make sure we don't share an epoll - # fd with parent and/or siblings, which would be bad - eventlet.hubs.use_hub() - - # Close write to ensure only parent has it open - os.close(self.writepipe) - # Create greenthread to watch for parent to close pipe - eventlet.spawn_n(self._pipe_watcher) - - # Reseed random number generator - random.seed() - - launcher = Launcher() - launcher.launch_service(service) - return launcher - - def _start_child(self, wrap): - if len(wrap.forktimes) > wrap.workers: - # Limit ourselves to one process a second (over the period of - # number of workers * 1 second). This will allow workers to - # start up quickly but ensure we don't fork off children that - # die instantly too quickly. - if time.time() - wrap.forktimes[0] < wrap.workers: - LOG.info(_LI('Forking too fast, sleeping')) - time.sleep(1) - - wrap.forktimes.pop(0) - - wrap.forktimes.append(time.time()) - - pid = os.fork() - if pid == 0: - launcher = self._child_process(wrap.service) - while True: - self._child_process_handle_signal() - status, signo = self._child_wait_for_exit_or_signal(launcher) - if not _is_sighup_and_daemon(signo): - break - launcher.restart() - - os._exit(status) - - LOG.info(_LI('Started child %d'), pid) - - wrap.children.add(pid) - self.children[pid] = wrap - - return pid - - def launch_service(self, service, workers=1): - wrap = ServiceWrapper(service, workers) - - LOG.info(_LI('Starting %d workers'), wrap.workers) - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def _wait_child(self): - try: - # Don't block if no child processes have exited - pid, status = os.waitpid(0, os.WNOHANG) - if not pid: - return None - except OSError as exc: - if exc.errno not in (errno.EINTR, errno.ECHILD): - raise - return None - - if os.WIFSIGNALED(status): - sig = os.WTERMSIG(status) - LOG.info(_LI('Child %(pid)d killed by signal %(sig)d'), - dict(pid=pid, sig=sig)) - else: - code = os.WEXITSTATUS(status) - LOG.info(_LI('Child %(pid)s exited with status %(code)d'), - dict(pid=pid, code=code)) - - if pid not in self.children: - LOG.warning(_LW('pid %d not in child list'), pid) - return None - - wrap = self.children.pop(pid) - wrap.children.remove(pid) - return wrap - - def _respawn_children(self): - while self.running: - wrap = self._wait_child() - if not wrap: - # Yield to other threads if no children have exited - # Sleep for a short time to avoid excessive CPU usage - # (see bug #1095346) - eventlet.greenthread.sleep(self.wait_interval) - continue - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def wait(self): - """Loop waiting on children to die and respawning as necessary.""" - - systemd.notify_once() - LOG.debug('Full set of CONF:') - CONF.log_opt_values(LOG, std_logging.DEBUG) - - try: - while True: - self.handle_signal() - self._respawn_children() - if self.sigcaught: - signame = _signo_to_signame(self.sigcaught) - LOG.info(_LI('Caught %s, stopping children'), signame) - if not _is_sighup_and_daemon(self.sigcaught): - break - - for pid in 
self.children: - os.kill(pid, signal.SIGHUP) - self.running = True - self.sigcaught = None - except eventlet.greenlet.GreenletExit: - LOG.info(_LI("Wait called after thread killed. Cleaning up.")) - - for pid in self.children: - try: - os.kill(pid, signal.SIGTERM) - except OSError as exc: - if exc.errno != errno.ESRCH: - raise - - # Wait for children to die - if self.children: - LOG.info(_LI('Waiting on %d children to exit'), len(self.children)) - while self.children: - self._wait_child() - - -class Service(object): - """Service object for binaries running on hosts.""" - - def __init__(self, threads=1000): - self.tg = threadgroup.ThreadGroup(threads) - - # signal that the service is done shutting itself down: - self._done = event.Event() - - def reset(self): - # NOTE(Fengqian): docs for Event.reset() recommend against using it - self._done = event.Event() - - def start(self): - pass - - def stop(self): - self.tg.stop() - self.tg.wait() - # Signal that service cleanup is done: - if not self._done.ready(): - self._done.send() - - def wait(self): - self._done.wait() - - -class Services(object): - - def __init__(self): - self.services = [] - self.tg = threadgroup.ThreadGroup() - self.done = event.Event() - - def add(self, service): - self.services.append(service) - self.tg.add_thread(self.run_service, service, self.done) - - def stop(self): - # wait for graceful shutdown of services: - for service in self.services: - service.stop() - service.wait() - - # Each service has performed cleanup, now signal that the run_service - # wrapper threads can now die: - if not self.done.ready(): - self.done.send() - - # reap threads: - self.tg.stop() - - def wait(self): - self.tg.wait() - - def restart(self): - self.stop() - self.done = event.Event() - for restart_service in self.services: - restart_service.reset() - self.tg.add_thread(self.run_service, restart_service, self.done) - - @staticmethod - def run_service(service, done): - """Service start wrapper. - - :param service: service to run - :param done: event to wait on until a shutdown is triggered - :returns: None - - """ - service.start() - done.wait() - - -def launch(service, workers=1): - if workers is None or workers == 1: - launcher = ServiceLauncher() - launcher.launch_service(service) - else: - launcher = ProcessLauncher() - launcher.launch_service(service, workers=workers) - - return launcher diff --git a/cerberus/openstack/common/sslutils.py b/cerberus/openstack/common/sslutils.py deleted file mode 100644 index 5ad2766..0000000 --- a/cerberus/openstack/common/sslutils.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
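For reference, the launch() helper removed above selected a launcher from the worker count: ServiceLauncher runs the service in-process for a single worker, while ProcessLauncher forks one child per worker. A minimal sketch of how a consumer might have driven it; the MyService class and the 60-second timer are illustrative, not part of the removed code::

    import eventlet
    eventlet.monkey_patch()  # the launchers assume an eventlet-patched process

    from cerberus.openstack.common import service


    class MyService(service.Service):
        """Hypothetical worker: runs a periodic no-op on its thread group."""

        def start(self):
            super(MyService, self).start()
            # ThreadGroup.add_timer(interval, callback) wraps the callback
            # in a FixedIntervalLoopingCall fired every 60 seconds
            self.tg.add_timer(60, lambda: None)


    # workers=1 -> ServiceLauncher (threads only); workers>1 -> ProcessLauncher
    launcher = service.launch(MyService(), workers=2)
    launcher.wait()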
- -import os -import ssl - -from oslo.config import cfg - -from cerberus.openstack.common.gettextutils import _ - - -ssl_opts = [ - cfg.StrOpt('ca_file', - default=None, - help="CA certificate file to use to verify " - "connecting clients."), - cfg.StrOpt('cert_file', - default=None, - help="Certificate file to use when starting " - "the server securely."), - cfg.StrOpt('key_file', - default=None, - help="Private key file to use when starting " - "the server securely."), -] - - -CONF = cfg.CONF -CONF.register_opts(ssl_opts, "ssl") - - -def is_enabled(): - cert_file = CONF.ssl.cert_file - key_file = CONF.ssl.key_file - ca_file = CONF.ssl.ca_file - use_ssl = cert_file or key_file - - if cert_file and not os.path.exists(cert_file): - raise RuntimeError(_("Unable to find cert_file : %s") % cert_file) - - if ca_file and not os.path.exists(ca_file): - raise RuntimeError(_("Unable to find ca_file : %s") % ca_file) - - if key_file and not os.path.exists(key_file): - raise RuntimeError(_("Unable to find key_file : %s") % key_file) - - if use_ssl and (not cert_file or not key_file): - raise RuntimeError(_("When running server in SSL mode, you must " - "specify both a cert_file and key_file " - "option value in your configuration file")) - - return use_ssl - - -def wrap(sock): - ssl_kwargs = { - 'server_side': True, - 'certfile': CONF.ssl.cert_file, - 'keyfile': CONF.ssl.key_file, - 'cert_reqs': ssl.CERT_NONE, - } - - if CONF.ssl.ca_file: - ssl_kwargs['ca_certs'] = CONF.ssl.ca_file - ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED - - return ssl.wrap_socket(sock, **ssl_kwargs) - - -_SSL_PROTOCOLS = { - "tlsv1": ssl.PROTOCOL_TLSv1, - "sslv23": ssl.PROTOCOL_SSLv23, - "sslv3": ssl.PROTOCOL_SSLv3 -} - -try: - _SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2 -except AttributeError: - pass - - -def validate_ssl_version(version): - key = version.lower() - try: - return _SSL_PROTOCOLS[key] - except KeyError: - raise RuntimeError(_("Invalid SSL version : %s") % version) diff --git a/cerberus/openstack/common/strutils.py b/cerberus/openstack/common/strutils.py deleted file mode 100644 index e50c9b7..0000000 --- a/cerberus/openstack/common/strutils.py +++ /dev/null @@ -1,322 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. 
-""" - -import math -import re -import sys -import unicodedata - -import six - -from cerberus.openstack.common.gettextutils import _ - - -UNIT_PREFIX_EXPONENT = { - 'k': 1, - 'K': 1, - 'Ki': 1, - 'M': 2, - 'Mi': 2, - 'G': 3, - 'Gi': 3, - 'T': 4, - 'Ti': 4, -} -UNIT_SYSTEM_INFO = { - 'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')), - 'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')), -} - -TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes') -FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no') - -SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]") -SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+") - - -# NOTE(flaper87): The following globals are used by `mask_password` -_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password'] - -# NOTE(ldbragst): Let's build a list of regex objects using the list of -# _SANITIZE_KEYS we already have. This way, we only have to add the new key -# to the list of _SANITIZE_KEYS and we can generate regular expressions -# for XML and JSON automatically. -_SANITIZE_PATTERNS_2 = [] -_SANITIZE_PATTERNS_1 = [] - -# NOTE(amrith): Some regular expressions have only one parameter, some -# have two parameters. Use different lists of patterns here. -_FORMAT_PATTERNS_1 = [r'(%(key)s\s*[=]\s*)[^\s^\'^\"]+'] -_FORMAT_PATTERNS_2 = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', - r'(%(key)s\s+[\"\']).*?([\"\'])', - r'([-]{2}%(key)s\s+)[^\'^\"^=^\s]+([\s]*)', - r'(<%(key)s>).*?()', - r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', - r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])', - r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?' - '[\'"]).*?([\'"])', - r'(%(key)s\s*--?[A-z]+\s*)\S+(\s*)'] - -for key in _SANITIZE_KEYS: - for pattern in _FORMAT_PATTERNS_2: - reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) - _SANITIZE_PATTERNS_2.append(reg_ex) - - for pattern in _FORMAT_PATTERNS_1: - reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) - _SANITIZE_PATTERNS_1.append(reg_ex) - - -def int_from_bool_as_string(subject): - """Interpret a string as a boolean and return either 1 or 0. - - Any string value in: - - ('True', 'true', 'On', 'on', '1') - - is interpreted as a boolean True. - - Useful for JSON-decoded stuff and config file parsing - """ - return bool_from_string(subject) and 1 or 0 - - -def bool_from_string(subject, strict=False, default=False): - """Interpret a string as a boolean. - - A case-insensitive match is performed such that strings matching 't', - 'true', 'on', 'y', 'yes', or '1' are considered True and, when - `strict=False`, anything else returns the value specified by 'default'. - - Useful for JSON-decoded stuff and config file parsing. - - If `strict=True`, unrecognized values, including None, will raise a - ValueError which is useful when parsing values passed in from an API call. - Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'. - """ - if not isinstance(subject, six.string_types): - subject = str(subject) - - lowered = subject.strip().lower() - - if lowered in TRUE_STRINGS: - return True - elif lowered in FALSE_STRINGS: - return False - elif strict: - acceptable = ', '.join( - "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS)) - msg = _("Unrecognized value '%(val)s', acceptable values are:" - " %(acceptable)s") % {'val': subject, - 'acceptable': acceptable} - raise ValueError(msg) - else: - return default - - -def safe_decode(text, incoming=None, errors='strict'): - """Decodes incoming text/bytes string using `incoming` if they're not - already unicode. 
- - :param incoming: Text's current encoding - :param errors: Errors handling policy. See here for valid - values http://docs.python.org/2/library/codecs.html - :returns: text or a unicode `incoming` encoded - representation of it. - :raises TypeError: If text is not an instance of str - """ - if not isinstance(text, (six.string_types, six.binary_type)): - raise TypeError("%s can't be decoded" % type(text)) - - if isinstance(text, six.text_type): - return text - - if not incoming: - incoming = (sys.stdin.encoding or - sys.getdefaultencoding()) - - try: - return text.decode(incoming, errors) - except UnicodeDecodeError: - # Note(flaper87) If we get here, it means that - # sys.stdin.encoding / sys.getdefaultencoding - # didn't return a suitable encoding to decode - # text. This happens mostly when global LANG - # var is not set correctly and there's no - # default encoding. In this case, most likely - # python will use ASCII or ANSI encoders as - # default encodings but they won't be capable - # of decoding non-ASCII characters. - # - # Also, UTF-8 is being used since it's an ASCII - # extension. - return text.decode('utf-8', errors) - - -def safe_encode(text, incoming=None, - encoding='utf-8', errors='strict'): - """Encodes incoming text/bytes string using `encoding`. - - If incoming is not specified, text is expected to be encoded with - current python's default encoding. (`sys.getdefaultencoding`) - - :param incoming: Text's current encoding - :param encoding: Expected encoding for text (Default UTF-8) - :param errors: Errors handling policy. See here for valid - values http://docs.python.org/2/library/codecs.html - :returns: text or a bytestring `encoding` encoded - representation of it. - :raises TypeError: If text is not an instance of str - """ - if not isinstance(text, (six.string_types, six.binary_type)): - raise TypeError("%s can't be encoded" % type(text)) - - if not incoming: - incoming = (sys.stdin.encoding or - sys.getdefaultencoding()) - - if isinstance(text, six.text_type): - if six.PY3: - return text.encode(encoding, errors).decode(incoming) - else: - return text.encode(encoding, errors) - elif text and encoding != incoming: - # Decode text before encoding it with `encoding` - text = safe_decode(text, incoming, errors) - if six.PY3: - return text.encode(encoding, errors).decode(incoming) - else: - return text.encode(encoding, errors) - - return text - - -def string_to_bytes(text, unit_system='IEC', return_int=False): - """Converts a string into an float representation of bytes. - - The units supported for IEC :: - - Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it) - KB, KiB, MB, MiB, GB, GiB, TB, TiB - - The units supported for SI :: - - kb(it), Mb(it), Gb(it), Tb(it) - kB, MB, GB, TB - - Note that the SI unit system does not support capital letter 'K' - - :param text: String input for bytes size conversion. - :param unit_system: Unit system for byte size conversion. - :param return_int: If True, returns integer representation of text - in bytes. (default: decimal) - :returns: Numerical representation of text in bytes. - :raises ValueError: If text has an invalid value. 
- - """ - try: - base, reg_ex = UNIT_SYSTEM_INFO[unit_system] - except KeyError: - msg = _('Invalid unit system: "%s"') % unit_system - raise ValueError(msg) - match = reg_ex.match(text) - if match: - magnitude = float(match.group(1)) - unit_prefix = match.group(2) - if match.group(3) in ['b', 'bit']: - magnitude /= 8 - else: - msg = _('Invalid string format: %s') % text - raise ValueError(msg) - if not unit_prefix: - res = magnitude - else: - res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix]) - if return_int: - return int(math.ceil(res)) - return res - - -def to_slug(value, incoming=None, errors="strict"): - """Normalize string. - - Convert to lowercase, remove non-word characters, and convert spaces - to hyphens. - - Inspired by Django's `slugify` filter. - - :param value: Text to slugify - :param incoming: Text's current encoding - :param errors: Errors handling policy. See here for valid - values http://docs.python.org/2/library/codecs.html - :returns: slugified unicode representation of `value` - :raises TypeError: If text is not an instance of str - """ - value = safe_decode(value, incoming, errors) - # NOTE(aababilov): no need to use safe_(encode|decode) here: - # encodings are always "ascii", error handling is always "ignore" - # and types are always known (first: unicode; second: str) - value = unicodedata.normalize("NFKD", value).encode( - "ascii", "ignore").decode("ascii") - value = SLUGIFY_STRIP_RE.sub("", value).strip().lower() - return SLUGIFY_HYPHENATE_RE.sub("-", value) - - -def mask_password(message, secret="***"): - """Replace password with 'secret' in message. - - :param message: The string which includes security information. - :param secret: value with which to replace passwords. - :returns: The unicode value of message with the password fields masked. - - For example: - - >>> mask_password("'adminPass' : 'aaaaa'") - "'adminPass' : '***'" - >>> mask_password("'admin_pass' : 'aaaaa'") - "'admin_pass' : '***'" - >>> mask_password('"password" : "aaaaa"') - '"password" : "***"' - >>> mask_password("'original_password' : 'aaaaa'") - "'original_password' : '***'" - >>> mask_password("u'original_password' : u'aaaaa'") - "u'original_password' : u'***'" - """ - try: - message = six.text_type(message) - except UnicodeDecodeError: - # NOTE(jecarey): Temporary fix to handle cases where message is a - # byte string. A better solution will be provided in Kilo. - pass - - # NOTE(ldbragst): Check to see if anything in message contains any key - # specified in _SANITIZE_KEYS, if not then just return the message since - # we don't have to mask any passwords. - if not any(key in message for key in _SANITIZE_KEYS): - return message - - substitute = r'\g<1>' + secret + r'\g<2>' - for pattern in _SANITIZE_PATTERNS_2: - message = re.sub(pattern, substitute, message) - - substitute = r'\g<1>' + secret - for pattern in _SANITIZE_PATTERNS_1: - message = re.sub(pattern, substitute, message) - - return message diff --git a/cerberus/openstack/common/systemd.py b/cerberus/openstack/common/systemd.py deleted file mode 100644 index a5707cb..0000000 --- a/cerberus/openstack/common/systemd.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright 2012-2014 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Helper module for systemd service readiness notification. -""" - -import os -import socket -import sys - -from cerberus.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -def _abstractify(socket_name): - if socket_name.startswith('@'): - # abstract namespace socket - socket_name = '\0%s' % socket_name[1:] - return socket_name - - -def _sd_notify(unset_env, msg): - notify_socket = os.getenv('NOTIFY_SOCKET') - if notify_socket: - sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) - try: - sock.connect(_abstractify(notify_socket)) - sock.sendall(msg) - if unset_env: - del os.environ['NOTIFY_SOCKET'] - except EnvironmentError: - LOG.debug("Systemd notification failed", exc_info=True) - finally: - sock.close() - - -def notify(): - """Send notification to Systemd that service is ready. - For details see - http://www.freedesktop.org/software/systemd/man/sd_notify.html - """ - _sd_notify(False, 'READY=1') - - -def notify_once(): - """Send notification once to Systemd that service is ready. - Systemd sets NOTIFY_SOCKET environment variable with the name of the - socket listening for notifications from services. - This method removes the NOTIFY_SOCKET environment variable to ensure - notification is sent only once. - """ - _sd_notify(True, 'READY=1') - - -def onready(notify_socket, timeout): - """Wait for systemd style notification on the socket. - - :param notify_socket: local socket address - :type notify_socket: string - :param timeout: socket timeout - :type timeout: float - :returns: 0 service ready - 1 service not ready - 2 timeout occurred - """ - sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) - sock.settimeout(timeout) - sock.bind(_abstractify(notify_socket)) - try: - msg = sock.recv(512) - except socket.timeout: - return 2 - finally: - sock.close() - if 'READY=1' in msg: - return 0 - else: - return 1 - - -if __name__ == '__main__': - # simple CLI for testing - if len(sys.argv) == 1: - notify() - elif len(sys.argv) >= 2: - timeout = float(sys.argv[1]) - notify_socket = os.getenv('NOTIFY_SOCKET') - if notify_socket: - retval = onready(notify_socket, timeout) - sys.exit(retval) diff --git a/cerberus/openstack/common/test.py b/cerberus/openstack/common/test.py deleted file mode 100644 index a391f54..0000000 --- a/cerberus/openstack/common/test.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
- -############################################################################## -############################################################################## -## -## DO NOT MODIFY THIS FILE -## -## This file is being graduated to the cerberustest library. Please make all -## changes there, and only backport critical fixes here. - dhellmann -## -############################################################################## -############################################################################## - -"""Common utilities used in testing""" - -import logging -import os -import tempfile - -import fixtures -import testtools - -_TRUE_VALUES = ('True', 'true', '1', 'yes') -_LOG_FORMAT = "%(levelname)8s [%(name)s] %(message)s" - - -class BaseTestCase(testtools.TestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - self._set_timeout() - self._fake_output() - self._fake_logs() - self.useFixture(fixtures.NestedTempfile()) - self.useFixture(fixtures.TempHomeDir()) - self.tempdirs = [] - - def _set_timeout(self): - test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) - try: - test_timeout = int(test_timeout) - except ValueError: - # If timeout value is invalid do not set a timeout. - test_timeout = 0 - if test_timeout > 0: - self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) - - def _fake_output(self): - if os.environ.get('OS_STDOUT_CAPTURE') in _TRUE_VALUES: - stdout = self.useFixture(fixtures.StringStream('stdout')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) - if os.environ.get('OS_STDERR_CAPTURE') in _TRUE_VALUES: - stderr = self.useFixture(fixtures.StringStream('stderr')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) - - def _fake_logs(self): - if os.environ.get('OS_DEBUG') in _TRUE_VALUES: - level = logging.DEBUG - else: - level = logging.INFO - capture_logs = os.environ.get('OS_LOG_CAPTURE') in _TRUE_VALUES - if capture_logs: - self.useFixture( - fixtures.FakeLogger( - format=_LOG_FORMAT, - level=level, - nuke_handlers=capture_logs, - ) - ) - else: - logging.basicConfig(format=_LOG_FORMAT, level=level) - - def create_tempfiles(self, files, ext='.conf'): - tempfiles = [] - for (basename, contents) in files: - if not os.path.isabs(basename): - (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext) - else: - path = basename + ext - fd = os.open(path, os.O_CREAT | os.O_WRONLY) - tempfiles.append(path) - try: - os.write(fd, contents) - finally: - os.close(fd) - return tempfiles diff --git a/cerberus/openstack/common/threadgroup.py b/cerberus/openstack/common/threadgroup.py deleted file mode 100644 index 6a80a2a..0000000 --- a/cerberus/openstack/common/threadgroup.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
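The BaseTestCase removed above wires its fixtures from environment variables (OS_TEST_TIMEOUT, OS_STDOUT_CAPTURE, OS_STDERR_CAPTURE, OS_DEBUG, OS_LOG_CAPTURE). A minimal sketch of a consumer; the test name and file contents are illustrative::

    import os

    from cerberus.openstack.common import test


    class TempfileTest(test.BaseTestCase):

        def test_create_tempfiles(self):
            # create_tempfiles() returns the paths it wrote; the files land
            # under the NestedTempfile fixture set up in setUp(), so they
            # are cleaned up automatically when the test ends
            paths = self.create_tempfiles(
                [('sample', '[DEFAULT]\nverbose = True\n')])
            self.assertTrue(os.path.exists(paths[0]))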
-import threading - -import eventlet -from eventlet import greenpool - -from cerberus.openstack.common import log as logging -from cerberus.openstack.common import loopingcall - - -LOG = logging.getLogger(__name__) - - -def _thread_done(gt, *args, **kwargs): - """Callback function to be passed to GreenThread.link() when we spawn(). - Calls the :class:`ThreadGroup` to notify it. - - """ - kwargs['group'].thread_done(kwargs['thread']) - - -class Thread(object): - """Wrapper around a greenthread that holds a reference to the - :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when - it is done so it can be removed from the threads list. - """ - def __init__(self, thread, group): - self.thread = thread - self.thread.link(_thread_done, group=group, thread=self) - - def stop(self): - self.thread.kill() - - def wait(self): - return self.thread.wait() - - def link(self, func, *args, **kwargs): - self.thread.link(func, *args, **kwargs) - - -class ThreadGroup(object): - """The point of the ThreadGroup class is to: - - * keep track of timers and greenthreads (making it easier to stop them - when need be). - * provide an easy API to add timers. - """ - def __init__(self, thread_pool_size=10): - self.pool = greenpool.GreenPool(thread_pool_size) - self.threads = [] - self.timers = [] - - def add_dynamic_timer(self, callback, initial_delay=None, - periodic_interval_max=None, *args, **kwargs): - timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs) - timer.start(initial_delay=initial_delay, - periodic_interval_max=periodic_interval_max) - self.timers.append(timer) - - def add_timer(self, interval, callback, initial_delay=None, - *args, **kwargs): - pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) - pulse.start(interval=interval, - initial_delay=initial_delay) - self.timers.append(pulse) - - def add_thread(self, callback, *args, **kwargs): - gt = self.pool.spawn(callback, *args, **kwargs) - th = Thread(gt, self) - self.threads.append(th) - return th - - def thread_done(self, thread): - self.threads.remove(thread) - - def _stop_threads(self): - current = threading.current_thread() - - # Iterate over a copy of self.threads so thread_done doesn't - # modify the list while we're iterating - for x in self.threads[:]: - if x is current: - # don't kill the current thread. - continue - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - - def stop_timers(self): - for x in self.timers: - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - self.timers = [] - - def stop(self, graceful=False): - """stop function has the option of graceful=True/False. - - * In case of graceful=True, wait for all threads to be finished. - Never kill threads. - * In case of graceful=False, kill threads immediately.
- """ - self.stop_timers() - if graceful: - # In case of graceful=True, wait for all threads to be - # finished, never kill threads - self.wait() - else: - # In case of graceful=False(Default), kill threads - # immediately - self._stop_threads() - - def wait(self): - for x in self.timers: - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) - current = threading.current_thread() - - # Iterate over a copy of self.threads so thread_done doesn't - # modify the list while we're iterating - for x in self.threads[:]: - if x is current: - continue - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) diff --git a/cerberus/openstack/common/timeutils.py b/cerberus/openstack/common/timeutils.py deleted file mode 100644 index 52688a0..0000000 --- a/cerberus/openstack/common/timeutils.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Time related utilities and helper functions. -""" - -import calendar -import datetime -import time - -import iso8601 -import six - - -# ISO 8601 extended time format with microseconds -_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' -_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' -PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND - - -def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format.""" - if not at: - at = utcnow() - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) - tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - st += ('Z' if tz == 'UTC' else tz) - return st - - -def parse_isotime(timestr): - """Parse time from ISO 8601 format.""" - try: - return iso8601.parse_date(timestr) - except iso8601.ParseError as e: - raise ValueError(six.text_type(e)) - except TypeError as e: - raise ValueError(six.text_type(e)) - - -def strtime(at=None, fmt=PERFECT_TIME_FORMAT): - """Returns formatted utcnow.""" - if not at: - at = utcnow() - return at.strftime(fmt) - - -def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): - """Turn a formatted time back into a datetime.""" - return datetime.datetime.strptime(timestr, fmt) - - -def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object.""" - offset = timestamp.utcoffset() - if offset is None: - return timestamp - return timestamp.replace(tzinfo=None) - offset - - -def is_older_than(before, seconds): - """Return True if before is older than seconds.""" - if isinstance(before, six.string_types): - before = parse_strtime(before).replace(tzinfo=None) - else: - before = before.replace(tzinfo=None) - - return utcnow() - before > datetime.timedelta(seconds=seconds) - - -def is_newer_than(after, seconds): - """Return True if after is newer than seconds.""" - if isinstance(after, six.string_types): - after = parse_strtime(after).replace(tzinfo=None) - else: - after = after.replace(tzinfo=None) - - return after - 
utcnow() > datetime.timedelta(seconds=seconds) - - -def utcnow_ts(): - """Timestamp version of our utcnow function.""" - if utcnow.override_time is None: - # NOTE(kgriffs): This is several times faster - # than going through calendar.timegm(...) - return int(time.time()) - - return calendar.timegm(utcnow().timetuple()) - - -def utcnow(): - """Overridable version of utils.utcnow.""" - if utcnow.override_time: - try: - return utcnow.override_time.pop(0) - except AttributeError: - return utcnow.override_time - return datetime.datetime.utcnow() - - -def iso8601_from_timestamp(timestamp): - """Returns a iso8601 formatted date from timestamp.""" - return isotime(datetime.datetime.utcfromtimestamp(timestamp)) - - -utcnow.override_time = None - - -def set_time_override(override_time=None): - """Overrides utils.utcnow. - - Make it return a constant time or a list thereof, one at a time. - - :param override_time: datetime instance or list thereof. If not - given, defaults to the current UTC time. - """ - utcnow.override_time = override_time or datetime.datetime.utcnow() - - -def advance_time_delta(timedelta): - """Advance overridden time using a datetime.timedelta.""" - assert(not utcnow.override_time is None) - try: - for dt in utcnow.override_time: - dt += timedelta - except TypeError: - utcnow.override_time += timedelta - - -def advance_time_seconds(seconds): - """Advance overridden time by seconds.""" - advance_time_delta(datetime.timedelta(0, seconds)) - - -def clear_time_override(): - """Remove the overridden time.""" - utcnow.override_time = None - - -def marshall_now(now=None): - """Make an rpc-safe datetime with microseconds. - - Note: tzinfo is stripped, but not required for relative times. - """ - if not now: - now = utcnow() - return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, - minute=now.minute, second=now.second, - microsecond=now.microsecond) - - -def unmarshall_time(tyme): - """Unmarshall a datetime dict.""" - return datetime.datetime(day=tyme['day'], - month=tyme['month'], - year=tyme['year'], - hour=tyme['hour'], - minute=tyme['minute'], - second=tyme['second'], - microsecond=tyme['microsecond']) - - -def delta_seconds(before, after): - """Return the difference between two timing objects. - - Compute the difference in seconds between two date, time, or - datetime objects (as a float, to microsecond resolution). - """ - delta = after - before - return total_seconds(delta) - - -def total_seconds(delta): - """Return the total seconds of datetime.timedelta object. - - Compute total seconds of datetime.timedelta, datetime.timedelta - doesn't have method total_seconds in Python2.6, calculate it manually. - """ - try: - return delta.total_seconds() - except AttributeError: - return ((delta.days * 24 * 3600) + delta.seconds + - float(delta.microseconds) / (10 ** 6)) - - -def is_soon(dt, window): - """Determines if time is going to happen in the next window seconds. - - :param dt: the time - :param window: minimum seconds to remain to consider the time not soon - - :return: True if expiration is within the given duration - """ - soon = (utcnow() + datetime.timedelta(seconds=window)) - return normalize_time(dt) <= soon diff --git a/cerberus/openstack/common/uuidutils.py b/cerberus/openstack/common/uuidutils.py deleted file mode 100644 index 234b880..0000000 --- a/cerberus/openstack/common/uuidutils.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2012 Intel Corporation. -# All Rights Reserved. 
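The override hooks in the timeutils module removed above exist for tests: once set_time_override() is called, utcnow() serves the overridden value until it is cleared. A short sketch of the intended flow, with an illustrative frozen date::

    import datetime

    from cerberus.openstack.common import timeutils

    timeutils.set_time_override(datetime.datetime(2014, 1, 1))
    try:
        timeutils.advance_time_seconds(30)
        frozen = timeutils.utcnow()          # 2014-01-01 00:00:30
        payload = timeutils.marshall_now()   # rpc-safe dict, tzinfo stripped
        assert timeutils.unmarshall_time(payload) == frozen
    finally:
        timeutils.clear_time_override()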
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -UUID related utilities and helper functions. -""" - -import uuid - - -def generate_uuid(): - return str(uuid.uuid4()) - - -def is_uuid_like(val): - """Returns validation of a value as a UUID. - - For our purposes, a UUID is a canonical form string: - aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa - - """ - try: - return str(uuid.UUID(val)) == val - except (TypeError, ValueError, AttributeError): - return False diff --git a/cerberus/openstack/common/versionutils.py b/cerberus/openstack/common/versionutils.py deleted file mode 100644 index 0ed0452..0000000 --- a/cerberus/openstack/common/versionutils.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Helpers for comparing version strings. -""" - -import functools -import pkg_resources - -from cerberus.openstack.common.gettextutils import _ -from cerberus.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -class deprecated(object): - """A decorator to mark callables as deprecated. - - This decorator logs a deprecation message when the callable it decorates is - used. The message will include the release where the callable was - deprecated, the release where it may be removed and possibly an optional - replacement. - - Examples: - - 1. Specifying the required deprecated release - - >>> @deprecated(as_of=deprecated.ICEHOUSE) - ... def a(): pass - - 2. Specifying a replacement: - - >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()') - ... def b(): pass - - 3. Specifying the release where the functionality may be removed: - - >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=+1) - ... def c(): pass - - """ - - FOLSOM = 'F' - GRIZZLY = 'G' - HAVANA = 'H' - ICEHOUSE = 'I' - - _RELEASES = { - 'F': 'Folsom', - 'G': 'Grizzly', - 'H': 'Havana', - 'I': 'Icehouse', - } - - _deprecated_msg_with_alternative = _( - '%(what)s is deprecated as of %(as_of)s in favor of ' - '%(in_favor_of)s and may be removed in %(remove_in)s.') - - _deprecated_msg_no_alternative = _( - '%(what)s is deprecated as of %(as_of)s and may be ' - 'removed in %(remove_in)s. It will not be superseded.') - - def __init__(self, as_of, in_favor_of=None, remove_in=2, what=None): - """Initialize decorator - - :param as_of: the release deprecating the callable. Constants - are defined in this class for convenience.
- :param in_favor_of: the replacement for the callable (optional) - :param remove_in: an integer specifying how many releases to wait - before removing (default: 2) - :param what: name of the thing being deprecated (default: the - callable's name) - - """ - self.as_of = as_of - self.in_favor_of = in_favor_of - self.remove_in = remove_in - self.what = what - - def __call__(self, func): - if not self.what: - self.what = func.__name__ + '()' - - @functools.wraps(func) - def wrapped(*args, **kwargs): - msg, details = self._build_message() - LOG.deprecated(msg, details) - return func(*args, **kwargs) - return wrapped - - def _get_safe_to_remove_release(self, release): - # TODO(dstanek): this method will have to be reimplemented once - # when we get to the X release because once we get to the Y - # release, what is Y+2? - new_release = chr(ord(release) + self.remove_in) - if new_release in self._RELEASES: - return self._RELEASES[new_release] - else: - return new_release - - def _build_message(self): - details = dict(what=self.what, - as_of=self._RELEASES[self.as_of], - remove_in=self._get_safe_to_remove_release(self.as_of)) - - if self.in_favor_of: - details['in_favor_of'] = self.in_favor_of - msg = self._deprecated_msg_with_alternative - else: - msg = self._deprecated_msg_no_alternative - return msg, details - - -def is_compatible(requested_version, current_version, same_major=True): - """Determine whether `requested_version` is satisfied by - `current_version`; in other words, `current_version` is >= - `requested_version`. - - :param requested_version: version to check for compatibility - :param current_version: version to check against - :param same_major: if True, the major version must be identical between - `requested_version` and `current_version`. This is used when a - major-version difference indicates incompatibility between the two - versions. Since this is the common-case in practice, the default is - True. - :returns: True if compatible, False if not - """ - requested_parts = pkg_resources.parse_version(requested_version) - current_parts = pkg_resources.parse_version(current_version) - - if same_major and (requested_parts[0] != current_parts[0]): - return False - - return current_parts >= requested_parts diff --git a/cerberus/plugins/__init__.py b/cerberus/plugins/__init__.py deleted file mode 100644 index 73ca62b..0000000 --- a/cerberus/plugins/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/cerberus/plugins/base.py b/cerberus/plugins/base.py deleted file mode 100644 index dd50002..0000000 --- a/cerberus/plugins/base.py +++ /dev/null @@ -1,156 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import abc -import fnmatch -import json -import six - -import oslo.messaging - -from cerberus.openstack.common import log -from cerberus.openstack.common import loopingcall -from cerberus.openstack.common import threadgroup - - -LOG = log.getLogger(__name__) - - -@six.add_metaclass(abc.ABCMeta) -class PluginBase(object): - """ - Base class for all plugins - """ - - TOOL_NAME = "" - TYPE = "" - PROVIDER = "" - DESCRIPTION = "" - - _name = None - - _uuid = None - - _event_groups = { - 'INSTANCE': [ - 'compute.instance.created', - 'compute.instance.deleted', - 'compute.instance.updated' - ], - 'NETWORK': [ - 'network.created', - ], - 'PROJECT': [ - 'project.created' - ] - } - - def __init__(self, description=None, provider=None, type=None, - tool_name=None): - self._subscribedEvents = [] - self._name = "{0}.{1}".format(self.__class__.__module__, - self.__class__.__name__) - - def subscribe_event(self, event): - if event not in self._subscribedEvents: - self._subscribedEvents.append(event) - - def register_manager(self, manager): - """ - Enables the plugin to add tasks to the manager - :param manager: the task manager to add tasks to - """ - self.manager = manager - - @staticmethod - def _handle_event_type(subscribed_events, event_type): - """Check whether event_type should be handled. - - An event type is handled if it matches one of the subscribed - event patterns (fnmatch-style). - """ - return any(map(lambda e: fnmatch.fnmatch(event_type, e), - subscribed_events)) - - @staticmethod - def get_targets(conf): - """Return a sequence of oslo.messaging.Target - - Sequence defining the exchange and topics to be connected for this - plugin.
- """ - return [oslo.messaging.Target(topic=topic) - for topic in conf.notification_topics] - - @abc.abstractmethod - def process_notification(self, ctxt, publisher_id, event_type, payload, - metadata): - pass - - def info(self, ctxt, publisher_id, event_type, payload, metadata): - # Check if event is registered for plugin - if self._handle_event_type(self._subscribedEvents, event_type): - self.process_notification(ctxt, publisher_id, event_type, payload, - metadata) - ''' - http://stackoverflow.com/questions/3378949/ - python-decorators-and-class-inheritance - http://stackoverflow.com/questions/338101/ - python-function-attributes-uses-and-abuses - ''' - @staticmethod - def webmethod(func): - func.is_webmethod = True - return func - - -class PluginEncoder(json.JSONEncoder): - def default(self, obj): - if not isinstance(obj, PluginBase): - return super(PluginEncoder, self).default(obj) - methods = [method for method in dir(obj) - if hasattr(getattr(obj, method), 'is_webmethod')] - return {'name': obj._name, - 'subscribed_events': obj._subscribedEvents, - 'methods': methods} - - -class FixedIntervalLoopingCallEncoder(json.JSONEncoder): - def default(self, obj): - if not isinstance(obj, loopingcall.FixedIntervalLoopingCall): - return super(FixedIntervalLoopingCallEncoder, self).default(obj) - if obj._running is True: - state = 'running' - else: - state = 'stopped' - return {'id': str(obj.kw.get('task_id', None)), - 'name': obj.kw.get('task_name', None), - 'period': obj.kw.get('task_period', None), - 'type': obj.kw.get('task_type', None), - 'plugin_id': obj.kw.get('plugin_id', None), - 'persistent': obj.kw.get('persistent', False), - 'state': state} - - -class ThreadEncoder(json.JSONEncoder): - def default(self, obj): - if not isinstance(obj, threadgroup.Thread): - return super(ThreadEncoder, self).default(obj) - return {'id': str(obj.kw.get('task_id', None)), - 'name': obj.kw.get('task_name', None), - 'type': obj.kw.get('task_type', None), - 'plugin_id': obj.kw.get('plugin_id', None), - 'persistent': obj.kw.get('persistent', False), - 'state': 'running'} diff --git a/cerberus/plugins/extension.py b/cerberus/plugins/extension.py deleted file mode 100644 index c4f9305..0000000 --- a/cerberus/plugins/extension.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
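A concrete plugin under the PluginBase class removed above only has to subscribe to events and implement process_notification(); the class below and its event pattern are illustrative, not taken from the removed code::

    from cerberus.plugins import base


    class InstanceWatcher(base.PluginBase):

        def __init__(self):
            super(InstanceWatcher, self).__init__()
            # info() dispatches only events that fnmatch() a subscribed pattern
            self.subscribe_event('compute.instance.*')

        def process_notification(self, ctxt, publisher_id, event_type,
                                 payload, metadata):
            pass  # react to the matched notification here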
-# - -from __future__ import print_function - -import argparse - -from stevedore import extension - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument( - '--width', - default=60, - type=int, - help='maximum output width for text', - ) - parsed_args = parser.parse_args() - - data = { - 'a': 'A', - 'b': 'B', - 'long': 'word ' * 80, - } - - mgr = extension.ExtensionManager( - namespace='stevedore.example.formatter', - invoke_on_load=True, - invoke_args=(parsed_args.width,), - ) - - def format_data(ext, data): - return (ext.name, ext.obj.format(data)) - - results = mgr.map(format_data, data) - - for name, result in results: - print('Formatter: {0}'.format(name)) - for chunk in result: - print(chunk, end='') - print('') diff --git a/cerberus/plugins/task_plugin.py b/cerberus/plugins/task_plugin.py deleted file mode 100644 index 438372b..0000000 --- a/cerberus/plugins/task_plugin.py +++ /dev/null @@ -1,61 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import datetime -import eventlet - - -from cerberus.openstack.common import log -from cerberus.plugins import base - - -LOG = log.getLogger(__name__) - -_IMAGE_UPDATE = 'image.update' - - -class TaskPlugin(base.PluginBase): - - def __init__(self): - super(TaskPlugin, self).__init__() - - @base.PluginBase.webmethod - def act_long(self, *args, **kwargs): - ''' - Each second, log the date during 40 seconds. - :param args: - :param kwargs: - :return: - ''' - LOG.info(str(kwargs.get('task_name', 'unknown')) + " :" - + str(datetime.datetime.time(datetime.datetime.now()))) - i = 0 - while(i < 60): - LOG.info(str(kwargs.get('task_name', 'unknown')) + " :" - + str(datetime.datetime.time(datetime.datetime.now()))) - i += 1 - eventlet.sleep(1) - LOG.info(str(kwargs.get('task_name', 'unknown')) + " :" - + str(datetime.datetime.time(datetime.datetime.now()))) - - @base.PluginBase.webmethod - def act_short(self, *args, **kwargs): - LOG.info(str(kwargs.get('task_name', 'unknown')) + " :" - + str(datetime.datetime.time(datetime.datetime.now()))) - - def process_notification(self, ctxt, publisher_id, event_type, payload, - metadata): - pass diff --git a/cerberus/plugins/test_plugin.py b/cerberus/plugins/test_plugin.py deleted file mode 100644 index c7ee29a..0000000 --- a/cerberus/plugins/test_plugin.py +++ /dev/null @@ -1,169 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
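The webmethod decorator used by the plugins above is plain function-attribute tagging, which is also what PluginEncoder relies on when serializing a plugin. A short sketch of how exposed methods could be discovered at runtime::

    from cerberus.plugins import task_plugin

    plugin = task_plugin.TaskPlugin()
    exposed = [name for name in dir(plugin)
               if getattr(getattr(plugin, name), 'is_webmethod', False)]
    print(exposed)  # ['act_long', 'act_short']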
-# - -import datetime -import json - -from cerberus.common import exception as cerberus_exception -from cerberus.common import json_encoders -from cerberus import manager as cerberus_manager -from cerberus.openstack.common import log -from cerberus.plugins import base - - -LOG = log.getLogger(__name__) - - -class TestPlugin(base.PluginBase): - - def __init__(self): - self.task_id = None - super(TestPlugin, self).__init__() - super(TestPlugin, self).subscribe_event('image.update') - - def act_short(self, *args, **kwargs): - LOG.info(str(kwargs.get('task_name', 'unknown')) + " :" - + str(datetime.datetime.time(datetime.datetime.now()))) - - @base.PluginBase.webmethod - def get_security_reports(self, **kwargs): - security_reports = [] - try: - security_report = { - 'vulns': {'443': {'ip': '192.168.100.3', 'archived': False, - 'protocol': 'tcp', 'iface_id': 329, - 'family': 'Web Servers', - 'plugin': '1.3.6.1.4.1.25623.1.0.10386', - 'service_name': 'Apache httpd 2.2.22', - 'vuln_state': 'acked', 'port': 80, - 'state': 'acked', 'service': '80/tcp', - 'service_status': None, 'host_id': 328, - 'vuln_id': 443, - 'output': "Summary: \nRemote web server does not reply with 404 error code.\n\nInsight: \nThis web server is [mis]configured in that it does not return\n '404 Not Found' error codes when a non-existent file is requested,\n perhaps returning a site map, search page or authentication page\n instead.\n \n OpenVAS enabled some counter measures for that, however they might\n be insufficient. If a great number of security holes are produced\n for this port, they might not all be accurate\n\nReferences: \nNOXREF\nCVE:NOCVE\n\n", # noqa - 'service_id': 337, 'score': 0.0, 'id': 443, - 'name': 'No 404 check'}, - '447': {'ip': '192.168.100.3', 'archived': False, - 'protocol': 'tcp', 'iface_id': 329, - 'family': 'Denial of Service', - 'plugin': '1.3.6.1.4.1.25623.1.0.121035', - 'service_name': 'OpenSSH 5.9p1 Debian', - 'vuln_state': 'acked', 'port': 22, - 'state': 'acked', 'service': '22/tcp', - 'service_status': None, 'host_id': 328, - 'vuln_id': 447, - 'output': "Summary: \nDenial of Service Vulnerability in OpenSSH\n\nInsight: \nThe sshd_config configuration file indicates connection limits:\n - MaxStartups: maximal number of unauthenticated connections (default : 10)\n - LoginGraceTime: expiration duration of unauthenticated connections (default : 2 minutes)\n\nHowever, in this default configuration, an attacker can open 10 TCP sessions on port 22/tcp, and then reopen them every 2 minutes, in order to limit the probability of a legitimate client to access to the service.\n\nNote: MaxStartups supports the 'random early drop' feature, which protects against this type of attack, but it is not enabled by default.\n\nAn unauthenticated attacker can therefore open ten connections to OpenSSH, in order to forbid the access to legitimate users.\n\nThis plugin only check OpenSSH version and not test to exploit this vulnerability.\n\nImpact: \nAttackers to cause a denial of service (connection-slot exhaustion).\n\nReferences: \nURL:http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/sshd_config?r1=1.89#rev1.89\nURL:http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/sshd_config.5?r1=1.156#rev1.156\nURL:http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/servconf.c?r1=1.234#rev1.234\nURL:http://vigilance.fr/vulnerability/OpenSSH-denial-of-service-via-MaxStartups-11256\nCVE:CVE-2010-5107\n\nSolution: \nUpgrade your OpenSSH to 6.2. 
or modify LoginGraceTime and MaxStartups on server configuration\n\n", # noqa - 'service_id': 333, 'score': 5.0, 'id': 447, - 'name': 'Denial of Service in OpenSSH'}, - '446': {'ip': '192.168.100.3', 'archived': False, - 'protocol': 'udp', 'iface_id': 329, - 'family': 'Service detection', - 'plugin': '1.3.6.1.4.1.25623.1.0.10884', - 'service_name': 'NTP v4 (unsynchronized)', - 'vuln_state': 'new', 'port': 123, - 'state': 'new', 'service': '123/udp', - 'service_status': None, 'host_id': 328, - 'vuln_id': 446, - 'output': 'Summary: \nA NTP (Network Time Protocol) server is listening on this port.\n\nReferences: \nNOXREF\nCVE:NOCVE\n\n', # noqa - 'service_id': 335, 'score': 0.0, 'id': 446, - 'name': 'NTP read variables'}, - '445': {'ip': '192.168.100.3', 'archived': False, - 'protocol': 'tcp', 'iface_id': 329, - 'family': 'General', - 'plugin': '1.3.6.1.4.1.25623.1.0.120008', - 'service_name': 'Apache httpd 2.2.22 ', - 'vuln_state': 'acked', 'port': 443, - 'state': 'acked', 'service': '443/tcp', - 'service_status': None, 'host_id': 328, - 'vuln_id': 445, - 'output': '\nFollowing is a list of the SSL cipher suites supported when connecting to the host.\n\nSupported cipher suites (ORDER IS NOT SIGNIFICANT)\n SSLv3\n RSA_WITH_3DES_EDE_CBC_SHA\n DHE_RSA_WITH_3DES_EDE_CBC_SHA\n RSA_WITH_AES_128_CBC_SHA\n DHE_RSA_WITH_AES_128_CBC_SHA\n RSA_WITH_AES_256_CBC_SHA\n DHE_RSA_WITH_AES_256_CBC_SHA\n RSA_WITH_CAMELLIA_128_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_128_CBC_SHA\n RSA_WITH_CAMELLIA_256_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_256_CBC_SHA\n (TLSv1.0: idem)\n (TLSv1.1: idem)\n TLSv1.2\n RSA_WITH_3DES_EDE_CBC_SHA\n DHE_RSA_WITH_3DES_EDE_CBC_SHA\n RSA_WITH_AES_128_CBC_SHA\n DHE_RSA_WITH_AES_128_CBC_SHA\n RSA_WITH_AES_256_CBC_SHA\n DHE_RSA_WITH_AES_256_CBC_SHA\n RSA_WITH_AES_128_CBC_SHA256\n RSA_WITH_AES_256_CBC_SHA256\n RSA_WITH_CAMELLIA_128_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_128_CBC_SHA\n DHE_RSA_WITH_AES_128_CBC_SHA256\n DHE_RSA_WITH_AES_256_CBC_SHA256\n RSA_WITH_CAMELLIA_256_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_256_CBC_SHA\n\n', # noqa - 'service_id': 339, 'score': 0.0, 'id': 445, - 'name': 'SSL Cipher Suites Supported'}, - '444': {'ip': '192.168.100.3', 'archived': False, - 'protocol': 'tcp', 'iface_id': 329, - 'family': 'General', - 'plugin': '1.3.6.1.4.1.25623.1.0.120002', - 'service_name': 'Apache httpd 2.2.22', - 'vuln_state': 'acked', 'port': 443, - 'state': 'acked', 'service': '443/tcp', - 'service_status': None, 'host_id': 328, - 'vuln_id': 444, - 'output': '\nA vulnerability exists in SSL 3.0 and TLS 1.0 that could allow information \ndisclosure if an attacker intercepts encrypted traffic served from an affected \nsystem. It is also known as BEAST attack. \n\nCVSS Severity:\n CVSS Base Score: 4.3 (AV:N/AC:M/Au:N/C:P/I:N/A:N) \n Impact Subscore: \n Exploitability Subscore:\n\nReference:\n CVE-2011-3389\n \nSolution:\n Disable usage of CBC ciphers with SSL 3.0 and TLS 1.0 protocols.\n \nNote: \n This script detects the vulnerability in the SSLv3/TLSv1 protocol implemented \n in the server. 
It does not detect the BEAST attack where it exploits the \n vulnerability at HTTPS client-side.\n\n The detection at server-side does not necessarily mean your server is \n vulnerable to the BEAST attack because the attack exploits the vulnerability \n at client-side, and both SSL/TLS clients and servers can independently employ \n the split record countermeasure.\n \nSee Also:\n http://vnhacker.blogspot.com/2011/09/beast.html\n http://www.openssl.org/~bodo/tls-cbc.txt\n http://blogs.msdn.com/b/kaushal/archive/2012/01/21/fixing-the-beast.aspx\n \n \n', # noqa - 'service_id': 339, 'score': 4.3, 'id': 444, - 'name': 'BEAST Vulnerability'}}, - 'host': {'archived': False, 'name': '192.168.100.3', - 'ifaces': [329], 'scan': True, - 'cpe': 'cpe:/o:canonical:ubuntu_linux', 'state': 'up', - 'cpe_title': 'Canonical Ubuntu Linux', - 'fingerprint': 'Linux Kernel', 'device': 'server', - 'id': 328}, - 'stat': {'ignored': 0, 'entity_id': 328, 'medium': 2, - 'grade': 7.4, 'vulns': 2, 'archived': 0, - 'not_scanned': 0, 'high': 0, 'score': 9.3, 'hosts': 1, - 'trending': 0.0, 'scanned': 1, 'critical': 0, - 'low': 0}, - 'ifaces': {'329': {'archived': False, 'ip': '192.168.100.3', - 'state': 'up', - 'services': [333, 335, 337, 339], - 'host_id': 328, 'id': 329}}} - - report_id = 'test_plugin_report_id' - if security_report.get('stat', False): - vulnerabilities_number = security_report['stat']\ - .get('vulns', None) - - cerberus_manager.store_report_and_notify( - 'Test security report', self._uuid, report_id, - 'a1d869a1-6ab0-4f02-9e56-f83034bacfcb', - 'openstack-test-server', 'instance', - '510c7f4ed14243f09df371bba2561177', - 'openstack-test-server', security_report['stat']['grade'], - json.dumps(security_report['vulns'], - cls=json_encoders.DateTimeEncoder), - vulnerabilities_number, - datetime.datetime(2015, 6, 1, 10, 11, 59)) - security_reports.append(security_report) - except cerberus_exception.DBException as e: - LOG.exception(e) - pass - return security_reports - - def process_notification(self, ctxt, publisher_id, event_type, payload, - metadata): - - LOG.info('--> Plugin %(plugin)s managed event %(event)s ' - 'payload %(payload)s' - % {'plugin': self._name, - 'event': event_type, - 'payload': payload}) - if ('START' in payload['name'] and self.task_id is None): - self.task_id = self.manager.create_task( - {}, - self._uuid, - 'act_short', - task_type='recurrent', - task_period=1, - task_name='TEST_PLUGIN_START_PAYLOAD') - LOG.info('Start cycling task id %s', self.task_id) - - if ('STOP' in payload['name']): - try: - self.manager._force_delete_recurrent_task(self.task_id) - LOG.info('Stop cycling task id %s', self.task_id) - self.task_id = None - except StopIteration as e: - LOG.debug('Error when stopping task') - LOG.exception(e) - return self._name diff --git a/cerberus/service.py b/cerberus/service.py deleted file mode 100644 index 4deb11b..0000000 --- a/cerberus/service.py +++ /dev/null @@ -1,141 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
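TestPlugin.process_notification above drives a small task lifecycle: a payload whose name contains START creates a recurrent one-second task, and STOP force-deletes it. The sketch below mimics that lifecycle with a plain stdlib threading.Timer loop; RecurrentTask and on_notification are hypothetical stand-ins for the manager's create_task / _force_delete_recurrent_task machinery, not the project's API:

import threading
import time

class RecurrentTask(object):
    """Re-arms a daemon Timer every period until stopped."""

    def __init__(self, period, fn):
        self.period = period
        self.fn = fn
        self._timer = None
        self._stopped = False

    def start(self):
        self._timer = threading.Timer(self.period, self._tick)
        self._timer.daemon = True
        self._timer.start()

    def _tick(self):
        if self._stopped:
            return
        self.fn()
        self.start()  # re-arm for the next period

    def stop(self):
        self._stopped = True
        if self._timer is not None:
            self._timer.cancel()

def tick():
    print('tick')

task = None

def on_notification(payload):
    # Same START/STOP convention as TestPlugin.process_notification.
    global task
    if 'START' in payload['name'] and task is None:
        task = RecurrentTask(1.0, tick)
        task.start()
    elif 'STOP' in payload['name'] and task is not None:
        task.stop()
        task = None

on_notification({'name': 'START scan'})
time.sleep(2.5)  # the task fires roughly twice
on_notification({'name': 'STOP scan'})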
-# - -import os -import socket - -from oslo.config import cfg -from stevedore import named - -from cerberus.common import config -from cerberus.openstack.common.gettextutils import _ # noqa -from cerberus.openstack.common import log -from cerberus import utils - - -OPTS = [ - cfg.StrOpt('host', - default=socket.gethostname(), - help='Name of this node, which must be valid in an AMQP ' - 'key. Can be an opaque identifier. For ZeroMQ only, must ' - 'be a valid host name, FQDN, or IP address.'), - cfg.MultiStrOpt('dispatcher', - deprecated_group="collector", - default=['database'], - help='Dispatcher to process data.'), - cfg.IntOpt('collector_workers', - default=1, - help='Number of workers for collector service. A single ' - 'collector is enabled by default.'), - cfg.IntOpt('notification_workers', - default=1, - help='Number of workers for notification service. A single ' - 'notification agent is enabled by default.'), -] -cfg.CONF.register_opts(OPTS) - -CLI_OPTIONS = [ - cfg.StrOpt('os-username', - deprecated_group="DEFAULT", - default=os.environ.get('OS_USERNAME', 'cerberus'), - help='User name to use for OpenStack service access.'), - cfg.StrOpt('os-password', - deprecated_group="DEFAULT", - secret=True, - default=os.environ.get('OS_PASSWORD', 'admin'), - help='Password to use for OpenStack service access.'), - cfg.StrOpt('os-tenant-id', - deprecated_group="DEFAULT", - default=os.environ.get('OS_TENANT_ID', ''), - help='Tenant ID to use for OpenStack service access.'), - cfg.StrOpt('os-tenant-name', - deprecated_group="DEFAULT", - default=os.environ.get('OS_TENANT_NAME', 'admin'), - help='Tenant name to use for OpenStack service access.'), - cfg.StrOpt('os-cacert', - default=os.environ.get('OS_CACERT'), - help='Certificate chain for SSL validation.'), - cfg.StrOpt('os-auth-url', - deprecated_group="DEFAULT", - default=os.environ.get('OS_AUTH_URL', - 'http://localhost:5000/v2.0'), - help='Auth URL to use for OpenStack service access.'), - cfg.StrOpt('os-region-name', - deprecated_group="DEFAULT", - default=os.environ.get('OS_REGION_NAME'), - help='Region name to use for OpenStack service endpoints.'), - cfg.StrOpt('os-endpoint-type', - default=os.environ.get('OS_ENDPOINT_TYPE', 'publicURL'), - help='Type of endpoint in Identity service catalog to use for ' - 'communication with OpenStack services.'), - cfg.BoolOpt('insecure', - default=False, - help='Disables X.509 certificate validation when an ' - 'SSL connection to Identity Service is established.'), -] -cfg.CONF.register_opts(CLI_OPTIONS, group="service_credentials") - - -LOG = log.getLogger(__name__) - - -class WorkerException(Exception): - """Exception for errors relating to service workers - """ - - -class DispatchedService(object): - - DISPATCHER_NAMESPACE = 'cerberus.dispatcher' - - def start(self): - super(DispatchedService, self).start() - LOG.debug(_('loading dispatchers from %s'), - self.DISPATCHER_NAMESPACE) - self.dispatcher_manager = named.NamedExtensionManager( - namespace=self.DISPATCHER_NAMESPACE, - names=cfg.CONF.dispatcher, - invoke_on_load=True, - invoke_args=[cfg.CONF]) - if not list(self.dispatcher_manager): - LOG.warning(_('Failed to load any dispatchers for %s'), - self.DISPATCHER_NAMESPACE) - - -def get_workers(name): - workers = (cfg.CONF.get('%s_workers' % name) or - utils.cpu_count()) - if workers and workers < 1: - msg = (_("%(worker_name)s value of %(workers)s is invalid, " - "must be greater than 0") % - {'worker_name': '%s_workers' % name, 'workers': str(workers)}) - raise WorkerException(msg) - return 
workers - - -def prepare_service(argv=[]): - config.parse_args(argv) - cfg.set_defaults(log.log_opts, - default_log_levels=['amqplib=WARN', - 'qpid.messaging=INFO', - 'sqlalchemy=WARN', - 'keystoneclient=INFO', - 'stevedore=INFO', - 'eventlet.wsgi.server=WARN', - 'iso8601=WARN', - 'paramiko=WARN', - ]) - log.setup('cerberus') diff --git a/cerberus/tests/__init__.py b/cerberus/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/tests/functional/__init__.py b/cerberus/tests/functional/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/tests/functional/api/__init__.py b/cerberus/tests/functional/api/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/tests/functional/api/v1/__init__.py b/cerberus/tests/functional/api/v1/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/tests/functional/api/v1/test_api.py b/cerberus/tests/functional/api/v1/test_api.py deleted file mode 100644 index e3da68d..0000000 --- a/cerberus/tests/functional/api/v1/test_api.py +++ /dev/null @@ -1,285 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import json - -from cerberus.tests.functional import base - -TEST_REPORT_ID = 'test_plugin_report_id' - - -class AlarmTestsV1(base.TestCase): - - _service = 'security' - - def test_list_alarms(self): - resp, body = self.security_client.get( - self.security_client._version + '/security_alarms') - self.assertEqual(200, resp.status) - - -class ReportTestsV1(base.TestCase): - - _service = 'security' - - def test_list_reports(self): - resp, body = self.security_client.get( - self.security_client._version + '/security_reports') - self.assertEqual(200, resp.status) - - def test_create_get_delete_report(self): - - # Create a task to get security report from test_plugin - plugin_id = None - resp, body = self.security_client.get( - self.security_client._version + '/plugins', - ) - plugins = json.loads(body).get('plugins', None) - if plugins is not None: - for plugin in plugins: - if (plugin.get('name', None) == - 'cerberus.plugins.test_plugin.TestPlugin'): - plugin_id = plugin.get('uuid', None) - - self.assertIsNotNone(plugin_id, - message='cerberus.plugins.test_plugin.TestPlugin ' - 'must exist and have an id') - task = { - "name": "test_create_get_delete_report", - "method": "get_security_reports", - "plugin_id": plugin_id, - "type": "unique" - } - headers = {'content-type': 'application/json'} - resp, body = self.security_client.post( - self.security_client._version + '/tasks', json.dumps(task), - headers=headers) - self.assertEqual(200, resp.status) - - # Get uuid of the security report - resp, body = self.security_client.get( - self.security_client._version + '/security_reports/') - - report_uuid = '' - security_reports = json.loads(body).get('security_reports', []) - for security_report in security_reports: - if security_report['report_id'] == TEST_REPORT_ID: - report_uuid = security_report['uuid'] - - # Check 
if security report has been stored in db and delete it - - resp, body = self.security_client.get( - self.security_client._version + '/security_reports/' + report_uuid) - report = json.loads(body) - self.assertEqual('a1d869a1-6ab0-4f02-9e56-f83034bacfcb', - report['component_id']) - self.assertEqual(200, resp.status) - - # Delete security report - resp, body = self.security_client.delete( - self.security_client._version + '/security_reports/' + report_uuid) - - self.assertEqual(204, resp.status) - - -class TaskTestsV1(base.TestCase): - - _service = 'security' - - def test_list_tasks(self): - resp, body = self.security_client.get( - self.security_client._version + '/tasks') - self.assertEqual(200, resp.status) - - def test_create_unique_task_not_persistent(self): - plugin_id = None - resp, body = self.security_client.get( - self.security_client._version + '/plugins', - ) - plugins = json.loads(body).get('plugins', None) - if plugins is not None: - for plugin in plugins: - if (plugin.get('name', None) == - 'cerberus.plugins.test_plugin.TestPlugin'): - plugin_id = plugin.get('uuid', None) - - self.assertIsNotNone(plugin_id, - message='cerberus.plugins.test_plugin.TestPlugin ' - 'must exist and have an id') - - task = { - "name": "unique_task_np", - "method": "act_short", - "plugin_id": plugin_id, - "type": "unique" - } - headers = {'content-type': 'application/json'} - resp, body = self.security_client.post( - self.security_client._version + '/tasks', json.dumps(task), - headers=headers) - self.assertEqual(200, resp.status) - - def test_create_get_delete_recurrent_task_not_persistent(self): - plugin_id = None - resp, body = self.security_client.get( - self.security_client._version + '/plugins', - ) - plugins = json.loads(body).get('plugins', None) - if plugins is not None: - for plugin in plugins: - if (plugin.get('name', None) == - 'cerberus.plugins.test_plugin.TestPlugin'): - plugin_id = plugin.get('uuid', None) - - self.assertIsNotNone(plugin_id, - message='cerberus.plugins.test_plugin.TestPlugin ' - 'must exist and have an id') - - task = { - "name": "recurrent_task_np", - "method": "act_short", - "plugin_id": plugin_id, - "type": "recurrent", - "period": 3 - } - headers = {'content-type': 'application/json'} - resp, body = self.security_client.post( - self.security_client._version + '/tasks', json.dumps(task), - headers=headers) - task_id = json.loads(body).get('id', None) - self.assertEqual(200, resp.status) - self.assertIsNotNone(task_id) - resp, body = self.security_client.delete( - self.security_client._version + '/tasks/' + task_id, - ) - self.assertEqual(204, resp.status) - - def test_create_get_stop_start_delete_recurrent_task_persistent(self): - - # Get test_plugin - plugin_id = None - resp, body = self.security_client.get( - self.security_client._version + '/plugins', - ) - plugins = json.loads(body).get('plugins', None) - if plugins is not None: - for plugin in plugins: - if (plugin.get('name', None) == - 'cerberus.plugins.test_plugin.TestPlugin'): - plugin_id = plugin.get('uuid', None) - - self.assertIsNotNone(plugin_id, - message='cerberus.plugins.test_plugin.TestPlugin ' - 'must exist and have an id') - - # create the task - task = { - 'name': 'recurrent_persistent_task', - 'method': 'act_short', - 'plugin_id': plugin_id, - 'type': 'recurrent', - 'period': 3, - 'persistent': True - } - headers = {'content-type': 'application/json'} - resp, body = self.security_client.post( - self.security_client._version + '/tasks', json.dumps(task), - headers=headers) - task_id = 
json.loads(body).get('id', None) - self.assertEqual(200, resp.status) - self.assertIsNotNone(task_id) - - # Get the task through API - resp, body = self.security_client.get( - self.security_client._version + '/tasks/' + task_id, - headers=headers) - self.assertEqual(200, resp.status) - self.assertEqual(task_id, json.loads(body)['id']) - self.assertEqual(True, json.loads(body)['persistent']) - self.assertEqual('recurrent', json.loads(body)['type']) - self.assertEqual('running', json.loads(body)['state']) - self.assertEqual(3, json.loads(body)['period']) - - # Stop the task - resp, body = self.security_client.post( - self.security_client._version + '/tasks/' + task_id + - '/action/stop', json.dumps({}), headers=headers) - self.assertEqual(204, resp.status) - - resp, body = self.security_client.get( - self.security_client._version + '/tasks/' + task_id, - headers=headers) - self.assertEqual(200, resp.status) - self.assertEqual(task_id, json.loads(body)['id']) - self.assertEqual(True, json.loads(body)['persistent']) - self.assertEqual('recurrent', json.loads(body)['type']) - self.assertEqual('stopped', json.loads(body)['state']) - self.assertEqual(3, json.loads(body)['period']) - - # Start the task - resp, body = self.security_client.post( - self.security_client._version + '/tasks/' + task_id + - '/action/start', json.dumps({}), headers=headers) - self.assertEqual(204, resp.status) - - resp, body = self.security_client.get( - self.security_client._version + '/tasks/' + task_id, - headers=headers) - self.assertEqual(200, resp.status) - self.assertEqual(task_id, json.loads(body)['id']) - self.assertEqual(True, json.loads(body)['persistent']) - self.assertEqual('recurrent', json.loads(body)['type']) - self.assertEqual('running', json.loads(body)['state']) - self.assertEqual(3, json.loads(body)['period']) - - # Delete the task - resp, body = self.security_client.delete( - self.security_client._version + '/tasks/' + task_id, - ) - self.assertEqual(204, resp.status) - - -class PluginTestsV1(base.TestCase): - - _service = 'security' - - def test_list_plugins(self): - resp, body = self.security_client.get( - self.security_client._version + '/plugins') - self.assertEqual(200, resp.status) - - def test_get_plugin(self): - # Get test_plugin - plugin_id = None - resp, body = self.security_client.get( - self.security_client._version + '/plugins', - ) - plugins = json.loads(body).get('plugins', None) - if plugins is not None: - for plugin in plugins: - if (plugin.get('name', None) == - 'cerberus.plugins.test_plugin.TestPlugin'): - plugin_id = plugin.get('uuid', None) - - self.assertIsNotNone(plugin_id, - message='cerberus.plugins.test_plugin.TestPlugin ' - 'must exist and have an id') - resp, body = self.security_client.get( - self.security_client._version + '/plugins/' + plugin_id, - ) - self.assertEqual(200, resp.status) - self.assertEqual('cerberus.plugins.test_plugin.TestPlugin', - json.loads(body)['name']) diff --git a/cerberus/tests/functional/base.py b/cerberus/tests/functional/base.py deleted file mode 100644 index 4304348..0000000 --- a/cerberus/tests/functional/base.py +++ /dev/null @@ -1,122 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import mock -import os - -from tempest import clients -from tempest import config -from tempest_lib import auth -from tempest_lib import base -from tempest_lib.common import rest_client -from tempest_lib import exceptions - - -CONF = config.CONF - - -def get_resource(path): - main_package = 'cerberus/tests' - dir_path = __file__[0:__file__.find(main_package) + len(main_package) + 1] - - return open(dir_path + 'resources/' + path).read() - - -def find_items(items, **props): - def _matches(item, **props): - for prop_name, prop_val in props.iteritems(): - if item[prop_name] != prop_val: - return False - - return True - - filtered = filter(lambda item: _matches(item, **props), items) - - if len(filtered) == 1: - return filtered[0] - - return filtered - - -class CerberusClientBase(rest_client.RestClient): - - def __init__(self, auth_provider, service_type): - super(CerberusClientBase, self).__init__( - auth_provider=auth_provider, - service=service_type, - region=CONF.identity.region) - - if service_type not in ('security',): - msg = ("Invalid parameter 'service_type'. ") - raise exceptions.UnprocessableEntity(msg) - - self.endpoint_url = 'publicURL' - - self.workbooks = [] - self.executions = [] - self.workflows = [] - self.triggers = [] - self.actions = [] - - -class CerberusClientV1(CerberusClientBase): - - def __init__(self, auth_provider, service_type): - super(CerberusClientV1, self).__init__(auth_provider, service_type) - self._version = 'v1' - - -class AuthProv(auth.KeystoneV2AuthProvider): - - def __init__(self): - self.alt_part = None - - def auth_request(self, method, url, *args, **kwargs): - req_url, headers, body = super(AuthProv, self).auth_request( - method, url, *args, **kwargs) - return 'http://localhost:8300/{0}/{1}'.format( - 'v1', url), headers, body - - def get_auth(self): - return 'mock_str', 'mock_str' - - def base_url(self, *args, **kwargs): - return '' - - -class TestCase(base.BaseTestCase): - - @classmethod - def setUpClass(cls): - """Initialize authentication before each test case and define - parameters of the Cerberus API service. - """ - super(TestCase, cls).setUpClass() - - if 'WITHOUT_AUTH' in os.environ: - cls.mgr = mock.MagicMock() - cls.mgr.auth_provider = AuthProv() - else: - cls.mgr = clients.Manager() - - cls.security_client = CerberusClientV1( - cls.mgr.auth_provider, cls._service) - - def setUp(self): - super(TestCase, self).setUp() - - def tearDown(self): - super(TestCase, self).tearDown() diff --git a/cerberus/tests/functional/test_notifications.py b/cerberus/tests/functional/test_notifications.py deleted file mode 100644 index 6a222b6..0000000 --- a/cerberus/tests/functional/test_notifications.py +++ /dev/null @@ -1,131 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License.
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import time - -from tempest import test -from tempest_lib.common.utils import data_utils - -from cerberus.tests.functional import base - - -class NotificationTests(base.TestCase): - - _service = 'security' - - @test.attr(type='gate') - @test.services("image") - def test_notification_image(self): - - # Create image - image_name = data_utils.rand_name('image') - glance_resp = self.mgr.image_client.create_image(image_name, - 'bare', - 'iso', - visibility='private') - self.assertEqual('queued', glance_resp['status']) - image_id = glance_resp['id'] - - # Remove image at the end - self.addCleanup(self.mgr.image_client.delete_image, glance_resp['id']) - - # Check how many tasks there are at the beginning - resp, task_list_0 = self.security_client.get("v1/tasks") - task_list_0 = json.loads(task_list_0) - - # Update Image - self.mgr.image_client.update_image(image_id, 'START') - - # Verifying task has been created - resp, task_list_1 = self.security_client.get("v1/tasks") - task_list_1 = json.loads(task_list_1) - self.assertEqual(len(task_list_0.get('tasks', [])) + 1, - len(task_list_1.get('tasks', []))) - - # Update Image - self.mgr.image_client.update_image(image_id, 'STOP') - - # Verify the task has been deleted - resp, task_list_2 = self.security_client.get("v1/tasks") - task_list_2 = json.loads(task_list_2) - self.assertEqual(len(task_list_1.get('tasks', [])) - 1, - len(task_list_2.get('tasks', []))) - - @test.services("telemetry") - def test_notifier(self): - - # Create a task to get security report from test_plugin - plugin_id = None - resp, body = self.security_client.get( - self.security_client._version + '/plugins', - ) - plugins = json.loads(body).get('plugins', None) - if plugins is not None: - for plugin in plugins: - if (plugin.get('name', None) == - 'cerberus.plugins.test_plugin.TestPlugin'): - plugin_id = plugin.get('uuid', None) - - self.assertIsNotNone(plugin_id, - message='cerberus.plugins.test_plugin.TestPlugin ' - 'must exist and have an id') - - # Count the number of security.security_report samples - # todo(rza): delete the sample at the end if possible - resp = self.mgr.telemetry_client.list_samples( - 'security.security_report.store') - samples_number = len(resp) - - task = { - "name": "test_notifier", - "method": "get_security_reports", - "plugin_id": plugin_id, - "type": "unique" - } - headers = {'content-type': 'application/json'} - resp, body = self.security_client.post( - self.security_client._version + '/tasks', json.dumps(task), - headers=headers) - self.assertEqual(200, resp.status) - - # Check if security report has been stored in db and delete it - report_id = 'test_plugin_report_id' - resp, body = self.security_client.get( - self.security_client._version + '/security_reports/') - - i = 0 - security_reports = json.loads(body)['security_reports'] - while security_reports[i].get('report_id', None) != report_id: - i += 1 - report_uuid = security_reports[i].get('uuid', None) - resp, body = self.security_client.get( - self.security_client._version + '/security_reports/' + report_uuid) - report = json.loads(body) -
self.assertEqual('a1d869a1-6ab0-4f02-9e56-f83034bacfcb', - report['component_id']) - self.assertEqual(200, resp.status) - - # Delete security report - resp, body = self.security_client.delete( - self.security_client._version + '/security_reports/' + report_uuid) - - self.assertEqual(204, resp.status) - - # Check if a sample has been created in Ceilometer - time.sleep(10) - resp = self.mgr.telemetry_client.list_samples( - 'security.security_report.store') - self.assertEqual(samples_number + 1, len(resp)) diff --git a/cerberus/tests/unit/__init__.py b/cerberus/tests/unit/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/tests/unit/api/__init__.py b/cerberus/tests/unit/api/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/tests/unit/api/base.py b/cerberus/tests/unit/api/base.py deleted file mode 100644 index 55e7f7a..0000000 --- a/cerberus/tests/unit/api/base.py +++ /dev/null @@ -1,208 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from oslo.config import cfg -import pecan.testing - -from cerberus.api import auth -from cerberus.db import api as dbapi -from cerberus.tests.unit import base - - -PATH_PREFIX = '/v1' - - -class TestApiCase(base.TestCase): - - def setUp(self): - super(TestApiCase, self).setUp() - self.app = self._make_app() - self.dbapi = dbapi.get_instance() - cfg.CONF.set_override("auth_version", - "v2.0", - group=auth.OPT_GROUP_NAME) - - def _make_app(self, enable_acl=False): - - root_dir = self.path_get() - - self.config = { - 'app': { - 'root': 'cerberus.api.root.RootController', - 'modules': ['cerberus.api'], - 'static_root': '%s/public' % root_dir, - 'template_path': '%s/api/templates' % root_dir, - 'enable_acl': enable_acl, - 'acl_public_routes': ['/', '/v1', '/security_reports'] - }, - } - return pecan.testing.load_test_app(self.config) - - def _request_json(self, path, params, expect_errors=False, headers=None, - method="post", extra_environ=None, status=None, - path_prefix=PATH_PREFIX): - """Sends simulated HTTP request to Pecan test app. - - :param path: url path of target service - :param params: content for wsgi.input of request - :param expect_errors: Boolean value; whether an error is expected based - on request - :param headers: a dictionary of headers to send along with the request - :param method: Request method type. Appropriate method function call - should be used rather than passing attribute in. 
- :param extra_environ: a dictionary of environ variables to send along - with the request - :param status: expected status code of response - :param path_prefix: prefix of the url path - """ - full_path = path_prefix + path - print('%s: %s %s' % (method.upper(), full_path, params)) - response = getattr(self.app, "%s_json" % method)( - str(full_path), - params=params, - headers=headers, - status=status, - extra_environ=extra_environ, - expect_errors=expect_errors - ) - print('GOT:%s' % response) - return response - - def put_json(self, path, params, expect_errors=False, headers=None, - extra_environ=None, status=None): - """Sends simulated HTTP PUT request to Pecan test app. - - :param path: url path of target service - :param params: content for wsgi.input of request - :param expect_errors: Boolean value; whether an error is expected based - on request - :param headers: a dictionary of headers to send along with the request - :param extra_environ: a dictionary of environ variables to send along - with the request - :param status: expected status code of response - """ - return self._request_json(path=path, params=params, - expect_errors=expect_errors, - headers=headers, extra_environ=extra_environ, - status=status, method="put") - - def post_json(self, path, params, expect_errors=False, headers=None, - extra_environ=None, status=None): - """Sends simulated HTTP POST request to Pecan test app. - - :param path: url path of target service - :param params: content for wsgi.input of request - :param expect_errors: Boolean value; whether an error is expected based - on request - :param headers: a dictionary of headers to send along with the request - :param extra_environ: a dictionary of environ variables to send along - with the request - :param status: expected status code of response - """ - return self._request_json(path=path, params=params, - expect_errors=expect_errors, - headers=headers, extra_environ=extra_environ, - status=status, method="post") - - def patch_json(self, path, params, expect_errors=False, headers=None, - extra_environ=None, status=None): - """Sends simulated HTTP PATCH request to Pecan test app. - - :param path: url path of target service - :param params: content for wsgi.input of request - :param expect_errors: Boolean value; whether an error is expected based - on request - :param headers: a dictionary of headers to send along with the request - :param extra_environ: a dictionary of environ variables to send along - with the request - :param status: expected status code of response - """ - return self._request_json(path=path, params=params, - expect_errors=expect_errors, - headers=headers, extra_environ=extra_environ, - status=status, method="patch") - - def delete(self, path, expect_errors=False, headers=None, - extra_environ=None, status=None, path_prefix=PATH_PREFIX): - """Sends simulated HTTP DELETE request to Pecan test app. 
- - :param path: url path of target service - :param expect_errors: Boolean value; whether an error is expected based - on request - :param headers: a dictionary of headers to send along with the request - :param extra_environ: a dictionary of environ variables to send along - with the request - :param status: expected status code of response - :param path_prefix: prefix of the url path - """ - full_path = path_prefix + path - print('DELETE: %s' % (full_path)) - response = self.app.delete(str(full_path), - headers=headers, - status=status, - extra_environ=extra_environ, - expect_errors=expect_errors) - print('GOT:%s' % response) - return response - - def get_json(self, path, expect_errors=False, headers=None, - extra_environ=None, q=[], path_prefix=PATH_PREFIX, **params): - """Sends simulated HTTP GET request to Pecan test app. - - :param path: url path of target service - :param expect_errors: Boolean value; whether an error is expected based - on request - :param headers: a dictionary of headers to send along with the request - :param extra_environ: a dictionary of environ variables to send along - with the request - :param q: list of queries consisting of: field, value, op, and type - keys - :param path_prefix: prefix of the url path - :param params: content for wsgi.input of request - """ - full_path = path_prefix + path - query_params = {'q.field': [], - 'q.value': [], - 'q.op': [], - } - for query in q: - for name in ['field', 'op', 'value']: - query_params['q.%s' % name].append(query.get(name, '')) - all_params = {} - all_params.update(params) - if q: - all_params.update(query_params) - print('GET: %s %r' % (full_path, all_params)) - response = self.app.get(full_path, - params=all_params, - headers=headers, - extra_environ=extra_environ, - expect_errors=expect_errors) - if not expect_errors: - response = response.json - print('GOT:%s' % response) - return response - - def validate_link(self, link): - """Checks if the given link can get correct data.""" - - # removes 'http://localhost' part - full_path = link.split('localhost', 1)[1] - try: - self.get_json(full_path, path_prefix='') - return True - except Exception: - return False diff --git a/cerberus/tests/unit/api/utils.py b/cerberus/tests/unit/api/utils.py deleted file mode 100644 index 574b56c..0000000 --- a/cerberus/tests/unit/api/utils.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -Utils for testing the API service.
-""" - -import datetime -import json - -ADMIN_TOKEN = '4562138218392831' -MEMBER_TOKEN = '4562138218392832' - - -class FakeMemcache(object): - """Fake cache that is used for keystone tokens lookup.""" - - _cache = { - 'tokens/%s' % ADMIN_TOKEN: { - 'access': { - 'token': {'id': ADMIN_TOKEN, - 'expires': '2100-09-11T00:00:00'}, - 'user': {'id': 'user_id1', - 'name': 'user_name1', - 'tenantId': '123i2910', - 'tenantName': 'mytenant', - 'roles': [{'name': 'admin'}]}, - } - }, - 'tokens/%s' % MEMBER_TOKEN: { - 'access': { - 'token': {'id': MEMBER_TOKEN, - 'expires': '2100-09-11T00:00:00'}, - 'user': {'id': 'user_id2', - 'name': 'user-good', - 'tenantId': 'project-good', - 'tenantName': 'goodies', - 'roles': [{'name': 'Member'}]} - } - } - } - - def __init__(self): - self.set_key = None - self.set_value = None - self.token_expiration = None - - def get(self, key): - dt = datetime.datetime.utcnow() + datetime.timedelta(minutes=5) - return json.dumps((self._cache.get(key), dt.isoformat())) - - def set(self, key, value, time=0, min_compress_len=0): - self.set_value = value - self.set_key = key diff --git a/cerberus/tests/unit/api/v1/__init__.py b/cerberus/tests/unit/api/v1/__init__.py deleted file mode 100644 index b06b406..0000000 --- a/cerberus/tests/unit/api/v1/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/cerberus/tests/unit/api/v1/test_plugins.py b/cerberus/tests/unit/api/v1/test_plugins.py deleted file mode 100644 index 2ce1a85..0000000 --- a/cerberus/tests/unit/api/v1/test_plugins.py +++ /dev/null @@ -1,115 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import json -from sqlalchemy import exc - -import mock -from oslo import messaging - -from cerberus import db -from cerberus.tests.unit.api import base -from cerberus.tests.unit.db import utils as db_utils - - -PLUGIN_ID_1 = 1 -PLUGIN_ID_2 = 2 -PLUGIN_NAME_2 = 'toolyx' - - -class TestPlugins(base.TestApiCase): - - def setUp(self): - super(TestPlugins, self).setUp() - self.fake_plugin = db_utils.get_test_plugin( - id=PLUGIN_ID_1 - ) - self.fake_plugins = [] - self.fake_plugins.append(self.fake_plugin) - self.fake_plugins.append(db_utils.get_test_plugin( - id=PLUGIN_ID_2, - name=PLUGIN_NAME_2 - )) - self.fake_plugin_model = db_utils.get_plugin_model( - id=PLUGIN_ID_1 - ) - self.fake_plugins_model = [] - self.fake_plugins_model.append( - self.fake_plugin_model) - self.fake_plugins_model.append( - db_utils.get_plugin_model( - id=PLUGIN_ID_2, - name=PLUGIN_NAME_2 - ) - ) - self.fake_rpc_plugin = db_utils.get_rpc_plugin() - self.fake_rpc_plugins = [] - self.fake_rpc_plugins.append(self.fake_rpc_plugin) - self.fake_rpc_plugins.append(db_utils.get_rpc_plugin( - name=PLUGIN_NAME_2 - )) - self.plugins_path = '/plugins' - self.plugin_path = '/plugins/%s' % self.fake_plugin['uuid'] - - def test_list(self): - - rpc_plugins = [] - for plugin in self.fake_rpc_plugins: - rpc_plugins.append(json.dumps(plugin)) - - messaging.RPCClient.call = mock.MagicMock( - return_value=rpc_plugins) - db.plugins_info_get = mock.MagicMock( - return_value=self.fake_plugins_model) - - plugins = self.get_json(self.plugins_path) - expecting_sorted = sorted({'plugins': self.fake_plugins}['plugins'], - key=lambda k: k['name']) - actual_sorted = sorted(plugins['plugins'], key=lambda k: k['name']) - self.assertEqual(expecting_sorted, - actual_sorted) - - def test_get(self): - rpc_plugin = json.dumps(self.fake_rpc_plugin) - messaging.RPCClient.call = mock.MagicMock(return_value=rpc_plugin) - db.plugin_info_get_from_uuid = mock.MagicMock( - return_value=self.fake_plugin_model) - plugin = self.get_json(self.plugin_path) - self.assertEqual(self.fake_plugin, plugin) - - def test_list_plugins_remote_error(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError) - res = self.get_json(self.plugins_path, expect_errors=True) - self.assertEqual(503, res.status_code) - - def test_get_plugin_not_existing(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError) - res = self.get_json(self.plugin_path, expect_errors=True) - self.assertEqual(503, res.status_code) - - def test_list_plugins_db_error(self): - messaging.RPCClient.call = mock.MagicMock(return_value=None) - db.plugins_info_get = mock.MagicMock(side_effect=exc.OperationalError) - res = self.get_json(self.plugins_path, expect_errors=True) - self.assertEqual(404, res.status_code) - - def test_get_plugin_remote_error(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError) - res = self.get_json(self.plugin_path, expect_errors=True) - self.assertEqual(503, res.status_code) diff --git a/cerberus/tests/unit/api/v1/test_security_alarms.py b/cerberus/tests/unit/api/v1/test_security_alarms.py deleted file mode 100644 index 4ee7d63..0000000 --- a/cerberus/tests/unit/api/v1/test_security_alarms.py +++ /dev/null @@ -1,86 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from sqlalchemy import exc as sql_exc - -import mock - -from cerberus import db -from cerberus.tests.unit.api import base -from cerberus.tests.unit.db import utils as db_utils - - -SECURITY_ALARM_ID = 'abc123' -SECURITY_ALARM_ID_2 = 'xyz789' - - -class TestSecurityAlarms(base.TestApiCase): - - def setUp(self): - super(TestSecurityAlarms, self).setUp() - self.fake_security_alarm = db_utils.get_test_security_alarm( - id=SECURITY_ALARM_ID - ) - self.fake_security_alarms = [] - self.fake_security_alarms.append(self.fake_security_alarm) - self.fake_security_alarms.append(db_utils.get_test_security_alarm( - id=SECURITY_ALARM_ID_2 - )) - self.fake_security_alarm_model = db_utils.get_security_alarm_model( - id=SECURITY_ALARM_ID - ) - self.fake_security_alarms_model = [] - self.fake_security_alarms_model.append( - self.fake_security_alarm_model) - self.fake_security_alarms_model.append( - db_utils.get_security_alarm_model( - id=SECURITY_ALARM_ID_2 - ) - ) - self.security_alarms_path = '/security_alarms' - self.security_alarm_path = '/security_alarms/%s' \ - % self.fake_security_alarm['alarm_id'] - - def test_get(self): - - db.security_alarm_get = mock.MagicMock( - return_value=self.fake_security_alarm_model) - security_alarm = self.get_json(self.security_alarm_path) - self.assertEqual(self.fake_security_alarm, - security_alarm) - - def test_list(self): - - db.security_alarm_get_all = mock.MagicMock( - return_value=self.fake_security_alarms_model) - - security_alarms = self.get_json(self.security_alarms_path) - - self.assertEqual({'security_alarms': self.fake_security_alarms}, - security_alarms) - - def test_get_salarms_db_error(self): - db.security_alarm_get_all = mock.MagicMock( - side_effect=sql_exc.NoSuchTableError) - - res = self.get_json(self.security_alarms_path, expect_errors=True) - self.assertEqual(404, res.status_code) - - def test_get_salarm_db_error(self): - db.security_alarm_get = mock.MagicMock( - side_effect=sql_exc.OperationalError) - res = self.get_json(self.security_alarm_path, expect_errors=True) - self.assertEqual(404, res.status_code) diff --git a/cerberus/tests/unit/api/v1/test_security_reports.py b/cerberus/tests/unit/api/v1/test_security_reports.py deleted file mode 100644 index e46f6be..0000000 --- a/cerberus/tests/unit/api/v1/test_security_reports.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
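The DB error tests above work by handing the mocked DB function a side_effect exception and asserting that the API layer converts it into a 404. The same pattern in isolation, with a hypothetical fetch/handler pair rather than the project's controllers (requires the mock library, or unittest.mock on Python 3):

import mock  # on Python 3: from unittest import mock

class BoomError(Exception):
    pass

def handler(fetch):
    # Translate a storage failure into an HTTP-style status, the way
    # the API controllers under test map DB exceptions to 404.
    try:
        return 200, fetch()
    except BoomError:
        return 404, None

fetch = mock.MagicMock(side_effect=BoomError('db gone'))
assert handler(fetch) == (404, None)
fetch.assert_called_once_with()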
-# - -from sqlalchemy import exc as sql_exc - -import mock - -from cerberus import db -from cerberus.tests.unit.api import base -from cerberus.tests.unit.db import utils as db_utils - - -SECURITY_REPORT_ID = 'abc123' -SECURITY_REPORT_ID_2 = 'xyz789' - - -class TestSecurityReports(base.TestApiCase): - - def setUp(self): - super(TestSecurityReports, self).setUp() - self.fake_security_report = db_utils.get_test_security_report( - uuid=SECURITY_REPORT_ID - ) - self.fake_security_reports = [] - self.fake_security_reports.append(self.fake_security_report) - self.fake_security_reports.append(db_utils.get_test_security_report( - uuid=SECURITY_REPORT_ID_2 - )) - self.fake_security_report_model = db_utils.get_security_report_model( - uuid=SECURITY_REPORT_ID - ) - self.fake_security_reports_model = [] - self.fake_security_reports_model.append( - self.fake_security_report_model) - self.fake_security_reports_model.append( - db_utils.get_security_report_model( - uuid=SECURITY_REPORT_ID_2 - ) - ) - self.security_reports_path = '/security_reports' - self.security_report_path = '/security_reports/%s' \ - % self.fake_security_report['uuid'] - - def test_get(self): - - db.security_report_get = mock.MagicMock( - return_value=self.fake_security_report_model) - security_report = self.get_json(self.security_report_path) - self.assertEqual(self.fake_security_report, - security_report) - - def test_list(self): - - db.security_report_get_all = mock.MagicMock( - return_value=self.fake_security_reports_model) - - security_reports = self.get_json(self.security_reports_path) - - self.assertEqual({'security_reports': self.fake_security_reports}, - security_reports) - - def test_update_sr_ticket_id(self): - db.security_report_update_ticket_id = mock.MagicMock() - res = self.put_json(self.security_report_path + '/tickets/1', None) - self.assertEqual(200, res.status_code) - - def test_get_sreports_db_error(self): - db.security_report_get_all = mock.MagicMock( - side_effect=sql_exc.NoSuchTableError) - - res = self.get_json(self.security_reports_path, expect_errors=True) - self.assertEqual(404, res.status_code) - - def test_get_sreport_db_error(self): - db.security_report_get = mock.MagicMock( - side_effect=sql_exc.OperationalError) - res = self.get_json(self.security_report_path, expect_errors=True) - self.assertEqual(404, res.status_code) diff --git a/cerberus/tests/unit/api/v1/test_tasks.py b/cerberus/tests/unit/api/v1/test_tasks.py deleted file mode 100644 index 033530a..0000000 --- a/cerberus/tests/unit/api/v1/test_tasks.py +++ /dev/null @@ -1,198 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
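The task tests that follow expect HTTP 400 whenever the body is not a JSON object or plugin_id or method is missing, and recurrent tasks additionally carry a period. A compact sketch of that validation rule as a standalone function (hypothetical, not the service's actual validator):

def validate_task(task):
    """Return a list of problems; an empty list means the task is OK."""
    if not isinstance(task, dict):
        return ['body must be a JSON object']
    errors = []
    for field in ('name', 'method', 'plugin_id', 'type'):
        if not task.get(field):
            errors.append('missing %s' % field)
    # Recurrent tasks additionally need a positive period.
    if task.get('type') == 'recurrent' and not task.get('period'):
        errors.append('recurrent task needs a period')
    return errors

assert validate_task('INCORRECT JSON') == ['body must be a JSON object']
assert 'missing plugin_id' in validate_task(
    {'name': 'task1', 'method': 'act_long', 'type': 'recurrent',
     'period': 60})
assert validate_task({'name': 'task1', 'method': 'act_long',
                      'plugin_id': 'test', 'type': 'recurrent',
                      'period': 60}) == []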
-# - -import json - -import mock -from oslo import messaging - -from cerberus.api.v1.datamodels import task as task_model -from cerberus.tests.unit.api import base -from cerberus.tests.unit.db import utils as db_utils - - -class MockTask(object): - name = None - id = None - period = None - plugin_id = None - type = None - - def __init__(self, name, period, plugin_id, type, method): - self.name = name - self.period = period - self.plugin_id = plugin_id - self.type = type - self.method = method - - -class TestTasks(base.TestApiCase): - - def setUp(self): - super(TestTasks, self).setUp() - self.fake_task = db_utils.get_test_task() - self.fake_tasks = [] - self.fake_tasks.append(self.fake_task) - self.fake_tasks.append(db_utils.get_test_task( - id=2, - type='recurrent', - name='recurrent_task', - period=20 - )) - self.tasks_path = '/tasks' - self.task_path = '/tasks/%s' % self.fake_task['id'] - - def test_list(self): - rpc_tasks = [] - for task in self.fake_tasks: - rpc_tasks.append(json.dumps(task)) - - messaging.RPCClient.call = mock.MagicMock(return_value=rpc_tasks) - tasks = self.get_json(self.tasks_path) - self.assertEqual({'tasks': self.fake_tasks}, tasks) - - def test_create(self): - task_id = 1 - task = task_model.TaskResource( - initial_data={ - 'method': "act_long", - 'name': "task1", - 'type': "recurrent", - 'period': 60, - 'plugin_id': "test"}) - - expected_task = task - expected_task.id = task_id - messaging.RPCClient.call = mock.MagicMock(return_value=task_id) - task = self.post_json(self.tasks_path, task.as_dict()) - self.assertEqual(expected_task.as_dict(), task.json_body) - - def test_get(self): - rpc_task = json.dumps(self.fake_task) - messaging.RPCClient.call = mock.MagicMock( - return_value=rpc_task) - task = self.get_json(self.task_path,) - self.assertEqual(self.fake_task, task) - - def test_stop(self): - messaging.RPCClient.call = mock.MagicMock(return_value=1) - response = self.post_json(self.task_path + '/action/stop', {}) - self.assertEqual(204, response.status_code) - - def test_delete(self): - messaging.RPCClient.call = mock.MagicMock(return_value=1) - response = self.delete(self.task_path) - self.assertEqual(204, response.status_code) - - def test_list_tasks_remote_error(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError) - response = self.get_json(self.task_path, expect_errors=True) - self.assertEqual(404, response.status_code) - - def test_create_task_incorrect_json(self): - request_body = "INCORRECT JSON" - response = self.post_json(self.tasks_path, - request_body, - expect_errors=True) - self.assertEqual(400, response.status_code) - - def test_create_recurrent_task_without_task_object(self): - task_id = 1 - messaging.RPCClient.call = mock.MagicMock(return_value=task_id) - response = self.post_json(self.tasks_path, None, - expect_errors=True) - self.assertEqual(400, response.status_code) - - def test_create_recurrent_task_without_plugin_id(self): - task_id = 1 - task = task_model.TaskResource( - initial_data={ - "method": "act_long", - "name": "task1", - "type": "recurrent", - "period": 60, - }) - messaging.RPCClient.call = mock.MagicMock(return_value=task_id) - response = self.post_json(self.tasks_path, - task.as_dict(), - expect_errors=True) - self.assertEqual(400, response.status_code) - - def test_create_recurrent_task_without_method(self): - task_id = 1 - task = task_model.TaskResource( - initial_data={ - "name": "task1", - "type": "recurrent", - "period": 60, - "plugin_id": "plugin-test" - }) - messaging.RPCClient.call = 
mock.MagicMock(return_value=task_id) - response = self.post_json(self.tasks_path, - task.as_dict(), - expect_errors=True) - self.assertEqual(400, response.status_code) - - def test_create_recurrent_task_remote_error(self): - task = task_model.TaskResource( - initial_data={ - "method": "act_long", - "name": "task1", - "type": "recurrent", - "period": 60, - "plugin_id": "plugin-test" - }) - - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError(value="dummy")) - response = self.post_json(self.tasks_path, - task.as_dict(), - expect_errors=True) - self.assertEqual(400, response.status_code) - - def test_get_task_remote_error(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError) - response = self.get_json(self.task_path, expect_errors=True) - self.assertEqual(404, response.status_code) - - def test_stop_task_wrong_id(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError) - response = self.post_json(self.task_path + '/action/stop', {}, - expect_errors=True) - self.assertEqual(400, response.status_code) - self.assertEqual('Task can not be stopped', - response.json.get('faultstring', None)) - - def test_force_delete_task_wrong_id(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError) - response = self.post_json(self.task_path + '/action/force_delete', {}, - expect_errors=True) - self.assertEqual(404, response.status_code) - - def test_force_delete_task_id_not_integer(self): - response = self.post_json('/tasks/toto' + '/action/force_delete', {}, - expect_errors=True) - self.assertEqual(404, response.status_code) - - def test_delete_task_not_existing(self): - messaging.RPCClient.call = mock.MagicMock( - side_effect=messaging.RemoteError(value="dummy")) - response = self.delete(self.task_path, expect_errors=True) - self.assertEqual(404, response.status_code) diff --git a/cerberus/tests/unit/base.py b/cerberus/tests/unit/base.py deleted file mode 100644 index 556d48e..0000000 --- a/cerberus/tests/unit/base.py +++ /dev/null @@ -1,67 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os - -from oslo.config import cfg -from oslotest import base - -from cerberus.db import api as db_api -from cerberus.tests.unit import config_fixture -from cerberus.tests.unit import policy_fixture - - -CONF = cfg.CONF - - -class TestCase(base.BaseTestCase): - - """Test case base class for all unit tests.""" - def setUp(self): - super(TestCase, self).setUp() - self.useFixture(config_fixture.ConfigFixture(CONF)) - self.policy = self.useFixture(policy_fixture.PolicyFixture()) - - def path_get(self, project_file=None): - """Get the absolute path to a file. Used for testing the API. - :param project_file: File whose path to return. Default: None. - :returns: path to the specified file, or path to project root.
- """ - root = os.path.abspath(os.path.join(os.path.dirname(__file__), - '..', - '..', - ) - ) - if project_file: - return os.path.join(root, project_file) - else: - return root - - -class WithDbTestCase(TestCase): - - def override_config(self, name, override, group=None): - CONF.set_override(name, override, group) - self.addCleanup(CONF.clear_override, name, group) - - def setUp(self): - super(WithDbTestCase, self).setUp() - self.override_config('connection', "sqlite://", group='database') - db_api.setup_db() - self.addCleanup(db_api.drop_db) - - -class TestCaseFaulty(TestCase): - """This test ensures we aren't letting any exceptions go unhandled.""" diff --git a/cerberus/tests/unit/client/__init__.py b/cerberus/tests/unit/client/__init__.py deleted file mode 100644 index 4aa294d..0000000 --- a/cerberus/tests/unit/client/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__author__ = 'svcdev' diff --git a/cerberus/tests/unit/client/test_keystone_client.py b/cerberus/tests/unit/client/test_keystone_client.py deleted file mode 100644 index c026821..0000000 --- a/cerberus/tests/unit/client/test_keystone_client.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import mock -from oslo.config import cfg - -from cerberus.client import keystone_client -from cerberus.tests.unit import base - -cfg.CONF.import_group('service_credentials', 'cerberus.service') - - -class TestKeystoneClient(base.TestCase): - - def setUp(self): - super(TestKeystoneClient, self).setUp() - - @staticmethod - def fake_get_user(): - return { - 'user': { - "id": "u1000", - "name": "jqsmith", - "email": "john.smith@example.org", - "enabled": True - } - } - - @mock.patch('keystoneclient.v2_0.client.Client') - def test_get_user(self, mock_client): - kc = keystone_client.Client() - user = self.fake_get_user() - kc.keystone_client_v2_0.users.get = mock.MagicMock( - return_value=user) - user = kc.user_detail_get("user") - self.assertEqual("u1000", user['user'].get('id')) - - @mock.patch('keystoneclient.v2_0.client.Client') - def test_roles_for_user(self, mock_client): - kc = keystone_client.Client() - kc.keystone_client_v2_0.roles.roles_for_user = mock.MagicMock( - return_value="role" - ) - role = kc.roles_for_user("user", "tenant") - self.assertEqual("role", role) diff --git a/cerberus/tests/unit/client/test_neutron_client.py b/cerberus/tests/unit/client/test_neutron_client.py deleted file mode 100644 index 1407396..0000000 --- a/cerberus/tests/unit/client/test_neutron_client.py +++ /dev/null @@ -1,211 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import mock -from oslo.config import cfg - -from cerberus.client import neutron_client -from cerberus.tests.unit import base - -cfg.CONF.import_group('service_credentials', 'cerberus.service') - - -class TestNeutronClient(base.TestCase): - - def setUp(self): - super(TestNeutronClient, self).setUp() - - @staticmethod - def fake_networks_list(): - return {'networks': - [{'admin_state_up': True, - 'id': '298a3088-a446-4d5a-bad8-f92ecacd786b', - 'name': 'public', - 'provider:network_type': 'gre', - 'provider:physical_network': None, - 'provider:segmentation_id': 2, - 'router:external': True, - 'shared': False, - 'status': 'ACTIVE', - 'subnets': [u'c4b6f5b8-3508-4896-b238-a441f25fb492'], - 'tenant_id': '62d6f08bbd3a44f6ad6f00ca15cce4e5'}, - ]} - - @staticmethod - def fake_network_get(): - return {"network": { - "status": "ACTIVE", - "subnets": [ - "54d6f61d-db07-451c-9ab3-b9609b6b6f0b"], - "name": "private-network", - "provider:physical_network": None, - "admin_state_up": True, - "tenant_id": "4fd44f30292945e481c7b8a0c8908869", - "provider:network_type": "local", - "router:external": True, - "shared": True, - "id": "d32019d3-bc6e-4319-9c1d-6722fc136a22", - "provider:segmentation_id": None - } - } - - @staticmethod - def fake_subnets_list(): - return {"subnets": [ - { - "name": "private-subnet", - "enable_dhcp": True, - "network_id": "db193ab3-96e3-4cb3-8fc5-05f4296d0324", - "tenant_id": "26a7980765d0414dbc1fc1f88cdb7e6e", - "dns_nameservers": [], - "allocation_pools": [ - { - "start": "10.0.0.2", - "end": "10.0.0.254" - } - ], - "host_routes": [], - "ip_version": 4, - "gateway_ip": "10.0.0.1", - "cidr": "10.0.0.0/24", - "id": "08eae331-0402-425a-923c-34f7cfe39c1b"}, - { - "name": "my_subnet", - "enable_dhcp": True, - "network_id": "d32019d3-bc6e-4319-9c1d-6722fc136a22", - "tenant_id": "4fd44f30292945e481c7b8a0c8908869", - "dns_nameservers": [], - "allocation_pools": [ - { - "start": "192.0.0.2", - "end": "192.255.255.254" - } - ], - "host_routes": [], - "ip_version": 4, - "gateway_ip": "192.0.0.1", - "cidr": "192.0.0.0/8", - "id": "54d6f61d-db07-451c-9ab3-b9609b6b6f0b" - } - ] - } - - @staticmethod - def fake_subnet_get(): - return {"subnet": { - "name": "my_subnet", - "enable_dhcp": True, - "network_id": "d32019d3-bc6e-4319-9c1d-6722fc136a22", - "tenant_id": "4fd44f30292945e481c7b8a0c8908869", - "dns_nameservers": [], - "allocation_pools": [ - { - "start": "192.0.0.2", - "end": "192.255.255.254" - }], - "host_routes": [], - "ip_version": 4, - "gateway_ip": "192.0.0.1", - "cidr": "192.0.0.0/8", - "id": "54d6f61d-db07-451c-9ab3-b9609b6b6f0b" - } - } - - @staticmethod - def fake_floating_ips_list(): - return {'floatingips': [ - { - 'router_id': 'd23abc8d-2991-4a55-ba98-2aaea84cc72f', - 'tenant_id': '4969c491a3c74ee4af974e6d800c62de', - 'floating_network_id': '376da547-b977-4cfe-9cba-275c80debf57', - 'fixed_ip_address': '10.0.0.3', - 'floating_ip_address': '172.24.4.228', - 'port_id': 'ce705c24-c1ef-408a-bda3-7bbd946164ab', - 'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda7'}, - { - 'router_id': None, - 'tenant_id': '4969c491a3c74ee4af974e6d800c62de', - 'floating_network_id': 
'376da547-b977-4cfe-9cba-275c80debf57', - 'fixed_ip_address': None, - 'floating_ip_address': '172.24.4.227', - 'port_id': None, - 'id': '61cea855-49cb-4846-997d-801b70c71bdd' - } - ]} - - @mock.patch('neutronclient.v2_0.client.Client') - def test_list_networks(self, mock_client): - nc = neutron_client.Client() - nc.neutronClient.list_networks = mock.MagicMock( - return_value=self.fake_networks_list()) - networks = nc.list_networks('tenant') - self.assertTrue(len(networks) == 1) - self.assertEqual('298a3088-a446-4d5a-bad8-f92ecacd786b', - networks[0].get('id')) - - @mock.patch('neutronclient.v2_0.client.Client') - def test_list_floatingips(self, mock_client): - nc = neutron_client.Client() - nc.neutronClient.list_floatingips = mock.MagicMock( - return_value=self.fake_floating_ips_list()) - floating_ips = nc.list_floatingips('tenant') - self.assertTrue(len(floating_ips) == 2) - self.assertEqual('2f245a7b-796b-4f26-9cf9-9e82d248fda7', - floating_ips[0].get('id')) - self.assertEqual('61cea855-49cb-4846-997d-801b70c71bdd', - floating_ips[1].get('id')) - - @mock.patch('neutronclient.v2_0.client.Client') - def test_list_associated_floatingips(self, mock_client): - nc = neutron_client.Client() - nc.neutronClient.list_floatingips = mock.MagicMock( - return_value=self.fake_floating_ips_list()) - floating_ips = nc.list_associated_floatingips() - self.assertTrue(len(floating_ips) == 1) - self.assertEqual('2f245a7b-796b-4f26-9cf9-9e82d248fda7', - floating_ips[0].get('id')) - - @mock.patch('neutronclient.v2_0.client.Client') - def test_subnet_ips_get(self, mock_client): - nc = neutron_client.Client() - nc.neutronClient.show_subnet = mock.MagicMock( - return_value=self.fake_subnet_get()) - subnet_ips = nc.subnet_ips_get("d32019d3-bc6e-4319-9c1d-6722fc136a22") - self.assertTrue(len(subnet_ips) == 1) - self.assertEqual("192.0.0.2", subnet_ips[0].get("start", None)) - self.assertEqual("192.255.255.254", subnet_ips[0].get("end", None)) - - @mock.patch('neutronclient.v2_0.client.Client') - def test_net_ips_get(self, mock_client): - nc = neutron_client.Client() - nc.neutronClient.show_network = mock.MagicMock( - return_value=self.fake_network_get()) - nc.neutronClient.show_subnet = mock.MagicMock( - return_value=self.fake_subnet_get()) - ips = nc.net_ips_get("d32019d3-bc6e-4319-9c1d-6722fc136a22") - self.assertTrue(len(ips) == 1) - self.assertTrue(len(ips[0]) == 1) - self.assertEqual("192.0.0.2", ips[0][0].get("start", None)) - self.assertEqual("192.255.255.254", ips[0][0].get("end", None)) - - @mock.patch('neutronclient.v2_0.client.Client') - def test_get_net_of_subnet(self, mock_client): - nc = neutron_client.Client() - nc.neutronClient.show_subnet = mock.MagicMock( - return_value=self.fake_subnet_get()) - network_id = nc.get_net_of_subnet( - "54d6f61d-db07-451c-9ab3-b9609b6b6f0b") - self.assertEqual("d32019d3-bc6e-4319-9c1d-6722fc136a22", network_id) diff --git a/cerberus/tests/unit/client/test_nova_client.py b/cerberus/tests/unit/client/test_nova_client.py deleted file mode 100644 index 1ad1ec4..0000000 --- a/cerberus/tests/unit/client/test_nova_client.py +++ /dev/null @@ -1,129 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
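# The two assertions in test_list_associated_floatingips above pin down the
# behaviour the client is expected to have: of the two floating IPs in the
# fake payload, only the one bound to a port survives. A standalone sketch of
# that filter over the payload shape neutron returns (the helper name is
# illustrative, not the deleted client's API):
def associated_floatingips(response):
    """Keep only the floating IPs currently attached to a port."""
    return [fip for fip in response.get('floatingips', [])
            if fip.get('port_id') is not None]


_response = {'floatingips': [
    {'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda7',
     'port_id': 'ce705c24-c1ef-408a-bda3-7bbd946164ab'},
    {'id': '61cea855-49cb-4846-997d-801b70c71bdd',
     'port_id': None},
]}
assert [f['id'] for f in associated_floatingips(_response)] == \
    ['2f245a7b-796b-4f26-9cf9-9e82d248fda7']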
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import mock -from oslo.config import cfg - -from cerberus.client import nova_client -from cerberus.tests.unit import base - -cfg.CONF.import_group('service_credentials', 'cerberus.service') - - -class TestNovaClient(base.TestCase): - - @staticmethod - def fake_servers_list(*args, **kwargs): - a = mock.MagicMock() - a.id = 42 - a.flavor = {'id': 1} - a.image = {'id': 1} - a_addresses = [] - a_addresses.append({"addr": "10.0.0.1", "version": 4, - 'OS-EXT-IPS:type': 'floating'}) - a.addresses = {'private': a_addresses} - b = mock.MagicMock() - b.id = 43 - b.flavor = {'id': 2} - b.image = {'id': 2} - return [a, b] - - @staticmethod - def fake_detailed_servers_list(): - return \ - {"servers": [ - { - "accessIPv4": "", - "accessIPv6": "", - "addresses": { - "private": [ - { - "addr": "192.168.0.3", - "version": 4 - } - ] - }, - "created": "2012-09-07T16:56:37Z", - "flavor": { - "id": "1", - "links": [ - { - "href": "http://openstack.example.com/" - "openstack/flavors/1", - "rel": "bookmark" - } - ] - }, - "hostId": "16d193736a5cfdb60c697ca27ad071d6126fa13baeb670f" - "c9d10645e", - "id": "05184ba3-00ba-4fbc-b7a2-03b62b884931", - "image": { - "id": "70a599e0-31e7-49b7-b260-868f441e862b", - "links": [ - { - "href": "http://openstack.example.com/" - "openstack/images/70a599e0-31e7-49b7-" - "b260-868f441e862b", - "rel": "bookmark" - } - ] - }, - "links": [ - { - "href": "http://openstack.example.com/v2/" - "openstack/servers/05184ba3-00ba-4fbc-" - "b7a2-03b62b884931", - "rel": "self" - }, - { - "href": "http://openstack.example.com/openstack/" - "servers/05184ba3-00ba-4fbc-b7a2-" - "03b62b884931", - "rel": "bookmark" - } - ], - "metadata": { - "My Server Name": "Apache1" - }, - "name": "new-server-test", - "progress": 0, - "status": "ACTIVE", - "tenant_id": "openstack", - "updated": "2012-09-07T16:56:37Z", - "user_id": "fake" - } - ] - } - - def setUp(self): - super(TestNovaClient, self).setUp() - self.nova_client = nova_client.Client() - - def test_instance_get_all(self): - self.nova_client.nova_client.servers.list = mock.MagicMock( - return_value=self.fake_servers_list()) - instances = self.nova_client.instance_get_all() - self.assertTrue(instances is not None) - - def test_get_instance_details_from_floating_ip(self): - self.nova_client.nova_client.servers.list = mock.MagicMock( - return_value=self.fake_servers_list()) - instance_1 = self.nova_client.get_instance_details_from_floating_ip( - "10.0.0.1") - instance_2 = self.nova_client.get_instance_details_from_floating_ip( - "10.0.0.2") - self.assertTrue(instance_1 is not None) - self.assertTrue(instance_2 is None) diff --git a/cerberus/tests/unit/config_fixture.py b/cerberus/tests/unit/config_fixture.py deleted file mode 100644 index 46da1ab..0000000 --- a/cerberus/tests/unit/config_fixture.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
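# Judging by fake_servers_list() above, the lookups exercised by
# test_get_instance_details_from_floating_ip scan each server's addresses for
# an entry whose OS-EXT-IPS:type is 'floating' and whose addr matches. A
# standalone sketch of that matching logic, with an illustrative helper name
# and a stub server class (the real client's internals are not shown here):
class _Server(object):
    def __init__(self, addresses):
        self.addresses = addresses


def find_server_by_floating_ip(servers, floating_ip):
    """Return the first server holding floating_ip, or None."""
    for server in servers:
        for entries in getattr(server, 'addresses', {}).values():
            for entry in entries:
                if (entry.get('OS-EXT-IPS:type') == 'floating'
                        and entry.get('addr') == floating_ip):
                    return server
    return None


_servers = [_Server({'private': [{'addr': '10.0.0.1', 'version': 4,
                                  'OS-EXT-IPS:type': 'floating'}]})]
assert find_server_by_floating_ip(_servers, '10.0.0.1') is _servers[0]
assert find_server_by_floating_ip(_servers, '10.0.0.2') is None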
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import fixtures -from oslo.config import cfg - -from cerberus.common import config - -CONF = cfg.CONF - - -class ConfigFixture(fixtures.Fixture): - """Fixture to manage global conf settings.""" - - def __init__(self, conf): - self.conf = conf - - def setUp(self): - super(ConfigFixture, self).setUp() - self.conf.set_default('verbose', True) - config.parse_args([], default_config_files=[]) - self.addCleanup(self.conf.reset) diff --git a/cerberus/tests/unit/db/__init__.py b/cerberus/tests/unit/db/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/cerberus/tests/unit/db/test_db_api.py b/cerberus/tests/unit/db/test_db_api.py deleted file mode 100644 index e218723..0000000 --- a/cerberus/tests/unit/db/test_db_api.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -""" -Tests for `db api` module. -""" - -import mock - -from cerberus.db.sqlalchemy import api as db_api -from cerberus.openstack.common.db.sqlalchemy import models as db_models -from cerberus.tests.unit import base - - -class DbApiTestCase(base.WithDbTestCase): - - def setUp(self): - super(DbApiTestCase, self).setUp() - - def test_security_report_create(self): - db_models.ModelBase.save = mock.MagicMock() - report = db_api.security_report_create( - {'title': 'TitleSecurityReport', - 'plugin_id': '123456789', - 'description': 'The first', - 'component_id': '1234'}) - - self.assertEqual('TitleSecurityReport', report.title) - self.assertEqual('123456789', report.plugin_id) - self.assertEqual('The first', report.description) - self.assertEqual('1234', report.component_id) - - def test_plugin_info_create(self): - pi = db_api.plugin_info_create( - {'name': 'NameOfPlugin', - 'uuid': '0000-aaaa-1111-bbbb'}) - self.assertTrue(pi.id >= 0) - - def test_plugin_info_get(self): - db_api.plugin_info_create( - {'name': 'NameOfPluginToGet', - 'uuid': '3333-aaaa-1111-bbbb'}) - - pi = db_api.plugin_info_get('NameOfPluginToGet') - self.assertEqual('NameOfPluginToGet', pi.name) diff --git a/cerberus/tests/unit/db/utils.py b/cerberus/tests/unit/db/utils.py deleted file mode 100644 index 571c0a4..0000000 --- a/cerberus/tests/unit/db/utils.py +++ /dev/null @@ -1,184 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
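# ConfigFixture above follows the standard fixtures.Fixture recipe: do the
# work in setUp() after calling the parent, and register the undo with
# addCleanup() so that useFixture() in a test guarantees teardown. A minimal
# sketch of the same shape; TempAttrFixture is illustrative, not part of the
# deleted tree.
import fixtures


class TempAttrFixture(fixtures.Fixture):
    """Temporarily sets an attribute on an object for one test."""

    def __init__(self, obj, name, value):
        self.obj, self.name, self.value = obj, name, value

    def setUp(self):
        super(TempAttrFixture, self).setUp()
        original = getattr(self.obj, self.name)
        setattr(self.obj, self.name, self.value)
        # Restore the original value during fixture cleanup.
        self.addCleanup(setattr, self.obj, self.name, original)

# Usage inside a testtools/oslotest test case:
#     self.useFixture(TempAttrFixture(some_obj, 'verbose', True))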
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import datetime - -from cerberus.common import loopingcall -from cerberus.db.sqlalchemy import models - - -def fake_function(): - pass - - -def get_test_security_report(**kwargs): - return { - 'uuid': kwargs.get('uuid', 1), - 'plugin_id': kwargs.get('plugin_id', - '228df8e8-d5f4-4eb9-a547-dfc649dd1017'), - 'report_id': kwargs.get('report_id', '1234'), - 'component_id': kwargs.get('component_id', - '422zb9d5-c5g3-8wy9-a547-hhc885dd8548'), - 'component_type': kwargs.get('component_type', 'instance'), - 'component_name': kwargs.get('component_name', 'instance-test'), - 'project_id': kwargs.get('project_id', - '28c6f9e6add24c29a589a9967432fede'), - 'title': kwargs.get('title', 'test-security-report'), - 'description': kwargs.get('description', - 'no fear, this is just a test'), - 'security_rating': kwargs.get('security_rating', 5.1), - 'vulnerabilities': kwargs.get('vulnerabilities', 'vulns'), - 'vulnerabilities_number': kwargs.get('vulnerabilities_number', 1), - 'last_report_date': kwargs.get('last_report_date', - '2015-01-01T00:00:00') - } - - -def get_security_report_model(**kwargs): - security_report = models.SecurityReport() - security_report.uuid = kwargs.get('uuid', 1) - security_report.plugin_id = kwargs.get( - 'plugin_id', - '228df8e8-d5f4-4eb9-a547-dfc649dd1017' - ) - security_report.report_id = kwargs.get('report_id', '1234') - security_report.component_id = kwargs.get( - 'component_id', - '422zb9d5-c5g3-8wy9-a547-hhc885dd8548') - security_report.component_type = kwargs.get('component_type', 'instance') - security_report.component_name = kwargs.get('component_name', - 'instance-test') - security_report.project_id = kwargs.get('project_id', - '28c6f9e6add24c29a589a9967432fede') - security_report.title = kwargs.get('title', 'test-security-report') - security_report.description = kwargs.get('description', - 'no fear, this is just a test') - security_report.security_rating = kwargs.get('security_rating', - float('5.1')) - security_report.vulnerabilities = kwargs.get('vulnerabilities', 'vulns') - security_report.vulnerabilities_number = kwargs.get( - 'vulnerabilities_number', 1) - security_report.last_report_date = kwargs.get( - 'last_report_date', - datetime.datetime(2015, 1, 1) - ) - return security_report - - -def get_test_plugin(**kwargs): - return { - 'id': kwargs.get('id', 1), - 'provider': kwargs.get('provider', 'provider'), - 'tool_name': kwargs.get('tool_name', 'toolbox'), - 'type': kwargs.get('type', 'tool_whatever'), - 'description': kwargs.get('description', 'This is a tool'), - 'uuid': kwargs.get('uuid', '490cc562-9e60-46a7-9b5f-c7619aca2e07'), - 'version': kwargs.get('version', '0.1a'), - 'name': kwargs.get('name', 'tooly'), - 'subscribed_events': kwargs.get('subscribed_events', - ["compute.instance.updated"]), - 'methods': kwargs.get('methods', []) - } - - -def get_plugin_model(**kwargs): - plugin = models.PluginInfo() - plugin.id = kwargs.get('id', 1) - plugin.provider = kwargs.get('provider', 'provider') - plugin.tool_name = kwargs.get('tool_name', 'toolbox') - plugin.type = kwargs.get('type', 'tool_whatever') - plugin.description = 
kwargs.get('description', 'This is a tool') - plugin.uuid = kwargs.get('uuid', '490cc562-9e60-46a7-9b5f-c7619aca2e07') - plugin.version = kwargs.get('version', '0.1a') - plugin.name = kwargs.get('name', 'tooly') - return plugin - - -def get_rpc_plugin(**kwargs): - return { - 'name': kwargs.get('name', 'tooly'), - 'subscribed_events': kwargs.get('subscribed_events', - ["compute.instance.updated"]), - 'methods': kwargs.get('methods', []) - } - - -def get_test_task(**kwargs): - return { - 'id': kwargs.get('task_id', 1), - 'type': kwargs.get('task_type', 'unique'), - 'name': kwargs.get('task_name', 'No Name'), - 'period': kwargs.get('task_period', ''), - 'persistent': False, - } - - -def get_recurrent_task_object(**kwargs): - return(loopingcall.CerberusFixedIntervalLoopingCall(fake_function, - **kwargs)) - - -def get_recurrent_task_model(**kwargs): - task = models.Task() - task.id = kwargs.get('id', 1) - task.name = kwargs.get('name', 'this_task') - task.method = kwargs.get('method', 'method') - task.type = kwargs.get('type', 'recurrent') - task.period = kwargs.get('period', 10) - task.plugin_id = kwargs.get('plugin_id', - '490cc562-9e60-46a7-9b5f-c7619aca2e07') - task.uuid = kwargs.get('uuid', '500cc562-5c50-89t4-5fc8-c7619aca3n29') - - -def get_test_security_alarm(**kwargs): - return { - 'id': kwargs.get('id', 1), - 'plugin_id': kwargs.get('plugin_id', - '228df8e8-d5f4-4eb9-a547-dfc649dd1017'), - 'alarm_id': kwargs.get('alarm_id', '1234'), - 'timestamp': kwargs.get('timestamp', '2015-01-01T00:00:00'), - 'status': kwargs.get('status', 'new'), - 'severity': kwargs.get('severity', 'CRITICAL'), - 'component_id': kwargs.get('component_id', - '422zb9d5-c5g3-8wy9-a547-hhc885dd8548'), - 'summary': kwargs.get('summary', 'test-security-alarm'), - 'description': kwargs.get('description', - 'no fear, this is just a test') - - } - - -def get_security_alarm_model(**kwargs): - security_alarm = models.SecurityAlarm() - security_alarm.id = kwargs.get('id', 1) - security_alarm.plugin_id = kwargs.get( - 'plugin_id', - '228df8e8-d5f4-4eb9-a547-dfc649dd1017' - ) - security_alarm.alarm_id = kwargs.get('alarm_id', '1234') - security_alarm.timestamp = kwargs.get( - 'timestamp', - datetime.datetime(2015, 1, 1) - ) - security_alarm.status = kwargs.get('status', 'new') - security_alarm.severity = kwargs.get('severity', 'CRITICAL') - security_alarm.component_id = kwargs.get( - 'component_id', - '422zb9d5-c5g3-8wy9-a547-hhc885dd8548') - security_alarm.summary = kwargs.get('summary', 'test-security-alarm') - security_alarm.description = kwargs.get('description', - 'no fear, this is just a test') - return security_alarm diff --git a/cerberus/tests/unit/fake_policy.py b/cerberus/tests/unit/fake_policy.py deleted file mode 100644 index 64bcecb..0000000 --- a/cerberus/tests/unit/fake_policy.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
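# The helpers above share one test-data-builder idiom: every field gets a
# default and callers pin individual fields through **kwargs. Two details are
# easy to miss when reading them: get_test_task() looks up 'task_id',
# 'task_type', 'task_name' and 'task_period', so callers passing id=, type=,
# name= or period= (as the deleted test_tasks.py does) silently receive the
# defaults; and get_recurrent_task_model() builds a Task but never returns
# it. A condensed sketch of the idiom with an explicit return (the builder
# name is illustrative):
def make_task(**kwargs):
    task = {
        'id': kwargs.get('id', 1),
        'type': kwargs.get('type', 'unique'),
        'name': kwargs.get('name', 'No Name'),
        'period': kwargs.get('period', None),
    }
    return task  # a builder must hand the object back to the caller


assert make_task()['type'] == 'unique'
assert make_task(type='recurrent', period=20)['period'] == 20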
-# - -policy_data = """ -{ - "context_is_admin": "role:admin", - "default": "" -} -""" diff --git a/cerberus/tests/unit/policy_fixture.py b/cerberus/tests/unit/policy_fixture.py deleted file mode 100644 index 459810c..0000000 --- a/cerberus/tests/unit/policy_fixture.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import os - -import fixtures -from oslo.config import cfg - -from cerberus.common import policy as cerberus_policy -from cerberus.openstack.common import policy as common_policy -from cerberus.tests.unit import fake_policy - - -CONF = cfg.CONF - - -class PolicyFixture(fixtures.Fixture): - - def setUp(self): - super(PolicyFixture, self).setUp() - self.policy_dir = self.useFixture(fixtures.TempDir()) - self.policy_file_name = os.path.join(self.policy_dir.path, - 'policy.json') - with open(self.policy_file_name, 'w') as policy_file: - policy_file.write(fake_policy.policy_data) - CONF.set_override('policy_file', self.policy_file_name) - cerberus_policy._ENFORCER = None - self.addCleanup(cerberus_policy.get_enforcer().clear) - - def set_rules(self, rules): - common_policy.set_rules(common_policy.Rules( - dict((k, common_policy.parse_rule(v)) - for k, v in rules.items()))) diff --git a/cerberus/tests/unit/test_cerberus_manager.py b/cerberus/tests/unit/test_cerberus_manager.py deleted file mode 100644 index 1f62ee5..0000000 --- a/cerberus/tests/unit/test_cerberus_manager.py +++ /dev/null @@ -1,617 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -""" -test_cerberus manager ----------------------------------- - -Tests for `cerberus` module. 
-""" - -from eventlet import greenpool -import json -import mock -import pkg_resources -from stevedore import extension -import uuid - -from oslo import messaging - -from cerberus.common import errors -from cerberus.common import loopingcall -from cerberus.common import threadgroup -from cerberus.db.sqlalchemy import api as db_api -from cerberus import manager -from cerberus.plugins import base as base_plugin -from cerberus.tests.unit import base -from cerberus.tests.unit.db import utils as db_utils - - -PLUGIN_UUID = 'UUID' - - -class FakePlugin(base_plugin.PluginBase): - - def __init__(self): - super(FakePlugin, self).__init__() - self._uuid = PLUGIN_UUID - - def fake_function(self, *args, **kwargs): - return(args, kwargs) - - @base_plugin.PluginBase.webmethod - def another_fake_but_web_method(self): - pass - - def process_notification(self, ctxt, publisher_id, event_type, payload, - metadata): - pass - - -class DbPluginInfo(object): - def __init__(self, id, uuid): - self.id = id - self.uuid = uuid - - -class EntryPoint(object): - def __init__(self): - self.dist = pkg_resources.Distribution.from_filename( - "FooPkg-1.2-py2.4.egg") - - -class TestCerberusManager(base.WithDbTestCase): - - def setUp(self): - super(TestCerberusManager, self).setUp() - self.plugin = FakePlugin() - self.extension_mgr = extension.ExtensionManager.make_test_instance( - [ - extension.Extension( - 'plugin', - EntryPoint(), - None, - self.plugin, ), - ] - ) - self.db_plugin_info = DbPluginInfo(1, PLUGIN_UUID) - self.manager = manager.CerberusManager() - self.manager.cerberus_manager = self.extension_mgr - self.fake_db_task = db_utils.get_recurrent_task_model( - plugin_id=PLUGIN_UUID - ) - - def test_register_plugin(self): - with mock.patch('cerberus.db.sqlalchemy.api.plugin_info_create') \ - as MockClass: - MockClass.return_value = DbPluginInfo(1, PLUGIN_UUID) - self.manager._register_plugin(self.manager. 
- cerberus_manager['plugin']) - self.assertEqual(self.db_plugin_info.uuid, - self.manager.cerberus_manager['plugin'].obj._uuid) - - def test_register_plugin_new_version(self): - with mock.patch('cerberus.db.sqlalchemy.api.plugin_info_get') \ - as MockClass: - MockClass.return_value = DbPluginInfo(1, PLUGIN_UUID) - db_api.plugin_version_update = mock.MagicMock() - self.manager._register_plugin( - self.manager.cerberus_manager['plugin']) - self.assertEqual(self.db_plugin_info.uuid, - self.manager.cerberus_manager['plugin'].obj._uuid) - - @mock.patch.object(messaging.MessageHandlingServer, 'start') - def test_start(self, rpc_start): - manager.CerberusManager._register_plugin = mock.MagicMock() - manager.CerberusManager.add_stored_tasks = mock.MagicMock() - mgr = manager.CerberusManager() - mgr.start() - rpc_start.assert_called_with() - assert(rpc_start.call_count == 2) - - @mock.patch.object(greenpool.GreenPool, "spawn") - def test_add_task_without_args(self, mock): - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function) - assert(len(self.manager.tg.threads) == 1) - mock.assert_called_with( - self.manager.cerberus_manager['plugin'].obj.fake_function) - - @mock.patch.object(greenpool.GreenPool, "spawn") - def test_add_task_with_args(self, mock): - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - name="fake") - assert(len(self.manager.tg.threads) == 1) - mock.assert_called_with( - self.manager.cerberus_manager['plugin'].obj.fake_function, - name="fake") - - @mock.patch.object(loopingcall.CerberusFixedIntervalLoopingCall, "start") - def test_add_recurrent_task_without_delay(self, mock): - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 15) - assert(len(self.manager.tg.timers) == 1) - mock.assert_called_with(initial_delay=None, interval=15) - - @mock.patch.object(loopingcall.CerberusFixedIntervalLoopingCall, "start") - def test_add_recurrent_task_with_delay(self, mock): - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 15, - 200) - assert(len(self.manager.tg.timers) == 1) - mock.assert_called_with(initial_delay=200, interval=15) - - @mock.patch.object(db_api, "create_task") - def test_store_task(self, db_mock): - task = db_utils.get_recurrent_task_object( - persistent='True', task_name='task_name', task_type='recurrent', - task_period=5, plugin_id='490cc562-9e60-46a7-9b5f-c7619aca2e07', - task_id='500cc562-5c50-89t4-5fc8-c7619aca3n29') - self.manager._store_task(task, 'method_') - db_mock.assert_called_with( - {'name': 'task_name', - 'method': 'method_', - 'type': 'recurrent', - 'period': 5, - 'plugin_id': '490cc562-9e60-46a7-9b5f-c7619aca2e07', - 'running': True, - 'uuid': '500cc562-5c50-89t4-5fc8-c7619aca3n29'}) - - @mock.patch.object(greenpool.GreenPool, "spawn") - @mock.patch.object(uuid, "uuid4", return_value=1) - def test_create_task(self, uuid_mock, th_mock): - ctx = {"some": "context"} - db_api.create_task = mock.MagicMock(return_value=self.fake_db_task) - self.manager.create_task(ctx, PLUGIN_UUID, 'fake_function') - assert(len(self.manager.tg.threads) == 1) - th_mock.assert_called_with( - self.manager.cerberus_manager['plugin'].obj.fake_function, - plugin_id=PLUGIN_UUID, - task_id='1') - - @mock.patch.object(greenpool.GreenPool, "spawn") - @mock.patch.object(uuid, "uuid4", return_value=1) - def test_create_task_incorrect_task_type(self, uuid_mock, th_mock): - ctx = {"some": "context"} - db_api.create_task = 
mock.MagicMock(return_value=self.fake_db_task) - self.manager.create_task(ctx, PLUGIN_UUID, 'fake_function', - task_type='INCORRECT') - assert(len(self.manager.tg.threads) == 1) - th_mock.assert_called_with( - self.manager.cerberus_manager['plugin'].obj.fake_function, - plugin_id=PLUGIN_UUID, - task_type='INCORRECT', - task_id='1') - - @mock.patch.object(loopingcall.CerberusFixedIntervalLoopingCall, "start") - def test_create_recurrent_task_with_interval(self, mock): - ctx = {"some": "context"} - db_api.create_task = mock.MagicMock(return_value=self.fake_db_task) - self.manager.create_task(ctx, PLUGIN_UUID, 'fake_function', - task_type='recurrent', task_period=5) - assert(len(self.manager.tg.timers) == 1) - mock.assert_called_with(initial_delay=None, interval=5) - - def test_get_recurrent_task(self): - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 15, - task_id=1) - recurrent_task = self.manager._get_recurrent_task(1) - assert(isinstance(recurrent_task, - loopingcall.CerberusFixedIntervalLoopingCall)) - - def test_get_recurrent_task_wrong_id(self): - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 15, - task_id=task_id) - self.assertTrue(self.manager._get_recurrent_task(task_id + 1) is None) - - def test_get_plugins(self): - ctx = {"some": "context"} - json_plugin1 = { - "name": "cerberus.tests.unit.test_cerberus_manager.FakePlugin", - "subscribed_events": - [ - ], - "methods": - [ - "another_fake_but_web_method" - ] - } - expected_json_plugins = [] - jplugin1 = json.dumps(json_plugin1) - expected_json_plugins.append(jplugin1) - json_plugins = self.manager.get_plugins(ctx) - self.assertEqual(json_plugins, expected_json_plugins) - - def test_get_plugin(self): - ctx = {"some": "context"} - c_manager = manager.CerberusManager() - c_manager.cerberus_manager = self.extension_mgr - - json_plugin1 = { - "name": "cerberus.tests.unit.test_cerberus_manager.FakePlugin", - "subscribed_events": - [ - ], - "methods": - [ - "another_fake_but_web_method" - ] - } - jplugin1 = json.dumps(json_plugin1) - json_plugin = c_manager.get_plugin_from_uuid(ctx, PLUGIN_UUID) - self.assertEqual(json_plugin, jplugin1) - - def test_get_plugin_wrong_id(self): - ctx = {"some": "context"} - self.assertEqual(self.manager.get_plugin_from_uuid(ctx, 'wrong_test'), - None) - - def test_get_tasks(self): - recurrent_task_id = 1 - unique_task_id = 2 - task_period = 5 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_period, - task_id=recurrent_task_id) - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_id=unique_task_id) - tasks = self.manager._get_tasks() - self.assertTrue(len(tasks) == 2) - self.assertTrue( - isinstance(tasks[0], - loopingcall.CerberusFixedIntervalLoopingCall)) - self.assertTrue(isinstance(tasks[1], threadgroup.CerberusThread)) - - def test_get_tasks_(self): - recurrent_task_id = 1 - unique_task_id = 2 - task_period = 5 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_period, - task_id=recurrent_task_id) - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_id=unique_task_id) - tasks = self.manager.get_tasks({'some': 'context'}) - self.assertTrue(len(tasks) == 2) - - def test_get_task_reccurent(self): - task_id = 1 - task_period = 5 - self.manager._add_recurrent_task( - 
self.manager.cerberus_manager['plugin'].obj.fake_function, - task_period, - task_id=task_id) - task = self.manager._get_task(task_id) - self.assertTrue( - isinstance(task, loopingcall.CerberusFixedIntervalLoopingCall)) - - def test_get_task_unique(self): - task_id = 1 - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_id=task_id) - task = self.manager._get_task(task_id) - self.assertTrue(isinstance(task, threadgroup.CerberusThread)) - - def test_get_task(self): - recurrent_task_id = 1 - recurrent_task_name = "recurrent_task" - unique_task_id = 2 - unique_task_name = "unique_task" - task_period = 5 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_period, - task_name=recurrent_task_name, - task_period=task_period, - task_id=recurrent_task_id) - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_id=unique_task_id, - task_name=unique_task_name) - task = self.manager.get_task({'some': 'context'}, 1) - self.assertTrue(json.loads(task).get('name') == recurrent_task_name) - self.assertTrue(int(json.loads(task).get('id')) == recurrent_task_id) - task_2 = self.manager.get_task({'some': 'context'}, 2) - self.assertTrue(json.loads(task_2).get('name') == unique_task_name) - self.assertTrue(int(json.loads(task_2).get('id')) == unique_task_id) - - def test_stop_unique_task(self): - task_id = 1 - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_id=task_id) - assert(len(self.manager.tg.threads) == 1) - self.manager._stop_unique_task(task_id) - assert(len(self.manager.tg.threads) == 0) - - def test_stop_recurrent_task(self): - db_api.update_state_task = mock.MagicMock() - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - assert(self.manager.tg.timers[0]._running is True) - self.manager._stop_recurrent_task(task_id) - assert(self.manager.tg.timers[0]._running is False) - - def test_stop_task_recurrent(self): - db_api.update_state_task = mock.MagicMock() - recurrent_task_id = 1 - unique_task_id = 2 - task_period = 5 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_period, - task_id=recurrent_task_id) - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_id=unique_task_id) - self.assertTrue(len(self.manager.tg.timers) == 1) - assert(self.manager.tg.timers[0]._running is True) - self.assertTrue(len(self.manager.tg.threads) == 1) - self.manager._stop_task(recurrent_task_id) - self.assertTrue(len(self.manager.tg.timers) == 1) - assert(self.manager.tg.timers[0]._running is False) - self.assertTrue(len(self.manager.tg.threads) == 1) - self.manager._stop_task(unique_task_id) - self.assertTrue(len(self.manager.tg.timers) == 1) - assert(self.manager.tg.timers[0]._running is False) - self.assertTrue(len(self.manager.tg.threads) == 0) - - @mock.patch.object(manager.CerberusManager, "_stop_task") - def test_stop_task(self, mock): - self.manager.stop_task({'some': 'context'}, 1) - mock.assert_called_with(1) - - def test_delete_recurrent_task(self): - ctx = {"some": "context"} - db_api.delete_task = mock.MagicMock() - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - recurrent_task = self.manager._get_recurrent_task(task_id) - 
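# The tests above lean heavily on mock.patch.object as a decorator: the named
# attribute is replaced for the duration of the test and the MagicMock is
# injected as an extra argument, so the test can assert on how it was called.
# A self-contained sketch of the pattern (Engine/Service are illustrative):
import mock


class Engine(object):
    def ping(self):
        return 'real'


class Service(object):
    def __init__(self):
        self.engine = Engine()

    def check(self):
        return self.engine.ping()


@mock.patch.object(Engine, 'ping', return_value='stubbed')
def test_check_uses_engine(mock_ping):
    # The patch is active only inside this function; Engine.ping is restored
    # on exit, even if an assertion fails.
    assert Service().check() == 'stubbed'
    mock_ping.assert_called_once_with()


test_check_uses_engine()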
assert(self.manager.tg.timers[0]._running is True) - assert(recurrent_task.gt.dead is False) - self.manager.delete_recurrent_task(ctx, task_id) - assert(recurrent_task.gt.dead is False) - assert(len(self.manager.tg.timers) == 0) - - def test_force_delete_recurrent_task(self): - task_id = 1 - ctx = {"some": "ctx"} - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - recurrent_task = self.manager._get_recurrent_task(task_id) - assert(self.manager.tg.timers[0]._running is True) - assert(recurrent_task.gt.dead is False) - self.manager.force_delete_recurrent_task(ctx, task_id) - assert(recurrent_task.gt.dead is True) - assert(len(self.manager.tg.timers) == 0) - - def test_start_recurrent_task(self): - ctxt = {'some': 'context'} - db_api.update_state_task = mock.MagicMock() - task_id = 1 - task_period = 5 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_period, - task_id=task_id, - task_period=task_period) - assert(self.manager.tg.timers[0]._running is True) - self.manager._stop_recurrent_task(task_id) - assert(self.manager.tg.timers[0]._running is False) - self.manager.start_recurrent_task(ctxt, task_id) - assert(self.manager.tg.timers[0]._running is True) - - -class FaultyTestCerberusManager(base.TestCaseFaulty): - - def setUp(self): - super(FaultyTestCerberusManager, self).setUp() - self.plugin = FakePlugin() - self.extension_mgr = extension.ExtensionManager.make_test_instance( - [ - extension.Extension( - 'plugin', - EntryPoint(), - None, - self.plugin, ), - ] - ) - self.db_plugin_info = DbPluginInfo(1, PLUGIN_UUID) - self.manager = manager.CerberusManager() - self.manager.cerberus_manager = self.extension_mgr - - def test_create_task_wrong_plugin_id(self): - ctx = {"some": "context"} - self.assertRaises(errors.PluginNotFound, self.manager.create_task, - ctx, 'WRONG_UUID', 'fake_function') - assert(len(self.manager.tg.threads) == 0) - - def test_create_task_incorrect_period(self): - ctx = {"some": "context"} - self.assertRaises(errors.TaskPeriodNotInteger, - self.manager.create_task, - ctx, - PLUGIN_UUID, - 'fake_function', - task_type='recurrent', - task_period='NOT_INTEGER') - assert(len(self.manager.tg.threads) == 0) - - def test_create_task_wrong_plugin_method(self): - ctx = {"some": "context"} - self.assertRaises(errors.MethodNotCallable, - self.manager.create_task, ctx, PLUGIN_UUID, 'fake') - assert(len(self.manager.tg.threads) == 0) - - def test_create_task_method_not_as_string(self): - ctx = {"some": "context"} - self.assertRaises(errors.MethodNotString, - self.manager.create_task, - ctx, - PLUGIN_UUID, - self.manager.cerberus_manager[ - 'plugin'].obj.fake_function) - assert(len(self.manager.tg.threads) == 0) - - def test_create_recurrent_task_without_period(self): - ctx = {"some": "context"} - self.assertRaises(errors.TaskPeriodNotInteger, - self.manager.create_task, - ctx, - PLUGIN_UUID, - 'fake_function', - task_type='recurrent') - assert(len(self.manager.tg.timers) == 0) - - def test_create_recurrent_task_wrong_plugin_method(self): - ctx = {"some": "context"} - self.assertRaises(errors.MethodNotCallable, - self.manager.create_task, ctx, PLUGIN_UUID, 'fake', - task_type='recurrent', task_period=5) - assert(len(self.manager.tg.timers) == 0) - - def test_create_recurrent_task_method_not_as_string(self): - ctx = {"some": "context"} - self.assertRaises(errors.MethodNotString, - self.manager.create_task, - ctx, - PLUGIN_UUID, - self.manager.cerberus_manager[ - 
'plugin'].obj.fake_function, - task_type='recurrent', - task_period=5) - assert(len(self.manager.tg.timers) == 0) - - def test_get_task_unique_wrong_id(self): - task_id = 1 - ctx = {"some": "context"} - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - self.assertRaises(errors.TaskNotFound, - self.manager.get_task, - ctx, - task_id + 1) - - def test_stop_unique_task_wrong_id(self): - task_id = 1 - self.manager._add_unique_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - task_id=task_id) - assert(len(self.manager.tg.threads) == 1) - self.assertRaises(errors.TaskNotFound, - self.manager._stop_unique_task, - task_id + 1) - assert(len(self.manager.tg.threads) == 1) - - def test_stop_recurrent_task_wrong_id(self): - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - assert(self.manager.tg.timers[0]._running is True) - self.assertRaises(errors.TaskNotFound, - self.manager._stop_recurrent_task, - task_id + 1) - assert(self.manager.tg.timers[0]._running is True) - - def test_delete_recurrent_task_wrong_id(self): - ctx = {"some": "context"} - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - recurrent_task = self.manager._get_recurrent_task(task_id) - assert(self.manager.tg.timers[0]._running is True) - assert(recurrent_task.gt.dead is False) - self.assertRaises(errors.TaskDeletionNotAllowed, - self.manager.delete_recurrent_task, - ctx, - task_id + 1) - assert(self.manager.tg.timers[0]._running is True) - assert(recurrent_task.gt.dead is False) - - def test_force_delete_recurrent_task_wrong_id(self): - ctx = {"some": "ctx"} - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - recurrent_task = self.manager._get_recurrent_task(task_id) - assert(self.manager.tg.timers[0]._running is True) - assert(recurrent_task.gt.dead is False) - self.assertRaises(errors.TaskDeletionNotAllowed, - self.manager.force_delete_recurrent_task, - ctx, - task_id + 1) - assert(recurrent_task.gt.dead is False) - assert(len(self.manager.tg.timers) == 1) - - def test_start_recurrent_task_wrong_id(self): - ctxt = {"some": "ctx"} - db_api.update_state_task = mock.MagicMock() - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - assert(self.manager.tg.timers[0]._running is True) - self.manager._stop_recurrent_task(task_id) - assert(self.manager.tg.timers[0]._running is False) - self.assertRaises(errors.TaskStartNotAllowed, - self.manager.start_recurrent_task, - ctxt, - task_id + 1) - assert(self.manager.tg.timers[0]._running is False) - - def test_start_recurrent_task_running(self): - ctxt = {"some": "ctx"} - task_id = 1 - self.manager._add_recurrent_task( - self.manager.cerberus_manager['plugin'].obj.fake_function, - 5, - task_id=task_id) - assert(self.manager.tg.timers[0]._running is True) - self.assertRaises(errors.TaskStartNotPossible, - self.manager.start_recurrent_task, - ctxt, - task_id) - assert(self.manager.tg.timers[0]._running is True) diff --git a/cerberus/tests/unit/test_notifications.py b/cerberus/tests/unit/test_notifications.py deleted file mode 100644 index e70da29..0000000 --- a/cerberus/tests/unit/test_notifications.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# 
Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import uuid
-
-import mock
-from oslo.config import cfg
-
-from cerberus import notifications
-from cerberus.openstack.common.fixture import moxstubout
-from cerberus.tests.unit import base
-
-
-EXP_RESOURCE_TYPE = uuid.uuid4().hex
-
-
-class NotificationsTestCase(base.TestCase):
-    def setUp(self):
-        super(NotificationsTestCase, self).setUp()
-        fixture = self.useFixture(moxstubout.MoxStubout())
-        self.stubs = fixture.stubs
-
-        # these should use self.config_fixture.config(), but they haven't
-        # been registered yet
-        cfg.CONF.rpc_backend = 'fake'
-        cfg.CONF.notification_driver = ['fake']
-
-    def test_send_notification(self):
-        """Test the private method _send_notification to ensure event_type,
-        payload, and context are built and passed properly.
-        """
-        resource = uuid.uuid4().hex
-        payload = {'resource_info': resource}
-        resource_type = EXP_RESOURCE_TYPE
-        operation = 'created'
-
-        # NOTE(ldbragst): Even though notifications._send_notification doesn't
-        # contain logic that creates cases, this is supposed to test that
-        # context is always empty and that we ensure the resource ID of the
-        # resource in the notification is contained in the payload. It was
-        # agreed that context should be empty in Keystone's case, which is
-        # also noted in the /keystone/notifications.py module. This test
-        # ensures and maintains these conditions.
-        expected_args = [
-            {},  # empty context
-            'security.%s.created' % resource_type,  # event_type
-            {'resource_info': resource},  # payload
-            'INFO',  # priority is always INFO...
-        ]
-
-        with mock.patch.object(notifications._get_notifier(),
-                               '_notify') as mocked:
-            notifications.send_notification(operation, resource_type,
-                                            payload)
-            mocked.assert_called_once_with(*expected_args)
-
-        notifications.send_notification(operation, resource_type, payload)
diff --git a/cerberus/tests/unit/test_utils.py b/cerberus/tests/unit/test_utils.py
deleted file mode 100644
index b10cfe1..0000000
--- a/cerberus/tests/unit/test_utils.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#
-# Copyright (c) 2014 EUROGICIEL
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
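# The expected_args list in test_send_notification above pins down the wire
# contract: an empty request context, an event type shaped
# 'security.<resource_type>.<operation>', the caller's payload, and INFO
# priority. A tiny sketch of composing such an event type; the helper name is
# illustrative, and only the 'security' service prefix comes from the test:
def build_event_type(service, resource_type, operation):
    return '%s.%s.%s' % (service, resource_type, operation)


assert (build_event_type('security', 'project', 'created') ==
        'security.project.created')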
-# -"""Utilities and helper functions.""" -"""Tests for cerberus/utils.py -""" -import datetime -import decimal - -from oslotest import base - -from cerberus import utils - - -class TestUtils(base.BaseTestCase): - - def test_datetime_to_decimal(self): - expected = 1356093296.12 - utc_datetime = datetime.datetime.utcfromtimestamp(expected) - actual = utils.dt_to_decimal(utc_datetime) - self.assertAlmostEqual(expected, float(actual), places=5) - - def test_decimal_to_datetime(self): - expected = 1356093296.12 - dexpected = decimal.Decimal(str(expected)) # Python 2.6 wants str() - expected_datetime = datetime.datetime.utcfromtimestamp(expected) - actual_datetime = utils.decimal_to_dt(dexpected) - # Python 3 have rounding issue on this, so use float - self.assertAlmostEqual(utils.dt_to_decimal(expected_datetime), - utils.dt_to_decimal(actual_datetime), - places=5) - - def test_restore_nesting_unested(self): - metadata = {'a': 'A', 'b': 'B'} - unwound = utils.restore_nesting(metadata) - self.assertIs(metadata, unwound) - - def test_restore_nesting(self): - metadata = {'a': 'A', 'b': 'B', - 'nested:a': 'A', - 'nested:b': 'B', - 'nested:twice:c': 'C', - 'nested:twice:d': 'D', - 'embedded:e': 'E'} - unwound = utils.restore_nesting(metadata) - expected = {'a': 'A', 'b': 'B', - 'nested': {'a': 'A', 'b': 'B', - 'twice': {'c': 'C', 'd': 'D'}}, - 'embedded': {'e': 'E'}} - self.assertEqual(expected, unwound) - self.assertIsNot(metadata, unwound) - - def test_restore_nesting_with_separator(self): - metadata = {'a': 'A', 'b': 'B', - 'nested.a': 'A', - 'nested.b': 'B', - 'nested.twice.c': 'C', - 'nested.twice.d': 'D', - 'embedded.e': 'E'} - unwound = utils.restore_nesting(metadata, separator='.') - expected = {'a': 'A', 'b': 'B', - 'nested': {'a': 'A', 'b': 'B', - 'twice': {'c': 'C', 'd': 'D'}}, - 'embedded': {'e': 'E'}} - self.assertEqual(expected, unwound) - self.assertIsNot(metadata, unwound) - - def test_decimal_to_dt_with_none_parameter(self): - self.assertIsNone(utils.decimal_to_dt(None)) - - def test_dict_to_kv(self): - data = {'a': 'A', - 'b': 'B', - 'nested': {'a': 'A', - 'b': 'B', - }, - 'nested2': [{'c': 'A'}, {'c': 'B'}] - } - pairs = list(utils.dict_to_keyval(data)) - self.assertEqual([('a', 'A'), - ('b', 'B'), - ('nested.a', 'A'), - ('nested.b', 'B'), - ('nested2[0].c', 'A'), - ('nested2[1].c', 'B')], - sorted(pairs, key=lambda x: x[0])) diff --git a/cerberus/utils.py b/cerberus/utils.py deleted file mode 100644 index 97b4092..0000000 --- a/cerberus/utils.py +++ /dev/null @@ -1,163 +0,0 @@ -# -# Copyright (c) 2014 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Utilities and helper functions.""" - -import calendar -import copy -import datetime -import decimal -import multiprocessing - -from oslo.utils import timeutils -from oslo.utils import units - - -def restore_nesting(d, separator=':'): - """Unwinds a flattened dict to restore nesting. 
- """ - d = copy.copy(d) if any([separator in k for k in d.keys()]) else d - for k, v in d.items(): - if separator in k: - top, rem = k.split(separator, 1) - nest = d[top] if isinstance(d.get(top), dict) else {} - nest[rem] = v - d[top] = restore_nesting(nest, separator) - del d[k] - return d - - -def dt_to_decimal(utc): - """Datetime to Decimal. - - Some databases don't store microseconds in datetime - so we always store as Decimal unixtime. - """ - if utc is None: - return None - - decimal.getcontext().prec = 30 - return decimal.Decimal(str(calendar.timegm(utc.utctimetuple()))) + \ - (decimal.Decimal(str(utc.microsecond)) / - decimal.Decimal("1000000.0")) - - -def decimal_to_dt(dec): - """Return a datetime from Decimal unixtime format. - """ - if dec is None: - return None - - integer = int(dec) - micro = (dec - decimal.Decimal(integer)) * decimal.Decimal(units.M) - daittyme = datetime.datetime.utcfromtimestamp(integer) - return daittyme.replace(microsecond=int(round(micro))) - - -def sanitize_timestamp(timestamp): - """Return a naive utc datetime object.""" - if not timestamp: - return timestamp - if not isinstance(timestamp, datetime.datetime): - timestamp = timeutils.parse_isotime(timestamp) - return timeutils.normalize_time(timestamp) - - -def stringify_timestamps(data): - """Stringify any datetimes in given dict.""" - isa_timestamp = lambda v: isinstance(v, datetime.datetime) - return dict((k, v.isoformat() if isa_timestamp(v) else v) - for (k, v) in data.iteritems()) - - -def dict_to_keyval(value, key_base=None): - """Expand a given dict to its corresponding key-value pairs. - - Generated keys are fully qualified, delimited using dot notation. - ie. key = 'key.child_key.grandchild_key[0]' - """ - val_iter, key_func = None, None - if isinstance(value, dict): - val_iter = value.iteritems() - key_func = lambda k: key_base + '.' + k if key_base else k - elif isinstance(value, (tuple, list)): - val_iter = enumerate(value) - key_func = lambda k: key_base + '[%d]' % k - - if val_iter: - for k, v in val_iter: - key_gen = key_func(k) - if isinstance(v, dict) or isinstance(v, (tuple, list)): - for key_gen, v in dict_to_keyval(v, key_gen): - yield key_gen, v - else: - yield key_gen, v - - -def lowercase_keys(mapping): - """Converts the values of the keys in mapping to lowercase.""" - items = mapping.items() - for key, value in items: - del mapping[key] - mapping[key.lower()] = value - - -def lowercase_values(mapping): - """Converts the values in the mapping dict to lowercase.""" - items = mapping.items() - for key, value in items: - mapping[key] = value.lower() - - -def update_nested(original_dict, updates): - """Updates the leaf nodes in a nest dict, without replacing - entire sub-dicts. - """ - dict_to_update = copy.deepcopy(original_dict) - for key, value in updates.iteritems(): - if isinstance(value, dict): - sub_dict = update_nested(dict_to_update.get(key, {}), value) - dict_to_update[key] = sub_dict - else: - dict_to_update[key] = updates[key] - return dict_to_update - - -def cpu_count(): - try: - return multiprocessing.cpu_count() or 1 - except NotImplementedError: - return 1 - - -def uniq(dupes, attrs): - """Exclude elements of dupes with a duplicated set of attribute values.""" - key = lambda d: '/'.join([getattr(d, a) or '' for a in attrs]) - keys = [] - deduped = [] - for d in dupes: - if key(d) not in keys: - deduped.append(d) - keys.append(key(d)) - return deduped - - -def create_datetime_obj(date): - """ - '20150109T10:53:50' - :param date: The date to build a datetime object. 
Format: 20150109T10:53:50 - :return: a datetime object - """ - return datetime.datetime.strptime(date, '%Y%m%dT%H:%M:%S') diff --git a/cerberus/version.py b/cerberus/version.py deleted file mode 100644 index b5322f9..0000000 --- a/cerberus/version.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (c) 2015 EUROGICIEL -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import pbr.version - -version_info = pbr.version.VersionInfo('cerberus') diff --git a/contrib/devstack/README.rst b/contrib/devstack/README.rst deleted file mode 100644 index e69de29..0000000 diff --git a/contrib/devstack/extras.d/50-cerberus.sh b/contrib/devstack/extras.d/50-cerberus.sh deleted file mode 100644 index be9b8fc..0000000 --- a/contrib/devstack/extras.d/50-cerberus.sh +++ /dev/null @@ -1,39 +0,0 @@ -# cerberus.sh - Devstack extras script to install Cerberus - -if is_service_enabled cerberus-api cerberus-agent; then - if [[ "$1" == "source" ]]; then - # Initial source - source $TOP_DIR/lib/cerberus - elif [[ "$1" == "stack" && "$2" == "install" ]]; then - echo_summary "Installing Cerberus" - install_cerberus - install_cerberusclient - - if is_service_enabled cerberus-dashboard; then - install_cerberusdashboard - fi - cleanup_cerberus - elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then - echo_summary "Configuring Cerberus" - configure_cerberus - if is_service_enabled cerberus-dashboard; then - configure_cerberusdashboard - fi - if is_service_enabled key; then - create_cerberus_accounts - fi - - elif [[ "$1" == "stack" && "$2" == "extra" ]]; then - # Initialize cerberus - echo_summary "Initializing Cerberus" - init_cerberus - - # Start the Cerberus API and Cerberus agent components - echo_summary "Starting Cerberus" - start_cerberus - fi - - if [[ "$1" == "unstack" ]]; then - stop_cerberus - fi -fi diff --git a/contrib/devstack/lib/cerberus b/contrib/devstack/lib/cerberus deleted file mode 100644 index 6dc2b96..0000000 --- a/contrib/devstack/lib/cerberus +++ /dev/null @@ -1,231 +0,0 @@ -# lib/cerberus -# Install and start **Cerberus** service - -# To enable a minimal set of Cerberus services: -# - add the following to localrc: -# -# enable_service cerberus-api cerberus-agent -# -# Dependencies: -# - functions -# - OS_AUTH_URL for auth in api -# - DEST, HORIZON_DIR, DATA_DIR set to the destination directory -# - SERVICE_PASSWORD, SERVICE_TENANT_NAME for auth in api -# - IDENTITY_API_VERSION for the version of Keystone -# - STACK_USER service user - -# stack.sh -# --------- -# install_cerberus -# install_cerberusclient -# configure_cerberus -# init_cerberus -# start_cerberus -# stop_cerberus -# cleanup_cerberus - -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set +o xtrace - - -# Defaults -# -------- - -# Set up default directories -CERBERUS_DIR=$DEST/cerberus -CERBERUS_CONF_DIR=/etc/cerberus -CERBERUS_CONF=$CERBERUS_CONF_DIR/cerberus.conf -CERBERUS_POLICY=$CERBERUS_CONF_DIR/policy.json -CERBERUS_API_LOG_DIR=/var/log/cerberus -CERBERUS_AUTH_CACHE_DIR=${CERBERUS_AUTH_CACHE_DIR:-/var/cache/cerberus} 
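# The defaults above all use the standard shell idiom VAR=${VAR:-default}:
# keep whatever the operator already exported (e.g. from localrc), otherwise
# fall back to the built-in default. A self-contained illustration:
CERBERUS_SERVICE_PORT=${CERBERUS_SERVICE_PORT:-8300}
echo "Cerberus API will listen on port ${CERBERUS_SERVICE_PORT}"
# Run with CERBERUS_SERVICE_PORT=9000 in the environment to print 9000;
# run with it unset to get the 8300 fallback.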
-CERBERUS_REPORTS_DIR=${DATA_DIR}/cerberus/reports -CERBERUS_CLIENT_DIR=$DEST/python-cerberusclient -CERBERUS_DASHBOARD_DIR=$DEST/cerberus-dashboard - -# Support potential entry-points console scripts -if [[ -d $CERBERUS_DIR/bin ]]; then - CERBERUS_BIN_DIR=$CERBERUS_DIR/bin -else - CERBERUS_BIN_DIR=$(get_python_exec_prefix) -fi - -# Set up database backend -CERBERUS_BACKEND=${CERBERUS_BACKEND:-sqlite} - -# Set cerberus repository -CERBERUS_REPO=${CERBERUS_REPO:-https://github.com/openstack/cerberus.git} -CERBERUS_BRANCH=${CERBERUS_BRANCH:-master} -CERBERUS_CLIENT_REPO=${CERBERUS_CLIENT_REPO:-https://github.com/openstack/python-cerberusclient.git} -CERBERUS_CLIENT_BRANCH=${CERBERUS_CLIENT_BRANCH:-master} -CERBERUS_DASHBOARD_REPO=${CERBERUS_DASHBOARD_REPO:-https://github.com/openstack/cerberus-dashboard.git} -CERBERUS_DASHBOARD_BRANCH=${CERBERUS_DASHBOARD_BRANCH:-master} - -# Set Cerberus connection info -CERBERUS_SERVICE_HOST=${CERBERUS_SERVICE_HOST:-$SERVICE_HOST} -CERBERUS_SERVICE_PORT=${CERBERUS_SERVICE_PORT:-8300} -CERBERUS_SERVICE_HOSTPORT="$CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT" -CERBERUS_SERVICE_PROTOCOL=${CERBERUS_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL} - -# Set Cerberus auth info -CERBERUS_ADMIN_USER=${CERBERUS_ADMIN_USER:-"admin"} -CERBERUS_ADMIN_PASSWORD=${CERBERUS_ADMIN_PASSWORD:-$ADMIN_PASSWORD} -CERBERUS_ADMIN_TENANT=${CERBERUS_ADMIN_TENANT:-"admin"} - -# Tell Tempest this project is present -TEMPEST_SERVICES+=,cerberus - - -# Functions -# --------- - -# create_cerberus_accounts() - Set up common required cerberus accounts - -# Tenant User Roles -# ------------------------------------------------------------------ -# service cerberus admin # if enabled -function create_cerberus_accounts { - - SERVICE_TENANT=$(openstack project list | awk "/ $SERVICE_TENANT_NAME / { print \$2 }") - ADMIN_ROLE=$(openstack role list | awk "/ admin / { print \$2 }") - - # Cerberus - if [[ "$ENABLED_SERVICES" =~ "cerberus-api" ]]; then - CERBERUS_USER=$(openstack user create \ - cerberus \ - --password "$SERVICE_PASSWORD" \ - --project $SERVICE_TENANT \ - --email cerberus@example.com \ - | grep " id " | get_field 2) - openstack role add \ - $ADMIN_ROLE \ - --project $SERVICE_TENANT \ - --user $CERBERUS_USER - if [[ "$KEYSTONE_CATALOG_BACKEND" = 'sql' ]]; then - CERBERUS_SERVICE=$(openstack service create \ - cerberus \ - --type=security \ - --description="Security service" \ - | grep " id " | get_field 2) - openstack endpoint create \ - $CERBERUS_SERVICE \ - --region RegionOne \ - --publicurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" \ - --adminurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" \ - --internalurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" - fi - fi -} - - -# Test if any Cerberus services are enabled -# is_cerberus_enabled -function is_cerberus_enabled { - [[ ,${ENABLED_SERVICES} =~ ,"cerberus-" ]] && return 0 - return 1 -} - -# cleanup_cerberus() - Remove residual data files, anything left over from previous -# runs that a clean run would need to clean up -function cleanup_cerberus { - # Clean up dirs - rm -rf $CERBERUS_AUTH_CACHE_DIR/* - rm -rf $CERBERUS_CONF_DIR/* - if [[ "$ENABLED_SERVICES" =~ "cerberus-dashboard" ]]; then - rm -f $HORIZON_DIR/openstack_dashboard/local/enabled/_50_cerberus.py - fi -} - -# configure_cerberus() - Set config files, create data dirs, etc -function configure_cerberus { - setup_develop $CERBERUS_DIR - - sudo mkdir -m 755 -p $CERBERUS_CONF_DIR - sudo chown $STACK_USER $CERBERUS_CONF_DIR - - sudo mkdir -m 755 
-p $CERBERUS_API_LOG_DIR - sudo chown $STACK_USER $CERBERUS_API_LOG_DIR - - cp $CERBERUS_DIR$CERBERUS_CONF.sample $CERBERUS_CONF - cp $CERBERUS_DIR$CERBERUS_POLICY $CERBERUS_POLICY - - # Default - iniset $CERBERUS_CONF DEFAULT verbose True - iniset $CERBERUS_CONF DEFAULT debug "$ENABLE_DEBUG_LOG_LEVEL" - iniset $CERBERUS_CONF DEFAULT sql_connection `database_connection_url cerberus` - - # auth - iniset $CERBERUS_CONF keystone_authtoken auth_uri "$KEYSTONE_SERVICE_PROTOCOL://$KEYSTONE_SERVICE_HOST:5000/v2.0/" - iniset $CERBERUS_CONF keystone_authtoken admin_user cerberus - iniset $CERBERUS_CONF keystone_authtoken admin_password $SERVICE_PASSWORD - iniset $CERBERUS_CONF keystone_authtoken admin_tenant_name $SERVICE_TENANT_NAME - iniset $CERBERUS_CONF keystone_authtoken region $REGION_NAME - iniset $CERBERUS_CONF keystone_authtoken auth_host $KEYSTONE_AUTH_HOST - iniset $CERBERUS_CONF keystone_authtoken auth_protocol $KEYSTONE_AUTH_PROTOCOL - iniset $CERBERUS_CONF keystone_authtoken auth_port $KEYSTONE_AUTH_PORT - iniset $CERBERUS_CONF keystone_authtoken signing_dir $CERBERUS_AUTH_CACHE_DIR -} - -# configure_cerberusdashboard() -function configure_cerberusdashboard { - ln -s $CERBERUS_DASHBOARD_DIR/_cerberus.py.example $HORIZON_DIR/openstack_dashboard/local/enabled/_50_cerberus.py -} - -# init_cerberus() - Initialize Cerberus database -function init_cerberus { - # Delete existing cache - sudo rm -rf $CERBERUS_AUTH_CACHE_DIR - sudo mkdir -p $CERBERUS_AUTH_CACHE_DIR - sudo chown $STACK_USER $CERBERUS_AUTH_CACHE_DIR - - # (Re)create cerberus database - if is_service_enabled mysql postgresql; then - recreate_database cerberus utf8 - $CERBERUS_BIN_DIR/cerberus-dbsync upgrade - fi # Migrate cerberus database -} - -# install_cerberus() - Collect source and prepare -function install_cerberus { - git_clone $CERBERUS_REPO $CERBERUS_DIR $CERBERUS_BRANCH - setup_develop $CERBERUS_DIR -} - -# install_cerberusclient() - Collect source and prepare -function install_cerberusclient { - git_clone $CERBERUS_CLIENT_REPO $CERBERUS_CLIENT_DIR $CERBERUS_CLIENT_BRANCH - setup_develop $CERBERUS_CLIENT_DIR -} - -# install_cerberusdashboard() - Collect source and prepare -function install_cerberusdashboard { - git_clone $CERBERUS_DASHBOARD_REPO $CERBERUS_DASHBOARD_DIR $CERBERUS_DASHBOARD_BRANCH - setup_develop $CERBERUS_DASHBOARD_DIR -} - - -# start_cerberus() - Start running processes, including screen -function start_cerberus { - screen_it cerberus-agent "cd $CERBERUS_DIR; $CERBERUS_BIN_DIR/cerberus-agent --config-file=$CERBERUS_CONF" - screen_it cerberus-api "cd $CERBERUS_DIR; $CERBERUS_BIN_DIR/cerberus-api --config-file=$CERBERUS_CONF" - echo "Waiting for cerberus-api ($CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT) to start..." - if ! timeout $SERVICE_TIMEOUT sh -c "while ! curl --noproxy '*' -s http://$CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT/v1/ >/dev/null; do sleep 1; done"; then - die $LINENO "cerberus-api did not start" - fi -} - -# stop_cerberus() - Stop running processes -function stop_cerberus { - # Kill the cerberus screen windows - for serv in cerberus-api cerberus-agent; do - screen_stop $serv - done -} - - -# Restore xtrace -$XTRACE - -# Local variables: -# mode: shell-script -# End: diff --git a/devstack/README.rst b/devstack/README.rst deleted file mode 100644 index cbb37c4..0000000 --- a/devstack/README.rst +++ /dev/null @@ -1,15 +0,0 @@ -============================= -Enabling cerberus in DevStack -============================= - -1. 
Download Devstack:: - - git clone https://git.openstack.org/openstack-dev/devstack - cd devstack - -2. Add this repo as an external repository into your ``local.conf`` file:: - - [[local|localrc]] - enable_plugin cerberus https://git.openstack.org/openstack/cerberus - -3. Run ``stack.sh``. diff --git a/devstack/plugin.sh b/devstack/plugin.sh deleted file mode 100755 index d45563f..0000000 --- a/devstack/plugin.sh +++ /dev/null @@ -1,223 +0,0 @@ -#!/usr/bin/env bash -# Plugin file for Cerberus security component -#-------------------------------------------- -# Install and start **Cerberus** service - - -# Dependencies: -# - functions -# - OS_AUTH_URL for auth in api -# - DEST, HORIZON_DIR, DATA_DIR set to the destination directory -# - SERVICE_PASSWORD, SERVICE_TENANT_NAME for auth in api -# - IDENTITY_API_VERSION for the version of Keystone -# - STACK_USER service user - - -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set +o xtrace - - -# Defaults -# -------- - -# Support potential entry-points console scripts -if [[ -d $CERBERUS_DIR/bin ]]; then - CERBERUS_BIN_DIR=$CERBERUS_DIR/bin -else - CERBERUS_BIN_DIR=$(get_python_exec_prefix) -fi - - -# Functions -# --------- - -# create_cerberus_accounts() - Set up common required cerberus accounts - -# Tenant User Roles -# ------------------------------------------------------------------ -# service cerberus admin # if enabled -function create_cerberus_accounts { - - SERVICE_TENANT=$(openstack project list | awk "/ $SERVICE_TENANT_NAME / { print \$2 }") - ADMIN_ROLE=$(openstack role list | awk "/ admin / { print \$2 }") - - # Cerberus - if [[ "$ENABLED_SERVICES" =~ "cerberus-api" ]]; then - CERBERUS_USER=$(openstack user create \ - cerberus \ - --password "$SERVICE_PASSWORD" \ - --project $SERVICE_TENANT \ - --email cerberus@example.com \ - | grep " id " | get_field 2) - openstack role add \ - $ADMIN_ROLE \ - --project $SERVICE_TENANT \ - --user $CERBERUS_USER - if [[ "$KEYSTONE_CATALOG_BACKEND" = 'sql' ]]; then - CERBERUS_SERVICE=$(openstack service create \ - cerberus \ - --type=security \ - --description="Security service" \ - | grep " id " | get_field 2) - openstack endpoint create \ - $CERBERUS_SERVICE \ - --region RegionOne \ - --publicurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" \ - --adminurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" \ - --internalurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" - fi - fi -} - - -# Test if any Cerberus services are enabled -# is_cerberus_enabled -function is_cerberus_enabled { - [[ ,${ENABLED_SERVICES} =~ ,"cerberus-" ]] && return 0 - return 1 -} - -# cleanup_cerberus() - Remove residual data files, anything left over from previous -# runs that a clean run would need to clean up -function cleanup_cerberus { - # Clean up dirs - rm -rf $CERBERUS_AUTH_CACHE_DIR/* - rm -rf $CERBERUS_CONF_DIR/* - if [[ "$ENABLED_SERVICES" =~ "cerberus-dashboard" ]]; then - rm -f $HORIZON_DIR/openstack_dashboard/local/enabled/_50_cerberus.py - fi -} - -# configure_cerberus() - Set config files, create data dirs, etc -function configure_cerberus { - setup_develop $CERBERUS_DIR - - sudo mkdir -m 755 -p $CERBERUS_CONF_DIR - sudo chown $STACK_USER $CERBERUS_CONF_DIR - - sudo mkdir -m 755 -p $CERBERUS_API_LOG_DIR - sudo chown $STACK_USER $CERBERUS_API_LOG_DIR - - cp $CERBERUS_DIR$CERBERUS_CONF.sample $CERBERUS_CONF - cp $CERBERUS_DIR$CERBERUS_POLICY $CERBERUS_POLICY - - # Default - iniset $CERBERUS_CONF DEFAULT verbose True - iniset $CERBERUS_CONF DEFAULT debug 
"$ENABLE_DEBUG_LOG_LEVEL" - iniset $CERBERUS_CONF DEFAULT sql_connection `database_connection_url cerberus` - - # auth - iniset $CERBERUS_CONF keystone_authtoken auth_uri "$KEYSTONE_SERVICE_PROTOCOL://$KEYSTONE_SERVICE_HOST:5000/v2.0/" - iniset $CERBERUS_CONF keystone_authtoken admin_user cerberus - iniset $CERBERUS_CONF keystone_authtoken admin_password $SERVICE_PASSWORD - iniset $CERBERUS_CONF keystone_authtoken admin_tenant_name $SERVICE_TENANT_NAME - iniset $CERBERUS_CONF keystone_authtoken region $REGION_NAME - iniset $CERBERUS_CONF keystone_authtoken auth_host $KEYSTONE_AUTH_HOST - iniset $CERBERUS_CONF keystone_authtoken auth_protocol $KEYSTONE_AUTH_PROTOCOL - iniset $CERBERUS_CONF keystone_authtoken auth_port $KEYSTONE_AUTH_PORT - iniset $CERBERUS_CONF keystone_authtoken signing_dir $CERBERUS_AUTH_CACHE_DIR -} - -# configure_cerberusdashboard() -function configure_cerberusdashboard { - ln -s $CERBERUS_DASHBOARD_DIR/_cerberus.py.example $HORIZON_DIR/openstack_dashboard/local/enabled/_50_cerberus.py -} - -# init_cerberus() - Initialize Cerberus database -function init_cerberus { - # Delete existing cache - sudo rm -rf $CERBERUS_AUTH_CACHE_DIR - sudo mkdir -p $CERBERUS_AUTH_CACHE_DIR - sudo chown $STACK_USER $CERBERUS_AUTH_CACHE_DIR - - # (Re)create cerberus database - if is_service_enabled mysql postgresql; then - recreate_database cerberus utf8 - $CERBERUS_BIN_DIR/cerberus-dbsync upgrade - fi # Migrate cerberus database -} - -# install_cerberus() - Collect source and prepare -function install_cerberus { - git_clone $CERBERUS_REPO $CERBERUS_DIR $CERBERUS_BRANCH - setup_develop $CERBERUS_DIR -} - -# install_cerberusclient() - Collect source and prepare -function install_cerberusclient { - git_clone $CERBERUS_CLIENT_REPO $CERBERUS_CLIENT_DIR $CERBERUS_CLIENT_BRANCH - setup_develop $CERBERUS_CLIENT_DIR -} - -# install_cerberusdashboard() - Collect source and prepare -function install_cerberusdashboard { - git_clone $CERBERUS_DASHBOARD_REPO $CERBERUS_DASHBOARD_DIR $CERBERUS_DASHBOARD_BRANCH - setup_develop $CERBERUS_DASHBOARD_DIR -} - - -# start_cerberus() - Start running processes, including screen -function start_cerberus { - screen_it cerberus-agent "cd $CERBERUS_DIR; $CERBERUS_BIN_DIR/cerberus-agent --config-file=$CERBERUS_CONF" - screen_it cerberus-api "cd $CERBERUS_DIR; $CERBERUS_BIN_DIR/cerberus-api --config-file=$CERBERUS_CONF" - echo "Waiting for cerberus-api ($CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT) to start..." - if ! timeout $SERVICE_TIMEOUT sh -c "while ! 
curl --noproxy '*' -s http://$CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT/v1/ >/dev/null; do sleep 1; done"; then - die $LINENO "cerberus-api did not start" - fi -} - -# stop_cerberus() - Stop running processes -function stop_cerberus { - # Kill the cerberus screen windows - for serv in cerberus-api cerberus-agent; do - screen_stop $serv - done -} - - -# Main dispatcher -# ---------------- - -if is_service_enabled cerberus-api cerberus-agent; then - if [[ "$1" == "stack" && "$2" == "install" ]]; then - echo_summary "Installing Cerberus" - install_cerberus - install_cerberusclient - - if is_service_enabled cerberus-dashboard; then - install_cerberusdashboard - fi - cleanup_cerberus - elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then - echo_summary "Configuring Cerberus" - configure_cerberus - if is_service_enabled cerberus-dashboard; then - configure_cerberusdashboard - fi - if is_service_enabled keystone; then - create_cerberus_accounts - fi - - elif [[ "$1" == "stack" && "$2" == "extra" ]]; then - # Initialize cerberus - echo_summary "Initializing Cerberus" - init_cerberus - - # Start the Cerberus API and Cerberus agent components - echo_summary "Starting Cerberus" - start_cerberus - fi - - if [[ "$1" == "unstack" ]]; then - stop_cerberus - fi -fi - - -# Restore xtrace -$XTRACE - -# Local variables: -# mode: shell-script -# End: diff --git a/devstack/settings b/devstack/settings deleted file mode 100644 index 5830ad9..0000000 --- a/devstack/settings +++ /dev/null @@ -1,40 +0,0 @@ -# Devstack settings -#-------------------- - -# Set up default directories -CERBERUS_DIR=$DEST/cerberus -CERBERUS_CONF_DIR=/etc/cerberus -CERBERUS_CONF=$CERBERUS_CONF_DIR/cerberus.conf -CERBERUS_POLICY=$CERBERUS_CONF_DIR/policy.json -CERBERUS_API_LOG_DIR=/var/log/cerberus -CERBERUS_AUTH_CACHE_DIR=${CERBERUS_AUTH_CACHE_DIR:-/var/cache/cerberus} -CERBERUS_REPORTS_DIR=${DATA_DIR}/cerberus/reports -CERBERUS_CLIENT_DIR=$DEST/python-cerberusclient -CERBERUS_DASHBOARD_DIR=$DEST/cerberus-dashboard - -# Set up database backend -CERBERUS_BACKEND=${CERBERUS_BACKEND:-sqlite} - -# Set cerberus repository -CERBERUS_REPO=${CERBERUS_REPO:-https://github.com/openstack/cerberus.git} -CERBERUS_BRANCH=${CERBERUS_BRANCH:-master} -CERBERUS_CLIENT_REPO=${CERBERUS_CLIENT_REPO:-https://github.com/openstack/python-cerberusclient.git} -CERBERUS_CLIENT_BRANCH=${CERBERUS_CLIENT_BRANCH:-master} -CERBERUS_DASHBOARD_REPO=${CERBERUS_DASHBOARD_REPO:-https://github.com/openstack/cerberus-dashboard.git} -CERBERUS_DASHBOARD_BRANCH=${CERBERUS_DASHBOARD_BRANCH:-master} - -# Set Cerberus connection info -CERBERUS_SERVICE_HOST=${CERBERUS_SERVICE_HOST:-$SERVICE_HOST} -CERBERUS_SERVICE_PORT=${CERBERUS_SERVICE_PORT:-8300} -CERBERUS_SERVICE_HOSTPORT="$CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT" -CERBERUS_SERVICE_PROTOCOL=${CERBERUS_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL} - -# Set Cerberus auth info -CERBERUS_ADMIN_USER=${CERBERUS_ADMIN_USER:-"admin"} -CERBERUS_ADMIN_PASSWORD=${CERBERUS_ADMIN_PASSWORD:-$ADMIN_PASSWORD} -CERBERUS_ADMIN_TENANT=${CERBERUS_ADMIN_TENANT:-"admin"} - -# Tell Tempest this project is present -TEMPEST_SERVICES+=,cerberus - -enable_service cerberus-api cerberus-agent diff --git a/doc/.gitignore b/doc/.gitignore deleted file mode 100644 index 378eac2..0000000 --- a/doc/.gitignore +++ /dev/null @@ -1 +0,0 @@ -build diff --git a/doc/Makefile b/doc/Makefile deleted file mode 100644 index 12965c4..0000000 --- a/doc/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from 
the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make <target>' where <target> is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp."
- -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/cerberus.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/cerberus.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/cerberus" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/cerberus" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/doc/source/arch.rst b/doc/source/arch.rst deleted file mode 100644 index 44a44da..0000000 --- a/doc/source/arch.rst +++ /dev/null @@ -1,123 +0,0 @@ -====================== -Cerberus' Architecture -====================== - -Cerberus can be split into two main parts: - -* API -* Manager - - -.. figure:: ./cerberus-arch.png - :width: 100% - :align: center - :alt: Architecture summary - - -Cerberus' API -============= -The API is a REST server, documented later. - - -Cerberus' manager -================= -Cerberus is easy to extend thanks to a plugin system. - -The manager has three main functions: - -- it loads ``plugins`` -- it manages ``tasks`` -- it stores ``security reports`` and ``security alarms`` in the database - - -Plugins -======= - -Plugins are created to communicate with a particular security component. -They are defined by their: - -- unique identifier (uuid) -- name -- version -- provider -- type (scanner, SIEM...) - -Plugins can subscribe to events sent on the notification topic Cerberus' -manager listens on. For example, this can be useful to automatically configure -a tool if a project has been created or if a certain role is granted to a user. -Plugins may also implement some functions that the manager calls through -``tasks`` (e.g. run a scan once a day, retrieve security reports once -a week...) - - -Tasks -===== -Cerberus manages tasks. -In order to create a task, you need to call the Cerberus API, passing the -following information: - -- The name of the task -- The plugin uuid handling the task -- The method to call on this plugin -- The type (periodic or not, default is not) -- The period if the task is periodic (for now, period is in seconds only) -- Persistent (True/False, conditional): tell Cerberus you want this task to be -stored in the database (useful if the manager handling the task is shut down) - -The periodic tasks may be stopped and started. As such, they have a state -(running or not). The tasks that are not periodic may be stopped, but they -cannot be started again afterwards. An example request is sketched below, -after the Security alarms section. - - -Security reports -================ -Cerberus stores security reports provided by the security components. -These security reports have a predefined schema and Cerberus stores the -following information: - -- The uuid of the security report -- The uuid of the plugin -- The report identifier -- The OpenStack component identifier (e.g. an instance id, a network id) -- The component type (e.g. instance, network) -- The component name -- The OpenStack project identifier -- The ticket identifier (see `sticks`_) -- The title -- The description -- The security rating -- The vulnerabilities -- The number of vulnerabilities -- The date of the last report - -Security reports may be retrieved by their uuid. - -.. _sticks: http://sticks-project.readthedocs.org/en/latest/index.html - - -Security alarms -=============== -Cerberus stores security alarms provided by security components such as -SIEMs. -These security alarms have a predefined schema and Cerberus stores the -following information: - -- The uuid of the alarm -- The uuid of the plugin -- The alarm identifier -- The OpenStack component identifier (e.g. an instance id, a network id) -- The OpenStack project identifier -- The ticket identifier (see `sticks`_) -- The timestamp (date when the notification was received on the oslo bus) -- The summary -- The severity -- The status (e.g. new) -- The description - -Security alarms may be retrieved by their uuid.
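As promised in the Tasks section above, here is a hedged sketch of a task-creation request. The ``/v1/tasks`` path and the payload keys are assumptions inferred from the field list and from the layout of the deleted ``cerberus/api/v1/controllers/tasks.py``, not a verbatim record of the retired API; the port is the devstack default, and the token and uuid values are placeholders::

    import json

    import requests  # any HTTP client would do

    CERBERUS_ENDPOINT = 'http://localhost:8300'  # default port from the devstack scripts
    TOKEN = '<keystone-token>'                   # obtained from Keystone beforehand

    task = {
        'name': 'daily_scan',          # the name of the task
        'plugin_id': '<plugin-uuid>',  # the plugin uuid handling the task (assumed key)
        'method': 'run_scan',          # the method to call on this plugin
        'type': 'recurrent',           # assumed marker for a periodic task
        'period': 86400,               # seconds, i.e. once a day
        'persistent': 'True',          # keep the task across manager restarts
    }

    resp = requests.post(
        CERBERUS_ENDPOINT + '/v1/tasks',
        headers={'X-Auth-Token': TOKEN, 'Content-Type': 'application/json'},
        data=json.dumps(task),
    )
    print(resp.status_code, resp.text)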
- - -Module loading and extensions -============================= - -Cerberus' manager makes use of stevedore to load extensions dynamically. \ No newline at end of file diff --git a/doc/source/cerberus-arch.png b/doc/source/cerberus-arch.png deleted file mode 100755 index 48dd8b96837ef0bbd4278779beb4f274896f2645..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 69241 [69,241 bytes of base85-encoded binary data for the deleted architecture diagram cerberus-arch.png omitted]
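The stevedore-based loading mentioned in the Module loading and extensions section above can be sketched in a few lines. The ``cerberus.plugins`` entry-point namespace is an assumed name for illustration (the real group would have been declared in the project's ``setup.cfg``)::

    from stevedore import extension


    def load_plugins():
        # Discover and instantiate everything registered under the
        # (assumed) 'cerberus.plugins' entry-point namespace.
        mgr = extension.ExtensionManager(
            namespace='cerberus.plugins',
            invoke_on_load=True,
        )
        # Each extension exposes .name (the entry-point name) and
        # .obj (the instantiated plugin).
        return {ext.name: ext.obj for ext in mgr}


    for name, plugin in load_plugins().items():
        print(name, type(plugin))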
z?T@m=YE4G4P^!v_JD+N~NHl9*iM^hAZ*`TCMl^qz4k}yymOdj^gihDot#znY{VSy= zV;4{|#Kl4SSXU6uZ(-_~cU+INjw?wn;MmiMu~cDTU}($O)-8Z|jBuI0$c$$DkPFTm z%pCacP8JLVz4{QI{0E04U@b7uPgM6I7X{#uz9$?W6LUX2Ukrj>N}9m9X629fR<{4D z$jD&*p}wq^T4%n&EgA&s>Me4$Mw&3!_-2y-qG9sy2K~Q3gMU9)Id1r3;m9l?27bbc zNuR{_(ip8*BH8|5G*eE_MtN{(XxpsTt!k{cQE^$Vd*e=4=DIaw&xWI25_K#^C6X*J zsoV>VYNTYHi+P6g?75!B)9;OCMaIuA>x*1|W)P)ANMNgeLFUyEzRVMC&-mbVtsVJV zjT3o0>F&NC5`!CO@n_TWwL7*p(tQ0prxh8wg~51+CD)>tt-t*L{SUU@p}XCtfQ(I{ z#yM^UJ6(alB;KxN`+-$_E=SKy$L|0B2h4{7J$1pcVW2zkh2OjCOSXGjLwgRaK;^FZ zBVW@V@rOG-vtQ%jBz%X9>4}Qj1m?5$AZB1O$iUt6O>qE@eUC2C?ta+#HhuzhUmBa3 z0NwUgewYN5suP4oL3rX0UF&y_GcuzyC3@d`{DOXjo4ied%LC9*>$^i{$;E)6FA()V`-gbMrR)(p%FlF z>4hI?gZyx(jaBq9m`pJNx={5KBvc+u{{Q?(MbxwXbT53L{F&*l%^WuZ_|?u}$pObw z?KU?e<;p*1K!BcPt_{dUsFh?F7KYfhkTYKYvko6~bJ0-%Fy&}rOJJ%eT=vEZQOTa2 zHE-mN&WyqX!Md{ly{=!^)_{2hOza+{>%d5Zc z{r`0~zOG3zq2|VTD5EeD5T+{n(F6_HTp6Tr)AHgbG2=ZD(;PJU+F*(lCjH|C(E?wPPuEvnNn00U7$=qYe26Qw;BXFq#Y1zbNFtk=4i0W-nV)rz2#g@n_gcX-jePc_muR z0FP)JQt8X#GR6~b|0^{2In_2_CJQZXSeQsVuXLM)?@fx-gpPm+JKktL*)V1Y9#sP; zwk#SqVV#MT<2W}1!}FsZw2YAu`yC&~yYjtVv$ZQOE|ER?!Cp>%RrDwA5h4bJ=+oTR z*aQjA*6v^lzlh8P3D(y32#MWSvc(cVUb&0DiI@#|?ZBtnjOZ$h>&S3mRBeuVWf)bp z)JhPVZQV-$eM?$ev$Hedr7RcnTBN+XQ>RzAfZf)@_mBQ2zxR$LpAp={{sYyh_2*o)B4lVk-&6O=J8;+HS zbGm6ZB`Ml`B!7*>!2czjQp}IC&HwHhjCX_*fh7lDrD2Tt`&+HrSlKw3vy$8<-^XUj zO#vLA#OKb@J|Fy4Bga;D}9G+I_FsE#>i2S_gp|kH+ z=Rx1nmupx|rz&TRfxLYWpcC9b3Me^$XgOF4QY+C^?hsPe_@D<4)5SqGQh~K^D|lv9 z@GP2D5E_AAe0Je31E_HOF+U&H+K7kwdM30-=I0wkA*zkc%*x^cI+F+_qWVanl%8EU z?6y;p>(txN{MF&tME>hhJ4Hn)Wo-z9@48XIgFXd#;;Lfs;8uKFwUZVh@MMa2hW_5+ z5Q_ZGIKT(^75uIFVfbD?Ymz*dF?JQqns5`{UEdk>Arq)SUFjas+^p*IX_}z z-s9*kD^~7thxvCbk8k&4@xSP-ga3McM3cai0HV8PP5a#w9@q3@xD z!Gedkxw>d4ws|UqjUW1tI^y{M+Ai?zgyg@EaLRWu8HTn*#UP%C%=6)K9e1M_yY;Od zmJFL3474~u7oTl_)_dZA4nlZtuflk;#Eh0};`go+w&)-`aZ&W<afs=G?iK;eIEdvFzzEJsw~){~YKL3>fdu%-VYOzO$K` znb-+KAPhpF=vKHs*v+(f@Dkd4tL`(Cn_X$qQYi309x)CXfUKC!{>F6wds20~LDKL$ zmC3ibr1%{+-aU6gY82Jmn>FRRve=3V6j7FHYsUAi$Md+R{I9kSSdUlEI&V-pr4U|w zmQ-QBHcRdOez|b5si>5+$)JO-&(imSYgpU@#RdMBvb|7kC)SAq+5o&o8oK6XDQOHT zfV~e!Dy3-|av<@##jOtM|KpVcrXyX!J{j!(haqBc3BdwnK0rjNS$2jeu1>_u8Y+Tn zi1)IbX@oP`hbh3*l)UK~0X*q7^;%cUGgtI^I&JQ5Q!`h!ZS?>**dG#W#`XL^E%<(z z-oaDN=D{w>qmlCXZay!8me7BBkmy=i+15u)I6CvVV6!il)QjA~YR7aAy(r2tZeukP z%&%*)p&O8nAvpV>_iWMpxKUtrh#t{JWc;vW^ySYt;~zKZxTPR!v)EmIe(t;3$cF42 zv_biESw-(c6JVBmkGln|_BV7^>SSL{B}1-x8nAQpm}bQQpYqOd946=w*{ZuXlW5^j za}toapH3_X4+rl$*i*1$Mg_Ag!FuI4ywSeb2GxUq|9BFmsLF+bSeA`=E=%B|-xn?- zBr+;dkOa&3BHm>^5F|L6D;6UYk5L!(8|cK`mL;3Qc|qI)_h!=rriZuP=IAv#keFUg zMnB_!A4xLUG0axjp*u_9?06fX9jPrwh%n^WZ>G({F=h&Ka8VpGYREdEDSW!FspiG| z*;M&?xp{SFRqlG5U-G0#3@rPfJ8c*Uk#6fgFtklW&*N5k?o>IUi<}5NG8{@Y1b5PH z+w6In$Zp7yvreQ1cgoAMtw^8+{k-sNrgn}59675v8gP2?c#_}_jeD1&B=OOT-?u-< z_xER7jj*D)SRTa|1RJRJgEK0xZBqA9R!yyPwE+uXV^A$KpA&dbom;W41q{tOpjY^x zbA&~-Y{u>eY-+Z3S)k)2kwcUKPj$C9bTIt`WRJ+QWw@p*j<%A=6SAji?u;LH+nXA3 zNrO#|&lw&8MA9qpQ=`=~fyyqH?w$ZwlG0N9X4S-?se1m$v^1*cQaow@veOu*j4%(q zTP)({6*9j@6*KB4xAWc)yrKU#F2&Z(*O3r`I5C(y2MmWJM@T=lgv|UfUyB!tX+xjm zON;2zLB8)F(B*?cIFRqjb8QkOS)D$NPYNCskRukD5k9Lw4!H_ zIzk(YMa&&Wrbqg(E7nqC4Y+#tv`DqqQ|a!YYIdl)u7Line=d|DRO%vhZTnMdLIh5G zE3TLDt`9YW{8~#R{2l9T*}6r0LY5bELA)BEiG^QOGkiJqNe!O!gV6O`f=in96BpRZ z7P3cxo-rZ?4v}8`ZnJZ56LbsdJ;$F_oHq(&VXnyw2uL6&g?&3^Be^@ zD$X~4Pq|laYkfk;Oh1-?g-9Asjk+E5X3o%o35o~kZ9*c6pTnU=%cIVQXEZs@~ zY|>HgWMit0zZx8=`wtP#=8O+*Nywm}(|uvbuL9TP$)IEBynL z+Qmz7VC|-Lwu?Z!0Q^qm?WjAF@OUe_&tA2}B5(~l^+d>-Lu|kQt|r(OaJKSL^Id1C zZrk;VcrtvzJ%l!IC7urujx$&ab;-kuHuBKZ*-&YK9=jdyTdq9N=KVnN7e@;4lr$=B z6;5{tiPnh9Y!>u3i%>oYqA+Qa!{z!3KBZlnO@rrwx!6Uwc}DH5$!3QpJX-3I%B$0v 
zms!n)qH{%+9mtCsLz}29!|h~>8T{k(*M$vSi{+!rZV!dRLWfBFKPYg8$%YQmWm70) zD^9|&Wm1Z^gn4W&})W0uKliD`N^vUsDHkNvB=P{qz|J6VdrB7O2U_WlMtr(DS*ugyK ze=Ip1CD0eDbT;%&!m(`7|A={SPV$&%f!cf6Gk%qa`!4nh+av+Z^f6~kKm&}|weQRA zwlXfl;i)P-=4xjH(_WN9=v`NKsEJAYQC$+c6wrBJHmNs^P8_=XTpnr=^${^#lZ;bf0$b%7gPHCb&AvnCL)f3TX7ms} zsAI0`UpL2b1(2af8=fcwzg-N`MltjSI>0sGro4)w8IZD zqJ>7V)9A`pFyum{L&Qsl1=@>dha6tVzrHID$Kjc`e;daf+=8_w0WOlP2t3%Ap4kgR z%5CipUf3ry&3N};CGpVD$-bwr8V`^yNDaCwnd(q~)p3)ruK|>rqTgBIj zwBolAo^_iRqAxjuUqTgmcyyH*lrI98`il{TBdJ0-EWu zw4@~6)8m7i*F|rcG7gzexE`&7YBqkSO%I$m^Pk)(wdK2UxZc# zStJC;&$CrL>nz#~CFx>M;57>wP}vG>+zJzolnSYJ+A3|_dc{RdDgCVd8-YWWI0C_K zN6=H=KpxM{JJx!L&v^D?otKTB_>}Xpz_m4#jxy-%P`SSKejJ9Cs>e}>?>>5&64dKD z4{0^QTda|0cJwt0wi4$k!{;2K*PJ5)2Z=fW3g0G|(X+~Cc}nVU(ZX`KJA(h@s)&7# zzabkvkTRw5+pKy|<;Zwq=P;UF9kl0AAZo3^dBtWIw+z(xx&IOF8(?+R66oE9rtvFSwLQ7K#R zFbG3Bz)!gcGhf1Zn{3*i@j`8x?49R=%H@j9nt^NPy#9KAs(=kS7%$Ymh6ATXNMZr+ zgW|Pm7fndeO;2Hn-8_vM(_o*UjmhhMZOe==2`G-GFVL#IB!!Jh5# z1%W_);2x;J0C$iopRtA1# zns3vqo-Wv4P%sf(jcl`AFm&J;y~b?-QG+*J3#{9Hch_JA!hZFxS3vOB_Dyk>MU>JB zNJZS2aBrR)(jSu7Ln^1FXO&2jXHR%|8W`>TxNaO;9iaqEX09J)w!~KNmwc|FLA-kf zo!~%jzCGsS{(Bu=RUj?2h zn7^7Wn7?petp0-~j5?}m@he_z-4wev?!g ztonFF>dko~fX^jw3fxtH&csMYC5QLvT>3D^Xc|GEtV;yh_5d7mL@?+*4@-G zfDPAy0|D2ddVrY8`c5Ics1F(Shl&qdvT2Ez@-WA@(0M;~&#Jgl2fyI!JaYBuis1c5 zP~{W|>UrJHP9|}>iPI&SL4`X1BG*X;F~6F8)+V&iuCwr<5La9l*?3N1C%-{doIa5&&J0u z-57O{sc9SozrTv2YU>IXzR67{oCP1N_e0eUvS;mmt@sOo58(-^ z90?#CBuCiB(JPf@lJN65Ph5#o6NO4)f*303V}TDHm9D2$chvq}MLz22Jd~MTwQ3(_ zzleJlH=%amlNUn4JhM74hYR)FlLSs(PsT5m+;(AbE#cp^e_y!xsg`M}*CgDg=yiNJ zUxgTYd#%9sdRFih`&8&8fd5FaYt%GN-wD@auKI$%;So!g3=$CI&A#ba|O>AFAP4MrK!aT0{;ZK`hlI@v}cPp zRjOBvSZ>^p<$guv^$H_PrGk91)j20)^-}`^^EQuf+|*|(isxUSxKE=+$w96{4c7hD z#_q)O2qzHHT|(`}K@)pbpe(~Kub)q>N~J$J3vA_fiR$%jj)=X5`;XhgCBx{+b!6a9 zeJbUJ&&msVmcADG!1P6x!z%Hj%bu%X z9!xZa@X0k6l)V&;1THOL?P7K~{=T+@z0z`WhfE?#A6j;7m8TGO$th0vLw?S7!GYUMM^EGj7xDN{#qKf+JLl0CpXNQEj zAAN=6X!L(bxk~T?P)po~PJoK(p1Tc`C#!jA$aAc8!r5Y;^mA^F*nbY-J*2x3i%7b) zn>aR9e?5m045C=b0YDF9Ql^4X#dD|?xMRG8i7_Ii6Clf`Gr=G8$h$G1Gq{eR=C#9T zd)=fS0tCNabyzRSn>Q7MvfJ4EOjFiAy+XnJU{i7LKy!0)#({zI6~$4SfECH=n4=@9 z$EA~sHj7RF5z|4PX6Y905K^lW#mOo# zpSp6($yf|oSv zWsz0jF*)d4aCyF|Q%<+a-<5Oo^$=LID?w_EuDipAq^|oq&%3iq`1qd<=lg=du4q5~ zL`i*21B|U@6OFI&nGOGExOs}QFBXThNWh~_-7P>-9DnwowN#O2ms=7N}?2*(QI=Od3cKfcLUvsvxpz6Jr zP2jYFp5Q%DBi>y*eYjNTCc}}P1P)n8?I8w$^n~GtlTPJ5y?7nPpOpR+0sIYlgHRjp z;>7D*fItriG0`!c_XPs%;uAd+jJ zI}PvYkDk@SiF9YnL<_g--`wu>CHR%f`3Efb8`-~(8hT~wDy4Gf3RvNQkcL3bEWoZV zy^bpQVyO_(E0}GBbz2QwpbJ4r(D>Zy@&r#{ZzX1OBF$8}X|-m@U^Q_~-O)@MOs?%s zvhW&qp`h-hlr(jph!3-_QEp>$Ikvc&kV_Q|QZlhdiLF;L{Xu?ckq|>Q;>SV-!Mk-^ zdf|?otud8*g_?N~wbJ9DGMB;Baz*tbd6q1}nLj5#_m>}YUfgUeuzri-^cX+kP*`&}BYVT`3B0ZA$<2}J!Zyq4LSYNZWE_Sz z%rvBr`GVBo1iuc}t}-)On#uUoi0i_t9`6uMo|1Eu&*q>5iYvPo8jX!A^Va`rJ0`{O zG+D2jX*isq_p#tgxL$HSYYr#-ZC1uL{c&8^VO39jZxC4LXuf?*7t4VAp_($C1Niv~kBEnEkaR`T3B7w^TU_i(qbZjQki zq~gNA_JZt<=vvTxgx>Oyg^cUm;F4QKB!b6IUe#Y&)pYVg_hQ&1lC{3+Pg2Bj*9t_> zM#@>Vhw|0n&;731na*4LN{*DzOSE-v#jv_F4ucP8noqcN$@Z9w0+H0TS8Bcteq-LM z``rkg0G?h=-uM+TgQ>+g;&@Oz_L}tZ1Mt7jCt9bed!?s0({7%aVM7x}w|zes=*i8K zAwH6bKgbkHYM$c|kh)orrWdpTF1`L&3m8a^gIWks_O@KTHxcV*)}13nq5y1P&*XmQ z%HXwx&j1U~%=eA5R4AQsJ8M&l`}Fwg`;k(Q7T@uTPJ*r&Z>@>Z8e(-t%!hsuKSCuz z>9cJjFp{xLm8+WWlmr6Jmm&Bejw*ji*H(^S*%tB&dJb>$*U1u)np*rOm%$jJeXQJ3 zmYu@FezDS}zC;ZXDC8CJsoctU3n%zH2$JG(V7kG2tVrR?6%Z?RFm&KNw2j@2y;A;C zHeIggZ7fw<`n=$)^f`S$QX+OwE6zd~9?a{;45+ro~n7zGZIta`|$l_Yx-Vbxv9XC7P zd9B`VK09Scd=%mhV*~{dbym_UulQ^Xl8&@6V$PSfG(q>?H4m6tD1<%$K-3UubUNHw z#y>r^+mz-}JS3vtl;#4N&aCYG+cr9bWQMN;I>*hJGgee@B|nDU>h$A>3IXX3+i9?V%r}I 
zfpw&7^SWJ$A`reADaljH%5Unu4qJ9PLXY=D3RP12M*CnP`-3lUpv^)FY|eq?oOBX7XX4Zhy?rgZi8@j7?B?uNP6LEEKOW_$kX z?H2#EVC3MIl~~?-DJZ$(bK0F+cA-=M{1-^mTnfk;27cSqXH3ckzz3J+KX_Hwd*OY*nq)de3^lDvQDb$!8FG!)Jy=_wBpx-Jn(ZH1lzlegpUdQ3PRXh;4R5yo&36F? z#I6o9`uPAUy!N}}8{rYFMGcDXnMr@9*L6e$p`kYxKa984=>;Plz^^S{DYr-q2B}*^ zWoZYh>h0c7yn4*DP5#A^I`$3&)g0d@TM8f?&AJbz8`yil9b^`FN!uXUNM(hSTb(}b<&I38DDdzi;p;rb!#1KIv8k%voUV7ZB zB4k)KH;Ui5*NUUTeS39m^LrQZRXgLew=QG&HE@0YiBu%`0p@DYCTquP-^DJz{P8Px znj6)ViDw!|smxE>(U`|T`K7}aqOZnvL?j8}r|ORLwK}cJFATHZw<67OfMj$t#DjQO~C#_w?q|1F5^RnDMj;qDf@p~(uT$^lZjUnJJ7o4>>wVi9} zC{FL?p0*)f>^q%?uad&9U=>`hx_IyIXf4Pex3dm(=le4$pC-p=pT6fQZkT+{R;I_p za8cZqJBPiE?pokD%->j0ZL{n9YTiJMsma#c9dn*XGq-ue2aN2mRt<1h4>2MDNR%MC zd~BbNX+M5#q_n!$W%TJP^Yv6lRT9(5-C+1?!phk6aIq9QTg4MIov-}o51Q}MHJ2aX zMSu>QgYUncWGNm{yZjBbxG05%H`U^LgM+NHv#d4~2hwt&f@Zd?(1i8X&84^RNXfZ% zkE3AzoOd2TJ!=IuuN7B=xD;_P*-$GHMDDUi%YSD zcXy%QBu^3eTOfTJjBMHsMj9QIf5W`h(3Kfc&T@kuXS?vBjydk^({|l>#Z4*8>DYl) z539+RcA0lWpIn4>PRBw7e|+czmLUYA-MKPD8|0!JtKtT&itc*DxEzLYwyo0eXRGB* z%O4+BqLihwqEzHrC2h^a z93OZ%6o7vr8|24%quP@r`s=$p3;fx{%|78c%f-H;4$y`7=~GVE|7zl_Yfvg=N1zOl zlwg?e*y_H1wA|Od5bBCI)1Qjt{G$%Os@>}L<;%Xg2Di|Tr+&)q27Hobx22+&y~^nd z9}n$~xcDOGy0#T79z^^wOuH?#!i*IaAsof$eBTxxSwfMg$0LF4eDZ@;DZ$Thx;-!( zLO_RS;ug8!U%J}0ZC`rl%gQY-$`m}+Y^kibY%_}sSFd>pXHCUdyGEG)XR6@4f%IwCSeeuKY)m3YM_1=-o5nSfhdi`_`Ple-ZqFUb|D&_7j*3F-;#E-Q<$dG*C)Q#@pQ=^- z_3JJgJSR!JpM;4Mu*Z`NZw_e)b~I1Ks_NWpt`0B2udKYP^f2`vj*M4elSS(J9qR`P z0hza!WqbO#ymM)kH!Wg>_?F~9;!Of&+A7^P-bRF?!EkGmtQ3OA5rD{}8)dg+z8*4H z`ED1EMN2kw*}%YtVdCKsi+x$Jwzfq5-0D%-r56%Aj!*6C_w#cZCoCp9ikPtqZcwzO z?A(GbcB8hOlud4oY=||Ew;PRnD8fR$IvbbgR(FK29-uY&mZeUezc;+5kFvvDt9rHA zj9-!U2_eaBesjf$OY4Yhm4XxXuzCK$jHD z4nfn=Xijz?%Z6)EB6vk;p*}XudkYUy^-}1|S<5)?=7eH5uO(wI`kATz&Votc% zU_qq=@^Z>!xM}PHJJKhUsuYabRYFxFDgriMwp50bC0zON(vxP#d6Y$ktQxZ@!R+GU zn3eaEOn>`HlR<0|x}FT-HAnFsX6T(0IbkBkHs8|bOx$&`xzK}9UAUy#lIT$uzl@P1 z221-hG_)KY=TTA8fy<1{Cd_sl2M~uBh4ZQ^Ny$e<)|c!dx?NcIEZSI8W+K$s{u<)3 z_*%i=h~ALLMm6KNszIXpnTNyNEO@CnBK;_}gJa{9X$ja9&{@Q_+tzTHOOnekjKGT?d*1X=>%OMdB9c$LR|K8r5nqoJ zcudFvM2lGW0RzVymj1Rd*=e-IVZ7i{jW^-u1d7H4#P+%LK@ZpkHhVGOy;Z8o8y9UE zmnQdidQ<#{ysg~sU`k+|&tVf`z+pK*Yy=L0bd8QvB?`1FSikgsxCBmM5Ex`$&Xbey zc6MheN*`2TB0xuX?egcERd!EXlTHPW7AOGTaj9_{hL}YOIMK@iAU`C&?q7grpESTC zXul9=Xj$Zf{pUVz;|YR=o5wU9)sH>NSlSmb?7OYbzJfsO1IuxT*^l1geBZUPNCVYw zL^XS7aLF1P8unXAmFhGoBtlle8XRnNCF*VSJLjs5#x8MxGa94YkF#V}bXw<-hT;E$ zWd6kXNvVLYf3fC6!apK1Bs7g-knX?|4ugsdctgClE;caDa11{Ny<2dpbBk<%x4CWC zRe*Hz=PLFDZ}1W`RIF6j+Gx3&O+6QLU4^fJG{6+VrQwkCC;*`W>SGY6TkzYD)EIM% ze*!qKw)+=rTQAc3;~o@j!>!}&8E^g*EDGYcp3mGohDRD+O@29mDH?Va`F{u0(%RTz zwo`--v`$_>9|+!*;8qL1ySK`BSgx?WqEdoSIEl7OxI@cD~ z!Ww%TCY>+b(YoNv5|ny_4KvvoH4JFM0Dj20GfSY*w?%h9>B@Tct)g!l{{-K%n_vdi z%}uOGQJi|7K8ntn03gyI&`e3MMPd`op)%HwaUk^xf3Z2y6fsv5{pAkt?RW1~$16t8 zpwlj33Mh1IGDazi8#D<*qe?4NG@X$?`RI)j%WVxx+n<6#00i9Ag~!Pa!%XekuLa6C zhy)7`5tdu@HLAEA`5RiyD2#<$dxob)YYhJavlb1WD|_4PvEnYXHhNfVU1zSJ=VO=q z1IUOLM;_Ag=5V^YUj81pNBC4VgyO*~0Vk#e zbsc&X#*ds@{kQjLi~jl6sqG%+-1B_qu~jy6lJOZAUm0O5aNf zU;t9>ndCXhOn@JGvqN$M{;pmA@v^5B7(_$WR}JSvm*#bn_>Nh|Cv_v$xeYSv}HO_GiXJxYAdU#a5 zySrOGPfHarqV-HYNKhJ4M)8xO7=J+DoW3GOk1$t;p~tt?8j!>b-pA@jeV+_FXoN9Fhk%pc=jCDgCR3l zZ>QjvsF-F%T%-I_%*6U)Hu&Pa^#cB-pbK>;BrJvG=+4DrJ6y2i>4*;3%CdehDPlEO zvxJe65mYfAciv5efw(d#5RvM^XT18Ih*~Wn?AY$-$16JvpfacA`pm6l0xSRDIpV1YZ z0Y6Q-yMxl9I0%b^x(DEKd!AN!zQZyA05eEiV9Y&3;ar^i2UMfYPZ9+v+{iZU30bsC zO?uvg#Q?^sqP+Y;x)tH(%kQr*rn;IBzJMHw32y(}$v%^MzAtqY_=$)x?6=#Mdv_lU zdIni?!)Vx2gu3zLYtiBuW<1>50!a!Q&QDa;b?JVRE>12j!Y5=6Q4Y>Mottz#i&mYAjw?*(DviW*?l@D z0ELZ{C8jv9varY=AbG-KI9C%G@QgTxD}P<4K~|$OF*0VzC$j$F)!-^Q!7v1~em%K< 
zR+11n3Yb#+A0jnMID-5h1|joDXtFhIfvos4CZz!Rytb+;!Tt+oH@pqQY0nGy4NK4y zB#^%kn2FwXHxI-ZtUwVZk~t}l;K)guaXhw*9k`f^iS7qufO&xwtl}q~c<%=km72Ob z=wjhCkRk>IFEY0ky}n1ofU~%q8Y1AcH|e|(q$KfVmWq_U`>UuH148J0rx_=?2`psm z>gp;kj#|Gok8uEmR*J{lr}D*s*)~iID;fnd%gYY{%pQU9Xm%r7i2(VM;s^{LXXoVL ziy_OVm`R#dg94gmdy++71JGBrus8(gNCxrudo2@Bv#P#e7-ecBv1Rb^+=IGN3TG72I?uvhGk)2C`#Lhg4QcY zM(d^CNPQ4YhZ3mKxv<#z&MpsAL&eI<3N)}dZ!OhnMYE6hpk3niIO7StexDOOm=()= zREOuSKi6X<`v`~zCVu0a!6hi&eG4kV4f`Io9pCAeLv@S2WL^a7)qLNU+`?U2^v1Dw&j0E-gXW6nt0Nv&$R5 z^E`OC`#$nHu#wi0WDq2t)eo;H@B|=G-2hp}Z|l8qy)hQy#2sB{y)A;nrq&n?G9MQg^B zXm}f07YO=^nsVHnudc2Jpp5Vda$dVM0;c=O1#eKH8B%gO*u3x1ywruoApm5`Vf`V0 z6~-sfZbKW+paiO+Zcs_J9AvQp{^{!Oh8H|?SPlghDMBH7pjtY>@?PUmaNlhOvuLD2 zj%3{ca`3jx3G|&ncc~jxw3>d3h8ht-e!f>C;CVuE9gCC}aJ`I6q!(Ty_SCiIhcm~) z&o42ld)?*5Xy(A_;RU}ckoFSGScboRcW3MEktf?vJ42xrnD3cS- zy!c{eMurI>=gj#*#Z#25+}v<&T_q%a-in5@?}IcV2qsk_#o}^8Gm_h9yFdLf&}>%X zq2FJ3ZDRM>C4uZCLI;4@2hWF;SFh0E@&hk0{`@BFr`ku^WGN8#JNKZPpbC?E}B(=IEWs0xaTAoS{id`&k8FWr0g+foh%WT+;%uzqfF z4HdT75eg!=XIs#Q9|BsE*J}UAsu@CyM*(d0S^4>q-}O%Tf&rCXEdXszSU2b**G(f& z#QOQGjU8|x2#_)sfSR$p;A5EIuhpECzC921*Iy-vUcg1N9>#h-pb+#Rr&$Ge3yuUj zCQX5+T|imi8~GY##FPFkg=$7q?W0Tx8wVE`7wA-TwrTD8OTER(HMTRTDu+*ND&!n> zETp2NMOs&h2LnxugrTdP-?N+mRnh^*lHnzI?;tCJg z*UD}EJ+_N=&(7%a^|S5z<5U?LPs9R(iBWhHhxciFc^fn#ICjYT$LfvO)@QTxO&4e4uM0funlFw|Fs7tU&PLDg zxSd>Q&)z~lU@7X+i7s2B4bKq=Hi4YtYB#vQl~{l@y2 zO+=>W+SOL!P6Zypu+Q(M8?vG&ed!{?S02T8a5~HlFGP3TP)p;x&UY>|b0UB^ zx|7-!AcMYqYgw+Vhcq_+TwHEB*M}Td3cm~t20=eX$4tPMl6auFt6AU>Kq>M}(w;mb z=2CZJ%4TM8uDNr#9~XE!p=+aGvnyfZfaXh{YD`bwwQ{R?t~6sXRfAsBMRB^cb|bUK zf&BB95AIjpWrC%~XzfJuQ)c(|(~bc0)Zx)fJ7Q2Rz2U&K=P)iUB_%%`=bTUo^v88D zFA@(Tt6onxI5uGFhxEKT#n}>_U0ba()dUfQOY+@)9{hLhg{_9QuT|~Jphr3}X?^{g z$R|{!a32&!T0BjcD2jAs(^F8omvawlr+1K6yYcvQ5iVTwWcVbs-PFb9a9;Yz*7mYM z@2>wYkt54<*)Qrp&RqO9wD<>;=9weU{E*#;m6;()EGIABf<7-UHr z^d#sbJR(=}a`)(m(NEp5Cs#L{9?{-(d@c%$tS|Fe%wwxAr&M=jYBKTigg-^och{EP zSN(aw!gu^nX9QhoUN&F~6t;;X9~ zCthu)Uo{WC#J|KN)lR2yP&i0#-}`quB#U=-ZPERI9CuCa(;xRme2*>&c zAcV5}u0)Z$nZySHPBY0mJ+WIUOHnOxZtN2u(c0DMe%0Rt!N0XBqkW(p`@A zPob?GyTf9i?(02`QO<(b*HtfV$i3?-(sRbA6%I~?&Fk2a4@{?-TP8vQxbg;7&kIt9 zZW(mv%=W0=irtS&e2BrlhAEu1-0wIRygEXd;)9jp zx{I~-`s0+HTD$4jclywh>%?@FXLT>AA5PwrUgY_3&GKm?RP=9Dkw$mF%iO;8nm9dD z+yTS%115w@Sl*$>|3GIu7pJM6hGzT=LPP^n;@~%6Iek69%Bf{Ajo%GN?s*sj2{No{5YbKR9ZFv28 zEt?g2I=U^j#i;5HH(BxgwtnftU|uIKrIPe^T#k#>MH!iM}*hLO5Zd8RPuyU#0(-L3$^!2CIl$b z_Qh_DnIWVwGX>03wZO){8sC<4PC?`?ABQczBm%RQg$y-tR5t=_iiz0QSqbM^Zg6?4TC6#hn9~PEl?8yXVb>r*2p z7FrFoM~W5gW2m$}3cq%j*+uo#VK{6BKYnLTwlqmHx6dsuj1HpS@B25+@UZn-J^pP{ zqHaGU9D)QZ#fXj|7d2<^UVB~A#nM|dHSN!e#vVPht>h|)zJnvNs}=xE{F<*tV^hxY7TC@R)7ArnOcM0>&ag;gM`OIt6 z=KKzwMGlhA`)zDyqHckG5AiIhq{aw14gW^2O!^ta?D1OhEd`Yq_XRmG5JTA*LD*c1(o(NU0C zY2uiD;?SU%U|!J=)-4e1eLqlUFIHer+x0A@?$x?WO3u{h|K4gyxxf+)GgB9Xt#dkE zt?L32X9|DY3dNhUw`m*b@9+J3T+h;AHDIevkic*KBrLNcp;L&FQ5vHx?K9uk)Fm)X z!2~Nc!p+b$(csU>pMY-4cqv8nUh z8CJ#;Xxe?Qm~-)GE_a1o*ifpw9W9aWWpc3stxKD{_ohWton2in{ABIR?P=mBcRW`k zW4B5@+w#j&9Je^(N6hYs^{p`!NnyEKczwTU7Aso8^wG5-vygq4kh=eyaB-^P+YfvD zot+&7hgLCJ4m6BNO?d}(%EwDvq*g!qe@V7gf%-{q92($QXADTM3c_fnV+po}0nKt^ z%`3!3Gt-4UwNim?@)%D#Q9+nYMAWhAt01C~Yp8B*@0q@(OrVErxMPA@ov^jk2*ze_ z4sF9vsdq@9DDdXi))U7vT_bf}Jl#6ExzAy#TY5$w(-13yPVf}}&bcM``k*Hs%qX2% z_0!^ko;fL;YM!t0`zp2A&Q_b!-&Q)&lLaatbie!1wbTy`8?#+W7 z%Xm85*!oi^j^&5x{bJpceCoqX zPWe&R0zPiCe^tAt)8|`lSu!B1_F>a?n|@=MllDC_oK{26ihpYmL|)UQ88n=r&`lKR zSb<+M>m)I|hWPxJ$z4v?D;D8)J$5*6S@-aStv@!Bj>c)S&%DZ!cSg$|036~71TK}Q z7R`@s&UjU3lMq1P)aB?#C}jyew?u^}u3ClUMlC4$i#`a;U@)5p*2m%q|M2aPU1vKZ z(H+&am9D8yvD||mVyb3kq0v=E<>(JPfe=*u#noZ=l-h2Sikv5lV;9^NqtyCTNzf|x 
z`7yDDjU80^)usy>yMFp*8lx0Xt>y)AsR+6c=c3BSx>3YA{0Op8Rc3SZT!7N2NMW!> z{E{ajciZ!uTrlEi*Bqmy{F%A~HO$g z5LFCZEcijez#NvTzrR|q`F8!T>A3H~X!NYZqtXM;EzkzhsL)4(OlN=>oNMH>AqlJ68*>hRK`8|!+!qcEdj4V zzq`};kIO+yIR3m`&KSubJcTUKdrFND-w)`-kAv5dhJN*R&cEpkI;QwP(&n7we|%UU z7t{7eQ%HfCh^*@kD#V`LD6)bd|$%{c85>}Us64S?)}rk2*pa;!D7 zX$88Jlmh9k8&8FS+kY$H7U@$N45$Ey?}b?IPfyU`t{XtZj*pLn;B_AO1De)@;RCTC zQuL6PPRLy0PB_~4$Vqqs?!SLsTU{NxFk!eoM^tVfzST| v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'cerberusdoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'cerberus.tex', u'cerberus Documentation', - u'Eurogiciel', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. 
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'cerberus', u'cerberus Documentation',
-     [u'Eurogiciel'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-    ('index', 'cerberus', u'cerberus Documentation',
-     u'Eurogiciel', 'cerberus', 'One line description of project.',
-     'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst
deleted file mode 100644
index ed77c12..0000000
--- a/doc/source/contributing.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-============
-Contributing
-============
-.. include:: ../../CONTRIBUTING.rst
\ No newline at end of file
diff --git a/doc/source/development_plugin.rst b/doc/source/development_plugin.rst
deleted file mode 100644
index 8654059..0000000
--- a/doc/source/development_plugin.rst
+++ /dev/null
@@ -1,57 +0,0 @@
-===============
-Writing plugins
-===============
-
-This documentation explains how to write a new plugin for Cerberus if you
-wish to integrate a security component that is not covered by an existing
-plugin.
-
-Cerberus manager
-================
-
-The Cerberus manager is implemented in ``cerberus/manager.py``. It loads all
-plugins defined in the ``cerberus.plugins`` namespace, and it is also
-responsible for task management.
-
-Plugins
-=======
-
-The Cerberus manager uses stevedore to load extensions dynamically.
-Plugins can:
-
- * subscribe to notifications sent through AMQP.
- * define a callable method (decorated with ``@webmethod``) which is invoked
-   either once or periodically through a task.
-
-Notifications
--------------
-
-Plugins must implement the method
-``process_notification(self, ctxt, publisher_id, event_type, payload, metadata)``,
-which receives the event messages the plugin subscribed to.
-
-For example, the ``test_plugin`` plugin listens to one event:
-
- * image.update
-
-Tasks
------
-
-For a plugin to be invoked through a task, it must implement a method
-decorated with ``@webmethod``.
-
-For example, the ``test_plugin`` plugin defines one callable method:
-
- * get_security_reports
-
-
-Adding new plugins
-------------------
-
-Cerberus needs to be easy to extend and configure so it can be tuned for each
-installation. A plugin system based on setuptools entry points makes it easy
-to add new monitors in the agents. In particular, Cerberus uses stevedore, so
-you should put your entry point definitions in the ``entry_points.txt`` file of
-your Cerberus egg. Alternatively, you can put your entry point definitions in
-the ``setup.cfg`` file before installing Cerberus. Installing a plugin
-automatically activates it the next time the Cerberus manager starts.
\ No newline at end of file
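As an illustration of the plugin interface described in the deleted guide
above, a minimal plugin skeleton could look as follows. This is a sketch, not
the project's actual API: the module path ``cerberus.plugins.base``, the
base-class name ``PluginBase``, the ``subscribed_events`` attribute and the
location of the ``@webmethod`` decorator are assumptions inferred from the
repository layout; the authoritative definitions live in
``cerberus/plugins/base.py`` and ``cerberus/plugins/test_plugin.py``::

    # Hypothetical example -- class and attribute names are illustrative.
    from cerberus.plugins import base  # assumed module path


    class ExamplePlugin(base.PluginBase):  # assumed base class name

        def __init__(self):
            super(ExamplePlugin, self).__init__()
            # AMQP notifications this plugin wants to receive.
            self.subscribed_events = ['image.update']

        def process_notification(self, ctxt, publisher_id, event_type,
                                 payload, metadata):
            # Called with every event message the plugin subscribed to.
            if event_type == 'image.update':
                pass  # react to the platform change here

        @base.webmethod  # assumed decorator location
        def get_security_reports(self, **kwargs):
            # Invoked once or periodically through a Cerberus task.
            return []

Such a plugin would then be declared as an entry point, mirroring the existing
``testplugin`` and ``taskplugin`` entries in this repository's ``setup.cfg``::

    [entry_points]
    cerberus.plugins =
        exampleplugin = cerberus.plugins.example_plugin:ExamplePlugin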
diff --git a/doc/source/index.rst b/doc/source/index.rst
deleted file mode 100644
index 159eb41..0000000
--- a/doc/source/index.rst
+++ /dev/null
@@ -1,62 +0,0 @@
-.. cerberus documentation master file, created by
-   sphinx-quickstart on Wed May 14 23:05:42 2014.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-==============================================
-Welcome to Cerberus's developer documentation!
-==============================================
-
-Introduction
-============
-
-Cerberus is a Security as a Service project aimed at integrating security
-tools inside OpenStack.
-
-Cerberus offers a framework to integrate **security components**
-(vulnerability scanners, behavior analysis, IPS, IDS, SIEM) in order to
-propagate changes of the platform to them and to collect security reports
-and security alarms.
-
-Installation
-============
-
-.. toctree::
-   :maxdepth: 1
-
-   installation
-
-
-Architecture
-============
-
-.. toctree::
-   :maxdepth: 1
-
-   arch
-
-
-API References
-==============
-
-.. toctree::
-   :maxdepth: 1
-
-   webapi/root
-   webapi/v1
-
-
-Plugin development
-==================
-
-.. toctree::
-   :maxdepth: 1
-
-   development_plugin
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`search`
diff --git a/doc/source/installation.rst b/doc/source/installation.rst
deleted file mode 100644
index 417b707..0000000
--- a/doc/source/installation.rst
+++ /dev/null
@@ -1,124 +0,0 @@
-#######################################
-Cerberus installation and configuration
-#######################################
-
-
-Install from source
-===================
-
-There is no release of Cerberus as of now; the installation can be done from
-the git repository.
-
-Retrieve and install Cerberus:
-
-::
-
-    git clone git://git.openstack.org/openstack/cerberus
-    cd cerberus
-    python setup.py install
-
-This procedure installs the ``cerberus`` python library and a few
-executables:
-
-* ``cerberus-api``: API service
-* ``cerberus-agent``: Task management service
-
-Install a sample configuration file:
-
-::
-
-    mkdir /etc/cerberus
-    cp etc/cerberus/cerberus.conf.sample /etc/cerberus/cerberus.conf
-
-Configure Cerberus
-==================
-
-Edit :file:`/etc/cerberus/cerberus.conf` to configure Cerberus.
-
-The following shows the basic configuration items:
-
-.. 
code-block:: ini - - [DEFAULT] - verbose = True - log_dir = /var/log/cerberus - - rabbit_host = RABBIT_HOST - rabbit_userid = openstack - rabbit_password = RABBIT_PASSWORD - - [auth] - username = cerberus - password = CERBERUS_PASSWORD - tenant = service - region = RegionOne - url = http://localhost:5000/v2.0 - - [keystone_authtoken] - username = cerberus - password = CERBERUS_PASSWORD - project_name = service - region = RegionOne - auth_url = http://localhost:5000/v2.0 - auth_plugin = password - - [database] - connection = mysql://cerberus:CERBERUS_DBPASS@localhost/cerberus - -Setup the database and storage backend -====================================== - -MySQL/MariaDB is the recommended database engine. To setup the database, use -the ``mysql`` client: - -:: - - mysql -uroot -p << EOF - CREATE DATABASE cerberus; - GRANT ALL PRIVILEGES ON cerberus.* TO 'cerberus'@'localhost' IDENTIFIED BY 'CERBERUS_DBPASS'; - EOF - -Run the database synchronisation scripts: - -:: - - cerberus-dbsync upgrade - -Init the storage backend: - -:: - - cerberus-storage-init - -Setup Keystone -============== - -Cerberus uses Keystone for authentication. - -To integrate Cerberus to Keystone, run the following commands (as OpenStack -administrator): - -:: - - keystone user-create --name cerberus --pass CERBERUS_PASS - keystone user-role-add --user cerberus --role admin --tenant service - -Create the ``Security`` service and its endpoints: - -:: - - keystone service-create --name Cerberus --type security - keystone endpoint-create --service-id SECURITY_SERVICE_ID \ - --publicurl http://localhost:8300 \ - --adminurl http://localhost:8300 \ - --internalurl http://localhost:8300 - -Start Cerberus -============== - -Start the API and processing services : - -:: - - cerberus-api --config-file /etc/cerberus/cerberus.conf - cerberus-agent --config-file /etc/cerberus/cerberus.conf diff --git a/doc/source/readme.rst b/doc/source/readme.rst deleted file mode 100644 index 38ba804..0000000 --- a/doc/source/readme.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../README.rst \ No newline at end of file diff --git a/doc/source/usage.rst b/doc/source/usage.rst deleted file mode 100644 index d43dbb0..0000000 --- a/doc/source/usage.rst +++ /dev/null @@ -1,7 +0,0 @@ -===== -Usage -===== - -To use cerberus in a project:: - - import cerberus diff --git a/doc/source/webapi/root.rst b/doc/source/webapi/root.rst deleted file mode 100644 index 3440e30..0000000 --- a/doc/source/webapi/root.rst +++ /dev/null @@ -1,16 +0,0 @@ -======================== -Cerberus REST API (root) -======================== - -.. rest-controller:: cerberus.api.root:RootController - :webprefix: / / -.. Dirty hack till the bug is fixed so we can specify root path - -.. autotype:: cerberus.api.root.APILink - :members: - -.. autotype:: cerberus.api.root.APIMediaType - :members: - -.. autotype:: cerberus.api.root.APIVersion - :members: diff --git a/doc/source/webapi/v1.rst b/doc/source/webapi/v1.rst deleted file mode 100644 index 05de752..0000000 --- a/doc/source/webapi/v1.rst +++ /dev/null @@ -1,52 +0,0 @@ -====================== -Cerberus REST API (v1) -====================== - - -Plugins -======= - -.. rest-controller:: cerberus.api.v1.controllers.plugins:PluginsController - :webprefix: /v1/plugins - -.. autotype:: cerberus.api.v1.datamodels.plugin.PluginResource - :members: - - -Security alarms -=============== - -.. rest-controller:: cerberus.api.v1.controllers.security_alarms:SecurityAlarmsController - :webprefix: /v1/security_alarms - -.. 
rest-controller:: cerberus.api.v1.controllers.security_alarms:SecurityAlarmController - :webprefix: /v1/security_alarms/{id} - -.. autotype:: cerberus.api.v1.datamodels.security_alarm.SecurityAlarmResource - :members: - - -Security reports -================ - -.. rest-controller:: cerberus.api.v1.controllers.security_reports:SecurityReportsController - :webprefix: /v1/security_reports - -.. rest-controller:: cerberus.api.v1.controllers.security_reports:SecurityReportController - :webprefix: /v1/security_reports/{id} - -.. autotype:: cerberus.api.v1.datamodels.security_report.SecurityReportResource - :members: - - -Tasks -===== - -.. rest-controller:: cerberus.api.v1.controllers.tasks:TasksController - :webprefix: /v1/tasks - -.. rest-controller:: cerberus.api.v1.controllers.tasks:ActionController - :webprefix: /v1/tasks/{id}/actions - -.. autotype:: cerberus.api.v1.datamodels.task.TaskResource - :members: diff --git a/etc/cerberus/cerberus.conf.sample b/etc/cerberus/cerberus.conf.sample deleted file mode 100644 index b6eed73..0000000 --- a/etc/cerberus/cerberus.conf.sample +++ /dev/null @@ -1,746 +0,0 @@ -[DEFAULT] - -# -# Options defined in oslo.messaging -# - -# Use durable queues in amqp. (boolean value) -# Deprecated group/name - [DEFAULT]/rabbit_durable_queues -#amqp_durable_queues=false - -# Auto-delete queues in amqp. (boolean value) -#amqp_auto_delete=false - -# Size of RPC connection pool. (integer value) -#rpc_conn_pool_size=30 - -# Qpid broker hostname. (string value) -#qpid_hostname=cerberus - -# Qpid broker port. (integer value) -#qpid_port=5672 - -# Qpid HA cluster host:port pairs. (list value) -#qpid_hosts=$qpid_hostname:$qpid_port - -# Username for Qpid connection. (string value) -#qpid_username= - -# Password for Qpid connection. (string value) -#qpid_password= - -# Space separated list of SASL mechanisms to use for auth. -# (string value) -#qpid_sasl_mechanisms= - -# Seconds between connection keepalive heartbeats. (integer -# value) -#qpid_heartbeat=60 - -# Transport to use, either 'tcp' or 'ssl'. (string value) -#qpid_protocol=tcp - -# Whether to disable the Nagle algorithm. (boolean value) -#qpid_tcp_nodelay=true - -# The number of prefetched messages held by receiver. (integer -# value) -#qpid_receiver_capacity=1 - -# The qpid topology version to use. Version 1 is what was -# originally used by impl_qpid. Version 2 includes some -# backwards-incompatible changes that allow broker federation -# to work. Users should update to version 2 when they are -# able to take everything down, as it requires a clean break. -# (integer value) -#qpid_topology_version=1 - -# SSL version to use (valid only if SSL enabled). valid values -# are TLSv1, SSLv23 and SSLv3. SSLv2 may be available on some -# distributions. (string value) -#kombu_ssl_version= - -# SSL key file (valid only if SSL enabled). (string value) -#kombu_ssl_keyfile= - -# SSL cert file (valid only if SSL enabled). (string value) -#kombu_ssl_certfile= - -# SSL certification authority file (valid only if SSL -# enabled). (string value) -#kombu_ssl_ca_certs= - -# How long to wait before reconnecting in response to an AMQP -# consumer cancel notification. (floating point value) -#kombu_reconnect_delay=1.0 - -# The RabbitMQ broker address where a single node is used. -# (string value) -#rabbit_host=cerberus - -# The RabbitMQ broker port where a single node is used. -# (integer value) -#rabbit_port=5672 - -# RabbitMQ HA cluster host:port pairs. 
(list value) -#rabbit_hosts=$rabbit_host:$rabbit_port - -# Connect over SSL for RabbitMQ. (boolean value) -#rabbit_use_ssl=false - -# The RabbitMQ userid. (string value) -#rabbit_userid=guest - -# The RabbitMQ password. (string value) -#rabbit_password=guest - -# the RabbitMQ login method (string value) -#rabbit_login_method=AMQPLAIN - -# The RabbitMQ virtual host. (string value) -#rabbit_virtual_host=/ - -# How frequently to retry connecting with RabbitMQ. (integer -# value) -#rabbit_retry_interval=1 - -# How long to backoff for between retries when connecting to -# RabbitMQ. (integer value) -#rabbit_retry_backoff=2 - -# Maximum number of RabbitMQ connection retries. Default is 0 -# (infinite retry count). (integer value) -#rabbit_max_retries=0 - -# Use HA queues in RabbitMQ (x-ha-policy: all). If you change -# this option, you must wipe the RabbitMQ database. (boolean -# value) -#rabbit_ha_queues=false - -# If passed, use a fake RabbitMQ provider. (boolean value) -#fake_rabbit=false - -# ZeroMQ bind address. Should be a wildcard (*), an ethernet -# interface, or IP. The "host" option should point or resolve -# to this address. (string value) -#rpc_zmq_bind_address=* - -# MatchMaker driver. (string value) -#rpc_zmq_matchmaker=oslo.messaging._drivers.matchmaker.MatchMakerLocalhost - -# ZeroMQ receiver listening port. (integer value) -#rpc_zmq_port=9501 - -# Number of ZeroMQ contexts, defaults to 1. (integer value) -#rpc_zmq_contexts=1 - -# Maximum number of ingress messages to locally buffer per -# topic. Default is unlimited. (integer value) -#rpc_zmq_topic_backlog= - -# Directory for holding IPC sockets. (string value) -#rpc_zmq_ipc_dir=/var/run/openstack - -# Name of this node. Must be a valid hostname, FQDN, or IP -# address. Must match "host" option, if running Nova. (string -# value) -#rpc_zmq_host=cerberus - -# Seconds to wait before a cast expires (TTL). Only supported -# by impl_zmq. (integer value) -#rpc_cast_timeout=30 - -# Heartbeat frequency. (integer value) -#matchmaker_heartbeat_freq=300 - -# Heartbeat time-to-live. (integer value) -#matchmaker_heartbeat_ttl=600 - -# Size of RPC greenthread pool. (integer value) -#rpc_thread_pool_size=64 - -# Driver or drivers to handle sending notifications. (multi -# valued) -#notification_driver= - -# AMQP topic used for OpenStack notifications. (list value) -# Deprecated group/name - [rpc_notifier2]/topics -#notification_topics=notifications - -# Seconds to wait for a response from a call. (integer value) -#rpc_response_timeout=60 - -# A URL representing the messaging driver to use and its full -# configuration. If not set, we fall back to the rpc_backend -# option and driver specific configuration. (string value) -#transport_url= - -# The messaging driver to use, defaults to rabbit. Other -# drivers include qpid and zmq. (string value) -#rpc_backend=rabbit - -# The default exchange under which topics are scoped. May be -# overridden by an exchange name specified in the -# transport_url option. (string value) -#control_exchange=openstack - - -# -# Options defined in cerberus.service -# - -# Name of this node, which must be valid in an AMQP key. Can -# be an opaque identifier. For ZeroMQ only, must be a valid -# host name, FQDN, or IP address. (string value) -#host=cerberus - -# Dispatcher to process data. (multi valued) -#dispatcher=database - -# Number of workers for collector service. A single collector -# is enabled by default. (integer value) -#collector_workers=1 - -# Number of workers for notification service. 
A single -# notification agent is enabled by default. (integer value) -#notification_workers=1 - - -# -# Options defined in cerberus.api -# - -# The strategy to use for authentication. (string value) -#auth_strategy=keystone - - -# -# Options defined in cerberus.api.app -# - -# Configuration file for WSGI definition of API. (string -# value) -#api_paste_config=api_paste.ini - - -# -# Options defined in cerberus.client.nova_client -# - -# Allow novaclient's debug log output. (boolean value) -#nova_http_log_debug=false - - -# -# Options defined in cerberus.common.exception -# - -# Make exception message format errors fatal (boolean value) -#fatal_exception_format_errors=false - - -# -# Options defined in cerberus.openstack.common.eventlet_backdoor -# - -# Enable eventlet backdoor. Acceptable values are 0, , -# and :, where 0 results in listening on a random -# tcp port number; results in listening on the -# specified port number (and not enabling backdoor if that -# port is in use); and : results in listening on -# the smallest unused port number within the specified range -# of port numbers. The chosen port is displayed in the -# service's log file. (string value) -#backdoor_port= - - -# -# Options defined in cerberus.openstack.common.lockutils -# - -# Whether to disable inter-process locks (boolean value) -#disable_process_locking=false - -# Directory to use for lock files. (string value) -#lock_path= - - -# -# Options defined in cerberus.openstack.common.log -# - -# Print debugging output (set logging level to DEBUG instead -# of default WARNING level). (boolean value) -#debug=false - -# Print more verbose output (set logging level to INFO instead -# of default WARNING level). (boolean value) -#verbose=false - -# Log output to standard error (boolean value) -#use_stderr=true - -# Format string to use for log messages with context (string -# value) -#logging_context_format_string=%(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(request_id)s %(user_identity)s] %(instance)s%(message)s - -# Format string to use for log messages without context -# (string value) -#logging_default_format_string=%(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s - -# Data to append to log format when level is DEBUG (string -# value) -#logging_debug_format_suffix=%(funcName)s %(pathname)s:%(lineno)d - -# Prefix each line of exception output with this format -# (string value) -#logging_exception_prefix=%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s %(instance)s - -# List of logger=LEVEL pairs (list value) -#default_log_levels=amqp=WARN,amqplib=WARN,boto=WARN,qpid=WARN,sqlalchemy=WARN,suds=INFO,oslo.messaging=INFO,iso8601=WARN,requests.packages.urllib3.connectionpool=WARN - -# Publish error events (boolean value) -#publish_errors=false - -# Make deprecations fatal (boolean value) -#fatal_deprecations=false - -# If an instance is passed with the log message, format it -# like this (string value) -#instance_format="[instance: %(uuid)s] " - -# If an instance UUID is passed with the log message, format -# it like this (string value) -#instance_uuid_format="[instance: %(uuid)s] " - -# The name of logging configuration file. It does not disable -# existing loggers, but just appends specified logging -# configuration to any other existing logging options. Please -# see the Python logging module documentation for details on -# logging configuration files. (string value) -# Deprecated group/name - [DEFAULT]/log_config -#log_config_append= - -# DEPRECATED. 
A logging.Formatter log message format string -# which may use any of the available logging.LogRecord -# attributes. This option is deprecated. Please use -# logging_context_format_string and -# logging_default_format_string instead. (string value) -#log_format= - -# Format string for %%(asctime)s in log records. Default: -# %(default)s (string value) -#log_date_format=%Y-%m-%d %H:%M:%S - -# (Optional) Name of log file to output to. If no default is -# set, logging will go to stdout. (string value) -# Deprecated group/name - [DEFAULT]/logfile -#log_file= - -# (Optional) The base directory used for relative --log-file -# paths (string value) -# Deprecated group/name - [DEFAULT]/logdir -#log_dir= - -# Use syslog for logging. Existing syslog format is DEPRECATED -# during I, and then will be changed in J to honor RFC5424 -# (boolean value) -#use_syslog=false - -# (Optional) Use syslog rfc5424 format for logging. If -# enabled, will add APP-NAME (RFC5424) before the MSG part of -# the syslog message. The old format without APP-NAME is -# deprecated in I, and will be removed in J. (boolean value) -#use_syslog_rfc_format=false - -# Syslog facility to receive log lines (string value) -#syslog_log_facility=LOG_USER - - -# -# Options defined in cerberus.openstack.common.periodic_task -# - -# Some periodic tasks can be run in a separate process. Should -# we run them here? (boolean value) -#run_external_periodic_tasks=true - - -# -# Options defined in cerberus.openstack.common.policy -# - -# JSON file containing policy (string value) -#policy_file=policy.json - -# Rule enforced when requested rule is not found (string -# value) -#policy_default_rule=default - - -[api] - -# -# Options defined in cerberus.api.app -# - -# Host serving the API. (string value) -#host_ip=0.0.0.0 - -# Host port serving the API. (integer value) -#port=8300 - - -[database] - -# -# Options defined in cerberus.openstack.common.db.options -# - -# The file name to use with SQLite (string value) -#sqlite_db=cerberus.sqlite - -# If True, SQLite uses synchronous mode (boolean value) -#sqlite_synchronous=true - -# The backend to use for db (string value) -# Deprecated group/name - [DEFAULT]/db_backend -#backend=sqlalchemy - -# The SQLAlchemy connection string used to connect to the -# database (string value) -# Deprecated group/name - [DEFAULT]/sql_connection -# Deprecated group/name - [DATABASE]/sql_connection -# Deprecated group/name - [sql]/connection -#connection= - -# The SQL mode to be used for MySQL sessions. This option, -# including the default, overrides any server-set SQL mode. To -# use whatever SQL mode is set by the server configuration, -# set this to no value. Example: mysql_sql_mode= (string -# value) -#mysql_sql_mode=TRADITIONAL - -# Timeout before idle sql connections are reaped (integer -# value) -# Deprecated group/name - [DEFAULT]/sql_idle_timeout -# Deprecated group/name - [DATABASE]/sql_idle_timeout -# Deprecated group/name - [sql]/idle_timeout -#idle_timeout=3600 - -# Minimum number of SQL connections to keep open in a pool -# (integer value) -# Deprecated group/name - [DEFAULT]/sql_min_pool_size -# Deprecated group/name - [DATABASE]/sql_min_pool_size -#min_pool_size=1 - -# Maximum number of SQL connections to keep open in a pool -# (integer value) -# Deprecated group/name - [DEFAULT]/sql_max_pool_size -# Deprecated group/name - [DATABASE]/sql_max_pool_size -#max_pool_size= - -# Maximum db connection retries during startup. 
(setting -1 -# implies an infinite retry count) (integer value) -# Deprecated group/name - [DEFAULT]/sql_max_retries -# Deprecated group/name - [DATABASE]/sql_max_retries -#max_retries=10 - -# Interval between retries of opening a sql connection -# (integer value) -# Deprecated group/name - [DEFAULT]/sql_retry_interval -# Deprecated group/name - [DATABASE]/reconnect_interval -#retry_interval=10 - -# If set, use this value for max_overflow with sqlalchemy -# (integer value) -# Deprecated group/name - [DEFAULT]/sql_max_overflow -# Deprecated group/name - [DATABASE]/sqlalchemy_max_overflow -#max_overflow= - -# Verbosity of SQL debugging information. 0=None, -# 100=Everything (integer value) -# Deprecated group/name - [DEFAULT]/sql_connection_debug -#connection_debug=0 - -# Add python stack traces to SQL as comment strings (boolean -# value) -# Deprecated group/name - [DEFAULT]/sql_connection_trace -#connection_trace=false - -# If set, use this value for pool_timeout with sqlalchemy -# (integer value) -# Deprecated group/name - [DATABASE]/sqlalchemy_pool_timeout -#pool_timeout= - -# Enable the experimental use of database reconnect on -# connection lost (boolean value) -#use_db_reconnect=false - -# seconds between db connection retries (integer value) -#db_retry_interval=1 - -# Whether to increase interval between db connection retries, -# up to db_max_retry_interval (boolean value) -#db_inc_retry_interval=true - -# max seconds between db connection retries, if -# db_inc_retry_interval is enabled (integer value) -#db_max_retry_interval=10 - -# maximum db connection retries before error is raised. -# (setting -1 implies an infinite retry count) (integer value) -#db_max_retries=20 - - -[keystone_authtoken] - -# -# Options defined in keystoneclient.middleware.auth_token -# - -# Prefix to prepend at the beginning of the path. Deprecated, -# use identity_uri. (string value) -#auth_admin_prefix= - -# Host providing the admin Identity API endpoint. Deprecated, -# use identity_uri. (string value) -#auth_host=127.0.0.1 - -# Port of the admin Identity API endpoint. Deprecated, use -# identity_uri. (integer value) -#auth_port=35357 - -# Protocol of the admin Identity API endpoint (http or https). -# Deprecated, use identity_uri. (string value) -#auth_protocol=https - -# Complete public Identity API endpoint (string value) -#auth_uri= - -# Complete admin Identity API endpoint. This should specify -# the unversioned root endpoint e.g. https://localhost:35357/ -# (string value) -#identity_uri= - -# API version of the admin Identity API endpoint (string -# value) -#auth_version= - -# Do not handle authorization requests within the middleware, -# but delegate the authorization decision to downstream WSGI -# components (boolean value) -#delay_auth_decision=false - -# Request timeout value for communicating with Identity API -# server. (boolean value) -#http_connect_timeout= - -# How many times are we trying to reconnect when communicating -# with Identity API Server. (integer value) -#http_request_max_retries=3 - -# This option is deprecated and may be removed in a future -# release. Single shared secret with the Keystone -# configuration used for bootstrapping a Keystone -# installation, or otherwise bypassing the normal -# authentication process. This option should not be used, use -# `admin_user` and `admin_password` instead. 
(string value)
-#admin_token=
-
-# Keystone account username (string value)
-#admin_user=
-
-# Keystone account password (string value)
-#admin_password=
-
-# Keystone service account tenant name to validate user tokens
-# (string value)
-#admin_tenant_name=admin
-
-# Env key for the swift cache (string value)
-#cache=
-
-# Required if Keystone server requires client certificate
-# (string value)
-#certfile=
-
-# Required if Keystone server requires client certificate
-# (string value)
-#keyfile=
-
-# A PEM encoded Certificate Authority to use when verifying
-# HTTPs connections. Defaults to system CAs. (string value)
-#cafile=
-
-# Verify HTTPS connections. (boolean value)
-#insecure=false
-
-# Directory used to cache files related to PKI tokens (string
-# value)
-#signing_dir=
-
-# Optionally specify a list of memcached server(s) to use for
-# caching. If left undefined, tokens will instead be cached
-# in-process. (list value)
-# Deprecated group/name - [DEFAULT]/memcache_servers
-#memcached_servers=
-
-# In order to prevent excessive effort spent validating
-# tokens, the middleware caches previously-seen tokens for a
-# configurable duration (in seconds). Set to -1 to disable
-# caching completely. (integer value)
-#token_cache_time=300
-
-# Determines the frequency at which the list of revoked tokens
-# is retrieved from the Identity service (in seconds). A high
-# number of revocation events combined with a low cache
-# duration may significantly reduce performance. (integer
-# value)
-#revocation_cache_time=10
-
-# (optional) if defined, indicate whether token data should be
-# authenticated or authenticated and encrypted. Acceptable
-# values are MAC or ENCRYPT. If MAC, token data is
-# authenticated (with HMAC) in the cache. If ENCRYPT, token
-# data is encrypted and authenticated in the cache. If the
-# value is not one of these options or empty, auth_token will
-# raise an exception on initialization. (string value)
-#memcache_security_strategy=
-
-# (optional, mandatory if memcache_security_strategy is
-# defined) this string is used for key derivation. (string
-# value)
-#memcache_secret_key=
-
-# (optional) indicate whether to set the X-Service-Catalog
-# header. If False, middleware will not ask for service
-# catalog on token validation and will not set the X-Service-
-# Catalog header. (boolean value)
-#include_service_catalog=true
-
-# Used to control the use and type of token binding. Can be
-# set to: "disabled" to not check token binding. "permissive"
-# (default) to validate binding information if the bind type
-# is of a form known to the server and ignore it if not.
-# "strict" like "permissive" but if the bind type is unknown
-# the token will be rejected. "required" any form of token
-# binding is needed to be allowed. Finally the name of a
-# binding method that must be present in tokens. (string
-# value)
-#enforce_token_bind=permissive
-
-# If true, the revocation list will be checked for cached
-# tokens. This requires that PKI tokens are configured on the
-# Keystone server. (boolean value)
-#check_revocations_for_cached=false
-
-# Hash algorithms to use for hashing PKI tokens. This may be a
-# single algorithm or multiple. The algorithms are those
-# supported by Python standard hashlib.new(). The hashes will
-# be tried in the order given, so put the preferred one first
-# for performance. The result of the first hash will be stored
-# in the cache. This will typically be set to multiple values
-# only while migrating from a less secure algorithm to a more
-# secure one. Once all the old tokens are expired this option
-# should be set to a single value for better performance.
-# (list value)
-#hash_algorithms=md5
-
-
-[matchmaker_redis]
-
-#
-# Options defined in oslo.messaging
-#
-
-# Host to locate redis. (string value)
-#host=127.0.0.1
-
-# Use this port to connect to redis host. (integer value)
-#port=6379
-
-# Password for Redis server (optional). (string value)
-#password=
-
-
-[matchmaker_ring]
-
-#
-# Options defined in oslo.messaging
-#
-
-# Matchmaker ring file (JSON). (string value)
-# Deprecated group/name - [DEFAULT]/matchmaker_ringfile
-#ringfile=/etc/oslo/matchmaker_ring.json
-
-
-[service_credentials]
-
-#
-# Options defined in cerberus.service
-#
-
-# User name to use for OpenStack service access. (string
-# value)
-#os_username=cerberus
-
-# Password to use for OpenStack service access. (string value)
-#os_password=admin
-
-# Tenant ID to use for OpenStack service access. (string
-# value)
-#os_tenant_id=
-
-# Tenant name to use for OpenStack service access. (string
-# value)
-#os_tenant_name=admin
-
-# Certificate chain for SSL validation. (string value)
-#os_cacert=
-
-# Auth URL to use for OpenStack service access. (string value)
-#os_auth_url=http://localhost:5000/v2.0
-
-# Region name to use for OpenStack service endpoints. (string
-# value)
-#os_region_name=
-
-# Type of endpoint in Identity service catalog to use for
-# communication with OpenStack services. (string value)
-#os_endpoint_type=publicURL
-
-# Disables X.509 certificate validation when an SSL connection
-# to Identity Service is established. (boolean value)
-#insecure=false
-
-
-[service_types]
-
-#
-# Options defined in cerberus.client.nova_client
-#
-
-# Nova service type. (string value)
-#nova=compute
-
-
-[ssl]
-
-#
-# Options defined in cerberus.openstack.common.sslutils
-#
-
-# CA certificate file to use to verify connecting clients.
-# (string value)
-#ca_file=
-
-# Certificate file to use when starting the server securely.
-# (string value)
-#cert_file=
-
-# Private key file to use when starting the server securely.
-# (string value)
-#key_file=
-
-
diff --git a/etc/cerberus/policy.json b/etc/cerberus/policy.json
deleted file mode 100644
index 4ac0d25..0000000
--- a/etc/cerberus/policy.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-    "context_is_admin": "role:admin",
-    "default": ""
-}
\ No newline at end of file
diff --git a/functionaltests/README.rst b/functionaltests/README.rst
deleted file mode 100644
index c047afe..0000000
--- a/functionaltests/README.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-FUNCTIONAL TEST
-===============
-
-Prerequisites
--------------
-
-1. **tempest**
-2. **tempest_lib**
-3. **nose**
-4. **Configure tempest**
-
-
-USAGE
------
-nosetests -sv cerberus/tests/functional/
\ No newline at end of file
diff --git a/openstack-common.conf b/openstack-common.conf
deleted file mode 100644
index 65befd4..0000000
--- a/openstack-common.conf
+++ /dev/null
@@ -1,36 +0,0 @@
-[DEFAULT]
-
-# The list of modules to copy from oslo-incubator.git
-module=cliutils
-module=config
-module=config.generator
-module=context
-module=db
-module=db.sqlalchemy
-module=db.sqlalchemy.migration_cli
-module=eventlet_backdoor
-module=excutils
-module=fileutils
-module=flakes
-module=gettextutils
-module=importutils
-module=install_venv_common
-module=jsonutils
-module=local
-module=lockutils
-module=log
-module=log_handler
-module=network_utils
-module=notifier
-module=periodic_task
-module=policy
-module=processutils
-module=py3kcompat
-module=service
-module=setup
-module=strutils
-module=timeutils
-module=test
-
-# The base module to hold the copy of openstack.common
-base=cerberus
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 16b865d..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-pbr>=0.6,<1.0
-alembic>=0.4.1,<=0.7.5.post2
-Babel>=1.3,<=1.3
-eventlet>=0.13.0,<0.16.0
-greenlet>=0.3.2,<=0.4.5
-lockfile>=0.8,<=0.10.2
-MySQL-python<=1.2.5
-oslo.config>=1.2.0,<1.5
-oslo.messaging>=1.3.0,<1.5
-oslo.utils<2.0.0
-oslo.serialization<1.7.0
-pecan>=0.4.5,<=0.8.3
-posix_ipc
-python-keystoneclient>=0.7.0,<0.12.0
-python-neutronclient>=2.4.0 # Bug #1469087 in launchpad (https://bugs.launchpad.net/python-neutronclient/+bug/1469087)
-python-novaclient>=2.17.0,<2.21
-six>=1.6.0,<=1.9.0
-SQLAlchemy>=0.7.8,!=0.9.5,<=0.9.99
-WebOb>=1.2.3,<=1.4
-WSME>=0.6,<=0.6.4
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 57ea77c..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,60 +0,0 @@
-[metadata]
-name = cerberus
-summary = Cerberus security component
-description-file =
-    README.rst
-author = OpenStack
-author-email = openstack-dev@lists.openstack.org
-home-page = http://www.openstack.org/
-classifier =
-    Environment :: OpenStack
-    Intended Audience :: Information Technology
-    Intended Audience :: System Administrators
-    License :: OSI Approved :: Apache Software License
-    Operating System :: POSIX :: Linux
-    Programming Language :: Python
-    Programming Language :: Python :: 2
-    Programming Language :: Python :: 2.7
-    Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.3
-    Programming Language :: Python :: 3.4
-
-[files]
-packages =
-    cerberus
-
-[build_sphinx]
-source-dir = doc/source
-build-dir = doc/build
-all_files = 1
-
-[upload_sphinx]
-upload-dir = doc/build/html
-
-[compile_catalog]
-directory = cerberus/locale
-domain = cerberus
-
-[update_catalog]
-domain = cerberus
-output_dir = cerberus/locale
-input_file = cerberus/locale/cerberus.pot
-
-[extract_messages]
-keywords = _ gettext ngettext l_ lazy_gettext
-mapping_file = babel.cfg
-output_file = cerberus/locale/cerberus.pot
-
-[entry_points]
-console_scripts =
-    cerberus-api = cerberus.cmd.api:main
-    cerberus-agent = cerberus.cmd.agent:main
-    dbcreate = cerberus.cmd.db_create:main
-    cerberus-dbsync = cerberus.cmd.dbsync:main
-
-cerberus.plugins =
-    testplugin = cerberus.plugins.test_plugin:TestPlugin
-    taskplugin = cerberus.plugins.task_plugin:TaskPlugin
-
-oslo.messaging.drivers =
-    cerberusdriver = cerberus.common.cerberus_impl_rabbit:CerberusRabbitDriver
diff --git a/setup.py b/setup.py
deleted file mode 100755
index 70c2b3f..0000000
--- a/setup.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
-import setuptools
-
-setuptools.setup(
-    setup_requires=['pbr'],
-    pbr=True)
diff --git a/test-requirements.txt b/test-requirements.txt
deleted file mode 100644
index abef355..0000000
--- a/test-requirements.txt
+++ /dev/null
@@ -1,24 +0,0 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-hacking>=0.8.0,<0.9
-# mock object framework
-mock>=1.0,<=1.0.1
-coverage>=3.6,<=3.7.1
-discover<=0.4.0
-# fixture stubbing
-fixtures>=0.3.14,<=1.0.0
-oslotest>=1.2.0,<1.3
-python-subunit>=0.0.18,<=1.1.0
-nose
-nose-exclude
-nosexcover
-mox>=0.5.3,<=0.5.3
-tempest-lib
-
-# Doc requirements
-sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2
-oslosphinx<=2.5.0
-sphinxcontrib-httpdomain<=1.3.0
-sphinxcontrib-pecanwsme>=0.6,<=0.8.0
diff --git a/tools/config/check_uptodate.sh b/tools/config/check_uptodate.sh
deleted file mode 100755
index 1885e70..0000000
--- a/tools/config/check_uptodate.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-
-PROJECT_NAME=${PROJECT_NAME:-cerberus}
-CFGFILE_NAME=${PROJECT_NAME}.conf.sample
-
-if [ -e etc/${PROJECT_NAME}/${CFGFILE_NAME} ]; then
-    CFGFILE=etc/${PROJECT_NAME}/${CFGFILE_NAME}
-elif [ -e etc/${CFGFILE_NAME} ]; then
-    CFGFILE=etc/${CFGFILE_NAME}
-else
-    echo "${0##*/}: can not find config file"
-    exit 1
-fi
-
-TEMPDIR=`mktemp -d /tmp/${PROJECT_NAME}.XXXXXX`
-trap "rm -rf $TEMPDIR" EXIT
-
-tools/config/generate_sample.sh -b ./ -p ${PROJECT_NAME} -o ${TEMPDIR}
-
-if ! diff -u ${TEMPDIR}/${CFGFILE_NAME} ${CFGFILE}
-then
-    echo "${0##*/}: ${PROJECT_NAME}.conf.sample is not up to date."
-    echo "${0##*/}: Please run ${0%%${0##*/}}generate_sample.sh."
-    exit 1
-fi
diff --git a/tools/config/generate_sample.sh b/tools/config/generate_sample.sh
deleted file mode 100755
index ba63071..0000000
--- a/tools/config/generate_sample.sh
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env bash
-
-print_hint() {
-    echo "Try \`${0##*/} --help' for more information." >&2
-}
-
-PARSED_OPTIONS=$(getopt -n "${0##*/}" -o hb:p:m:l:o: \
-                 --long help,base-dir:,package-name:,output-dir:,module:,library: -- "$@")
-
-if [ $? != 0 ] ; then print_hint ; exit 1 ; fi
-
-eval set -- "$PARSED_OPTIONS"
-
-while true; do
-    case "$1" in
-        -h|--help)
-            echo "${0##*/} [options]"
-            echo ""
-            echo "options:"
-            echo "-h, --help                show brief help"
-            echo "-b, --base-dir=DIR        project base directory"
-            echo "-p, --package-name=NAME   project package name"
-            echo "-o, --output-dir=DIR      file output directory"
-            echo "-m, --module=MOD          extra python module to interrogate for options"
-            echo "-l, --library=LIB         extra library that registers options for discovery"
-            exit 0
-            ;;
-        -b|--base-dir)
-            shift
-            BASEDIR=`echo $1 | sed -e 's/\/*$//g'`
-            shift
-            ;;
-        -p|--package-name)
-            shift
-            PACKAGENAME=`echo $1`
-            shift
-            ;;
-        -o|--output-dir)
-            shift
-            OUTPUTDIR=`echo $1 | sed -e 's/\/*$//g'`
-            shift
-            ;;
-        -m|--module)
-            shift
-            MODULES="$MODULES -m $1"
-            shift
-            ;;
-        -l|--library)
-            shift
-            LIBRARIES="$LIBRARIES -l $1"
-            shift
-            ;;
-        --)
-            break
-            ;;
-    esac
-done
-
-BASEDIR=${BASEDIR:-`pwd`}
-if ! [ -d $BASEDIR ]
-then
-    echo "${0##*/}: missing project base directory" >&2 ; print_hint ; exit 1
-elif [[ $BASEDIR != /* ]]
-then
-    BASEDIR=$(cd "$BASEDIR" && pwd)
-fi
-
-PACKAGENAME=${PACKAGENAME:-$(python setup.py --name)}
-TARGETDIR=$BASEDIR/$PACKAGENAME
-if ! [ -d $TARGETDIR ]
-then
-    echo "${0##*/}: invalid project package name" >&2 ; print_hint ; exit 1
-fi
-
-OUTPUTDIR=${OUTPUTDIR:-$BASEDIR/etc}
-# NOTE(bnemec): Some projects put their sample config in etc/,
-# some in etc/$PACKAGENAME/
-if [ -d $OUTPUTDIR/$PACKAGENAME ]
-then
-    OUTPUTDIR=$OUTPUTDIR/$PACKAGENAME
-elif ! [ -d $OUTPUTDIR ]
-then
-    echo "${0##*/}: cannot access \`$OUTPUTDIR': No such file or directory" >&2
-    exit 1
-fi
-
-BASEDIRESC=`echo $BASEDIR | sed -e 's/\//\\\\\//g'`
-find $TARGETDIR -type f -name "*.pyc" -delete
-FILES=$(find $TARGETDIR -type f -name "*.py" ! -path "*/tests/*" \
-        -exec grep -l "Opt(" {} + | sed -e "s/^$BASEDIRESC\///g" | sort -u)
-
-RC_FILE="`dirname $0`/oslo.config.generator.rc"
-if test -r "$RC_FILE"
-then
-    source "$RC_FILE"
-fi
-
-for mod in ${CERBERUS_CONFIG_GENERATOR_EXTRA_MODULES}; do
-    MODULES="$MODULES -m $mod"
-done
-
-for lib in ${CERBERUS_CONFIG_GENERATOR_EXTRA_LIBRARIES}; do
-    LIBRARIES="$LIBRARIES -l $lib"
-done
-
-export EVENTLET_NO_GREENDNS=yes
-
-OS_VARS=$(set | sed -n '/^OS_/s/=[^=]*$//gp' | xargs)
-[ "$OS_VARS" ] && eval "unset \$OS_VARS"
-DEFAULT_MODULEPATH=cerberus.openstack.common.config.generator
-MODULEPATH=${MODULEPATH:-$DEFAULT_MODULEPATH}
-OUTPUTFILE=$OUTPUTDIR/$PACKAGENAME.conf.sample
-python -m $MODULEPATH $MODULES $LIBRARIES $FILES > $OUTPUTFILE
-
-# Hook to allow projects to append custom config file snippets
-CONCAT_FILES=$(ls $BASEDIR/tools/config/*.conf.sample 2>/dev/null)
-for CONCAT_FILE in $CONCAT_FILES; do
-    cat $CONCAT_FILE >> $OUTPUTFILE
-done
diff --git a/tools/config/oslo.config.generator.rc b/tools/config/oslo.config.generator.rc
deleted file mode 100644
index 2363f94..0000000
--- a/tools/config/oslo.config.generator.rc
+++ /dev/null
@@ -1,2 +0,0 @@
-export CERBERUS_CONFIG_GENERATOR_EXTRA_LIBRARIES='oslo.messaging'
-export CERBERUS_CONFIG_GENERATOR_EXTRA_MODULES=keystoneclient.middleware.auth_token
\ No newline at end of file
diff --git a/tools/install_venv_common.py b/tools/install_venv_common.py
deleted file mode 100644
index 46822e3..0000000
--- a/tools/install_venv_common.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-# Copyright 2013 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Provides methods needed by installation script for OpenStack development
-virtual environments.
-
-Since this script is used to bootstrap a virtualenv from the system's Python
-environment, it should be kept strictly compatible with Python 2.6.
-
-Synced in from openstack-common
-"""
-
-from __future__ import print_function
-
-import optparse
-import os
-import subprocess
-import sys
-
-
-class InstallVenv(object):
-
-    def __init__(self, root, venv, requirements,
-                 test_requirements, py_version,
-                 project):
-        self.root = root
-        self.venv = venv
-        self.requirements = requirements
-        self.test_requirements = test_requirements
-        self.py_version = py_version
-        self.project = project
-
-    def die(self, message, *args):
-        print(message % args, file=sys.stderr)
-        sys.exit(1)
-
-    def check_python_version(self):
-        if sys.version_info < (2, 6):
-            self.die("Need Python Version >= 2.6")
-
-    def run_command_with_code(self, cmd, redirect_output=True,
-                              check_exit_code=True):
-        """Runs a command in an out-of-process shell.
-
-        Returns the output of that command. Working directory is self.root.
-        """
-        if redirect_output:
-            stdout = subprocess.PIPE
-        else:
-            stdout = None
-
-        proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout)
-        output = proc.communicate()[0]
-        if check_exit_code and proc.returncode != 0:
-            self.die('Command "%s" failed.\n%s', ' '.join(cmd), output)
-        return (output, proc.returncode)
-
-    def run_command(self, cmd, redirect_output=True, check_exit_code=True):
-        return self.run_command_with_code(cmd, redirect_output,
-                                          check_exit_code)[0]
-
-    def get_distro(self):
-        if (os.path.exists('/etc/fedora-release') or
-                os.path.exists('/etc/redhat-release')):
-            return Fedora(
-                self.root, self.venv, self.requirements,
-                self.test_requirements, self.py_version, self.project)
-        else:
-            return Distro(
-                self.root, self.venv, self.requirements,
-                self.test_requirements, self.py_version, self.project)
-
-    def check_dependencies(self):
-        self.get_distro().install_virtualenv()
-
-    def create_virtualenv(self, no_site_packages=True):
-        """Creates the virtual environment and installs PIP.
-
-        Creates the virtual environment and installs PIP only into the
-        virtual environment.
-        """
-        if not os.path.isdir(self.venv):
-            print('Creating venv...', end=' ')
-            if no_site_packages:
-                self.run_command(['virtualenv', '-q', '--no-site-packages',
-                                  self.venv])
-            else:
-                self.run_command(['virtualenv', '-q', self.venv])
-            print('done.')
-        else:
-            print("venv already exists...")
-            pass
-
-    def pip_install(self, *args):
-        self.run_command(['tools/with_venv.sh',
-                          'pip', 'install', '--upgrade'] + list(args),
-                         redirect_output=False)
-
-    def install_dependencies(self):
-        print('Installing dependencies with pip (this can take a while)...')
-
-        # First things first, make sure our venv has the latest pip and
-        # setuptools and pbr
-        self.pip_install('pip>=1.4')
-        self.pip_install('setuptools')
-        self.pip_install('pbr')
-
-        self.pip_install('-r', self.requirements, '-r', self.test_requirements)
-
-    def parse_args(self, argv):
-        """Parses command-line arguments."""
-        parser = optparse.OptionParser()
-        parser.add_option('-n', '--no-site-packages',
-                          action='store_true',
-                          help="Do not inherit packages from global Python "
-                               "install")
-        return parser.parse_args(argv[1:])[0]
-
-
-class Distro(InstallVenv):
-
-    def check_cmd(self, cmd):
-        return bool(self.run_command(['which', cmd],
-                                     check_exit_code=False).strip())
-
-    def install_virtualenv(self):
-        if self.check_cmd('virtualenv'):
-            return
-
-        if self.check_cmd('easy_install'):
-            print('Installing virtualenv via easy_install...', end=' ')
-            if self.run_command(['easy_install', 'virtualenv']):
-                print('Succeeded')
-                return
-            else:
-                print('Failed')
-
-        self.die('ERROR: virtualenv not found.\n\n%s development'
-                 ' requires virtualenv, please install it using your'
-                 ' favorite package management tool' % self.project)
-
-
-class Fedora(Distro):
-    """This covers all Fedora-based distributions.
-
-    Includes: Fedora, RHEL, CentOS, Scientific Linux
-    """
-
-    def check_pkg(self, pkg):
-        return self.run_command_with_code(['rpm', '-q', pkg],
-                                          check_exit_code=False)[1] == 0
-
-    def install_virtualenv(self):
-        if self.check_cmd('virtualenv'):
-            return
-
-        if not self.check_pkg('python-virtualenv'):
-            self.die("Please install 'python-virtualenv'.")
-
-        super(Fedora, self).install_virtualenv()
diff --git a/tools/pretty_tox.sh b/tools/pretty_tox.sh
deleted file mode 100644
index 6c4759b..0000000
--- a/tools/pretty_tox.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#! /bin/sh
-
-TESTRARGS=$1
-
-exec 3>&1
-status=$(exec 4>&1 >&3; ( python setup.py testr --slowest --testr-args="--subunit $TESTRARGS"; echo $? >&4 ) | subunit2junitxml --output-to=junitxml-result.xml) && exit $status
diff --git a/tools/subunit-trace.py b/tools/subunit-trace.py
deleted file mode 100755
index 73f2f10..0000000
--- a/tools/subunit-trace.py
+++ /dev/null
@@ -1,307 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 Hewlett-Packard Development Company, L.P.
-# Copyright 2014 Samsung Electronics
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
- -"""Trace a subunit stream in reasonable detail and high accuracy.""" - -import argparse -import functools -import os -import re -import sys - -import mimeparse -import subunit -import testtools - -DAY_SECONDS = 60 * 60 * 24 -FAILS = [] -RESULTS = {} - - -class Starts(testtools.StreamResult): - - def __init__(self, output): - super(Starts, self).__init__() - self._output = output - - def startTestRun(self): - self._neednewline = False - self._emitted = set() - - def status(self, test_id=None, test_status=None, test_tags=None, - runnable=True, file_name=None, file_bytes=None, eof=False, - mime_type=None, route_code=None, timestamp=None): - super(Starts, self).status( - test_id, test_status, - test_tags=test_tags, runnable=runnable, file_name=file_name, - file_bytes=file_bytes, eof=eof, mime_type=mime_type, - route_code=route_code, timestamp=timestamp) - if not test_id: - if not file_bytes: - return - if not mime_type or mime_type == 'test/plain;charset=utf8': - mime_type = 'text/plain; charset=utf-8' - primary, sub, parameters = mimeparse.parse_mime_type(mime_type) - content_type = testtools.content_type.ContentType( - primary, sub, parameters) - content = testtools.content.Content( - content_type, lambda: [file_bytes]) - text = content.as_text() - if text and text[-1] not in '\r\n': - self._neednewline = True - self._output.write(text) - elif test_status == 'inprogress' and test_id not in self._emitted: - if self._neednewline: - self._neednewline = False - self._output.write('\n') - worker = '' - for tag in test_tags or (): - if tag.startswith('worker-'): - worker = '(' + tag[7:] + ') ' - if timestamp: - timestr = timestamp.isoformat() - else: - timestr = '' - self._output.write('%s: %s%s [start]\n' % - (timestr, worker, test_id)) - self._emitted.add(test_id) - - -def cleanup_test_name(name, strip_tags=True, strip_scenarios=False): - """Clean up the test name for display. - - By default we strip out the tags in the test because they don't help us - in identifying the test that is run to it's result. - - Make it possible to strip out the testscenarios information (not to - be confused with tempest scenarios) however that's often needed to - indentify generated negative tests. - """ - if strip_tags: - tags_start = name.find('[') - tags_end = name.find(']') - if tags_start > 0 and tags_end > tags_start: - newname = name[:tags_start] - newname += name[tags_end + 1:] - name = newname - - if strip_scenarios: - tags_start = name.find('(') - tags_end = name.find(')') - if tags_start > 0 and tags_end > tags_start: - newname = name[:tags_start] - newname += name[tags_end + 1:] - name = newname - - return name - - -def get_duration(timestamps): - start, end = timestamps - if not start or not end: - duration = '' - else: - delta = end - start - duration = '%d.%06ds' % ( - delta.days * DAY_SECONDS + delta.seconds, delta.microseconds) - return duration - - -def find_worker(test): - for tag in test['tags']: - if tag.startswith('worker-'): - return int(tag[7:]) - return 'NaN' - - -# Print out stdout/stderr if it exists, always -def print_attachments(stream, test, all_channels=False): - """Print out subunit attachments. - - Print out subunit attachments that contain content. This - runs in 2 modes, one for successes where we print out just stdout - and stderr, and an override that dumps all the attachments. 
- """ - channels = ('stdout', 'stderr') - for name, detail in test['details'].items(): - # NOTE(sdague): the subunit names are a little crazy, and actually - # are in the form pythonlogging:'' (with the colon and quotes) - name = name.split(':')[0] - if detail.content_type.type == 'test': - detail.content_type.type = 'text' - if (all_channels or name in channels) and detail.as_text(): - title = "Captured %s:" % name - stream.write("\n%s\n%s\n" % (title, ('~' * len(title)))) - # indent attachment lines 4 spaces to make them visually - # offset - for line in detail.as_text().split('\n'): - stream.write(" %s\n" % line) - - -def show_outcome(stream, test, print_failures=False, failonly=False): - global RESULTS - status = test['status'] - # TODO(sdague): ask lifeless why on this? - if status == 'exists': - return - - worker = find_worker(test) - name = cleanup_test_name(test['id']) - duration = get_duration(test['timestamps']) - - if worker not in RESULTS: - RESULTS[worker] = [] - RESULTS[worker].append(test) - - # don't count the end of the return code as a fail - if name == 'process-returncode': - return - - if status == 'fail': - FAILS.append(test) - stream.write('{%s} %s [%s] ... FAILED\n' % ( - worker, name, duration)) - if not print_failures: - print_attachments(stream, test, all_channels=True) - elif not failonly: - if status == 'success': - stream.write('{%s} %s [%s] ... ok\n' % ( - worker, name, duration)) - print_attachments(stream, test) - elif status == 'skip': - stream.write('{%s} %s ... SKIPPED: %s\n' % ( - worker, name, test['details']['reason'].as_text())) - else: - stream.write('{%s} %s [%s] ... %s\n' % ( - worker, name, duration, test['status'])) - if not print_failures: - print_attachments(stream, test, all_channels=True) - - stream.flush() - - -def print_fails(stream): - """Print summary failure report. - - Currently unused, however there remains debate on inline vs. at end - reporting, so leave the utility function for later use. - """ - if not FAILS: - return - stream.write("\n==============================\n") - stream.write("Failed %s tests - output below:" % len(FAILS)) - stream.write("\n==============================\n") - for f in FAILS: - stream.write("\n%s\n" % f['id']) - stream.write("%s\n" % ('-' * len(f['id']))) - print_attachments(stream, f, all_channels=True) - stream.write('\n') - - -def count_tests(key, value): - count = 0 - for k, v in RESULTS.items(): - for item in v: - if key in item: - if re.search(value, item[key]): - count += 1 - return count - - -def run_time(): - runtime = 0.0 - for k, v in RESULTS.items(): - for test in v: - runtime += float(get_duration(test['timestamps']).strip('s')) - return runtime - - -def worker_stats(worker): - tests = RESULTS[worker] - num_tests = len(tests) - delta = tests[-1]['timestamps'][1] - tests[0]['timestamps'][0] - return num_tests, delta - - -def print_summary(stream): - stream.write("\n======\nTotals\n======\n") - stream.write("Run: %s in %s sec.\n" % (count_tests('status', '.*'), - run_time())) - stream.write(" - Passed: %s\n" % count_tests('status', 'success')) - stream.write(" - Skipped: %s\n" % count_tests('status', 'skip')) - stream.write(" - Failed: %s\n" % count_tests('status', 'fail')) - - # we could have no results, especially as we filter out the process-codes - if RESULTS: - stream.write("\n==============\nWorker Balance\n==============\n") - - for w in range(max(RESULTS.keys()) + 1): - if w not in RESULTS: - stream.write( - " - WARNING: missing Worker %s! 
" - "Race in testr accounting.\n" % w) - else: - num, time = worker_stats(w) - stream.write(" - Worker %s (%s tests) => %ss\n" % - (w, num, time)) - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--no-failure-debug', '-n', action='store_true', - dest='print_failures', help='Disable printing failure ' - 'debug information in realtime') - parser.add_argument('--fails', '-f', action='store_true', - dest='post_fails', help='Print failure debug ' - 'information after the stream is proccesed') - parser.add_argument('--failonly', action='store_true', - dest='failonly', help="Don't print success items", - default=( - os.environ.get('TRACE_FAILONLY', False) - is not False)) - return parser.parse_args() - - -def main(): - args = parse_args() - stream = subunit.ByteStreamToStreamResult( - sys.stdin, non_subunit_name='stdout') - starts = Starts(sys.stdout) - outcomes = testtools.StreamToDict( - functools.partial(show_outcome, sys.stdout, - print_failures=args.print_failures, - failonly=args.failonly - )) - summary = testtools.StreamSummary() - result = testtools.CopyStreamResult([starts, outcomes, summary]) - result.startTestRun() - try: - stream.run(result) - finally: - result.stopTestRun() - if count_tests('status', '.*') == 0: - print("The test run didn't actually run any tests") - return 1 - if args.post_fails: - print_fails(sys.stdout) - print_summary(sys.stdout) - return (0 if summary.wasSuccessful() else 1) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 9be39b3..0000000 --- a/tox.ini +++ /dev/null @@ -1,50 +0,0 @@ -[tox] -minversion = 1.6 -envlist = py33,py34,py26,py27,pypy,pep8 -skipsdist = True - -[testenv] -usedevelop = True -install_command = pip install -U {opts} {packages} -setenv = VIRTUAL_ENV={envdir} -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt -commands = - bash -c "TESTS_DIR=./cerberus/tests/unit/ python setup.py testr --slowest --testr-args='{posargs}'" - -[testenv:pep8] -commands = flake8 {posargs} cerberus - -[testenv:venv] -commands = {posargs} - -[testenv:cover] -commands = - python setup.py testr --coverage {posargs} - -[testenv:docs] -commands = python setup.py build_sphinx - -[flake8] -# E125 continuation line does not distinguish itself from next logical line -# E126 continuation line over-indented for hanging indent -# E128 continuation line under-indented for visual indent -# E129 visually indented line with same indent as next logical line -# E265 block comment should start with ‘# ‘ -# E713 test for membership should be ‘not in’ -# F402 import module shadowed by loop variable -# F811 redefinition of unused variable -# F812 list comprehension redefines name from line -# H104 file contains nothing but comments -# H237 module is removed in Python 3 -# H305 imports not grouped correctly -# H307 like imports should be grouped together -# H401 docstring should not start with a space -# H402 one line docstring needs punctuation -# H405 multi line docstring summary not separated with an empty line -# H904 Wrap long lines in parentheses instead of a backslash -# TODO(marun) H404 multi line docstring should start with a summary -ignore = E125,E126,E128,E129,E265,E713,F402,F811,F812,H104,H237,H305,H307,H401,H402,H404,H405,H904 -show-source = true -builtins = _ -exclude = .venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,tools