Diffstat (limited to 'moon_cache')
-rw-r--r--  moon_cache/.gitignore                                  105
-rw-r--r--  moon_cache/Changelog                                    34
-rw-r--r--  moon_cache/LICENSE                                     202
-rw-r--r--  moon_cache/MANIFEST.in                                  10
-rw-r--r--  moon_cache/README.md                                     3
-rw-r--r--  moon_cache/moon_cache/__init__.py                       13
-rw-r--r--  moon_cache/moon_cache/cache.py                        1466
-rw-r--r--  moon_cache/moon_cache/context.py                       345
-rw-r--r--  moon_cache/moon_cache/request_wrapper.py                35
-rw-r--r--  moon_cache/requirements.txt                              1
-rw-r--r--  moon_cache/setup.py                                     51
-rw-r--r--  moon_cache/tests/unit_python/api/__init__.py            11
-rw-r--r--  moon_cache/tests/unit_python/api/test_attributes.py     36
-rw-r--r--  moon_cache/tests/unit_python/api/test_cache.py         535
-rw-r--r--  moon_cache/tests/unit_python/conftest.py               151
-rw-r--r--  moon_cache/tests/unit_python/mock_repo/__init__.py      12
-rw-r--r--  moon_cache/tests/unit_python/mock_repo/data.py         306
-rw-r--r--  moon_cache/tests/unit_python/mock_repo/urls.py         174
-rw-r--r--  moon_cache/tests/unit_python/requirements.txt            7
19 files changed, 3497 insertions, 0 deletions
diff --git a/moon_cache/.gitignore b/moon_cache/.gitignore
new file mode 100644
index 00000000..7bff7318
--- /dev/null
+++ b/moon_cache/.gitignore
@@ -0,0 +1,105 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
diff --git a/moon_cache/Changelog b/moon_cache/Changelog
new file mode 100644
index 00000000..23ce6f5f
--- /dev/null
+++ b/moon_cache/Changelog
@@ -0,0 +1,34 @@
+# Copyright 2018 Orange and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+
+CHANGES
+=======
+
+0.1
+---
+- First version of the moon_cache library.
+
+0.2
+---
+- Add authentication to all requests
+- Cache the Manager API key
+- Add pipeline information in cache
+- Add getInstance method so the cache behaves as a singleton
+- Call update from __init__
+
+0.3
+---
+- Fix VIM project ID in Cache and Context
+
+0.4
+---
+- Fix partial update problem in cache
+- Fix update_pipelines issue
+
+0.5
+---
+- Add API to allow modifications of items in cache
+- Add the global attribute in cache
diff --git a/moon_cache/LICENSE b/moon_cache/LICENSE
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/moon_cache/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/moon_cache/MANIFEST.in b/moon_cache/MANIFEST.in
new file mode 100644
index 00000000..21c2cf92
--- /dev/null
+++ b/moon_cache/MANIFEST.in
@@ -0,0 +1,10 @@
+# Copyright 2018 Orange and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+include README.md
+include LICENSE
+include Changelog
+include setup.py
+include requirements.txt
diff --git a/moon_cache/README.md b/moon_cache/README.md
new file mode 100644
index 00000000..3a28813d
--- /dev/null
+++ b/moon_cache/README.md
@@ -0,0 +1,3 @@
+# moon_cache
+
+Python library that implements the cache system of the Moon engine
\ No newline at end of file
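
Reviewer note: the sketch below shows how the Cache singleton added in moon_cache/moon_cache/cache.py (diffed further down) might be instantiated against a Manager and queried. It is a minimal sketch rather than part of this change: the Manager URL, API key and project ID are placeholders, and only Cache, getInstance(), the subjects property and get_pipeline_url() come from the code in this commit.

    from moon_cache.cache import Cache

    # Placeholder Manager endpoint and API key (not real values).
    cache = Cache.getInstance(manager_url="http://localhost:8000",
                              manager_api_key="<manager-api-key>")

    # The subjects cache is keyed by policy ID; entries are fetched on demand
    # (e.g. by get_subject) or by a full update when incremental is False.
    for policy_id, subjects in cache.subjects.items():
        print(policy_id, list(subjects))

    # Resolve the URL of the pipeline serving a given VIM project.
    url = cache.get_pipeline_url(project_id="<vim-project-id>")
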
diff --git a/moon_cache/moon_cache/__init__.py b/moon_cache/moon_cache/__init__.py
new file mode 100644
index 00000000..9f3c4ca0
--- /dev/null
+++ b/moon_cache/moon_cache/__init__.py
@@ -0,0 +1,13 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+__version__ = "0.7"
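
Reviewer note: cache.py (next diff) can also be primed without a Manager by passing a static configuration to getInstance(), which hands it to update_from_static_conf(). The hypothetical sketch below uses the dictionary keys that update_from_static_conf() reads; the names and values themselves are invented for illustration, and further keys ("pdp", "meta_rules", "subjects_categories", ...) are accepted as well.

    from moon_cache.cache import Cache

    # Hypothetical static configuration; each list may hold more entries.
    static_conf = {
        "policies": [{"name": "policy_rbac", "model_id": "model_rbac"}],
        "models": [{"name": "model_rbac", "meta_rules": []}],
        "subjects": [{"name": "alice", "policy_list": []}],
        "objects": [],
        "actions": [],
        "rules": [],
    }

    cache = Cache.getInstance(static_conf=static_conf)
    print(cache.policies)  # entries are re-keyed with generated UUIDs
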
diff --git a/moon_cache/moon_cache/cache.py b/moon_cache/moon_cache/cache.py
new file mode 100644
index 00000000..3fb9018d
--- /dev/null
+++ b/moon_cache/moon_cache/cache.py
@@ -0,0 +1,1466 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+import logging
+import time
+import copy
+from uuid import uuid4
+import moon_cache.request_wrapper as requests
+from moon_utilities import exceptions
+
+logger = logging.getLogger("moon.cache.cache")
+
+
+class Cache(object):
+ """
+    Cache object storing all the data needed by a specific Moon component
+ """
+
+ __UPDATE_INTERVAL = 10
+
+ __MANAGER_API_KEY = None
+
+ __CURRENT_SERVER = None
+ __CURRENT_SERVER_API_KEY = None
+
+ __pipelines = {}
+ __PIPELINES_UPDATE = 0
+
+ __security_functions = {}
+ __SECURITY_FUNCTIONS_UPDATE = 0
+
+ __attributes = {}
+ __ATTRS_UPDATE = 0
+
+ __pdp = {}
+ __PDP_UPDATE = 0
+
+ __policies = {}
+ __POLICIES_UPDATE = 0
+
+ __models = {}
+ __MODELS_UPDATE = 0
+
+ __subjects = {}
+ __objects = {}
+ __actions = {}
+
+ __subject_assignments = {}
+ __object_assignments = {}
+ __action_assignments = {}
+
+ __subject_categories = {}
+ __SUBJECT_CATEGORIES_UPDATE = 0
+ __object_categories = {}
+ __OBJECT_CATEGORIES_UPDATE = 0
+ __action_categories = {}
+ __ACTION_CATEGORIES_UPDATE = 0
+
+ __meta_rules = {}
+ __META_RULES_UPDATE = 0
+
+ __rules = {}
+ __RULES_UPDATE = 0
+
+ __subject_data = []
+ __object_data = []
+ __action_data = []
+
+ __authz_requests = {}
+
+ __instance = None
+
+ @staticmethod
+ def getInstance(manager_url=None, incremental=True, manager_api_key=None, static_conf=None):
+ """ Static access method. """
+
+        if Cache.__instance is None:
+ Cache(manager_url, incremental, manager_api_key, static_conf)
+
+ return Cache.__instance
+
+ @staticmethod
+ def deleteInstance():
+ Cache.__instance = None
+
+ def __init__(self, manager_url=None, incremental=True, manager_api_key=None, static_conf=None):
+
+        if Cache.__instance is not None:
+ raise exceptions.MoonError("This class is a singleton! use getInstance() instead")
+ else:
+ Cache.__instance = self
+
+ self.manager_url = manager_url
+ self.incremental = incremental
+ self.headers = {"X-Api-Key": manager_api_key}
+ self.__MANAGER_API_KEY = manager_api_key
+
+ if self.manager_url:
+ self.update()
+ elif static_conf:
+ self.update_from_static_conf(static_conf)
+ logger.info("Update done!")
+
+ def update(self, pdp_id=None, pipeline=None):
+ """
+ Force the update of one or more elements
+ :param pdp_id: PDP ID
+ :param pipeline: Pipeline ID
+ :return: None
+ """
+ if not self.manager_url:
+ return
+ if pipeline:
+ # if we are in wrapper, retrieve pipeline list from pdp
+ self.__update_pipelines(pipeline)
+ else:
+ # else update pdp and policy list
+ self.__update_pdp(pdp_id)
+ self.__update_policies(pdp_id)
+ if not self.incremental:
+                # if set in the config file, retrieve all data from the Manager
+ self.__update_models()
+ self.__update_meta_rules()
+ self.__update_rules()
+ self.__update_subject_categories()
+ self.__update_object_categories()
+ self.__update_action_categories()
+ for _policy_id in self.__policies:
+ self.__update_subjects(_policy_id)
+ self.__update_objects(_policy_id)
+ self.__update_actions(_policy_id)
+ self.__update_subject_assignments(_policy_id)
+ self.__update_object_assignments(_policy_id)
+ self.__update_action_assignments(_policy_id)
+
+ def set_current_server(self, url, api_key):
+ self.__CURRENT_SERVER = url
+ self.__CURRENT_SERVER_API_KEY = api_key
+
+ @staticmethod
+ def __update_list_to_dict(data):
+ """
+        Transform a list into a dictionary
+ :param data: the list to be transformed
+ :return: a dictionary
+ """
+ return {uuid4().hex: value for value in data}
+
+ def update_from_static_conf(self, conf):
+ """
+ Get the data in the JSON file and save its content into the cache
+ :param conf: the content of the JSON file
+ :return:
+ """
+ logger.info("update_from_static_conf {}".format(conf.get("policies", [])))
+ self.__pdp = self.__update_list_to_dict(conf.get("pdp", []))
+ self.__policies = self.__update_list_to_dict(conf.get("policies", []))
+ self.__models = self.__update_list_to_dict(conf.get("models", []))
+ self.__subjects = self.__update_list_to_dict(conf.get("subjects", []))
+ self.__objects = self.__update_list_to_dict(conf.get("objects", []))
+ self.__actions = self.__update_list_to_dict(conf.get("actions", []))
+ self.__subject_categories = self.__update_list_to_dict(conf.get("subjects_categories", []))
+ self.__object_categories = self.__update_list_to_dict(conf.get("objects_categories", []))
+ self.__action_categories = self.__update_list_to_dict(conf.get("actions_categories", []))
+ # FIXME: should add DATA in Cache
+ self.__subject_data = conf.get("subjects_data", [])
+ self.__object_data = conf.get("objects_data", [])
+ self.__action_data = conf.get("actions_data", [])
+ self.__subject_assignments = self.__update_list_to_dict(conf.get("subjects_assignments", []))
+ self.__object_assignments = self.__update_list_to_dict(conf.get("objects_assignments", []))
+ self.__action_assignments = self.__update_list_to_dict(conf.get("actions_assignments", []))
+ self.__rules = conf.get("rules", [])
+ self.__meta_rules = self.__update_list_to_dict(conf.get("meta_rules", []))
+
+ @property
+ def manager_api_token(self):
+ return self.__MANAGER_API_KEY
+
+ @property
+ def authz_requests(self):
+ """
+ Authorization requests
+ :return: a dictionary
+ """
+ return self.__authz_requests
+
+ # Attributes
+
+ @property
+ def attributes(self):
+ """
+ Global attributes
+ :return: a dictionary containing attributes
+ """
+ _keys = list(self.__attributes.keys())
+ self.update_attribute()
+ return self.__attributes
+
+ def set_attribute(self, name, value=None):
+ """
+ Set one global attribute
+ :return: a dictionary containing attributes
+ """
+ self.__attributes[name] = value
+ return self.__attributes
+
+ def update_attribute(self, name=None):
+ """
+        Update one global attribute (or all of them if no name is given) from the Manager
+ :return: a dictionary containing attributes
+ """
+ if self.manager_url:
+ if not name:
+ response = requests.get("{}/attributes".format(self.manager_url),
+ headers=self.headers)
+ for key in response.json()["attributes"]:
+ self.__attributes[key] = response.json()["attributes"][key]['value']
+ else:
+ response = requests.get("{}/attributes/{}".format(self.manager_url, name),
+ headers=self.headers)
+ self.__attributes[name] = response.json()["attributes"]['value']
+
+ # perimeter functions
+
+ def __check_policies(self, policy_ids):
+ for policy in policy_ids:
+ if policy not in self.__policies:
+ raise exceptions.PolicyUnknown
+
+ @property
+ def subjects(self):
+ """
+ Subjects
+ :return: a dictionary
+ """
+ return self.__subjects
+
+ def add_subject(self, value):
+ _id = value.get("id", uuid4().hex)
+ self.__check_policies(value.get("policy_list", []))
+ policy_id = value.get("policy_list", [])[0]
+ if policy_id not in self.__subjects:
+ self.__subjects[policy_id] = {_id: dict(value)}
+ else:
+ self.__subjects[policy_id][_id] = dict(value)
+ return {_id: dict(value)}
+
+ def delete_subject(self, policy_id=None, perimeter_id=None):
+ if not policy_id and perimeter_id:
+ self.__subjects.pop(perimeter_id)
+ return
+ elif not perimeter_id:
+ self.__subjects = {}
+ return
+ self.__check_policies([policy_id])
+ self.__subjects[perimeter_id].get("policies").remove(policy_id)
+
+ def update_subject(self, perimeter_id, value):
+ self.__check_policies(value.get("policies", []))
+ policy_id = value.get("policy_list", [])[0]
+ _policies = self.__subjects[policy_id][perimeter_id].get("policies", [])
+ for policy in value.get("policies", []):
+ if policy not in _policies:
+ _policies.append(policy)
+ value.pop("policies", None)
+ prev_dict = dict(self.__subjects[policy_id][perimeter_id])
+ prev_dict.update(value)
+ prev_dict["policies"] = _policies
+ self.__subjects[policy_id][perimeter_id] = dict(prev_dict)
+
+ def __update_subjects(self, policy_id):
+ """
+ Update all subjects in a specific policy
+ :param policy_id: the policy ID
+ :return: None
+ """
+ response = requests.get("{}/policies/{}/subjects".format(self.manager_url, policy_id),
+ headers=self.headers)
+ if 'subjects' in response.json():
+ self.__subjects[policy_id] = response.json()['subjects']
+ else:
+ raise exceptions.SubjectUnknown("Cannot find subject within policy_id {}".format(
+ policy_id))
+
+ def get_subject(self, policy_id, name):
+ """
+ Get one subject knowing its name
+ :param policy_id: the policy ID
+ :param name: the subject name
+        :return: the subject ID
+ """
+ if not policy_id:
+ raise exceptions.PolicyUnknown("Cannot find policy within policy_id {}".format(
+ policy_id))
+
+ if policy_id in self.subjects:
+ for _subject_id, _subject_dict in self.subjects[policy_id].items():
+ if _subject_id == name or _subject_dict.get("name") == name:
+ return _subject_id
+
+ if self.manager_url:
+ self.__update_subjects(policy_id)
+
+ if policy_id in self.subjects:
+ for _subject_id, _subject_dict in self.subjects[policy_id].items():
+ if _subject_id == name or _subject_dict.get("name") == name:
+ return _subject_id
+
+ raise exceptions.SubjectUnknown("Cannot find subject {}".format(name))
+
+ @property
+ def objects(self):
+ """
+ Objects
+ :return: a dictionary
+ """
+ return self.__objects
+
+ def add_object(self, value):
+ _id = value.get("id", uuid4().hex)
+ self.__check_policies(value.get("policy_list", []))
+ policy_id = value.get("policy_list", [])[0]
+ if policy_id not in self.__objects:
+ self.__objects[policy_id] = {_id: dict(value)}
+ else:
+ self.__objects[policy_id][_id] = dict(value)
+ return {_id: dict(value)}
+
+ def delete_object(self, policy_id=None, perimeter_id=None):
+ if not policy_id and perimeter_id:
+ self.__objects.pop(perimeter_id)
+ return
+ elif not perimeter_id:
+ self.__objects = {}
+ return
+ self.__check_policies([policy_id])
+ self.__objects[perimeter_id].get("policies").remove(policy_id)
+
+ def update_object(self, perimeter_id, value):
+ self.__check_policies(value.get("policies", []))
+ policy_id = value.get("policy_list", [])[0]
+ _policies = self.__objects[policy_id][perimeter_id].get("policies", [])
+ for policy in value.get("policies", []):
+ if policy not in _policies:
+ _policies.append(policy)
+ value.pop("policies", None)
+ prev_dict = dict(self.__objects[policy_id][perimeter_id])
+ prev_dict.update(value)
+ prev_dict["policies"] = _policies
+ self.__objects[policy_id][perimeter_id] = dict(prev_dict)
+
+ def __update_objects(self, policy_id):
+ """
+ Update all objects in a specific policy
+ :param policy_id: the policy ID
+ :return: None
+ """
+ response = requests.get("{}/policies/{}/objects".format(self.manager_url, policy_id),
+ headers=self.headers)
+ if 'objects' in response.json():
+ self.__objects[policy_id] = response.json()['objects']
+ else:
+ raise exceptions.ObjectUnknown("Cannot find object within policy_id {}".format(
+ policy_id))
+
+ def get_object(self, policy_id, name):
+ """
+ Get an object knowing its name
+ :param policy_id: the policy ID
+ :param name: the object name
+        :return: the object ID
+ """
+ if not policy_id:
+ raise exceptions.PolicyUnknown("Cannot find policy within policy_id {}".format(
+ policy_id))
+
+ if policy_id in self.objects:
+ for _object_id, _object_dict in self.__objects[policy_id].items():
+ if _object_id == name or _object_dict.get("name") == name:
+ return _object_id
+
+ if self.manager_url:
+ self.__update_objects(policy_id)
+
+ if policy_id in self.objects:
+ for _object_id, _object_dict in self.__objects[policy_id].items():
+ if _object_id == name or _object_dict.get("name") == name:
+ return _object_id
+
+ raise exceptions.ObjectUnknown("Cannot find object {}".format(name))
+
+ @property
+ def actions(self):
+ """
+ Actions
+ :return: a dictionary
+ """
+ return self.__actions
+
+ def add_action(self, value):
+ _id = value.get("id", uuid4().hex)
+ self.__check_policies(value.get("policy_list", []))
+ policy_id = value.get("policy_list", [])[0]
+ if policy_id not in self.__actions:
+ self.__actions[policy_id] = {_id: dict(value)}
+ else:
+ self.__actions[policy_id][_id] = dict(value)
+ return {_id: dict(value)}
+
+ def delete_action(self, policy_id=None, perimeter_id=None):
+ if not policy_id and perimeter_id:
+ self.__actions.pop(perimeter_id)
+ return
+ elif not perimeter_id:
+ self.__actions = {}
+ return
+ self.__check_policies([policy_id])
+ self.__actions[perimeter_id].get("policies").remove(policy_id)
+
+ def update_action(self, perimeter_id, value):
+ self.__check_policies(value.get("policies", []))
+ policy_id = value.get("policy_list", [])[0]
+ _policies = self.__actions[policy_id][perimeter_id].get("policies", [])
+ for policy in value.get("policies", []):
+ if policy not in _policies:
+ _policies.append(policy)
+ value.pop("policies", None)
+ prev_dict = dict(self.__actions[policy_id][perimeter_id])
+ prev_dict.update(value)
+ prev_dict["policies"] = _policies
+ self.__actions[policy_id][perimeter_id] = dict(prev_dict)
+
+ def __update_actions(self, policy_id):
+ """
+ Update all actions in a specific policy
+ :param policy_id: the policy ID
+ :return: None
+ """
+ response = requests.get("{}/policies/{}/actions".format(self.manager_url, policy_id),
+ headers=self.headers)
+
+ if 'actions' in response.json():
+ self.__actions[policy_id] = response.json()['actions']
+ else:
+ raise exceptions.ActionUnknown("Cannot find action within policy_id {}".format(
+ policy_id))
+
+ def get_action(self, policy_id, name):
+ """
+ Get an action knowing its name
+ :param policy_id: the policy ID
+ :param name: the action name
+        :return: the action ID
+ """
+ if not policy_id:
+ raise exceptions.PolicyUnknown("Cannot find policy within policy_id {}".format(
+ policy_id))
+
+ if policy_id in self.actions:
+ for _action_id, _action_dict in self.__actions[policy_id].items():
+ if _action_id == name or _action_dict.get("name") == name:
+ return _action_id
+
+ if self.manager_url:
+ self.__update_actions(policy_id)
+
+ for _action_id, _action_dict in self.__actions[policy_id].items():
+ if _action_id == name or _action_dict.get("name") == name:
+ return _action_id
+
+ raise exceptions.ActionUnknown("Cannot find action {}".format(name))
+
+ # meta_rule functions
+
+ @property
+ def meta_rules(self):
+ """
+ Meta Rules
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__META_RULES_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__META_RULES_UPDATE = current_time
+ self.__update_meta_rules()
+ self.__META_RULES_UPDATE = current_time
+ return self.__meta_rules
+
+ def add_meta_rule(self, value):
+ _id = uuid4().hex
+ self.__meta_rules[_id] = dict(value)
+
+ def delete_meta_rule(self, meta_rule_id):
+ self.__meta_rules.pop(meta_rule_id)
+
+ def __update_meta_rules(self):
+ """
+ Update all meta rules
+ :return: None
+ """
+ response = requests.get("{}/meta_rules".format(self.manager_url), headers=self.headers)
+
+ if 'meta_rules' in response.json():
+ self.__meta_rules = response.json()['meta_rules']
+ else:
+ raise exceptions.MetaRuleUnknown("Cannot find meta rules")
+
+ # rule functions
+
+ @property
+ def rules(self):
+ """
+ Rules
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__RULES_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__RULES_UPDATE = current_time
+ self.__update_rules()
+ self.__RULES_UPDATE = current_time
+ return self.__rules
+
+ def add_rule(self, value):
+ value = dict(value)
+ _id = value.get("policy_id")
+ if "value" in value:
+ for key in value["value"]:
+ value[key] = value["value"][key]
+ value.pop('value')
+ if _id in self.__rules:
+ self.__rules[_id]['rules'].append(value)
+ else:
+ self.__rules[_id] = {
+ "policy_id": _id,
+ "rules": [value]
+ }
+
+ def delete_rule(self, policy_id, rule_id=None):
+ if not rule_id:
+ self.__rules.pop(policy_id)
+ return
+ for _index, _rule in enumerate(self.__rules.get(policy_id, {}).get("rules")):
+ if _rule.get('id') == rule_id:
+ index = _index
+ break
+ else:
+ return
+ self.__rules.get(policy_id, {}).get("rules").pop(index)
+
+ def __update_rules(self):
+ """
+ Update all rules
+ :return: None
+ """
+ for policy_id in self.policies:
+
+ response = requests.get("{}/policies/{}/rules".format(
+ self.manager_url, policy_id), headers=self.headers)
+ if 'rules' in response.json():
+ self.__rules[policy_id] = response.json()['rules']
+ else:
+ logger.warning(" no 'rules' found within policy_id: {}".format(policy_id))
+
+ # assignment functions
+
+ def update_assignments(self, policy_id=None, perimeter_id=None):
+ """
+ Update all assignments for a specific perimeter (subject, object or action)
+ :param policy_id: the policy ID
+ :param perimeter_id: the perimeter ID
+ :return: None
+ """
+ if self.manager_url:
+ if policy_id:
+ self.__update_subject_assignments(policy_id=policy_id, perimeter_id=perimeter_id)
+ self.__update_object_assignments(policy_id=policy_id, perimeter_id=perimeter_id)
+ self.__update_action_assignments(policy_id=policy_id, perimeter_id=perimeter_id)
+ else:
+ for policy_id in self.__policies:
+ self.__update_subject_assignments(policy_id=policy_id,
+ perimeter_id=perimeter_id)
+ self.__update_object_assignments(policy_id=policy_id,
+ perimeter_id=perimeter_id)
+ self.__update_action_assignments(policy_id=policy_id,
+ perimeter_id=perimeter_id)
+
+ @property
+ def subject_assignments(self):
+ """
+ Subject Assignments
+ :return: a dictionary
+ """
+ return self.__subject_assignments
+
+ def __update_subject_assignments(self, policy_id, perimeter_id=None):
+ """
+ Update all assignments for a specific perimeter
+ :param policy_id: the policy ID
+ :param perimeter_id: the perimeter ID
+ :return: None
+ """
+ if perimeter_id:
+ response = requests.get("{}/policies/{}/subject_assignments/{}".format(
+ self.manager_url, policy_id, perimeter_id), headers=self.headers)
+ else:
+ response = requests.get("{}/policies/{}/subject_assignments".format(
+ self.manager_url, policy_id), headers=self.headers)
+
+ if 'subject_assignments' in response.json():
+ if policy_id not in self.subject_assignments:
+ self.__subject_assignments[policy_id] = {}
+ self.__subject_assignments[policy_id] = response.json()['subject_assignments']
+ else:
+ raise exceptions.SubjectAssignmentUnknown(
+ "Cannot find subject assignment within policy_id {}".format(policy_id))
+
+ def get_subject_assignments(self, policy_id, perimeter_id, category_id):
+ """
+ Get all subject assignments for a specific perimeter ID and in a specific category ID
+ :param policy_id: the policy ID
+ :param perimeter_id: the perimeter ID
+ :param category_id: the category ID
+ :return: a dictionary
+ """
+ if not policy_id:
+ raise exceptions.PolicyUnknown("Cannot find policy within policy_id {}".format(
+ policy_id))
+
+ if self.manager_url:
+ self.__update_subject_assignments(policy_id, perimeter_id)
+
+ for key, value in self.__subject_assignments[policy_id].items():
+ if all(k in value for k in ("subject_id", "category_id", "assignments")):
+ if perimeter_id == value['subject_id'] and category_id == value['category_id']:
+ return value['assignments']
+ else:
+ logger.warning("'subject_id' or 'category_id' or 'assignments'"
+ " keys are not found in subject_assignments")
+ return []
+
+ def add_subject_assignment(self, policy_id, perimeter_id, category_id, data_id):
+ self.__check_policies([policy_id])
+ for key, value in self.__subject_assignments.get(policy_id, {}).items():
+ if all(k in value for k in ("subject_id", "category_id", "assignments")):
+ if perimeter_id == value['subject_id'] and category_id == value['category_id']:
+ if data_id not in value['assignments']:
+ value['assignments'].append(data_id)
+ return value['assignments']
+ else:
+ logger.warning("'subject_id' or 'category_id' or 'assignments'"
+ " keys are not found in subject_assignments")
+ value = {
+ "id": uuid4().hex,
+ "policy_id": policy_id,
+ "subject_id": perimeter_id,
+ "category_id": category_id,
+ "assignments": [data_id, ],
+ }
+ if policy_id not in self.__subject_assignments:
+ self.__subject_assignments[policy_id] = {}
+ self.__subject_assignments[policy_id][value["id"]] = value
+ return value
+
+ def delete_subject_assignment(self, policy_id=None, perimeter_id=None,
+ category_id=None, data_id=None):
+ if not policy_id and not perimeter_id and not category_id and not data_id:
+ self.__subject_assignments = {}
+ return
+ self.__check_policies([policy_id])
+ for key, value in self.__subject_assignments[policy_id].items():
+ if all(k in value for k in ("subject_id", "category_id", "assignments")):
+ if perimeter_id == value['subject_id'] and category_id == value['category_id']:
+ try:
+ value['assignments'].remove(data_id)
+ except ValueError:
+ pass
+ return value['assignments']
+ else:
+ logger.warning("'subject_id' or 'category_id' or 'assignments'"
+ " keys are not found in subject_assignments")
+ return []
+
+ @property
+ def object_assignments(self):
+ """
+ Object Assignments
+ :return: a dictionary
+ """
+ return self.__object_assignments
+
+ def __update_object_assignments(self, policy_id, perimeter_id=None):
+ """
+ Update all assignments for a specific perimeter
+ :param policy_id: the policy ID
+ :param perimeter_id: the perimeter ID
+ :return: None
+ """
+ if perimeter_id:
+ response = requests.get("{}/policies/{}/object_assignments/{}".format(
+ self.manager_url, policy_id, perimeter_id), headers=self.headers)
+ else:
+ response = requests.get("{}/policies/{}/object_assignments".format(
+ self.manager_url, policy_id), headers=self.headers)
+
+ if 'object_assignments' in response.json():
+ if policy_id not in self.object_assignments:
+ self.__object_assignments[policy_id] = {}
+
+ self.__object_assignments[policy_id] = response.json()['object_assignments']
+ else:
+ raise exceptions.ObjectAssignmentUnknown(
+ "Cannot find object assignment within policy_id {}".format(policy_id))
+
+ def get_object_assignments(self, policy_id, perimeter_id, category_id):
+ """
+ Get all object assignments for a specific perimeter ID and in a specific category ID
+ :param policy_id: the policy ID
+ :param perimeter_id: the perimeter ID
+ :param category_id: the category ID
+ :return: a dictionary
+ """
+ if not policy_id:
+ raise exceptions.PolicyUnknown("Cannot find policy within policy_id {}".format(
+ policy_id))
+
+ if self.manager_url:
+ self.__update_object_assignments(policy_id, perimeter_id)
+
+ for key, value in self.object_assignments[policy_id].items():
+ if all(k in value for k in ("object_id", "category_id", "assignments")):
+ if perimeter_id == value['object_id'] and category_id == value['category_id']:
+ return value['assignments']
+ else:
+                logger.warning("'object_id' or 'category_id' or 'assignments'"
+ " keys are not found in object_assignments")
+ return []
+
+ def add_object_assignment(self, policy_id, perimeter_id, category_id, data_id):
+ self.__check_policies([policy_id])
+ for key, value in self.__object_assignments.get(policy_id, {}).items():
+ if all(k in value for k in ("object_id", "category_id", "assignments")):
+ if perimeter_id == value['object_id'] and category_id == value['category_id']:
+ if data_id not in value['assignments']:
+ value['assignments'].append(data_id)
+ return value['assignments']
+ else:
+ logger.warning("'object_id' or 'category_id' or 'assignments'"
+ " keys are not found in object_assignments")
+ value = {
+ "id": uuid4().hex,
+ "policy_id": policy_id,
+ "object_id": perimeter_id,
+ "category_id": category_id,
+ "assignments": [data_id, ],
+ }
+ if policy_id not in self.__object_assignments:
+ self.__object_assignments[policy_id] = {}
+ self.__object_assignments[policy_id][value["id"]] = value
+ return value
+
+ def delete_object_assignment(self, policy_id=None, perimeter_id=None,
+ category_id=None, data_id=None):
+ if not policy_id and not perimeter_id and not category_id and not data_id:
+ self.__object_assignments = {}
+ return
+ self.__check_policies([policy_id])
+ for key, value in self.__object_assignments[policy_id].items():
+ if all(k in value for k in ("object_id", "category_id", "assignments")):
+ if perimeter_id == value['object_id'] and category_id == value['category_id']:
+ try:
+ value['assignments'].remove(data_id)
+ except ValueError:
+ pass
+ return value['assignments']
+ else:
+ logger.warning("'object_id' or 'category_id' or 'assignments'"
+ " keys are not found in object_assignments")
+ return []
+
+ @property
+ def action_assignments(self):
+ """
+ Action Assignments
+ :return: a dictionary
+ """
+ return self.__action_assignments
+
+ def __update_action_assignments(self, policy_id, perimeter_id=None):
+ """
+ Update all assignments for a specific perimeter
+ :param policy_id: the policy ID
+ :param perimeter_id: the perimeter ID
+ :return: None
+ """
+ if perimeter_id:
+ response = requests.get("{}/policies/{}/action_assignments/{}".format(
+ self.manager_url, policy_id, perimeter_id), headers=self.headers)
+ else:
+ response = requests.get("{}/policies/{}/action_assignments".format(
+ self.manager_url, policy_id), headers=self.headers)
+
+ if 'action_assignments' in response.json():
+ if policy_id not in self.__action_assignments:
+ self.__action_assignments[policy_id] = {}
+
+ self.__action_assignments[policy_id] = response.json()['action_assignments']
+ else:
+ raise exceptions.ActionAssignmentUnknown(
+ "Cannot find action assignment within policy_id {}".format(policy_id))
+
+ def get_action_assignments(self, policy_id, perimeter_id, category_id):
+ """
+        Get all action assignments for a specific perimeter ID and in a specific category ID
+ :param policy_id: the policy ID
+ :param perimeter_id: the perimeter ID
+ :param category_id: the category ID
+ :return: a dictionary
+ """
+ if not policy_id:
+ raise exceptions.PolicyUnknown("Cannot find policy within policy_id {}".format(
+ policy_id))
+
+ if self.manager_url:
+ # FIXME: this should be not done automatically (performance consuming)
+ self.__update_action_assignments(policy_id, perimeter_id)
+
+ for key, value in self.action_assignments[policy_id].items():
+ if all(k in value for k in ("action_id", "category_id", "assignments")):
+ if perimeter_id == value['action_id'] and category_id == value['category_id']:
+ return value['assignments']
+ else:
+                logger.warning("'action_id' or 'category_id' or 'assignments'"
+ " keys are not found in action_assignments")
+ return []
+
+ def add_action_assignment(self, policy_id, perimeter_id, category_id, data_id):
+ self.__check_policies([policy_id])
+ for key, value in self.__action_assignments.get(policy_id, {}).items():
+ if all(k in value for k in ("action_id", "category_id", "assignments")):
+ if perimeter_id == value['action_id'] and category_id == value['category_id']:
+ if data_id not in value['assignments']:
+ value['assignments'].append(data_id)
+ return value['assignments']
+ else:
+ logger.warning("'action_id' or 'category_id' or 'assignments'"
+ " keys are not found in action_assignments")
+ value = {
+ "id": uuid4().hex,
+ "policy_id": policy_id,
+ "action_id": perimeter_id,
+ "category_id": category_id,
+ "assignments": [data_id, ],
+ }
+ if policy_id not in self.__action_assignments:
+ self.__action_assignments[policy_id] = {}
+ self.__action_assignments[policy_id][value["id"]] = value
+ return value
+
+ def delete_action_assignment(self, policy_id=None, perimeter_id=None,
+ category_id=None, data_id=None):
+ if not policy_id and not perimeter_id and not category_id and not data_id:
+ self.__action_assignments = {}
+ return
+ self.__check_policies([policy_id])
+ for key, value in self.__action_assignments[policy_id].items():
+ if all(k in value for k in ("action_id", "category_id", "assignments")):
+ if perimeter_id == value['action_id'] and category_id == value['category_id']:
+ try:
+ value['assignments'].remove(data_id)
+ except ValueError:
+ pass
+ return value['assignments']
+ else:
+ logger.warning("'action_id' or 'category_id' or 'assignments'"
+ " keys are not found in action_assignments")
+ return []
+
+ # category functions
+
+ @property
+ def subject_categories(self):
+ """
+ Subject Categories
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__SUBJECT_CATEGORIES_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__SUBJECT_CATEGORIES_UPDATE = current_time
+ self.__update_subject_categories()
+ self.__SUBJECT_CATEGORIES_UPDATE = current_time
+ return self.__subject_categories
+
+ def add_subject_category(self, value):
+ _id = uuid4().hex
+ self.__subject_categories[_id] = dict(value)
+
+ def delete_subject_category(self, category_id):
+ self.__subject_categories.pop(category_id)
+
+ def update_subject_category(self, category_id, value):
+ self.__subject_categories[category_id] = dict(value)
+
+ def __update_subject_categories(self):
+ """
+ Update all subject categories
+ :return: None
+ """
+ response = requests.get("{}/subject_categories".format(self.manager_url),
+ headers=self.headers)
+
+ if 'subject_categories' in response.json():
+ self.__subject_categories.update(response.json()['subject_categories'])
+ else:
+ raise exceptions.SubjectCategoryUnknown("Cannot find subject category")
+
+ @property
+ def object_categories(self):
+ """
+ Object Categories
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__OBJECT_CATEGORIES_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__OBJECT_CATEGORIES_UPDATE = current_time
+ self.__update_object_categories()
+ self.__OBJECT_CATEGORIES_UPDATE = current_time
+ return self.__object_categories
+
+ def add_object_category(self, value):
+ _id = uuid4().hex
+ self.__object_categories[_id] = dict(value)
+
+ def delete_object_category(self, category_id):
+ self.__object_categories.pop(category_id)
+
+ def update_object_category(self, category_id, value):
+ self.__object_categories[category_id] = dict(value)
+
+ def __update_object_categories(self):
+ """
+ Update all object categories
+ :return: None
+ """
+ response = requests.get("{}/object_categories".format(self.manager_url),
+ headers=self.headers)
+
+ if 'object_categories' in response.json():
+ self.__object_categories.update(response.json()['object_categories'])
+ else:
+ raise exceptions.ObjectCategoryUnknown("Cannot find object category")
+
+ @property
+ def action_categories(self):
+ """
+ Action Categories
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__ACTION_CATEGORIES_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__ACTION_CATEGORIES_UPDATE = current_time
+ self.__update_action_categories()
+ self.__ACTION_CATEGORIES_UPDATE = current_time
+ return self.__action_categories
+
+ def add_action_category(self, value):
+ _id = uuid4().hex
+ self.__action_categories[_id] = dict(value)
+
+ def delete_action_category(self, category_id):
+ self.__action_categories.pop(category_id)
+
+ def update_action_category(self, category_id, value):
+ self.__action_categories[category_id] = dict(value)
+
+ def __update_action_categories(self):
+ """
+ Update all action categories
+ :return: None
+ """
+ response = requests.get("{}/action_categories".format(self.manager_url),
+ headers=self.headers)
+
+ if 'action_categories' in response.json():
+ self.__action_categories.update(response.json()['action_categories'])
+ else:
+ raise exceptions.ActionCategoryUnknown("Cannot find action category")
+
+ # PDP functions
+
+ def __update_pdp(self, uuid=None):
+ """
+ Update one or all PDP
+ :param uuid: the PDP ID to update
+ :return: None
+ """
+ if not uuid:
+ response = requests.get("{}/pdp".format(self.manager_url), headers=self.headers)
+ else:
+ response = requests.get("{}/pdp/{}".format(self.manager_url, uuid),
+ headers=self.headers)
+ try:
+ pdp = response.json()
+ except Exception as e:
+ logger.error("Got an error from the server: {}".format(response.content))
+ raise e
+ if 'pdps' in pdp:
+ self.__pdp = copy.deepcopy(pdp["pdps"])
+
+ else:
+            logger.error("Received bad response from manager: {}".format(pdp))
+ raise exceptions.DataContentError("Cannot find 'pdps' key")
+
+ @property
+ def pdp(self):
+ """Policy Decision Point
+ Example of content:
+ {
+ "pdp_id": {
+ "vim_project_id": "vim_project_id",
+ "name": "pdp1",
+ "description": "test",
+ "security_pipeline": [
+ "policy_id"
+ ]
+ }
+ }
+
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__PDP_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__PDP_UPDATE = current_time
+ self.__update_pdp()
+ self.__PDP_UPDATE = current_time
+ return self.__pdp
+
+ def add_pdp(self, pdp_id=None, data=None):
+ if not pdp_id:
+ pdp_id = uuid4().hex
+ self.__pdp[pdp_id] = data
+
+ def delete_pdp(self, pdp_id):
+ self.__pdp.pop(pdp_id)
+
+ # policy functions
+ def __update_policies(self, pdp_id=None):
+ """
+ Update all policies
+ :param pdp_id: the PDP ID (if not given, update all policies)
+ :return: None
+ """
+ response = requests.get("{}/policies".format(self.manager_url), headers=self.headers)
+ policies = response.json()
+
+ if 'policies' in policies:
+ for key, value in policies["policies"].items():
+ if not pdp_id or (pdp_id and key in self.__pdp.get("security_pipeline", [])):
+ self.__policies[key] = value
+ else:
+ raise exceptions.PolicyContentError("Cannot find 'policies' key")
+
+ @property
+ def policies(self):
+ """
+ Policies
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__POLICIES_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__POLICIES_UPDATE = current_time
+ self.__update_policies()
+ self.__POLICIES_UPDATE = current_time
+ return self.__policies
+
+ def add_policy(self, value):
+ _id = value.get("id", uuid4().hex)
+ self.__policies[_id] = dict(value)
+ return {_id: self.__policies[_id]}
+
+ def delete_policy(self, policy_id):
+ self.__policies.pop(policy_id)
+
+ def update_policy(self, policy_id, value):
+ self.__policies[policy_id] = dict(value)
+
+ # model functions
+
+ def __update_models(self):
+ """
+ Update all models
+ :return: None
+ """
+ response = requests.get("{}/models".format(self.manager_url), headers=self.headers)
+ models = response.json()
+ if 'models' in models:
+ for key, value in models["models"].items():
+ self.__models[key] = value
+ else:
+ raise exceptions.DataContentError("Cannot find 'models' key")
+
+ @property
+ def models(self):
+ """
+ Models
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+ if self.__MODELS_UPDATE + self.__UPDATE_INTERVAL < current_time:
+ self.__MODELS_UPDATE = current_time
+ self.__update_models()
+ self.__MODELS_UPDATE = current_time
+ return self.__models
+
+ def add_model(self, value):
+ _id = value.get("id", uuid4().hex)
+ if "meta_rules" not in value:
+ value["meta_rules"] = []
+ self.__models[_id] = dict(value)
+ return {_id: self.__models[_id]}
+
+ def delete_model(self, model_id):
+ self.__models.pop(model_id)
+
+ def update_model(self, model_id, value):
+ self.__models[model_id] = dict(value)
+
+ # helper functions
+
+ def get_policy_from_meta_rules(self, meta_rule_id):
+ """
+ Get the policy ID with the given meta rule ID
+ :param meta_rule_id: the meta rule ID
+ :return: a policy ID
+ """
+ for pdp_key, pdp_value in self.pdp.items():
+ if "security_pipeline" in pdp_value:
+ for policy_id in pdp_value["security_pipeline"]:
+ if policy_id in self.policies and "model_id" in self.policies[policy_id]:
+ model_id = self.policies[policy_id]["model_id"]
+ if model_id in self.models and "meta_rules" in self.models[model_id]:
+ if meta_rule_id in self.models[model_id]["meta_rules"]:
+ return policy_id
+ else:
+ logger.warning(
+ "Cannot find model_id: {} within "
+ "models and 'meta_rules' key".format(model_id))
+ else:
+ logger.warning(
+ "Cannot find policy_id: {} "
+ "within policies and 'model_id' key".format(
+ policy_id))
+ else:
+ logger.warning("Cannot find 'security_pipeline' "
+ "key within pdp ")
+
+ def get_meta_rule_ids_from_pdp_value(self, pdp_value):
+ """
+        Get the meta rule IDs given the content of a PDP
+        :param pdp_value: the content of the PDP
+        :return: a list of meta rule IDs
+ """
+ meta_rules = []
+ if "security_pipeline" in pdp_value:
+ for policy_id in pdp_value["security_pipeline"]:
+ if policy_id not in self.policies or "model_id" not in self.policies[policy_id]:
+ raise exceptions.PolicyUnknown("Cannot find 'models' key")
+ model_id = self.policies[policy_id]["model_id"]
+ if model_id not in self.models or 'meta_rules' not in self.models[model_id]:
+ raise exceptions.DataContentError("Cannot find 'models' key")
+ for meta_rule in self.models[model_id]["meta_rules"]:
+ meta_rules.append(meta_rule)
+ return meta_rules
+ raise exceptions.PdpContentError
+
+ def get_pdp_from_vim_project(self, vim_project_id):
+ """
+ Get the PDP ID given the VIM project ID
+ :param vim_project_id: the VIM project ID
+ :return: the PDP ID
+ """
+ for pdp_key, pdp_value in self.pdp.items():
+ if "vim_project_id" in pdp_value and \
+ vim_project_id == pdp_value["vim_project_id"]:
+ return pdp_key
+
+ def get_vim_project_id_from_policy_id(self, policy_id):
+ """
+ Get the VIM project ID given the policy ID
+ :param policy_id: the policy ID
+ :return: the VIM project ID
+ """
+ for pdp_key, pdp_value in self.pdp.items():
+ if "security_pipeline" in pdp_value and \
+ "vim_project_id" in pdp_value:
+ if policy_id in pdp_value["security_pipeline"]:
+ return pdp_value["vim_project_id"]
+ else:
+                logger.warning("'security_pipeline', 'vim_project_id' "
+                               "keys not in pdp {}".format(pdp_value))
+
+ def get_pdp_id_from_policy_id(self, policy_id):
+ """
+ Get the PDP ID given the policy ID
+ :param policy_id: the policy ID
+ :return: the PDP ID
+ """
+ for _pdp_id in self.pdp:
+ if policy_id in self.__pdp.get(_pdp_id).get("security_pipeline"):
+ return _pdp_id
+
+ def get_vim_project_id_from_pdp_id(self, pdp_id):
+ """
+ Get the VIM project ID given the PDP ID
+ :param pdp_id: the PDP ID
+ :return: the VIM project ID
+ """
+ if pdp_id in self.pdp:
+ pdp_value = self.pdp.get(pdp_id)
+ if "security_pipeline" in pdp_value and \
+ "vim_project_id" in pdp_value:
+ return pdp_value["vim_project_id"]
+        logger.warning("Unknown PDP ID {}".format(pdp_id))
+
+ # pipelines functions
+
+ @property
+ def pipelines(self):
+ """
+ Pipelines
+ :return: a dictionary
+ """
+ return self.__pipelines
+
+ def add_pipeline(self, pipeline_id=None, data=None):
+ if not pipeline_id:
+ pipeline_id = uuid4().hex
+ self.__pipelines[pipeline_id] = data
+
+ def delete_pipeline(self, pipeline_id=None):
+ self.__pipelines.pop(pipeline_id)
+
+ def __update_pipelines(self, pdp_id=None):
+ """
+ Update all pipelines
+ :param pdp_id: the PDP ID
+ :return: None
+ """
+ headers = {
+ 'x-api-key': self.__CURRENT_SERVER_API_KEY
+ }
+ req = requests.get("{}/pdp".format(self.manager_url), headers=self.headers)
+ pdps = req.json().get("pdps", {})
+ for _pdp_id in pdps:
+ if pdp_id and pdp_id != _pdp_id:
+ continue
+ for policy_id in pdps[_pdp_id].get("security_pipeline", []):
+ _policy = requests.get("{}/policies/{}".format(self.manager_url, policy_id),
+ headers=self.headers)
+ req = requests.get("{}/pipelines".format(self.__CURRENT_SERVER),
+ headers=headers)
+ _pipelines = req.json().get('pipelines', {})
+ self.__pipelines[_pdp_id] = {
+ "pdp_id": _pdp_id,
+ "vim_project_id": pdps[_pdp_id].get("vim_project_id", ""),
+ "protocol": _pipelines[_pdp_id].get("protocol", "http"),
+ "host": _pipelines[_pdp_id].get("server_ip", "127.0.0.1"),
+ "port": _pipelines[_pdp_id].get("port", "8000"),
+ }
+
+ def get_pipeline_id_from_project_id(self, project_id):
+ """
+ Retrieve the pipeline ID from the project ID
+ :param project_id: the VIM project ID
+ :return: a pipeline_id
+ """
+ for _pdp_id in self.pdp:
+ if self.__pdp.get(_pdp_id).get("vim_project_id") == project_id:
+ return _pdp_id
+ # try:
+ # return self.__pdp.get(_pdp_id).get("security_pipeline")[0]
+ # except IndexError:
+ # return
+
+ def get_pipeline_url(self, project_id=None, pipeline_id=None, pdp_id=None):
+ """
+ Retrieve the URL of the pipeline
+ :param project_id: the VIM project ID
+ :param pipeline_id: the pipeline ID
+ :param pdp_id: the PDP ID
+ :return: the URL
+ """
+ self.__update_pdp()
+ if pdp_id:
+ return "{proto}://{host}:{port}".format(
+ proto=self.__pipelines[pdp_id].get("protocol", "http"),
+ host=self.__pipelines[pdp_id].get("host", "127.0.0.1"),
+ port=self.__pipelines[pdp_id].get("port", "8000"),
+ )
+ if project_id:
+ for _pdp_id in self.__pdp:
+ if self.__pdp.get(_pdp_id).get("vim_project_id") == project_id:
+ return "{proto}://{host}:{port}".format(
+ proto=self.__pipelines[_pdp_id].get("protocol", "http"),
+ host=self.__pipelines[_pdp_id].get("host", "127.0.0.1"),
+ port=self.__pipelines[_pdp_id].get("port", "8000"),
+ )
+ if pipeline_id and pipeline_id in self.pipelines:
+ return "{proto}://{host}:{port}".format(
+ proto=self.__pipelines[pipeline_id].get("protocol", "http"),
+ host=self.__pipelines[pipeline_id].get("host", "127.0.0.1"),
+ port=self.__pipelines[pipeline_id].get("port", "8000"),
+ )
+
+ def get_api_key(self, project_id=None, pipeline_id=None, pdp_id=None):
+ """
+        Retrieve the API key of the pipeline
+ :param project_id: the VIM project ID
+ :param pipeline_id: the pipeline ID
+ :param pdp_id: the PDP ID
+        :return: the API key
+ """
+ self.__update_pdp()
+ if pdp_id:
+ return self.__pipelines[pdp_id].get("api_key", "")
+ if project_id:
+ for _pdp_id in self.__pdp:
+ if self.__pdp.get(_pdp_id).get("vim_project_id") == project_id:
+ return self.__pipelines[_pdp_id].get("api_key", "")
+ if pipeline_id and pipeline_id in self.pipelines:
+ return self.__pipelines[pipeline_id].get("api_key", "")
+
+ # security_functions functions
+
+ @property
+ def security_functions(self):
+ """
+ Security Functions
+ :return: a dictionary
+ """
+ if self.manager_url:
+ current_time = time.time()
+            if self.__SECURITY_FUNCTIONS_UPDATE + self.__UPDATE_INTERVAL < current_time:
+                self.__SECURITY_FUNCTIONS_UPDATE = current_time
+                self.__update_security_functions()
+ return self.__security_functions
+
+ def __update_security_functions(self):
+ """
+ Update security functions
+ :return: None
+ """
+ req = requests.get("{}/policies".format(self.manager_url), headers=self.headers)
+        for key, value in req.json().items():
+            self.__security_functions[key] = value
+
+ @property
+ def subject_data(self):
+ """Subject Data
+ :return: a dictionary"""
+ return self.__subject_data
+
+ def add_subject_data(self, policy_id, category_id, data):
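+        # Note (descriptive comment): the "else" branch of the "for" loop below only
+        # runs when the loop completes without finding an existing
+        # (policy_id, category_id) entry, i.e. when a new entry must be created.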
+ for index, value in enumerate(self.__subject_data):
+ if policy_id == value["policy_id"] and category_id == value["category_id"]:
+ self.__subject_data[index]["data"][data.get('id', uuid4().hex)] = data
+ return self.__subject_data[index]
+ else:
+ _id = data.get('id', uuid4().hex)
+ data['id'] = _id
+ value = {
+ "policy_id": policy_id,
+ "category_id": category_id,
+ "data": {
+ _id: data
+ }
+ }
+ self.__subject_data.append(value)
+ return value
+
+ def delete_subject_data(self, policy_id=None, category_id=None, data_id=None):
+ if not policy_id and not category_id and not data_id:
+ self.__subject_data = []
+ for index, value in enumerate(self.__subject_data):
+ if policy_id == value["policy_id"] and category_id == value["category_id"]:
+ self.__subject_data[index]["data"].pop(data_id)
+
+ @property
+ def object_data(self):
+ """Object Data
+ :return: a dictionary"""
+ return self.__object_data
+
+ def add_object_data(self, policy_id, category_id, data):
+ for index, value in enumerate(self.__object_data):
+ if policy_id == value["policy_id"] and category_id == value["category_id"]:
+ self.__object_data[index]["data"][data.get('id', uuid4().hex)] = data
+ return self.__object_data[index]
+ else:
+ _id = data.get('id', uuid4().hex)
+ data['id'] = _id
+ value = {
+ "policy_id": policy_id,
+ "category_id": category_id,
+ "data": {
+ _id: data
+ }
+ }
+ self.__object_data.append(value)
+ return value
+
+ def delete_object_data(self, policy_id=None, category_id=None, data_id=None):
+ if not policy_id and not category_id and not data_id:
+ self.__object_data = []
+ for index, value in enumerate(self.__object_data):
+ if policy_id == value["policy_id"] and category_id == value["category_id"]:
+ self.__object_data[index]["data"].pop(data_id)
+
+ @property
+ def action_data(self):
+ """Action Data
+ :return: a dictionary"""
+ return self.__action_data
+
+ def add_action_data(self, policy_id, category_id, data):
+ for index, value in enumerate(self.__action_data):
+ if policy_id == value["policy_id"] and category_id == value["category_id"]:
+ self.__action_data[index]["data"][data.get('id', uuid4().hex)] = data
+ return self.__action_data[index]
+ else:
+ _id = data.get('id', uuid4().hex)
+ data['id'] = _id
+ value = {
+ "policy_id": policy_id,
+ "category_id": category_id,
+ "data": {
+ _id: data
+ }
+ }
+ self.__action_data.append(value)
+ return value
+
+ def delete_action_data(self, policy_id=None, category_id=None, data_id=None):
+ if not policy_id and not category_id and not data_id:
+ self.__action_data = []
+ for index, value in enumerate(self.__action_data):
+ if policy_id == value["policy_id"] and category_id == value["category_id"]:
+ self.__action_data[index]["data"].pop(data_id)
+
+
diff --git a/moon_cache/moon_cache/context.py b/moon_cache/moon_cache/context.py
new file mode 100644
index 00000000..e1a33a07
--- /dev/null
+++ b/moon_cache/moon_cache/context.py
@@ -0,0 +1,345 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+import copy
+import logging
+from uuid import uuid4
+from moon_utilities import exceptions
+
+logger = logging.getLogger("moon.utilities." + __name__)
+
+
+class Context:
+
+ def __init__(self, init_context, cache):
+ if init_context is None:
+ raise Exception("Invalid context content object")
+
+ self.cache = cache
+ self.__current_request = None
+ self.__request_id = None
+ self.__meta_rule_ids = None
+ self.__pdp_set = {}
+ self.__index = -1
+ self.__pdp_id = init_context.get("pdp_id")
+
+ if self.__pdp_id:
+ self.__pdp_value = copy.deepcopy(self.cache.pdp[self.__pdp_id])
+ else:
+ self.__pdp_id = uuid4().hex
+ self.__pdp_value = self.__build_pdp()
+ self.cache.add_pdp(self.__pdp_id, self.__pdp_value)
+
+ self.__subject = init_context.get("subject_name")
+ self.__object = init_context.get("object_name")
+ self.__action = init_context.get("action_name")
+ self.__request_id = init_context.get("req_id")
+
+ self.__meta_rule_ids = self.cache.get_meta_rule_ids_from_pdp_value(self.__pdp_value)
+ self.__meta_rules = self.cache.meta_rules
+
+ def __build_pdp(self):
+ _pdp = {
+ 'name': 'abstract_pdp',
+ 'description': '',
+ 'vim_project_id': 'default',
+ 'security_pipeline': []
+ }
+ policy_id = list(self.cache.policies.keys())[0]
+ _pdp['security_pipeline'] = [policy_id]
+ return _pdp
+
+ def delete_cache(self):
+ self.cache = {}
+
+ def set_cache(self, cache):
+ self.cache = cache
+
+ def increment_index(self):
+ self.__index += 1
+ self.__init_current_request()
+ self.__init_pdp_set()
+
+ @property
+ def current_state(self):
+ self.__validate_meta_rule_content(self.__pdp_set[self.__meta_rule_ids[self.__index]])
+ return self.__pdp_set[self.__meta_rule_ids[self.__index]]['effect']
+
+ @current_state.setter
+ def current_state(self, state):
+ if state not in ("grant", "deny", "passed"):
+ state = "passed"
+ self.__validate_meta_rule_content(self.__pdp_set[self.__meta_rule_ids[self.__index]])
+ self.__pdp_set[self.__meta_rule_ids[self.__index]]['effect'] = state
+
+ @current_state.deleter
+ def current_state(self):
+ self.__validate_meta_rule_content(self.__pdp_set[self.__meta_rule_ids[self.__index]])
+ self.__pdp_set[self.__meta_rule_ids[self.__index]]['effect'] = "unset"
+
+ @property
+ def current_policy_id(self):
+ if "security_pipeline" not in self.__pdp_value:
+ raise exceptions.AuthzException('Cannot find security_pipeline key within pdp.')
+ return self.__pdp_value["security_pipeline"][self.__index]
+
+ @current_policy_id.setter
+ def current_policy_id(self, value):
+ pass
+
+ @current_policy_id.deleter
+ def current_policy_id(self):
+ pass
+
+ def __init_current_request(self):
+ if "security_pipeline" not in self.__pdp_value:
+ raise exceptions.PdpContentError
+ self.__subject = self.cache.get_subject(
+ self.__pdp_value["security_pipeline"][self.__index],
+ self.__subject)
+ self.__object = self.cache.get_object(
+ self.__pdp_value["security_pipeline"][self.__index],
+ self.__object)
+ self.__action = self.cache.get_action(
+ self.__pdp_value["security_pipeline"][self.__index],
+ self.__action)
+ self.__current_request = dict(self.initial_request)
+
+ def __init_pdp_set(self):
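+        # Builds one entry per meta rule plus a global "effect" key; illustrative
+        # shape of the resulting pdp_set:
+        #   {"<meta_rule_id>": {"meta_rules": {...}, "target": {...},
+        #                       "effect": "unset"},
+        #    "effect": "deny"}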
+ for meta_rule_id in self.__meta_rule_ids:
+ self.__pdp_set[meta_rule_id] = dict()
+ self.__pdp_set[meta_rule_id]["meta_rules"] = self.__meta_rules[meta_rule_id]
+ self.__pdp_set[meta_rule_id]["target"] = self.__add_target(meta_rule_id)
+ self.__pdp_set[meta_rule_id]["effect"] = "unset"
+ self.__pdp_set["effect"] = "deny"
+
+ def update_target(self):
+ # FIXME: not used...
+ for meta_rule_id in self.__meta_rule_ids:
+ result = dict()
+ _subject = self.__current_request["subject"]
+ _object = self.__current_request["object"]
+ _action = self.__current_request["action"]
+
+ meta_rules = self.cache.meta_rules
+ policy_id = self.cache.get_policy_from_meta_rules(meta_rule_id)
+
+ if 'subject_categories' not in meta_rules[meta_rule_id]:
+ raise exceptions.MetaRuleContentError(" 'subject_categories' key not found ")
+
+ self.cache.update_assignments(policy_id)
+
+ for sub_cat in meta_rules[meta_rule_id]['subject_categories']:
+ if sub_cat not in result:
+ result[sub_cat] = []
+ result[sub_cat].extend(
+ self.cache.get_subject_assignments(policy_id, _subject, sub_cat))
+
+ if 'object_categories' not in meta_rules[meta_rule_id]:
+ raise exceptions.MetaRuleContentError(" 'object_categories' key not found ")
+
+ for obj_cat in meta_rules[meta_rule_id]['object_categories']:
+ if obj_cat not in result:
+ result[obj_cat] = []
+ result[obj_cat].extend(
+ self.cache.get_object_assignments(policy_id, _object, obj_cat))
+
+ if 'action_categories' not in meta_rules[meta_rule_id]:
+ raise exceptions.MetaRuleContentError(" 'action_categories' key not found ")
+
+ for act_cat in meta_rules[meta_rule_id]['action_categories']:
+ if act_cat not in result:
+ result[act_cat] = []
+ result[act_cat].extend(
+ self.cache.get_action_assignments(policy_id, _action, act_cat))
+
+ self.__pdp_set[meta_rule_id]["target"] = result
+
+ def __add_target(self, meta_rule_id):
+ """build target from meta_rule
+
+        The target is a dict with category ids as keys; the value for each
+        category is a list of assignments
+
+ """
+ result = dict()
+ _subject = self.__current_request["subject"]
+ _object = self.__current_request["object"]
+ _action = self.__current_request["action"]
+
+ meta_rules = self.cache.meta_rules
+ policy_id = self.cache.get_policy_from_meta_rules(meta_rule_id)
+
+ if 'subject_categories' not in meta_rules[meta_rule_id]:
+ raise exceptions.MetaRuleContentError(" 'subject_categories' key not found ")
+
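+        # Categories whose id starts with "attributes:" are resolved against the
+        # global attributes cache (e.g., with the unit-test mocks,
+        # "attributes:mode" -> "attributes:build") rather than against the
+        # assignment lists.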
+ for sub_cat in meta_rules[meta_rule_id]['subject_categories']:
+ if sub_cat not in result:
+ result[sub_cat] = []
+ if sub_cat.startswith("attributes:"):
+ result[sub_cat].append(
+ "attributes:" +
+                    self.cache.attributes.get(sub_cat.replace("attributes:", ""), ""))
+ else:
+ result[sub_cat].extend(
+ self.cache.get_subject_assignments(policy_id, _subject, sub_cat))
+
+ if 'object_categories' not in meta_rules[meta_rule_id]:
+ raise exceptions.MetaRuleContentError(" 'object_categories' key not found ")
+
+ for obj_cat in meta_rules[meta_rule_id]['object_categories']:
+ if obj_cat not in result:
+ result[obj_cat] = []
+ if obj_cat.startswith("attributes:"):
+ result[obj_cat].append(
+ "attributes:" +
+                    self.cache.attributes.get(obj_cat.replace("attributes:", ""), ""))
+ else:
+ result[obj_cat].extend(
+ self.cache.get_object_assignments(policy_id, _object, obj_cat))
+
+ if 'action_categories' not in meta_rules[meta_rule_id]:
+ raise exceptions.MetaRuleContentError(" 'action_categories' key not found ")
+
+ for act_cat in meta_rules[meta_rule_id]['action_categories']:
+ if act_cat not in result:
+ result[act_cat] = []
+ if act_cat.startswith("attributes:"):
+ result[act_cat].append(
+ "attributes:" +
+                    self.cache.attributes.get(act_cat.replace("attributes:", ""), ""))
+ else:
+ result[act_cat].extend(
+ self.cache.get_action_assignments(policy_id, _action, act_cat))
+
+ return result
+
+ def __repr__(self):
+ return """PDP ID: {id}
+current_request: {current_request}
+request_id: {request_id}
+index: {index}
+headers: {headers}
+pdp_set: {pdp_set}
+ """.format(
+ id=self.__pdp_id,
+ current_request=self.__current_request,
+ request_id=self.__request_id,
+ headers=self.__meta_rule_ids,
+ pdp_set=self.__pdp_set,
+ index=self.__index
+ )
+
+ def to_dict(self):
+ return {
+ "initial_request": copy.deepcopy(self.initial_request),
+ "current_request": copy.deepcopy(self.__current_request),
+ "headers": copy.deepcopy(self.__meta_rule_ids),
+ "index": copy.deepcopy(self.__index),
+ "pdp_set": copy.deepcopy(self.__pdp_set),
+ "request_id": copy.deepcopy(self.__request_id),
+ # "manager_url": copy.deepcopy(self.__manager_url),
+ # "interface_name": copy.deepcopy(self.__interface_name),
+ }
+
+ @property
+ def request_id(self):
+ return self.__request_id
+
+ @request_id.setter
+ def request_id(self, value):
+ raise Exception("You cannot update the request_id")
+
+ @request_id.deleter
+ def request_id(self):
+ raise Exception("You cannot update the request_id")
+
+ @property
+ def initial_request(self):
+ return {
+ "subject": self.__subject,
+ "object": self.__object,
+ "action": self.__action,
+ }
+
+ @initial_request.setter
+ def initial_request(self, value):
+ raise Exception("You are not allowed to update the initial_request")
+
+ @initial_request.deleter
+ def initial_request(self):
+ raise Exception("You are not allowed to delete the initial_request")
+
+ @property
+ def current_request(self):
+ if not self.__current_request:
+ self.__current_request = dict(self.initial_request)
+ return self.__current_request
+
+ @current_request.setter
+ def current_request(self, value):
+
+ self.__current_request = copy.deepcopy(value)
+ # Note (asteroide): if the current request is modified,
+ # we must update the PDP Set.
+ self.__init_pdp_set()
+
+ @current_request.deleter
+ def current_request(self):
+ self.__current_request = {}
+ self.__pdp_set = {}
+
+ '''
+    [Note] The rename of "headers" to "meta_rule_ids" is done;
+    the getter and setter of "headers" may still need to be refactored accordingly.
+ '''
+
+ @property
+ def headers(self):
+ return self.__meta_rule_ids
+
+ @headers.setter
+ def headers(self, meta_rule_ids):
+ self.__meta_rule_ids = meta_rule_ids
+
+ @headers.deleter
+ def headers(self):
+ self.__meta_rule_ids = list()
+
+ @property
+ def index(self):
+ return self.__index
+
+ @index.setter
+ def index(self, index):
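+        # Note (descriptive comment): the provided value is ignored; the index is
+        # simply incremented.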
+ self.__index += 1
+
+ @index.deleter
+ def index(self):
+ self.__index = -1
+
+ @property
+ def pdp_set(self):
+ return self.__pdp_set
+
+ @pdp_set.setter
+ def pdp_set(self, value):
+ raise Exception("You are not allowed to modify the pdp_set")
+
+ @pdp_set.deleter
+ def pdp_set(self):
+ self.__pdp_set = {}
+
+ @staticmethod
+ def __validate_meta_rule_content(meta_rules):
+ if 'effect' not in meta_rules:
+ raise exceptions.PdpContentError("effect not in meta_rules")
diff --git a/moon_cache/moon_cache/request_wrapper.py b/moon_cache/moon_cache/request_wrapper.py
new file mode 100644
index 00000000..4b94507a
--- /dev/null
+++ b/moon_cache/moon_cache/request_wrapper.py
@@ -0,0 +1,35 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+import requests
+from moon_utilities import exceptions
+
+
+def get(url, **kwarg):
+ try:
+ response = requests.get(url, **kwarg)
+ except requests.exceptions.RequestException as _exc:
+ raise exceptions.MoonError("request failure ", _exc)
+ except Exception as _exc:
+ raise exceptions.MoonError("Unexpected error ", _exc)
+ return response
+
+
+def put(url, json="", **kwarg):
+ try:
+ response = requests.put(url, json=json, **kwarg)
+ except requests.exceptions.RequestException as _exc:
+ raise exceptions.MoonError("request failure ", _exc)
+ except Exception as _exc:
+ raise exceptions.MoonError("Unexpected error ", _exc)
+ return response
diff --git a/moon_cache/requirements.txt b/moon_cache/requirements.txt
new file mode 100644
index 00000000..f2293605
--- /dev/null
+++ b/moon_cache/requirements.txt
@@ -0,0 +1 @@
+requests
diff --git a/moon_cache/setup.py b/moon_cache/setup.py
new file mode 100644
index 00000000..2a116ef2
--- /dev/null
+++ b/moon_cache/setup.py
@@ -0,0 +1,51 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+from setuptools import setup, find_packages
+import moon_cache
+
+with open('requirements.txt') as f:
+ required = list(filter(
+ lambda s: (len(s.strip()) > 0 and s.strip()[0] != '#'),
+ f.read().split('\n')))
+
+setup(
+
+ name='moon_cache',
+
+ version=moon_cache.__version__,
+
+ packages=find_packages(),
+
+ author='Thomas Duval',
+
+ author_email='thomas.duval@orange.com',
+
+    description='Cache library for the Moon components',
+
+ long_description=open('README.md').read(),
+
+ install_requires=required,
+
+ include_package_data=True,
+
+ url='',
+
+ classifiers=[
+ 'Programming Language :: Python :: 3',
+ 'Development Status :: 1 - Planning',
+ 'License :: OSI Approved',
+ 'Natural Language :: English',
+ 'Operating System :: OS Independent',
+ ],
+
+)
diff --git a/moon_cache/tests/unit_python/api/__init__.py b/moon_cache/tests/unit_python/api/__init__.py
new file mode 100644
index 00000000..582be686
--- /dev/null
+++ b/moon_cache/tests/unit_python/api/__init__.py
@@ -0,0 +1,11 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
diff --git a/moon_cache/tests/unit_python/api/test_attributes.py b/moon_cache/tests/unit_python/api/test_attributes.py
new file mode 100644
index 00000000..13bfc637
--- /dev/null
+++ b/moon_cache/tests/unit_python/api/test_attributes.py
@@ -0,0 +1,36 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+
+def test_get_attribute_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'mode'
+ value = "run"
+ cache_obj.set_attribute(name=name)
+ assert cache_obj.attributes
+ assert cache_obj.attributes.get(name)
+ assert cache_obj.attributes.get(name) == "build"
+ cache_obj.set_attribute(name=name, value=value)
+ assert cache_obj.attributes
+ assert cache_obj.attributes.get(name)
+    # Note: the value is still "build" because the cache systematically requests
+    # the Manager, whose mock returns "build" for this attribute
+ assert cache_obj.attributes.get(name) == "build"
+
+
+def test_get_attribute_unknown(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'test'
+ assert cache_obj.attributes
+ assert not cache_obj.attributes.get(name)
+
diff --git a/moon_cache/tests/unit_python/api/test_cache.py b/moon_cache/tests/unit_python/api/test_cache.py
new file mode 100644
index 00000000..52efb4e0
--- /dev/null
+++ b/moon_cache/tests/unit_python/api/test_cache.py
@@ -0,0 +1,535 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+import pytest
+import mock_repo.data as data_mock
+import mock_repo.urls as register_urls
+import requests
+import requests_mock
+from moon_utilities import exceptions
+
+
+def test_authz_request(configuration):
+ from moon_cache import cache
+ c = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ assert isinstance(c.authz_requests, dict)
+
+
+# ================================
+# tests for get (subject) in cache
+# ================================
+
+def test_get_subject_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'subject_name'
+ subject_id = cache_obj.get_subject(data_mock.shared_ids["policy"]["policy_id_1"], name)
+ assert subject_id is not None
+
+
+def test_get_subject_no_policy(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ with pytest.raises(exceptions.PolicyUnknown) as exception_info:
+ cache_obj.get_subject(None, "")
+ assert str(exception_info.value) == '400: Policy Unknown'
+
+
+def test_get_subject_invalid_name(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'invalid name'
+ with pytest.raises(exceptions.SubjectUnknown) as exception_info:
+ cache_obj.get_subject(data_mock.shared_ids["policy"]["policy_id_1"], name)
+ assert str(exception_info.value) == '400: Subject Unknown'
+
+
+def test_get_subject_invalid_response(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'policy_id_invalid_response'
+ with pytest.raises(exceptions.SubjectUnknown) as exception_info:
+ cache_obj.get_subject(data_mock.shared_ids["policy"]["policy_id_invalid_response"], name)
+ assert str(exception_info.value) == '400: Subject Unknown'
+
+
+# ================================================
+# tests for get (object) in cache
+# ================================================
+
+def test_get_object_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'object_name'
+ object_id = cache_obj.get_object(data_mock.shared_ids["policy"]["policy_id_1"], name)
+ assert object_id is not None
+
+
+def test_get_object_no_policy(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ with pytest.raises(exceptions.PolicyUnknown) as exception_info:
+ cache_obj.get_object(None, "")
+ assert str(exception_info.value) == '400: Policy Unknown'
+
+
+def test_get_object_invalid_name(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'invalid name'
+ with pytest.raises(exceptions.ObjectUnknown) as exception_info:
+ cache_obj.get_object(data_mock.shared_ids["policy"]["policy_id_1"], name)
+ assert str(exception_info.value) == '400: Object Unknown'
+
+
+def test_get_object_invalid_response(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'policy_id_invalid_response'
+ with pytest.raises(exceptions.ObjectUnknown) as exception_info:
+ cache_obj.get_object(data_mock.shared_ids["policy"]["policy_id_invalid_response"], name)
+ assert str(exception_info.value) == '400: Object Unknown'
+
+
+# ================================================
+# tests for get (action) in cache
+# ================================================
+
+def test_get_action_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'action_name'
+ action_id = cache_obj.get_action(data_mock.shared_ids["policy"]["policy_id_1"], name)
+ assert action_id is not None
+
+
+def test_get_action_no_policy(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ with pytest.raises(exceptions.PolicyUnknown) as exception_info:
+ cache_obj.get_action(None, "")
+ assert str(exception_info.value) == '400: Policy Unknown'
+
+
+def test_get_action_invalid_name(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'invalid name'
+ with pytest.raises(exceptions.ActionUnknown) as exception_info:
+ cache_obj.get_action(data_mock.shared_ids["policy"]["policy_id_1"], name)
+ assert str(exception_info.value) == '400: Action Unknown'
+
+
+def test_get_action_invalid_response(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ name = 'policy_id_invalid_response'
+ with pytest.raises(exceptions.ActionUnknown) as exception_info:
+ cache_obj.get_action(data_mock.shared_ids["policy"]["policy_id_invalid_response"], name)
+ assert str(exception_info.value) == '400: Action Unknown'
+
+
+# ===========================================
+# tests for get (subject_assignment) in cache
+# ===========================================
+
+def test_get_subject_assignment_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ subject_assignments = cache_obj.get_subject_assignments(
+ data_mock.shared_ids["policy"]["policy_id_1"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert subject_assignments is not None
+
+
+def test_get_subject_assignment_no_policy(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ with pytest.raises(exceptions.PolicyUnknown) as exception_info:
+ cache_obj.get_subject_assignments(None,
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert str(exception_info.value) == '400: Policy Unknown'
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_subject_assignment_invalid_subject_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/subject_assignments/{}'
+ .format(
+ configuration["management"]["url"],
+ data_mock.shared_ids["subject"]["invalid_subject_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'subject_assignments':
+ data_mock.subject_assignment_mock_invalid_subject_id
+ }
+ )
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ subject_assignments = cache_obj.get_subject_assignments(
+ data_mock.shared_ids["subject"]["invalid_subject_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(subject_assignments) == 0
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_subject_assignment_invalid_category_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/subject_assignments/{}'
+ .format(
+ configuration["management"]["url"],
+ data_mock.shared_ids["subject"]["invalid_category_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'subject_assignments':
+ data_mock.subject_assignment_mock_invalid_category_id
+ }
+ )
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ subject_assignments = cache_obj.get_subject_assignments(
+ data_mock.shared_ids["subject"]["invalid_category_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(subject_assignments) == 0
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_subject_assignment_invalid_assignment_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/subject_assignments/{}'
+ .format(
+ configuration["management"]["url"],
+ data_mock.shared_ids["subject"]["invalid_assignment_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'subject_assignments':
+ data_mock.subject_assignment_mock_invalid_assignment_id
+ }
+ )
+
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ subject_assignments = cache_obj.get_subject_assignments(
+ data_mock.shared_ids["subject"]["invalid_assignment_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(subject_assignments) == 0
+
+
+def test_get_subject_assignment_empty_perimeter(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ subject_assignments = cache_obj.get_subject_assignments(
+ data_mock.shared_ids["policy"]["policy_id_2"],
+ None,
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(subject_assignments) == 0
+
+
+def test_get_subject_assignment_invalid_category_failure(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ subject_assignments = cache_obj.get_subject_assignments(
+ data_mock.shared_ids["policy"]["policy_id_1"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["invalid_category_id_1"])
+ assert len(subject_assignments) == 0
+
+
+# ==========================================
+# tests for get (object_assignment) in cache
+# ==========================================
+
+def test_get_object_assignment_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ object_assignments = cache_obj.get_object_assignments(
+ data_mock.shared_ids["policy"]["policy_id_1"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_2"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert object_assignments is not None
+
+
+def test_get_object_assignment_no_policy(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ with pytest.raises(exceptions.PolicyUnknown) as exception_info:
+ cache_obj.get_object_assignments(None,
+ data_mock.shared_ids["perimeter"]["perimeter_id_2"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert str(exception_info.value) == '400: Policy Unknown'
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_object_assignment_invalid_object_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/object_assignments/{}'
+ .format(configuration["management"]["url"],
+ data_mock.shared_ids["object"]["invalid_object_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'object_assignments':
+ data_mock.object_assignment_mock_invalid_object_id
+ }
+ )
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ object_assignments = cache_obj.get_object_assignments(
+ data_mock.shared_ids["object"]["invalid_object_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(object_assignments) == 0
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_object_assignment_invalid_category_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/object_assignments/{}'
+ .format(configuration["management"]["url"],
+ data_mock.shared_ids["object"]["invalid_category_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'object_assignments':
+ data_mock.object_assignment_mock_invalid_category_id
+ }
+ )
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ object_assignments = cache_obj.get_object_assignments(
+ data_mock.shared_ids["object"]["invalid_category_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(object_assignments) == 0
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_object_assignment_invalid_assignment_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/object_assignments/{}'
+ .format(configuration["management"]["url"],
+ data_mock.shared_ids["object"]["invalid_assignment_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'object_assignments':
+ data_mock.object_assignment_mock_invalid_assignment_id
+ }
+ )
+
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ object_assignments = cache_obj.get_object_assignments(
+ data_mock.shared_ids["object"]["invalid_assignment_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(object_assignments) == 0
+
+
+def test_get_object_assignment_none_perimeter(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ object_assignments = cache_obj.get_object_assignments(
+ data_mock.shared_ids["policy"]["policy_id_2"],
+ None,
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(object_assignments) == 0
+
+
+def test_get_object_assignment_invalid_category_failure(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ object_assignments = cache_obj.get_object_assignments(
+ data_mock.shared_ids["policy"]["policy_id_1"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_2"],
+ data_mock.shared_ids["category"]["invalid_category_id_1"])
+ assert len(object_assignments) == 0
+
+
+# ==========================================
+# tests for get (action_assignment) in cache
+# ==========================================
+
+def test_get_action_assignment_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ action_assignments = cache_obj.get_action_assignments(
+ data_mock.shared_ids["policy"]["policy_id_1"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_3"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert action_assignments is not None
+
+
+def test_get_action_assignment_no_policy(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ with pytest.raises(exceptions.PolicyUnknown) as exception_info:
+ cache_obj.get_action_assignments(None,
+ data_mock.shared_ids["perimeter"]["perimeter_id_2"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert str(exception_info.value) == '400: Policy Unknown'
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_action_assignment_invalid_object_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/action_assignments/{}'
+ .format(configuration["management"]["url"],
+ data_mock.shared_ids["action"]["invalid_action_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'action_assignments':
+ data_mock.action_assignment_mock_invalid_action_id
+ }
+ )
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ action_assignments = cache_obj.get_action_assignments(
+ data_mock.shared_ids["action"]["invalid_action_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(action_assignments) == 0
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_action_assignment_invalid_category_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/action_assignments/{}'
+ .format(configuration["management"]["url"],
+ data_mock.shared_ids["action"]["invalid_category_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'action_assignments':
+ data_mock.action_assignment_mock_invalid_category_id
+ }
+ )
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ action_assignments = cache_obj.get_action_assignments(
+ data_mock.shared_ids["action"]["invalid_category_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(action_assignments) == 0
+
+
+@requests_mock.Mocker(kw='mock')
+def test_get_action_assignment_invalid_assignment_id(configuration, **kwargs):
+ from moon_cache import cache
+ kwargs['mock'].get('{}/policies/{}/action_assignments/{}'
+ .format(configuration["management"]["url"],
+ data_mock.shared_ids["action"]["invalid_assignment_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"]),
+ json={
+ 'action_assignments':
+ data_mock.action_assignment_mock_invalid_assignment_id
+ }
+ )
+
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ action_assignments = cache_obj.get_action_assignments(
+ data_mock.shared_ids["action"]["invalid_assignment_id"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_1"],
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(action_assignments) == 0
+
+
+def test_get_action_assignment_none_perimeter(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ action_assignments = cache_obj.get_action_assignments(
+ data_mock.shared_ids["policy"]["policy_id_2"],
+ None,
+ data_mock.shared_ids["category"]["category_id_1"])
+ assert len(action_assignments) == 0
+
+
+def test_get_action_assignment_invalid_category_failure(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration["management"]["url"])
+ action_assignments = cache_obj.get_action_assignments(
+ data_mock.shared_ids["policy"]["policy_id_1"],
+ data_mock.shared_ids["perimeter"]["perimeter_id_3"],
+ data_mock.shared_ids["category"]["invalid_category_id_1"])
+ assert len(action_assignments) == 0
+
+
+# ==================================
+# tests for helper function in cache
+# ==================================
+
+def test_get_policy_from_meta_rules_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration.get("management")['url'])
+ policy_id = cache_obj.get_policy_from_meta_rules(
+ data_mock.shared_ids["meta_rule"]["meta_rule_id_1"])
+ assert policy_id is not None
+
+
+''' Tests for the container and security-pipeline functions of the cache are not
+    included for now; the pdp object and the /pods endpoint need to be mocked
+    correctly first.
+'''
+
+
+def test_get_policy_from_meta_rules_failure(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration.get("management")['url'])
+ meta_rule_id = 'meta_rule_id3'
+ policy_id = cache_obj.get_policy_from_meta_rules(meta_rule_id)
+ assert policy_id is None
+
+
+def test_get_pdp_from_vim_project_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration.get("management")['url'])
+ vim_project_id = 'vim_project_id1'
+ pdp_key = cache_obj.get_pdp_from_vim_project(vim_project_id)
+ assert pdp_key is not None
+
+
+def test_get_pdp_from_vim_project_failure(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration.get("management")['url'])
+ vim_project_id = 'vim_project_id2'
+ pdp_key = cache_obj.get_pdp_from_vim_project(vim_project_id)
+ assert pdp_key is None
+
+
+def test_get_vim_project_id_from_policy_id_success(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration.get("management")['url'])
+ vim_project_id = cache_obj.get_vim_project_id_from_policy_id(
+ data_mock.shared_ids["policy"]["policy_id_1"])
+ assert vim_project_id is not None
+
+
+def test_get_vim_project_id_from_policy_id_failure(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration.get("management")['url'])
+ policy_id = 'policy_id_3'
+ vim_project_id = cache_obj.get_vim_project_id_from_policy_id(policy_id)
+ assert vim_project_id is None
+
+
+def test_get_pipeline_url(configuration):
+ from moon_cache import cache
+ cache_obj = cache.Cache.getInstance(manager_url=configuration.get("management")['url'])
+ cache_obj.set_current_server(url="http://127.0.0.1:10000", api_key="")
+ cache_obj.add_pipeline("policy_id_1", {
+ "name": "test",
+ "description": "test",
+ "host": "127.0.0.1",
+ "port": 20000,
+ })
+ cache_obj.add_pipeline("policy_id_2", {
+ "name": "test",
+ "description": "test",
+ "host": "127.0.0.1",
+ "port": 20001,
+ })
+ url = cache_obj.get_pipeline_url(pipeline_id="policy_id_1")
+ assert url == "http://127.0.0.1:20000"
+ url = cache_obj.get_pipeline_url(pipeline_id="policy_id_2")
+ assert url == "http://127.0.0.1:20001"
diff --git a/moon_cache/tests/unit_python/conftest.py b/moon_cache/tests/unit_python/conftest.py
new file mode 100644
index 00000000..6b271d7a
--- /dev/null
+++ b/moon_cache/tests/unit_python/conftest.py
@@ -0,0 +1,151 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+import pytest
+import requests_mock
+import yaml
+import mock_repo
+import mock_repo.urls as register_urls
+import mock_repo.data as data_mock
+
+__CONF = """
+database:
+ url: sqlite:////tmp/database_test.db
+ driver: moon_manager.plugins.sql
+ migration_dir: moon_manager.api.db.migrations
+
+management:
+ url: http://127.0.0.1:8000
+ user: admin
+ password: admin
+ token_file: /tmp/moon.pwd
+
+orchestration:
+ driver: moon_manager.plugins.pyorchestrator
+ connection: local
+ slaves:
+ port: 10000...10100
+ pipelines:
+ port: 20000...20100
+
+information:
+ driver: moon_manager.plugins.moon_openstack_plugin
+ openstack:
+ url: http://keystone:5000/v3
+ user: admin
+ password: p4ssw0rd
+ domain: default
+ project: admin
+ check_token: false
+ certificate: false
+
+plugins:
+ directory: /var/moon/plugins
+
+components:
+ manager:
+ port: 8080
+ bind: 0.0.0.0
+ hostname: manager
+
+logging:
+ version: 1
+
+ formatters:
+ brief:
+ format: "%(levelname)s %(name)s %(m, confessage)-30s"
+ custom:
+ format: "%(asctime)-15s %(levelname)s %(name)s %(m, confessage)s"
+
+ handlers:
+ console:
+ class : logging.StreamHandler
+ formatter: custom
+ level : INFO
+ stream : ext://sys.stdout
+ file:
+ class : logging.handlers.RotatingFileHandler
+ formatter: custom
+ level : DEBUG
+ filename: /tmp/moon.log
+ maxBytes: 1048576
+ backupCount: 3
+
+ loggers:
+ moon:
+ level: DEBUG
+ handlers: [console, file]
+ propagate: no
+
+ root:
+ level: ERROR
+ handlers: [console]
+"""
+
+
+@pytest.fixture
+def configuration():
+ return yaml.load(__CONF)
+
+
+def register_cache(m):
+ """ Modify the response from Requests module
+ """
+ conf = yaml.load(__CONF)
+ # register_urls.register_components(m, conf)
+ # register_urls.register_keystone(m, conf)
+
+ register_urls.register_pdp(m, conf)
+ register_urls.register_pipelines(m, conf)
+ register_urls.register_meta_rules(m, conf)
+ register_urls.register_policies(m, conf)
+ register_urls.register_slaves(m, conf)
+ register_urls.register_models(m, conf)
+
+ register_urls.register_policy_subject(m, conf, data_mock.shared_ids["policy"]["policy_id_1"])
+ register_urls.register_policy_subject_invalid_response(m, conf, data_mock.shared_ids["policy"]["policy_id_invalid_response"])
+
+ register_urls.register_policy_object(m, conf, data_mock.shared_ids["policy"]["policy_id_1"])
+ register_urls.register_policy_object_invalid_response(m, conf, data_mock.shared_ids["policy"]["policy_id_invalid_response"])
+
+ register_urls.register_policy_action(m, conf, data_mock.shared_ids["policy"]["policy_id_1"])
+ register_urls.register_policy_action_invalid_response(m, conf, data_mock.shared_ids["policy"]["policy_id_invalid_response"])
+
+ register_urls.register_policy_subject_assignment(m, conf, data_mock.shared_ids["policy"]["policy_id_1"], data_mock.shared_ids["perimeter"]["perimeter_id_1"])
+
+ register_urls.register_policy_subject_assignment_list(m, conf, data_mock.shared_ids["policy"]["policy_id_2"])
+
+ register_urls.register_policy_object_assignment(m, conf, data_mock.shared_ids["policy"]["policy_id_1"], data_mock.shared_ids["perimeter"]["perimeter_id_2"])
+
+ register_urls.register_policy_object_assignment_list(m, conf, data_mock.shared_ids["policy"]["policy_id_2"])
+
+ register_urls.register_policy_action_assignment(m, conf, data_mock.shared_ids["policy"]["policy_id_1"], data_mock.shared_ids["perimeter"]["perimeter_id_3"])
+
+ register_urls.register_policy_action_assignment_list(m, conf, data_mock.shared_ids["policy"]["policy_id_2"])
+
+ register_urls.register_attributes(m, conf)
+
+ # register_urls.register_policy_action_assignment(m, conf, "policy_id_2", "perimeter_id_2")
+ # register_urls.register_policy_action_assignment(m, conf, "policy_id_2", "perimeter_id_2")
+ # register_urls.register_policy_action_assignment(m, conf, "policy_id_2", "perimeter_id_2")
+
+ register_urls.register_rules(m, conf, "policy_id1")
+
+
+@pytest.fixture(autouse=True)
+def no_requests(monkeypatch):
+ """ Modify the response from Requests module
+ """
+ with requests_mock.Mocker(real_http=True) as m:
+ register_cache(m)
+ print("End registering URI")
+ yield m
diff --git a/moon_cache/tests/unit_python/mock_repo/__init__.py b/moon_cache/tests/unit_python/mock_repo/__init__.py
new file mode 100644
index 00000000..1856aa2c
--- /dev/null
+++ b/moon_cache/tests/unit_python/mock_repo/__init__.py
@@ -0,0 +1,12 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
diff --git a/moon_cache/tests/unit_python/mock_repo/data.py b/moon_cache/tests/unit_python/mock_repo/data.py
new file mode 100644
index 00000000..ff5e25a8
--- /dev/null
+++ b/moon_cache/tests/unit_python/mock_repo/data.py
@@ -0,0 +1,306 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+
+shared_ids = {
+ "policy": {
+ "policy_id_1": "policy_id_1",
+ "policy_id_2": "policy_id_2",
+ "policy_id_3": "policy_id_3",
+ "policy_id_invalid_response": "policy_id_invalid_response"
+ },
+ "category": {
+ "category_id_1": "category_id_1",
+ "invalid_category_id_1": " invalid_category_id_1"
+ },
+ "perimeter": {
+ "perimeter_id_1": "subject_id_1",
+ "perimeter_id_2": "object_id_1",
+ "perimeter_id_3": "action_id_1"
+ },
+ "meta_rule": {
+ "meta_rule_id_1": "meta_rule_id_1",
+ "meta_rule_id_2": "meta_rule_id_2"
+ },
+ "rule": {
+ "rule_id_1": "rule_id_2",
+ "rule_id_2": "rule_id_2"
+ },
+ "model": {
+ "model_id_1": "model_id_1"
+ },
+ "subject": {
+ "subject_id_1": "subject_id_1",
+ "invalid_subject_id": "invalid_subject_id",
+ "invalid_category_id": "invalid_category_id",
+ "invalid_assignment_id": "invalid_assignment_id"
+ },
+ "object": {
+ "object_id_1": "object_id_1",
+ "invalid_object_id": "invalid_object_id",
+ "invalid_category_id": "invalid_category_id",
+ "invalid_assignment_id": "invalid_assignment_id"
+ },
+ "action": {
+ "action_id_1": "action_id_1",
+ "invalid_action_id": "invalid_action_id",
+ "invalid_category_id": "invalid_category_id",
+ "invalid_assignment_id": "invalid_assignment_id"
+ }
+}
+
+pdp_mock = {
+ "pdp_id1": {
+ "name": "...",
+ "security_pipeline": ["policy_id_1", "policy_id_2"],
+ "vim_project_id": "vim_project_id1",
+ "description": "...",
+ }
+}
+
+meta_rules_mock = {
+ shared_ids["meta_rule"]["meta_rule_id_1"]: {
+ "name": "meta_rule1",
+ "algorithm": "name of the meta rule algorithm",
+ "subject_categories": ["subject_category_id1",
+ "subject_category_id2"],
+ "object_categories": ["object_category_id1"],
+ "action_categories": ["action_category_id1"]
+ },
+ shared_ids["meta_rule"]["meta_rule_id_2"]: {
+ "name": "name of the meta rules2",
+ "algorithm": "name of the meta rule algorithm",
+ "subject_categories": ["subject_category_id1",
+ "subject_category_id2"],
+ "object_categories": ["object_category_id1"],
+ "action_categories": ["action_category_id1"]
+ }
+}
+
+policies_mock = {
+ shared_ids["policy"]["policy_id_1"]: {
+ "name": "test_policy1",
+ "model_id": shared_ids["model"]["model_id_1"],
+ "genre": "authz",
+ "description": "test",
+ },
+ shared_ids["policy"]["policy_id_2"]: {
+ "name": "test_policy2",
+ "model_id": shared_ids["model"]["model_id_1"],
+ "genre": "authz",
+ "description": "test",
+ }
+}
+
+subject_mock = {
+ shared_ids["policy"]["policy_id_1"]: {
+ "subject_id": {
+ "name": "subject_name",
+ "vim_id": "vim_project_id1",
+ "description": "a description"
+ }
+ },
+ shared_ids["policy"]["policy_id_invalid_response"]: {
+ "subject_id": {
+ "name": "subject_name",
+ "vim_id": "vim_project_id1",
+ "description": "a description"
+ }
+ }
+
+}
+
+subject_assignment_mock = {
+ shared_ids["subject"]["subject_id_1"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "subject_id": "subject_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"],
+ }
+}
+
+subject_assignment_mock_invalid_subject_id = {
+ shared_ids["subject"]["invalid_subject_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "subject_id_invalid": "subject_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"],
+ }
+}
+
+subject_assignment_mock_invalid_category_id = {
+ shared_ids["subject"]["invalid_category_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "subject_id": "subject_id_1",
+ "category_id_invalid": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"],
+ }
+}
+
+subject_assignment_mock_invalid_assignment_id = {
+ shared_ids["subject"]["invalid_assignment_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "subject_id": "subject_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments_invalid": ["data_id_1, data_id_2"],
+ }
+}
+
+object_mock = {
+ shared_ids["policy"]["policy_id_1"]: {
+ "object_id": {
+ "name": "object_name",
+ "description": "a description"
+ }
+ }
+}
+
+object_assignment_mock = {
+ shared_ids["object"]["object_id_1"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "object_id": "object_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+object_assignment_mock_invalid_object_id = {
+ shared_ids["object"]["invalid_object_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "object_id": "object_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+object_assignment_mock_invalid_category_id = {
+ shared_ids["object"]["invalid_category_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "object_id": "object_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+object_assignment_mock_invalid_assignment_id = {
+ shared_ids["object"]["invalid_assignment_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "object_id": "object_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+action_mock = {
+ shared_ids["policy"]["policy_id_1"]: {
+ "action_id": {
+ "name": "action_name",
+ "description": "a description"
+ }
+ }
+}
+
+action_assignment_mock = {
+ shared_ids["action"]["action_id_1"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "action_id": "action_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+action_assignment_mock_invalid_action_id = {
+ shared_ids["action"]["invalid_action_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "action_id": "action_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+action_assignment_mock_invalid_category_id = {
+ shared_ids["action"]["invalid_category_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "action_id": "action_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+action_assignment_mock_invalid_assignment_id = {
+ shared_ids["action"]["invalid_assignment_id"]: {
+ "policy_id": shared_ids["policy"]["policy_id_1"],
+ "action_id": "action_id_1",
+ "category_id": shared_ids["category"]["category_id_1"],
+ "assignments": ["data_id_1, data_id_2"]
+ }
+}
+
+
+models_mock = {
+ shared_ids["model"]["model_id_1"]: {
+ "name": "test_model",
+ "description": "test",
+ "meta_rules": [shared_ids["meta_rule"]["meta_rule_id_1"]]
+ }
+}
+
+rules_mock = {
+ "rules": {
+ "meta_rule_id": shared_ids["meta_rule"]["meta_rule_id_1"],
+ shared_ids["rule"]["rule_id_1"]: {
+ "rule": ["subject_data_id1",
+ "object_data_id1",
+ "action_data_id1"],
+ "instructions": (
+ {"decision": "grant"},
+ # "grant" to immediately exit,
+ # "continue" to wait for the result of next policy
+ # "deny" to deny the request
+ )
+ },
+ shared_ids["rule"]["rule_id_2"]: {
+ "rule": ["subject_data_id2",
+ "object_data_id2",
+ "action_data_id2"],
+ "instructions": (
+ {
+ "update": {
+ "operation": "add",
+ # operations may be "add" or "delete"
+ "target": "rbac:role:admin"
+ # add the role admin to the current user
+ }
+ },
+ {"chain": {"name": "rbac"}}
+ # chain with the policy named rbac
+ )
+ }
+ }
+}
+
+pipelines_mock = {
+ "policy_id_1": {
+ "starttime": 1548688120.3931532,
+ "port": 20000,
+ "server_ip": "127.0.0.1",
+ "status": "up",
+ "log": "/tmp/moon_policy_id_1.log"
+ },
+ "policy_id_2": {
+ "starttime": 1548688120.3931532,
+ "port": 20001,
+ "server_ip": "127.0.0.1",
+ "status": "up",
+ "log": "/tmp/moon_policy_id_2.log"
+ }
+}
diff --git a/moon_cache/tests/unit_python/mock_repo/urls.py b/moon_cache/tests/unit_python/mock_repo/urls.py
new file mode 100644
index 00000000..4b6d5133
--- /dev/null
+++ b/moon_cache/tests/unit_python/mock_repo/urls.py
@@ -0,0 +1,174 @@
+# Software Name: MOON
+
+# Version: 5.4
+
+# SPDX-FileCopyrightText: Copyright (c) 2018-2020 Orange and its contributors
+# SPDX-License-Identifier: Apache-2.0
+
+# This software is distributed under the 'Apache License 2.0',
+# the text of which is available at 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+# or see the "LICENSE" file for more details.
+
+
+import mock_repo.data as data_mock
+
+
+def register_model_any(m, conf, module_name, mocked_data, key=None):
+ if key is None:
+ key = module_name
+ m.register_uri(
+ 'GET', '{}/{}'.format(conf['management']['url'],
+ module_name),
+
+ json={key: mocked_data}
+ )
+
+
+def register_policy_any(m, conf, policy_id, module_name, mocked_data, key=None):
+ if key is None:
+ key = module_name
+ m.register_uri(
+ 'GET', '{}/{}/{}/{}'.format(conf['management']['url'],
+ 'policies',
+ policy_id,
+ module_name),
+ json={key: mocked_data}
+ )
+
+
+def register_policy(m, conf, policy_id, mocked_data):
+ m.register_uri(
+ 'GET', '{}/{}/{}'.format(conf['management']['url'],
+ 'policies',
+ policy_id),
+ json={"policies": mocked_data}
+ )
+
+
+def register_pipelines(m, conf):
+ m.register_uri(
+ 'GET', 'http://127.0.0.1:20000/pipelines/policy_id_1',
+ json={'pipelines': data_mock.pipelines_mock}
+ )
+
+
+def register_slaves(m, conf):
+ m.register_uri(
+ 'GET', 'http://127.0.0.1:10000/pipelines',
+ json={'pipelines': data_mock.pipelines_mock}
+ )
+
+
+def register_pdp(m, conf):
+ register_model_any(m, conf, 'pdp', data_mock.pdp_mock, 'pdps')
+
+
+def register_meta_rules(m, conf):
+ register_model_any(m, conf, 'meta_rules', data_mock.meta_rules_mock)
+
+
+def register_policies(m, conf):
+ for _policy_id in data_mock.policies_mock:
+ register_policy(m, conf, _policy_id, data_mock.policies_mock[_policy_id])
+ register_model_any(m, conf, 'policies', data_mock.policies_mock)
+
+
+def register_models(m, conf):
+ register_model_any(m, conf, 'models', data_mock.models_mock)
+
+
+def register_policy_subject(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'subjects', data_mock.subject_mock[policy_id])
+
+
+def register_policy_subject_invalid_response(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'subjects', data_mock.subject_mock[policy_id],
+ 'subjects_invalid_key')
+
+
+def register_policy_object(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'objects', data_mock.object_mock[policy_id])
+
+
+def register_policy_object_invalid_response(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'objects', data_mock.subject_mock[policy_id],
+ 'objects_invalid_key')
+
+
+def register_policy_action(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'actions', data_mock.action_mock[policy_id])
+
+
+def register_policy_action_invalid_response(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'actions', data_mock.subject_mock[policy_id],
+ 'actions_invalid_key')
+
+
+def register_policy_subject_assignment_list(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'subject_assignments',
+ data_mock.subject_assignment_mock)
+
+
+def register_policy_object_assignment_list(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'object_assignments',
+ data_mock.object_assignment_mock)
+
+
+def register_policy_action_assignment_list(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'action_assignments',
+ data_mock.action_assignment_mock)
+
+
+def register_policy_subject_assignment(m, conf, policy_id, perimeter_id):
+ m.register_uri(
+ 'GET', '{}/{}/{}/subject_assignments/{}'.format(conf['management']['url'],
+ 'policies',
+ policy_id,
+ perimeter_id),
+ json={'subject_assignments': data_mock.subject_assignment_mock}
+ )
+
+
+def register_policy_object_assignment(m, conf, policy_id, perimeter_id):
+ m.register_uri(
+ 'GET', '{}/{}/{}/object_assignments/{}'.format(conf['management']['url'],
+ 'policies',
+ policy_id,
+ perimeter_id),
+ json={'object_assignments': data_mock.object_assignment_mock}
+ )
+
+
+def register_policy_action_assignment(m, conf, policy_id, perimeter_id):
+ m.register_uri(
+ 'GET', '{}/{}/{}/action_assignments/{}'.format(conf['management']['url'],
+ 'policies',
+ policy_id,
+ perimeter_id),
+ json={'action_assignments': data_mock.action_assignment_mock}
+ )
+
+
+def register_rules(m, conf, policy_id):
+ register_policy_any(m, conf, policy_id, 'rules', data_mock.rules_mock)
+
+
+def register_attributes(m, conf):
+ m.register_uri(
+ 'GET', 'http://127.0.0.1:8000/attributes/mode',
+ json={
+ 'attributes': {
+ 'id': 'mode', 'value': 'build', 'values': ['build', 'run'], 'default': 'run'
+ }
+ }
+ )
+ m.register_uri(
+ 'GET', 'http://127.0.0.1:8000/attributes',
+ json={
+ 'attributes': {
+ 'mode': {
+ 'id': 'mode', 'value': 'build', 'values': ['build', 'run'], 'default': 'run'}
+ }
+ }
+ )
+
diff --git a/moon_cache/tests/unit_python/requirements.txt b/moon_cache/tests/unit_python/requirements.txt
new file mode 100644
index 00000000..ca33f6d4
--- /dev/null
+++ b/moon_cache/tests/unit_python/requirements.txt
@@ -0,0 +1,7 @@
+pytest
+pytest-benchmark
+pbr
+pytest-cov
+cliff
+requests_mock
+moon_utilities
\ No newline at end of file