author      Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-13 12:04:41 +0000
committer   Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-13 12:04:41 +0000
commit      975f66f2eebe9dadba04f275774d4ab83f74cf25 (patch)
tree        89bd26a93aaae6a25749145b7e4bca4a1e75b2be /ansible_collections/community/mongodb/plugins/cache
parent      Initial commit. (diff)
download    ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.tar.xz
            ansible-975f66f2eebe9dadba04f275774d4ab83f74cf25.zip
Adding upstream version 7.7.0+dfsg. (tag: upstream/7.7.0+dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/community/mongodb/plugins/cache')
-rw-r--r--   ansible_collections/community/mongodb/plugins/cache/__init__.py |   0
-rw-r--r--   ansible_collections/community/mongodb/plugins/cache/mongodb.py  | 204
2 files changed, 204 insertions(+), 0 deletions(-)
diff --git a/ansible_collections/community/mongodb/plugins/cache/__init__.py b/ansible_collections/community/mongodb/plugins/cache/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/community/mongodb/plugins/cache/__init__.py
diff --git a/ansible_collections/community/mongodb/plugins/cache/mongodb.py b/ansible_collections/community/mongodb/plugins/cache/mongodb.py
new file mode 100644
index 000000000..b51b7b293
--- /dev/null
+++ b/ansible_collections/community/mongodb/plugins/cache/mongodb.py
@@ -0,0 +1,204 @@
+# (c) 2018, Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+                    'status': ['preview'],
+                    'supported_by': 'community'}
+
+DOCUMENTATION = '''
+name: mongodb
+author:
+    - Matt Martz (@sivel)
+version_added: "1.0.0"
+short_description: Use MongoDB for caching
+description:
+    - This cache uses per host records saved in MongoDB.
+requirements:
+    - pymongo>=3
+options:
+    _uri:
+        description:
+            - MongoDB Connection String URI
+        required: False
+        env:
+            - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
+        ini:
+            - key: fact_caching_connection
+              section: defaults
+    _prefix:
+        description: User defined prefix to use when creating the DB entries
+        default: ansible_facts
+        env:
+            - name: ANSIBLE_CACHE_PLUGIN_PREFIX
+        ini:
+            - key: fact_caching_prefix
+              section: defaults
+    _timeout:
+        default: 86400
+        description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
+        env:
+            - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
+        ini:
+            - key: fact_caching_timeout
+              section: defaults
+        type: integer
+'''
+
+import datetime
+
+from contextlib import contextmanager
+
+from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.plugins.cache import BaseCacheModule
+from ansible.utils.display import Display
+from ansible.module_utils._text import to_native
+
+pymongo_missing = False
+
+try:
+    import pymongo
+except ImportError:
+    pymongo_missing = True
+
+display = Display()
+
+
+class CacheModule(BaseCacheModule):
+    """
+    A caching module backed by mongodb.
+    """
+    def __init__(self, *args, **kwargs):
+        try:
+            if pymongo_missing:
+                raise AnsibleError("The 'pymongo' python module is required for the mongodb fact cache, 'pip install pymongo>=3.0'")
+            super(CacheModule, self).__init__(*args, **kwargs)
+            self._connection = self.get_option('_uri')
+            self._timeout = int(self.get_option('_timeout'))
+            self._prefix = self.get_option('_prefix')
+        except KeyError:
+            self._connection = C.CACHE_PLUGIN_CONNECTION
+            self._timeout = int(C.CACHE_PLUGIN_TIMEOUT)
+            self._prefix = C.CACHE_PLUGIN_PREFIX
+
+        self._cache = {}
+        self._managed_indexes = False
+
+    def _ttl_index_exists(self, collection):
+        '''
+        Returns true if an index named ttl exists
+        on the given collection.
+        '''
+        exists = False
+        try:
+            indexes = collection.list_indexes()
+            for index in indexes:
+                if index["name"] == "ttl":
+                    exists = True
+                    break
+        except pymongo.errors.OperationFailure as excep:
+            raise AnsibleError('Error checking MongoDB index: %s' % to_native(excep))
+        return exists
+
+    def _manage_indexes(self, collection):
+        '''
+        This function manages indexes on the mongo collection.
+        We only do this once, at run time based on _managed_indexes,
+        rather than per connection instantiation as that would be overkill
+        '''
+        _timeout = self._timeout
+        if _timeout and _timeout > 0:
+            try:
+                collection.create_index(
+                    'date',
+                    name='ttl',
+                    expireAfterSeconds=_timeout
+                )
+            except pymongo.errors.OperationFailure:
+                # We make it here when the fact_caching_timeout was set to a different value between runs
+                if self._ttl_index_exists(collection):
+                    collection.drop_index('ttl')
+                return self._manage_indexes(collection)
+        else:
+            if self._ttl_index_exists(collection):
+                collection.drop_index('ttl')
+
+    @contextmanager
+    def _collection(self):
+        '''
+        This is a context manager for opening and closing mongo connections as needed. This exists as to not create a global
+        connection, due to pymongo not being fork safe (http://api.mongodb.com/python/current/faq.html#is-pymongo-fork-safe)
+        '''
+        mongo = pymongo.MongoClient(self._connection)
+        try:
+            db = mongo.get_default_database()
+        except pymongo.errors.ConfigurationError:
+            # We'll fall back to using ``ansible`` as the database if one was not provided
+            # in the MongoDB Connection String URI
+            db = mongo['ansible']
+
+        # The collection is hard coded as ``cache``, there are no configuration options for this
+        collection = db['cache']
+        if not self._managed_indexes:
+            # Only manage the indexes once per run, not per connection
+            self._manage_indexes(collection)
+            self._managed_indexes = True
+
+        yield collection
+
+        mongo.close()
+
+    def _make_key(self, key):
+        return '%s%s' % (self._prefix, key)
+
+    def get(self, key):
+        if key not in self._cache:
+            with self._collection() as collection:
+                value = collection.find_one({'_id': self._make_key(key)})
+                self._cache[key] = value['data']
+
+        return self._cache.get(key)
+
+    def set(self, key, value):
+        self._cache[key] = value
+        with self._collection() as collection:
+            collection.update_one(
+                {'_id': self._make_key(key)},
+                {
+                    '$set': {
+                        '_id': self._make_key(key),
+                        'data': value,
+                        'date': datetime.datetime.utcnow()
+                    }
+                },
+                upsert=True
+            )
+
+    def keys(self):
+        with self._collection() as collection:
+            return [doc['_id'] for doc in collection.find({}, {'_id': True})]
+
+    def contains(self, key):
+        with self._collection() as collection:
+            return bool(collection.count({'_id': self._make_key(key)}))
+
+    def delete(self, key):
+        del self._cache[key]
+        with self._collection() as collection:
+            collection.delete_one({'_id': self._make_key(key)})
+
+    def flush(self):
+        with self._collection() as collection:
+            collection.delete_many({})
+
+    def copy(self):
+        with self._collection() as collection:
+            return dict((d['_id'], d['data']) for d in collection.find({}))
+
+    def __getstate__(self):
+        return dict()
+
+    def __setstate__(self, data):
+        self.__init__()
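
For readers wiring this up, the ini keys in the DOCUMENTATION block above are the standard Ansible fact-cache settings, so a minimal ansible.cfg would look like the sketch below. The fully qualified plugin name and the connection URI are assumptions based on the collection layout and a local mongod, not something the diff itself states:

[defaults]
fact_caching = community.mongodb.mongodb
fact_caching_connection = mongodb://localhost:27017/ansible
fact_caching_prefix = ansible_facts
fact_caching_timeout = 86400

The database segment of the URI ('ansible' here) is what get_default_database() resolves inside _collection(); if it is omitted, the plugin falls back to a database named 'ansible' anyway.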
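
set() upserts a single document per host, keyed by _make_key(), which concatenates the prefix and the cache key with no separator. For a hypothetical host 'web1' under the default prefix, the stored document would look roughly like this sketch (the hostname and fact data are made up):

import datetime

# Illustrative shape of one document in the ``cache`` collection
doc = {
    '_id': 'ansible_factsweb1',                         # _make_key(): prefix + key, no separator
    'data': {'ansible_os_family': 'Debian'},            # the cached facts dict
    'date': datetime.datetime(2024, 4, 13, 12, 4, 41),  # consumed by the TTL index
}

Since get() and set() also populate the in-process self._cache dict, repeated lookups for the same host within one run hit MongoDB only once.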
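
_manage_indexes() leans on two create_index() behaviours: the call is a no-op when an identical index already exists, but raises OperationFailure when an index of the same name exists with a different expireAfterSeconds, which is why changing fact_caching_timeout between runs lands in the drop-and-recreate branch. A minimal standalone sketch of the same pattern, assuming a local mongod and illustrative database/collection names:

import pymongo

client = pymongo.MongoClient('mongodb://localhost:27017')
coll = client['ansible']['cache']


def ensure_ttl(coll, timeout):
    try:
        # No-op if an identical 'ttl' index already exists
        coll.create_index('date', name='ttl', expireAfterSeconds=timeout)
    except pymongo.errors.OperationFailure:
        # Same name but a different expireAfterSeconds: create_index cannot
        # change that in place, so drop the index and recreate it
        coll.drop_index('ttl')
        coll.create_index('date', name='ttl', expireAfterSeconds=timeout)


ensure_ttl(coll, 3600)

Expired documents are then removed by the server's background TTL monitor, which runs roughly once a minute, so entries may briefly outlive the timeout.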
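
Because MongoClient is not fork-safe, _collection() deliberately opens a short-lived client for every cache operation instead of holding one on the plugin instance. A generic sketch of that pattern with illustrative names; unlike the plugin, this version closes the client in a finally block so the connection is released even when the body raises:

from contextlib import contextmanager

import pymongo


@contextmanager
def collection(uri, db_name='ansible', coll_name='cache'):
    client = pymongo.MongoClient(uri)
    try:
        yield client[db_name][coll_name]
    finally:
        client.close()


with collection('mongodb://localhost:27017') as cache:
    print(cache.count_documents({}))

The cost is one connection handshake per get/set/delete, which the plugin accepts in exchange for safety under Ansible's forked workers.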